From 98820922e96de46a7ff1e019fd5c5c83d513c1ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 29 Jul 2022 10:12:54 +0200 Subject: [PATCH 01/26] Add option to include docstrings with stubgen Add a --include-docstrings flag to stubgen This was suggested in #11965. When using this flag, the .pyi files will include docstrings for Python classes and functions and for C extension functions. --- mypy/fastparse.py | 4 ++++ mypy/nodes.py | 4 ++++ mypy/stubgen.py | 27 ++++++++++++++++++++++++--- mypy/stubgenc.py | 36 +++++++++++++++++++++++++----------- test-data/unit/stubgen.test | 17 +++++++++++++++++ 5 files changed, 74 insertions(+), 14 deletions(-) diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 3be7444198fc..6196490b324d 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -1028,6 +1028,8 @@ def do_func_def( # FuncDef overrides set_line -- can't use self.set_line func_def.set_line(lineno, n.col_offset, end_line, end_column) retval = func_def + if ast3.get_docstring(n): + func_def.docstring = ast3.get_docstring(n, clean=False) self.class_and_function_stack.pop() return retval @@ -1137,6 +1139,8 @@ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef: else: cdef.line = n.lineno cdef.deco_line = n.decorator_list[0].lineno if n.decorator_list else None + if ast3.get_docstring(n): + cdef.docstring = ast3.get_docstring(n, clean=False) cdef.column = n.col_offset cdef.end_line = getattr(n, "end_lineno", None) cdef.end_column = getattr(n, "end_col_offset", None) diff --git a/mypy/nodes.py b/mypy/nodes.py index 3da5dc6ee1e9..325d2eaf97f6 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -774,6 +774,7 @@ class FuncDef(FuncItem, SymbolNode, Statement): "is_abstract", "original_def", "deco_line", + "docstring", ) # Note that all __init__ args must have default values @@ -794,6 +795,7 @@ def __init__( self.original_def: Union[None, FuncDef, Var, Decorator] = None # Used for error reporting (to keep backwad compatibility with pre-3.8) 
self.deco_line: Optional[int] = None + self.docstring = None @property def name(self) -> str: @@ -1070,6 +1072,7 @@ class ClassDef(Statement): "analyzed", "has_incompatible_baseclass", "deco_line", + "docstring", ) name: str # Name of the class without module prefix @@ -1111,6 +1114,7 @@ def __init__( self.has_incompatible_baseclass = False # Used for error reporting (to keep backwad compatibility with pre-3.8) self.deco_line: Optional[int] = None + self.docstring: str = None def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_class_def(self) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 08f86d96be11..bfc7684c76ee 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -206,6 +206,7 @@ def __init__( verbose: bool, quiet: bool, export_less: bool, + include_docstrings: bool, ) -> None: # See parse_options for descriptions of the flags. self.pyversion = pyversion @@ -224,6 +225,7 @@ def __init__( self.verbose = verbose self.quiet = quiet self.export_less = export_less + self.include_docstrings = include_docstrings class StubSource: @@ -572,6 +574,7 @@ def __init__( include_private: bool = False, analyzed: bool = False, export_less: bool = False, + include_docstrings: bool = False, ) -> None: # Best known value of __all__. self._all_ = _all_ @@ -587,6 +590,7 @@ def __init__( self._toplevel_names: List[str] = [] self._pyversion = pyversion self._include_private = include_private + self._include_docstrings = include_docstrings self.import_tracker = ImportTracker() # Was the tree semantically analysed before? 
self.analyzed = analyzed @@ -754,7 +758,11 @@ def visit_func_def( retfield = " -> " + retname self.add(", ".join(args)) - self.add(f"){retfield}: ...\n") + self.add(f"){retfield}:") + if self._include_docstrings and o.docstring: + self.add(f'\n{self._indent} """{o.docstring}"""\n{self._indent} ') + + self.add(" ...\n") self._state = FUNC def is_none_expr(self, expr: Expression) -> bool: @@ -926,8 +934,10 @@ def visit_class_def(self, o: ClassDef) -> None: if base_types: self.add(f"({', '.join(base_types)})") self.add(":\n") - n = len(self._output) self._indent += " " + if o.docstring: + self.add(f'{self._indent}"""{o.docstring}"""\n') + n = len(self._output) self._vars.append([]) super().visit_class_def(o) self._indent = self._indent[:-4] @@ -1605,6 +1615,7 @@ def generate_stub_from_ast( pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION, include_private: bool = False, export_less: bool = False, + include_docstrings: bool = False, ) -> None: """Use analysed (or just parsed) AST to generate type stub for single file. @@ -1617,6 +1628,7 @@ def generate_stub_from_ast( include_private=include_private, analyzed=not parse_only, export_less=export_less, + include_docstrings=include_docstrings, ) assert mod.ast is not None, "This function must be used only with analyzed modules" mod.ast.accept(gen) @@ -1677,6 +1689,7 @@ def generate_stubs(options: Options) -> None: options.pyversion, options.include_private, options.export_less, + options.include_docstrings, ) # Separately analyse C modules using different logic. 
@@ -1688,7 +1701,7 @@ def generate_stubs(options: Options) -> None: target = os.path.join(options.output_dir, target) files.append(target) with generate_guarded(mod.module, target, options.ignore_errors, options.verbose): - generate_stub_for_c_module(mod.module, target, sigs=sigs, class_sigs=class_sigs) + generate_stub_for_c_module(mod.module, target, sigs=sigs, class_sigs=class_sigs, include_docstrings=options.include_docstrings) num_modules = len(py_modules) + len(c_modules) if not options.quiet and num_modules > 0: print("Processed %d modules" % num_modules) @@ -1743,6 +1756,13 @@ def parse_options(args: List[str]) -> Options: "don't implicitly export all names imported from other modules " "in the same package" ), ) + parser.add_argument( + "--include-docstrings", + action="store_true", + help=( + "include existing docstrings with the stubs" + ), + ) parser.add_argument("-v", "--verbose", action="store_true", help="show more verbose messages") parser.add_argument("-q", "--quiet", action="store_true", help="show fewer messages") parser.add_argument( @@ -1823,6 +1843,7 @@ def parse_options(args: List[str]) -> Options: verbose=ns.verbose, quiet=ns.quiet, export_less=ns.export_less, + include_docstrings=ns.include_docstrings, ) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 66db4137fe50..649f7bbce511 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -44,6 +44,7 @@ def generate_stub_for_c_module( target: str, sigs: Optional[Dict[str, str]] = None, class_sigs: Optional[Dict[str, str]] = None, + include_docstrings: bool = False, ) -> None: """Generate stub for C module. 
@@ -64,7 +65,7 @@ def generate_stub_for_c_module( items = sorted(module.__dict__.items(), key=lambda x: x[0]) for name, obj in items: if is_c_function(obj): - generate_c_function_stub(module, name, obj, functions, imports=imports, sigs=sigs) + generate_c_function_stub(module, name, obj, functions, imports=imports, sigs=sigs, include_docstrings=include_docstrings) done.add(name) types: List[str] = [] for name, obj in items: @@ -72,7 +73,7 @@ def generate_stub_for_c_module( continue if is_c_type(obj): generate_c_type_stub( - module, name, obj, types, imports=imports, sigs=sigs, class_sigs=class_sigs + module, name, obj, types, imports=imports, sigs=sigs, class_sigs=class_sigs, include_docstrings=include_docstrings ) done.add(name) variables = [] @@ -156,10 +157,11 @@ def generate_c_function_stub( sigs: Optional[Dict[str, str]] = None, class_name: Optional[str] = None, class_sigs: Optional[Dict[str, str]] = None, + include_docstrings: bool = False ) -> None: """Generate stub for a single function or method. - The result (always a single line) will be appended to 'output'. + The result will be appended to 'output'. If necessary, any required names will be added to 'imports'. The 'class_name' is used to find signature of __init__ or __new__ in 'class_sigs'. 
@@ -170,7 +172,7 @@ def generate_c_function_stub( class_sigs = {} ret_type = "None" if name == "__init__" and class_name else "Any" - + docstr = None if ( name in ("__new__", "__init__") and name not in sigs @@ -236,13 +238,23 @@ def generate_c_function_stub( if is_overloaded: output.append("@overload") - output.append( - "def {function}({args}) -> {ret}: ...".format( - function=name, - args=", ".join(sig), - ret=strip_or_import(signature.ret_type, module, imports), + if include_docstrings and docstr: + output.append( + "def {function}({args}) -> {ret}:\n\"\"\"{docstr}\"\"\"\n...".format( + function=name, + args=", ".join(sig), + ret=strip_or_import(signature.ret_type, module, imports), + docstr=docstr, + ) + ) + else: + output.append( + "def {function}({args}) -> {ret}: ...".format( + function=name, + args=", ".join(sig), + ret=strip_or_import(signature.ret_type, module, imports), + ) ) - ) def strip_or_import(typ: str, module: ModuleType, imports: List[str]) -> str: @@ -339,6 +351,7 @@ def generate_c_type_stub( imports: List[str], sigs: Optional[Dict[str, str]] = None, class_sigs: Optional[Dict[str, str]] = None, + include_docstrings: bool = False ) -> None: """Generate stub for a single class using runtime introspection. @@ -382,6 +395,7 @@ def generate_c_type_stub( sigs=sigs, class_name=class_name, class_sigs=class_sigs, + include_docstrings=include_docstrings, ) elif is_c_property(value): done.add(attr) @@ -397,7 +411,7 @@ def generate_c_type_stub( ) elif is_c_type(value): generate_c_type_stub( - module, attr, value, types, imports=imports, sigs=sigs, class_sigs=class_sigs + module, attr, value, types, imports=imports, sigs=sigs, class_sigs=class_sigs, include_docstrings=include_docstrings ) done.add(attr) diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index a7c2ae6d21fd..82d5fb8797b8 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -2688,3 +2688,20 @@ def f(): return 0 [out] def f(): ... 
+ +[case testIncludeDocstrings] +# flags: --include-docstrings +class A: + """class docstring""" + def func(): + """func docstring""" + ... + def nodoc(): + ... +[out] +class A: + """class docstring""" + def func() -> None: + """func docstring""" + ... + def nodoc() -> None: ... \ No newline at end of file From 6b8558602613b11bd56784b357303bddf3d9bc0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 29 Jul 2022 10:18:18 +0200 Subject: [PATCH 02/26] Add --include-docstings to stubgen docs. --- docs/source/stubgen.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/source/stubgen.rst b/docs/source/stubgen.rst index f06c9c066bb7..2de0743572e7 100644 --- a/docs/source/stubgen.rst +++ b/docs/source/stubgen.rst @@ -163,6 +163,11 @@ Additional flags Instead, only export imported names that are not referenced in the module that contains the import. +.. option:: --include-docstrings + + Include docstrings in stubs. This will add docstrings to Python function and + classes stubs and to C extension function stubs. + .. option:: --search-path PATH Specify module search directories, separated by colons (only used if From 74bbee869c9ea0d3d071623852f17ab644295081 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 29 Jul 2022 10:31:23 +0200 Subject: [PATCH 03/26] Fix missing newline --- test-data/unit/stubgen.test | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 82d5fb8797b8..9182cad08222 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -2704,4 +2704,4 @@ class A: def func() -> None: """func docstring""" ... - def nodoc() -> None: ... \ No newline at end of file + def nodoc() -> None: ... 
From 29798375dbbdf655f4a4c6563f9eff9d88af0fbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 29 Jul 2022 10:39:21 +0200 Subject: [PATCH 04/26] Fix code style --- mypy/stubgen.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index e53dc7491bbf..d8a3381428cd 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -1690,7 +1690,13 @@ def generate_stubs(options: Options) -> None: target = os.path.join(options.output_dir, target) files.append(target) with generate_guarded(mod.module, target, options.ignore_errors, options.verbose): - generate_stub_for_c_module(mod.module, target, sigs=sigs, class_sigs=class_sigs, include_docstrings=options.include_docstrings) + generate_stub_for_c_module( + mod.module, + target, + sigs=sigs, + class_sigs=class_sigs, + include_docstrings=options.include_docstrings, + ) num_modules = len(py_modules) + len(c_modules) if not options.quiet and num_modules > 0: print("Processed %d modules" % num_modules) @@ -1748,9 +1754,7 @@ def parse_options(args: List[str]) -> Options: parser.add_argument( "--include-docstrings", action="store_true", - help=( - "include existing docstrings with the stubs" - ), + help=("include existing docstrings with the stubs"), ) parser.add_argument("-v", "--verbose", action="store_true", help="show more verbose messages") parser.add_argument("-q", "--quiet", action="store_true", help="show fewer messages") From b272ddf61ec85a3f8e354184cd5005b71efcfa4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 29 Jul 2022 12:25:48 +0200 Subject: [PATCH 05/26] Fix missing docstring argument --- mypy/stubgen.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index d8a3381428cd..ffab76f51ba5 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -1678,7 +1678,12 @@ def generate_stubs(options: Options) -> None: files.append(target) with generate_guarded(mod.module, 
target, options.ignore_errors, options.verbose): generate_stub_from_ast( - mod, target, options.parse_only, options.include_private, options.export_less + mod, + target, + options.parse_only, + options.include_private, + options.export_less, + include_docstrings=options.include_docstrings, ) # Separately analyse C modules using different logic. From eab95672afddfbe2684247c95e94ebc64062fc0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 29 Jul 2022 12:26:16 +0200 Subject: [PATCH 06/26] Fix code style --- mypy/stubgenc.py | 34 ++++++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 649f7bbce511..acc8811b11c1 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -65,7 +65,15 @@ def generate_stub_for_c_module( items = sorted(module.__dict__.items(), key=lambda x: x[0]) for name, obj in items: if is_c_function(obj): - generate_c_function_stub(module, name, obj, functions, imports=imports, sigs=sigs, include_docstrings=include_docstrings) + generate_c_function_stub( + module, + name, + obj, + functions, + imports=imports, + sigs=sigs, + include_docstrings=include_docstrings, + ) done.add(name) types: List[str] = [] for name, obj in items: @@ -73,7 +81,14 @@ def generate_stub_for_c_module( continue if is_c_type(obj): generate_c_type_stub( - module, name, obj, types, imports=imports, sigs=sigs, class_sigs=class_sigs, include_docstrings=include_docstrings + module, + name, + obj, + types, + imports=imports, + sigs=sigs, + class_sigs=class_sigs, + include_docstrings=include_docstrings, ) done.add(name) variables = [] @@ -157,7 +172,7 @@ def generate_c_function_stub( sigs: Optional[Dict[str, str]] = None, class_name: Optional[str] = None, class_sigs: Optional[Dict[str, str]] = None, - include_docstrings: bool = False + include_docstrings: bool = False, ) -> None: """Generate stub for a single function or method. 
@@ -240,7 +255,7 @@ def generate_c_function_stub( output.append("@overload") if include_docstrings and docstr: output.append( - "def {function}({args}) -> {ret}:\n\"\"\"{docstr}\"\"\"\n...".format( + 'def {function}({args}) -> {ret}:\n"""{docstr}"""\n...'.format( function=name, args=", ".join(sig), ret=strip_or_import(signature.ret_type, module, imports), @@ -351,7 +366,7 @@ def generate_c_type_stub( imports: List[str], sigs: Optional[Dict[str, str]] = None, class_sigs: Optional[Dict[str, str]] = None, - include_docstrings: bool = False + include_docstrings: bool = False, ) -> None: """Generate stub for a single class using runtime introspection. @@ -411,7 +426,14 @@ def generate_c_type_stub( ) elif is_c_type(value): generate_c_type_stub( - module, attr, value, types, imports=imports, sigs=sigs, class_sigs=class_sigs, include_docstrings=include_docstrings + module, + attr, + value, + types, + imports=imports, + sigs=sigs, + class_sigs=class_sigs, + include_docstrings=include_docstrings, ) done.add(attr) From e1812efe309114993272d706f510fe899c2e57e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 29 Jul 2022 12:26:31 +0200 Subject: [PATCH 07/26] Fix types --- mypy/nodes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index bd10ce6f0e5c..487cd32398ff 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -795,7 +795,7 @@ def __init__( self.original_def: Union[None, FuncDef, Var, Decorator] = None # Used for error reporting (to keep backwad compatibility with pre-3.8) self.deco_line: Optional[int] = None - self.docstring = None + self.docstring: Optional[str] = None @property def name(self) -> str: @@ -1114,7 +1114,7 @@ def __init__( self.has_incompatible_baseclass = False # Used for error reporting (to keep backwad compatibility with pre-3.8) self.deco_line: Optional[int] = None - self.docstring: str = None + self.docstring: Optional[str] = None def accept(self, visitor: StatementVisitor[T]) 
-> T: return visitor.visit_class_def(self) From eefc49ee1468991ee7305c872ae10efe417ffe23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 29 Jul 2022 13:46:22 +0200 Subject: [PATCH 08/26] Remove useless check --- mypy/fastparse.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 96c5f7be2da6..11d6d897c485 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -994,8 +994,8 @@ def do_func_def( # FuncDef overrides set_line -- can't use self.set_line func_def.set_line(lineno, n.col_offset, end_line, end_column) retval = func_def - if ast3.get_docstring(n): - func_def.docstring = ast3.get_docstring(n, clean=False) + + func_def.docstring = ast3.get_docstring(n, clean=False) self.class_and_function_stack.pop() return retval @@ -1105,8 +1105,7 @@ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef: else: cdef.line = n.lineno cdef.deco_line = n.decorator_list[0].lineno if n.decorator_list else None - if ast3.get_docstring(n): - cdef.docstring = ast3.get_docstring(n, clean=False) + cdef.docstring = ast3.get_docstring(n, clean=False) cdef.column = n.col_offset cdef.end_line = getattr(n, "end_lineno", None) cdef.end_column = getattr(n, "end_col_offset", None) From a0e8647a35e39cd61aa6ad5760199c45ad9b23d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 29 Jul 2022 18:41:38 +0200 Subject: [PATCH 09/26] Fix superfluous whitespace --- mypy/fastparse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 11d6d897c485..aea91fa964b8 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -994,7 +994,7 @@ def do_func_def( # FuncDef overrides set_line -- can't use self.set_line func_def.set_line(lineno, n.col_offset, end_line, end_column) retval = func_def - + func_def.docstring = ast3.get_docstring(n, clean=False) self.class_and_function_stack.pop() return retval From 
acd3168b403513af72233d455477a36f2191ecb7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Mon, 1 Aug 2022 11:32:47 +0200 Subject: [PATCH 10/26] Add more stubgen docstrings tests --- test-data/unit/stubgen.test | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 9182cad08222..62e2a7089e1e 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -2698,6 +2698,12 @@ class A: ... def nodoc(): ... +class B: + def func(): + """func docstring""" + ... + def nodoc(): + ... [out] class A: """class docstring""" @@ -2705,3 +2711,34 @@ class A: """func docstring""" ... def nodoc() -> None: ... + +class B: + def func() -> None: + """func docstring""" + ... + def nodoc() -> None: ... + +[case testIgnoreDocstrings] +class A: + """class docstring""" + def func(): + """func docstring""" + ... + def nodoc(): + ... + +class B: + def func(): + """func docstring""" + ... + def nodoc(): + ... + +[out] +class A: + def func() -> None: ... + def nodoc() -> None: ... + +class B: + def func() -> None: ... + def nodoc() -> None: ... 
From ab642845cf325ebb9c1cb1357ab6976d4716ff0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Mon, 1 Aug 2022 11:33:29 +0200 Subject: [PATCH 11/26] Fix docstring option not checked --- mypy/stubgen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index ffab76f51ba5..f7f90bb75673 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -932,7 +932,7 @@ def visit_class_def(self, o: ClassDef) -> None: self.add(f"({', '.join(base_types)})") self.add(":\n") self._indent += " " - if o.docstring: + if self._include_docstrings and o.docstring: self.add(f'{self._indent}"""{o.docstring}"""\n') n = len(self._output) self._vars.append([]) From 7a36ff078fa35973e69ee203c022eb7973fc8cf6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Mon, 1 Aug 2022 11:33:44 +0200 Subject: [PATCH 12/26] Fix indentation for docstrings --- mypy/stubgenc.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index acc8811b11c1..f2999918858b 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -255,13 +255,14 @@ def generate_c_function_stub( output.append("@overload") if include_docstrings and docstr: output.append( - 'def {function}({args}) -> {ret}:\n"""{docstr}"""\n...'.format( + "def {function}({args}) -> {ret}:".format( function=name, args=", ".join(sig), ret=strip_or_import(signature.ret_type, module, imports), - docstr=docstr, ) ) + output.append(' """{docstr}"""'.format(docstr=docstr.strip())) + output.append(' ...') else: output.append( "def {function}({args}) -> {ret}: ...".format( From 97700e7f3e87f9b81382b94ea69547fb8ab74373 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Mon, 1 Aug 2022 11:34:46 +0200 Subject: [PATCH 13/26] Add docstrings inclusion flag to fastparse --- mypy/fastparse.py | 7 ++++--- mypy/options.py | 6 ++++++ mypy/stubgen.py | 1 + 3 files changed, 11 insertions(+), 3 deletions(-) diff --git 
a/mypy/fastparse.py b/mypy/fastparse.py index aea91fa964b8..7db4c931fe2c 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -994,8 +994,8 @@ def do_func_def( # FuncDef overrides set_line -- can't use self.set_line func_def.set_line(lineno, n.col_offset, end_line, end_column) retval = func_def - - func_def.docstring = ast3.get_docstring(n, clean=False) + if self.options.include_docstrings: + func_def.docstring = ast3.get_docstring(n, clean=False) self.class_and_function_stack.pop() return retval @@ -1105,7 +1105,8 @@ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef: else: cdef.line = n.lineno cdef.deco_line = n.decorator_list[0].lineno if n.decorator_list else None - cdef.docstring = ast3.get_docstring(n, clean=False) + if self.options.include_docstrings: + cdef.docstring = ast3.get_docstring(n, clean=False) cdef.column = n.col_offset cdef.end_line = getattr(n, "end_lineno", None) cdef.end_column = getattr(n, "end_col_offset", None) diff --git a/mypy/options.py b/mypy/options.py index 860c296cfbb0..388c4f56be4b 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -251,6 +251,12 @@ def __init__(self) -> None: # mypy. (Like mypyc.) self.preserve_asts = False + # If True, function and class docstrings will be extracted and retained. + # This isn't exposed as a command line option + # because it is intended for software integrating with + # mypy. (Like stubgen.) 
+ self.include_docstrings = False + # Paths of user plugins self.plugins: List[str] = [] diff --git a/mypy/stubgen.py b/mypy/stubgen.py index f7f90bb75673..d8816d791a49 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -1555,6 +1555,7 @@ def mypy_options(stubgen_options: Options) -> MypyOptions: options.python_version = stubgen_options.pyversion options.show_traceback = True options.transform_source = remove_misplaced_type_comments + options.include_docstrings = stubgen_options.include_docstrings return options From 03c285c1e2e041f3f784befe7f27ae8d9489b1e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Mon, 1 Aug 2022 12:06:51 +0200 Subject: [PATCH 14/26] Add stubgenc doctring test --- misc/test-stubgenc.sh | 29 +++- test-data/pybind11_mypy_demo/src/main.cpp | 2 +- .../pybind11_mypy_demo/__init__.pyi | 0 .../pybind11_mypy_demo/basics.pyi | 134 ++++++++++++++++++ 4 files changed, 158 insertions(+), 7 deletions(-) create mode 100644 test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi create mode 100644 test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi diff --git a/misc/test-stubgenc.sh b/misc/test-stubgenc.sh index 7da135f0bf16..7298922d18a3 100755 --- a/misc/test-stubgenc.sh +++ b/misc/test-stubgenc.sh @@ -10,10 +10,27 @@ python -m pip install -r test-requirements.txt python -m pip install ./test-data/pybind11_mypy_demo python -m pip install . 
-# Remove expected stubs and generate new inplace -STUBGEN_OUTPUT_FOLDER=./test-data/pybind11_mypy_demo/stubgen -rm -rf $STUBGEN_OUTPUT_FOLDER/* -stubgen -p pybind11_mypy_demo -o $STUBGEN_OUTPUT_FOLDER +EXIT=0 -# Compare generated stubs to expected ones -git diff --exit-code $STUBGEN_OUTPUT_FOLDER +# performs the stubgenc test +# first argument is the test result folder +# everything else is passed to stubgen as its arguments +function stubgenc_test() { + # Remove expected stubs and generate new inplace + STUBGEN_OUTPUT_FOLDER=./test-data/pybind11_mypy_demo/$1 + rm -rf $STUBGEN_OUTPUT_FOLDER/* + stubgen -o $STUBGEN_OUTPUT_FOLDER "${@:2}" + + # Compare generated stubs to expected ones + git diff --exit-code $STUBGEN_OUTPUT_FOLDER + if [ $? -ne 0 ] + then + EXIT=$? + fi +} + +# create stubs without docstrings +stubgenc_test stubgen -p pybind11_mypy_demo +# create stubs with docstrings +stubgenc_test stubgen-include-docs -p pybind11_mypy_demo --include-docstrings +exit $EXIT \ No newline at end of file diff --git a/test-data/pybind11_mypy_demo/src/main.cpp b/test-data/pybind11_mypy_demo/src/main.cpp index 5cedef391b2d..ab31681a0b5a 100644 --- a/test-data/pybind11_mypy_demo/src/main.cpp +++ b/test-data/pybind11_mypy_demo/src/main.cpp @@ -119,7 +119,7 @@ void bind_basics(py::module& basics) { using namespace basics; // Functions - basics.def("answer", &answer); + basics.def("answer", &answer, "answer docstring"); // tests explicit docstrings basics.def("sum", &sum); basics.def("midpoint", &midpoint, py::arg("left"), py::arg("right")); basics.def("weighted_midpoint", weighted_midpoint, py::arg("left"), py::arg("right"), py::arg("alpha")=0.5); diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi 
b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi new file mode 100644 index 000000000000..efb61f500c5c --- /dev/null +++ b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi @@ -0,0 +1,134 @@ +from typing import ClassVar + +from typing import overload +PI: float + +class Point: + class AngleUnit: + __members__: ClassVar[dict] = ... # read-only + __entries: ClassVar[dict] = ... + degree: ClassVar[Point.AngleUnit] = ... + radian: ClassVar[Point.AngleUnit] = ... + def __init__(self, value: int) -> None: + """__init__(self: pybind11_mypy_demo.basics.Point.AngleUnit, value: int) -> None""" + ... + def __eq__(self, other: object) -> bool: + """__eq__(self: object, other: object) -> bool""" + ... + def __getstate__(self) -> int: + """__getstate__(self: object) -> int""" + ... + def __hash__(self) -> int: + """__hash__(self: object) -> int""" + ... + def __index__(self) -> int: + """__index__(self: pybind11_mypy_demo.basics.Point.AngleUnit) -> int""" + ... + def __int__(self) -> int: + """__int__(self: pybind11_mypy_demo.basics.Point.AngleUnit) -> int""" + ... + def __ne__(self, other: object) -> bool: + """__ne__(self: object, other: object) -> bool""" + ... + def __setstate__(self, state: int) -> None: + """__setstate__(self: pybind11_mypy_demo.basics.Point.AngleUnit, state: int) -> None""" + ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + + class LengthUnit: + __members__: ClassVar[dict] = ... # read-only + __entries: ClassVar[dict] = ... + inch: ClassVar[Point.LengthUnit] = ... + mm: ClassVar[Point.LengthUnit] = ... + pixel: ClassVar[Point.LengthUnit] = ... + def __init__(self, value: int) -> None: + """__init__(self: pybind11_mypy_demo.basics.Point.LengthUnit, value: int) -> None""" + ... + def __eq__(self, other: object) -> bool: + """__eq__(self: object, other: object) -> bool""" + ... + def __getstate__(self) -> int: + """__getstate__(self: object) -> int""" + ... 
+ def __hash__(self) -> int: + """__hash__(self: object) -> int""" + ... + def __index__(self) -> int: + """__index__(self: pybind11_mypy_demo.basics.Point.LengthUnit) -> int""" + ... + def __int__(self) -> int: + """__int__(self: pybind11_mypy_demo.basics.Point.LengthUnit) -> int""" + ... + def __ne__(self, other: object) -> bool: + """__ne__(self: object, other: object) -> bool""" + ... + def __setstate__(self, state: int) -> None: + """__setstate__(self: pybind11_mypy_demo.basics.Point.LengthUnit, state: int) -> None""" + ... + @property + def name(self) -> str: ... + @property + def value(self) -> int: ... + angle_unit: ClassVar[Point.AngleUnit] = ... + length_unit: ClassVar[Point.LengthUnit] = ... + x_axis: ClassVar[Point] = ... # read-only + y_axis: ClassVar[Point] = ... # read-only + origin: ClassVar[Point] = ... + x: float + y: float + @overload + def __init__(self) -> None: + """__init__(*args, **kwargs) +Overloaded function. + +1. __init__(self: pybind11_mypy_demo.basics.Point) -> None + +2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" + ... + @overload + def __init__(self, x: float, y: float) -> None: + """__init__(*args, **kwargs) +Overloaded function. + +1. __init__(self: pybind11_mypy_demo.basics.Point) -> None + +2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" + ... + @overload + def distance_to(self, x: float, y: float) -> float: + """distance_to(*args, **kwargs) +Overloaded function. + +1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float + +2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" + ... + @overload + def distance_to(self, other: Point) -> float: + """distance_to(*args, **kwargs) +Overloaded function. + +1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float + +2. 
distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" + ... + @property + def length(self) -> float: ... + +def answer() -> int: + """answer() -> int + +answer docstring""" + ... +def midpoint(left: float, right: float) -> float: + """midpoint(left: float, right: float) -> float""" + ... +def sum(arg0: int, arg1: int) -> int: + """sum(arg0: int, arg1: int) -> int""" + ... +def weighted_midpoint(left: float, right: float, alpha: float = ...) -> float: + """weighted_midpoint(left: float, right: float, alpha: float = 0.5) -> float""" + ... From 0a61327357200324c005e3978bd9a19056315188 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Mon, 1 Aug 2022 14:48:59 +0200 Subject: [PATCH 15/26] Fix coding style --- mypy/stubgenc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index f2999918858b..970332e7c0a5 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -262,7 +262,7 @@ def generate_c_function_stub( ) ) output.append(' """{docstr}"""'.format(docstr=docstr.strip())) - output.append(' ...') + output.append(" ...") else: output.append( "def {function}({args}) -> {ret}: ...".format( From bb5b0c88bc194c525e736e6df854765177622d53 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 19 Aug 2022 11:06:45 +0200 Subject: [PATCH 16/26] Fix type annotation --- .env | 1 + mypy/nodes.py | 4 ++-- stubtest/__init__.py | 33 +++++++++++++++++++++++++++++++++ stubtest/__main__.py | 0 stubtest/__main__.pyi | 0 5 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 .env create mode 100644 stubtest/__init__.py create mode 100644 stubtest/__main__.py create mode 100644 stubtest/__main__.pyi diff --git a/.env b/.env new file mode 100644 index 000000000000..b7b7222b7d66 --- /dev/null +++ b/.env @@ -0,0 +1 @@ +PYTHONPATH=/home/chylek/data/mypy \ No newline at end of file diff --git a/mypy/nodes.py b/mypy/nodes.py index 
d8fa076d649d..f032043aafa3 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -803,7 +803,7 @@ def __init__( self.original_def: None | FuncDef | Var | Decorator = None # Used for error reporting (to keep backwad compatibility with pre-3.8) self.deco_line: int | None = None - self.docstring: int | None = None + self.docstring: str | None = None @property def name(self) -> str: @@ -1125,7 +1125,7 @@ def __init__( self.has_incompatible_baseclass = False # Used for error reporting (to keep backwad compatibility with pre-3.8) self.deco_line: int | None = None - self.docstring: int | None = None + self.docstring: str | None = None def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_class_def(self) diff --git a/stubtest/__init__.py b/stubtest/__init__.py new file mode 100644 index 000000000000..f670f2cdac08 --- /dev/null +++ b/stubtest/__init__.py @@ -0,0 +1,33 @@ +def hovno(): + """ + dělá hovno + """ + print("dělá hovno") + + +def something(a: int) -> str: + """taky něco dělá + + Args: + a (int): _description_ + + Returns: + str: _description_ + """ + return a * "something" + + +class Klass: + """dsfdsafkdjfsd + + dafdsfasdf + d + safsdfsad + """ + + def __init__(self) -> None: + """_summary_""" + pass + + def ret(self, nuthin: bool) -> None: + return diff --git a/stubtest/__main__.py b/stubtest/__main__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubtest/__main__.pyi b/stubtest/__main__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 From fa1f5290f3b28548935ca664213a05dbd9e2d45f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 19 Aug 2022 12:11:33 +0200 Subject: [PATCH 17/26] Remove files added by mistake --- .env | 1 - stubtest/__init__.py | 33 --------------------------------- stubtest/__main__.py | 0 stubtest/__main__.pyi | 0 4 files changed, 34 deletions(-) delete mode 100644 .env delete mode 100644 stubtest/__init__.py delete mode 100644 stubtest/__main__.py delete mode 100644 
stubtest/__main__.pyi diff --git a/.env b/.env deleted file mode 100644 index b7b7222b7d66..000000000000 --- a/.env +++ /dev/null @@ -1 +0,0 @@ -PYTHONPATH=/home/chylek/data/mypy \ No newline at end of file diff --git a/stubtest/__init__.py b/stubtest/__init__.py deleted file mode 100644 index f670f2cdac08..000000000000 --- a/stubtest/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -def hovno(): - """ - dělá hovno - """ - print("dělá hovno") - - -def something(a: int) -> str: - """taky něco dělá - - Args: - a (int): _description_ - - Returns: - str: _description_ - """ - return a * "something" - - -class Klass: - """dsfdsafkdjfsd - - dafdsfasdf - d - safsdfsad - """ - - def __init__(self) -> None: - """_summary_""" - pass - - def ret(self, nuthin: bool) -> None: - return diff --git a/stubtest/__main__.py b/stubtest/__main__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/stubtest/__main__.pyi b/stubtest/__main__.pyi deleted file mode 100644 index e69de29bb2d1..000000000000 From 79d30fc1a1a6cf6f054cd9985c2142a327b91440 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 19 Aug 2022 12:12:08 +0200 Subject: [PATCH 18/26] Add newline --- misc/test-stubgenc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/misc/test-stubgenc.sh b/misc/test-stubgenc.sh index 7298922d18a3..dfd2c8774d44 100755 --- a/misc/test-stubgenc.sh +++ b/misc/test-stubgenc.sh @@ -33,4 +33,4 @@ function stubgenc_test() { stubgenc_test stubgen -p pybind11_mypy_demo # create stubs with docstrings stubgenc_test stubgen-include-docs -p pybind11_mypy_demo --include-docstrings -exit $EXIT \ No newline at end of file +exit $EXIT From 3fc3447767ec86af61dfd1845226e159b8f05d05 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Fri, 19 Aug 2022 20:31:17 +0200 Subject: [PATCH 19/26] Fix issues found by shellcheck --- misc/test-stubgenc.sh | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/misc/test-stubgenc.sh 
b/misc/test-stubgenc.sh index dfd2c8774d44..5beb26340e7c 100755 --- a/misc/test-stubgenc.sh +++ b/misc/test-stubgenc.sh @@ -3,7 +3,7 @@ set -e set -x -cd "$(dirname $0)/.." +cd "$(dirname "$0")/.." # Install dependencies, demo project and mypy python -m pip install -r test-requirements.txt @@ -18,12 +18,11 @@ EXIT=0 function stubgenc_test() { # Remove expected stubs and generate new inplace STUBGEN_OUTPUT_FOLDER=./test-data/pybind11_mypy_demo/$1 - rm -rf $STUBGEN_OUTPUT_FOLDER/* - stubgen -o $STUBGEN_OUTPUT_FOLDER "${@:2}" + rm -rf "${STUBGEN_OUTPUT_FOLDER:?}/*" + stubgen -o "$STUBGEN_OUTPUT_FOLDER" "${@:2}" # Compare generated stubs to expected ones - git diff --exit-code $STUBGEN_OUTPUT_FOLDER - if [ $? -ne 0 ] + if ! git diff --exit-code "$STUBGEN_OUTPUT_FOLDER"; then EXIT=$? fi From d232bfe710caf84e2c993382bb20a09daff65c3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Sat, 20 Aug 2022 15:13:07 +0200 Subject: [PATCH 20/26] Fix formatting in stubgenc's docstring output --- mypy/stubgenc.py | 4 +- .../pybind11_mypy_demo/basics.pyi | 68 ++++++------------- 2 files changed, 24 insertions(+), 48 deletions(-) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 6223bb4c54ba..8b9e677838c0 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -262,8 +262,8 @@ def generate_c_function_stub( ret=strip_or_import(signature.ret_type, module, imports), ) ) - output.append(' """{docstr}"""'.format(docstr=docstr.strip())) - output.append(" ...") + docstr_indented = "\n ".join(docstr.strip().split("\n")) + output.extend(f' """{docstr_indented}"""'.split("\n")) else: output.append( "def {function}({args}) -> {ret}: ...".format( diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi index efb61f500c5c..df1f156330f7 100644 --- a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi +++ 
b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi @@ -11,28 +11,20 @@ class Point: radian: ClassVar[Point.AngleUnit] = ... def __init__(self, value: int) -> None: """__init__(self: pybind11_mypy_demo.basics.Point.AngleUnit, value: int) -> None""" - ... def __eq__(self, other: object) -> bool: """__eq__(self: object, other: object) -> bool""" - ... def __getstate__(self) -> int: """__getstate__(self: object) -> int""" - ... def __hash__(self) -> int: """__hash__(self: object) -> int""" - ... def __index__(self) -> int: """__index__(self: pybind11_mypy_demo.basics.Point.AngleUnit) -> int""" - ... def __int__(self) -> int: """__int__(self: pybind11_mypy_demo.basics.Point.AngleUnit) -> int""" - ... def __ne__(self, other: object) -> bool: """__ne__(self: object, other: object) -> bool""" - ... def __setstate__(self, state: int) -> None: """__setstate__(self: pybind11_mypy_demo.basics.Point.AngleUnit, state: int) -> None""" - ... @property def name(self) -> str: ... @property @@ -46,28 +38,20 @@ class Point: pixel: ClassVar[Point.LengthUnit] = ... def __init__(self, value: int) -> None: """__init__(self: pybind11_mypy_demo.basics.Point.LengthUnit, value: int) -> None""" - ... def __eq__(self, other: object) -> bool: """__eq__(self: object, other: object) -> bool""" - ... def __getstate__(self) -> int: """__getstate__(self: object) -> int""" - ... def __hash__(self) -> int: """__hash__(self: object) -> int""" - ... def __index__(self) -> int: """__index__(self: pybind11_mypy_demo.basics.Point.LengthUnit) -> int""" - ... def __int__(self) -> int: """__int__(self: pybind11_mypy_demo.basics.Point.LengthUnit) -> int""" - ... def __ne__(self, other: object) -> bool: """__ne__(self: object, other: object) -> bool""" - ... def __setstate__(self, state: int) -> None: """__setstate__(self: pybind11_mypy_demo.basics.Point.LengthUnit, state: int) -> None""" - ... @property def name(self) -> str: ... 
@property @@ -82,53 +66,45 @@ class Point: @overload def __init__(self) -> None: """__init__(*args, **kwargs) -Overloaded function. - -1. __init__(self: pybind11_mypy_demo.basics.Point) -> None - -2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" - ... + Overloaded function. + + 1. __init__(self: pybind11_mypy_demo.basics.Point) -> None + + 2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" @overload def __init__(self, x: float, y: float) -> None: """__init__(*args, **kwargs) -Overloaded function. - -1. __init__(self: pybind11_mypy_demo.basics.Point) -> None - -2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" - ... + Overloaded function. + + 1. __init__(self: pybind11_mypy_demo.basics.Point) -> None + + 2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" @overload def distance_to(self, x: float, y: float) -> float: """distance_to(*args, **kwargs) -Overloaded function. - -1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float - -2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" - ... + Overloaded function. + + 1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float + + 2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" @overload def distance_to(self, other: Point) -> float: """distance_to(*args, **kwargs) -Overloaded function. - -1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float - -2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" - ... + Overloaded function. + + 1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float + + 2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" @property def length(self) -> float: ... 
def answer() -> int: """answer() -> int - -answer docstring""" - ... + + answer docstring""" def midpoint(left: float, right: float) -> float: """midpoint(left: float, right: float) -> float""" - ... def sum(arg0: int, arg1: int) -> int: """sum(arg0: int, arg1: int) -> int""" - ... def weighted_midpoint(left: float, right: float, alpha: float = ...) -> float: """weighted_midpoint(left: float, right: float, alpha: float = 0.5) -> float""" - ... From 3f849b9f84875635c8019f9f4ed9a58272aea48d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Sat, 20 Aug 2022 15:14:13 +0200 Subject: [PATCH 21/26] Remove ellipsis if a function body is a docstring --- mypy/stubgen.py | 8 +++++--- test-data/unit/stubgen.test | 27 ++++++++++++++++++--------- 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 08ca353392f3..71cf2f354fd1 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -761,9 +761,10 @@ def visit_func_def( self.add(", ".join(args)) self.add(f"){retfield}:") if self._include_docstrings and o.docstring: - self.add(f'\n{self._indent} """{o.docstring}"""\n{self._indent} ') + self.add(f'\n{self._indent} """{o.docstring}"""\n') + else: + self.add(" ...\n") - self.add(" ...\n") self._state = FUNC def is_none_expr(self, expr: Expression) -> bool: @@ -947,7 +948,8 @@ def visit_class_def(self, o: ClassDef) -> None: if len(self._output) == n: if self._state == EMPTY_CLASS and sep is not None: self._output[sep] = "" - self._output[-1] = self._output[-1][:-1] + " ...\n" + if not (self._include_docstrings and o.docstring): + self._output[-1] = self._output[-1][:-1] + " ...\n" self._state = EMPTY_CLASS else: self._state = CLASS diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 6a3b6df1d5e3..f39a790ffe6f 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -2709,9 +2709,13 @@ def f(): ... 
[case testIncludeDocstrings] # flags: --include-docstrings class A: - """class docstring""" + """class docstring + + a multiline docstring""" def func(): - """func docstring""" + """func docstring + + don't forget to indent""" ... def nodoc(): ... @@ -2723,24 +2727,29 @@ class B: ... [out] class A: - """class docstring""" + """class docstring + + a multiline docstring""" def func() -> None: - """func docstring""" - ... + """func docstring + + don't forget to indent""" def nodoc() -> None: ... class B: def func() -> None: """func docstring""" - ... def nodoc() -> None: ... [case testIgnoreDocstrings] class A: - """class docstring""" + """class docstring + + a multiline docstring""" def func(): - """func docstring""" - ... + """func docstring + + don't forget to indent""" def nodoc(): ... From 5d0cd9db580b65a4a91bf7ce2d2b456888f5203f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Mon, 22 Aug 2022 14:21:52 +0200 Subject: [PATCH 22/26] Fix invalid strings due to quotes --- mypy/stubgen.py | 6 ++-- mypy/stubgenc.py | 6 ++-- mypy/util.py | 17 ++++++++++ test-data/pybind11_mypy_demo/src/main.cpp | 4 +-- .../pybind11_mypy_demo/basics.pyi | 8 +++-- test-data/unit/stubgen.test | 32 ++++++++++++++----- 6 files changed, 56 insertions(+), 17 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 71cf2f354fd1..18736cb69884 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -761,7 +761,8 @@ def visit_func_def( self.add(", ".join(args)) self.add(f"){retfield}:") if self._include_docstrings and o.docstring: - self.add(f'\n{self._indent} """{o.docstring}"""\n') + docstring = mypy.util.quote_docstring(o.docstring) + self.add(f"\n{self._indent} {docstring}\n") else: self.add(" ...\n") @@ -938,7 +939,8 @@ def visit_class_def(self, o: ClassDef) -> None: self.add(":\n") self._indent += " " if self._include_docstrings and o.docstring: - self.add(f'{self._indent}"""{o.docstring}"""\n') + docstring = mypy.util.quote_docstring(o.docstring) + 
self.add(f"{self._indent}{docstring}\n") n = len(self._output) self._vars.append([]) super().visit_class_def(o) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 8b9e677838c0..b06be54e227a 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -14,6 +14,7 @@ from typing import Any, Mapping from typing_extensions import Final +import mypy.util from mypy.moduleinspect import is_c_module from mypy.stubdoc import ( ArgSig, @@ -262,8 +263,9 @@ def generate_c_function_stub( ret=strip_or_import(signature.ret_type, module, imports), ) ) - docstr_indented = "\n ".join(docstr.strip().split("\n")) - output.extend(f' """{docstr_indented}"""'.split("\n")) + docstr_quoted = mypy.util.quote_docstring(docstr.strip()) + docstr_indented = "\n ".join(docstr_quoted.split("\n")) + output.extend(f" {docstr_indented}".split("\n")) else: output.append( "def {function}({args}) -> {ret}: ...".format( diff --git a/mypy/util.py b/mypy/util.py index 686a71c4331b..13da69080c63 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -802,3 +802,20 @@ def plural_s(s: int | Sized) -> str: return "s" else: return "" + + +def quote_docstring(docstr: str) -> str: + """Returns docstring correctly encapsulated in a single or double quoted form.""" + # Uses repr to get hint on the correct quotes and escape everything properly. + # Creating multiline string for prettier output. + docstr_repr = "\n".join(re.split(r"(?<=[^\\])\\n", repr(docstr))) + + if docstr_repr.startswith("'"): + # Enforce double quotes when it's safe to do so. + # That is when double quotes are not in the string + # or when it doesn't end with a single quote. 
+ if '"' not in docstr_repr[1:-1] and docstr_repr[-2] != "'": + return f'"""{docstr_repr[1:-1]}"""' + return f"''{docstr_repr}''" + else: + return f'""{docstr_repr}""' diff --git a/test-data/pybind11_mypy_demo/src/main.cpp b/test-data/pybind11_mypy_demo/src/main.cpp index ab31681a0b5a..075168123ec8 100644 --- a/test-data/pybind11_mypy_demo/src/main.cpp +++ b/test-data/pybind11_mypy_demo/src/main.cpp @@ -119,8 +119,8 @@ void bind_basics(py::module& basics) { using namespace basics; // Functions - basics.def("answer", &answer, "answer docstring"); // tests explicit docstrings - basics.def("sum", &sum); + basics.def("answer", &answer, "answer docstring, with end quote\""); // tests explicit docstrings + basics.def("sum", &sum, "multiline docstring test, edge case quotes \"\"\"'''"); basics.def("midpoint", &midpoint, py::arg("left"), py::arg("right")); basics.def("weighted_midpoint", weighted_midpoint, py::arg("left"), py::arg("right"), py::arg("alpha")=0.5); diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi index df1f156330f7..eab1439cbee0 100644 --- a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi +++ b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi @@ -99,12 +99,14 @@ class Point: def length(self) -> float: ... def answer() -> int: - """answer() -> int + '''answer() -> int - answer docstring""" + answer docstring, with end quote"''' def midpoint(left: float, right: float) -> float: """midpoint(left: float, right: float) -> float""" def sum(arg0: int, arg1: int) -> int: - """sum(arg0: int, arg1: int) -> int""" + '''sum(arg0: int, arg1: int) -> int + + multiline docstring test, edge case quotes """\'\'\'''' def weighted_midpoint(left: float, right: float, alpha: float = ...) 
-> float: """weighted_midpoint(left: float, right: float, alpha: float = 0.5) -> float""" diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index f39a790ffe6f..1c0e2aa0eb6d 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -2714,16 +2714,25 @@ class A: a multiline docstring""" def func(): """func docstring - don't forget to indent""" ... def nodoc(): ... class B: - def func(): - """func docstring""" + def quoteA(): + '''func docstring with quotes"""\\n + and an end quote\'''' ... - def nodoc(): + def quoteB(): + '''func docstring with quotes""" + \'\'\' + and an end quote\\"''' + ... + def quoteC(): + """func docstring with end quote\\\"""" + ... + def quoteD(): + r'''raw with quotes\"''' ... [out] class A: @@ -2732,14 +2741,21 @@ class A: a multiline docstring""" def func() -> None: """func docstring - don't forget to indent""" def nodoc() -> None: ... class B: - def func() -> None: - """func docstring""" - def nodoc() -> None: ... + def quoteA() -> None: + '''func docstring with quotes"""\\n + and an end quote\'''' + def quoteB() -> None: + '''func docstring with quotes""" + \'\'\' + and an end quote\\"''' + def quoteC() -> None: + '''func docstring with end quote\\"''' + def quoteD() -> None: + '''raw with quotes\\"''' [case testIgnoreDocstrings] class A: From 7ea672784cba85e6c99c5f81bf639e516fff51d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Wed, 5 Oct 2022 12:48:13 +0200 Subject: [PATCH 23/26] Fix incorrectly resolved merge --- mypy/stubgenc.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 81d9fc6fe6b6..6f844794f880 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -359,13 +359,6 @@ def generate_c_function_stub( ret=strip_or_import(signature.ret_type, module, imports), ) ) - output.append( - "def {function}({args}) -> {ret}: ...".format( - function=name, - args=", ".join(args), - ret=strip_or_import(signature.ret_type, module, 
imports), - ) - ) def strip_or_import(typ: str, module: ModuleType, imports: list[str]) -> str: From 182c9e60032e6f1174c6eb11683d1f930f3a19d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Mon, 30 Jan 2023 16:14:14 +0100 Subject: [PATCH 24/26] Merge branch 'master' into issue-11965 --- .github/workflows/mypy_primer.yml | 1 + .github/workflows/mypy_primer_comment.yml | 28 +- .github/workflows/test.yml | 30 +- .pre-commit-config.yaml | 8 +- README.md | 20 +- docs/source/additional_features.rst | 2 +- docs/source/command_line.rst | 7 +- docs/source/common_issues.rst | 2 + docs/source/config_file.rst | 8 +- docs/source/error_code_list.rst | 63 + docs/source/error_code_list2.rst | 60 +- docs/source/generics.rst | 92 +- docs/source/index.rst | 6 +- docs/source/literal_types.rst | 4 +- docs/source/more_types.rst | 3 +- docs/source/running_mypy.rst | 48 +- docs/source/stubtest.rst | 4 + docs/source/type_narrowing.rst | 2 +- misc/docker/Dockerfile | 12 + misc/docker/README.md | 101 + misc/docker/build.py | 46 + misc/docker/run-wrapper.sh | 13 + misc/docker/run.sh | 15 + misc/fix_annotate.py | 9 +- misc/perf_compare.py | 146 ++ misc/sync-typeshed.py | 9 +- misc/upload-pypi.py | 19 +- mypy/applytype.py | 82 +- mypy/build.py | 98 +- mypy/checker.py | 454 +++-- mypy/checkexpr.py | 397 ++-- mypy/checkmember.py | 87 +- mypy/config_parser.py | 23 +- mypy/constant_fold.py | 116 ++ mypy/constraints.py | 382 ++-- mypy/copytype.py | 2 +- mypy/dmypy_server.py | 17 +- mypy/erasetype.py | 4 +- mypy/errorcodes.py | 22 +- mypy/errors.py | 18 + mypy/evalexpr.py | 204 ++ mypy/expandtype.py | 253 ++- mypy/fastparse.py | 22 + mypy/fixup.py | 2 + mypy/ipc.py | 5 - mypy/join.py | 19 +- mypy/main.py | 22 +- mypy/meet.py | 35 +- mypy/message_registry.py | 9 + mypy/messages.py | 265 ++- mypy/mixedtraverser.py | 5 + mypy/modulefinder.py | 37 +- mypy/moduleinspect.py | 2 +- mypy/mro.py | 4 +- mypy/nodes.py | 346 +++- mypy/options.py | 20 +- mypy/partially_defined.py | 482 ++++- 
mypy/plugins/attrs.py | 22 +- mypy/plugins/common.py | 2 +- mypy/plugins/dataclasses.py | 38 +- mypy/plugins/enums.py | 3 +- mypy/report.py | 87 +- mypy/semanal.py | 727 ++++--- mypy/semanal_classprop.py | 8 +- mypy/semanal_main.py | 23 +- mypy/semanal_namedtuple.py | 33 +- mypy/semanal_newtype.py | 10 +- mypy/semanal_shared.py | 23 +- mypy/semanal_typeargs.py | 92 +- mypy/semanal_typeddict.py | 44 +- mypy/server/astdiff.py | 33 +- mypy/server/astmerge.py | 8 +- mypy/server/aststrip.py | 13 +- mypy/server/deps.py | 20 +- mypy/server/update.py | 46 +- mypy/solve.py | 4 +- mypy/stats.py | 2 +- mypy/strconv.py | 8 +- mypy/stubgen.py | 9 +- mypy/stubinfo.py | 28 +- mypy/stubtest.py | 99 +- mypy/subtypes.py | 340 ++-- mypy/test/data.py | 5 +- mypy/test/helpers.py | 8 +- mypy/test/testcheck.py | 2 + mypy/test/testcmdline.py | 8 +- mypy/test/testconstraints.py | 24 +- mypy/test/testdeps.py | 4 +- mypy/test/testsemanal.py | 2 +- mypy/test/teststubtest.py | 62 +- mypy/test/testtypegen.py | 4 +- mypy/test/testtypes.py | 8 + mypy/test/typefixture.py | 38 +- mypy/traverser.py | 2 + mypy/treetransform.py | 19 +- mypy/tvar_scope.py | 3 +- mypy/type_visitor.py | 178 +- mypy/typeanal.py | 338 ++-- mypy/typeops.py | 70 +- mypy/types.py | 422 +++- mypy/typeshed/stdlib/_ast.pyi | 10 +- mypy/typeshed/stdlib/_codecs.pyi | 61 +- mypy/typeshed/stdlib/_curses.pyi | 7 +- mypy/typeshed/stdlib/_decimal.pyi | 4 +- mypy/typeshed/stdlib/_msi.pyi | 10 +- mypy/typeshed/stdlib/_operator.pyi | 32 +- mypy/typeshed/stdlib/_posixsubprocess.pyi | 42 +- mypy/typeshed/stdlib/_socket.pyi | 24 +- mypy/typeshed/stdlib/_tkinter.pyi | 6 +- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 33 +- mypy/typeshed/stdlib/_winapi.pyi | 8 +- mypy/typeshed/stdlib/abc.pyi | 9 +- mypy/typeshed/stdlib/antigravity.pyi | 4 +- mypy/typeshed/stdlib/array.pyi | 14 +- mypy/typeshed/stdlib/ast.pyi | 64 +- mypy/typeshed/stdlib/asyncio/base_events.pyi | 36 +- mypy/typeshed/stdlib/asyncio/events.pyi | 40 +- 
mypy/typeshed/stdlib/asyncio/runners.pyi | 9 +- mypy/typeshed/stdlib/asyncio/sslproto.pyi | 2 +- mypy/typeshed/stdlib/asyncio/streams.pyi | 11 +- mypy/typeshed/stdlib/asyncio/subprocess.pyi | 100 +- mypy/typeshed/stdlib/asyncio/tasks.pyi | 2 +- mypy/typeshed/stdlib/asyncio/transports.pyi | 6 +- mypy/typeshed/stdlib/asyncio/trsock.pyi | 29 +- .../stdlib/asyncio/windows_events.pyi | 10 +- mypy/typeshed/stdlib/asyncore.pyi | 4 +- mypy/typeshed/stdlib/base64.pyi | 4 +- mypy/typeshed/stdlib/binhex.pyi | 5 +- mypy/typeshed/stdlib/builtins.pyi | 132 +- mypy/typeshed/stdlib/bz2.pyi | 8 +- mypy/typeshed/stdlib/codecs.pyi | 11 +- mypy/typeshed/stdlib/collections/__init__.pyi | 7 +- mypy/typeshed/stdlib/compileall.pyi | 40 +- mypy/typeshed/stdlib/contextlib.pyi | 4 +- mypy/typeshed/stdlib/ctypes/__init__.pyi | 6 +- mypy/typeshed/stdlib/datetime.pyi | 44 +- mypy/typeshed/stdlib/dbm/__init__.pyi | 2 +- mypy/typeshed/stdlib/dbm/dumb.pyi | 3 + mypy/typeshed/stdlib/dbm/gnu.pyi | 8 +- mypy/typeshed/stdlib/dbm/ndbm.pyi | 8 +- mypy/typeshed/stdlib/difflib.pyi | 14 +- mypy/typeshed/stdlib/dis.pyi | 15 +- mypy/typeshed/stdlib/distutils/dist.pyi | 4 +- mypy/typeshed/stdlib/email/__init__.pyi | 2 +- mypy/typeshed/stdlib/email/base64mime.pyi | 12 +- mypy/typeshed/stdlib/email/feedparser.pyi | 2 +- mypy/typeshed/stdlib/email/header.pyi | 7 +- mypy/typeshed/stdlib/email/message.pyi | 13 +- .../stdlib/email/mime/application.pyi | 2 +- mypy/typeshed/stdlib/email/mime/audio.pyi | 2 +- mypy/typeshed/stdlib/email/mime/image.pyi | 2 +- mypy/typeshed/stdlib/email/parser.pyi | 2 +- mypy/typeshed/stdlib/email/quoprimime.pyi | 12 +- mypy/typeshed/stdlib/encodings/__init__.pyi | 4 +- mypy/typeshed/stdlib/encodings/utf_8.pyi | 7 +- mypy/typeshed/stdlib/encodings/utf_8_sig.pyi | 7 +- mypy/typeshed/stdlib/fcntl.pyi | 2 +- mypy/typeshed/stdlib/genericpath.pyi | 18 +- mypy/typeshed/stdlib/gzip.pyi | 16 +- mypy/typeshed/stdlib/hmac.pyi | 12 +- mypy/typeshed/stdlib/http/client.pyi | 16 +- 
mypy/typeshed/stdlib/http/server.pyi | 12 +- mypy/typeshed/stdlib/imaplib.pyi | 22 +- mypy/typeshed/stdlib/imp.pyi | 9 +- mypy/typeshed/stdlib/importlib/abc.pyi | 40 +- mypy/typeshed/stdlib/importlib/machinery.pyi | 23 +- mypy/typeshed/stdlib/importlib/util.pyi | 6 +- mypy/typeshed/stdlib/inspect.pyi | 2 +- mypy/typeshed/stdlib/io.pyi | 10 +- mypy/typeshed/stdlib/ipaddress.pyi | 4 +- mypy/typeshed/stdlib/itertools.pyi | 6 + mypy/typeshed/stdlib/json/__init__.pyi | 10 +- mypy/typeshed/stdlib/json/encoder.pyi | 4 +- mypy/typeshed/stdlib/logging/handlers.pyi | 4 +- mypy/typeshed/stdlib/lzma.pyi | 10 +- mypy/typeshed/stdlib/mailbox.pyi | 39 +- mypy/typeshed/stdlib/marshal.pyi | 35 +- mypy/typeshed/stdlib/math.pyi | 25 +- mypy/typeshed/stdlib/mmap.pyi | 7 +- mypy/typeshed/stdlib/msvcrt.pyi | 4 +- .../stdlib/multiprocessing/connection.pyi | 4 +- .../stdlib/multiprocessing/context.pyi | 4 +- .../stdlib/multiprocessing/reduction.pyi | 37 +- .../multiprocessing/resource_tracker.pyi | 4 +- mypy/typeshed/stdlib/multiprocessing/util.pyi | 4 +- mypy/typeshed/stdlib/netrc.pyi | 6 +- mypy/typeshed/stdlib/operator.pyi | 1 - mypy/typeshed/stdlib/os/__init__.pyi | 67 +- mypy/typeshed/stdlib/pathlib.pyi | 16 +- mypy/typeshed/stdlib/pickle.pyi | 13 +- mypy/typeshed/stdlib/pickletools.pyi | 6 +- mypy/typeshed/stdlib/plistlib.pyi | 14 +- mypy/typeshed/stdlib/posixpath.pyi | 8 +- mypy/typeshed/stdlib/pyexpat/__init__.pyi | 7 +- mypy/typeshed/stdlib/quopri.pyi | 13 +- mypy/typeshed/stdlib/shutil.pyi | 27 +- mypy/typeshed/stdlib/smtplib.pyi | 17 +- mypy/typeshed/stdlib/socket.pyi | 30 +- mypy/typeshed/stdlib/socketserver.pyi | 59 +- mypy/typeshed/stdlib/sqlite3/dbapi2.pyi | 12 +- mypy/typeshed/stdlib/ssl.pyi | 88 +- mypy/typeshed/stdlib/string.pyi | 11 +- mypy/typeshed/stdlib/struct.pyi | 4 +- mypy/typeshed/stdlib/subprocess.pyi | 409 ++-- mypy/typeshed/stdlib/sysconfig.pyi | 8 +- mypy/typeshed/stdlib/tarfile.pyi | 2 +- mypy/typeshed/stdlib/termios.pyi | 42 +- 
mypy/typeshed/stdlib/tkinter/commondialog.pyi | 4 +- mypy/typeshed/stdlib/tkinter/dialog.pyi | 2 +- mypy/typeshed/stdlib/tkinter/dnd.pyi | 2 +- mypy/typeshed/stdlib/tkinter/scrolledtext.pyi | 3 +- mypy/typeshed/stdlib/tokenize.pyi | 8 +- mypy/typeshed/stdlib/types.pyi | 10 +- mypy/typeshed/stdlib/typing.pyi | 10 +- mypy/typeshed/stdlib/unicodedata.pyi | 14 +- mypy/typeshed/stdlib/unittest/case.pyi | 18 +- mypy/typeshed/stdlib/unittest/mock.pyi | 19 +- mypy/typeshed/stdlib/urllib/parse.pyi | 48 +- mypy/typeshed/stdlib/urllib/request.pyi | 48 +- mypy/typeshed/stdlib/urllib/response.pyi | 6 +- mypy/typeshed/stdlib/venv/__init__.pyi | 3 + mypy/typeshed/stdlib/weakref.pyi | 9 +- mypy/typeshed/stdlib/winsound.pyi | 5 +- mypy/typeshed/stdlib/xml/__init__.pyi | 2 +- mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi | 14 +- mypy/typeshed/stdlib/xml/dom/minidom.pyi | 8 +- .../stdlib/xml/etree/ElementInclude.pyi | 3 +- .../typeshed/stdlib/xml/etree/ElementTree.pyi | 28 +- mypy/typeshed/stdlib/xml/parsers/__init__.pyi | 2 +- mypy/typeshed/stdlib/xml/sax/__init__.pyi | 6 +- mypy/typeshed/stdlib/xml/sax/xmlreader.pyi | 2 +- mypy/typeshed/stdlib/xmlrpc/client.pyi | 45 +- mypy/typeshed/stdlib/xmlrpc/server.pyi | 6 +- mypy/typeshed/stdlib/zipfile.pyi | 10 +- mypy/typeshed/stdlib/zipimport.pyi | 11 +- mypy/typeshed/stdlib/zlib.pyi | 26 +- mypy/typeshed/stdlib/zoneinfo/__init__.pyi | 5 +- mypy/typestate.py | 144 +- mypy/typetraverser.py | 3 + mypy/typevars.py | 10 +- mypy/typevartuples.py | 58 +- mypy/util.py | 16 +- mypy/version.py | 8 +- mypy_self_check.ini | 2 - mypyc/analysis/attrdefined.py | 13 +- mypyc/analysis/ircheck.py | 22 +- mypyc/build.py | 55 +- mypyc/codegen/emit.py | 55 +- mypyc/codegen/emitclass.py | 22 +- mypyc/codegen/emitfunc.py | 6 +- mypyc/codegen/emitmodule.py | 5 +- mypyc/codegen/emitwrapper.py | 8 +- mypyc/codegen/literals.py | 46 +- mypyc/common.py | 12 +- mypyc/ir/class_ir.py | 33 +- mypyc/ir/func_ir.py | 21 +- mypyc/ir/ops.py | 20 +- mypyc/ir/pprint.py | 13 +- 
mypyc/ir/rtypes.py | 38 + mypyc/irbuild/builder.py | 29 +- mypyc/irbuild/classdef.py | 53 +- mypyc/irbuild/constant_fold.py | 59 +- mypyc/irbuild/env_class.py | 4 +- mypyc/irbuild/expression.py | 82 +- mypyc/irbuild/for_helpers.py | 15 +- mypyc/irbuild/function.py | 105 +- mypyc/irbuild/ll_builder.py | 115 +- mypyc/irbuild/mapper.py | 2 +- mypyc/irbuild/match.py | 355 ++++ mypyc/irbuild/prebuildvisitor.py | 10 +- mypyc/irbuild/prepare.py | 241 ++- mypyc/irbuild/specialize.py | 54 +- mypyc/irbuild/statement.py | 63 +- mypyc/irbuild/util.py | 10 + mypyc/irbuild/visitor.py | 3 +- mypyc/irbuild/vtable.py | 6 +- mypyc/lib-rt/CPy.h | 14 +- mypyc/lib-rt/dict_ops.c | 8 + mypyc/lib-rt/getargsfast.c | 5 - mypyc/lib-rt/list_ops.c | 8 + mypyc/lib-rt/misc_ops.c | 21 +- mypyc/lib-rt/pythonsupport.h | 29 +- mypyc/options.py | 2 + mypyc/primitives/dict_ops.py | 24 +- mypyc/primitives/int_ops.py | 80 +- mypyc/primitives/list_ops.py | 21 + mypyc/primitives/registry.py | 38 +- mypyc/primitives/set_ops.py | 2 +- mypyc/sametype.py | 4 +- mypyc/test-data/commandline.test | 8 +- mypyc/test-data/fixtures/ir.py | 2 + mypyc/test-data/irbuild-basic.test | 305 +-- mypyc/test-data/irbuild-bool.test | 144 ++ mypyc/test-data/irbuild-classes.test | 88 - mypyc/test-data/irbuild-constant-fold.test | 14 +- mypyc/test-data/irbuild-dunders.test | 38 +- mypyc/test-data/irbuild-glue-methods.test | 437 +++++ mypyc/test-data/irbuild-i32.test | 52 + mypyc/test-data/irbuild-i64.test | 270 +++ mypyc/test-data/irbuild-int.test | 67 + mypyc/test-data/irbuild-lists.test | 101 + mypyc/test-data/irbuild-match.test | 1708 +++++++++++++++++ mypyc/test-data/irbuild-optional.test | 14 +- mypyc/test-data/irbuild-set.test | 182 ++ mypyc/test-data/irbuild-statements.test | 4 +- mypyc/test-data/irbuild-try.test | 105 + mypyc/test-data/run-async.test | 3 + mypyc/test-data/run-bools.test | 57 +- mypyc/test-data/run-classes.test | 203 ++ mypyc/test-data/run-generators.test | 17 + mypyc/test-data/run-i32.test | 16 + 
mypyc/test-data/run-i64.test | 391 +++- mypyc/test-data/run-integers.test | 24 + mypyc/test-data/run-match.test | 283 +++ mypyc/test-data/run-misc.test | 30 + mypyc/test-data/run-sets.test | 33 + mypyc/test-data/run-strings.test | 5 + mypyc/test-data/run-tuples.test | 10 +- mypyc/test/test_commandline.py | 5 + mypyc/test/test_irbuild.py | 6 + mypyc/test/test_ircheck.py | 31 +- mypyc/test/test_run.py | 6 + .../test/{test_subtype.py => test_typeops.py} | 26 +- mypyc/test/testutil.py | 3 +- mypyc/transform/exceptions.py | 22 +- mypyc/transform/uninit.py | 4 +- pyproject.toml | 4 +- runtests.py | 2 +- setup.py | 2 +- .../packages/modulefinder/nsx-pkg3/nsx/c/c | 0 test-data/packages/modulefinder/pkg1/a | 0 test-data/unit/check-abstract.test | 8 +- test-data/unit/check-async-await.test | 30 + test-data/unit/check-attr.test | 79 + test-data/unit/check-basic.test | 79 +- test-data/unit/check-class-namedtuple.test | 2 - test-data/unit/check-classes.test | 282 +-- test-data/unit/check-columns.test | 2 +- test-data/unit/check-ctypes.test | 13 + test-data/unit/check-custom-plugin.test | 9 + test-data/unit/check-dataclass-transform.test | 46 + test-data/unit/check-dataclasses.test | 75 +- test-data/unit/check-dynamic-typing.test | 71 +- test-data/unit/check-enum.test | 29 +- test-data/unit/check-errorcodes.test | 50 +- test-data/unit/check-expressions.test | 120 +- test-data/unit/check-flags.test | 46 +- test-data/unit/check-functions.test | 42 +- test-data/unit/check-generics.test | 170 +- test-data/unit/check-incomplete-fixture.test | 8 - test-data/unit/check-incremental.test | 88 +- test-data/unit/check-inference-context.test | 227 ++- test-data/unit/check-inference.test | 167 +- test-data/unit/check-isinstance.test | 7 +- test-data/unit/check-kwargs.test | 96 +- test-data/unit/check-literal.test | 30 +- test-data/unit/check-modules.test | 57 +- test-data/unit/check-namedtuple.test | 33 +- test-data/unit/check-native-int.test | 44 + test-data/unit/check-newsemanal.test | 249 
++- test-data/unit/check-overloading.test | 37 +- .../unit/check-parameter-specification.test | 306 ++- test-data/unit/check-partially-defined.test | 362 ---- test-data/unit/check-possibly-undefined.test | 970 ++++++++++ test-data/unit/check-protocols.test | 213 +- test-data/unit/check-python310.test | 103 + test-data/unit/check-python311.test | 65 + test-data/unit/check-python38.test | 25 +- test-data/unit/check-python39.test | 2 +- test-data/unit/check-recursive-types.test | 89 + test-data/unit/check-redefine.test | 2 +- test-data/unit/check-selftype.test | 578 +++++- test-data/unit/check-slots.test | 10 + test-data/unit/check-statements.test | 74 +- test-data/unit/check-super.test | 9 +- test-data/unit/check-tuples.test | 44 +- test-data/unit/check-type-aliases.test | 111 +- test-data/unit/check-type-promotion.test | 133 ++ test-data/unit/check-typeddict.test | 314 ++- test-data/unit/check-typevar-tuple.test | 144 +- test-data/unit/check-unions.test | 25 +- test-data/unit/check-varargs.test | 223 +-- test-data/unit/cmdline.test | 93 + test-data/unit/daemon.test | 14 + test-data/unit/errorstream.test | 4 +- test-data/unit/fine-grained-attr.test | 34 + .../unit/fine-grained-follow-imports.test | 77 + test-data/unit/fine-grained-inspect.test | 4 +- test-data/unit/fine-grained.test | 279 ++- test-data/unit/fixtures/__init_subclass__.pyi | 1 + test-data/unit/fixtures/__new__.pyi | 1 + test-data/unit/fixtures/alias.pyi | 2 + test-data/unit/fixtures/any.pyi | 2 + test-data/unit/fixtures/args.pyi | 1 + test-data/unit/fixtures/attr.pyi | 3 +- test-data/unit/fixtures/bool.pyi | 4 +- test-data/unit/fixtures/bool_py2.pyi | 16 - test-data/unit/fixtures/callable.pyi | 1 + test-data/unit/fixtures/classmethod.pyi | 3 + test-data/unit/fixtures/complex.pyi | 1 + test-data/unit/fixtures/complex_tuple.pyi | 1 + test-data/unit/fixtures/dataclasses.pyi | 6 +- test-data/unit/fixtures/dict.pyi | 3 +- test-data/unit/fixtures/divmod.pyi | 2 + test-data/unit/fixtures/exception.pyi | 14 +- 
test-data/unit/fixtures/f_string.pyi | 2 + test-data/unit/fixtures/fine_grained.pyi | 1 + test-data/unit/fixtures/float.pyi | 2 + test-data/unit/fixtures/floatdict_python2.pyi | 68 - test-data/unit/fixtures/for.pyi | 1 + test-data/unit/fixtures/function.pyi | 1 + test-data/unit/fixtures/isinstance.pyi | 2 + .../unit/fixtures/isinstance_python3_10.pyi | 2 + test-data/unit/fixtures/list.pyi | 2 + test-data/unit/fixtures/module_all.pyi | 1 + .../unit/fixtures/module_all_python2.pyi | 15 - test-data/unit/fixtures/notimplemented.pyi | 1 + test-data/unit/fixtures/object_hashable.pyi | 1 + test-data/unit/fixtures/ops.pyi | 4 +- test-data/unit/fixtures/primitives.pyi | 8 +- test-data/unit/fixtures/property.pyi | 1 + test-data/unit/fixtures/property_py2.pyi | 21 - test-data/unit/fixtures/python2.pyi | 38 - test-data/unit/fixtures/set.pyi | 3 + test-data/unit/fixtures/slice.pyi | 1 + test-data/unit/fixtures/staticmethod.pyi | 2 +- test-data/unit/fixtures/transform.pyi | 2 + test-data/unit/fixtures/tuple-simple.pyi | 1 + test-data/unit/fixtures/tuple.pyi | 6 +- test-data/unit/fixtures/type.pyi | 10 +- test-data/unit/fixtures/typing-full.pyi | 2 + test-data/unit/fixtures/typing-medium.pyi | 4 + test-data/unit/fixtures/typing-namedtuple.pyi | 1 + test-data/unit/fixtures/typing-typeddict.pyi | 2 + test-data/unit/fixtures/union.pyi | 1 + test-data/unit/lib-stub/__builtin__.pyi | 30 - test-data/unit/lib-stub/_decimal.pyi | 4 + test-data/unit/lib-stub/builtins.pyi | 12 +- test-data/unit/lib-stub/datetime.pyi | 16 + test-data/unit/lib-stub/decimal.pyi | 3 + test-data/unit/lib-stub/functools.pyi | 35 + test-data/unit/lib-stub/mypy_extensions.pyi | 9 +- test-data/unit/lib-stub/traceback.pyi | 3 + test-data/unit/lib-stub/typing.pyi | 3 + test-data/unit/lib-stub/typing_extensions.pyi | 9 +- test-data/unit/lib-stub/unannotated_lib.pyi | 1 + test-data/unit/merge.test | 96 +- test-data/unit/plugins/customentry.py | 2 +- test-data/unit/pythoneval.test | 241 +++ test-data/unit/reports.test | 
24 +- test-data/unit/semanal-basic.test | 65 +- test-data/unit/semanal-classes.test | 15 +- test-data/unit/semanal-errors.test | 15 +- test-data/unit/semanal-expressions.test | 55 +- test-data/unit/semanal-modules.test | 64 +- test-data/unit/semanal-python310.test | 50 +- test-data/unit/semanal-statements.test | 283 ++- test-data/unit/semanal-symtable.test | 12 +- test-data/unit/semanal-types.test | 28 + test-data/unit/stubgen.test | 9 + test-data/unit/typexport-basic.test | 172 +- test-requirements.txt | 13 +- tox.ini | 4 +- 463 files changed, 18859 insertions(+), 5681 deletions(-) create mode 100644 misc/docker/Dockerfile create mode 100644 misc/docker/README.md create mode 100644 misc/docker/build.py create mode 100755 misc/docker/run-wrapper.sh create mode 100755 misc/docker/run.sh create mode 100644 misc/perf_compare.py create mode 100644 mypy/constant_fold.py create mode 100644 mypy/evalexpr.py create mode 100644 mypyc/irbuild/match.py create mode 100644 mypyc/test-data/irbuild-bool.test create mode 100644 mypyc/test-data/irbuild-glue-methods.test create mode 100644 mypyc/test-data/irbuild-match.test create mode 100644 mypyc/test-data/run-match.test rename mypyc/test/{test_subtype.py => test_typeops.py} (64%) create mode 100644 test-data/packages/modulefinder/nsx-pkg3/nsx/c/c create mode 100644 test-data/packages/modulefinder/pkg1/a create mode 100644 test-data/unit/check-dataclass-transform.test delete mode 100644 test-data/unit/check-partially-defined.test create mode 100644 test-data/unit/check-possibly-undefined.test create mode 100644 test-data/unit/check-python311.test delete mode 100644 test-data/unit/fixtures/bool_py2.pyi delete mode 100644 test-data/unit/fixtures/floatdict_python2.pyi delete mode 100644 test-data/unit/fixtures/module_all_python2.pyi delete mode 100644 test-data/unit/fixtures/property_py2.pyi delete mode 100644 test-data/unit/fixtures/python2.pyi delete mode 100644 test-data/unit/lib-stub/__builtin__.pyi create mode 100644 
test-data/unit/lib-stub/_decimal.pyi create mode 100644 test-data/unit/lib-stub/datetime.pyi create mode 100644 test-data/unit/lib-stub/decimal.pyi create mode 100644 test-data/unit/lib-stub/functools.pyi create mode 100644 test-data/unit/lib-stub/traceback.pyi create mode 100644 test-data/unit/lib-stub/unannotated_lib.pyi diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml index d26372aa6635..9eef1c1c7466 100644 --- a/.github/workflows/mypy_primer.yml +++ b/.github/workflows/mypy_primer.yml @@ -62,6 +62,7 @@ jobs: --new $GITHUB_SHA --old base_commit \ --num-shards 5 --shard-index ${{ matrix.shard-index }} \ --debug \ + --additional-flags="--debug-serialize" \ --output concise \ | tee diff_${{ matrix.shard-index }}.txt ) || [ $? -eq 1 ] diff --git a/.github/workflows/mypy_primer_comment.yml b/.github/workflows/mypy_primer_comment.yml index 2056fc5a40c0..12ce91c12910 100644 --- a/.github/workflows/mypy_primer_comment.yml +++ b/.github/workflows/mypy_primer_comment.yml @@ -48,15 +48,29 @@ jobs: with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | + const MAX_CHARACTERS = 30000 + const MAX_CHARACTERS_PER_PROJECT = MAX_CHARACTERS / 3 + const fs = require('fs') let data = fs.readFileSync('fulldiff.txt', { encoding: 'utf8' }) - // posting comment fails if too long, so truncate - if (data.length > 30000) { - let truncated_data = data.substring(0, 30000) - let lines_truncated = data.split('\n').length - truncated_data.split('\n').length - data = truncated_data + `\n\n... (truncated ${lines_truncated} lines) ...\n` + + function truncateIfNeeded(original, maxLength) { + if (original.length <= maxLength) { + return original + } + let truncated = original.substring(0, maxLength) + // further, remove last line that might be truncated + truncated = truncated.substring(0, truncated.lastIndexOf('\n')) + let lines_truncated = original.split('\n').length - truncated.split('\n').length + return `${truncated}\n\n... 
(truncated ${lines_truncated} lines) ...` } + const projects = data.split('\n\n') + // don't let one project dominate + data = projects.map(project => truncateIfNeeded(project, MAX_CHARACTERS_PER_PROJECT)).join('\n\n') + // posting comment fails if too long, so truncate + data = truncateIfNeeded(data, MAX_CHARACTERS) + console.log("Diff from mypy_primer:") console.log(data) @@ -76,8 +90,8 @@ jobs: return prNumber - name: Hide old comments - # v0.3.0 - uses: kanga333/comment-hider@bbdf5b562fbec24e6f60572d8f712017428b92e0 + # v0.4.0 + uses: kanga333/comment-hider@c12bb20b48aeb8fc098e35967de8d4f8018fffdf with: github_token: ${{ secrets.GITHUB_TOKEN }} leave_visible: 1 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8b82df7d99cd..a02378cc01ab 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -64,7 +64,7 @@ jobs: toxenv: py tox_extra_args: "-n 2" - name: Test suite with py311-ubuntu, mypyc-compiled - python: '3.11-dev' + python: '3.11' arch: x64 os: ubuntu-latest toxenv: py @@ -102,6 +102,16 @@ jobs: name: ${{ matrix.name }} env: TOX_SKIP_MISSING_INTERPRETERS: False + # Rich (pip) + FORCE_COLOR: 1 + # Tox + PY_COLORS: 1 + # Mypy (see https://github.com/python/mypy/issues/7771) + TERM: xterm-color + MYPY_FORCE_COLOR: 1 + MYPY_FORCE_TERMINAL_WIDTH: 200 + # Pytest + PYTEST_ADDOPTS: --color=yes steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 @@ -127,3 +137,21 @@ jobs: run: tox -e ${{ matrix.toxenv }} --notest - name: Test run: tox -e ${{ matrix.toxenv }} --skip-pkg-install -- ${{ matrix.tox_extra_args }} + + python-nightly: + runs-on: ubuntu-latest + name: Test suite with Python nightly + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.12-dev' + - name: Install tox + run: pip install --upgrade 'setuptools!=50' tox==3.24.5 + - name: Setup tox environment + run: tox -e py --notest + - name: Test + run: tox -e py --skip-pkg-install -- "-n 2" + 
continue-on-error: true + - name: Mark as a success + run: exit 0 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c4acf4f87e1b..0de686b7eb01 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,10 @@ repos: - repo: https://github.com/psf/black - rev: 22.6.0 # must match test-requirements.txt + rev: 22.12.0 # must match test-requirements.txt hooks: - id: black - repo: https://github.com/pycqa/isort - rev: 5.10.1 # must match test-requirements.txt + rev: 5.11.4 # must match test-requirements.txt hooks: - id: isort - repo: https://github.com/pycqa/flake8 @@ -12,5 +12,5 @@ repos: hooks: - id: flake8 additional_dependencies: - - flake8-bugbear==22.8.23 # must match test-requirements.txt - - flake8-noqa==1.2.9 # must match test-requirements.txt + - flake8-bugbear==22.12.6 # must match test-requirements.txt + - flake8-noqa==1.3.0 # must match test-requirements.txt diff --git a/README.md b/README.md index 95cacb05d682..9d9618e6bc12 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Mypy: Static Typing for Python [![Build Status](https://github.com/python/mypy/actions/workflows/test.yml/badge.svg)](https://github.com/python/mypy/actions) [![Documentation Status](https://readthedocs.org/projects/mypy/badge/?version=latest)](https://mypy.readthedocs.io/en/latest/?badge=latest) [![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/) +[![Checked with mypy](https://www.mypy-lang.org/static/mypy_badge.svg)](https://mypy-lang.org/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/) @@ -84,6 +84,7 @@ more 
examples and information. In particular, see: - [type hints cheat sheet](https://mypy.readthedocs.io/en/stable/cheat_sheet_py3.html) - [getting started](https://mypy.readthedocs.io/en/stable/getting_started.html) +- [list of error codes](https://mypy.readthedocs.io/en/stable/error_code_list.html) Quick start ----------- @@ -109,9 +110,13 @@ programs, even if mypy reports type errors: python3 PROGRAM You can also try mypy in an [online playground](https://mypy-play.net/) (developed by -Yusuke Miyazaki). +Yusuke Miyazaki). If you are working with large code bases, you can run mypy in +[daemon mode], which will give much faster (often sub-second) incremental updates: + + dmypy run -- PROGRAM [statically typed parts]: https://mypy.readthedocs.io/en/latest/getting_started.html#function-signatures-and-dynamic-vs-static-typing +[daemon mode]: https://mypy.readthedocs.io/en/stable/mypy_daemon.html Integrations @@ -124,7 +129,7 @@ Mypy can be integrated into popular IDEs: `let g:syntastic_python_checkers=['mypy']` * Using [ALE](https://github.com/dense-analysis/ale): should be enabled by default when `mypy` is installed, or can be explicitly enabled by adding `let b:ale_linters = ['mypy']` in `~/vim/ftplugin/python.vim` -* Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy) +* Emacs: using [Flycheck](https://github.com/flycheck/) * Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy) * Atom: [linter-mypy](https://atom.io/packages/linter-mypy) * PyCharm: [mypy plugin](https://github.com/dropbox/mypy-PyCharm-plugin) (PyCharm integrates @@ -160,14 +165,7 @@ To get started with developing mypy, see [CONTRIBUTING.md](CONTRIBUTING.md). If you need help getting started, don't hesitate to ask on [gitter](https://gitter.im/python/typing). 
-Development status ------------------- - -Mypy is beta software, but it has already been used in production -for several years at Dropbox and in many other organizations, and -it has an extensive test suite. - -mypyc and compiled version of mypy +Mypyc and compiled version of mypy ---------------------------------- [Mypyc](https://github.com/mypyc/mypyc) uses Python type hints to compile Python diff --git a/docs/source/additional_features.rst b/docs/source/additional_features.rst index 19e0d4dcce01..ef5bf9e8936d 100644 --- a/docs/source/additional_features.rst +++ b/docs/source/additional_features.rst @@ -177,7 +177,7 @@ Caveats/Known Issues will complain about not understanding the argument and the type annotation in :py:meth:`__init__ ` will be replaced by ``Any``. -* :ref:`Validator decorators ` +* :ref:`Validator decorators ` and `default decorators `_ are not type-checked against the attribute they are setting/validating. diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index 83d2983472be..31d23db204eb 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -448,9 +448,10 @@ potentially problematic or redundant in some way. are when: - The function has a ``None`` or ``Any`` return type - - The function has an empty body or a body that is just - ellipsis (``...``). Empty functions are often used for - abstract methods. + - The function has an empty body and is marked as an abstract method, + is in a protocol class, or is in a stub file + - The execution path can never return; for example, if an exception + is always raised Passing in :option:`--no-warn-no-return` will disable these error messages in all cases. 
diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst index 42962581702f..465035307d5d 100644 --- a/docs/source/common_issues.rst +++ b/docs/source/common_issues.rst @@ -188,6 +188,8 @@ Ignoring a whole file A ``# type: ignore`` comment at the top of a module (before any statements, including imports or docstrings) has the effect of ignoring the entire contents of the module. +This behaviour can be surprising and result in +"Module ... has no attribute ... [attr-defined]" errors. To only ignore errors, use a top-level ``# mypy: ignore-errors`` comment instead. To only ignore errors with a specific error code, use a top-level diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst index abaec31c6888..3b96e6bd7a5a 100644 --- a/docs/source/config_file.rst +++ b/docs/source/config_file.rst @@ -210,7 +210,7 @@ section of the command line docs. line. Mypy *will* recursively type check any submodules of the provided package. This flag is identical to :confval:`modules` apart from this behavior. - + This option may only be set in the global section (``[mypy]``). .. confval:: exclude @@ -901,6 +901,12 @@ Report generation If these options are set, mypy will generate a report in the specified format into the specified directory. +.. warning:: + + Generating reports disables incremental mode and can significantly slow down + your workflow. It is recommended to enable reporting only for specific runs + (e.g. in CI). + .. 
confval:: any_exprs_report :type: string diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 264badc03107..674ad08c4d09 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -430,6 +430,56 @@ Example: # Error: Incompatible types (expression has type "float", # TypedDict item "x" has type "int") [typeddict-item] p: Point = {'x': 1.2, 'y': 4} + +Check TypedDict Keys [typeddict-unknown-key] +-------------------------------------------- + +When constructing a ``TypedDict`` object, mypy checks whether the definition +contains unknown keys. For convenience's sake, mypy will not generate an error +when a ``TypedDict`` has extra keys if it's passed to a function as an argument. +However, it will generate an error when these are created. Example: + +.. code-block:: python + + from typing_extensions import TypedDict + + class Point(TypedDict): + x: int + y: int + + class Point3D(Point): + z: int + + def add_x_coordinates(a: Point, b: Point) -> int: + return a["x"] + b["x"] + + a: Point = {"x": 1, "y": 4} + b: Point3D = {"x": 2, "y": 5, "z": 6} + + # OK + add_x_coordinates(a, b) + # Error: Extra key "z" for TypedDict "Point" [typeddict-unknown-key] + add_x_coordinates(a, {"x": 1, "y": 4, "z": 5}) + + +Setting an unknown value on a ``TypedDict`` will also generate this error: + +.. code-block:: python + + a: Point = {"x": 1, "y": 2} + # Error: Extra key "z" for TypedDict "Point" [typeddict-unknown-key] + a["z"] = 3 + + +Whereas reading an unknown value will generate the more generic/serious +``typeddict-item``: + +.. code-block:: python + + a: Point = {"x": 1, "y": 2} + # Error: TypedDict "Point" has no key "z" [typeddict-item] + _ = a["z"] + Check that type of target is known [has-type] --------------------------------------------- @@ -764,6 +814,19 @@ the provided type. 
assert_type([1], list[str]) # Error +Check that function isn't used in boolean context [truthy-function] +------------------------------------------------------------------- + +Functions will always evaluate to true in boolean contexts. + +.. code-block:: python + + def f(): + ... + + if f: # Error: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] + pass + Report syntax errors [syntax] ----------------------------- diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index cac19e705361..85ab76da5cee 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -82,6 +82,25 @@ Example: # Error: Redundant cast to "int" [redundant-cast] return cast(int, x) +Check that methods do not have redundant Self annotations [redundant-self] +-------------------------------------------------------------------------- + +Such annotations are allowed by :pep:`673` but are redundant, so if you want +warnings about them, enable this error code. + +Example: + +.. code-block:: python + + # mypy: enable-error-code="redundant-self" + + from typing import Self + + class C: + # Error: Redundant Self annotation on method first argument + def copy(self: Self) -> Self: + return type(self)() + Check that comparisons are overlapping [comparison-overlap] ----------------------------------------------------------- @@ -231,46 +250,31 @@ since unless implemented by a sub-type, the expression will always evaluate to t if foo: ... +The check is similar in concept to ensuring that an expression's type implements an expected interface (e.g. ``Sized``), +except that attempting to invoke an undefined method (e.g. ``__len__``) results in an error, +while attempting to evaluate an object in boolean context without a concrete implementation results in a truthy value. + -This check might falsely imply an error. 
For example, ``Iterable`` does not implement -``__len__`` and so this code will be flagged: +Check that iterable is not implicitly true in boolean context [truthy-iterable] +------------------------------------------------------------------------------- -.. code-block:: python +``Iterable`` does not implement ``__len__`` and so this code will be flagged: - # Use "mypy -enable-error-code truthy-bool ..." +.. code-block:: python from typing import Iterable - def transform(items: Iterable[int]) -> Iterable[int]: - # Error: "items" has type "Iterable[int]" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] + def transform(items: Iterable[int]) -> list[int]: + # Error: "items" has type "Iterable[int]" which can always be true in boolean context. Consider using "Collection[int]" instead. [truthy-iterable] if not items: return [42] return [x + 1 for x in items] +If called with a ``Generator`` like ``int(x) for x in []``, this function would not return ``[42]`` unlike +what the author might have intended. Of course it's possible that ``transform`` is only passed ``list`` objects, +and so there is no error in practice. In such case, it is recommended to annotate ``items: Collection[int]``. -If called as ``transform((int(s) for s in []))``, this function would not return ``[42]`` unlike what the author -might have intended. Of course it's possible that ``transform`` is only passed ``list`` objects, and so there is -no error in practice. In such case, it might be prudent to annotate ``items: Sequence[int]``. - -This is similar in concept to ensuring that an expression's type implements an expected interface (e.g. ``Sized``), -except that attempting to invoke an undefined method (e.g. ``__len__``) results in an error, -while attempting to evaluate an object in boolean context without a concrete implementation results in a truthy value. 
- - -Check that function isn't used in boolean context [truthy-function] -------------------------------------------------------------------- - -Functions will always evaluate to true in boolean contexts. - -.. code-block:: python - - def f(): - ... - - if f: # Error: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] - pass - .. _ignore-without-code: Check that ``# type: ignore`` include an error code [ignore-without-code] diff --git a/docs/source/generics.rst b/docs/source/generics.rst index 3ae616f78691..a867bc863c83 100644 --- a/docs/source/generics.rst +++ b/docs/source/generics.rst @@ -264,15 +264,8 @@ Generic methods and generic self You can also define generic methods — just use a type variable in the method signature that is different from class type variables. In particular, ``self`` may also be generic, allowing a method to return the most precise -type known at the point of access. - -.. note:: - - This feature is experimental. Checking code with type annotations for self - arguments is still not fully implemented. Mypy may disallow valid code or - allow unsafe code. - -In this way, for example, you can typecheck chaining of setter methods: +type known at the point of access. In this way, for example, you can typecheck +chaining of setter methods: .. code-block:: python @@ -333,8 +326,69 @@ or a deserialization method returns the actual type of self. Therefore you may need to silence mypy inside these methods (but not at the call site), possibly by making use of the ``Any`` type. +Note that this feature may accept some unsafe code for the purpose of +*practicality*. For example: + +.. code-block:: python + + from typing import TypeVar + + T = TypeVar("T") + class Base: + def compare(self: T, other: T) -> bool: + return False + + class Sub(Base): + def __init__(self, x: int) -> None: + self.x = x + + # This is unsafe (see below), but allowed because it is + # a common pattern, and rarely causes issues in practice. 
+ def compare(self, other: Sub) -> bool: + return self.x > other.x + + b: Base = Sub(42) + b.compare(Base()) # Runtime error here: 'Base' object has no attribute 'x' + For some advanced uses of self-types see :ref:`additional examples `. +Automatic self types using typing.Self +************************************** + +The patterns described above are quite common, so there is a syntactic sugar +for them introduced in :pep:`673`. Instead of defining a type variable and +using an explicit ``self`` annotation, you can import a magic type ``typing.Self`` +that is automatically transformed into a type variable with an upper bound of +current class, and you don't need an annotation for ``self`` (or ``cls`` for +class methods). The above example can thus be rewritten as: + +.. code-block:: python + + from typing import Self + + class Friend: + other: Self | None = None + + @classmethod + def make_pair(cls) -> tuple[Self, Self]: + a, b = cls(), cls() + a.other = b + b.other = a + return a, b + + class SuperFriend(Friend): + pass + + a, b = SuperFriend.make_pair() + +This is more compact than using explicit type variables, plus additionally +you can use ``Self`` in attribute annotations, not just in methods. + +.. note:: + + To use this feature on versions of Python before 3.11, you will need to + import ``Self`` from ``typing_extensions`` version 4.0 or newer. + .. _variance-of-generics: Variance of generic types @@ -548,7 +602,7 @@ Note that class decorators are handled differently than function decorators in mypy: decorating a class does not erase its type, even if the decorator has incomplete type annotations. -Suppose we have the following decorator, not type annotated yet, +Suppose we have the following decorator, not type annotated yet, that preserves the original function's signature and merely prints the decorated function's name: .. code-block:: python @@ -581,7 +635,7 @@ Before parameter specifications, here's how one might have annotated the decorat .. 
code-block:: python - from typing import Callable, TypeVar + from typing import Any, Callable, TypeVar, cast F = TypeVar('F', bound=Callable[..., Any]) @@ -596,8 +650,8 @@ and that would enable the following type checks: .. code-block:: python - reveal_type(a) # str - add_forty_two('x') # Type check error: incompatible type "str"; expected "int" + reveal_type(a) # Revealed type is "builtins.int" + add_forty_two('x') # Argument 1 to "add_forty_two" has incompatible type "str"; expected "int" Note that the ``wrapper()`` function is not type-checked. Wrapper @@ -670,7 +724,7 @@ achieved by combining with :py:func:`@overload `: .. code-block:: python - from typing import Any, Callable, TypeVar, overload + from typing import Any, Callable, Optional, TypeVar, overload F = TypeVar('F', bound=Callable[..., Any]) @@ -682,7 +736,7 @@ achieved by combining with :py:func:`@overload `: def atomic(*, savepoint: bool = True) -> Callable[[F], F]: ... # Implementation - def atomic(__func: Callable[..., Any] = None, *, savepoint: bool = True): + def atomic(__func: Optional[Callable[..., Any]] = None, *, savepoint: bool = True): def decorator(func: Callable[..., Any]): ... # Code goes here if __func is not None: @@ -862,9 +916,5 @@ defeating the purpose of using aliases. Example: OIntVec = Optional[Vec[int]] -.. note:: - - A type alias does not define a new type. For generic type aliases - this means that variance of type variables used for alias definition does not - apply to aliases. A parameterized generic alias is treated simply as an original - type with the corresponding type variables substituted. +Using type variable bounds or values in generic aliases, has the same effect +as in generic classes/functions. diff --git a/docs/source/index.rst b/docs/source/index.rst index 1f77e951843d..27b3a078af6c 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -39,10 +39,10 @@ understand, debug, and maintain. .. 
note:: - Mypy is used in production by many companies and projects, but mypy is - officially beta software. There will be occasional changes + Although mypy is production ready, there will be occasional changes that break backward compatibility. The mypy development team tries to - minimize the impact of changes to user code. + minimize the impact of changes to user code. In case of a major breaking + change, mypy's major version will be bumped. Contents -------- diff --git a/docs/source/literal_types.rst b/docs/source/literal_types.rst index 7195ccc2b69b..a66d300bd0fd 100644 --- a/docs/source/literal_types.rst +++ b/docs/source/literal_types.rst @@ -495,13 +495,13 @@ the same way Python's runtime does: ... right = 'right' Traceback (most recent call last): ... - TypeError: Other: cannot extend enumeration 'Some' + TypeError: AllDirection: cannot extend enumeration 'Direction' Mypy also catches this error: .. code-block:: python - class AllDirection(Direction): # E: Cannot inherit from final class "Some" + class AllDirection(Direction): # E: Cannot inherit from final class "Direction" left = 'left' right = 'right' diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst index 707411e95fef..722909a038b5 100644 --- a/docs/source/more_types.rst +++ b/docs/source/more_types.rst @@ -804,9 +804,10 @@ classes are generic, self-type allows giving them precise signatures: .. code-block:: python T = TypeVar('T') - Q = TypeVar('Q', bound='Base[Any]') class Base(Generic[T]): + Q = TypeVar('Q', bound='Base[T]') + def __init__(self, item: T) -> None: self.item = item diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index a7eb3fc5e1e7..4a7b5dcf4093 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -250,7 +250,7 @@ If you are getting this error, try: 1. Upgrading the version of the library you're using, in case a newer version has started to include type hints. -2. 
Searching to see if there is a :ref:`PEP 561 compliant stub package `. +2. Searching to see if there is a :ref:`PEP 561 compliant stub package ` corresponding to your third party library. Stub packages let you install type hints independently from the library itself. @@ -322,34 +322,27 @@ the library, you will get a message like this: main.py:1: note: Hint: "python3 -m pip install types-PyYAML" main.py:1: note: (or run "mypy --install-types" to install all missing stub packages) -You can resolve the issue by running the suggested pip command or -commands. Alternatively, you can use :option:`--install-types ` to install all known missing stubs: +You can resolve the issue by running the suggested pip commands. +If you're running mypy in CI, you can ensure the presence of any stub packages +you need the same as you would any other test dependency, e.g. by adding them to +the appropriate ``requirements.txt`` file. -.. code-block:: text - - mypy --install-types - -This installs any stub packages that were suggested in the previous -mypy run. You can also use your normal mypy command line with the -extra :option:`--install-types ` option to -install missing stubs at the end of the run (if any were found). - -Use :option:`--install-types ` with -:option:`--non-interactive ` to install all suggested -stub packages without asking for confirmation, *and* type check your -code, in a single command: +Alternatively, add the :option:`--install-types ` +to your mypy command to install all known missing stubs: .. code-block:: text - mypy --install-types --non-interactive src/ + mypy --install-types -This can be useful in Continuous Integration jobs if you'd prefer not -to manage stub packages manually. 
This is somewhat slower than -explicitly installing stubs before running mypy, since it may type -check your code twice -- the first time to find the missing stubs, and +This is slower than explicitly installing stubs, since it effectively +runs mypy twice -- the first time to find the missing stubs, and the second time to type check your code properly after mypy has -installed the stubs. +installed the stubs. It also can make controlling stub versions harder, +resulting in less reproducible type checking. + +By default, :option:`--install-types ` shows a confirmation prompt. +Use :option:`--non-interactive ` to install all suggested +stub packages without asking for confirmation *and* type check your code: If you've already installed the relevant third-party libraries in an environment other than the one mypy is running in, you can use :option:`--python-executable @@ -394,15 +387,6 @@ this error, try: you must run ``mypy ~/foo-project/src`` (or set the ``MYPYPATH`` to ``~/foo-project/src``. -In some rare cases, you may get the "Cannot find implementation or library -stub for module" error even when the module is installed in your system. -This can happen when the module is both missing type hints and is installed -on your system in an unconventional way. - -In this case, follow the steps above on how to handle -:ref:`missing type hints in third party libraries `. - - .. _finding-imports: How imports are found diff --git a/docs/source/stubtest.rst b/docs/source/stubtest.rst index ca291f55947e..a8279eb6c239 100644 --- a/docs/source/stubtest.rst +++ b/docs/source/stubtest.rst @@ -41,6 +41,10 @@ stubs and implementation or to check for stub completeness. It's used to test Python's official collection of library stubs, `typeshed `_. +.. warning:: + + stubtest will import and execute Python code from the packages it checks. 
+ Example ******* diff --git a/docs/source/type_narrowing.rst b/docs/source/type_narrowing.rst index 806835ed33a5..72a816679140 100644 --- a/docs/source/type_narrowing.rst +++ b/docs/source/type_narrowing.rst @@ -16,7 +16,7 @@ The simplest way to narrow a type is to use one of the supported expressions: - :py:func:`isinstance` like in ``isinstance(obj, float)`` will narrow ``obj`` to have ``float`` type - :py:func:`issubclass` like in ``issubclass(cls, MyClass)`` will narrow ``cls`` to be ``Type[MyClass]`` -- :py:func:`type` like in ``type(obj) is int`` will narrow ``obj`` to have ``int`` type +- :py:class:`type` like in ``type(obj) is int`` will narrow ``obj`` to have ``int`` type - :py:func:`callable` like in ``callable(obj)`` will narrow object to callable type Type narrowing is contextual. For example, based on the condition, mypy will narrow an expression only within an ``if`` branch: diff --git a/misc/docker/Dockerfile b/misc/docker/Dockerfile new file mode 100644 index 000000000000..3327f9e38815 --- /dev/null +++ b/misc/docker/Dockerfile @@ -0,0 +1,12 @@ +FROM ubuntu:latest + +WORKDIR /mypy + +RUN apt-get update +RUN apt-get install -y python3 python3-pip clang + +COPY mypy-requirements.txt . +COPY test-requirements.txt . +COPY build-requirements.txt . + +RUN pip3 install -r test-requirements.txt diff --git a/misc/docker/README.md b/misc/docker/README.md new file mode 100644 index 000000000000..839f9761cb03 --- /dev/null +++ b/misc/docker/README.md @@ -0,0 +1,101 @@ +Running mypy and mypyc tests in a Docker container +================================================== + +This directory contains scripts for running mypy and mypyc tests in a +Linux Docker container. This allows running Linux tests on a different +operating system that supports Docker, or running tests in an +isolated, predictable environment on a Linux host operating system. + +Why use Docker? 
+---------------
+
+Mypyc tests can be significantly faster in a Docker container than
+running natively on macOS.
+
+Also, if it's inconvenient to install the necessary dependencies on the
+host operating system, or there are issues getting some tests to pass
+on the host operating system, using a container can be an easy
+workaround.
+
+Prerequisites
+-------------
+
+First install Docker. On macOS, both Docker Desktop (proprietary, but
+with a free of charge subscription for some use cases) and Colima (MIT
+license) should work as runtimes.
+
+You may have to explicitly start the runtime first. Colima example
+(replace '8' with the number of CPU cores you have):
+
+```
+$ colima start -c 8
+
+```
+
+How to run tests
+----------------
+
+You need to build the container with all necessary dependencies before
+you can run tests:
+
+```
+$ python3 misc/docker/build.py
+```
+
+This creates a `mypy-test` Docker container that you can use to run
+tests.
+
+You may need to run the script as root:
+
+```
+$ sudo python3 misc/docker/build.py
+```
+
+If you have a stale container which isn't up-to-date, use `--no-cache`
+`--pull` to force rebuilding everything:
+
+```
+$ python3 misc/docker/build.py --no-cache --pull
+```
+
+Now you can run tests by using the `misc/docker/run.sh` script. Give
+it the pytest command line you want to run as arguments. For example,
+you can run mypyc tests like this:
+
+```
+$ misc/docker/run.sh pytest mypyc
+```
+
+You can also use `-k <filter>`, `-n0`, `-q`, etc.
+
+Again, you may need to run `run.sh` as root:
+
+```
+$ sudo misc/docker/run.sh pytest mypyc
+```
+
+You can also use `runtests.py` in the container. Example:
+
+```
+$ misc/docker/run.sh ./runtests.py self lint
+```
+
+Notes
+-----
+
+File system changes within the container are not visible to the host
+system. You can't use the container to format code using Black, for
+example.
+
+On a mac, you may want to give additional CPU to the VM used to run
+the container. 
The default allocation may be way too low (e.g. 2 CPU
+cores). For example, use the `-c` option when starting the VM if you
+use Colima:
+
+```
+$ colima start -c 8
+```
+
+Giving access to all available CPUs to the Linux VM tends to provide
+the best performance. This is not needed on a Linux host, since the
+container is not run in a VM.
diff --git a/misc/docker/build.py b/misc/docker/build.py
new file mode 100644
index 000000000000..2103be3f110f
--- /dev/null
+++ b/misc/docker/build.py
@@ -0,0 +1,46 @@
+"""Build a "mypy-test" Linux Docker container for running mypy/mypyc tests.
+
+This allows running Linux tests under a non-Linux operating system. Mypyc
+tests can also run much faster under Linux than the host OS.
+
+NOTE: You may need to run this as root (using sudo).
+
+Run with "--no-cache" to force reinstallation of mypy dependencies.
+Run with "--pull" to force update of the Linux (Ubuntu) base image.
+
+After you've built the container, use "run.sh" to run tests. Example:
+
+    misc/docker/run.sh pytest mypyc/
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser(
+        description="""Build a 'mypy-test' Docker container for running mypy/mypyc tests. 
You may + need to run this as root (using sudo).""" + ) + parser.add_argument("--no-cache", action="store_true", help="Force rebuilding") + parser.add_argument("--pull", action="store_true", help="Force pulling fresh Linux base image") + args = parser.parse_args() + + dockerdir = os.path.dirname(os.path.abspath(__file__)) + dockerfile = os.path.join(dockerdir, "Dockerfile") + rootdir = os.path.join(dockerdir, "..", "..") + + cmdline = ["docker", "build", "-t", "mypy-test", "-f", dockerfile] + if args.no_cache: + cmdline.append("--no-cache") + if args.pull: + cmdline.append("--pull") + cmdline.append(rootdir) + result = subprocess.run(cmdline) + sys.exit(result.returncode) + + +if __name__ == "__main__": + main() diff --git a/misc/docker/run-wrapper.sh b/misc/docker/run-wrapper.sh new file mode 100755 index 000000000000..77e77d99af34 --- /dev/null +++ b/misc/docker/run-wrapper.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# Internal wrapper script used to run commands in a container + +# Copy all the files we need from the mypy repo directory shared with +# the host to a local directory. Accessing files using a shared +# directory on a mac can be *very* slow. +echo "copying files to the container..." +cp -R /repo/{mypy,mypyc,test-data,misc} . +cp /repo/{pytest.ini,conftest.py,runtests.py,pyproject.toml,setup.cfg} . +cp /repo/{mypy_self_check.ini,mypy_bootstrap.ini} . + +# Run the wrapped command +"$@" diff --git a/misc/docker/run.sh b/misc/docker/run.sh new file mode 100755 index 000000000000..c8fc0e510e8e --- /dev/null +++ b/misc/docker/run.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# Run mypy or mypyc tests in a Docker container that was built using misc/docker/build.py. +# +# Usage: misc/docker/run.sh ... +# +# For example, run mypyc tests like this: +# +# misc/docker/run.sh pytest mypyc +# +# NOTE: You may need to run this as root (using sudo). + +SCRIPT_DIR=$(cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd) +MYPY_DIR="$SCRIPT_DIR/../.." 
+ +docker run -ti --rm -v "$MYPY_DIR:/repo" mypy-test /repo/misc/docker/run-wrapper.sh "$@" diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py index b661a899924c..7fffba8a8507 100644 --- a/misc/fix_annotate.py +++ b/misc/fix_annotate.py @@ -213,8 +213,7 @@ def has_return_exprs(self, node): results = {} if self.return_expr.match(node, results): return True - for child in node.children: - if child.type not in (syms.funcdef, syms.classdef): - if self.has_return_exprs(child): - return True - return False + return any( + child.type not in (syms.funcdef, syms.classdef) and self.has_return_exprs(child) + for child in node.children + ) diff --git a/misc/perf_compare.py b/misc/perf_compare.py new file mode 100644 index 000000000000..be05bb6ddc32 --- /dev/null +++ b/misc/perf_compare.py @@ -0,0 +1,146 @@ +"""Compare performance of mypyc-compiled mypy between one or more commits/branches. + +Simple usage: + + python misc/perf_compare.py my-branch master ... + +What this does: + + * Create a temp clone of the mypy repo for each target commit to measure + * Checkout a target commit in each of the clones + * Compile mypyc in each of the clones *in parallel* + * Create another temp clone of the mypy repo as the code to check + * Self check with each of the compiled mypys N times + * Report the average runtimes and relative performance + * Remove the temp clones +""" + +from __future__ import annotations + +import argparse +import glob +import os +import random +import shutil +import statistics +import subprocess +import sys +import threading +import time + + +def heading(s: str) -> None: + print() + print(f"=== {s} ===") + print() + + +def build_mypy(target_dir: str) -> None: + env = os.environ.copy() + env["CC"] = "clang" + env["MYPYC_OPT_LEVEL"] = "2" + cmd = [sys.executable, "setup.py", "--use-mypyc", "build_ext", "--inplace"] + subprocess.run(cmd, env=env, check=True, cwd=target_dir) + + +def clone(target_dir: str, commit: str | None) -> None: + heading(f"Cloning mypy 
to {target_dir}") + repo_dir = os.getcwd() + if os.path.isdir(target_dir): + print(f"{target_dir} exists: deleting") + shutil.rmtree(target_dir) + subprocess.run(["git", "clone", repo_dir, target_dir], check=True) + if commit: + subprocess.run(["git", "checkout", commit], check=True, cwd=target_dir) + + +def run_benchmark(compiled_dir: str, check_dir: str) -> float: + cache_dir = os.path.join(compiled_dir, ".mypy_cache") + if os.path.isdir(cache_dir): + shutil.rmtree(cache_dir) + env = os.environ.copy() + env["PYTHONPATH"] = os.path.abspath(compiled_dir) + abschk = os.path.abspath(check_dir) + cmd = [ + sys.executable, + "-m", + "mypy", + "--config-file", + os.path.join(abschk, "mypy_self_check.ini"), + ] + cmd += glob.glob(os.path.join(abschk, "mypy/*.py")) + cmd += glob.glob(os.path.join(abschk, "mypy/*/*.py")) + t0 = time.time() + # Ignore errors, since some commits being measured may generate additional errors. + subprocess.run(cmd, cwd=compiled_dir, env=env) + return time.time() - t0 + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument("commit", nargs="+") + args = parser.parse_args() + commits = args.commit + num_runs = 16 + + if not (os.path.isdir(".git") and os.path.isdir("mypyc")): + sys.exit("error: Run this the mypy repo root") + + build_threads = [] + target_dirs = [] + for i, commit in enumerate(commits): + target_dir = f"mypy.{i}.tmpdir" + target_dirs.append(target_dir) + clone(target_dir, commit) + t = threading.Thread(target=lambda: build_mypy(target_dir)) + t.start() + build_threads.append(t) + + self_check_dir = "mypy.self.tmpdir" + clone(self_check_dir, commits[0]) + + heading("Compiling mypy") + print("(This will take a while...)") + + for t in build_threads: + t.join() + + print(f"Finished compiling mypy ({len(commits)} builds)") + + heading("Performing measurements") + + results: dict[str, list[float]] = {} + for n in range(num_runs): + if n == 0: + print("Warmup...") + else: + print(f"Run {n}/{num_runs - 1}...") 
+ items = list(enumerate(commits)) + random.shuffle(items) + for i, commit in items: + tt = run_benchmark(target_dirs[i], self_check_dir) + # Don't record the first warm-up run + if n > 0: + print(f"{commit}: t={tt:.3f}s") + results.setdefault(commit, []).append(tt) + + print() + heading("Results") + first = -1.0 + for commit in commits: + tt = statistics.mean(results[commit]) + if first < 0: + delta = "0.0%" + first = tt + else: + d = (tt / first) - 1 + delta = f"{d:+.1%}" + print(f"{commit:<25} {tt:.3f}s ({delta})") + + shutil.rmtree(self_check_dir) + for target_dir in target_dirs: + shutil.rmtree(target_dir) + + +if __name__ == "__main__": + main() diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index c6856f86744a..8eeb9be7f4f8 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -134,7 +134,7 @@ def main() -> None: parser.add_argument( "--commit", default=None, - help="Typeshed commit (default to latest master if using a repository clone)", + help="Typeshed commit (default to latest main if using a repository clone)", ) parser.add_argument( "--typeshed-dir", @@ -184,8 +184,11 @@ def main() -> None: subprocess.run(["git", "commit", "-m", message], check=True) print("Created typeshed sync commit.") - # Currently just LiteralString reverts - commits_to_cherry_pick = ["780534b13722b7b0422178c049a1cbbf4ea4255b"] + commits_to_cherry_pick = [ + "780534b13722b7b0422178c049a1cbbf4ea4255b", # LiteralString reverts + "5319fa34a8004c1568bb6f032a07b8b14cc95bed", # sum reverts + "0062994228fb62975c6cef4d2c80d00c7aa1c545", # ctypes reverts + ] for commit in commits_to_cherry_pick: subprocess.run(["git", "cherry-pick", commit], check=True) print(f"Cherry-picked {commit}.") diff --git a/misc/upload-pypi.py b/misc/upload-pypi.py index be8da9e44f86..e60ec3cca207 100644 --- a/misc/upload-pypi.py +++ b/misc/upload-pypi.py @@ -29,6 +29,21 @@ def is_whl_or_tar(name: str) -> bool: return name.endswith(".tar.gz") or name.endswith(".whl") +def 
item_ok_for_pypi(name: str) -> bool: + if not is_whl_or_tar(name): + return False + + if name.endswith(".tar.gz"): + name = name[:-7] + if name.endswith(".whl"): + name = name[:-4] + + if name.endswith("wasm32"): + return False + + return True + + def get_release_for_tag(tag: str) -> dict[str, Any]: with urlopen(f"{BASE}/{REPO}/releases/tags/{tag}") as f: data = json.load(f) @@ -75,7 +90,7 @@ def check_sdist(dist: Path, version: str) -> None: def spot_check_dist(dist: Path, version: str) -> None: - items = [item for item in dist.iterdir() if is_whl_or_tar(item.name)] + items = [item for item in dist.iterdir() if item_ok_for_pypi(item.name)] assert len(items) > 10 assert all(version in item.name for item in items) assert any(item.name.endswith("py3-none-any.whl") for item in items) @@ -93,7 +108,7 @@ def tmp_twine() -> Iterator[Path]: def upload_dist(dist: Path, dry_run: bool = True) -> None: with tmp_twine() as twine: - files = [item for item in dist.iterdir() if is_whl_or_tar(item.name)] + files = [item for item in dist.iterdir() if item_ok_for_pypi(item.name)] cmd: list[Any] = [twine, "upload"] cmd += files if dry_run: diff --git a/mypy/applytype.py b/mypy/applytype.py index 1c401664568d..a81ed3cd1f16 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -4,13 +4,14 @@ import mypy.subtypes from mypy.expandtype import expand_type, expand_unpack_with_variables -from mypy.nodes import ARG_POS, ARG_STAR, Context +from mypy.nodes import ARG_STAR, Context from mypy.types import ( AnyType, CallableType, Parameters, ParamSpecType, PartialType, + TupleType, Type, TypeVarId, TypeVarLikeType, @@ -19,6 +20,7 @@ UnpackType, get_proper_type, ) +from mypy.typevartuples import find_unpack_in_list, replace_starargs def get_target_type( @@ -73,6 +75,7 @@ def apply_generic_arguments( report_incompatible_typevar_value: Callable[[CallableType, Type, str, Context], None], context: Context, skip_unsatisfied: bool = False, + allow_erased_callables: bool = False, ) -> CallableType: 
"""Apply generic type arguments to a callable type. @@ -113,35 +116,61 @@ def apply_generic_arguments( # Apply arguments to argument types. var_arg = callable.var_arg() if var_arg is not None and isinstance(var_arg.typ, UnpackType): - expanded = expand_unpack_with_variables(var_arg.typ, id_to_type) - assert isinstance(expanded, list) - # Handle other cases later. - for t in expanded: - assert not isinstance(t, UnpackType) star_index = callable.arg_kinds.index(ARG_STAR) - arg_kinds = ( - callable.arg_kinds[:star_index] - + [ARG_POS] * len(expanded) - + callable.arg_kinds[star_index + 1 :] - ) - arg_names = ( - callable.arg_names[:star_index] - + [None] * len(expanded) - + callable.arg_names[star_index + 1 :] - ) - arg_types = ( - [expand_type(at, id_to_type) for at in callable.arg_types[:star_index]] - + expanded - + [expand_type(at, id_to_type) for at in callable.arg_types[star_index + 1 :]] + callable = callable.copy_modified( + arg_types=( + [ + expand_type(at, id_to_type, allow_erased_callables) + for at in callable.arg_types[:star_index] + ] + + [callable.arg_types[star_index]] + + [ + expand_type(at, id_to_type, allow_erased_callables) + for at in callable.arg_types[star_index + 1 :] + ] + ) ) + + unpacked_type = get_proper_type(var_arg.typ.type) + if isinstance(unpacked_type, TupleType): + # Assuming for now that because we convert prefixes to positional arguments, + # the first argument is always an unpack. + expanded_tuple = expand_type(unpacked_type, id_to_type) + if isinstance(expanded_tuple, TupleType): + # TODO: handle the case where the tuple has an unpack. This will + # hit an assert below. 
+ expanded_unpack = find_unpack_in_list(expanded_tuple.items) + if expanded_unpack is not None: + callable = callable.copy_modified( + arg_types=( + callable.arg_types[:star_index] + + [expanded_tuple] + + callable.arg_types[star_index + 1 :] + ) + ) + else: + callable = replace_starargs(callable, expanded_tuple.items) + else: + # TODO: handle the case for if we get a variable length tuple. + assert False, f"mypy bug: unimplemented case, {expanded_tuple}" + elif isinstance(unpacked_type, TypeVarTupleType): + expanded_tvt = expand_unpack_with_variables(var_arg.typ, id_to_type) + assert isinstance(expanded_tvt, list) + for t in expanded_tvt: + assert not isinstance(t, UnpackType) + callable = replace_starargs(callable, expanded_tvt) + else: + assert False, "mypy bug: unhandled case applying unpack" else: - arg_types = [expand_type(at, id_to_type) for at in callable.arg_types] - arg_kinds = callable.arg_kinds - arg_names = callable.arg_names + callable = callable.copy_modified( + arg_types=[ + expand_type(at, id_to_type, allow_erased_callables) for at in callable.arg_types + ] + ) # Apply arguments to TypeGuard if any. 
if callable.type_guard is not None: - type_guard = expand_type(callable.type_guard, id_to_type) + type_guard = expand_type(callable.type_guard, id_to_type, allow_erased_callables) else: type_guard = None @@ -149,10 +178,7 @@ def apply_generic_arguments( remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type] return callable.copy_modified( - arg_types=arg_types, - ret_type=expand_type(callable.ret_type, id_to_type), + ret_type=expand_type(callable.ret_type, id_to_type, allow_erased_callables), variables=remaining_tvars, type_guard=type_guard, - arg_kinds=arg_kinds, - arg_names=arg_names, ) diff --git a/mypy/build.py b/mypy/build.py index 31851680ea82..a4817d1866c7 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -12,6 +12,7 @@ from __future__ import annotations +import collections import contextlib import errno import gc @@ -49,7 +50,7 @@ from mypy.indirection import TypeIndirectionVisitor from mypy.messages import MessageBuilder from mypy.nodes import Import, ImportAll, ImportBase, ImportFrom, MypyFile, SymbolTable, TypeInfo -from mypy.partially_defined import PartiallyDefinedVariableVisitor +from mypy.partially_defined import PossiblyUndefinedVariableVisitor from mypy.semanal import SemanticAnalyzer from mypy.semanal_pass1 import SemanticAnalyzerPreAnalysis from mypy.util import ( @@ -99,7 +100,7 @@ stub_package_name, ) from mypy.types import Type -from mypy.typestate import TypeState, reset_global_state +from mypy.typestate import reset_global_state, type_state from mypy.version import __version__ # Switch to True to produce debug output related to fine-grained incremental @@ -275,9 +276,11 @@ def _build( try: graph = dispatch(sources, manager, stdout) if not options.fine_grained_incremental: - TypeState.reset_all_subtype_caches() + type_state.reset_all_subtype_caches() if options.timing_stats is not None: dump_timing_stats(options.timing_stats, graph) + if options.line_checking_stats is not None: + dump_line_checking_stats(options.line_checking_stats, 
graph) return BuildResult(manager, graph) finally: t0 = time.time() @@ -1889,6 +1892,10 @@ class State: # Cumulative time spent on this file, in microseconds (for profiling stats) time_spent_us: int = 0 + # Per-line type-checking time (cumulative time spent type-checking expressions + # on a given source code line). + per_line_checking_time_ns: dict[int, int] + def __init__( self, id: str | None, @@ -1940,7 +1947,7 @@ def __init__( raise if follow_imports == "silent": self.ignore_all = True - elif path and is_silent_import_module(manager, path): + elif path and is_silent_import_module(manager, path) and not root_source: self.ignore_all = True self.path = path if path: @@ -1956,6 +1963,7 @@ def __init__( source = "" self.source = source self.add_ancestors() + self.per_line_checking_time_ns = collections.defaultdict(int) t0 = time.time() self.meta = validate_meta(self.meta, self.id, self.path, self.ignore_all, manager) self.manager.add_stats(validate_meta_time=time.time() - t0) @@ -2320,6 +2328,7 @@ def type_checker(self) -> TypeChecker: self.tree, self.xpath, manager.plugin, + self.per_line_checking_time_ns, ) return self._type_checker @@ -2338,14 +2347,22 @@ def type_check_second_pass(self) -> bool: self.time_spent_us += time_spent_us(t0) return result - def detect_partially_defined_vars(self, type_map: dict[Expression, Type]) -> None: + def detect_possibly_undefined_vars(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" + if self.tree.is_stub: + # We skip stub files because they aren't actually executed. 
+ return manager = self.manager - if manager.errors.is_error_code_enabled(codes.PARTIALLY_DEFINED): - manager.errors.set_file(self.xpath, self.tree.fullname, options=manager.options) + manager.errors.set_file(self.xpath, self.tree.fullname, options=self.options) + if manager.errors.is_error_code_enabled( + codes.POSSIBLY_UNDEFINED + ) or manager.errors.is_error_code_enabled(codes.USED_BEFORE_DEF): self.tree.accept( - PartiallyDefinedVariableVisitor( - MessageBuilder(manager.errors, manager.modules), type_map + PossiblyUndefinedVariableVisitor( + MessageBuilder(manager.errors, manager.modules), + self.type_map(), + self.options, + self.tree.names, ) ) @@ -2445,7 +2462,7 @@ def update_fine_grained_deps(self, deps: dict[str, set[str]]) -> None: from mypy.server.deps import merge_dependencies # Lazy import to speed up startup merge_dependencies(self.compute_fine_grained_deps(), deps) - TypeState.update_protocol_deps(deps) + type_state.update_protocol_deps(deps) def valid_references(self) -> set[str]: assert self.ancestors is not None @@ -2465,6 +2482,12 @@ def write_cache(self) -> None: or self.options.cache_dir == os.devnull or self.options.fine_grained_incremental ): + if self.options.debug_serialize: + try: + self.tree.serialize() + except Exception: + print(f"Error serializing {self.id}", file=self.manager.stdout) + raise # Propagate to display traceback return is_errors = self.transitive_error if is_errors: @@ -2629,7 +2652,7 @@ def find_module_and_diagnose( else: skipping_module(manager, caller_line, caller_state, id, result) raise ModuleNotFound - if is_silent_import_module(manager, result): + if is_silent_import_module(manager, result) and not root_source: follow_imports = "silent" return (result, follow_imports) else: @@ -2728,11 +2751,8 @@ def in_partial_package(id: str, manager: BuildManager) -> bool: else: parent_mod = parent_st.tree if parent_mod is not None: - if parent_mod.is_partial_stub_package: - return True - else: - # Bail out soon, complete 
subpackage found - return False + # Bail out soon, complete subpackage found + return parent_mod.is_partial_stub_package id = parent return False @@ -2909,7 +2929,7 @@ def dispatch(sources: list[BuildSource], manager: BuildManager, stdout: TextIO) # then we need to collect fine grained protocol dependencies. # Since these are a global property of the program, they are calculated after we # processed the whole graph. - TypeState.add_all_protocol_deps(manager.fg_deps) + type_state.add_all_protocol_deps(manager.fg_deps) if not manager.options.fine_grained_incremental: rdeps = generate_deps_for_cache(manager, graph) write_deps_cache(rdeps, manager, graph) @@ -2946,13 +2966,22 @@ def dumps(self) -> str: def dump_timing_stats(path: str, graph: Graph) -> None: - """ - Dump timing stats for each file in the given graph - """ + """Dump timing stats for each file in the given graph.""" with open(path, "w") as f: - for k in sorted(graph.keys()): - v = graph[k] - f.write(f"{v.id} {v.time_spent_us}\n") + for id in sorted(graph): + f.write(f"{id} {graph[id].time_spent_us}\n") + + +def dump_line_checking_stats(path: str, graph: Graph) -> None: + """Dump per-line expression type checking stats.""" + with open(path, "w") as f: + for id in sorted(graph): + if not graph[id].per_line_checking_time_ns: + continue + f.write(f"{id}:\n") + for line in sorted(graph[id].per_line_checking_time_ns): + line_time = graph[id].per_line_checking_time_ns[line] + f.write(f"{line:>5} {line_time/1000:8.1f}\n") def dump_graph(graph: Graph, stdout: TextIO | None = None) -> None: @@ -3024,7 +3053,11 @@ def load_graph( for bs in sources: try: st = State( - id=bs.module, path=bs.path, source=bs.text, manager=manager, root_source=True + id=bs.module, + path=bs.path, + source=bs.text, + manager=manager, + root_source=not bs.followed, ) except ModuleNotFound: continue @@ -3388,7 +3421,7 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No graph[id].type_check_first_pass() if not 
graph[id].type_checker().deferred_nodes: unfinished_modules.discard(id) - graph[id].detect_partially_defined_vars(graph[id].type_map()) + graph[id].detect_possibly_undefined_vars() graph[id].finish_passes() while unfinished_modules: @@ -3397,7 +3430,7 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No continue if not graph[id].type_check_second_pass(): unfinished_modules.discard(id) - graph[id].detect_partially_defined_vars(graph[id].type_map()) + graph[id].detect_possibly_undefined_vars() graph[id].finish_passes() for id in stale: graph[id].generate_unused_ignore_notes() @@ -3576,9 +3609,10 @@ def record_missing_stub_packages(cache_dir: str, missing_stub_packages: set[str] def is_silent_import_module(manager: BuildManager, path: str) -> bool: - if not manager.options.no_silence_site_packages: - for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path: - if is_sub_path(path, dir): - # Silence errors in site-package dirs and typeshed - return True - return False + if manager.options.no_silence_site_packages: + return False + # Silence errors in site-package dirs and typeshed + return any( + is_sub_path(path, dir) + for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path + ) diff --git a/mypy/checker.py b/mypy/checker.py index 31177795e5e5..1f635c09bc0a 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -7,7 +7,6 @@ from contextlib import contextmanager, nullcontext from typing import ( AbstractSet, - Any, Callable, Dict, Generic, @@ -40,7 +39,7 @@ from mypy.erasetype import erase_type, erase_typevars, remove_instance_last_known_values from mypy.errorcodes import TYPE_VAR, UNUSED_AWAITABLE, UNUSED_COROUTINE, ErrorCode from mypy.errors import Errors, ErrorWatcher, report_internal_error -from mypy.expandtype import expand_type, expand_type_by_instance +from mypy.expandtype import expand_self_type, expand_type, expand_type_by_instance from mypy.join import join_types from 
mypy.literals import Key, literal, literal_hash from mypy.maptype import map_instance_to_supertype @@ -77,6 +76,7 @@ AssignmentStmt, Block, BreakStmt, + BytesExpr, CallExpr, ClassDef, ComparisonExpr, @@ -87,6 +87,7 @@ EllipsisExpr, Expression, ExpressionStmt, + FloatExpr, ForStmt, FuncBase, FuncDef, @@ -116,6 +117,7 @@ ReturnStmt, StarExpr, Statement, + StrExpr, SymbolNode, SymbolTable, SymbolTableNode, @@ -160,6 +162,7 @@ erase_to_bound, erase_to_union_or_bound, false_only, + fixup_partial_type, function_type, get_type_vars, is_literal_type_like, @@ -174,16 +177,19 @@ tuple_fallback, ) from mypy.types import ( + ANY_STRATEGY, + MYPYC_NATIVE_INT_NAMES, OVERLOAD_NAMES, AnyType, + BoolTypeQuery, CallableType, DeletedType, + ErasedType, FunctionLike, Instance, LiteralType, NoneType, Overloaded, - ParamSpecType, PartialType, ProperType, StarType, @@ -193,7 +199,6 @@ TypedDictType, TypeGuardedType, TypeOfAny, - TypeQuery, TypeTranslator, TypeType, TypeVarId, @@ -202,7 +207,6 @@ UnboundType, UninhabitedType, UnionType, - UnpackType, flatten_nested_unions, get_proper_type, get_proper_types, @@ -210,6 +214,7 @@ is_named_instance, is_optional, remove_optional, + store_argument_type, strip_type, ) from mypy.typetraverser import TypeTraverserVisitor @@ -361,6 +366,7 @@ def __init__( tree: MypyFile, path: str, plugin: Plugin, + per_line_checking_time_ns: dict[int, int], ) -> None: """Construct a type checker. @@ -373,8 +379,6 @@ def __init__( self.path = path self.msg = MessageBuilder(errors, modules) self.plugin = plugin - self.expr_checker = mypy.checkexpr.ExpressionChecker(self, self.msg, self.plugin) - self.pattern_checker = PatternChecker(self, self.msg, self.plugin) self.tscope = Scope() self.scope = CheckerScope(tree) self.binder = ConditionalTypeBinder() @@ -412,6 +416,12 @@ def __init__( # example when type-checking class decorators. 
self.allow_abstract_call = False + # Child checker objects for specific AST node types + self.expr_checker = mypy.checkexpr.ExpressionChecker( + self, self.msg, self.plugin, per_line_checking_time_ns + ) + self.pattern_checker = PatternChecker(self, self.msg, self.plugin) + @property def type_context(self) -> list[Type | None]: return self.expr_checker.type_context @@ -842,6 +852,10 @@ def get_generator_yield_type(self, return_type: Type, is_coroutine: bool) -> Typ if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_yield_type(item, is_coroutine) for item in return_type.items] + ) elif not self.is_generator_return_type( return_type, is_coroutine ) and not self.is_async_generator_return_type(return_type): @@ -872,6 +886,10 @@ def get_generator_receive_type(self, return_type: Type, is_coroutine: bool) -> T if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_receive_type(item, is_coroutine) for item in return_type.items] + ) elif not self.is_generator_return_type( return_type, is_coroutine ) and not self.is_async_generator_return_type(return_type): @@ -911,6 +929,10 @@ def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Ty if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_return_type(item, is_coroutine) for item in return_type.items] + ) elif not self.is_generator_return_type(return_type, is_coroutine): # If the function doesn't have a proper Generator (or # Awaitable) return type, anything is permissible. 
@@ -960,7 +982,10 @@ def _visit_func_def(self, defn: FuncDef) -> None: # Function definition overrides a variable initialized via assignment or a # decorated function. orig_type = defn.original_def.type - assert orig_type is not None, f"Error checking function redefinition {defn}" + if orig_type is None: + # If other branch is unreachable, we don't type check it and so we might + # not have a type for the original definition + return if isinstance(orig_type, PartialType): if orig_type.type is None: # Ah this is a partial type. Give it the type of the function. @@ -1170,25 +1195,8 @@ def check_func_def( if ctx.line < 0: ctx = typ self.fail(message_registry.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, ctx) - if typ.arg_kinds[i] == nodes.ARG_STAR: - if isinstance(arg_type, ParamSpecType): - pass - elif isinstance(arg_type, UnpackType): - arg_type = TupleType( - [arg_type], - fallback=self.named_generic_type( - "builtins.tuple", [self.named_type("builtins.object")] - ), - ) - else: - # builtins.tuple[T] is typing.Tuple[T, ...] - arg_type = self.named_generic_type("builtins.tuple", [arg_type]) - elif typ.arg_kinds[i] == nodes.ARG_STAR2: - if not isinstance(arg_type, ParamSpecType) and not typ.unpack_kwargs: - arg_type = self.named_generic_type( - "builtins.dict", [self.str_type(), arg_type] - ) - item.arguments[i].variable.type = arg_type + # Need to store arguments again for the expanded item. + store_argument_type(item, i, typ, self.named_generic_type) # Type check initialization expressions. body_is_trivial = is_trivial_body(defn.body) @@ -1869,6 +1877,23 @@ def check_method_override_for_base_with_name( original_class_or_static = False # a variable can't be class or static if isinstance(original_type, FunctionLike): + active_self_type = self.scope.active_self_type() + if isinstance(original_type, Overloaded) and active_self_type: + # If we have an overload, filter to overloads that match the self type. 
+ # This avoids false positives for concrete subclasses of generic classes, + # see testSelfTypeOverrideCompatibility for an example. + # It's possible we might want to do this as part of bind_and_map_method + filtered_items = [ + item + for item in original_type.items + if not item.arg_types or is_subtype(active_self_type, item.arg_types[0]) + ] + # If we don't have any filtered_items, maybe it's always a valid override + # of the superclass? However if you get to that point you're in murky type + # territory anyway, so we just preserve the type and have the behaviour match + # that of older versions of mypy. + if filtered_items: + original_type = Overloaded(filtered_items) original_type = self.bind_and_map_method(base_attr, original_type, defn.info, base) if original_node and is_property(original_node): original_type = get_property_type(original_type) @@ -2154,7 +2179,7 @@ def visit_class_def(self, defn: ClassDef) -> None: temp = self.temp_node(sig, context=decorator) fullname = None if isinstance(decorator, RefExpr): - fullname = decorator.fullname + fullname = decorator.fullname or None # TODO: Figure out how to have clearer error messages. # (e.g. "class decorator must be a function that accepts a type." @@ -2292,9 +2317,7 @@ def is_final_enum_value(self, sym: SymbolTableNode) -> bool: ): return False - if self.is_stub or sym.node.has_explicit_value: - return True - return False + return self.is_stub or sym.node.has_explicit_value def check_enum_bases(self, defn: ClassDef) -> None: """ @@ -2469,11 +2492,20 @@ class C(B, A[int]): ... # this is unsafe because... 
second_sig = self.bind_and_map_method(second, second_type, ctx, base2) ok = is_subtype(first_sig, second_sig, ignore_pos_arg_names=True) elif first_type and second_type: + if isinstance(first.node, Var): + first_type = expand_self_type(first.node, first_type, fill_typevars(ctx)) + if isinstance(second.node, Var): + second_type = expand_self_type(second.node, second_type, fill_typevars(ctx)) ok = is_equivalent(first_type, second_type) if not ok: second_node = base2[name].node - if isinstance(second_node, Decorator) and second_node.func.is_property: - ok = is_subtype(first_type, cast(CallableType, second_type).ret_type) + if ( + isinstance(second_type, FunctionLike) + and second_node is not None + and is_property(second_node) + ): + second_type = get_property_type(second_type) + ok = is_subtype(first_type, second_type) else: if first_type is None: self.msg.cannot_determine_type_in_base(name, base1.name, ctx) @@ -2528,8 +2560,8 @@ def visit_import_from(self, node: ImportFrom) -> None: def visit_import_all(self, node: ImportAll) -> None: self.check_import(node) - def visit_import(self, s: Import) -> None: - pass + def visit_import(self, node: Import) -> None: + self.check_import(node) def check_import(self, node: ImportBase) -> None: for assign in node.assignments: @@ -2657,26 +2689,7 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: self.msg.annotation_in_unchecked_function(context=s) def check_type_alias_rvalue(self, s: AssignmentStmt) -> None: - if not (self.is_stub and isinstance(s.rvalue, OpExpr) and s.rvalue.op == "|"): - # We do this mostly for compatibility with old semantic analyzer. - # TODO: should we get rid of this? - alias_type = self.expr_checker.accept(s.rvalue) - else: - # Avoid type checking 'X | Y' in stubs, since there can be errors - # on older Python targets. 
- alias_type = AnyType(TypeOfAny.special_form) - - def accept_items(e: Expression) -> None: - if isinstance(e, OpExpr) and e.op == "|": - accept_items(e.left) - accept_items(e.right) - else: - # Nested union types have been converted to type context - # in semantic analysis (such as in 'list[int | str]'), - # so we don't need to deal with them here. - self.expr_checker.accept(e) - - accept_items(s.rvalue) + alias_type = self.expr_checker.accept(s.rvalue) self.store_type(s.lvalues[-1], alias_type) def check_assignment( @@ -2739,8 +2752,8 @@ def check_assignment( # None initializers preserve the partial None type. return - if is_valid_inferred_type(rvalue_type): - var = lvalue_type.var + var = lvalue_type.var + if is_valid_inferred_type(rvalue_type, is_lvalue_final=var.is_final): partial_types = self.find_partial_types(var) if partial_types is not None: if not self.current_node_deferred: @@ -3049,6 +3062,8 @@ def lvalue_type_from_base( if base_var: base_node = base_var.node base_type = base_var.type + if isinstance(base_node, Var) and base_type is not None: + base_type = expand_self_type(base_node, base_type, fill_typevars(expr_node.info)) if isinstance(base_node, Decorator): base_node = base_node.func base_type = base_node.type @@ -3453,8 +3468,7 @@ def check_multi_assignment_from_union( assert declared_type is not None clean_items.append((type, declared_type)) - # TODO: fix signature of zip() in typeshed. 
- types, declared_types = cast(Any, zip)(*clean_items) + types, declared_types = zip(*clean_items) self.binder.assign_type( expr, make_simplified_union(list(types)), @@ -3689,7 +3703,10 @@ def infer_variable_type( """Infer the type of initialized variables from initializer type.""" if isinstance(init_type, DeletedType): self.msg.deleted_as_rvalue(init_type, context) - elif not is_valid_inferred_type(init_type) and not self.no_partial_types: + elif ( + not is_valid_inferred_type(init_type, is_lvalue_final=name.is_final) + and not self.no_partial_types + ): # We cannot use the type of the initialization expression for full type # inference (it's not specific enough), but we might be able to give # partial type which will be made more specific later. A partial type @@ -3817,6 +3834,23 @@ def inference_error_fallback_type(self, type: Type) -> Type: # we therefore need to erase them. return erase_typevars(fallback) + def simple_rvalue(self, rvalue: Expression) -> bool: + """Returns True for expressions for which inferred type should not depend on context. + + Note that this function can still return False for some expressions where inferred type + does not depend on context. It only exists for performance optimizations. + """ + if isinstance(rvalue, (IntExpr, StrExpr, BytesExpr, FloatExpr, RefExpr)): + return True + if isinstance(rvalue, CallExpr): + if isinstance(rvalue.callee, RefExpr) and isinstance(rvalue.callee.node, FuncBase): + typ = rvalue.callee.node.type + if isinstance(typ, CallableType): + return not typ.variables + elif isinstance(typ, Overloaded): + return not any(item.variables for item in typ.items) + return False + def check_simple_assignment( self, lvalue_type: Type | None, @@ -3838,6 +3872,30 @@ def check_simple_assignment( rvalue_type = self.expr_checker.accept( rvalue, lvalue_type, always_allow_any=always_allow_any ) + if ( + isinstance(get_proper_type(lvalue_type), UnionType) + # Skip literal types, as they have special logic (for better errors). 
+ and not isinstance(get_proper_type(rvalue_type), LiteralType) + and not self.simple_rvalue(rvalue) + ): + # Try re-inferring r.h.s. in empty context, and use that if it + # results in a narrower type. We don't do this always because this + # may cause some perf impact, plus we want to partially preserve + # the old behavior. This helps with various practical examples, see + # e.g. testOptionalTypeNarrowedByGenericCall. + with self.msg.filter_errors() as local_errors, self.local_type_map() as type_map: + alt_rvalue_type = self.expr_checker.accept( + rvalue, None, always_allow_any=always_allow_any + ) + if ( + not local_errors.has_new_errors() + # Skip Any type, since it is special cased in binder. + and not isinstance(get_proper_type(alt_rvalue_type), AnyType) + and is_valid_inferred_type(alt_rvalue_type) + and is_proper_subtype(alt_rvalue_type, rvalue_type) + ): + rvalue_type = alt_rvalue_type + self.store_types(type_map) if isinstance(rvalue_type, DeletedType): self.msg.deleted_as_rvalue(rvalue_type, context) if isinstance(lvalue_type, DeletedType): @@ -4307,7 +4365,7 @@ def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: with self.binder.frame_context(can_skip=True, fall_through=4): typ = s.types[i] if typ: - t = self.check_except_handler_test(typ) + t = self.check_except_handler_test(typ, s.is_star) var = s.vars[i] if var: # To support local variables, we make this a definition line, @@ -4327,7 +4385,7 @@ def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: if s.else_body: self.accept(s.else_body) - def check_except_handler_test(self, n: Expression) -> Type: + def check_except_handler_test(self, n: Expression, is_star: bool) -> Type: """Type check an exception handler test clause.""" typ = self.expr_checker.accept(n) @@ -4343,22 +4401,47 @@ def check_except_handler_test(self, n: Expression) -> Type: item = ttype.items[0] if not item.is_type_obj(): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return 
AnyType(TypeOfAny.from_error) - exc_type = item.ret_type + return self.default_exception_type(is_star) + exc_type = erase_typevars(item.ret_type) elif isinstance(ttype, TypeType): exc_type = ttype.item else: self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) + return self.default_exception_type(is_star) if not is_subtype(exc_type, self.named_type("builtins.BaseException")): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) + return self.default_exception_type(is_star) all_types.append(exc_type) + if is_star: + new_all_types: list[Type] = [] + for typ in all_types: + if is_proper_subtype(typ, self.named_type("builtins.BaseExceptionGroup")): + self.fail(message_registry.INVALID_EXCEPTION_GROUP, n) + new_all_types.append(AnyType(TypeOfAny.from_error)) + else: + new_all_types.append(typ) + return self.wrap_exception_group(new_all_types) return make_simplified_union(all_types) + def default_exception_type(self, is_star: bool) -> Type: + """Exception type to return in case of a previous type error.""" + any_type = AnyType(TypeOfAny.from_error) + if is_star: + return self.named_generic_type("builtins.ExceptionGroup", [any_type]) + return any_type + + def wrap_exception_group(self, types: Sequence[Type]) -> Type: + """Transform except* variable type into an appropriate exception group.""" + arg = make_simplified_union(types) + if is_subtype(arg, self.named_type("builtins.Exception")): + base = "builtins.ExceptionGroup" + else: + base = "builtins.BaseExceptionGroup" + return self.named_generic_type(base, [arg]) + def get_types_from_except_handler(self, typ: Type, n: Expression) -> list[Type]: """Helper for check_except_handler_test to retrieve handler types.""" typ = get_proper_type(typ) @@ -4435,10 +4518,7 @@ def analyze_range_native_int_type(self, expr: Expression) -> Type | None: ok = True for arg in expr.args: argt = get_proper_type(self.lookup_type(arg)) - if isinstance(argt, Instance) 
and argt.type.fullname in ( - "mypy_extensions.i64", - "mypy_extensions.i32", - ): + if isinstance(argt, Instance) and argt.type.fullname in MYPYC_NATIVE_INT_NAMES: if native_int is None: native_int = argt elif argt != native_int: @@ -4516,7 +4596,7 @@ def visit_decorator(self, e: Decorator) -> None: temp = self.temp_node(sig, context=e) fullname = None if isinstance(d, RefExpr): - fullname = d.fullname + fullname = d.fullname or None # if this is a expression like @b.a where b is an object, get the type of b # so we can pass it the method hook in the plugins object_type: Type | None = None @@ -4580,7 +4660,7 @@ def visit_with_stmt(self, s: WithStmt) -> None: # exceptions or not. We determine this using a heuristic based on the # return type of the __exit__ method -- see the discussion in # https://github.com/python/mypy/issues/7214 and the section about context managers - # in https://github.com/python/typeshed/blob/master/CONTRIBUTING.md#conventions + # in https://github.com/python/typeshed/blob/main/CONTRIBUTING.md#conventions # for more details. exit_ret_type = get_proper_type(exit_ret_type) @@ -4778,7 +4858,7 @@ def make_fake_typeinfo( return cdef, info def intersect_instances( - self, instances: tuple[Instance, Instance], ctx: Context + self, instances: tuple[Instance, Instance], errors: list[tuple[str, str]] ) -> Instance | None: """Try creating an ad-hoc intersection of the given instances. @@ -4805,6 +4885,17 @@ def intersect_instances( curr_module = self.scope.stack[0] assert isinstance(curr_module, MypyFile) + # First, retry narrowing while allowing promotions (they are disabled by default + # for isinstance() checks, etc). This way we will still type-check branches like + # x: complex = 1 + # if isinstance(x, int): + # ... 
+ left, right = instances + if is_proper_subtype(left, right, ignore_promotions=False): + return left + if is_proper_subtype(right, left, ignore_promotions=False): + return right + def _get_base_classes(instances_: tuple[Instance, Instance]) -> list[Instance]: base_classes_ = [] for inst in instances_: @@ -4845,17 +4936,10 @@ def _make_fake_typeinfo_and_full_name( self.check_multiple_inheritance(info) info.is_intersection = True except MroError: - if self.should_report_unreachable_issues(): - self.msg.impossible_intersection( - pretty_names_list, "inconsistent method resolution order", ctx - ) + errors.append((pretty_names_list, "inconsistent method resolution order")) return None - if local_errors.has_new_errors(): - if self.should_report_unreachable_issues(): - self.msg.impossible_intersection( - pretty_names_list, "incompatible method signatures", ctx - ) + errors.append((pretty_names_list, "incompatible method signatures")) return None curr_module.names[full_name] = SymbolTableNode(GDEF, info) @@ -5019,6 +5103,45 @@ def conditional_callable_type_map( return None, {} + def conditional_types_for_iterable( + self, item_type: Type, iterable_type: Type + ) -> tuple[Type | None, Type | None]: + """ + Narrows the type of `iterable_type` based on the type of `item_type`. + For now, we only support narrowing unions of TypedDicts based on left operand being literal string(s). 
+ """ + if_types: list[Type] = [] + else_types: list[Type] = [] + + iterable_type = get_proper_type(iterable_type) + if isinstance(iterable_type, UnionType): + possible_iterable_types = get_proper_types(iterable_type.relevant_items()) + else: + possible_iterable_types = [iterable_type] + + item_str_literals = try_getting_str_literals_from_type(item_type) + + for possible_iterable_type in possible_iterable_types: + if item_str_literals and isinstance(possible_iterable_type, TypedDictType): + for key in item_str_literals: + if key in possible_iterable_type.required_keys: + if_types.append(possible_iterable_type) + elif ( + key in possible_iterable_type.items or not possible_iterable_type.is_final + ): + if_types.append(possible_iterable_type) + else_types.append(possible_iterable_type) + else: + else_types.append(possible_iterable_type) + else: + if_types.append(possible_iterable_type) + else_types.append(possible_iterable_type) + + return ( + UnionType.make_union(if_types) if if_types else None, + UnionType.make_union(else_types) if else_types else None, + ) + def _is_truthy_type(self, t: ProperType) -> bool: return ( ( @@ -5026,6 +5149,7 @@ def _is_truthy_type(self, t: ProperType) -> bool: and bool(t.type) and not t.type.has_readable_member("__bool__") and not t.type.has_readable_member("__len__") + and t.type.fullname != "builtins.object" ) or isinstance(t, FunctionLike) or ( @@ -5061,6 +5185,14 @@ def format_expr_type() -> str: self.fail(message_registry.FUNCTION_ALWAYS_TRUE.format(format_type(t)), expr) elif isinstance(t, UnionType): self.fail(message_registry.TYPE_ALWAYS_TRUE_UNIONTYPE.format(format_expr_type()), expr) + elif isinstance(t, Instance) and t.type.fullname == "typing.Iterable": + _, info = self.make_fake_typeinfo("typing", "Collection", "Collection", []) + self.fail( + message_registry.ITERABLE_ALWAYS_TRUE.format( + format_expr_type(), format_type(Instance(info, t.args)) + ), + expr, + ) else: 
self.fail(message_registry.TYPE_ALWAYS_TRUE.format(format_expr_type()), expr) @@ -5326,28 +5458,42 @@ def has_no_custom_eq_checks(t: Type) -> bool: elif operator in {"in", "not in"}: assert len(expr_indices) == 2 left_index, right_index = expr_indices - if left_index not in narrowable_operand_index_to_hash: - continue - item_type = operand_types[left_index] - collection_type = operand_types[right_index] + iterable_type = operand_types[right_index] - # We only try and narrow away 'None' for now - if not is_optional(item_type): - continue + if_map, else_map = {}, {} + + if left_index in narrowable_operand_index_to_hash: + # We only try and narrow away 'None' for now + if is_optional(item_type): + collection_item_type = get_proper_type( + builtin_item_type(iterable_type) + ) + if ( + collection_item_type is not None + and not is_optional(collection_item_type) + and not ( + isinstance(collection_item_type, Instance) + and collection_item_type.type.fullname == "builtins.object" + ) + and is_overlapping_erased_types(item_type, collection_item_type) + ): + if_map[operands[left_index]] = remove_optional(item_type) + + if right_index in narrowable_operand_index_to_hash: + if_type, else_type = self.conditional_types_for_iterable( + item_type, iterable_type + ) + expr = operands[right_index] + if if_type is None: + if_map = None + else: + if_map[expr] = if_type + if else_type is None: + else_map = None + else: + else_map[expr] = else_type - collection_item_type = get_proper_type(builtin_item_type(collection_type)) - if collection_item_type is None or is_optional(collection_item_type): - continue - if ( - isinstance(collection_item_type, Instance) - and collection_item_type.type.fullname == "builtins.object" - ): - continue - if is_overlapping_erased_types(item_type, collection_item_type): - if_map, else_map = {operands[left_index]: remove_optional(item_type)}, {} - else: - continue else: if_map = {} else_map = {} @@ -5796,6 +5942,10 @@ def check_subtype( if isinstance(msg, 
str): msg = ErrorMessage(msg, code=code) + if self.msg.prefer_simple_messages(): + self.fail(msg, context) # Fast path -- skip all fancy logic + return False + orig_subtype = subtype subtype = get_proper_type(subtype) orig_supertype = supertype @@ -5830,7 +5980,7 @@ def check_subtype( if ( isinstance(supertype, Instance) and supertype.type.is_protocol - and isinstance(subtype, (Instance, TupleType, TypedDictType)) + and isinstance(subtype, (CallableType, Instance, TupleType, TypedDictType)) ): self.msg.report_protocol_problems(subtype, supertype, context, code=msg.code) if isinstance(supertype, CallableType) and isinstance(subtype, Instance): @@ -5838,10 +5988,11 @@ def check_subtype( if call: self.msg.note_call(subtype, call, context, code=msg.code) if isinstance(subtype, (CallableType, Overloaded)) and isinstance(supertype, Instance): - if supertype.type.is_protocol and supertype.type.protocol_members == ["__call__"]: + if supertype.type.is_protocol and "__call__" in supertype.type.protocol_members: call = find_member("__call__", supertype, subtype, is_operator=True) assert call is not None - self.msg.note_call(supertype, call, context, code=msg.code) + if not is_subtype(subtype, call, options=self.options): + self.msg.note_call(supertype, call, context, code=msg.code) self.check_possible_missing_await(subtype, supertype, context) return False @@ -5951,10 +6102,7 @@ def store_type(self, node: Expression, typ: Type) -> None: self._type_maps[-1][node] = typ def has_type(self, node: Expression) -> bool: - for m in reversed(self._type_maps): - if node in m: - return True - return False + return any(node in m for m in reversed(self._type_maps)) def lookup_type_or_none(self, node: Expression) -> Type | None: for m in reversed(self._type_maps): @@ -6021,11 +6169,11 @@ def lookup_qualified(self, name: str) -> SymbolTableNode: last = parts[-1] if last in n.names: return n.names[last] - elif len(parts) == 2 and parts[0] == "builtins": - fullname = "builtins." 
+ last + elif len(parts) == 2 and parts[0] in ("builtins", "typing"): + fullname = ".".join(parts) if fullname in SUGGESTED_TEST_FIXTURES: - suggestion = ", e.g. add '[builtins fixtures/{}]' to your test".format( - SUGGESTED_TEST_FIXTURES[fullname] + suggestion = ", e.g. add '[{} fixtures/{}]' to your test".format( + parts[0], SUGGESTED_TEST_FIXTURES[fullname] ) else: suggestion = "" @@ -6091,7 +6239,7 @@ def enter_partial_types( self.msg.need_annotation_for_var(var, context, self.options.python_version) self.partial_reported.add(var) if var.type: - fixed = self.fixup_partial_type(var.type) + fixed = fixup_partial_type(var.type) var.invalid_partial_type = fixed != var.type var.type = fixed @@ -6122,29 +6270,14 @@ def handle_partial_var_type( else: # Defer the node -- we might get a better type in the outer scope self.handle_cannot_determine_type(node.name, context) - return self.fixup_partial_type(typ) - - def fixup_partial_type(self, typ: Type) -> Type: - """Convert a partial type that we couldn't resolve into something concrete. - - This means, for None we make it Optional[Any], and for anything else we - fill in all of the type arguments with Any. - """ - if not isinstance(typ, PartialType): - return typ - if typ.type is None: - return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) - else: - return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) + return fixup_partial_type(typ) def is_defined_in_base_class(self, var: Var) -> bool: - if var.info: - for base in var.info.mro[1:]: - if base.get(var.name) is not None: - return True - if var.info.fallback_to_any: - return True - return False + if not var.info: + return False + return var.info.fallback_to_any or any( + base.get(var.name) is not None for base in var.info.mro[1:] + ) def find_partial_types(self, var: Var) -> dict[Var, Context] | None: """Look for an active partial type scope containing variable. 
@@ -6318,15 +6451,20 @@ def conditional_types_with_intersection( possible_target_types.append(item) out = [] + errors: list[tuple[str, str]] = [] for v in possible_expr_types: if not isinstance(v, Instance): return yes_type, no_type for t in possible_target_types: - intersection = self.intersect_instances((v, t), ctx) + intersection = self.intersect_instances((v, t), errors) if intersection is None: continue out.append(intersection) if len(out) == 0: + # Only report errors if no element in the union worked. + if self.should_report_unreachable_issues(): + for types, reason in errors: + self.msg.impossible_intersection(types, reason, ctx) return UninhabitedType(), expr_type new_yes_type = make_simplified_union(out) return new_yes_type, expr_type @@ -6340,8 +6478,7 @@ def is_writable_attribute(self, node: Node) -> bool: elif isinstance(node, OverloadedFuncDef) and node.is_property: first_item = cast(Decorator, node.items[0]) return first_item.var.is_settable_property - else: - return False + return False def get_isinstance_type(self, expr: Expression) -> list[TypeRange] | None: if isinstance(expr, OpExpr) and expr.op == "|": @@ -6656,6 +6793,8 @@ def builtin_item_type(tp: Type) -> Type | None: "builtins.dict", "builtins.set", "builtins.frozenset", + "_collections_abc.dict_keys", + "typing.KeysView", ]: if not tp.args: # TODO: fix tuple in lib-stub/builtins.pyi (it should be generic). @@ -6981,33 +7120,48 @@ def infer_operator_assignment_method(typ: Type, operator: str) -> tuple[bool, st return False, method -def is_valid_inferred_type(typ: Type) -> bool: - """Is an inferred type valid? +def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool: + """Is an inferred type valid and needs no further refinement? - Examples of invalid types include the None type or List[]. + Examples of invalid types include the None type (when we are not assigning + None to a final lvalue) or List[]. 
When not doing strict Optional checking, all types containing None are invalid. When doing strict Optional checking, only None and types that are incompletely defined (i.e. contain UninhabitedType) are invalid. """ - if isinstance(get_proper_type(typ), (NoneType, UninhabitedType)): - # With strict Optional checking, we *may* eventually infer NoneType when - # the initializer is None, but we only do that if we can't infer a - # specific Optional type. This resolution happens in - # leave_partial_types when we pop a partial types scope. + proper_type = get_proper_type(typ) + if isinstance(proper_type, NoneType): + # If the lvalue is final, we may immediately infer NoneType when the + # initializer is None. + # + # If not, we want to defer making this decision. The final inferred + # type could either be NoneType or an Optional type, depending on + # the context. This resolution happens in leave_partial_types when + # we pop a partial types scope. + return is_lvalue_final + elif isinstance(proper_type, UninhabitedType): return False - return not typ.accept(NothingSeeker()) + return not typ.accept(InvalidInferredTypes()) + +class InvalidInferredTypes(BoolTypeQuery): + """Find type components that are not valid for an inferred type. -class NothingSeeker(TypeQuery[bool]): - """Find any types resulting from failed (ambiguous) type inference.""" + These include type, and any types resulting from failed + (ambiguous) type inference. + """ def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return t.ambiguous + def visit_erased_type(self, t: ErasedType) -> bool: + # This can happen inside a lambda. 
+ return True + class SetNothingToAny(TypeTranslator): """Replace all ambiguous types with Any (to avoid spurious extra errors).""" @@ -7018,7 +7172,7 @@ def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Target of the alias cannot by an ambiguous , so we just + # Target of the alias cannot be an ambiguous , so we just # replace the arguments. return t.copy_modified(args=[a.accept(self) for a in t.args]) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index ac16f9c9c813..d918eb9b5467 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3,6 +3,7 @@ from __future__ import annotations import itertools +import time from contextlib import contextmanager from typing import Callable, ClassVar, Iterator, List, Optional, Sequence, cast from typing_extensions import Final, TypeAlias as _TypeAlias, overload @@ -111,6 +112,7 @@ custom_special_method, erase_to_union_or_bound, false_only, + fixup_partial_type, function_type, is_literal_type_like, make_simplified_union, @@ -149,6 +151,7 @@ TypeVarType, UninhabitedType, UnionType, + UnpackType, flatten_nested_unions, get_proper_type, get_proper_types, @@ -159,8 +162,9 @@ is_self_type_like, remove_optional, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.typevars import fill_typevars +from mypy.typevartuples import find_unpack_in_list from mypy.util import split_module_names from mypy.visitor import ExpressionVisitor @@ -212,8 +216,8 @@ def extract_refexpr_names(expr: RefExpr) -> set[str]: Note that currently, the only two subclasses of RefExpr are NameExpr and MemberExpr.""" output: set[str] = set() - while isinstance(expr.node, MypyFile) or expr.fullname is not None: - if isinstance(expr.node, MypyFile) and expr.fullname is not None: + while isinstance(expr.node, MypyFile) or expr.fullname: + if isinstance(expr.node, MypyFile) and expr.fullname: # If it's None, something's wrong (perhaps due to an # 
import cycle or a suppressed error). For now we just # skip it. @@ -224,7 +228,7 @@ def extract_refexpr_names(expr: RefExpr) -> set[str]: if isinstance(expr.node, TypeInfo): # Reference to a class or a nested class output.update(split_module_names(expr.node.module_name)) - elif expr.fullname is not None and "." in expr.fullname and not is_suppressed_import: + elif "." in expr.fullname and not is_suppressed_import: # Everything else (that is not a silenced import within a class) output.add(expr.fullname.rsplit(".", 1)[0]) break @@ -261,11 +265,22 @@ class ExpressionChecker(ExpressionVisitor[Type]): strfrm_checker: StringFormatterChecker plugin: Plugin - def __init__(self, chk: mypy.checker.TypeChecker, msg: MessageBuilder, plugin: Plugin) -> None: + def __init__( + self, + chk: mypy.checker.TypeChecker, + msg: MessageBuilder, + plugin: Plugin, + per_line_checking_time_ns: dict[int, int], + ) -> None: """Construct an expression type checker.""" self.chk = chk self.msg = msg self.plugin = plugin + self.per_line_checking_time_ns = per_line_checking_time_ns + self.collect_line_checking_stats = chk.options.line_checking_stats is not None + # Are we already visiting some expression? This is used to avoid double counting + # time for nested expressions. + self.in_expression = False self.type_context = [None] # Temporary overrides for expression types. This is currently @@ -511,7 +526,7 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> # There are two special cases where plugins might act: # * A "static" reference/alias to a class or function; # get_function_hook() will be invoked for these. 
- fullname = e.callee.fullname + fullname = e.callee.fullname or None if isinstance(e.callee.node, TypeAlias): target = get_proper_type(e.callee.node.target) if isinstance(target, Instance): @@ -521,7 +536,7 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> # get_method_hook() and get_method_signature_hook() will # be invoked for these. if ( - fullname is None + not fullname and isinstance(e.callee, MemberExpr) and self.chk.has_type(e.callee.expr) ): @@ -590,7 +605,7 @@ def method_fullname(self, object_type: Type, method_name: str) -> str | None: elif isinstance(object_type, TupleType): type_name = tuple_fallback(object_type).type.fullname - if type_name is not None: + if type_name: return f"{type_name}.{method_name}" else: return None @@ -775,17 +790,21 @@ def check_typeddict_call_with_kwargs( context: Context, orig_callee: Type | None, ) -> Type: - if not (callee.required_keys <= set(kwargs.keys()) <= set(callee.items.keys())): + actual_keys = kwargs.keys() + if not (callee.required_keys <= actual_keys <= callee.items.keys()): expected_keys = [ key for key in callee.items.keys() - if key in callee.required_keys or key in kwargs.keys() + if key in callee.required_keys or key in actual_keys ] - actual_keys = kwargs.keys() self.msg.unexpected_typeddict_keys( callee, expected_keys=expected_keys, actual_keys=list(actual_keys), context=context ) - return AnyType(TypeOfAny.from_error) + if callee.required_keys > actual_keys: + # found_set is a sub-set of the required_keys + # This means we're missing some keys and as such, we can't + # properly type the object + return AnyType(TypeOfAny.from_error) orig_callee = get_proper_type(orig_callee) if isinstance(orig_callee, CallableType): @@ -880,7 +899,8 @@ def try_infer_partial_type(self, e: CallExpr) -> None: return var, partial_types = ret typ = self.try_infer_partial_value_type_from_call(e, callee.name, var) - if typ is not None: + # Var may be deleted from partial_types in 
try_infer_partial_value_type_from_call + if typ is not None and var in partial_types: var.type = typ del partial_types[var] elif isinstance(callee.expr, IndexExpr) and isinstance(callee.expr.base, RefExpr): @@ -1403,13 +1423,21 @@ def check_callable_call( ) callee = freshen_function_type_vars(callee) callee = self.infer_function_type_arguments_using_context(callee, context) + if need_refresh: + # Argument kinds etc. may have changed due to + # ParamSpec or TypeVarTuple variables being replaced with an arbitrary + # number of arguments; recalculate actual-to-formal map + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee.arg_kinds, + callee.arg_names, + lambda i: self.accept(args[i]), + ) callee = self.infer_function_type_arguments( callee, args, arg_kinds, formal_to_actual, context ) if need_refresh: - # Argument kinds etc. may have changed due to - # ParamSpec variables being replaced with an arbitrary - # number of arguments; recalculate actual-to-formal map formal_to_actual = map_actuals_to_formals( arg_kinds, arg_names, @@ -1561,21 +1589,21 @@ def infer_arg_types_in_empty_context(self, args: list[Expression]) -> list[Type] res.append(arg_type) return res - @contextmanager - def allow_unions(self, type_context: Type) -> Iterator[None]: - # This is a hack to better support inference for recursive types. - # When the outer context for a function call is known to be recursive, - # we solve type constraints inferred from arguments using unions instead - # of joins. This is a bit arbitrary, but in practice it works for most - # cases. A cleaner alternative would be to switch to single bin type - # inference, but this is a lot of work. - old = TypeState.infer_unions + def infer_more_unions_for_recursive_type(self, type_context: Type) -> bool: + """Adjust type inference of unions if type context has a recursive type. + + Return the old state. The caller must assign it to type_state.infer_unions + afterwards. 
+ + This is a hack to better support inference for recursive types. + + Note: This is performance-sensitive and must not be a context manager + until mypyc supports them better. + """ + old = type_state.infer_unions if has_recursive_types(type_context): - TypeState.infer_unions = True - try: - yield - finally: - TypeState.infer_unions = old + type_state.infer_unions = True + return old def infer_arg_types_in_context( self, @@ -1596,8 +1624,16 @@ def infer_arg_types_in_context( for i, actuals in enumerate(formal_to_actual): for ai in actuals: if not arg_kinds[ai].is_star(): - with self.allow_unions(callee.arg_types[i]): - res[ai] = self.accept(args[ai], callee.arg_types[i]) + arg_type = callee.arg_types[i] + # When the outer context for a function call is known to be recursive, + # we solve type constraints inferred from arguments using unions instead + # of joins. This is a bit arbitrary, but in practice it works for most + # cases. A cleaner alternative would be to switch to single bin type + # inference, but this is a lot of work. + old = self.infer_more_unions_for_recursive_type(arg_type) + res[ai] = self.accept(args[ai], arg_type) + # We need to manually restore union inference state, ugh. + type_state.infer_unions = old # Fill in the rest of the argument types. for i, t in enumerate(res): @@ -1998,11 +2034,84 @@ def check_argument_types( # Keep track of consumed tuple *arg items. mapper = ArgTypeExpander(self.argument_infer_context()) for i, actuals in enumerate(formal_to_actual): - for actual in actuals: - actual_type = arg_types[actual] + orig_callee_arg_type = get_proper_type(callee.arg_types[i]) + + # Checking the case that we have more than one item but the first argument + # is an unpack, so this would be something like: + # [Tuple[Unpack[Ts]], int] + # + # In this case we have to check everything together, we do this by re-unifying + # the suffices to the tuple, e.g. 
a single actual like + # Tuple[Unpack[Ts], int] + expanded_tuple = False + if len(actuals) > 1: + first_actual_arg_type = get_proper_type(arg_types[actuals[0]]) + if ( + isinstance(first_actual_arg_type, TupleType) + and len(first_actual_arg_type.items) == 1 + and isinstance(get_proper_type(first_actual_arg_type.items[0]), UnpackType) + ): + # TODO: use walrus operator + actual_types = [first_actual_arg_type.items[0]] + [ + arg_types[a] for a in actuals[1:] + ] + actual_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1) + + assert isinstance(orig_callee_arg_type, TupleType) + assert orig_callee_arg_type.items + callee_arg_types = orig_callee_arg_type.items + callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * ( + len(orig_callee_arg_type.items) - 1 + ) + expanded_tuple = True + + if not expanded_tuple: + actual_types = [arg_types[a] for a in actuals] + actual_kinds = [arg_kinds[a] for a in actuals] + if isinstance(orig_callee_arg_type, UnpackType): + unpacked_type = get_proper_type(orig_callee_arg_type.type) + if isinstance(unpacked_type, TupleType): + inner_unpack_index = find_unpack_in_list(unpacked_type.items) + if inner_unpack_index is None: + callee_arg_types = unpacked_type.items + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + inner_unpack = get_proper_type(unpacked_type.items[inner_unpack_index]) + assert isinstance(inner_unpack, UnpackType) + inner_unpacked_type = get_proper_type(inner_unpack.type) + # We assume heterogenous tuples are desugared earlier + assert isinstance(inner_unpacked_type, Instance) + assert inner_unpacked_type.type.fullname == "builtins.tuple" + callee_arg_types = ( + unpacked_type.items[:inner_unpack_index] + + [inner_unpacked_type.args[0]] + * (len(actuals) - len(unpacked_type.items) + 1) + + unpacked_type.items[inner_unpack_index + 1 :] + ) + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + assert isinstance(unpacked_type, Instance) + assert unpacked_type.type.fullname == "builtins.tuple" + 
callee_arg_types = [unpacked_type.args[0]] * len(actuals) + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + callee_arg_types = [orig_callee_arg_type] * len(actuals) + callee_arg_kinds = [callee.arg_kinds[i]] * len(actuals) + + assert len(actual_types) == len(actuals) == len(actual_kinds) + + if len(callee_arg_types) != len(actual_types): + # TODO: Improve error message + self.chk.fail("Invalid number of arguments", context) + continue + + assert len(callee_arg_types) == len(actual_types) + assert len(callee_arg_types) == len(callee_arg_kinds) + for actual, actual_type, actual_kind, callee_arg_type, callee_arg_kind in zip( + actuals, actual_types, actual_kinds, callee_arg_types, callee_arg_kinds + ): if actual_type is None: continue # Some kind of error was already reported. - actual_kind = arg_kinds[actual] # Check that a *arg is valid as varargs. if actual_kind == nodes.ARG_STAR and not self.is_valid_var_arg(actual_type): self.msg.invalid_var_arg(actual_type, context) @@ -2012,13 +2121,13 @@ def check_argument_types( is_mapping = is_subtype(actual_type, self.chk.named_type("typing.Mapping")) self.msg.invalid_keyword_var_arg(actual_type, is_mapping, context) expanded_actual = mapper.expand_actual_type( - actual_type, actual_kind, callee.arg_names[i], callee.arg_kinds[i] + actual_type, actual_kind, callee.arg_names[i], callee_arg_kind ) check_arg( expanded_actual, actual_type, - arg_kinds[actual], - callee.arg_types[i], + actual_kind, + callee_arg_type, actual + 1, i + 1, callee, @@ -2072,7 +2181,8 @@ def check_arg( self.msg.incompatible_argument_note( original_caller_type, callee_type, context, code=code ) - self.chk.check_possible_missing_await(caller_type, callee_type, context) + if not self.msg.prefer_simple_messages(): + self.chk.check_possible_missing_await(caller_type, callee_type, context) def check_overload_call( self, @@ -2127,7 +2237,7 @@ def check_overload_call( # we don't want to introduce internal inconsistencies. 
unioned_result = ( make_simplified_union(list(returns), context.line, context.column), - self.combine_function_signatures(inferred_types), + self.combine_function_signatures(get_proper_types(inferred_types)), ) # Step 3: We try checking each branch one-by-one. @@ -2468,7 +2578,7 @@ def type_overrides_set( for expr in exprs: del self.type_overrides[expr] - def combine_function_signatures(self, types: Sequence[Type]) -> AnyType | CallableType: + def combine_function_signatures(self, types: list[ProperType]) -> AnyType | CallableType: """Accepts a list of function signatures and attempts to combine them together into a new CallableType consisting of the union of all of the given arguments and return types. @@ -2476,10 +2586,9 @@ def combine_function_signatures(self, types: Sequence[Type]) -> AnyType | Callab an ambiguity because of Any in arguments). """ assert types, "Trying to merge no callables" - types = get_proper_types(types) if not all(isinstance(c, CallableType) for c in types): return AnyType(TypeOfAny.special_form) - callables = cast(Sequence[CallableType], types) + callables = cast("list[CallableType]", types) if len(callables) == 1: return callables[0] @@ -2666,6 +2775,10 @@ def analyze_ordinary_member_access(self, e: MemberExpr, is_lvalue: bool) -> Type if isinstance(base, RefExpr) and isinstance(base.node, MypyFile): module_symbol_table = base.node.names + if isinstance(base, RefExpr) and isinstance(base.node, Var): + is_self = base.node.is_self + else: + is_self = False member_type = analyze_member_access( e.name, @@ -2679,6 +2792,7 @@ def analyze_ordinary_member_access(self, e: MemberExpr, is_lvalue: bool) -> Type chk=self.chk, in_literal_context=self.is_literal_context(), module_symbol_table=module_symbol_table, + is_self=is_self, ) return member_type @@ -2765,6 +2879,9 @@ def visit_ellipsis(self, e: EllipsisExpr) -> Type: def visit_op_expr(self, e: OpExpr) -> Type: """Type check a binary operator expression.""" + if e.analyzed: + # It's actually a 
type expression X | Y. + return self.accept(e.analyzed) if e.op == "and" or e.op == "or": return self.check_boolean_op(e, e) if e.op == "*" and isinstance(e.left, ListExpr): @@ -2858,7 +2975,7 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: not local_errors.has_new_errors() and cont_type and self.dangerous_comparison( - left_type, cont_type, original_container=right_type + left_type, cont_type, original_container=right_type, prefer_literal=False ) ): self.msg.dangerous_comparison(left_type, cont_type, "container", e) @@ -2876,27 +2993,19 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: # testCustomEqCheckStrictEquality for an example. if not w.has_new_errors() and operator in ("==", "!="): right_type = self.accept(right) - # We suppress the error if there is a custom __eq__() method on either - # side. User defined (or even standard library) classes can define this - # to return True for comparisons between non-overlapping types. - if not custom_special_method( - left_type, "__eq__" - ) and not custom_special_method(right_type, "__eq__"): - # Also flag non-overlapping literals in situations like: - # x: Literal['a', 'b'] - # if x == 'c': - # ... 
+ if self.dangerous_comparison(left_type, right_type): + # Show the most specific literal types possible left_type = try_getting_literal(left_type) right_type = try_getting_literal(right_type) - if self.dangerous_comparison(left_type, right_type): - self.msg.dangerous_comparison(left_type, right_type, "equality", e) + self.msg.dangerous_comparison(left_type, right_type, "equality", e) elif operator == "is" or operator == "is not": right_type = self.accept(right) # validate the right operand sub_result = self.bool_type() - left_type = try_getting_literal(left_type) - right_type = try_getting_literal(right_type) if self.dangerous_comparison(left_type, right_type): + # Show the most specific literal types possible + left_type = try_getting_literal(left_type) + right_type = try_getting_literal(right_type) self.msg.dangerous_comparison(left_type, right_type, "identity", e) method_type = None else: @@ -2925,12 +3034,17 @@ def find_partial_type_ref_fast_path(self, expr: Expression) -> Type | None: if isinstance(expr.node, Var): result = self.analyze_var_ref(expr.node, expr) if isinstance(result, PartialType) and result.type is not None: - self.chk.store_type(expr, self.chk.fixup_partial_type(result)) + self.chk.store_type(expr, fixup_partial_type(result)) return result return None def dangerous_comparison( - self, left: Type, right: Type, original_container: Type | None = None + self, + left: Type, + right: Type, + original_container: Type | None = None, + *, + prefer_literal: bool = True, ) -> bool: """Check for dangerous non-overlapping comparisons like 42 == 'no'. @@ -2952,6 +3066,20 @@ def dangerous_comparison( left, right = get_proper_types((left, right)) + # We suppress the error if there is a custom __eq__() method on either + # side. User defined (or even standard library) classes can define this + # to return True for comparisons between non-overlapping types. 
+ if custom_special_method(left, "__eq__") or custom_special_method(right, "__eq__"): + return False + + if prefer_literal: + # Also flag non-overlapping literals in situations like: + # x: Literal['a', 'b'] + # if x == 'c': + # ... + left = try_getting_literal(left) + right = try_getting_literal(right) + if self.chk.binder.is_unreachable_warning_suppressed(): # We are inside a function that contains type variables with value restrictions in # its signature. In this case we just suppress all strict-equality checks to avoid @@ -2982,14 +3110,18 @@ def dangerous_comparison( return False if isinstance(left, Instance) and isinstance(right, Instance): # Special case some builtin implementations of AbstractSet. + left_name = left.type.fullname + right_name = right.type.fullname if ( - left.type.fullname in OVERLAPPING_TYPES_ALLOWLIST - and right.type.fullname in OVERLAPPING_TYPES_ALLOWLIST + left_name in OVERLAPPING_TYPES_ALLOWLIST + and right_name in OVERLAPPING_TYPES_ALLOWLIST ): abstract_set = self.chk.lookup_typeinfo("typing.AbstractSet") left = map_instance_to_supertype(left, abstract_set) right = map_instance_to_supertype(right, abstract_set) - return not is_overlapping_types(left.args[0], right.args[0]) + return self.dangerous_comparison(left.args[0], right.args[0]) + elif left_name in ("builtins.list", "builtins.tuple") and right_name == left_name: + return self.dangerous_comparison(left.args[0], right.args[0]) if isinstance(left, LiteralType) and isinstance(right, LiteralType): if isinstance(left.value, bool) and isinstance(right.value, bool): # Comparing different booleans is not dangerous. @@ -3369,7 +3501,7 @@ def check_op( # we call 'combine_function_signature' instead of just unioning the inferred # callable types. 
results_final = make_simplified_union(all_results) - inferred_final = self.combine_function_signatures(all_inferred) + inferred_final = self.combine_function_signatures(get_proper_types(all_inferred)) return results_final, inferred_final else: return self.check_method_call_by_name( @@ -3650,7 +3782,9 @@ def nonliteral_tuple_index_helper(self, left_type: TupleType, index: Expression) return self.chk.named_generic_type("builtins.tuple", [union]) return union - def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) -> Type: + def visit_typeddict_index_expr( + self, td_type: TypedDictType, index: Expression, setitem: bool = False + ) -> Type: if isinstance(index, StrExpr): key_names = [index.value] else: @@ -3679,7 +3813,7 @@ def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) for key_name in key_names: value_type = td_type.items.get(key_name) if value_type is None: - self.msg.typeddict_key_not_found(td_type, key_name, index) + self.msg.typeddict_key_not_found(td_type, key_name, index, setitem) return AnyType(TypeOfAny.from_error) else: value_types.append(value_type) @@ -3772,10 +3906,8 @@ def visit_type_application(self, tapp: TypeApplication) -> Type: There are two different options here, depending on whether expr refers to a type alias or directly to a generic class. In the first case we need - to use a dedicated function typeanal.expand_type_aliases. This - is due to the fact that currently type aliases machinery uses - unbound type variables, while normal generics use bound ones; - see TypeAlias docstring for more details. + to use a dedicated function typeanal.expand_type_alias(). This + is due to some differences in how type arguments are applied and checked. """ if isinstance(tapp.expr, RefExpr) and isinstance(tapp.expr.node, TypeAlias): # Subscription of a (generic) alias in runtime context, expand the alias. @@ -3869,9 +4001,8 @@ class LongName(Generic[T]): ... 
else: if alias_definition: return AnyType(TypeOfAny.special_form) - # This type is invalid in most runtime contexts, give it an 'object' type. - # TODO: Use typing._SpecialForm instead? - return self.named_type("builtins.object") + # The _SpecialForm type can be used in some runtime contexts (e.g. it may have __or__). + return self.named_type("typing._SpecialForm") def apply_type_arguments_to_callable( self, tp: Type, args: Sequence[Type], ctx: Context @@ -3887,6 +4018,9 @@ def apply_type_arguments_to_callable( if isinstance(tp, CallableType): if len(tp.variables) != len(args): + if tp.is_type_obj() and tp.type_object().fullname == "builtins.tuple": + # TODO: Specialize the callable for the type arguments + return tp self.msg.incompatible_type_application(len(tp.variables), len(args), ctx) return AnyType(TypeOfAny.from_error) return self.apply_generic_arguments(tp, args, ctx) @@ -4076,6 +4210,17 @@ def fast_dict_type(self, e: DictExpr) -> Type | None: self.resolved_type[e] = dt return dt + def check_typeddict_literal_in_context( + self, e: DictExpr, typeddict_context: TypedDictType + ) -> Type: + orig_ret_type = self.check_typeddict_call_with_dict( + callee=typeddict_context, kwargs=e, context=e, orig_callee=None + ) + ret_type = get_proper_type(orig_ret_type) + if isinstance(ret_type, TypedDictType): + return ret_type.copy_modified() + return typeddict_context.copy_modified() + def visit_dict_expr(self, e: DictExpr) -> Type: """Type check a dict expression. 
@@ -4085,15 +4230,20 @@ def visit_dict_expr(self, e: DictExpr) -> Type: # an error, but returns the TypedDict type that matches the literal it found # that would cause a second error when that TypedDict type is returned upstream # to avoid the second error, we always return TypedDict type that was requested - typeddict_context = self.find_typeddict_context(self.type_context[-1], e) - if typeddict_context: - orig_ret_type = self.check_typeddict_call_with_dict( - callee=typeddict_context, kwargs=e, context=e, orig_callee=None - ) - ret_type = get_proper_type(orig_ret_type) - if isinstance(ret_type, TypedDictType): - return ret_type.copy_modified() - return typeddict_context.copy_modified() + typeddict_contexts = self.find_typeddict_context(self.type_context[-1], e) + if typeddict_contexts: + if len(typeddict_contexts) == 1: + return self.check_typeddict_literal_in_context(e, typeddict_contexts[0]) + # Multiple items union, check if at least one of them matches cleanly. + for typeddict_context in typeddict_contexts: + with self.msg.filter_errors() as err, self.chk.local_type_map() as tmap: + ret_type = self.check_typeddict_literal_in_context(e, typeddict_context) + if err.has_new_errors(): + continue + self.chk.store_types(tmap) + return ret_type + # No item matched without an error, so we can't unambiguously choose the item. 
+ self.msg.typeddict_context_ambiguous(typeddict_contexts, e) # fast path attempt dt = self.fast_dict_type(e) @@ -4159,26 +4309,20 @@ def visit_dict_expr(self, e: DictExpr) -> Type: def find_typeddict_context( self, context: Type | None, dict_expr: DictExpr - ) -> TypedDictType | None: + ) -> list[TypedDictType]: context = get_proper_type(context) if isinstance(context, TypedDictType): - return context + return [context] elif isinstance(context, UnionType): items = [] for item in context.items: - item_context = self.find_typeddict_context(item, dict_expr) - if item_context is not None and self.match_typeddict_call_with_dict( - item_context, dict_expr, dict_expr - ): - items.append(item_context) - if len(items) == 1: - # Only one union item is valid TypedDict for the given dict_expr, so use the - # context as it's unambiguous. - return items[0] - if len(items) > 1: - self.msg.typeddict_context_ambiguous(items, dict_expr) + item_contexts = self.find_typeddict_context(item, dict_expr) + for item_context in item_contexts: + if self.match_typeddict_call_with_dict(item_context, dict_expr, dict_expr): + items.append(item_context) + return items # No TypedDict type in context. - return None + return [] def visit_lambda_expr(self, e: LambdaExpr) -> Type: """Type check lambda expression.""" @@ -4305,19 +4449,30 @@ def visit_super_expr(self, e: SuperExpr) -> Type: index = mro.index(type_info) else: method = self.chk.scope.top_function() - assert method is not None # Mypy explicitly allows supertype upper bounds (and no upper bound at all) # for annotating self-types. However, if such an annotation is used for # checking super() we will still get an error. So to be consistent, we also # allow such imprecise annotations for use with super(), where we fall back - # to the current class MRO instead. - if is_self_type_like(instance_type, is_classmethod=method.is_class): + # to the current class MRO instead. This works only from inside a method. 
+ if method is not None and is_self_type_like( + instance_type, is_classmethod=method.is_class + ): if e.info and type_info in e.info.mro: mro = e.info.mro index = mro.index(type_info) if index is None: - self.chk.fail(message_registry.SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1, e) - return AnyType(TypeOfAny.from_error) + if ( + instance_info.is_protocol + and instance_info != type_info + and not type_info.is_protocol + ): + # A special case for mixins, in this case super() should point + # directly to the host protocol, this is not safe, since the real MRO + # is not known yet for mixin, but this feature is more like an escape hatch. + index = -1 + else: + self.chk.fail(message_registry.SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1, e) + return AnyType(TypeOfAny.from_error) if len(mro) == index + 1: self.chk.fail(message_registry.TARGET_CLASS_HAS_NO_BASE_CLASS, e) @@ -4648,7 +4803,14 @@ def accept( applies only to this expression and not any subexpressions. """ if node in self.type_overrides: + # This branch is very fast, there is no point timing it. return self.type_overrides[node] + # We don't use context manager here to get most precise data (and avoid overhead). 
+ record_time = False + if self.collect_line_checking_stats and not self.in_expression: + t0 = time.perf_counter_ns() + self.in_expression = True + record_time = True self.type_context.append(type_context) old_is_callee = self.is_callee self.is_callee = is_callee @@ -4683,9 +4845,13 @@ def accept( self.msg.disallowed_any_type(typ, node) if not self.chk.in_checked_function() or self.chk.current_node_deferred: - return AnyType(TypeOfAny.unannotated) + result: Type = AnyType(TypeOfAny.unannotated) else: - return typ + result = typ + if record_time: + self.per_line_checking_time_ns[node.line] += time.perf_counter_ns() - t0 + self.in_expression = False + return result def named_type(self, name: str) -> Instance: """Return an instance type with type given by the name and no type @@ -4704,6 +4870,7 @@ def is_valid_var_arg(self, typ: Type) -> bool: ) or isinstance(typ, AnyType) or isinstance(typ, ParamSpecType) + or isinstance(typ, UnpackType) ) def is_valid_keyword_var_arg(self, typ: Type) -> bool: @@ -4741,7 +4908,7 @@ def has_member(self, typ: Type, member: str) -> bool: typ = typ.fallback if isinstance(typ, Instance): return typ.type.has_readable_member(member) - if isinstance(typ, CallableType) and typ.is_type_obj(): + if isinstance(typ, FunctionLike) and typ.is_type_obj(): return typ.fallback.type.has_readable_member(member) elif isinstance(typ, AnyType): return True @@ -4998,9 +5165,9 @@ def has_any_type(t: Type, ignore_in_type_obj: bool = False) -> bool: return t.accept(HasAnyType(ignore_in_type_obj)) -class HasAnyType(types.TypeQuery[bool]): +class HasAnyType(types.BoolTypeQuery): def __init__(self, ignore_in_type_obj: bool) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) self.ignore_in_type_obj = ignore_in_type_obj def visit_any(self, t: AnyType) -> bool: @@ -5077,7 +5244,7 @@ def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> Callabl return c.copy_modified(ret_type=new_ret_type) -class 
ArgInferSecondPassQuery(types.TypeQuery[bool]): +class ArgInferSecondPassQuery(types.BoolTypeQuery): """Query whether an argument type should be inferred in the second pass. The result is True if the type has a type variable in a callable return @@ -5086,17 +5253,17 @@ class ArgInferSecondPassQuery(types.TypeQuery[bool]): """ def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_callable_type(self, t: CallableType) -> bool: return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery()) -class HasTypeVarQuery(types.TypeQuery[bool]): +class HasTypeVarQuery(types.BoolTypeQuery): """Visitor for querying whether a type has a type variable component.""" def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_type_var(self, t: TypeVarType) -> bool: return True @@ -5106,11 +5273,11 @@ def has_erased_component(t: Type | None) -> bool: return t is not None and t.accept(HasErasedComponentsQuery()) -class HasErasedComponentsQuery(types.TypeQuery[bool]): +class HasErasedComponentsQuery(types.BoolTypeQuery): """Visitor for querying whether a type has an erased component.""" def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_erased_type(self, t: ErasedType) -> bool: return True @@ -5120,11 +5287,11 @@ def has_uninhabited_component(t: Type | None) -> bool: return t is not None and t.accept(HasUninhabitedComponentsQuery()) -class HasUninhabitedComponentsQuery(types.TypeQuery[bool]): +class HasUninhabitedComponentsQuery(types.BoolTypeQuery): """Visitor for querying whether a type has an UninhabitedType component.""" def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return True @@ -5354,7 +5521,7 @@ def type_info_from_type(typ: Type) -> TypeInfo | None: def is_operator_method(fullname: str | None) -> bool: - if fullname is None: + if not 
fullname: return False short_name = fullname.split(".")[-1] return ( diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 6c9da4a6ce7c..a2c580e13446 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -6,7 +6,11 @@ from mypy import meet, message_registry, subtypes from mypy.erasetype import erase_typevars -from mypy.expandtype import expand_type_by_instance, freshen_function_type_vars +from mypy.expandtype import ( + expand_self_type, + expand_type_by_instance, + freshen_all_functions_type_vars, +) from mypy.maptype import map_instance_to_supertype from mypy.messages import MessageBuilder from mypy.nodes import ( @@ -36,7 +40,9 @@ class_callable, erase_to_bound, function_type, + get_type_vars, make_simplified_union, + supported_self_type, tuple_fallback, type_object_type_from_function, ) @@ -63,8 +69,8 @@ TypeVarType, UnionType, get_proper_type, - has_type_vars, ) +from mypy.typetraverser import TypeTraverserVisitor if TYPE_CHECKING: # import for forward declaration only import mypy.checker @@ -90,6 +96,7 @@ def __init__( self_type: Type | None, module_symbol_table: SymbolTable | None = None, no_deferral: bool = False, + is_self: bool = False, ) -> None: self.is_lvalue = is_lvalue self.is_super = is_super @@ -101,6 +108,7 @@ def __init__( self.chk = chk self.module_symbol_table = module_symbol_table self.no_deferral = no_deferral + self.is_self = is_self def named_type(self, name: str) -> Instance: return self.chk.named_type(name) @@ -152,6 +160,7 @@ def analyze_member_access( self_type: Type | None = None, module_symbol_table: SymbolTable | None = None, no_deferral: bool = False, + is_self: bool = False, ) -> Type: """Return the type of attribute 'name' of 'typ'. 
@@ -187,6 +196,7 @@ def analyze_member_access( self_type=self_type, module_symbol_table=module_symbol_table, no_deferral=no_deferral, + is_self=is_self, ) result = _analyze_member_access(name, typ, mx, override_info) possible_literal = get_proper_type(result) @@ -262,8 +272,9 @@ def report_missing_attribute( override_info: TypeInfo | None = None, ) -> Type: res_type = mx.msg.has_no_attr(original_type, typ, name, mx.context, mx.module_symbol_table) - if may_be_awaitable_attribute(name, typ, mx, override_info): - mx.msg.possible_missing_await(mx.context) + if not mx.msg.prefer_simple_messages(): + if may_be_awaitable_attribute(name, typ, mx, override_info): + mx.msg.possible_missing_await(mx.context) return res_type @@ -306,7 +317,7 @@ def analyze_instance_member_access( if mx.is_lvalue: mx.msg.cant_assign_to_method(mx.context) signature = function_type(method, mx.named_type("builtins.function")) - signature = freshen_function_type_vars(signature) + signature = freshen_all_functions_type_vars(signature) if name == "__new__" or method.is_static: # __new__ is special and behaves like a static method -- don't strip # the first argument. @@ -324,7 +335,7 @@ def analyze_instance_member_access( # Since generic static methods should not be allowed. typ = map_instance_to_supertype(typ, method.info) member_type = expand_type_by_instance(signature, typ) - freeze_type_vars(member_type) + freeze_all_type_vars(member_type) return member_type else: # Not a method. @@ -682,12 +693,12 @@ def analyze_descriptor_access(descriptor_type: Type, mx: MemberContext) -> Type: return inferred_dunder_get_type.ret_type -def is_instance_var(var: Var, info: TypeInfo) -> bool: +def is_instance_var(var: Var) -> bool: """Return if var is an instance variable according to PEP 526.""" return ( # check the type_info node is the var (not a decorated function, etc.) 
- var.name in info.names - and info.names[var.name].node is var + var.name in var.info.names + and var.info.names[var.name].node is var and not var.is_classvar # variables without annotations are treated as classvar and not var.is_inferred @@ -722,12 +733,18 @@ def analyze_var( mx.msg.read_only_property(name, itype.type, mx.context) if mx.is_lvalue and var.is_classvar: mx.msg.cant_assign_to_classvar(name, mx.context) - t = get_proper_type(expand_type_by_instance(typ, itype)) + t = freshen_all_functions_type_vars(typ) + if not (mx.is_self or mx.is_super) or supported_self_type( + get_proper_type(mx.original_type) + ): + t = expand_self_type(var, t, mx.original_type) + t = get_proper_type(expand_type_by_instance(t, itype)) + freeze_all_type_vars(t) result: Type = t typ = get_proper_type(typ) if ( var.is_initialized_in_class - and (not is_instance_var(var, info) or mx.is_operator) + and (not is_instance_var(var) or mx.is_operator) and isinstance(typ, FunctionLike) and not typ.is_type_obj() ): @@ -750,13 +767,16 @@ def analyze_var( # In `x.f`, when checking `x` against A1 we assume x is compatible with A # and similarly for B1 when checking against B dispatched_type = meet.meet_types(mx.original_type, itype) - signature = freshen_function_type_vars(functype) + signature = freshen_all_functions_type_vars(functype) + bound = get_proper_type(expand_self_type(var, signature, mx.original_type)) + assert isinstance(bound, FunctionLike) + signature = bound signature = check_self_arg( signature, dispatched_type, var.is_classmethod, mx.context, name, mx.msg ) signature = bind_self(signature, mx.self_type, var.is_classmethod) expanded_signature = expand_type_by_instance(signature, itype) - freeze_type_vars(expanded_signature) + freeze_all_type_vars(expanded_signature) if var.is_property: # A property cannot have an overloaded type => the cast is fine. 
assert isinstance(expanded_signature, CallableType) @@ -779,16 +799,15 @@ def analyze_var( return result -def freeze_type_vars(member_type: Type) -> None: - if not isinstance(member_type, ProperType): - return - if isinstance(member_type, CallableType): - for v in member_type.variables: +def freeze_all_type_vars(member_type: Type) -> None: + member_type.accept(FreezeTypeVarsVisitor()) + + +class FreezeTypeVarsVisitor(TypeTraverserVisitor): + def visit_callable_type(self, t: CallableType) -> None: + for v in t.variables: v.id.meta_level = 0 - if isinstance(member_type, Overloaded): - for it in member_type.items: - for v in it.variables: - v.id.meta_level = 0 + super().visit_callable_type(t) def lookup_member_var_or_accessor(info: TypeInfo, name: str, is_lvalue: bool) -> SymbolNode | None: @@ -882,7 +901,7 @@ def analyze_class_attribute_access( # For modules use direct symbol table lookup. if not itype.extra_attrs.mod_name: return itype.extra_attrs.attrs[name] - if info.fallback_to_any: + if info.fallback_to_any or info.meta_fallback_to_any: return apply_class_attr_hook(mx, hook, AnyType(TypeOfAny.special_form)) return None @@ -894,6 +913,10 @@ def analyze_class_attribute_access( if isinstance(node.node, TypeInfo): mx.msg.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, mx.context) + # Refuse class attribute access if slot defined + if info.slots and name in info.slots: + mx.msg.fail(message_registry.CLASS_VAR_CONFLICTS_SLOTS.format(name), mx.context) + # If a final attribute was declared on `self` in `__init__`, then it # can't be accessed on the class object. if node.implicit and isinstance(node.node, Var) and node.node.is_final: @@ -945,7 +968,12 @@ def analyze_class_attribute_access( # x: T # C.x # Error, ambiguous access # C[int].x # Also an error, since C[int] is same as C at runtime - if isinstance(t, TypeVarType) or has_type_vars(t): + # Exception is Self type wrapped in ClassVar, that is safe. 
+ def_vars = set(node.node.info.defn.type_vars) + if not node.node.is_classvar and node.node.info.self_type: + def_vars.add(node.node.info.self_type) + typ_vars = set(get_type_vars(t)) + if def_vars & typ_vars: # Exception: access on Type[...], including first argument of class methods is OK. if not isinstance(get_proper_type(mx.original_type), TypeType) or node.implicit: if node.node.is_classvar: @@ -958,7 +986,8 @@ def analyze_class_attribute_access( # In the above example this means that we infer following types: # C.x -> Any # C[int].x -> int - t = erase_typevars(expand_type_by_instance(t, isuper)) + t = get_proper_type(expand_self_type(node.node, t, itype)) + t = erase_typevars(expand_type_by_instance(t, isuper), {tv.id for tv in def_vars}) is_classmethod = (is_decorated and cast(Decorator, node.node).func.is_class) or ( isinstance(node.node, FuncBase) and node.node.is_class @@ -1044,7 +1073,9 @@ def analyze_typeddict_access( if isinstance(mx.context, IndexExpr): # Since we can get this during `a['key'] = ...` # it is safe to assume that the context is `IndexExpr`. - item_type = mx.chk.expr_checker.visit_typeddict_index_expr(typ, mx.context.index) + item_type = mx.chk.expr_checker.visit_typeddict_index_expr( + typ, mx.context.index, setitem=True + ) else: # It can also be `a.__setitem__(...)` direct call. 
# In this case `item_type` can be `Any`, @@ -1116,11 +1147,11 @@ class B(A[str]): pass if isinstance(t, CallableType): tvars = original_vars if original_vars is not None else [] if is_classmethod: - t = freshen_function_type_vars(t) + t = freshen_all_functions_type_vars(t) t = bind_self(t, original_type, is_classmethod=True) assert isuper is not None t = cast(CallableType, expand_type_by_instance(t, isuper)) - freeze_type_vars(t) + freeze_all_type_vars(t) return t.copy_modified(variables=list(tvars) + list(t.variables)) elif isinstance(t, Overloaded): return Overloaded( diff --git a/mypy/config_parser.py b/mypy/config_parser.py index 485d2f67f5de..190782a3bded 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -137,6 +137,15 @@ def check_follow_imports(choice: str) -> str: return choice +def split_commas(value: str) -> list[str]: + # Uses a bit smarter technique to allow last trailing comma + # and to remove last `""` item from the split. + items = value.split(",") + if items and items[-1] == "": + items.pop(-1) + return items + + # For most options, the type of the default value set in options.py is # sufficient, and we don't have to do anything here. 
This table # exists to specify types for values initialized to None or container @@ -151,13 +160,13 @@ def check_follow_imports(choice: str) -> str: "junit_xml": expand_path, "follow_imports": check_follow_imports, "no_site_packages": bool, - "plugins": lambda s: [p.strip() for p in s.split(",")], - "always_true": lambda s: [p.strip() for p in s.split(",")], - "always_false": lambda s: [p.strip() for p in s.split(",")], - "enable_incomplete_feature": lambda s: [p.strip() for p in s.split(",")], - "disable_error_code": lambda s: validate_codes([p.strip() for p in s.split(",")]), - "enable_error_code": lambda s: validate_codes([p.strip() for p in s.split(",")]), - "package_root": lambda s: [p.strip() for p in s.split(",")], + "plugins": lambda s: [p.strip() for p in split_commas(s)], + "always_true": lambda s: [p.strip() for p in split_commas(s)], + "always_false": lambda s: [p.strip() for p in split_commas(s)], + "enable_incomplete_feature": lambda s: [p.strip() for p in split_commas(s)], + "disable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]), + "enable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]), + "package_root": lambda s: [p.strip() for p in split_commas(s)], "cache_dir": expand_path, "python_executable": expand_path, "strict": bool, diff --git a/mypy/constant_fold.py b/mypy/constant_fold.py new file mode 100644 index 000000000000..a22c1b9ba9e5 --- /dev/null +++ b/mypy/constant_fold.py @@ -0,0 +1,116 @@ +"""Constant folding of expressions. + +For example, 3 + 5 can be constant folded into 8. 
+""" + +from __future__ import annotations + +from typing import Union +from typing_extensions import Final + +from mypy.nodes import Expression, FloatExpr, IntExpr, NameExpr, OpExpr, StrExpr, UnaryExpr, Var + +# All possible result types of constant folding +ConstantValue = Union[int, bool, float, str] +CONST_TYPES: Final = (int, bool, float, str) + + +def constant_fold_expr(expr: Expression, cur_mod_id: str) -> ConstantValue | None: + """Return the constant value of an expression for supported operations. + + Among other things, support int arithmetic and string + concatenation. For example, the expression 3 + 5 has the constant + value 8. + + Also bind simple references to final constants defined in the + current module (cur_mod_id). Binding to references is best effort + -- we don't bind references to other modules. Mypyc trusts these + to be correct in compiled modules, so that it can replace a + constant expression (or a reference to one) with the statically + computed value. We don't want to infer constant values based on + stubs, in particular, as these might not match the implementation + (due to version skew, for example). + + Return None if unsuccessful. 
+ """ + if isinstance(expr, IntExpr): + return expr.value + if isinstance(expr, StrExpr): + return expr.value + if isinstance(expr, FloatExpr): + return expr.value + elif isinstance(expr, NameExpr): + if expr.name == "True": + return True + elif expr.name == "False": + return False + node = expr.node + if ( + isinstance(node, Var) + and node.is_final + and node.fullname.rsplit(".", 1)[0] == cur_mod_id + ): + value = node.final_value + if isinstance(value, (CONST_TYPES)): + return value + elif isinstance(expr, OpExpr): + left = constant_fold_expr(expr.left, cur_mod_id) + right = constant_fold_expr(expr.right, cur_mod_id) + if isinstance(left, int) and isinstance(right, int): + return constant_fold_binary_int_op(expr.op, left, right) + elif isinstance(left, str) and isinstance(right, str): + return constant_fold_binary_str_op(expr.op, left, right) + elif isinstance(expr, UnaryExpr): + value = constant_fold_expr(expr.expr, cur_mod_id) + if isinstance(value, int): + return constant_fold_unary_int_op(expr.op, value) + return None + + +def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | None: + if op == "+": + return left + right + if op == "-": + return left - right + elif op == "*": + return left * right + elif op == "//": + if right != 0: + return left // right + elif op == "%": + if right != 0: + return left % right + elif op == "&": + return left & right + elif op == "|": + return left | right + elif op == "^": + return left ^ right + elif op == "<<": + if right >= 0: + return left << right + elif op == ">>": + if right >= 0: + return left >> right + elif op == "**": + if right >= 0: + ret = left**right + assert isinstance(ret, int) + return ret + return None + + +def constant_fold_unary_int_op(op: str, value: int) -> int | None: + if op == "-": + return -value + elif op == "~": + return ~value + elif op == "+": + return value + return None + + +def constant_fold_binary_str_op(op: str, left: str, right: str) -> str | None: + if op == "+": + 
return left + right + return None diff --git a/mypy/constraints.py b/mypy/constraints.py index 49b042d5baf0..a8f04094ca63 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -29,7 +29,6 @@ Type, TypeAliasType, TypedDictType, - TypeList, TypeOfAny, TypeQuery, TypeType, @@ -49,11 +48,10 @@ is_named_instance, is_union_with_any, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.typevartuples import ( extract_unpack, find_unpack_in_list, - split_with_instance, split_with_mapped_and_template, split_with_prefix_and_suffix, ) @@ -133,8 +131,33 @@ def infer_constraints_for_callable( ) ) - assert isinstance(unpack_type.type, TypeVarTupleType) - constraints.append(Constraint(unpack_type.type, SUPERTYPE_OF, TypeList(actual_types))) + unpacked_type = get_proper_type(unpack_type.type) + if isinstance(unpacked_type, TypeVarTupleType): + constraints.append( + Constraint( + unpacked_type, + SUPERTYPE_OF, + TupleType(actual_types, unpacked_type.tuple_fallback), + ) + ) + elif isinstance(unpacked_type, TupleType): + # Prefixes get converted to positional args, so technically the only case we + # should have here is like Tuple[Unpack[Ts], Y1, Y2, Y3]. If this turns out + # not to hold we can always handle the prefixes too. 
+ inner_unpack = unpacked_type.items[0] + assert isinstance(inner_unpack, UnpackType) + inner_unpacked_type = get_proper_type(inner_unpack.type) + assert isinstance(inner_unpacked_type, TypeVarTupleType) + suffix_len = len(unpacked_type.items) - 1 + constraints.append( + Constraint( + inner_unpacked_type, + SUPERTYPE_OF, + TupleType(actual_types[:-suffix_len], inner_unpacked_type.tuple_fallback), + ) + ) + else: + assert False, "mypy bug: unhandled constraint inference case" else: for actual in actuals: actual_arg_type = arg_types[actual] @@ -174,17 +197,18 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons if any( get_proper_type(template) == get_proper_type(t) and get_proper_type(actual) == get_proper_type(a) - for (t, a) in reversed(TypeState.inferring) + for (t, a) in reversed(type_state.inferring) ): return [] - if has_recursive_types(template): + if has_recursive_types(template) or isinstance(get_proper_type(template), Instance): # This case requires special care because it may cause infinite recursion. + # Note that we include Instances because the may be recursive as str(Sequence[str]). if not has_type_vars(template): # Return early on an empty branch. return [] - TypeState.inferring.append((template, actual)) + type_state.inferring.append((template, actual)) res = _infer_constraints(template, actual, direction) - TypeState.inferring.pop() + type_state.inferring.pop() return res return _infer_constraints(template, actual, direction) @@ -395,8 +419,12 @@ def filter_satisfiable(option: list[Constraint] | None) -> list[Constraint] | No return option satisfiable = [] for c in option: - # TODO: add similar logic for TypeVar values (also in various other places)? 
- if mypy.subtypes.is_subtype(c.target, c.origin_type_var.upper_bound): + if isinstance(c.origin_type_var, TypeVarType) and c.origin_type_var.values: + if any( + mypy.subtypes.is_subtype(c.target, value) for value in c.origin_type_var.values + ): + satisfiable.append(c) + elif mypy.subtypes.is_subtype(c.target, c.origin_type_var.upper_bound): satisfiable.append(c) if not satisfiable: return None @@ -537,7 +565,7 @@ def visit_type_var_tuple(self, template: TypeVarTupleType) -> list[Constraint]: raise NotImplementedError def visit_unpack_type(self, template: UnpackType) -> list[Constraint]: - raise NotImplementedError + raise RuntimeError("Mypy bug: unpack should be handled at a higher level.") def visit_parameters(self, template: Parameters) -> list[Constraint]: # constraining Any against C[P] turns into infer_against_any([P], Any) @@ -552,7 +580,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: original_actual = actual = self.actual res: list[Constraint] = [] if isinstance(actual, (CallableType, Overloaded)) and template.type.is_protocol: - if template.type.protocol_members == ["__call__"]: + if "__call__" in template.type.protocol_members: # Special case: a generic callback protocol if not any(template == t for t in template.type.inferring): template.type.inferring.append(template) @@ -564,7 +592,6 @@ def visit_instance(self, template: Instance) -> list[Constraint]: subres = infer_constraints(call, actual, self.direction) res.extend(subres) template.type.inferring.pop() - return res if isinstance(actual, CallableType) and actual.fallback is not None: if actual.is_type_obj() and template.type.is_protocol: ret_type = get_proper_type(actual.ret_type) @@ -592,6 +619,17 @@ def visit_instance(self, template: Instance) -> list[Constraint]: actual.item, template, subtype, template, class_obj=True ) ) + if self.direction == SUPERTYPE_OF: + # Infer constraints for Type[T] via metaclass of T when it makes sense. 
+ a_item = actual.item + if isinstance(a_item, TypeVarType): + a_item = get_proper_type(a_item.upper_bound) + if isinstance(a_item, Instance) and a_item.type.metaclass_type: + res.extend( + self.infer_constraints_from_protocol_members( + a_item.type.metaclass_type, template, actual, template + ) + ) if isinstance(actual, Overloaded) and actual.fallback is not None: actual = actual.fallback @@ -610,45 +648,22 @@ def visit_instance(self, template: Instance) -> list[Constraint]: tvars = mapped.type.defn.type_vars if instance.type.has_type_var_tuple_type: - mapped_prefix, mapped_middle, mapped_suffix = split_with_instance(mapped) - instance_prefix, instance_middle, instance_suffix = split_with_instance( - instance - ) - - # Add a constraint for the type var tuple, and then - # remove it for the case below. - instance_unpack = extract_unpack(instance_middle) - if instance_unpack is not None: - if isinstance(instance_unpack, TypeVarTupleType): - res.append( - Constraint( - instance_unpack, SUBTYPE_OF, TypeList(list(mapped_middle)) - ) - ) - elif ( - isinstance(instance_unpack, Instance) - and instance_unpack.type.fullname == "builtins.tuple" - ): - for item in mapped_middle: - res.extend( - infer_constraints( - instance_unpack.args[0], item, self.direction - ) - ) - elif isinstance(instance_unpack, TupleType): - if len(instance_unpack.items) == len(mapped_middle): - for instance_arg, item in zip( - instance_unpack.items, mapped_middle - ): - res.extend( - infer_constraints(instance_arg, item, self.direction) - ) - - mapped_args = mapped_prefix + mapped_suffix - instance_args = instance_prefix + instance_suffix - assert instance.type.type_var_tuple_prefix is not None assert instance.type.type_var_tuple_suffix is not None + assert mapped.type.type_var_tuple_prefix is not None + assert mapped.type.type_var_tuple_suffix is not None + + unpack_constraints, mapped_args, instance_args = build_constraints_for_unpack( + mapped.args, + mapped.type.type_var_tuple_prefix, + 
mapped.type.type_var_tuple_suffix, + instance.args, + instance.type.type_var_tuple_prefix, + instance.type.type_var_tuple_suffix, + self.direction, + ) + res.extend(unpack_constraints) + tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix( tuple(tvars), instance.type.type_var_tuple_prefix, @@ -702,55 +717,22 @@ def visit_instance(self, template: Instance) -> list[Constraint]: mapped = map_instance_to_supertype(instance, template.type) tvars = template.type.defn.type_vars if template.type.has_type_var_tuple_type: - mapped_prefix, mapped_middle, mapped_suffix = split_with_instance(mapped) - template_prefix, template_middle, template_suffix = split_with_instance( - template - ) - split_result = split_with_mapped_and_template(mapped, template) - assert split_result is not None - ( - mapped_prefix, - mapped_middle, - mapped_suffix, - template_prefix, - template_middle, - template_suffix, - ) = split_result - - # Add a constraint for the type var tuple, and then - # remove it for the case below. 
- template_unpack = extract_unpack(template_middle) - if template_unpack is not None: - if isinstance(template_unpack, TypeVarTupleType): - res.append( - Constraint( - template_unpack, SUPERTYPE_OF, TypeList(list(mapped_middle)) - ) - ) - elif ( - isinstance(template_unpack, Instance) - and template_unpack.type.fullname == "builtins.tuple" - ): - for item in mapped_middle: - res.extend( - infer_constraints( - template_unpack.args[0], item, self.direction - ) - ) - elif isinstance(template_unpack, TupleType): - if len(template_unpack.items) == len(mapped_middle): - for template_arg, item in zip( - template_unpack.items, mapped_middle - ): - res.extend( - infer_constraints(template_arg, item, self.direction) - ) - - mapped_args = mapped_prefix + mapped_suffix - template_args = template_prefix + template_suffix - + assert mapped.type.type_var_tuple_prefix is not None + assert mapped.type.type_var_tuple_suffix is not None assert template.type.type_var_tuple_prefix is not None assert template.type.type_var_tuple_suffix is not None + + unpack_constraints, mapped_args, template_args = build_constraints_for_unpack( + mapped.args, + mapped.type.type_var_tuple_prefix, + mapped.type.type_var_tuple_suffix, + template.args, + template.type.type_var_tuple_prefix, + template.type.type_var_tuple_suffix, + self.direction, + ) + res.extend(unpack_constraints) + tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix( tuple(tvars), template.type.type_var_tuple_prefix, @@ -814,7 +796,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: # because some type may be considered a subtype of a protocol # due to _promote, but still not implement the protocol. 
not any(template == t for t in reversed(template.type.inferring)) - and mypy.subtypes.is_protocol_implementation(instance, erased) + and mypy.subtypes.is_protocol_implementation(instance, erased, skip=["__call__"]) ): template.type.inferring.append(template) res.extend( @@ -830,7 +812,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: and # We avoid infinite recursion for structural subtypes also here. not any(instance == i for i in reversed(instance.type.inferring)) - and mypy.subtypes.is_protocol_implementation(erased, instance) + and mypy.subtypes.is_protocol_implementation(erased, instance, skip=["__call__"]) ): instance.type.inferring.append(instance) res.extend( @@ -886,6 +868,8 @@ def infer_constraints_from_protocol_members( inst = mypy.subtypes.find_member(member, instance, subtype, class_obj=class_obj) temp = mypy.subtypes.find_member(member, template, subtype) if inst is None or temp is None: + if member == "__call__": + continue return [] # See #11020 # The above is safe since at this point we know that 'instance' is a subtype # of (erased) 'template', therefore it defines all protocol members @@ -911,12 +895,28 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # We can't infer constraints from arguments if the template is Callable[..., T] # (with literal '...'). if not template.is_ellipsis_args: + if find_unpack_in_list(template.arg_types) is not None: + ( + unpack_constraints, + cactual_args_t, + template_args_t, + ) = find_and_build_constraints_for_unpack( + tuple(cactual.arg_types), tuple(template.arg_types), self.direction + ) + template_args = list(template_args_t) + cactual_args = list(cactual_args_t) + res.extend(unpack_constraints) + assert len(template_args) == len(cactual_args) + else: + template_args = template.arg_types + cactual_args = cactual.arg_types # The lengths should match, but don't crash (it will error elsewhere). 
- for t, a in zip(template.arg_types, cactual.arg_types): + for t, a in zip(template_args, cactual_args): # Negate direction due to function argument type contravariance. res.extend(infer_constraints(t, a, neg_op(self.direction))) else: # sometimes, it appears we try to get constraints between two paramspec callables? + # TODO: Direction # TODO: check the prefixes match prefix = param_spec.prefix @@ -934,7 +934,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: arg_types=cactual.arg_types[prefix_len:], arg_kinds=cactual.arg_kinds[prefix_len:], arg_names=cactual.arg_names[prefix_len:], - ret_type=NoneType(), + ret_type=UninhabitedType(), ), ) ) @@ -1006,58 +1006,53 @@ def infer_against_overloaded( return infer_constraints(template, item, self.direction) def visit_tuple_type(self, template: TupleType) -> list[Constraint]: + actual = self.actual - # TODO: Support subclasses of Tuple + unpack_index = find_unpack_in_list(template.items) is_varlength_tuple = ( isinstance(actual, Instance) and actual.type.fullname == "builtins.tuple" ) - unpack_index = find_unpack_in_list(template.items) - if unpack_index is not None: - unpack_item = get_proper_type(template.items[unpack_index]) - assert isinstance(unpack_item, UnpackType) - - unpacked_type = get_proper_type(unpack_item.type) - if isinstance(unpacked_type, TypeVarTupleType): + if isinstance(actual, TupleType) or is_varlength_tuple: + res: list[Constraint] = [] + if unpack_index is not None: if is_varlength_tuple: - # This case is only valid when the unpack is the only - # item in the tuple. - # - # TODO: We should support this in the case that all the items - # in the tuple besides the unpack have the same type as the - # varlength tuple's type. E.g. Tuple[int, ...] should be valid - # where we expect Tuple[int, Unpack[Ts]], but not for Tuple[str, Unpack[Ts]]. 
- assert len(template.items) == 1 - - if isinstance(actual, (TupleType, AnyType)) or is_varlength_tuple: - modified_actual = actual - if isinstance(actual, TupleType): - # Exclude the items from before and after the unpack index. - # TODO: Support including constraints from the prefix/suffix. - _, actual_items, _ = split_with_prefix_and_suffix( - tuple(actual.items), - unpack_index, - len(template.items) - unpack_index - 1, - ) - modified_actual = actual.copy_modified(items=list(actual_items)) - return [ - Constraint( - type_var=unpacked_type, op=self.direction, target=modified_actual - ) - ] + unpack_type = template.items[unpack_index] + assert isinstance(unpack_type, UnpackType) + unpacked_type = unpack_type.type + assert isinstance(unpacked_type, TypeVarTupleType) + return [Constraint(type_var=unpacked_type, op=self.direction, target=actual)] + else: + assert isinstance(actual, TupleType) + ( + unpack_constraints, + actual_items, + template_items, + ) = find_and_build_constraints_for_unpack( + tuple(actual.items), tuple(template.items), self.direction + ) + res.extend(unpack_constraints) + elif isinstance(actual, TupleType): + actual_items = tuple(actual.items) + template_items = tuple(template.items) + else: + return res - if isinstance(actual, TupleType) and len(actual.items) == len(template.items): - if ( - actual.partial_fallback.type.is_named_tuple - and template.partial_fallback.type.is_named_tuple - ): - # For named tuples using just the fallbacks usually gives better results. - return infer_constraints( - template.partial_fallback, actual.partial_fallback, self.direction - ) - res: list[Constraint] = [] - for i in range(len(template.items)): - res.extend(infer_constraints(template.items[i], actual.items[i], self.direction)) + # Cases above will return if actual wasn't a TupleType. 
+ assert isinstance(actual, TupleType) + if len(actual_items) == len(template_items): + if ( + actual.partial_fallback.type.is_named_tuple + and template.partial_fallback.type.is_named_tuple + ): + # For named tuples using just the fallbacks usually gives better results. + return res + infer_constraints( + template.partial_fallback, actual.partial_fallback, self.direction + ) + for i in range(len(template_items)): + res.extend( + infer_constraints(template_items[i], actual_items[i], self.direction) + ) return res elif isinstance(actual, AnyType): return self.infer_against_any(template.items, actual) @@ -1090,10 +1085,13 @@ def visit_type_alias_type(self, template: TypeAliasType) -> list[Constraint]: def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> list[Constraint]: res: list[Constraint] = [] for t in types: - # Note that we ignore variance and simply always use the - # original direction. This is because for Any targets direction is - # irrelevant in most cases, see e.g. is_same_constraint(). - res.extend(infer_constraints(t, any_type, self.direction)) + if isinstance(t, UnpackType) and isinstance(t.type, TypeVarTupleType): + res.append(Constraint(t.type, self.direction, any_type)) + else: + # Note that we ignore variance and simply always use the + # original direction. This is because for Any targets direction is + # irrelevant in most cases, see e.g. is_same_constraint(). + res.extend(infer_constraints(t, any_type, self.direction)) return res def visit_overloaded(self, template: Overloaded) -> list[Constraint]: @@ -1163,3 +1161,85 @@ def find_matching_overload_items( # it maintains backward compatibility. 
res = items[:] return res + + +def find_and_build_constraints_for_unpack( + mapped: tuple[Type, ...], template: tuple[Type, ...], direction: int +) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: + mapped_prefix_len = find_unpack_in_list(mapped) + if mapped_prefix_len is not None: + mapped_suffix_len: int | None = len(mapped) - mapped_prefix_len - 1 + else: + mapped_suffix_len = None + + template_prefix_len = find_unpack_in_list(template) + assert template_prefix_len is not None + template_suffix_len = len(template) - template_prefix_len - 1 + + return build_constraints_for_unpack( + mapped, + mapped_prefix_len, + mapped_suffix_len, + template, + template_prefix_len, + template_suffix_len, + direction, + ) + + +def build_constraints_for_unpack( + mapped: tuple[Type, ...], + mapped_prefix_len: int | None, + mapped_suffix_len: int | None, + template: tuple[Type, ...], + template_prefix_len: int, + template_suffix_len: int, + direction: int, +) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: + if mapped_prefix_len is None: + mapped_prefix_len = template_prefix_len + if mapped_suffix_len is None: + mapped_suffix_len = template_suffix_len + + split_result = split_with_mapped_and_template( + mapped, + mapped_prefix_len, + mapped_suffix_len, + template, + template_prefix_len, + template_suffix_len, + ) + assert split_result is not None + ( + mapped_prefix, + mapped_middle, + mapped_suffix, + template_prefix, + template_middle, + template_suffix, + ) = split_result + + template_unpack = extract_unpack(template_middle) + res = [] + + if template_unpack is not None: + if isinstance(template_unpack, TypeVarTupleType): + res.append( + Constraint( + template_unpack, + direction, + TupleType(list(mapped_middle), template_unpack.tuple_fallback), + ) + ) + elif ( + isinstance(template_unpack, Instance) + and template_unpack.type.fullname == "builtins.tuple" + ): + for item in mapped_middle: + res.extend(infer_constraints(template_unpack.args[0], 
item, direction)) + + elif isinstance(template_unpack, TupleType): + if len(template_unpack.items) == len(mapped_middle): + for template_arg, item in zip(template_unpack.items, mapped_middle): + res.extend(infer_constraints(template_arg, item, direction)) + return (res, mapped_prefix + mapped_suffix, template_prefix + template_suffix) diff --git a/mypy/copytype.py b/mypy/copytype.py index baa1ba34cbac..6024e527705b 100644 --- a/mypy/copytype.py +++ b/mypy/copytype.py @@ -94,7 +94,7 @@ def visit_parameters(self, t: Parameters) -> ProperType: return self.copy_common(t, dup) def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: - dup = TypeVarTupleType(t.name, t.fullname, t.id, t.upper_bound) + dup = TypeVarTupleType(t.name, t.fullname, t.id, t.upper_bound, t.tuple_fallback) return self.copy_common(t, dup) def visit_unpack_type(self, t: UnpackType) -> ProperType: diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 671999065e7d..7227cd559946 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -512,7 +512,8 @@ def initialize_fine_grained( print_memory_profile(run_gc=False) - status = 1 if messages else 0 + __, n_notes, __ = count_stats(messages) + status = 1 if messages and n_notes < len(messages) else 0 messages = self.pretty_messages(messages, len(sources), is_tty, terminal_width) return {"out": "".join(s + "\n" for s in messages), "err": "", "status": status} @@ -592,7 +593,7 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l sources.extend(new_files) # Process changes directly reachable from roots. - messages = fine_grained_manager.update(changed, []) + messages = fine_grained_manager.update(changed, [], followed=True) # Follow deps from changed modules (still within graph). 
worklist = changed[:] @@ -609,13 +610,13 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l sources2, graph, seen, changed_paths ) self.update_sources(new_files) - messages = fine_grained_manager.update(changed, []) + messages = fine_grained_manager.update(changed, [], followed=True) worklist.extend(changed) t2 = time.time() def refresh_file(module: str, path: str) -> list[str]: - return fine_grained_manager.update([(module, path)], []) + return fine_grained_manager.update([(module, path)], [], followed=True) for module_id, state in list(graph.items()): new_messages = refresh_suppressed_submodules( @@ -632,10 +633,10 @@ def refresh_file(module: str, path: str) -> list[str]: new_unsuppressed = self.find_added_suppressed(graph, seen, manager.search_paths) if not new_unsuppressed: break - new_files = [BuildSource(mod[1], mod[0]) for mod in new_unsuppressed] + new_files = [BuildSource(mod[1], mod[0], followed=True) for mod in new_unsuppressed] sources.extend(new_files) self.update_sources(new_files) - messages = fine_grained_manager.update(new_unsuppressed, []) + messages = fine_grained_manager.update(new_unsuppressed, [], followed=True) for module_id, path in new_unsuppressed: new_messages = refresh_suppressed_submodules( @@ -717,7 +718,7 @@ def find_reachable_changed_modules( for dep in state.dependencies: if dep not in seen: seen.add(dep) - worklist.append(BuildSource(graph[dep].path, graph[dep].id)) + worklist.append(BuildSource(graph[dep].path, graph[dep].id, followed=True)) return changed, new_files def direct_imports( @@ -725,7 +726,7 @@ def direct_imports( ) -> list[BuildSource]: """Return the direct imports of module not included in seen.""" state = graph[module[0]] - return [BuildSource(graph[dep].path, dep) for dep in state.dependencies] + return [BuildSource(graph[dep].path, dep, followed=True) for dep in state.dependencies] def find_added_suppressed( self, graph: mypy.build.Graph, seen: set[str], search_paths: SearchPaths 
diff --git a/mypy/erasetype.py b/mypy/erasetype.py index 89c07186f44a..6533d0c4e0f9 100644 --- a/mypy/erasetype.py +++ b/mypy/erasetype.py @@ -176,8 +176,8 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Type alias target can't contain bound type variables, so - # it is safe to just erase the arguments. + # Type alias target can't contain bound type variables (not bound by the type + # alias itself), so it is safe to just erase the arguments. return t.copy_modified(args=[a.accept(self) for a in t.args]) diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index f2a74c332b2e..ab49e70eaf20 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -67,6 +67,9 @@ def __str__(self) -> str: TYPEDDICT_ITEM: Final = ErrorCode( "typeddict-item", "Check items when constructing TypedDict", "General" ) +TYPPEDICT_UNKNOWN_KEY: Final = ErrorCode( + "typeddict-unknown-key", "Check unknown keys when constructing TypedDict", "General" +) HAS_TYPE: Final = ErrorCode( "has-type", "Check that type of reference can be determined", "General" ) @@ -140,8 +143,8 @@ def __str__(self) -> str: ANNOTATION_UNCHECKED = ErrorCode( "annotation-unchecked", "Notify about type annotations in unchecked functions", "General" ) -PARTIALLY_DEFINED: Final[ErrorCode] = ErrorCode( - "partially-defined", +POSSIBLY_UNDEFINED: Final[ErrorCode] = ErrorCode( + "possibly-undefined", "Warn about variables that are defined only in some execution paths", "General", default_enabled=False, @@ -160,6 +163,12 @@ def __str__(self) -> str: "Warn about function that always evaluate to true in boolean contexts", "General", ) +TRUTHY_ITERABLE: Final[ErrorCode] = ErrorCode( + "truthy-iterable", + "Warn about Iterable expressions that could always evaluate to true in boolean contexts", + "General", + default_enabled=False, +) NAME_MATCH: Final = ErrorCode( "name-match", "Check that type definition has consistent naming", "General" ) @@ -180,6 
+189,15 @@ def __str__(self) -> str: "General", default_enabled=False, ) +REDUNDANT_SELF_TYPE = ErrorCode( + "redundant-self", + "Warn about redundant Self type annotations on method first argument", + "General", + default_enabled=False, +) +USED_BEFORE_DEF: Final[ErrorCode] = ErrorCode( + "used-before-def", "Warn about variables that are used before they are defined", "General" +) # Syntax errors are often blocking. diff --git a/mypy/errors.py b/mypy/errors.py index bfc44a858010..d1e13ad701fc 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -737,6 +737,24 @@ def is_errors_for_file(self, file: str) -> bool: """Are there any errors for the given file?""" return file in self.error_info_map + def prefer_simple_messages(self) -> bool: + """Should we generate simple/fast error messages? + + Return True if errors are not shown to user, i.e. errors are ignored + or they are collected for internal use only. + + If True, we should prefer to generate a simple message quickly. + All normal errors should still be reported. + """ + if self.file in self.ignored_files: + # Errors ignored, so no point generating fancy messages + return True + for _watcher in self._watchers: + if _watcher._filter is True and _watcher._filtered is None: + # Errors are filtered + return True + return False + def raise_error(self, use_stdout: bool = True) -> NoReturn: """Raise a CompileError with the generated messages. diff --git a/mypy/evalexpr.py b/mypy/evalexpr.py new file mode 100644 index 000000000000..2bc6966fa2fa --- /dev/null +++ b/mypy/evalexpr.py @@ -0,0 +1,204 @@ +""" + +Evaluate an expression. + +Used by stubtest; in a separate file because things break if we don't +put it in a mypyc-compiled file. 
+ +""" +import ast +from typing_extensions import Final + +import mypy.nodes +from mypy.visitor import ExpressionVisitor + +UNKNOWN = object() + + +class _NodeEvaluator(ExpressionVisitor[object]): + def visit_int_expr(self, o: mypy.nodes.IntExpr) -> int: + return o.value + + def visit_str_expr(self, o: mypy.nodes.StrExpr) -> str: + return o.value + + def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> object: + # The value of a BytesExpr is a string created from the repr() + # of the bytes object. Get the original bytes back. + try: + return ast.literal_eval(f"b'{o.value}'") + except SyntaxError: + return ast.literal_eval(f'b"{o.value}"') + + def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> float: + return o.value + + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> object: + return o.value + + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> object: + return Ellipsis + + def visit_star_expr(self, o: mypy.nodes.StarExpr) -> object: + return UNKNOWN + + def visit_name_expr(self, o: mypy.nodes.NameExpr) -> object: + if o.name == "True": + return True + elif o.name == "False": + return False + elif o.name == "None": + return None + # TODO: Handle more names by figuring out a way to hook into the + # symbol table. 
+ return UNKNOWN + + def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> object: + return UNKNOWN + + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> object: + return UNKNOWN + + def visit_yield_expr(self, o: mypy.nodes.YieldExpr) -> object: + return UNKNOWN + + def visit_call_expr(self, o: mypy.nodes.CallExpr) -> object: + return UNKNOWN + + def visit_op_expr(self, o: mypy.nodes.OpExpr) -> object: + return UNKNOWN + + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> object: + return UNKNOWN + + def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> object: + return o.expr.accept(self) + + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> object: + return o.expr.accept(self) + + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> object: + return UNKNOWN + + def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> object: + return UNKNOWN + + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> object: + operand = o.expr.accept(self) + if operand is UNKNOWN: + return UNKNOWN + if o.op == "-": + if isinstance(operand, (int, float, complex)): + return -operand + elif o.op == "+": + if isinstance(operand, (int, float, complex)): + return +operand + elif o.op == "~": + if isinstance(operand, int): + return ~operand + elif o.op == "not": + if isinstance(operand, (bool, int, float, str, bytes)): + return not operand + return UNKNOWN + + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> object: + return o.value.accept(self) + + def visit_list_expr(self, o: mypy.nodes.ListExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return items + return UNKNOWN + + def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> object: + items = [ + (UNKNOWN if key is None else key.accept(self), value.accept(self)) + for key, value in o.items + ] + if all(key is not UNKNOWN and value is not None for key, value in items): + return dict(items) + return UNKNOWN 
+ + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return tuple(items) + return UNKNOWN + + def visit_set_expr(self, o: mypy.nodes.SetExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return set(items) + return UNKNOWN + + def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> object: + return UNKNOWN + + def visit_type_application(self, o: mypy.nodes.TypeApplication) -> object: + return UNKNOWN + + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> object: + return UNKNOWN + + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> object: + return UNKNOWN + + def visit_set_comprehension(self, o: mypy.nodes.SetComprehension) -> object: + return UNKNOWN + + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> object: + return UNKNOWN + + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> object: + return UNKNOWN + + def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> object: + return UNKNOWN + + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> object: + return UNKNOWN + + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> object: + return UNKNOWN + + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> object: + return UNKNOWN + + def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> object: + return UNKNOWN + + def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> object: + return UNKNOWN + + def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> object: + return UNKNOWN + + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> object: + return UNKNOWN + + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> object: + return UNKNOWN + + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> object: + return UNKNOWN + + def 
visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> object: + return UNKNOWN + + def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> object: + return UNKNOWN + + def visit_temp_node(self, o: mypy.nodes.TempNode) -> object: + return UNKNOWN + + +_evaluator: Final = _NodeEvaluator() + + +def evaluate_expression(expr: mypy.nodes.Expression) -> object: + """Evaluate an expression at runtime. + + Return the result of the expression, or UNKNOWN if the expression cannot be + evaluated. + """ + return expr.accept(_evaluator) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 08bc216689fb..7933283b24d6 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -1,10 +1,14 @@ from __future__ import annotations from typing import Iterable, Mapping, Sequence, TypeVar, cast, overload +from typing_extensions import Final -from mypy.nodes import ARG_STAR +from mypy.nodes import ARG_POS, ARG_STAR, ArgKind, Var +from mypy.type_visitor import TypeTranslator from mypy.types import ( + ANY_STRATEGY, AnyType, + BoolTypeQuery, CallableType, DeletedType, ErasedType, @@ -14,7 +18,6 @@ NoneType, Overloaded, Parameters, - ParamSpecFlavor, ParamSpecType, PartialType, ProperType, @@ -22,7 +25,6 @@ Type, TypeAliasType, TypedDictType, - TypeList, TypeType, TypeVarId, TypeVarLikeType, @@ -33,26 +35,39 @@ UninhabitedType, UnionType, UnpackType, + expand_param_spec, + flatten_nested_unions, get_proper_type, + remove_trivial, +) +from mypy.typevartuples import ( + find_unpack_in_list, + split_with_instance, + split_with_prefix_and_suffix, ) -from mypy.typevartuples import split_with_instance, split_with_prefix_and_suffix @overload -def expand_type(typ: ProperType, env: Mapping[TypeVarId, Type]) -> ProperType: +def expand_type( + typ: ProperType, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = ... +) -> ProperType: ... 
@overload -def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: +def expand_type( + typ: Type, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = ... +) -> Type: ... -def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: +def expand_type( + typ: Type, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = False +) -> Type: """Substitute any type variable references in a type given by a type environment. """ - return typ.accept(ExpandTypeVisitor(env)) + return typ.accept(ExpandTypeVisitor(env, allow_erased_callables)) @overload @@ -82,7 +97,9 @@ def expand_type_by_instance(typ: Type, instance: Instance) -> Type: instance.type.type_var_tuple_prefix, instance.type.type_var_tuple_suffix, ) - variables = {tvars_middle[0].id: TypeList(list(args_middle))} + tvar = tvars_middle[0] + assert isinstance(tvar, TypeVarTupleType) + variables = {tvar.id: TupleType(list(args_middle), tvar.tuple_fallback)} instance_args = args_prefix + args_suffix tvars = tvars_prefix + tvars_suffix else: @@ -124,13 +141,53 @@ def freshen_function_type_vars(callee: F) -> F: return cast(F, fresh_overload) +class HasGenericCallable(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + + def visit_callable_type(self, t: CallableType) -> bool: + return t.is_generic() or super().visit_callable_type(t) + + +# Share a singleton since this is performance sensitive +has_generic_callable: Final = HasGenericCallable() + + +T = TypeVar("T", bound=Type) + + +def freshen_all_functions_type_vars(t: T) -> T: + result: Type + has_generic_callable.reset() + if not t.accept(has_generic_callable): + return t # Fast path to avoid expensive freshening + else: + result = t.accept(FreshenCallableVisitor()) + assert isinstance(result, type(t)) + return result + + +class FreshenCallableVisitor(TypeTranslator): + def visit_callable_type(self, t: CallableType) -> Type: + result = super().visit_callable_type(t) + assert isinstance(result, ProperType) and 
isinstance(result, CallableType) + return freshen_function_type_vars(result) + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + # Same as for ExpandTypeVisitor + return t.copy_modified(args=[arg.accept(self) for arg in t.args]) + + class ExpandTypeVisitor(TypeVisitor[Type]): """Visitor that substitutes type variables with values.""" variables: Mapping[TypeVarId, Type] # TypeVar id -> TypeVar value - def __init__(self, variables: Mapping[TypeVarId, Type]) -> None: + def __init__( + self, variables: Mapping[TypeVarId, Type], allow_erased_callables: bool = False + ) -> None: self.variables = variables + self.allow_erased_callables = allow_erased_callables def visit_unbound_type(self, t: UnboundType) -> Type: return t @@ -148,8 +205,14 @@ def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: - # Should not get here. - raise RuntimeError() + if not self.allow_erased_callables: + raise RuntimeError() + # This may happen during type inference if some function argument + # type is a generic callable, and its erased form will appear in inferred + # constraints, then solver may check subtyping between them, which will trigger + # unify_generic_callables(), this is why we can get here. In all other cases it + # is a sign of a bug, since should never appear in any stored types. + return t def visit_instance(self, t: Instance) -> Type: args = self.expand_types_with_unpack(list(t.args)) @@ -159,6 +222,10 @@ def visit_instance(self, t: Instance) -> Type: return args def visit_type_var(self, t: TypeVarType) -> Type: + # Normally upper bounds can't contain other type variables, the only exception is + # special type variable Self`0 <: C[T, S], where C is the class where Self is used. + if t.id.raw_id == 0: + t = t.copy_modified(upper_bound=t.upper_bound.accept(self)) repl = self.variables.get(t.id, t) if isinstance(repl, ProperType) and isinstance(repl, Instance): # TODO: do we really need to do this? 
@@ -172,32 +239,8 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: # TODO: what does prefix mean in this case? # TODO: why does this case even happen? Instances aren't plural. return repl - elif isinstance(repl, ParamSpecType): - return repl.copy_modified( - flavor=t.flavor, - prefix=t.prefix.copy_modified( - arg_types=t.prefix.arg_types + repl.prefix.arg_types, - arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, - arg_names=t.prefix.arg_names + repl.prefix.arg_names, - ), - ) - elif isinstance(repl, Parameters) or isinstance(repl, CallableType): - # if the paramspec is *P.args or **P.kwargs: - if t.flavor != ParamSpecFlavor.BARE: - assert isinstance(repl, CallableType), "Should not be able to get here." - # Is this always the right thing to do? - param_spec = repl.param_spec() - if param_spec: - return param_spec.with_flavor(t.flavor) - else: - return repl - else: - return Parameters( - t.prefix.arg_types + repl.arg_types, - t.prefix.arg_kinds + repl.arg_kinds, - t.prefix.arg_names + repl.arg_names, - variables=[*t.prefix.variables, *repl.variables], - ) + elif isinstance(repl, (ParamSpecType, Parameters, CallableType)): + return expand_param_spec(t, repl) else: # TODO: should this branch be removed? better not to fail silently return repl @@ -219,6 +262,90 @@ def expand_unpack(self, t: UnpackType) -> list[Type] | Instance | AnyType | None def visit_parameters(self, t: Parameters) -> Type: return t.copy_modified(arg_types=self.expand_types(t.arg_types)) + def interpolate_args_for_unpack( + self, t: CallableType, var_arg: UnpackType + ) -> tuple[list[str | None], list[ArgKind], list[Type]]: + star_index = t.arg_kinds.index(ARG_STAR) + + # We have something like Unpack[Tuple[X1, X2, Unpack[Ts], Y1, Y2]] + if isinstance(get_proper_type(var_arg.type), TupleType): + expanded_tuple = get_proper_type(var_arg.type.accept(self)) + # TODO: handle the case that expanded_tuple is a variable length tuple. 
+ assert isinstance(expanded_tuple, TupleType) + expanded_items = expanded_tuple.items + else: + expanded_items_res = self.expand_unpack(var_arg) + if isinstance(expanded_items_res, list): + expanded_items = expanded_items_res + elif ( + isinstance(expanded_items_res, Instance) + and expanded_items_res.type.fullname == "builtins.tuple" + ): + # TODO: We shouldnt't simply treat this as a *arg because of suffix handling + # (there cannot be positional args after a *arg) + arg_types = ( + t.arg_types[:star_index] + + [expanded_items_res.args[0]] + + t.arg_types[star_index + 1 :] + ) + return (t.arg_names, t.arg_kinds, arg_types) + else: + return (t.arg_names, t.arg_kinds, t.arg_types) + + expanded_unpack_index = find_unpack_in_list(expanded_items) + # This is the case where we just have Unpack[Tuple[X1, X2, X3]] + # (for example if either the tuple had no unpacks, or the unpack in the + # tuple got fully expanded to something with fixed length) + if expanded_unpack_index is None: + arg_names = ( + t.arg_names[:star_index] + + [None] * len(expanded_items) + + t.arg_names[star_index + 1 :] + ) + arg_kinds = ( + t.arg_kinds[:star_index] + + [ARG_POS] * len(expanded_items) + + t.arg_kinds[star_index + 1 :] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_items + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + else: + # If Unpack[Ts] simplest form still has an unpack or is a + # homogenous tuple, then only the prefix can be represented as + # positional arguments, and we pass Tuple[Unpack[Ts-1], Y1, Y2] + # as the star arg, for example. + expanded_unpack = get_proper_type(expanded_items[expanded_unpack_index]) + assert isinstance(expanded_unpack, UnpackType) + + # Extract the typevartuple so we can get a tuple fallback from it. 
+ expanded_unpacked_tvt = get_proper_type(expanded_unpack.type) + assert isinstance(expanded_unpacked_tvt, TypeVarTupleType) + + prefix_len = expanded_unpack_index + arg_names = t.arg_names[:star_index] + [None] * prefix_len + t.arg_names[star_index:] + arg_kinds = ( + t.arg_kinds[:star_index] + [ARG_POS] * prefix_len + t.arg_kinds[star_index:] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_items[:prefix_len] + # Constructing the Unpack containing the tuple without the prefix. + + [ + UnpackType( + TupleType( + expanded_items[prefix_len:], expanded_unpacked_tvt.tuple_fallback + ) + ) + if len(expanded_items) - prefix_len > 1 + else expanded_items[0] + ] + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + return (arg_names, arg_kinds, arg_types) + def visit_callable_type(self, t: CallableType) -> Type: param_spec = t.param_spec() if param_spec is not None: @@ -246,21 +373,16 @@ def visit_callable_type(self, t: CallableType) -> Type: var_arg = t.var_arg() if var_arg is not None and isinstance(var_arg.typ, UnpackType): - expanded = self.expand_unpack(var_arg.typ) - # Handle other cases later. 
- assert isinstance(expanded, list) - assert len(expanded) == 1 and isinstance(expanded[0], UnpackType) - star_index = t.arg_kinds.index(ARG_STAR) - arg_types = ( - self.expand_types(t.arg_types[:star_index]) - + expanded - + self.expand_types(t.arg_types[star_index + 1 :]) - ) + arg_names, arg_kinds, arg_types = self.interpolate_args_for_unpack(t, var_arg.typ) else: + arg_names = t.arg_names + arg_kinds = t.arg_kinds arg_types = self.expand_types(t.arg_types) return t.copy_modified( arg_types=arg_types, + arg_names=arg_names, + arg_kinds=arg_kinds, ret_type=t.ret_type.accept(self), type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), ) @@ -327,11 +449,13 @@ def visit_literal_type(self, t: LiteralType) -> Type: return t def visit_union_type(self, t: UnionType) -> Type: - # After substituting for type variables in t.items, - # some of the resulting types might be subtypes of others. - from mypy.typeops import make_simplified_union # asdf - - return make_simplified_union(self.expand_types(t.items), t.line, t.column) + expanded = self.expand_types(t.items) + # After substituting for type variables in t.items, some resulting types + # might be subtypes of others, however calling make_simplified_union() + # can cause recursion, so we just remove strict duplicates. + return UnionType.make_union( + remove_trivial(flatten_nested_unions(expanded)), t.line, t.column + ) def visit_partial_type(self, t: PartialType) -> Type: return t @@ -344,8 +468,8 @@ def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(item) def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Target of the type alias cannot contain type variables, - # so we just expand the arguments. + # Target of the type alias cannot contain type variables (not bound by the type + # alias itself), so we just expand the arguments. 
return t.copy_modified(args=self.expand_types(t.args)) def expand_types(self, types: Iterable[Type]) -> list[Type]: @@ -365,8 +489,6 @@ def expand_unpack_with_variables( repl = get_proper_type(variables.get(t.type.id, t)) if isinstance(repl, TupleType): return repl.items - if isinstance(repl, TypeList): - return repl.items elif isinstance(repl, Instance) and repl.type.fullname == "builtins.tuple": return repl elif isinstance(repl, AnyType): @@ -383,3 +505,20 @@ def expand_unpack_with_variables( raise NotImplementedError(f"Invalid type replacement to expand: {repl}") else: raise NotImplementedError(f"Invalid type to expand: {t.type}") + + +@overload +def expand_self_type(var: Var, typ: ProperType, replacement: ProperType) -> ProperType: + ... + + +@overload +def expand_self_type(var: Var, typ: Type, replacement: Type) -> Type: + ... + + +def expand_self_type(var: Var, typ: Type, replacement: Type) -> Type: + """Expand appearances of Self type in a variable type.""" + if var.info.self_type is not None and not var.is_property: + return expand_type(typ, {var.info.self_type.id: replacement}) + return typ diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 38ad44a623b9..6d955525293d 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -212,6 +212,10 @@ def ast3_parse( MatchAs = Any MatchOr = Any AstNode = Union[ast3.expr, ast3.stmt, ast3.ExceptHandler] + if sys.version_info >= (3, 11): + TryStar = ast3.TryStar + else: + TryStar = Any except ImportError: try: from typed_ast import ast35 # type: ignore[attr-defined] # noqa: F401 @@ -1253,6 +1257,24 @@ def visit_Try(self, n: ast3.Try) -> TryStmt: ) return self.set_line(node, n) + def visit_TryStar(self, n: TryStar) -> TryStmt: + vs = [ + self.set_line(NameExpr(h.name), h) if h.name is not None else None for h in n.handlers + ] + types = [self.visit(h.type) for h in n.handlers] + handlers = [self.as_required_block(h.body, h.lineno) for h in n.handlers] + + node = TryStmt( + self.as_required_block(n.body, n.lineno), 
+ vs, + types, + handlers, + self.as_block(n.orelse, n.lineno), + self.as_block(n.finalbody, n.lineno), + ) + node.is_star = True + return self.set_line(node, n) + # Assert(expr test, expr? msg) def visit_Assert(self, n: ast3.Assert) -> AssertStmt: node = AssertStmt(self.visit(n.test), self.visit(n.msg)) diff --git a/mypy/fixup.py b/mypy/fixup.py index b3a2d43d6b4d..3593e4faa184 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -180,6 +180,8 @@ def visit_var(self, v: Var) -> None: def visit_type_alias(self, a: TypeAlias) -> None: a.target.accept(self.type_fixer) + for v in a.alias_tvars: + v.accept(self.type_fixer) class TypeFixer(TypeVisitor[None]): diff --git a/mypy/ipc.py b/mypy/ipc.py index d52769bdb2b1..f07616df0fd0 100644 --- a/mypy/ipc.py +++ b/mypy/ipc.py @@ -89,9 +89,6 @@ def write(self, data: bytes) -> None: if sys.platform == "win32": try: ov, err = _winapi.WriteFile(self.connection, data, overlapped=True) - # TODO: remove once typeshed supports Literal types - assert isinstance(ov, _winapi.Overlapped) - assert isinstance(err, int) try: if err == _winapi.ERROR_IO_PENDING: timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE @@ -217,8 +214,6 @@ def __enter__(self) -> IPCServer: # client never connects, though this can be "solved" by killing the server try: ov = _winapi.ConnectNamedPipe(self.connection, overlapped=True) - # TODO: remove once typeshed supports Literal types - assert isinstance(ov, _winapi.Overlapped) except OSError as e: # Don't raise if the client already exists, or the client already connected if e.winerror not in (_winapi.ERROR_PIPE_CONNECTED, _winapi.ERROR_NO_DATA): diff --git a/mypy/join.py b/mypy/join.py index d54febd7462a..62d256f4440f 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -9,6 +9,7 @@ from mypy.nodes import CONTRAVARIANT, COVARIANT, INVARIANT from mypy.state import state from mypy.subtypes import ( + SubtypeContext, find_member, is_equivalent, is_proper_subtype, @@ -101,7 +102,9 @@ def 
join_instances(self, t: Instance, s: Instance) -> ProperType: assert new_type is not None args.append(new_type) result: ProperType = Instance(t.type, args) - elif t.type.bases and is_subtype(t, s, ignore_type_params=True): + elif t.type.bases and is_proper_subtype( + t, s, subtype_context=SubtypeContext(ignore_type_params=True) + ): result = self.join_instances_via_supertype(t, s) else: # Now t is not a subtype of s, and t != s. Now s could be a subtype @@ -141,8 +144,11 @@ def join_instances_via_supertype(self, t: Instance, s: Instance) -> ProperType: def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: - """Return a simple least upper bound given the declared type.""" - # TODO: check infinite recursion for aliases here? + """Return a simple least upper bound given the declared type. + + This function should be only used by binder, and should not recurse. + For all other uses, use `join_types()`. + """ declaration = get_proper_type(declaration) s = get_proper_type(s) t = get_proper_type(t) @@ -158,10 +164,10 @@ def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: if isinstance(s, ErasedType): return t - if is_proper_subtype(s, t): + if is_proper_subtype(s, t, ignore_promotions=True): return t - if is_proper_subtype(t, s): + if is_proper_subtype(t, s, ignore_promotions=True): return s if isinstance(declaration, UnionType): @@ -176,6 +182,9 @@ def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: # Meets/joins require callable type normalization. 
s, t = normalize_callables(s, t) + if isinstance(s, UnionType) and not isinstance(t, UnionType): + s, t = t, s + value = t.accept(TypeJoinVisitor(s)) if declaration is None or is_subtype(value, declaration): return value diff --git a/mypy/main.py b/mypy/main.py index 360a8ed1df17..47dea2ae9797 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -1008,7 +1008,10 @@ def add_invertible_flag( help="When encountering SOURCE_FILE, read and type check " "the contents of SHADOW_FILE instead.", ) - add_invertible_flag("--fast-exit", default=True, help=argparse.SUPPRESS, group=internals_group) + internals_group.add_argument("--fast-exit", action="store_true", help=argparse.SUPPRESS) + internals_group.add_argument( + "--no-fast-exit", action="store_false", dest="fast_exit", help=argparse.SUPPRESS + ) # This flag is useful for mypy tests, where function bodies may be omitted. Plugin developers # may want to use this as well in their tests. add_invertible_flag( @@ -1082,8 +1085,14 @@ def add_invertible_flag( "--inferstats", action="store_true", dest="dump_inference_stats", help=argparse.SUPPRESS ) parser.add_argument("--dump-build-stats", action="store_true", help=argparse.SUPPRESS) - # dump timing stats for each processed file into the given output file + # Dump timing stats for each processed file into the given output file parser.add_argument("--timing-stats", dest="timing_stats", help=argparse.SUPPRESS) + # Dump per line type checking timing stats for each processed file into the given + # output file. Only total time spent in each top level expression will be shown. + # Times are show in microseconds. + parser.add_argument( + "--line-checking-stats", dest="line_checking_stats", help=argparse.SUPPRESS + ) # --debug-cache will disable any cache-related compressions/optimizations, # which will make the cache writing process output pretty-printed JSON (which # is easier to debug). 
@@ -1117,10 +1126,19 @@ def add_invertible_flag( parser.add_argument( "--cache-map", nargs="+", dest="special-opts:cache_map", help=argparse.SUPPRESS ) + # --debug-serialize will run tree.serialize() even if cache generation is disabled. + # Useful for mypy_primer to detect serialize errors earlier. + parser.add_argument("--debug-serialize", action="store_true", help=argparse.SUPPRESS) # This one is deprecated, but we will keep it for few releases. parser.add_argument( "--enable-incomplete-features", action="store_true", help=argparse.SUPPRESS ) + parser.add_argument( + "--disable-bytearray-promotion", action="store_true", help=argparse.SUPPRESS + ) + parser.add_argument( + "--disable-memoryview-promotion", action="store_true", help=argparse.SUPPRESS + ) # options specifying code to check code_group = parser.add_argument_group( diff --git a/mypy/meet.py b/mypy/meet.py index 3e772419ef3e..1cc125f3bfd6 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -15,6 +15,7 @@ ) from mypy.typeops import is_recursive_pair, make_simplified_union, tuple_fallback from mypy.types import ( + MYPYC_NATIVE_INT_NAMES, AnyType, CallableType, DeletedType, @@ -76,7 +77,7 @@ def meet_types(s: Type, t: Type) -> ProperType: # Code in checker.py should merge any extra_items where possible, so we # should have only compatible extra_items here. We check this before # the below subtype check, so that extra_attrs will not get erased. - if is_same_type(s, t) and (s.extra_attrs or t.extra_attrs): + if (s.extra_attrs or t.extra_attrs) and is_same_type(s, t): if s.extra_attrs and t.extra_attrs: if len(s.extra_attrs.attrs) > len(t.extra_attrs.attrs): # Return the one that has more precise information. 
@@ -124,7 +125,15 @@ def narrow_declared_type(declared: Type, narrowed: Type) -> Type: [ narrow_declared_type(x, narrowed) for x in declared.relevant_items() - if is_overlapping_types(x, narrowed, ignore_promotions=True) + # This (ugly) special-casing is needed to support checking + # branches like this: + # x: Union[float, complex] + # if isinstance(x, int): + # ... + if ( + is_overlapping_types(x, narrowed, ignore_promotions=True) + or is_subtype(narrowed, x, ignore_promotions=False) + ) ] ) if is_enum_overlapping_union(declared, narrowed): @@ -430,18 +439,13 @@ def _type_object_overlap(left: Type, right: Type) -> bool: return _type_object_overlap(left, right) or _type_object_overlap(right, left) if isinstance(left, CallableType) and isinstance(right, CallableType): - - def _callable_overlap(left: CallableType, right: CallableType) -> bool: - return is_callable_compatible( - left, - right, - is_compat=_is_overlapping_types, - ignore_pos_arg_names=True, - allow_partial_overlap=True, - ) - - # Compare both directions to handle type objects. - return _callable_overlap(left, right) or _callable_overlap(right, left) + return is_callable_compatible( + left, + right, + is_compat=_is_overlapping_types, + ignore_pos_arg_names=True, + allow_partial_overlap=True, + ) elif isinstance(left, CallableType): left = left.fallback elif isinstance(right, CallableType): @@ -472,6 +476,9 @@ def _callable_overlap(left: CallableType, right: CallableType) -> bool: ): return True + if right.type.fullname == "builtins.int" and left.type.fullname in MYPYC_NATIVE_INT_NAMES: + return True + # Two unrelated types cannot be partially overlapping: they're disjoint. 
if left.type.has_base(right.type.fullname): left = map_instance_to_supertype(left, right.type) diff --git a/mypy/message_registry.py b/mypy/message_registry.py index c84ce120dbda..7827a2818be9 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -44,6 +44,9 @@ def with_additional_msg(self, info: str) -> ErrorMessage: NO_RETURN_EXPECTED: Final = ErrorMessage("Return statement in function which does not return") INVALID_EXCEPTION: Final = ErrorMessage("Exception must be derived from BaseException") INVALID_EXCEPTION_TYPE: Final = ErrorMessage("Exception type must be derived from BaseException") +INVALID_EXCEPTION_GROUP: Final = ErrorMessage( + "Exception type in except* cannot derive from BaseExceptionGroup" +) RETURN_IN_ASYNC_GENERATOR: Final = ErrorMessage( '"return" with value in async generator is not allowed' ) @@ -134,6 +137,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage: MODULE_LEVEL_GETATTRIBUTE: Final = ErrorMessage( "__getattribute__ is not valid at the module level" ) +CLASS_VAR_CONFLICTS_SLOTS: Final = '"{}" in __slots__ conflicts with class variable access' NAME_NOT_IN_SLOTS: Final = ErrorMessage( 'Trying to assign name "{}" that is not in "__slots__" of type "{}"' ) @@ -150,6 +154,10 @@ def with_additional_msg(self, info: str) -> ErrorMessage: FUNCTION_ALWAYS_TRUE: Final = ErrorMessage( "Function {} could always be true in boolean context", code=codes.TRUTHY_FUNCTION ) +ITERABLE_ALWAYS_TRUE: Final = ErrorMessage( + "{} which can always be true in boolean context. 
Consider using {} instead.",
+    code=codes.TRUTHY_ITERABLE,
+)
 NOT_CALLABLE: Final = "{} not callable"
 TYPE_MUST_BE_USED: Final = "Value of type {} must be used"

@@ -231,6 +239,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage:
     "variable"
 )
 CLASS_VAR_WITH_TYPEVARS: Final = "ClassVar cannot contain type variables"
+CLASS_VAR_WITH_GENERIC_SELF: Final = "ClassVar cannot contain Self type in generic classes"
 CLASS_VAR_OUTSIDE_OF_CLASS: Final = "ClassVar can only be used for assignments in class body"

 # Protocol
diff --git a/mypy/messages.py b/mypy/messages.py
index 4e762faa0b32..b529615e564e 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -15,7 +15,7 @@
 import re
 from contextlib import contextmanager
 from textwrap import dedent
-from typing import Any, Callable, Iterable, Iterator, List, Sequence, cast
+from typing import Any, Callable, Collection, Iterable, Iterator, List, Sequence, cast
 from typing_extensions import Final

 from mypy import errorcodes as codes, message_registry
@@ -122,8 +122,6 @@
 # test-data/unit/fixtures/) that provides the definition. This is used for
 # generating better error messages when running mypy tests only.
 SUGGESTED_TEST_FIXTURES: Final = {
-    "builtins.list": "list.pyi",
-    "builtins.dict": "dict.pyi",
     "builtins.set": "set.pyi",
     "builtins.tuple": "tuple.pyi",
     "builtins.bool": "bool.pyi",
@@ -132,6 +130,7 @@
     "builtins.isinstance": "isinstancelist.pyi",
     "builtins.property": "property.pyi",
     "builtins.classmethod": "classmethod.pyi",
+    "typing._SpecialForm": "typing-medium.pyi",
 }
@@ -190,6 +189,14 @@ def disable_type_names(self) -> Iterator[None]:
     def are_type_names_disabled(self) -> bool:
         return len(self._disable_type_names) > 0 and self._disable_type_names[-1]

+    def prefer_simple_messages(self) -> bool:
+        """Should we generate simple/fast error messages?
+
+        If errors aren't shown to the user, we don't want to waste cycles producing
+        complex error messages.
+ """ + return self.errors.prefer_simple_messages() + def report( self, msg: str, @@ -230,8 +237,8 @@ def span_from_context(ctx: Context) -> tuple[int, int]: else: origin_span = None self.errors.report( - context.get_line() if context else -1, - context.get_column() if context else -1, + context.line if context else -1, + context.column if context else -1, msg, severity=severity, file=file, @@ -431,7 +438,7 @@ def has_no_attr( alternatives.discard(member) matches = [m for m in COMMON_MISTAKES.get(member, []) if m in alternatives] - matches.extend(best_matches(member, alternatives)[:3]) + matches.extend(best_matches(member, alternatives, n=3)) if member == "__aiter__" and matches == ["__iter__"]: matches = [] # Avoid misleading suggestion if matches: @@ -684,64 +691,69 @@ def incompatible_argument( actual_type_str, expected_type_str ) else: - try: - expected_type = callee.arg_types[m - 1] - except IndexError: # Varargs callees - expected_type = callee.arg_types[-1] - arg_type_str, expected_type_str = format_type_distinctly( - arg_type, expected_type, bare=True - ) - if arg_kind == ARG_STAR: - arg_type_str = "*" + arg_type_str - elif arg_kind == ARG_STAR2: - arg_type_str = "**" + arg_type_str - - # For function calls with keyword arguments, display the argument name rather than the - # number. 
- arg_label = str(n) - if isinstance(outer_context, CallExpr) and len(outer_context.arg_names) >= n: - arg_name = outer_context.arg_names[n - 1] - if arg_name is not None: - arg_label = f'"{arg_name}"' - if ( - arg_kind == ARG_STAR2 - and isinstance(arg_type, TypedDictType) - and m <= len(callee.arg_names) - and callee.arg_names[m - 1] is not None - and callee.arg_kinds[m - 1] != ARG_STAR2 - ): - arg_name = callee.arg_names[m - 1] - assert arg_name is not None - arg_type_str, expected_type_str = format_type_distinctly( - arg_type.items[arg_name], expected_type, bare=True - ) - arg_label = f'"{arg_name}"' - if isinstance(outer_context, IndexExpr) and isinstance(outer_context.index, StrExpr): - msg = 'Value of "{}" has incompatible type {}; expected {}'.format( - outer_context.index.value, - quote_type_string(arg_type_str), - quote_type_string(expected_type_str), - ) + if self.prefer_simple_messages(): + msg = "Argument has incompatible type" else: - msg = "Argument {} {}has incompatible type {}; expected {}".format( - arg_label, - target, - quote_type_string(arg_type_str), - quote_type_string(expected_type_str), + try: + expected_type = callee.arg_types[m - 1] + except IndexError: # Varargs callees + expected_type = callee.arg_types[-1] + arg_type_str, expected_type_str = format_type_distinctly( + arg_type, expected_type, bare=True ) + if arg_kind == ARG_STAR: + arg_type_str = "*" + arg_type_str + elif arg_kind == ARG_STAR2: + arg_type_str = "**" + arg_type_str + + # For function calls with keyword arguments, display the argument name rather + # than the number. 
+ arg_label = str(n) + if isinstance(outer_context, CallExpr) and len(outer_context.arg_names) >= n: + arg_name = outer_context.arg_names[n - 1] + if arg_name is not None: + arg_label = f'"{arg_name}"' + if ( + arg_kind == ARG_STAR2 + and isinstance(arg_type, TypedDictType) + and m <= len(callee.arg_names) + and callee.arg_names[m - 1] is not None + and callee.arg_kinds[m - 1] != ARG_STAR2 + ): + arg_name = callee.arg_names[m - 1] + assert arg_name is not None + arg_type_str, expected_type_str = format_type_distinctly( + arg_type.items[arg_name], expected_type, bare=True + ) + arg_label = f'"{arg_name}"' + if isinstance(outer_context, IndexExpr) and isinstance( + outer_context.index, StrExpr + ): + msg = 'Value of "{}" has incompatible type {}; expected {}'.format( + outer_context.index.value, + quote_type_string(arg_type_str), + quote_type_string(expected_type_str), + ) + else: + msg = "Argument {} {}has incompatible type {}; expected {}".format( + arg_label, + target, + quote_type_string(arg_type_str), + quote_type_string(expected_type_str), + ) + expected_type = get_proper_type(expected_type) + if isinstance(expected_type, UnionType): + expected_types = list(expected_type.items) + else: + expected_types = [expected_type] + for type in get_proper_types(expected_types): + if isinstance(arg_type, Instance) and isinstance(type, Instance): + notes = append_invariance_notes(notes, arg_type, type) object_type = get_proper_type(object_type) if isinstance(object_type, TypedDictType): code = codes.TYPEDDICT_ITEM else: code = codes.ARG_TYPE - expected_type = get_proper_type(expected_type) - if isinstance(expected_type, UnionType): - expected_types = list(expected_type.items) - else: - expected_types = [expected_type] - for type in get_proper_types(expected_types): - if isinstance(arg_type, Instance) and isinstance(type, Instance): - notes = append_invariance_notes(notes, arg_type, type) self.fail(msg, context, code=code) if notes: for note_msg in notes: @@ -755,6 +767,8 @@ 
def incompatible_argument_note( context: Context, code: ErrorCode | None, ) -> None: + if self.prefer_simple_messages(): + return if isinstance( original_caller_type, (Instance, TupleType, TypedDictType, TypeType, CallableType) ): @@ -831,7 +845,9 @@ def invalid_index_type( def too_few_arguments( self, callee: CallableType, context: Context, argument_names: Sequence[str | None] | None ) -> None: - if argument_names is not None: + if self.prefer_simple_messages(): + msg = "Too few arguments" + elif argument_names is not None: num_positional_args = sum(k is None for k in argument_names) arguments_left = callee.arg_names[num_positional_args : callee.min_args] diff = [k for k in arguments_left if k not in argument_names] @@ -855,7 +871,10 @@ def missing_named_argument(self, callee: CallableType, context: Context, name: s self.fail(msg, context, code=codes.CALL_ARG) def too_many_arguments(self, callee: CallableType, context: Context) -> None: - msg = "Too many arguments" + for_function(callee) + if self.prefer_simple_messages(): + msg = "Too many arguments" + else: + msg = "Too many arguments" + for_function(callee) self.fail(msg, context, code=codes.CALL_ARG) self.maybe_note_about_special_args(callee, context) @@ -873,11 +892,16 @@ def too_many_arguments_from_typed_dict( self.fail(msg, context) def too_many_positional_arguments(self, callee: CallableType, context: Context) -> None: - msg = "Too many positional arguments" + for_function(callee) + if self.prefer_simple_messages(): + msg = "Too many positional arguments" + else: + msg = "Too many positional arguments" + for_function(callee) self.fail(msg, context) self.maybe_note_about_special_args(callee, context) def maybe_note_about_special_args(self, callee: CallableType, context: Context) -> None: + if self.prefer_simple_messages(): + return # https://github.com/python/mypy/issues/11309 first_arg = callee.def_extras.get("first_arg") if first_arg and first_arg not in {"self", "cls", "mcs"}: @@ -902,11 +926,11 @@ def 
unexpected_keyword_argument( matching_type_args.append(callee_arg_name) else: not_matching_type_args.append(callee_arg_name) - matches = best_matches(name, matching_type_args) + matches = best_matches(name, matching_type_args, n=3) if not matches: - matches = best_matches(name, not_matching_type_args) + matches = best_matches(name, not_matching_type_args, n=3) if matches: - msg += f"; did you mean {pretty_seq(matches[:3], 'or')}?" + msg += f"; did you mean {pretty_seq(matches, 'or')}?" self.fail(msg, context, code=codes.CALL_ARG) module = find_defining_module(self.modules, callee) if module: @@ -1228,7 +1252,10 @@ def undefined_in_superclass(self, member: str, context: Context) -> None: self.fail(f'"{member}" undefined in superclass', context) def variable_may_be_undefined(self, name: str, context: Context) -> None: - self.fail(f'Name "{name}" may be undefined', context, code=codes.PARTIALLY_DEFINED) + self.fail(f'Name "{name}" may be undefined', context, code=codes.POSSIBLY_UNDEFINED) + + def var_used_before_def(self, name: str, context: Context) -> None: + self.fail(f'Name "{name}" is used before definition', context, code=codes.USED_BEFORE_DEF) def first_argument_for_super_must_be_type(self, actual: Type, context: Context) -> None: actual = get_proper_type(actual) @@ -1608,9 +1635,9 @@ def unexpected_typeddict_keys( expected_set = set(expected_keys) if not typ.is_anonymous(): # Generate simpler messages for some common special cases. - if actual_set < expected_set: - # Use list comprehension instead of set operations to preserve order. - missing = [key for key in expected_keys if key not in actual_set] + # Use list comprehension instead of set operations to preserve order. 
+ missing = [key for key in expected_keys if key not in actual_set] + if missing: self.fail( "Missing {} for TypedDict {}".format( format_key_list(missing, short=True), format_type(typ) @@ -1618,20 +1645,18 @@ def unexpected_typeddict_keys( context, code=codes.TYPEDDICT_ITEM, ) + extra = [key for key in actual_keys if key not in expected_set] + if extra: + self.fail( + "Extra {} for TypedDict {}".format( + format_key_list(extra, short=True), format_type(typ) + ), + context, + code=codes.TYPPEDICT_UNKNOWN_KEY, + ) + if missing or extra: + # No need to check for further errors return - else: - extra = [key for key in actual_keys if key not in expected_set] - if extra: - # If there are both extra and missing keys, only report extra ones for - # simplicity. - self.fail( - "Extra {} for TypedDict {}".format( - format_key_list(extra, short=True), format_type(typ) - ), - context, - code=codes.TYPEDDICT_ITEM, - ) - return found = format_key_list(actual_keys, short=True) if not expected_keys: self.fail(f"Unexpected TypedDict {found}", context) @@ -1651,8 +1676,15 @@ def typeddict_key_must_be_string_literal(self, typ: TypedDictType, context: Cont ) def typeddict_key_not_found( - self, typ: TypedDictType, item_name: str, context: Context + self, typ: TypedDictType, item_name: str, context: Context, setitem: bool = False ) -> None: + """Handle error messages for TypedDicts that have unknown keys. + + Note, that we differentiate in between reading a value and setting a + value. + Setting a value on a TypedDict is an 'unknown-key' error, whereas + reading it is the more serious/general 'item' error. 
+ """ if typ.is_anonymous(): self.fail( '"{}" is not a valid TypedDict key; expected one of {}'.format( @@ -1661,22 +1693,21 @@ def typeddict_key_not_found( context, ) else: + err_code = codes.TYPPEDICT_UNKNOWN_KEY if setitem else codes.TYPEDDICT_ITEM self.fail( - f'TypedDict {format_type(typ)} has no key "{item_name}"', - context, - code=codes.TYPEDDICT_ITEM, + f'TypedDict {format_type(typ)} has no key "{item_name}"', context, code=err_code ) - matches = best_matches(item_name, typ.items.keys()) + matches = best_matches(item_name, typ.items.keys(), n=3) if matches: self.note( - "Did you mean {}?".format(pretty_seq(matches[:3], "or")), - context, - code=codes.TYPEDDICT_ITEM, + "Did you mean {}?".format(pretty_seq(matches, "or")), context, code=err_code ) def typeddict_context_ambiguous(self, types: list[TypedDictType], context: Context) -> None: formatted_types = ", ".join(list(format_type_distinctly(*types))) - self.fail(f"Type of TypedDict is ambiguous, could be any of ({formatted_types})", context) + self.fail( + f"Type of TypedDict is ambiguous, none of ({formatted_types}) matches cleanly", context + ) def typeddict_key_cannot_be_deleted( self, typ: TypedDictType, item_name: str, context: Context @@ -1865,6 +1896,7 @@ def report_protocol_problems( class_obj = False is_module = False + skip = [] if isinstance(subtype, TupleType): if not isinstance(subtype.partial_fallback, Instance): return @@ -1879,23 +1911,25 @@ def report_protocol_problems( class_obj = True subtype = subtype.item elif isinstance(subtype, CallableType): - if not subtype.is_type_obj(): - return - ret_type = get_proper_type(subtype.ret_type) - if isinstance(ret_type, TupleType): - ret_type = ret_type.partial_fallback - if not isinstance(ret_type, Instance): - return - class_obj = True - subtype = ret_type + if subtype.is_type_obj(): + ret_type = get_proper_type(subtype.ret_type) + if isinstance(ret_type, TupleType): + ret_type = ret_type.partial_fallback + if not isinstance(ret_type, Instance): 
+ return + class_obj = True + subtype = ret_type + else: + subtype = subtype.fallback + skip = ["__call__"] if subtype.extra_attrs and subtype.extra_attrs.mod_name: is_module = True # Report missing members - missing = get_missing_protocol_members(subtype, supertype) + missing = get_missing_protocol_members(subtype, supertype, skip=skip) if ( missing - and len(missing) < len(supertype.type.protocol_members) + and (len(missing) < len(supertype.type.protocol_members) or missing == ["__call__"]) and len(missing) <= MAX_ITEMS ): if missing == ["__call__"] and class_obj: @@ -2253,6 +2287,9 @@ def format_literal_value(typ: LiteralType) -> str: if itype.extra_attrs and itype.extra_attrs.mod_name and module_names: return f"{base_str} {itype.extra_attrs.mod_name}" return base_str + if itype.type.fullname == "typing._SpecialForm": + # This is not a real type but used for some typing-related constructs. + return "" if verbosity >= 2 or (fullnames and itype.type.fullname in fullnames): base_str = itype.type.fullname else: @@ -2601,13 +2638,15 @@ def variance_string(variance: int) -> str: return "invariant" -def get_missing_protocol_members(left: Instance, right: Instance) -> list[str]: +def get_missing_protocol_members(left: Instance, right: Instance, skip: list[str]) -> list[str]: """Find all protocol members of 'right' that are not implemented (i.e. completely missing) in 'left'. 
""" assert right.type.is_protocol missing: list[str] = [] for member in right.type.protocol_members: + if member in skip: + continue if not find_member(member, left, left): missing.append(member) return missing @@ -2654,6 +2693,7 @@ def get_bad_protocol_flags( if ( IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags + and IS_SETTABLE in superflags or IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags or IS_SETTABLE in superflags @@ -2760,11 +2800,24 @@ def find_defining_module(modules: dict[str, MypyFile], typ: CallableType) -> Myp COMMON_MISTAKES: Final[dict[str, Sequence[str]]] = {"add": ("append", "extend")} -def best_matches(current: str, options: Iterable[str]) -> list[str]: - ratios = {v: difflib.SequenceMatcher(a=current, b=v).ratio() for v in options} - return sorted( - (o for o in options if ratios[o] > 0.75), reverse=True, key=lambda v: (ratios[v], v) - ) +def _real_quick_ratio(a: str, b: str) -> float: + # this is an upper bound on difflib.SequenceMatcher.ratio + # similar to difflib.SequenceMatcher.real_quick_ratio, but faster since we don't instantiate + al = len(a) + bl = len(b) + return 2.0 * min(al, bl) / (al + bl) + + +def best_matches(current: str, options: Collection[str], n: int) -> list[str]: + # narrow down options cheaply + assert current + options = [o for o in options if _real_quick_ratio(current, o) > 0.75] + if len(options) >= 50: + options = [o for o in options if abs(len(o) - len(current)) <= 1] + + ratios = {option: difflib.SequenceMatcher(a=current, b=option).ratio() for option in options} + options = [option for option, ratio in ratios.items() if ratio > 0.75] + return sorted(options, key=lambda v: (-ratios[v], v))[:n] def pretty_seq(args: Sequence[str], conjunction: str) -> str: diff --git a/mypy/mixedtraverser.py b/mypy/mixedtraverser.py index d25e9b9b0137..771f87fc6bd6 100644 --- a/mypy/mixedtraverser.py +++ b/mypy/mixedtraverser.py @@ -25,6 +25,9 @@ class MixedTraverserVisitor(TraverserVisitor, TypeTraverserVisitor): 
"""Recursive traversal of both Node and Type objects.""" + def __init__(self) -> None: + self.in_type_alias_expr = False + # Symbol nodes def visit_var(self, var: Var) -> None: @@ -45,7 +48,9 @@ def visit_class_def(self, o: ClassDef) -> None: def visit_type_alias_expr(self, o: TypeAliasExpr) -> None: super().visit_type_alias_expr(o) + self.in_type_alias_expr = True o.type.accept(self) + self.in_type_alias_expr = False def visit_type_var_expr(self, o: TypeVarExpr) -> None: super().visit_type_var_expr(o) diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index 5d542b154906..265d76ed5bb6 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -115,15 +115,19 @@ def __init__( module: str | None, text: str | None = None, base_dir: str | None = None, + followed: bool = False, ) -> None: self.path = path # File where it's found (e.g. 'xxx/yyy/foo/bar.py') self.module = module or "__main__" # Module name (e.g. 'foo.bar') self.text = text # Source code, if initially supplied, else None self.base_dir = base_dir # Directory where the package is rooted (e.g. 'xxx/yyy') + self.followed = followed # Was this found by following imports? 
def __repr__(self) -> str: - return "BuildSource(path={!r}, module={!r}, has_text={}, base_dir={!r})".format( - self.path, self.module, self.text is not None, self.base_dir + return ( + "BuildSource(path={!r}, module={!r}, has_text={}, base_dir={!r}, followed={})".format( + self.path, self.module, self.text is not None, self.base_dir, self.followed + ) ) @@ -144,14 +148,11 @@ def __init__(self, sources: list[BuildSource]) -> None: self.source_modules[source.module] = source.path or "" def is_source(self, file: MypyFile) -> bool: - if file.path and file.path in self.source_paths: - return True - elif file._fullname in self.source_modules: - return True - elif self.source_text_present: - return True - else: - return False + return ( + (file.path and file.path in self.source_paths) + or file._fullname in self.source_modules + or self.source_text_present + ) class FindModuleCache: @@ -506,7 +507,11 @@ def _find_module(self, id: str, use_typeshed: bool) -> ModuleSearchResult: # In namespace mode, register a potential namespace package if self.options and self.options.namespace_packages: - if fscache.exists_case(base_path, dir_prefix) and not has_init: + if ( + not has_init + and fscache.exists_case(base_path, dir_prefix) + and not fscache.isfile_case(base_path, dir_prefix) + ): near_misses.append((base_path, dir_prefix)) # No package, look for module. @@ -569,11 +574,11 @@ def _is_compatible_stub_package(self, stub_dir: str) -> bool: whether the stubs are compatible with Python 2 and 3. 
""" metadata_fnam = os.path.join(stub_dir, "METADATA.toml") - if os.path.isfile(metadata_fnam): - with open(metadata_fnam, "rb") as f: - metadata = tomllib.load(f) - return bool(metadata.get("python3", True)) - return True + if not os.path.isfile(metadata_fnam): + return True + with open(metadata_fnam, "rb") as f: + metadata = tomllib.load(f) + return bool(metadata.get("python3", True)) def find_modules_recursive(self, module: str) -> list[BuildSource]: module_path = self.find_module(module) diff --git a/mypy/moduleinspect.py b/mypy/moduleinspect.py index feca1f43abf2..b383fc9dc145 100644 --- a/mypy/moduleinspect.py +++ b/mypy/moduleinspect.py @@ -36,7 +36,7 @@ def is_c_module(module: ModuleType) -> bool: # Could be a namespace package. These must be handled through # introspection, since there is no source file. return True - return os.path.splitext(module.__dict__["__file__"])[-1] in [".so", ".pyd"] + return os.path.splitext(module.__dict__["__file__"])[-1] in [".so", ".pyd", ".dll"] class InspectError(Exception): diff --git a/mypy/mro.py b/mypy/mro.py index 912cf3e2e341..cc9f88a9d045 100644 --- a/mypy/mro.py +++ b/mypy/mro.py @@ -4,7 +4,7 @@ from mypy.nodes import TypeInfo from mypy.types import Instance -from mypy.typestate import TypeState +from mypy.typestate import type_state def calculate_mro(info: TypeInfo, obj_type: Callable[[], Instance] | None = None) -> None: @@ -17,7 +17,7 @@ def calculate_mro(info: TypeInfo, obj_type: Callable[[], Instance] | None = None info.mro = mro # The property of falling back to Any is inherited. 
info.fallback_to_any = any(baseinfo.fallback_to_any for baseinfo in info.mro) - TypeState.reset_all_subtype_caches_for(info) + type_state.reset_all_subtype_caches_for(info) class MroError(Exception): diff --git a/mypy/nodes.py b/mypy/nodes.py index a40e94c42a00..75b8ee7660e9 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -12,6 +12,7 @@ Callable, Dict, Iterator, + List, Optional, Sequence, Tuple, @@ -24,7 +25,6 @@ from mypy_extensions import trait import mypy.strconv -from mypy.bogus_type import Bogus from mypy.util import short_type from mypy.visitor import ExpressionVisitor, NodeVisitor, StatementVisitor @@ -71,14 +71,6 @@ def set_line( if end_column is not None: self.end_column = end_column - def get_line(self) -> int: - """Don't use. Use x.line.""" - return self.line - - def get_column(self) -> int: - """Don't use. Use x.column.""" - return self.column - if TYPE_CHECKING: # break import cycle only needed for mypy @@ -254,12 +246,10 @@ class SymbolNode(Node): def name(self) -> str: pass - # fullname can often be None even though the type system - # disagrees. We mark this with Bogus to let mypyc know not to - # worry about it. 
+ # Fully qualified name @property @abstractmethod - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: pass @abstractmethod @@ -298,8 +288,10 @@ class MypyFile(SymbolNode): "future_import_flags", ) + __match_args__ = ("name", "path", "defs") + # Fully qualified module name - _fullname: Bogus[str] + _fullname: str # Path to the file (empty string if not known) path: str # Top-level definitions and statements @@ -366,7 +358,7 @@ def name(self) -> str: return "" if not self._fullname else self._fullname.split(".")[-1] @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname def accept(self, visitor: NodeVisitor[T]) -> T: @@ -433,6 +425,8 @@ class Import(ImportBase): __slots__ = ("ids",) + __match_args__ = ("ids",) + ids: list[tuple[str, str | None]] # (module id, as id) def __init__(self, ids: list[tuple[str, str | None]]) -> None: @@ -448,6 +442,8 @@ class ImportFrom(ImportBase): __slots__ = ("id", "names", "relative") + __match_args__ = ("id", "names", "relative") + id: str relative: int names: list[tuple[str, str | None]] # Tuples (name, as name) @@ -467,6 +463,8 @@ class ImportAll(ImportBase): __slots__ = ("id", "relative", "imported_names") + __match_args__ = ("id", "relative") + id: str relative: int # NOTE: Only filled and used by old semantic analyzer. @@ -482,44 +480,13 @@ def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_import_all(self) -class ImportedName(SymbolNode): - """Indirect reference to a fullname stored in symbol table. - - This node is not present in the original program as such. This is - just a temporary artifact in binding imported names. After semantic - analysis pass 2, these references should be replaced with direct - reference to a real AST node. - - Note that this is neither a Statement nor an Expression so this - can't be visited. 
- """ - - __slots__ = ("target_fullname",) - - def __init__(self, target_fullname: str) -> None: - super().__init__() - self.target_fullname = target_fullname - - @property - def name(self) -> str: - return self.target_fullname.split(".")[-1] - - @property - def fullname(self) -> str: - return self.target_fullname - - def serialize(self) -> JsonDict: - assert False, "ImportedName leaked from semantic analysis" - - @classmethod - def deserialize(cls, data: JsonDict) -> ImportedName: - assert False, "ImportedName should never be serialized" - - def __str__(self) -> str: - return f"ImportedName({self.target_fullname})" - - -FUNCBASE_FLAGS: Final = ["is_property", "is_class", "is_static", "is_final"] +FUNCBASE_FLAGS: Final = [ + "is_property", + "is_class", + "is_static", + "is_final", + "is_dataclass_transform", +] class FuncBase(Node): @@ -545,6 +512,7 @@ class FuncBase(Node): "is_static", # Uses "@staticmethod" "is_final", # Uses "@final" "_fullname", + "is_dataclass_transform", # Is decorated with "@typing.dataclass_transform" or similar ) def __init__(self) -> None: @@ -562,8 +530,8 @@ def __init__(self) -> None: self.is_static = False self.is_final = False # Name with module prefix - # TODO: Type should be Optional[str] - self._fullname = cast(Bogus[str], None) + self._fullname = "" + self.is_dataclass_transform = False @property @abstractmethod @@ -571,7 +539,7 @@ def name(self) -> str: pass @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname @@ -652,6 +620,8 @@ class Argument(Node): __slots__ = ("variable", "type_annotation", "initializer", "kind", "pos_only") + __match_args__ = ("variable", "type_annotation", "initializer", "kind", "pos_only") + def __init__( self, variable: Var, @@ -791,6 +761,8 @@ class FuncDef(FuncItem, SymbolNode, Statement): "docstring", ) + __match_args__ = ("name", "arguments", "type", "body") + # Note that all __init__ args must have default values def __init__( self, @@ -882,6 +854,8 @@ 
class Decorator(SymbolNode, Statement): __slots__ = ("func", "decorators", "original_decorators", "var", "is_overload") + __match_args__ = ("decorators", "var", "func") + func: FuncDef # Decorated function decorators: list[Expression] # Decorators (may be empty) # Some decorators are removed by semanal, keep the original here. @@ -903,7 +877,7 @@ def name(self) -> str: return self.func.name @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self.func.fullname @property @@ -993,11 +967,13 @@ class Var(SymbolNode): "invalid_partial_type", ) + __match_args__ = ("name", "type", "final_value") + def __init__(self, name: str, type: mypy.types.Type | None = None) -> None: super().__init__() self._name = name # Name without module prefix # TODO: Should be Optional[str] - self._fullname = cast("Bogus[str]", None) # Name with module prefix + self._fullname = "" # Name with module prefix # TODO: Should be Optional[TypeInfo] self.info = VAR_NO_INFO self.type: mypy.types.Type | None = type # Declared or inferred type, or None @@ -1049,7 +1025,7 @@ def name(self) -> str: return self._name @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname def accept(self, visitor: NodeVisitor[T]) -> T: @@ -1087,7 +1063,7 @@ class ClassDef(Statement): __slots__ = ( "name", - "fullname", + "_fullname", "defs", "type_vars", "base_type_exprs", @@ -1100,10 +1076,13 @@ class ClassDef(Statement): "has_incompatible_baseclass", "deco_line", "docstring", + "removed_statements", ) + __match_args__ = ("name", "defs") + name: str # Name of the class without module prefix - fullname: Bogus[str] # Fully qualified name of the class + _fullname: str # Fully qualified name of the class defs: Block type_vars: list[mypy.types.TypeVarLikeType] # Base class expressions (not semantically analyzed -- can be arbitrary expressions) @@ -1116,6 +1095,8 @@ class ClassDef(Statement): keywords: dict[str, Expression] analyzed: Expression | None 
has_incompatible_baseclass: bool + # Used by special forms like NamedTuple and TypedDict to store invalid statements + removed_statements: list[Statement] def __init__( self, @@ -1128,7 +1109,7 @@ def __init__( ) -> None: super().__init__() self.name = name - self.fullname = None # type: ignore[assignment] + self._fullname = "" self.defs = defs self.type_vars = type_vars or [] self.base_type_exprs = base_type_exprs or [] @@ -1142,6 +1123,15 @@ def __init__( # Used for error reporting (to keep backwad compatibility with pre-3.8) self.deco_line: int | None = None self.docstring: str | None = None + self.removed_statements = [] + + @property + def fullname(self) -> str: + return self._fullname + + @fullname.setter + def fullname(self, v: str) -> None: + self._fullname = v def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_class_def(self) @@ -1180,6 +1170,8 @@ class GlobalDecl(Statement): __slots__ = ("names",) + __match_args__ = ("names",) + names: list[str] def __init__(self, names: list[str]) -> None: @@ -1195,6 +1187,8 @@ class NonlocalDecl(Statement): __slots__ = ("names",) + __match_args__ = ("names",) + names: list[str] def __init__(self, names: list[str]) -> None: @@ -1208,6 +1202,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class Block(Statement): __slots__ = ("body", "is_unreachable") + __match_args__ = ("body", "is_unreachable") + def __init__(self, body: list[Statement]) -> None: super().__init__() self.body = body @@ -1230,6 +1226,8 @@ class ExpressionStmt(Statement): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression def __init__(self, expr: Expression) -> None: @@ -1262,6 +1260,8 @@ class AssignmentStmt(Statement): "invalid_recursive_alias", ) + __match_args__ = ("lvalues", "rvalues", "type") + lvalues: list[Lvalue] # This is a TempNode if and only if no rvalue (x: t). 
rvalue: Expression @@ -1310,6 +1310,8 @@ class OperatorAssignmentStmt(Statement): __slots__ = ("op", "lvalue", "rvalue") + __match_args__ = ("lvalue", "op", "rvalue") + op: str # TODO: Enum? lvalue: Lvalue rvalue: Expression @@ -1327,6 +1329,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class WhileStmt(Statement): __slots__ = ("expr", "body", "else_body") + __match_args__ = ("expr", "body", "else_body") + expr: Expression body: Block else_body: Block | None @@ -1354,6 +1358,8 @@ class ForStmt(Statement): "is_async", ) + __match_args__ = ("index", "index_type", "expr", "body", "else_body") + # Index variables index: Lvalue # Type given by type comments for index, can be None @@ -1396,6 +1402,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ReturnStmt(Statement): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression | None def __init__(self, expr: Expression | None) -> None: @@ -1409,6 +1417,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class AssertStmt(Statement): __slots__ = ("expr", "msg") + __match_args__ = ("expr", "msg") + expr: Expression msg: Expression | None @@ -1424,6 +1434,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class DelStmt(Statement): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Lvalue def __init__(self, expr: Lvalue) -> None: @@ -1458,6 +1470,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class IfStmt(Statement): __slots__ = ("expr", "body", "else_body") + __match_args__ = ("expr", "body", "else_body") + expr: list[Expression] body: list[Block] else_body: Block | None @@ -1475,6 +1489,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class RaiseStmt(Statement): __slots__ = ("expr", "from_expr") + __match_args__ = ("expr", "from_expr") + # Plain 'raise' is a valid statement. 
expr: Expression | None from_expr: Expression | None @@ -1489,7 +1505,9 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class TryStmt(Statement): - __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body") + __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") + + __match_args__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") body: Block # Try body # Plain 'except:' also possible @@ -1498,6 +1516,8 @@ class TryStmt(Statement): handlers: list[Block] # Except bodies else_body: Block | None finally_body: Block | None + # Whether this is try ... except* (added in Python 3.11) + is_star: bool def __init__( self, @@ -1515,6 +1535,7 @@ def __init__( self.handlers = handlers self.else_body = else_body self.finally_body = finally_body + self.is_star = False def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_try_stmt(self) @@ -1523,6 +1544,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class WithStmt(Statement): __slots__ = ("expr", "target", "unanalyzed_type", "analyzed_types", "body", "is_async") + __match_args__ = ("expr", "target", "body") + expr: list[Expression] target: list[Lvalue | None] # Type given by type comments for target, can be None @@ -1552,6 +1575,10 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class MatchStmt(Statement): + __slots__ = ("subject", "patterns", "guards", "bodies") + + __match_args__ = ("subject", "patterns", "guards", "bodies") + subject: Expression patterns: list[Pattern] guards: list[Expression | None] @@ -1583,6 +1610,8 @@ class IntExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: int # 0 by default def __init__(self, value: int) -> None: @@ -1604,6 +1633,8 @@ class StrExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: str # '' by default def __init__(self, value: str) -> None: @@ -1619,6 +1650,8 @@ class BytesExpr(Expression): __slots__ = 
("value",) + __match_args__ = ("value",) + # Note: we deliberately do NOT use bytes here because it ends up # unnecessarily complicating a lot of the result logic. For example, # we'd have to worry about converting the bytes into a format we can @@ -1643,6 +1676,8 @@ class FloatExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: float # 0.0 by default def __init__(self, value: float) -> None: @@ -1658,6 +1693,8 @@ class ComplexExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: complex def __init__(self, value: complex) -> None: @@ -1682,6 +1719,8 @@ class StarExpr(Expression): __slots__ = ("expr", "valid") + __match_args__ = ("expr", "valid") + expr: Expression valid: bool @@ -1702,7 +1741,7 @@ class RefExpr(Expression): __slots__ = ( "kind", "node", - "fullname", + "_fullname", "is_new_def", "is_inferred_def", "is_alias_rvalue", @@ -1716,7 +1755,7 @@ def __init__(self) -> None: # Var, FuncDef or TypeInfo that describes this self.node: SymbolNode | None = None # Fully qualified name (or name if not global) - self.fullname: str | None = None + self._fullname = "" # Does this define a new name? self.is_new_def = False # Does this define a new name with inferred type? @@ -1729,6 +1768,14 @@ def __init__(self) -> None: # Cache type guard from callable_type.type_guard self.type_guard: mypy.types.Type | None = None + @property + def fullname(self) -> str: + return self._fullname + + @fullname.setter + def fullname(self, v: str) -> None: + self._fullname = v + class NameExpr(RefExpr): """Name expression @@ -1738,9 +1785,11 @@ class NameExpr(RefExpr): __slots__ = ("name", "is_special_form") + __match_args__ = ("name", "node") + def __init__(self, name: str) -> None: super().__init__() - self.name = name # Name referred to (may be qualified) + self.name = name # Name referred to # Is this a l.h.s. of a special form assignment like typed dict or type variable? 
self.is_special_form = False @@ -1756,6 +1805,8 @@ class MemberExpr(RefExpr): __slots__ = ("expr", "name", "def_var") + __match_args__ = ("expr", "name", "node") + def __init__(self, expr: Expression, name: str) -> None: super().__init__() self.expr = expr @@ -1817,6 +1868,8 @@ class CallExpr(Expression): __slots__ = ("callee", "args", "arg_kinds", "arg_names", "analyzed") + __match_args__ = ("callee", "args", "arg_kinds", "arg_names") + def __init__( self, callee: Expression, @@ -1845,6 +1898,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class YieldFromExpr(Expression): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression def __init__(self, expr: Expression) -> None: @@ -1858,6 +1913,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class YieldExpr(Expression): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression | None def __init__(self, expr: Expression | None) -> None: @@ -1876,6 +1933,8 @@ class IndexExpr(Expression): __slots__ = ("base", "index", "method_type", "analyzed") + __match_args__ = ("base", "index") + base: Expression index: Expression # Inferred __getitem__ method type @@ -1900,6 +1959,8 @@ class UnaryExpr(Expression): __slots__ = ("op", "expr", "method_type") + __match_args__ = ("op", "expr") + op: str # TODO: Enum? expr: Expression # Inferred operator method type @@ -1920,6 +1981,8 @@ class AssignmentExpr(Expression): __slots__ = ("target", "value") + __match_args__ = ("target", "value") + def __init__(self, target: Expression, value: Expression) -> None: super().__init__() self.target = target @@ -1930,10 +1993,22 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class OpExpr(Expression): - """Binary operation (other than . or [] or comparison operators, - which have specific nodes).""" + """Binary operation. + + The dot (.), [] and comparison operators have more specific nodes. 
+ """ - __slots__ = ("op", "left", "right", "method_type", "right_always", "right_unreachable") + __slots__ = ( + "op", + "left", + "right", + "method_type", + "right_always", + "right_unreachable", + "analyzed", + ) + + __match_args__ = ("left", "op", "right") op: str # TODO: Enum? left: Expression @@ -1944,8 +2019,12 @@ class OpExpr(Expression): right_always: bool # Per static analysis only: Is the right side unreachable? right_unreachable: bool + # Used for expressions that represent a type "X | Y" in some contexts + analyzed: TypeAliasExpr | None - def __init__(self, op: str, left: Expression, right: Expression) -> None: + def __init__( + self, op: str, left: Expression, right: Expression, analyzed: TypeAliasExpr | None = None + ) -> None: super().__init__() self.op = op self.left = left @@ -1953,6 +2032,7 @@ def __init__(self, op: str, left: Expression, right: Expression) -> None: self.method_type = None self.right_always = False self.right_unreachable = False + self.analyzed = analyzed def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_op_expr(self) @@ -1963,6 +2043,8 @@ class ComparisonExpr(Expression): __slots__ = ("operators", "operands", "method_types") + __match_args__ = ("operands", "operators") + operators: list[str] operands: list[Expression] # Inferred type for the operator methods (when relevant; None for 'is'). 
@@ -1993,6 +2075,8 @@ class SliceExpr(Expression): __slots__ = ("begin_index", "end_index", "stride") + __match_args__ = ("begin_index", "end_index", "stride") + begin_index: Expression | None end_index: Expression | None stride: Expression | None @@ -2017,6 +2101,8 @@ class CastExpr(Expression): __slots__ = ("expr", "type") + __match_args__ = ("expr", "type") + expr: Expression type: mypy.types.Type @@ -2034,6 +2120,8 @@ class AssertTypeExpr(Expression): __slots__ = ("expr", "type") + __match_args__ = ("expr", "type") + expr: Expression type: mypy.types.Type @@ -2051,6 +2139,8 @@ class RevealExpr(Expression): __slots__ = ("expr", "kind", "local_nodes") + __match_args__ = ("expr", "kind", "local_nodes") + expr: Expression | None kind: int local_nodes: list[Var] | None @@ -2072,6 +2162,8 @@ class SuperExpr(Expression): __slots__ = ("name", "info", "call") + __match_args__ = ("name", "call", "info") + name: str info: TypeInfo | None # Type that contains this super expression call: CallExpr # The expression super(...) 
@@ -2089,6 +2181,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class LambdaExpr(FuncItem, Expression): """Lambda expression""" + __match_args__ = ("arguments", "arg_names", "arg_kinds", "body") + @property def name(self) -> str: return "" @@ -2112,6 +2206,8 @@ class ListExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[Expression] def __init__(self, items: list[Expression]) -> None: @@ -2127,6 +2223,8 @@ class DictExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[tuple[Expression | None, Expression]] def __init__(self, items: list[tuple[Expression | None, Expression]]) -> None: @@ -2144,6 +2242,8 @@ class TupleExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[Expression] def __init__(self, items: list[Expression]) -> None: @@ -2159,6 +2259,8 @@ class SetExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[Expression] def __init__(self, items: list[Expression]) -> None: @@ -2174,6 +2276,8 @@ class GeneratorExpr(Expression): __slots__ = ("left_expr", "sequences", "condlists", "is_async", "indices") + __match_args__ = ("left_expr", "indices", "sequences", "condlists") + left_expr: Expression sequences: list[Expression] condlists: list[list[Expression]] @@ -2204,6 +2308,8 @@ class ListComprehension(Expression): __slots__ = ("generator",) + __match_args__ = ("generator",) + generator: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: @@ -2219,6 +2325,8 @@ class SetComprehension(Expression): __slots__ = ("generator",) + __match_args__ = ("generator",) + generator: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: @@ -2234,6 +2342,8 @@ class DictionaryComprehension(Expression): __slots__ = ("key", "value", "sequences", "condlists", "is_async", "indices") + __match_args__ = ("key", "value", "indices", "sequences", "condlists") + key: Expression value: Expression sequences: list[Expression] 
@@ -2267,6 +2377,8 @@ class ConditionalExpr(Expression): __slots__ = ("cond", "if_expr", "else_expr") + __match_args__ = ("if_expr", "cond", "else_expr") + cond: Expression if_expr: Expression else_expr: Expression @@ -2286,6 +2398,8 @@ class TypeApplication(Expression): __slots__ = ("expr", "types") + __match_args__ = ("expr", "types") + expr: Expression types: list[mypy.types.Type] @@ -2363,6 +2477,8 @@ class TypeVarExpr(TypeVarLikeExpr): __slots__ = ("values",) + __match_args__ = ("name", "values", "upper_bound") + # Value restriction: only types in the list are valid as values. If the # list is empty, there is no restriction. values: list[mypy.types.Type] @@ -2406,6 +2522,8 @@ def deserialize(cls, data: JsonDict) -> TypeVarExpr: class ParamSpecExpr(TypeVarLikeExpr): __slots__ = () + __match_args__ = ("name", "upper_bound") + def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_paramspec_expr(self) @@ -2432,7 +2550,22 @@ def deserialize(cls, data: JsonDict) -> ParamSpecExpr: class TypeVarTupleExpr(TypeVarLikeExpr): """Type variable tuple expression TypeVarTuple(...).""" - __slots__ = () + __slots__ = "tuple_fallback" + + tuple_fallback: mypy.types.Instance + + __match_args__ = ("name", "upper_bound") + + def __init__( + self, + name: str, + fullname: str, + upper_bound: mypy.types.Type, + tuple_fallback: mypy.types.Instance, + variance: int = INVARIANT, + ) -> None: + super().__init__(name, fullname, upper_bound, variance) + self.tuple_fallback = tuple_fallback def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_type_var_tuple_expr(self) @@ -2443,6 +2576,7 @@ def serialize(self) -> JsonDict: "name": self._name, "fullname": self._fullname, "upper_bound": self.upper_bound.serialize(), + "tuple_fallback": self.tuple_fallback.serialize(), "variance": self.variance, } @@ -2453,6 +2587,7 @@ def deserialize(cls, data: JsonDict) -> TypeVarTupleExpr: data["name"], data["fullname"], 
mypy.types.deserialize_type(data["upper_bound"]), + mypy.types.Instance.deserialize(data["tuple_fallback"]), data["variance"], ) @@ -2462,9 +2597,11 @@ class TypeAliasExpr(Expression): __slots__ = ("type", "tvars", "no_args", "node") + __match_args__ = ("type", "tvars", "no_args", "node") + # The target type. type: mypy.types.Type - # Names of unbound type variables used to define the alias + # Names of type variables used to define the alias tvars: list[str] # Whether this alias was defined in bare form. Used to distinguish # between @@ -2477,7 +2614,7 @@ class TypeAliasExpr(Expression): def __init__(self, node: TypeAlias) -> None: super().__init__() self.type = node.target - self.tvars = node.alias_tvars + self.tvars = [v.name for v in node.alias_tvars] self.no_args = node.no_args self.node = node @@ -2490,6 +2627,8 @@ class NamedTupleExpr(Expression): __slots__ = ("info", "is_typed") + __match_args__ = ("info",) + # The class representation of this named tuple (its tuple_type attribute contains # the tuple item types) info: TypeInfo @@ -2509,6 +2648,8 @@ class TypedDictExpr(Expression): __slots__ = ("info",) + __match_args__ = ("info",) + # The class representation of this typed dict info: TypeInfo @@ -2525,6 +2666,8 @@ class EnumCallExpr(Expression): __slots__ = ("info", "items", "values") + __match_args__ = ("info", "items", "values") + # The class representation of this enumerated type info: TypeInfo # The item names (for debugging) @@ -2561,6 +2704,8 @@ class NewTypeExpr(Expression): __slots__ = ("name", "old_type", "info") + __match_args__ = ("name", "old_type", "info") + name: str # The base type (the second argument to NewType) old_type: mypy.types.Type | None @@ -2584,6 +2729,8 @@ class AwaitExpr(Expression): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression def __init__(self, expr: Expression) -> None: @@ -2665,6 +2812,7 @@ class is generic then it will be a type constructor of higher kind. 
"inferring", "is_enum", "fallback_to_any", + "meta_fallback_to_any", "type_vars", "has_param_spec_type", "bases", @@ -2680,9 +2828,10 @@ class is generic then it will be a type constructor of higher kind. "has_type_var_tuple_type", "type_var_tuple_prefix", "type_var_tuple_suffix", + "self_type", ) - _fullname: Bogus[str] # Fully qualified name + _fullname: str # Fully qualified name # Fully qualified name for the module this type was defined in. This # information is also in the fullname, but is harder to extract in the # case of nested class definitions. @@ -2758,6 +2907,10 @@ class is generic then it will be a type constructor of higher kind. # (and __setattr__), but without the __getattr__ method. fallback_to_any: bool + # Same as above but for cases where metaclass has type Any. This will suppress + # all attribute errors only for *class object* access. + meta_fallback_to_any: bool + # Information related to type annotations. # Generic type variable names (full names) @@ -2820,10 +2973,14 @@ class is generic then it will be a type constructor of higher kind. # in case we are doing multiple semantic analysis passes. special_alias: TypeAlias | None + # Shared type variable for typing.Self in this class (if used, otherwise None). 
+ self_type: mypy.types.TypeVarType | None + FLAGS: Final = [ "is_abstract", "is_enum", "fallback_to_any", + "meta_fallback_to_any", "is_named_tuple", "is_newtype", "is_protocol", @@ -2863,6 +3020,7 @@ def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None self.is_final = False self.is_enum = False self.fallback_to_any = False + self.meta_fallback_to_any = False self._promote = [] self.alt_promote = None self.tuple_type = None @@ -2872,6 +3030,7 @@ def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None self.is_newtype = False self.is_intersection = False self.metadata = {} + self.self_type = None def add_type_vars(self) -> None: self.has_type_var_tuple_type = False @@ -2895,7 +3054,7 @@ def name(self) -> str: return self.defn.name @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname def is_generic(self) -> bool: @@ -3089,6 +3248,7 @@ def serialize(self) -> JsonDict: "metadata": self.metadata, "slots": list(sorted(self.slots)) if self.slots is not None else None, "deletable_attributes": self.deletable_attributes, + "self_type": self.self_type.serialize() if self.self_type is not None else None, } return data @@ -3145,6 +3305,8 @@ def deserialize(cls, data: JsonDict) -> TypeInfo: ti.slots = set(data["slots"]) if data["slots"] is not None else None ti.deletable_attributes = data["deletable_attributes"] set_flags(ti, data["flags"]) + st = data["self_type"] + ti.self_type = mypy.types.TypeVarType.deserialize(st) if st is not None else None return ti @@ -3209,10 +3371,9 @@ class TypeAlias(SymbolNode): class-valued attributes. See SemanticAnalyzerPass2.check_and_set_up_type_alias for details. - Aliases can be generic. Currently, mypy uses unbound type variables for - generic aliases and identifies them by name. Essentially, type aliases - work as macros that expand textually. The definition and expansion rules are - following: + Aliases can be generic. 
We use bound type variables for generic aliases, similar + to classes. Essentially, type aliases work as macros that expand textually. + The definition and expansion rules are following: 1. An alias targeting a generic class without explicit variables act as the given class (this doesn't apply to TypedDict, Tuple and Callable, which @@ -3263,11 +3424,11 @@ def f(x: B[T]) -> T: ... # without T, Any would be used here Meaning of other fields: - target: The target type. For generic aliases contains unbound type variables - as nested types. + target: The target type. For generic aliases contains bound type variables + as nested types (currently TypeVar and ParamSpec are supported). _fullname: Qualified name of this type alias. This is used in particular to track fine grained dependencies from aliases. - alias_tvars: Names of unbound type variables used to define this alias. + alias_tvars: Type variables used to define this alias. normalized: Used to distinguish between `A = List`, and `A = list`. Both are internally stored using `builtins.list` (because `typing.List` is itself an alias), while the second cannot be subscripted because of @@ -3287,6 +3448,8 @@ def f(x: B[T]) -> T: ... 
# without T, Any would be used here "eager", ) + __match_args__ = ("name", "target", "alias_tvars", "no_args") + def __init__( self, target: mypy.types.Type, @@ -3294,7 +3457,7 @@ def __init__( line: int, column: int, *, - alias_tvars: list[str] | None = None, + alias_tvars: list[mypy.types.TypeVarLikeType] | None = None, no_args: bool = False, normalized: bool = False, eager: bool = False, @@ -3344,12 +3507,16 @@ def name(self) -> str: def fullname(self) -> str: return self._fullname + @property + def has_param_spec_type(self) -> bool: + return any(isinstance(v, mypy.types.ParamSpecType) for v in self.alias_tvars) + def serialize(self) -> JsonDict: data: JsonDict = { ".class": "TypeAlias", "fullname": self._fullname, "target": self.target.serialize(), - "alias_tvars": self.alias_tvars, + "alias_tvars": [v.serialize() for v in self.alias_tvars], "no_args": self.no_args, "normalized": self.normalized, "line": self.line, @@ -3364,7 +3531,8 @@ def accept(self, visitor: NodeVisitor[T]) -> T: def deserialize(cls, data: JsonDict) -> TypeAlias: assert data[".class"] == "TypeAlias" fullname = data["fullname"] - alias_tvars = data["alias_tvars"] + alias_tvars = [mypy.types.deserialize_type(v) for v in data["alias_tvars"]] + assert all(isinstance(t, mypy.types.TypeVarLikeType) for t in alias_tvars) target = mypy.types.deserialize_type(data["target"]) no_args = data["no_args"] normalized = data["normalized"] @@ -3375,7 +3543,7 @@ def deserialize(cls, data: JsonDict) -> TypeAlias: fullname, line, column, - alias_tvars=alias_tvars, + alias_tvars=cast(List[mypy.types.TypeVarLikeType], alias_tvars), no_args=no_args, normalized=normalized, ) @@ -3602,11 +3770,7 @@ def serialize(self, prefix: str, name: str) -> JsonDict: if prefix is not None: fullname = self.node.fullname if ( - # See the comment above SymbolNode.fullname -- fullname can often be None, - # but for complex reasons it's annotated as being `Bogus[str]` instead of `str | None`, - # meaning mypy erroneously thinks the 
`fullname is not None` check here is redundant - fullname is not None # type: ignore[redundant-expr] - and "." in fullname + "." in fullname and fullname != prefix + "." + name and not (isinstance(self.node, Var) and self.node.from_module_getattr) ): diff --git a/mypy/options.py b/mypy/options.py index 8ae88c4a0d8f..f9e53535e88d 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -56,9 +56,16 @@ class BuildType: "warn_unused_ignores", } -OPTIONS_AFFECTING_CACHE: Final = (PER_MODULE_OPTIONS | {"platform", "bazel", "plugins"}) - { - "debug_cache" -} +OPTIONS_AFFECTING_CACHE: Final = ( + PER_MODULE_OPTIONS + | { + "platform", + "bazel", + "plugins", + "disable_bytearray_promotion", + "disable_memoryview_promotion", + } +) - {"debug_cache"} # Features that are currently incomplete/experimental TYPE_VAR_TUPLE: Final = "TypeVarTuple" @@ -242,6 +249,9 @@ def __init__(self) -> None: # Read cache files in fine-grained incremental mode (cache must include dependencies) self.use_fine_grained_cache = False + # Run tree.serialize() even if cache generation is disabled + self.debug_serialize = False + # Tune certain behaviors when being used as a front-end to mypyc. Set per-module # in modules being compiled. Not in the config file or command line. self.mypyc = False @@ -282,6 +292,7 @@ def __init__(self) -> None: self.enable_incomplete_features = False # deprecated self.enable_incomplete_feature: list[str] = [] self.timing_stats: str | None = None + self.line_checking_stats: str | None = None # -- test options -- # Stop after the semantic analysis phase @@ -335,6 +346,9 @@ def __init__(self) -> None: # Deprecated reverse version of the above, do not use. 
self.enable_recursive_aliases = False + self.disable_bytearray_promotion = False + self.disable_memoryview_promotion = False + # To avoid breaking plugin compatibility, keep providing new_semantic_analyzer @property def new_semantic_analyzer(self) -> bool: diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 4300626ecd9f..af09493c9cae 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -1,12 +1,16 @@ from __future__ import annotations -from mypy import checker +from enum import Enum + +from mypy import checker, errorcodes from mypy.messages import MessageBuilder from mypy.nodes import ( AssertStmt, AssignmentExpr, AssignmentStmt, BreakStmt, + ClassDef, + Context, ContinueStmt, DictionaryComprehension, Expression, @@ -15,16 +19,30 @@ FuncDef, FuncItem, GeneratorExpr, + GlobalDecl, IfStmt, + Import, + ImportFrom, + LambdaExpr, ListExpr, Lvalue, + MatchStmt, + MypyFile, NameExpr, + NonlocalDecl, RaiseStmt, ReturnStmt, + StarExpr, + SymbolTable, + TryStmt, TupleExpr, WhileStmt, WithStmt, + implicit_module_attrs, ) +from mypy.options import Options +from mypy.patterns import AsPattern, StarredPattern +from mypy.reachability import ALWAYS_TRUE, infer_pattern_value from mypy.traverser import ExtendedTraverserVisitor from mypy.types import Type, UninhabitedType @@ -52,22 +70,47 @@ def __init__( self.must_be_defined = set(must_be_defined) self.skipped = skipped + def copy(self) -> BranchState: + return BranchState( + must_be_defined=set(self.must_be_defined), + may_be_defined=set(self.may_be_defined), + skipped=self.skipped, + ) + class BranchStatement: def __init__(self, initial_state: BranchState) -> None: self.initial_state = initial_state self.branches: list[BranchState] = [ - BranchState(must_be_defined=self.initial_state.must_be_defined) + BranchState( + must_be_defined=self.initial_state.must_be_defined, + may_be_defined=self.initial_state.may_be_defined, + ) ] + def copy(self) -> BranchStatement: + result = 
BranchStatement(self.initial_state) + result.branches = [b.copy() for b in self.branches] + return result + def next_branch(self) -> None: - self.branches.append(BranchState(must_be_defined=self.initial_state.must_be_defined)) + self.branches.append( + BranchState( + must_be_defined=self.initial_state.must_be_defined, + may_be_defined=self.initial_state.may_be_defined, + ) + ) def record_definition(self, name: str) -> None: assert len(self.branches) > 0 self.branches[-1].must_be_defined.add(name) self.branches[-1].may_be_defined.discard(name) + def delete_var(self, name: str) -> None: + assert len(self.branches) > 0 + self.branches[-1].must_be_defined.discard(name) + self.branches[-1].may_be_defined.discard(name) + def record_nested_branch(self, state: BranchState) -> None: assert len(self.branches) > 0 current_branch = self.branches[-1] @@ -86,24 +129,69 @@ def is_possibly_undefined(self, name: str) -> bool: assert len(self.branches) > 0 return name in self.branches[-1].may_be_defined + def is_undefined(self, name: str) -> bool: + assert len(self.branches) > 0 + branch = self.branches[-1] + return name not in branch.may_be_defined and name not in branch.must_be_defined + + def is_defined_in_a_branch(self, name: str) -> bool: + assert len(self.branches) > 0 + for b in self.branches: + if name in b.must_be_defined or name in b.may_be_defined: + return True + return False + def done(self) -> BranchState: - branches = [b for b in self.branches if not b.skipped] - if len(branches) == 0: - return BranchState(skipped=True) - if len(branches) == 1: - return branches[0] - - # must_be_defined is a union of must_be_defined of all branches. - must_be_defined = set(branches[0].must_be_defined) - for b in branches[1:]: - must_be_defined.intersection_update(b.must_be_defined) - # may_be_defined are all variables that are not must be defined. + # First, compute all vars, including skipped branches. 
We include skipped branches + # because our goal is to capture all variables that semantic analyzer would + # consider defined. all_vars = set() - for b in branches: + for b in self.branches: all_vars.update(b.may_be_defined) all_vars.update(b.must_be_defined) + # For the rest of the things, we only care about branches that weren't skipped. + non_skipped_branches = [b for b in self.branches if not b.skipped] + if len(non_skipped_branches) > 0: + must_be_defined = non_skipped_branches[0].must_be_defined + for b in non_skipped_branches[1:]: + must_be_defined.intersection_update(b.must_be_defined) + else: + must_be_defined = set() + # Everything that wasn't defined in all branches but was defined + # in at least one branch should be in `may_be_defined`! may_be_defined = all_vars.difference(must_be_defined) - return BranchState(may_be_defined=may_be_defined, must_be_defined=must_be_defined) + return BranchState( + must_be_defined=must_be_defined, + may_be_defined=may_be_defined, + skipped=len(non_skipped_branches) == 0, + ) + + +class ScopeType(Enum): + Global = 1 + Class = 2 + Func = 3 + Generator = 3 + + +class Scope: + def __init__(self, stmts: list[BranchStatement], scope_type: ScopeType) -> None: + self.branch_stmts: list[BranchStatement] = stmts + self.scope_type = scope_type + self.undefined_refs: dict[str, set[NameExpr]] = {} + + def copy(self) -> Scope: + result = Scope([s.copy() for s in self.branch_stmts], self.scope_type) + result.undefined_refs = self.undefined_refs.copy() + return result + + def record_undefined_ref(self, o: NameExpr) -> None: + if o.name not in self.undefined_refs: + self.undefined_refs[o.name] = set() + self.undefined_refs[o.name].add(o) + + def pop_undefined_ref(self, name: str) -> set[NameExpr]: + return self.undefined_refs.pop(name, set()) class DefinedVariableTracker: @@ -111,69 +199,173 @@ class DefinedVariableTracker: def __init__(self) -> None: # There's always at least one scope. 
Within each scope, there's at least one "global" BranchingStatement. - self.scopes: list[list[BranchStatement]] = [[BranchStatement(BranchState())]] - - def _scope(self) -> list[BranchStatement]: + self.scopes: list[Scope] = [Scope([BranchStatement(BranchState())], ScopeType.Global)] + # disable_branch_skip is used to disable skipping a branch due to a return/raise/etc. This is useful + # in things like try/except/finally statements. + self.disable_branch_skip = False + + def copy(self) -> DefinedVariableTracker: + result = DefinedVariableTracker() + result.scopes = [s.copy() for s in self.scopes] + result.disable_branch_skip = self.disable_branch_skip + return result + + def _scope(self) -> Scope: assert len(self.scopes) > 0 return self.scopes[-1] - def enter_scope(self) -> None: - assert len(self._scope()) > 0 - self.scopes.append([BranchStatement(self._scope()[-1].branches[-1])]) + def enter_scope(self, scope_type: ScopeType) -> None: + assert len(self._scope().branch_stmts) > 0 + self.scopes.append( + Scope([BranchStatement(self._scope().branch_stmts[-1].branches[-1])], scope_type) + ) def exit_scope(self) -> None: self.scopes.pop() + def in_scope(self, scope_type: ScopeType) -> bool: + return self._scope().scope_type == scope_type + def start_branch_statement(self) -> None: - assert len(self._scope()) > 0 - self._scope().append(BranchStatement(self._scope()[-1].branches[-1])) + assert len(self._scope().branch_stmts) > 0 + self._scope().branch_stmts.append( + BranchStatement(self._scope().branch_stmts[-1].branches[-1]) + ) def next_branch(self) -> None: - assert len(self._scope()) > 1 - self._scope()[-1].next_branch() + assert len(self._scope().branch_stmts) > 1 + self._scope().branch_stmts[-1].next_branch() def end_branch_statement(self) -> None: - assert len(self._scope()) > 1 - result = self._scope().pop().done() - self._scope()[-1].record_nested_branch(result) + assert len(self._scope().branch_stmts) > 1 + result = self._scope().branch_stmts.pop().done() + 
self._scope().branch_stmts[-1].record_nested_branch(result) def skip_branch(self) -> None: # Only skip branch if we're outside of "root" branch statement. - if len(self._scope()) > 1: - self._scope()[-1].skip_branch() + if len(self._scope().branch_stmts) > 1 and not self.disable_branch_skip: + self._scope().branch_stmts[-1].skip_branch() - def record_declaration(self, name: str) -> None: + def record_definition(self, name: str) -> None: + assert len(self.scopes) > 0 + assert len(self.scopes[-1].branch_stmts) > 0 + self._scope().branch_stmts[-1].record_definition(name) + + def delete_var(self, name: str) -> None: assert len(self.scopes) > 0 - assert len(self.scopes[-1]) > 0 - self._scope()[-1].record_definition(name) + assert len(self.scopes[-1].branch_stmts) > 0 + self._scope().branch_stmts[-1].delete_var(name) + + def record_undefined_ref(self, o: NameExpr) -> None: + """Records an undefined reference. These can later be retrieved via `pop_undefined_ref`.""" + assert len(self.scopes) > 0 + self._scope().record_undefined_ref(o) + + def pop_undefined_ref(self, name: str) -> set[NameExpr]: + """If name has previously been reported as undefined, the NameExpr that was called will be returned.""" + assert len(self.scopes) > 0 + return self._scope().pop_undefined_ref(name) def is_possibly_undefined(self, name: str) -> bool: - assert len(self._scope()) > 0 + assert len(self._scope().branch_stmts) > 0 # A variable is undefined if it's in a set of `may_be_defined` but not in `must_be_defined`. - # Cases where a variable is not defined altogether are handled by semantic analyzer. 
- return self._scope()[-1].is_possibly_undefined(name) + return self._scope().branch_stmts[-1].is_possibly_undefined(name) + + def is_defined_in_different_branch(self, name: str) -> bool: + """This will return true if a variable is defined in a branch that's not the current branch.""" + assert len(self._scope().branch_stmts) > 0 + stmt = self._scope().branch_stmts[-1] + if not stmt.is_undefined(name): + return False + for stmt in self._scope().branch_stmts: + if stmt.is_defined_in_a_branch(name): + return True + return False + def is_undefined(self, name: str) -> bool: + assert len(self._scope().branch_stmts) > 0 + return self._scope().branch_stmts[-1].is_undefined(name) + + +class Loop: + def __init__(self) -> None: + self.has_break = False -class PartiallyDefinedVariableVisitor(ExtendedTraverserVisitor): - """Detect variables that are defined only part of the time. - This visitor detects the following case: +class PossiblyUndefinedVariableVisitor(ExtendedTraverserVisitor): + """Detects the following cases: + - A variable that's defined only part of the time. + - If a variable is used before definition + + An example of a partial definition: if foo(): x = 1 print(x) # Error: "x" may be undefined. + Example of a used before definition: + x = y + y: int = 2 + Note that this code does not detect variables not defined in any of the branches -- that is handled by the semantic analyzer. 
""" - def __init__(self, msg: MessageBuilder, type_map: dict[Expression, Type]) -> None: + def __init__( + self, + msg: MessageBuilder, + type_map: dict[Expression, Type], + options: Options, + names: SymbolTable, + ) -> None: self.msg = msg self.type_map = type_map + self.options = options + self.builtins = SymbolTable() + builtins_mod = names.get("__builtins__", None) + if builtins_mod: + assert isinstance(builtins_mod.node, MypyFile) + self.builtins = builtins_mod.node.names + self.loops: list[Loop] = [] + self.try_depth = 0 self.tracker = DefinedVariableTracker() + for name in implicit_module_attrs: + self.tracker.record_definition(name) + + def var_used_before_def(self, name: str, context: Context) -> None: + if self.msg.errors.is_error_code_enabled(errorcodes.USED_BEFORE_DEF): + self.msg.var_used_before_def(name, context) + + def variable_may_be_undefined(self, name: str, context: Context) -> None: + if self.msg.errors.is_error_code_enabled(errorcodes.POSSIBLY_UNDEFINED): + self.msg.variable_may_be_undefined(name, context) + + def process_definition(self, name: str) -> None: + # Was this name previously used? If yes, it's a used-before-definition error. + if not self.tracker.in_scope(ScopeType.Class): + # Errors in class scopes are caught by the semantic analyzer. 
+ refs = self.tracker.pop_undefined_ref(name) + for ref in refs: + if self.loops: + self.variable_may_be_undefined(name, ref) + else: + self.var_used_before_def(name, ref) + self.tracker.record_definition(name) + + def visit_global_decl(self, o: GlobalDecl) -> None: + for name in o.names: + self.process_definition(name) + super().visit_global_decl(o) + + def visit_nonlocal_decl(self, o: NonlocalDecl) -> None: + for name in o.names: + self.process_definition(name) + super().visit_nonlocal_decl(o) def process_lvalue(self, lvalue: Lvalue | None) -> None: if isinstance(lvalue, NameExpr): - self.tracker.record_declaration(lvalue.name) + self.process_definition(lvalue.name) + elif isinstance(lvalue, StarExpr): + self.process_lvalue(lvalue.expr) elif isinstance(lvalue, (ListExpr, TupleExpr)): for item in lvalue.items: self.process_lvalue(item) @@ -192,32 +384,58 @@ def visit_if_stmt(self, o: IfStmt) -> None: e.accept(self) self.tracker.start_branch_statement() for b in o.body: + if b.is_unreachable: + continue b.accept(self) self.tracker.next_branch() if o.else_body: - o.else_body.accept(self) + if not o.else_body.is_unreachable: + o.else_body.accept(self) + else: + self.tracker.skip_branch() + self.tracker.end_branch_statement() + + def visit_match_stmt(self, o: MatchStmt) -> None: + self.tracker.start_branch_statement() + o.subject.accept(self) + for i in range(len(o.patterns)): + pattern = o.patterns[i] + pattern.accept(self) + guard = o.guards[i] + if guard is not None: + guard.accept(self) + if not o.bodies[i].is_unreachable: + o.bodies[i].accept(self) + else: + self.tracker.skip_branch() + is_catchall = infer_pattern_value(pattern) == ALWAYS_TRUE + if not is_catchall: + self.tracker.next_branch() self.tracker.end_branch_statement() def visit_func_def(self, o: FuncDef) -> None: - self.tracker.enter_scope() + self.process_definition(o.name) + self.tracker.enter_scope(ScopeType.Func) super().visit_func_def(o) self.tracker.exit_scope() def visit_func(self, o: FuncItem) 
-> None: + if o.is_dynamic() and not self.options.check_untyped_defs: + return if o.arguments is not None: for arg in o.arguments: - self.tracker.record_declaration(arg.variable.name) + self.tracker.record_definition(arg.variable.name) super().visit_func(o) def visit_generator_expr(self, o: GeneratorExpr) -> None: - self.tracker.enter_scope() + self.tracker.enter_scope(ScopeType.Generator) for idx in o.indices: self.process_lvalue(idx) super().visit_generator_expr(o) self.tracker.exit_scope() def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> None: - self.tracker.enter_scope() + self.tracker.enter_scope(ScopeType.Generator) for idx in o.indices: self.process_lvalue(idx) super().visit_dictionary_comprehension(o) @@ -228,16 +446,33 @@ def visit_for_stmt(self, o: ForStmt) -> None: self.process_lvalue(o.index) o.index.accept(self) self.tracker.start_branch_statement() + loop = Loop() + self.loops.append(loop) o.body.accept(self) self.tracker.next_branch() - if o.else_body: - o.else_body.accept(self) self.tracker.end_branch_statement() + if o.else_body is not None: + # If the loop has a `break` inside, `else` is executed conditionally. + # If the loop doesn't have a `break` either the function will return or + # execute the `else`. 
+ has_break = loop.has_break + if has_break: + self.tracker.start_branch_statement() + self.tracker.next_branch() + o.else_body.accept(self) + if has_break: + self.tracker.end_branch_statement() + self.loops.pop() def visit_return_stmt(self, o: ReturnStmt) -> None: super().visit_return_stmt(o) self.tracker.skip_branch() + def visit_lambda_expr(self, o: LambdaExpr) -> None: + self.tracker.enter_scope(ScopeType.Func) + super().visit_lambda_expr(o) + self.tracker.exit_scope() + def visit_assert_stmt(self, o: AssertStmt) -> None: super().visit_assert_stmt(o) if checker.is_false_literal(o.expr): @@ -253,6 +488,8 @@ def visit_continue_stmt(self, o: ContinueStmt) -> None: def visit_break_stmt(self, o: BreakStmt) -> None: super().visit_break_stmt(o) + if self.loops: + self.loops[-1].has_break = True self.tracker.skip_branch() def visit_expression_stmt(self, o: ExpressionStmt) -> None: @@ -260,21 +497,135 @@ def visit_expression_stmt(self, o: ExpressionStmt) -> None: self.tracker.skip_branch() super().visit_expression_stmt(o) + def visit_try_stmt(self, o: TryStmt) -> None: + """ + Note that finding undefined vars in `finally` requires different handling from + the rest of the code. In particular, we want to disallow skipping branches due to jump + statements in except/else clauses for finally but not for other cases. Imagine a case like: + def f() -> int: + try: + x = 1 + except: + # This jump statement needs to be handled differently depending on whether or + # not we're trying to process `finally` or not. + return 0 + finally: + # `x` may be undefined here. + pass + # `x` is always defined here. + return x + """ + self.try_depth += 1 + if o.finally_body is not None: + # In order to find undefined vars in `finally`, we need to + # process try/except with branch skipping disabled. However, for the rest of the code + # after finally, we need to process try/except with branch skipping enabled. + # Therefore, we need to process try/finally twice. 
+ # Because processing is not idempotent, we should make a copy of the tracker. + old_tracker = self.tracker.copy() + self.tracker.disable_branch_skip = True + self.process_try_stmt(o) + self.tracker = old_tracker + self.process_try_stmt(o) + self.try_depth -= 1 + + def process_try_stmt(self, o: TryStmt) -> None: + """ + Processes try statement decomposing it into the following: + if ...: + body + else_body + elif ...: + except 1 + elif ...: + except 2 + else: + except n + finally + """ + self.tracker.start_branch_statement() + o.body.accept(self) + if o.else_body is not None: + o.else_body.accept(self) + if len(o.handlers) > 0: + assert len(o.handlers) == len(o.vars) == len(o.types) + for i in range(len(o.handlers)): + self.tracker.next_branch() + exc_type = o.types[i] + if exc_type is not None: + exc_type.accept(self) + var = o.vars[i] + if var is not None: + self.process_definition(var.name) + var.accept(self) + o.handlers[i].accept(self) + if var is not None: + self.tracker.delete_var(var.name) + self.tracker.end_branch_statement() + + if o.finally_body is not None: + o.finally_body.accept(self) + def visit_while_stmt(self, o: WhileStmt) -> None: o.expr.accept(self) self.tracker.start_branch_statement() + loop = Loop() + self.loops.append(loop) o.body.accept(self) + has_break = loop.has_break if not checker.is_true_literal(o.expr): + # If this is a loop like `while True`, we can consider the body to be + # a single branch statement (we're guaranteed that the body is executed at least once). + # If not, call next_branch() to make all variables defined there conditional. self.tracker.next_branch() + self.tracker.end_branch_statement() + if o.else_body is not None: + # If the loop has a `break` inside, `else` is executed conditionally. + # If the loop doesn't have a `break` either the function will return or + # execute the `else`. 
+ if has_break: + self.tracker.start_branch_statement() + self.tracker.next_branch() if o.else_body: o.else_body.accept(self) - self.tracker.end_branch_statement() + if has_break: + self.tracker.end_branch_statement() + self.loops.pop() + + def visit_as_pattern(self, o: AsPattern) -> None: + if o.name is not None: + self.process_lvalue(o.name) + super().visit_as_pattern(o) + + def visit_starred_pattern(self, o: StarredPattern) -> None: + if o.capture is not None: + self.process_lvalue(o.capture) + super().visit_starred_pattern(o) def visit_name_expr(self, o: NameExpr) -> None: + if o.name in self.builtins: + return if self.tracker.is_possibly_undefined(o.name): - self.msg.variable_may_be_undefined(o.name, o) + # A variable is only defined in some branches. + self.variable_may_be_undefined(o.name, o) # We don't want to report the error on the same variable multiple times. - self.tracker.record_declaration(o.name) + self.tracker.record_definition(o.name) + elif self.tracker.is_defined_in_different_branch(o.name): + # A variable is defined in one branch but used in a different branch. + if self.loops or self.try_depth > 0: + # If we're in a loop or in a try, we can't be sure that this variable + # is undefined. Report it as "may be undefined". + self.variable_may_be_undefined(o.name, o) + else: + self.var_used_before_def(o.name, o) + elif self.tracker.is_undefined(o.name): + # A variable is undefined. It could be due to two things: + # 1. A variable is just totally undefined + # 2. The variable is defined later in the code. + # Case (1) will be caught by semantic analyzer. Case (2) is a forward ref that should + # be caught by this visitor. Save the ref for later, so that if we see a definition, + # we know it's a used-before-definition scenario. 
+ self.tracker.record_undefined_ref(o) super().visit_name_expr(o) def visit_with_stmt(self, o: WithStmt) -> None: @@ -282,3 +633,30 @@ def visit_with_stmt(self, o: WithStmt) -> None: expr.accept(self) self.process_lvalue(idx) o.body.accept(self) + + def visit_class_def(self, o: ClassDef) -> None: + self.process_definition(o.name) + self.tracker.enter_scope(ScopeType.Class) + super().visit_class_def(o) + self.tracker.exit_scope() + + def visit_import(self, o: Import) -> None: + for mod, alias in o.ids: + if alias is not None: + self.tracker.record_definition(alias) + else: + # When you do `import x.y`, only `x` becomes defined. + names = mod.split(".") + if len(names) > 0: + # `names` should always be nonempty, but we don't want mypy + # to crash on invalid code. + self.tracker.record_definition(names[0]) + super().visit_import(o) + + def visit_import_from(self, o: ImportFrom) -> None: + for mod, alias in o.names: + name = alias + if name is None: + name = mod + self.tracker.record_definition(name) + super().visit_import_from(o) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 17f1794d8c75..50d2955d2584 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -3,7 +3,7 @@ from __future__ import annotations from typing import Iterable, List, cast -from typing_extensions import Final +from typing_extensions import Final, Literal import mypy.plugin # To avoid circular imports. from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type @@ -736,7 +736,11 @@ def _make_frozen(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) if attribute.name in ctx.cls.info.names: # This variable belongs to this class so we can modify it. node = ctx.cls.info.names[attribute.name].node - assert isinstance(node, Var) + if not isinstance(node, Var): + # The superclass attribute was overridden with a non-variable. + # No need to do anything here, override will be verified during + # type checking. 
+ continue node.is_property = True else: # This variable belongs to a super class so create new Var so we @@ -752,13 +756,14 @@ def _add_init( ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute], adder: MethodAdder, - method_name: str, + method_name: Literal["__init__", "__attrs_init__"], ) -> None: """Generate an __init__ method for the attributes and add it to the class.""" - # Convert attributes to arguments with kw_only arguments at the end of + # Convert attributes to arguments with kw_only arguments at the end of # the argument list pos_args = [] kw_only_args = [] + sym_table = ctx.cls.info.names for attribute in attributes: if not attribute.init: continue @@ -766,6 +771,13 @@ def _add_init( kw_only_args.append(attribute.argument(ctx)) else: pos_args.append(attribute.argument(ctx)) + + # If the attribute is Final, present in `__init__` and has + # no default, make sure it doesn't error later. + if not attribute.has_default and attribute.name in sym_table: + sym_node = sym_table[attribute.name].node + if isinstance(sym_node, Var) and sym_node.is_final: + sym_node.final_set_in_init = True args = pos_args + kw_only_args if all( # We use getattr rather than instance checks because the variable.type @@ -816,7 +828,7 @@ def _add_attrs_magic_attribute( ctx.cls, MAGIC_ATTR_NAME, TupleType(attributes_types, fallback=attributes_type), - fullname=f"{ctx.cls.fullname}.{attr_name}", + fullname=f"{ctx.cls.fullname}.{MAGIC_ATTR_NAME}", override_allow_incompatible=True, is_classvar=True, ) diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index 07cd5dc7de7f..a2a38f256da3 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -19,7 +19,7 @@ Var, ) from mypy.plugin import CheckerPluginInterface, ClassDefContext, SemanticAnalyzerPluginInterface -from mypy.semanal import ALLOW_INCOMPATIBLE_OVERRIDE, set_callable_name +from mypy.semanal_shared import ALLOW_INCOMPATIBLE_OVERRIDE, set_callable_name from mypy.typeops import ( # noqa: F401 # Part of 
public API try_getting_str_literals as try_getting_str_literals, ) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 26bc8ae80fdb..75496d5e56f9 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -2,8 +2,10 @@ from __future__ import annotations +from typing import Optional from typing_extensions import Final +from mypy.expandtype import expand_type from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, @@ -50,6 +52,7 @@ TypeVarType, get_proper_type, ) +from mypy.typevars import fill_typevars # The set of decorators that generate dataclasses. dataclass_makers: Final = {"dataclass", "dataclasses.dataclass"} @@ -83,7 +86,7 @@ def __init__( self.info = info self.kw_only = kw_only - def to_argument(self) -> Argument: + def to_argument(self, current_info: TypeInfo) -> Argument: arg_kind = ARG_POS if self.kw_only and self.has_default: arg_kind = ARG_NAMED_OPT @@ -92,11 +95,23 @@ def to_argument(self) -> Argument: elif not self.kw_only and self.has_default: arg_kind = ARG_OPT return Argument( - variable=self.to_var(), type_annotation=self.type, initializer=None, kind=arg_kind + variable=self.to_var(current_info), + type_annotation=self.expand_type(current_info), + initializer=None, + kind=arg_kind, ) - def to_var(self) -> Var: - return Var(self.name, self.type) + def expand_type(self, current_info: TypeInfo) -> Optional[Type]: + if self.type is not None and self.info.self_type is not None: + # In general, it is not safe to call `expand_type()` during semantic analyzis, + # however this plugin is called very late, so all types should be fully ready. + # Also, it is tricky to avoid eager expansion of Self types here (e.g. because + # we serialize attributes). 
+ return expand_type(self.type, {self.info.self_type.id: fill_typevars(current_info)}) + return self.type + + def to_var(self, current_info: TypeInfo) -> Var: + return Var(self.name, self.expand_type(current_info)) def serialize(self) -> JsonDict: assert self.type @@ -175,11 +190,12 @@ def transform(self) -> bool: and attributes ): - args = [ - attr.to_argument() - for attr in attributes - if attr.is_in_init and not self._is_kw_only_type(attr.type) - ] + with state.strict_optional_set(ctx.api.options.strict_optional): + args = [ + attr.to_argument(info) + for attr in attributes + if attr.is_in_init and not self._is_kw_only_type(attr.type) + ] if info.fallback_to_any: # Make positional args optional since we don't know their order. @@ -548,7 +564,7 @@ def _freeze(self, attributes: list[DataclassAttribute]) -> None: if isinstance(var, Var): var.is_property = True else: - var = attr.to_var() + var = attr.to_var(info) var.info = info var.is_property = True var._fullname = info.fullname + "." 
+ var.name @@ -567,7 +583,7 @@ def _propertize_callables( info = self._ctx.cls.info for attr in attributes: if isinstance(get_proper_type(attr.type), CallableType): - var = attr.to_var() + var = attr.to_var(info) var.info = info var.is_property = True var.is_settable_property = settable diff --git a/mypy/plugins/enums.py b/mypy/plugins/enums.py index 75b301252f06..1acf42d11ee6 100644 --- a/mypy/plugins/enums.py +++ b/mypy/plugins/enums.py @@ -19,7 +19,7 @@ from mypy.nodes import TypeInfo from mypy.semanal_enum import ENUM_BASES from mypy.subtypes import is_equivalent -from mypy.typeops import make_simplified_union +from mypy.typeops import fixup_partial_type, make_simplified_union from mypy.types import CallableType, Instance, LiteralType, ProperType, Type, get_proper_type ENUM_NAME_ACCESS: Final = {f"{prefix}.name" for prefix in ENUM_BASES} | { @@ -77,6 +77,7 @@ def _infer_value_type_with_auto_fallback( """ if proper_type is None: return None + proper_type = get_proper_type(fixup_partial_type(proper_type)) if not (isinstance(proper_type, Instance) and proper_type.type.fullname == "enum.auto"): return proper_type assert isinstance(ctx.type, Instance), "An incorrect ctx.type was passed." 
diff --git a/mypy/report.py b/mypy/report.py index 37b7497f1371..75c372200ca3 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -353,7 +353,7 @@ def indentation_level(self, line_number: int) -> int | None: return None def visit_func_def(self, defn: FuncDef) -> None: - start_line = defn.get_line() - 1 + start_line = defn.line - 1 start_indent = None # When a function is decorated, sometimes the start line will point to # whitespace or comments between the decorator and the function, so @@ -637,51 +637,48 @@ def on_file( etree.SubElement(class_element, "methods") lines_element = etree.SubElement(class_element, "lines") - with tokenize.open(path) as input_file: - class_lines_covered = 0 - class_total_lines = 0 - for lineno, _ in enumerate(input_file, 1): - status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) - hits = 0 - branch = False - if status == stats.TYPE_EMPTY: - continue - class_total_lines += 1 - if status != stats.TYPE_ANY: - class_lines_covered += 1 - hits = 1 - if status == stats.TYPE_IMPRECISE: - branch = True - file_info.counts[status] += 1 - line_element = etree.SubElement( - lines_element, - "line", - branch=str(branch).lower(), - hits=str(hits), - number=str(lineno), - precision=stats.precision_names[status], - ) - if branch: - line_element.attrib["condition-coverage"] = "50% (1/2)" - class_element.attrib["branch-rate"] = "0" - class_element.attrib["line-rate"] = get_line_rate( - class_lines_covered, class_total_lines + class_lines_covered = 0 + class_total_lines = 0 + for lineno, _ in iterate_python_lines(path): + status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) + hits = 0 + branch = False + if status == stats.TYPE_EMPTY: + continue + class_total_lines += 1 + if status != stats.TYPE_ANY: + class_lines_covered += 1 + hits = 1 + if status == stats.TYPE_IMPRECISE: + branch = True + file_info.counts[status] += 1 + line_element = etree.SubElement( + lines_element, + "line", + branch=str(branch).lower(), + hits=str(hits), + number=str(lineno), 
+ precision=stats.precision_names[status], ) - # parent_module is set to whichever module contains this file. For most files, we want - # to simply strip the last element off of the module. But for __init__.py files, - # the module == the parent module. - parent_module = file_info.module.rsplit(".", 1)[0] - if file_info.name.endswith("__init__.py"): - parent_module = file_info.module - - if parent_module not in self.root_package.packages: - self.root_package.packages[parent_module] = CoberturaPackage(parent_module) - current_package = self.root_package.packages[parent_module] - packages_to_update = [self.root_package, current_package] - for package in packages_to_update: - package.total_lines += class_total_lines - package.covered_lines += class_lines_covered - current_package.classes[class_name] = class_element + if branch: + line_element.attrib["condition-coverage"] = "50% (1/2)" + class_element.attrib["branch-rate"] = "0" + class_element.attrib["line-rate"] = get_line_rate(class_lines_covered, class_total_lines) + # parent_module is set to whichever module contains this file. For most files, we want + # to simply strip the last element off of the module. But for __init__.py files, + # the module == the parent module. 
+ parent_module = file_info.module.rsplit(".", 1)[0] + if file_info.name.endswith("__init__.py"): + parent_module = file_info.module + + if parent_module not in self.root_package.packages: + self.root_package.packages[parent_module] = CoberturaPackage(parent_module) + current_package = self.root_package.packages[parent_module] + packages_to_update = [self.root_package, current_package] + for package in packages_to_update: + package.total_lines += class_total_lines + package.covered_lines += class_lines_covered + current_package.classes[class_name] = class_element def on_finish(self) -> None: self.root.attrib["line-rate"] = get_line_rate( diff --git a/mypy/semanal.py b/mypy/semanal.py index 9fca74b71872..79302b4d08e1 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -51,10 +51,11 @@ from __future__ import annotations from contextlib import contextmanager -from typing import Any, Callable, Iterable, Iterator, List, TypeVar, cast +from typing import Any, Callable, Collection, Iterable, Iterator, List, TypeVar, cast from typing_extensions import Final, TypeAlias as _TypeAlias from mypy import errorcodes as codes, message_registry +from mypy.constant_fold import constant_fold_expr from mypy.errorcodes import ErrorCode from mypy.errors import Errors, report_internal_error from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type @@ -69,6 +70,8 @@ from mypy.nodes import ( ARG_NAMED, ARG_POS, + ARG_STAR, + ARG_STAR2, CONTRAVARIANT, COVARIANT, GDEF, @@ -89,7 +92,6 @@ AwaitExpr, Block, BreakStmt, - BytesExpr, CallExpr, CastExpr, ClassDef, @@ -106,7 +108,6 @@ Expression, ExpressionStmt, FakeExpression, - FloatExpr, ForStmt, FuncBase, FuncDef, @@ -119,7 +120,6 @@ ImportBase, ImportFrom, IndexExpr, - IntExpr, LambdaExpr, ListComprehension, ListExpr, @@ -194,6 +194,7 @@ Plugin, SemanticAnalyzerPluginInterface, ) +from mypy.plugins import dataclasses as dataclasses_plugin from mypy.reachability import ( ALWAYS_FALSE, ALWAYS_TRUE, @@ -208,6 +209,7 @@ from 
mypy.semanal_namedtuple import NamedTupleAnalyzer from mypy.semanal_newtype import NewTypeAnalyzer from mypy.semanal_shared import ( + ALLOW_INCOMPATIBLE_OVERRIDE, PRIORITY_FALLBACKS, SemanticAnalyzerInterface, calculate_tuple_fallback, @@ -217,12 +219,14 @@ from mypy.semanal_typeddict import TypedDictAnalyzer from mypy.tvar_scope import TypeVarLikeScope from mypy.typeanal import ( + SELF_TYPE_NAMES, TypeAnalyser, TypeVarLikeList, TypeVarLikeQuery, analyze_type_alias, check_for_explicit_any, detect_diverging_alias, + find_self_type, fix_instance_types, has_any_from_unimported_type, no_subscript_builtin_alias, @@ -232,6 +236,7 @@ from mypy.typeops import function_type, get_type_vars from mypy.types import ( ASSERT_TYPE_NAMES, + DATACLASS_TRANSFORM_NAMES, FINAL_DECORATOR_NAMES, FINAL_TYPE_NAMES, NEVER_NAMES, @@ -246,7 +251,6 @@ FunctionLike, Instance, LiteralType, - LiteralValue, NoneType, Overloaded, Parameters, @@ -269,6 +273,7 @@ get_proper_types, invalid_recursive_alias, is_named_instance, + store_argument_type, ) from mypy.typevars import fill_typevars from mypy.util import ( @@ -302,10 +307,6 @@ # available very early on. CORE_BUILTIN_CLASSES: Final = ["object", "bool", "function"] -# Subclasses can override these Var attributes with incompatible types. This can also be -# set for individual attributes using 'allow_incompatible_override' of Var. -ALLOW_INCOMPATIBLE_OVERRIDE: Final = ("__slots__", "__deletable__", "__match_args__") - # Used for tracking incomplete references Tag: _TypeAlias = int @@ -337,7 +338,7 @@ class SemanticAnalyzer( # Nested block depths of scopes block_depth: list[int] # TypeInfo of directly enclosing class (or None) - type: TypeInfo | None = None + _type: TypeInfo | None = None # Stack of outer classes (the second tuple item contains tvars). 
type_stack: list[TypeInfo | None] # Type variables bound by the current scope, be it class or function @@ -416,7 +417,7 @@ def __init__( FuncItem | GeneratorExpr | DictionaryComprehension, SymbolTable ] = {} self.imports = set() - self.type = None + self._type = None self.type_stack = [] # Are the namespaces of classes being processed complete? self.incomplete_type_stack: list[bool] = [] @@ -454,8 +455,17 @@ def __init__( # rvalues while temporarily setting this to True. self.basic_type_applications = False + # Used to temporarily enable unbound type variables in some contexts. Namely, + # in base class expressions, and in right hand sides of type aliases. Do not add + # new uses of this, as this may cause leaking `UnboundType`s to type checking. + self.allow_unbound_tvars = False + # mypyc doesn't properly handle implementing an abstractproperty # with a regular attribute so we make them properties + @property + def type(self) -> TypeInfo | None: + return self._type + @property def is_stub_file(self) -> bool: return self._is_stub_file @@ -468,6 +478,15 @@ def is_typeshed_stub_file(self) -> bool: def final_iteration(self) -> bool: return self._final_iteration + @contextmanager + def allow_unbound_tvars_set(self) -> Iterator[None]: + old = self.allow_unbound_tvars + self.allow_unbound_tvars = True + try: + yield + finally: + self.allow_unbound_tvars = old + # # Preparing module (performed before semantic analysis) # @@ -595,31 +614,34 @@ def refresh_top_level(self, file_node: MypyFile) -> None: def add_implicit_module_attrs(self, file_node: MypyFile) -> None: """Manually add implicit definitions of module '__name__' etc.""" + str_type: Type | None = self.named_type_or_none("builtins.str") + if str_type is None: + str_type = UnboundType("builtins.str") for name, t in implicit_module_attrs.items(): if name == "__doc__": - typ: Type = UnboundType("__builtins__.str") + typ: Type = str_type elif name == "__path__": if not file_node.is_package_init_file(): continue # Need 
to construct the type ourselves, to avoid issues with __builtins__.list # not being subscriptable or typing.List not getting bound - sym = self.lookup_qualified("__builtins__.list", Context()) - if not sym: - continue - node = sym.node - if not isinstance(node, TypeInfo): - self.defer(node) + inst = self.named_type_or_none("builtins.list", [str_type]) + if inst is None: + assert not self.final_iteration, "Cannot find builtins.list to add __path__" + self.defer() return - typ = Instance(node, [self.str_type()]) + typ = inst elif name == "__annotations__": - sym = self.lookup_qualified("__builtins__.dict", Context(), suppress_errors=True) - if not sym: - continue - node = sym.node - if not isinstance(node, TypeInfo): - self.defer(node) + inst = self.named_type_or_none( + "builtins.dict", [str_type, AnyType(TypeOfAny.special_form)] + ) + if inst is None: + assert ( + not self.final_iteration + ), "Cannot find builtins.dict to add __annotations__" + self.defer() return - typ = Instance(node, [self.str_type(), AnyType(TypeOfAny.special_form)]) + typ = inst else: assert t is not None, f"type should be specified for {name}" typ = UnboundType(t) @@ -769,7 +791,7 @@ def file_context( if active_type: scope.leave_class() self.leave_class() - self.type = None + self._type = None self.incomplete_type_stack.pop() del self.options @@ -810,7 +832,10 @@ def analyze_func_def(self, defn: FuncDef) -> None: if defn.type: assert isinstance(defn.type, CallableType) - self.update_function_type_variables(defn.type, defn) + has_self_type = self.update_function_type_variables(defn.type, defn) + else: + has_self_type = False + self.function_stack.pop() if self.is_class_scope(): @@ -821,7 +846,7 @@ def analyze_func_def(self, defn: FuncDef) -> None: assert isinstance(defn.type, CallableType) if isinstance(get_proper_type(defn.type.ret_type), AnyType): defn.type = defn.type.copy_modified(ret_type=NoneType()) - self.prepare_method_signature(defn, self.type) + self.prepare_method_signature(defn, 
self.type, has_self_type) # Analyze function signature with self.tvar_scope_frame(self.tvar_scope.method_frame()): @@ -840,9 +865,14 @@ def analyze_func_def(self, defn: FuncDef) -> None: assert isinstance(result, ProperType) if isinstance(result, CallableType): result = self.remove_unpack_kwargs(defn, result) + if has_self_type and self.type is not None: + info = self.type + if info.self_type is not None: + result.variables = [info.self_type] + list(result.variables) defn.type = result self.add_type_alias_deps(analyzer.aliases_used) self.check_function_signature(defn) + self.check_paramspec_definition(defn) if isinstance(defn, FuncDef): assert isinstance(defn.type, CallableType) defn.type = set_callable_name(defn.type, defn) @@ -911,7 +941,7 @@ def remove_unpack_kwargs(self, defn: FuncDef, typ: CallableType) -> CallableType new_arg_types = typ.arg_types[:-1] + [last_type] return typ.copy_modified(arg_types=new_arg_types, unpack_kwargs=True) - def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None: + def prepare_method_signature(self, func: FuncDef, info: TypeInfo, has_self_type: bool) -> None: """Check basic signature validity and tweak annotation of self/cls argument.""" # Only non-static methods are special. functype = func.type @@ -919,14 +949,58 @@ def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None: if func.name in ["__init_subclass__", "__class_getitem__"]: func.is_class = True if not func.arguments: - self.fail("Method must have at least one argument", func) + self.fail( + 'Method must have at least one argument. 
Did you forget the "self" argument?', + func, + ) elif isinstance(functype, CallableType): self_type = get_proper_type(functype.arg_types[0]) if isinstance(self_type, AnyType): - leading_type: Type = fill_typevars(info) + if has_self_type: + assert self.type is not None and self.type.self_type is not None + leading_type: Type = self.type.self_type + else: + leading_type = fill_typevars(info) if func.is_class or func.name == "__new__": leading_type = self.class_type(leading_type) func.type = replace_implicit_first_type(functype, leading_type) + elif has_self_type and isinstance(func.unanalyzed_type, CallableType): + if not isinstance(get_proper_type(func.unanalyzed_type.arg_types[0]), AnyType): + if self.is_expected_self_type( + self_type, func.is_class or func.name == "__new__" + ): + # This error is off by default, since it is explicitly allowed + # by the PEP 673. + self.fail( + "Redundant Self annotation on method first argument", + func, + code=codes.REDUNDANT_SELF_TYPE, + ) + else: + self.fail( + "Method cannot have explicit self annotation and Self type", func + ) + elif has_self_type: + self.fail("Static methods cannot use Self type", func) + + def is_expected_self_type(self, typ: Type, is_classmethod: bool) -> bool: + """Does this (analyzed or not) type represent the expected Self type for a method?""" + assert self.type is not None + typ = get_proper_type(typ) + if is_classmethod: + if isinstance(typ, TypeType): + return self.is_expected_self_type(typ.item, is_classmethod=False) + if isinstance(typ, UnboundType): + sym = self.lookup_qualified(typ.name, typ, suppress_errors=True) + if sym is not None and sym.fullname == "typing.Type" and typ.args: + return self.is_expected_self_type(typ.args[0], is_classmethod=False) + return False + if isinstance(typ, TypeVarType): + return typ == self.type.self_type + if isinstance(typ, UnboundType): + sym = self.lookup_qualified(typ.name, typ, suppress_errors=True) + return sym is not None and sym.fullname in 
SELF_TYPE_NAMES + return False def set_original_def(self, previous: Node | None, new: FuncDef | Decorator) -> bool: """If 'new' conditionally redefine 'previous', set 'previous' as original @@ -951,15 +1025,41 @@ def f(): ... # Error: 'f' redefined else: return False - def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> None: + def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> bool: """Make any type variables in the signature of defn explicit. Update the signature of defn to contain type variable definitions - if defn is generic. + if defn is generic. Return True, if the signature contains typing.Self + type, or False otherwise. """ with self.tvar_scope_frame(self.tvar_scope.method_frame()): a = self.type_analyzer() - fun_type.variables = a.bind_function_type_variables(fun_type, defn) + fun_type.variables, has_self_type = a.bind_function_type_variables(fun_type, defn) + if has_self_type and self.type is not None: + self.setup_self_type() + return has_self_type + + def setup_self_type(self) -> None: + """Setup a (shared) Self type variable for current class. + + We intentionally don't add it to the class symbol table, + so it can be accessed only by mypy and will not cause + clashes with user defined names. + """ + assert self.type is not None + info = self.type + if info.self_type is not None: + if has_placeholder(info.self_type.upper_bound): + # Similar to regular (user defined) type variables. + self.process_placeholder( + None, + "Self upper bound", + info, + force_progress=info.self_type.upper_bound != fill_typevars(info), + ) + else: + return + info.self_type = TypeVarType("Self", f"{info.fullname}.Self", 0, [], fill_typevars(info)) def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: self.statement = defn @@ -1241,7 +1341,10 @@ def analyze_function_body(self, defn: FuncItem) -> None: # Bind the type variables again to visit the body. 
if defn.type: a = self.type_analyzer() - a.bind_function_type_variables(cast(CallableType, defn.type), defn) + typ = cast(CallableType, defn.type) + a.bind_function_type_variables(typ, defn) + for i in range(len(typ.arg_types)): + store_argument_type(defn, i, typ, self.named_type) self.function_stack.append(defn) with self.enter(defn): for arg in defn.arguments: @@ -1282,6 +1385,64 @@ def check_function_signature(self, fdef: FuncItem) -> None: elif len(sig.arg_types) > len(fdef.arguments): self.fail("Type signature has too many arguments", fdef, blocker=True) + def check_paramspec_definition(self, defn: FuncDef) -> None: + func = defn.type + assert isinstance(func, CallableType) + + if not any(isinstance(var, ParamSpecType) for var in func.variables): + return # Function does not have param spec variables + + args = func.var_arg() + kwargs = func.kw_arg() + if args is None and kwargs is None: + return # Looks like this function does not have starred args + + args_defn_type = None + kwargs_defn_type = None + for arg_def, arg_kind in zip(defn.arguments, defn.arg_kinds): + if arg_kind == ARG_STAR: + args_defn_type = arg_def.type_annotation + elif arg_kind == ARG_STAR2: + kwargs_defn_type = arg_def.type_annotation + + # This may happen on invalid `ParamSpec` args / kwargs definition, + # type analyzer sets types of arguments to `Any`, but keeps + # definition types as `UnboundType` for now. + if not ( + (isinstance(args_defn_type, UnboundType) and args_defn_type.name.endswith(".args")) + or ( + isinstance(kwargs_defn_type, UnboundType) + and kwargs_defn_type.name.endswith(".kwargs") + ) + ): + # Looks like both `*args` and `**kwargs` are not `ParamSpec` + # It might be something else, skipping. 
+ return + + args_type = args.typ if args is not None else None + kwargs_type = kwargs.typ if kwargs is not None else None + + if ( + not isinstance(args_type, ParamSpecType) + or not isinstance(kwargs_type, ParamSpecType) + or args_type.name != kwargs_type.name + ): + if isinstance(args_defn_type, UnboundType) and args_defn_type.name.endswith(".args"): + param_name = args_defn_type.name.split(".")[0] + elif isinstance(kwargs_defn_type, UnboundType) and kwargs_defn_type.name.endswith( + ".kwargs" + ): + param_name = kwargs_defn_type.name.split(".")[0] + else: + # Fallback for cases that probably should not ever happen: + param_name = "P" + + self.fail( + f'ParamSpec must have "*args" typed as "{param_name}.args" and "**kwargs" typed as "{param_name}.kwargs"', + func, + code=codes.VALID_TYPE, + ) + def visit_decorator(self, dec: Decorator) -> None: self.statement = dec # TODO: better don't modify them at all. @@ -1316,7 +1477,13 @@ def visit_decorator(self, dec: Decorator) -> None: dec.var.is_classmethod = True self.check_decorated_function_is_method("classmethod", dec) elif refers_to_fullname( - d, ("builtins.property", "abc.abstractproperty", "functools.cached_property") + d, + ( + "builtins.property", + "abc.abstractproperty", + "functools.cached_property", + "enum.property", + ), ): removed.append(i) dec.func.is_property = True @@ -1340,6 +1507,10 @@ def visit_decorator(self, dec: Decorator) -> None: removed.append(i) else: self.fail("@final cannot be used with non-method functions", d) + elif isinstance(d, CallExpr) and refers_to_fullname( + d.callee, DATACLASS_TRANSFORM_NAMES + ): + dec.func.is_dataclass_transform = True elif not dec.var.is_property: # We have seen a "non-trivial" decorator before seeing @property, if # we will see a @property later, give an error, as we don't support this. 
@@ -1419,7 +1590,9 @@ def analyze_class(self, defn: ClassDef) -> None: self.mark_incomplete(defn.name, defn) return - declared_metaclass, should_defer = self.get_declared_metaclass(defn.name, defn.metaclass) + declared_metaclass, should_defer, any_meta = self.get_declared_metaclass( + defn.name, defn.metaclass + ) if should_defer or self.found_incomplete_ref(tag): # Metaclass was not ready. Defer current target. self.mark_incomplete(defn.name, defn) @@ -1439,6 +1612,8 @@ def analyze_class(self, defn: ClassDef) -> None: self.setup_type_vars(defn, tvar_defs) if base_error: defn.info.fallback_to_any = True + if any_meta: + defn.info.meta_fallback_to_any = True with self.scope.class_scope(defn.info): self.configure_base_classes(defn, base_types) @@ -1457,7 +1632,7 @@ def setup_type_vars(self, defn: ClassDef, tvar_defs: list[TypeVarLikeType]) -> N def setup_alias_type_vars(self, defn: ClassDef) -> None: assert defn.info.special_alias is not None - defn.info.special_alias.alias_tvars = list(defn.info.type_vars) + defn.info.special_alias.alias_tvars = list(defn.type_vars) target = defn.info.special_alias.target assert isinstance(target, ProperType) if isinstance(target, TypedDictType): @@ -1537,6 +1712,11 @@ def apply_class_plugin_hooks(self, defn: ClassDef) -> None: decorator_name = self.get_fullname_for_hook(decorator) if decorator_name: hook = self.plugin.get_class_decorator_hook(decorator_name) + # Special case: if the decorator is itself decorated with + # typing.dataclass_transform, apply the hook for the dataclasses plugin + # TODO: remove special casing here + if hook is None and is_dataclass_transform_decorator(decorator): + hook = dataclasses_plugin.dataclass_tag_callback if hook: hook(ClassDefContext(defn, decorator, self)) @@ -1580,7 +1760,7 @@ def enter_class(self, info: TypeInfo) -> None: self.locals.append(None) # Add class scope self.is_comprehension_stack.append(False) self.block_depth.append(-1) # The class body increments this to 0 - self.type = info + 
self._type = info self.missing_names.append(set()) def leave_class(self) -> None: @@ -1588,7 +1768,7 @@ def leave_class(self) -> None: self.block_depth.pop() self.locals.pop() self.is_comprehension_stack.pop() - self.type = self.type_stack.pop() + self._type = self.type_stack.pop() self.missing_names.pop() def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None: @@ -1622,6 +1802,8 @@ class Foo(Bar, Generic[T]): ... declared_tvars: TypeVarLikeList = [] is_protocol = False for i, base_expr in enumerate(base_type_exprs): + if isinstance(base_expr, StarExpr): + base_expr.valid = True self.analyze_type_expr(base_expr) try: @@ -1722,7 +1904,7 @@ def analyze_unbound_tvar(self, t: Type) -> tuple[str, TypeVarLikeExpr] | None: # It's bound by our type variable scope return None return unbound.name, sym.node - if sym and sym.fullname == "typing_extensions.Unpack": + if sym and sym.fullname in ("typing.Unpack", "typing_extensions.Unpack"): inner_t = unbound.args[0] if not isinstance(inner_t, UnboundType): return None @@ -1756,7 +1938,7 @@ def get_all_bases_tvars( except TypeTranslationError: # This error will be caught later. continue - base_tvars = base.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) + base_tvars = base.accept(TypeVarLikeQuery(self, self.tvar_scope)) tvars.extend(base_tvars) return remove_dups(tvars) @@ -1774,7 +1956,7 @@ def get_and_bind_all_tvars(self, type_exprs: list[Expression]) -> list[TypeVarLi except TypeTranslationError: # This error will be caught later. continue - base_tvars = base.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) + base_tvars = base.accept(TypeVarLikeQuery(self, self.tvar_scope)) tvars.extend(base_tvars) tvars = remove_dups(tvars) # Variables are defined in order of textual appearance. 
tvar_defs = [] @@ -1878,7 +2060,9 @@ def analyze_base_classes( continue try: - base = self.expr_to_analyzed_type(base_expr, allow_placeholder=True) + base = self.expr_to_analyzed_type( + base_expr, allow_placeholder=True, allow_type_any=True + ) except TypeTranslationError: name = self.get_name_repr_of_expr(base_expr) if isinstance(base_expr, CallExpr): @@ -1964,7 +2148,9 @@ def configure_tuple_base_class(self, defn: ClassDef, base: TupleType) -> Instanc self.fail("Class has two incompatible bases derived from tuple", defn) defn.has_incompatible_baseclass = True if info.special_alias and has_placeholder(info.special_alias.target): - self.defer(force_progress=True) + self.process_placeholder( + None, "tuple base", defn, force_progress=base != info.tuple_type + ) info.update_tuple_type(base) self.setup_alias_type_vars(defn) @@ -2017,7 +2203,7 @@ def infer_metaclass_and_bases_from_compat_helpers(self, defn: ClassDef) -> None: if len(defn.base_type_exprs) == 1: base_expr = defn.base_type_exprs[0] if isinstance(base_expr, CallExpr) and isinstance(base_expr.callee, RefExpr): - base_expr.accept(self) + self.analyze_type_expr(base_expr) if ( base_expr.callee.fullname in { @@ -2083,8 +2269,17 @@ def is_base_class(self, t: TypeInfo, s: TypeInfo) -> bool: def get_declared_metaclass( self, name: str, metaclass_expr: Expression | None - ) -> tuple[Instance | None, bool]: - """Returns either metaclass instance or boolean whether we should defer.""" + ) -> tuple[Instance | None, bool, bool]: + """Get declared metaclass from metaclass expression. + + Returns a tuple of three values: + * A metaclass instance or None + * A boolean indicating whether we should defer + * A boolean indicating whether we should set metaclass Any fallback + (either for Any metaclass or invalid/dynamic metaclass). + + The two boolean flags can only be True if instance is None. 
+ """ declared_metaclass = None if metaclass_expr: metaclass_name = None @@ -2094,25 +2289,20 @@ def get_declared_metaclass( metaclass_name = get_member_expr_fullname(metaclass_expr) if metaclass_name is None: self.fail(f'Dynamic metaclass not supported for "{name}"', metaclass_expr) - return None, False + return None, False, True sym = self.lookup_qualified(metaclass_name, metaclass_expr) if sym is None: # Probably a name error - it is already handled elsewhere - return None, False + return None, False, True if isinstance(sym.node, Var) and isinstance(get_proper_type(sym.node.type), AnyType): - # Create a fake TypeInfo that fallbacks to `Any`, basically allowing - # all the attributes. Same thing as we do for `Any` base class. - any_info = self.make_empty_type_info(ClassDef(sym.node.name, Block([]))) - any_info.fallback_to_any = True - any_info._fullname = sym.node.fullname if self.options.disallow_subclassing_any: self.fail( - f'Class cannot use "{any_info.fullname}" as a metaclass (has type "Any")', + f'Class cannot use "{sym.node.name}" as a metaclass (has type "Any")', metaclass_expr, ) - return Instance(any_info, []), False + return None, False, True if isinstance(sym.node, PlaceholderNode): - return None, True # defer later in the caller + return None, True, False # defer later in the caller # Support type aliases, like `_Meta: TypeAlias = type` if ( @@ -2127,16 +2317,16 @@ def get_declared_metaclass( if not isinstance(metaclass_info, TypeInfo) or metaclass_info.tuple_type is not None: self.fail(f'Invalid metaclass "{metaclass_name}"', metaclass_expr) - return None, False + return None, False, False if not metaclass_info.is_metaclass(): self.fail( 'Metaclasses not inheriting from "type" are not supported', metaclass_expr ) - return None, False + return None, False, False inst = fill_typevars(metaclass_info) assert isinstance(inst, Instance) declared_metaclass = inst - return declared_metaclass, False + return declared_metaclass, False, False def 
recalculate_metaclass(self, defn: ClassDef, declared_metaclass: Instance | None) -> None: defn.info.declared_metaclass = declared_metaclass @@ -2174,13 +2364,33 @@ def visit_import(self, i: Import) -> None: base_id = id.split(".")[0] imported_id = base_id module_public = use_implicit_reexport - self.add_module_symbol( - base_id, - imported_id, - context=i, - module_public=module_public, - module_hidden=not module_public, - ) + + if base_id in self.modules: + node = self.modules[base_id] + if self.is_func_scope(): + kind = LDEF + elif self.type is not None: + kind = MDEF + else: + kind = GDEF + symbol = SymbolTableNode( + kind, node, module_public=module_public, module_hidden=not module_public + ) + self.add_imported_symbol( + imported_id, + symbol, + context=i, + module_public=module_public, + module_hidden=not module_public, + ) + else: + self.add_unknown_imported_symbol( + imported_id, + context=i, + target_name=base_id, + module_public=module_public, + module_hidden=not module_public, + ) def visit_import_from(self, imp: ImportFrom) -> None: self.statement = imp @@ -2242,10 +2452,20 @@ def visit_import_from(self, imp: ImportFrom) -> None: ) continue - if node and not node.module_hidden: + if node: self.process_imported_symbol( node, module_id, id, imported_id, fullname, module_public, context=imp ) + if node.module_hidden: + self.report_missing_module_attribute( + module_id, + id, + imported_id, + module_public=module_public, + module_hidden=not module_public, + context=imp, + add_unknown_imported_symbol=False, + ) elif module and not missing_submodule: # Target module exists but the imported name is missing or hidden. 
self.report_missing_module_attribute( @@ -2306,19 +2526,6 @@ def process_imported_symbol( module_hidden=module_hidden, becomes_typeinfo=True, ) - existing_symbol = self.globals.get(imported_id) - if ( - existing_symbol - and not isinstance(existing_symbol.node, PlaceholderNode) - and not isinstance(node.node, PlaceholderNode) - ): - # Import can redefine a variable. They get special treatment. - if self.process_import_over_existing_name(imported_id, existing_symbol, node, context): - return - if existing_symbol and isinstance(node.node, PlaceholderNode): - # Imports are special, some redefinitions are allowed, so wait until - # we know what is the new symbol node. - return # NOTE: we take the original node even for final `Var`s. This is to support # a common pattern when constants are re-exported (same applies to import *). self.add_imported_symbol( @@ -2333,6 +2540,7 @@ def report_missing_module_attribute( module_public: bool, module_hidden: bool, context: Node, + add_unknown_imported_symbol: bool = True, ) -> None: # Missing attribute. if self.is_incomplete_namespace(import_id): @@ -2346,25 +2554,25 @@ def report_missing_module_attribute( # Suggest alternatives, if any match is found. module = self.modules.get(import_id) if module: - if not self.options.implicit_reexport and source_id in module.names.keys(): + if source_id in module.names.keys() and not module.names[source_id].module_public: message = ( - 'Module "{}" does not explicitly export attribute "{}"' - "; implicit reexport disabled".format(import_id, source_id) + f'Module "{import_id}" does not explicitly export attribute "{source_id}"' ) else: alternatives = set(module.names.keys()).difference({source_id}) - matches = best_matches(source_id, alternatives)[:3] + matches = best_matches(source_id, alternatives, n=3) if matches: suggestion = f"; maybe {pretty_seq(matches, 'or')}?" 
message += f"{suggestion}" self.fail(message, context, code=codes.ATTR_DEFINED) - self.add_unknown_imported_symbol( - imported_id, - context, - target_name=None, - module_public=module_public, - module_hidden=not module_public, - ) + if add_unknown_imported_symbol: + self.add_unknown_imported_symbol( + imported_id, + context, + target_name=None, + module_public=module_public, + module_hidden=not module_public, + ) if import_id == "typing": # The user probably has a missing definition in a test fixture. Let's verify. @@ -2435,14 +2643,9 @@ def visit_import_all(self, i: ImportAll) -> None: if isinstance(node.node, MypyFile): # Star import of submodule from a package, add it as a dependency. self.imports.add(node.node.fullname) - existing_symbol = self.lookup_current_scope(name) - if existing_symbol and not isinstance(node.node, PlaceholderNode): - # Import can redefine a variable. They get special treatment. - if self.process_import_over_existing_name(name, existing_symbol, node, i): - continue # `from x import *` always reexports symbols self.add_imported_symbol( - name, node, i, module_public=True, module_hidden=False + name, node, context=i, module_public=True, module_hidden=False ) else: @@ -2455,8 +2658,33 @@ def visit_import_all(self, i: ImportAll) -> None: def visit_assignment_expr(self, s: AssignmentExpr) -> None: s.value.accept(self) + if self.is_func_scope(): + if not self.check_valid_comprehension(s): + return self.analyze_lvalue(s.target, escape_comprehensions=True, has_explicit_value=True) + def check_valid_comprehension(self, s: AssignmentExpr) -> bool: + """Check that assignment expression is not nested within comprehension at class scope. + + class C: + [(j := i) for i in [1, 2, 3]] + is a syntax error that is not enforced by Python parser, but at later steps. 
+ """ + for i, is_comprehension in enumerate(reversed(self.is_comprehension_stack)): + if not is_comprehension and i < len(self.locals) - 1: + if self.locals[-1 - i] is None: + self.fail( + "Assignment expression within a comprehension" + " cannot be used in a class body", + s, + code=codes.SYNTAX, + serious=True, + blocker=True, + ) + return False + break + return True + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: self.statement = s @@ -2471,8 +2699,15 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: # But we can't use a full visit because it may emit extra incomplete refs (namely # when analysing any type applications there) thus preventing the further analysis. # To break the tie, we first analyse rvalue partially, if it can be a type alias. - with self.basic_type_applications_set(s): + if self.can_possibly_be_type_form(s): + old_basic_type_applications = self.basic_type_applications + self.basic_type_applications = True + with self.allow_unbound_tvars_set(): + s.rvalue.accept(self) + self.basic_type_applications = old_basic_type_applications + else: s.rvalue.accept(self) + if self.found_incomplete_ref(tag) or self.should_wait_rhs(s.rvalue): # Initializer couldn't be fully analyzed. Defer the current node and give up. # Make sure that if we skip the definition of some local names, they can't be @@ -2480,10 +2715,11 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: for expr in names_modified_by_assignment(s): self.mark_incomplete(expr.name, expr) return - if self.can_possibly_be_index_alias(s): + if self.can_possibly_be_type_form(s): # Now re-visit those rvalues that were we skipped type applications above. # This should be safe as generally semantic analyzer is idempotent. - s.rvalue.accept(self) + with self.allow_unbound_tvars_set(): + s.rvalue.accept(self) # The r.h.s. 
is now ready to be classified, first check if it is a special form: special_form = False @@ -2622,16 +2858,19 @@ def can_be_type_alias(self, rv: Expression, allow_none: bool = False) -> bool: return True return False - def can_possibly_be_index_alias(self, s: AssignmentStmt) -> bool: - """Like can_be_type_alias(), but simpler and doesn't require analyzed rvalue. + def can_possibly_be_type_form(self, s: AssignmentStmt) -> bool: + """Like can_be_type_alias(), but simpler and doesn't require fully analyzed rvalue. - Instead, use lvalues/annotations structure to figure out whether this can - potentially be a type alias definition. Another difference from above function - is that we are only interested IndexExpr and OpExpr rvalues, since only those + Instead, use lvalues/annotations structure to figure out whether this can potentially be + a type alias definition, NamedTuple, or TypedDict. Another difference from above function + is that we are only interested in IndexExpr, CallExpr and OpExpr rvalues, since only those can be potentially recursive (things like `A = A` are never valid). """ if len(s.lvalues) > 1: return False + if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.callee, RefExpr): + ref = s.rvalue.callee.fullname + return ref in TPDICT_NAMES or ref in TYPED_NAMEDTUPLE_NAMES if not isinstance(s.lvalues[0], NameExpr): return False if s.unanalyzed_type is not None and not self.is_pep_613(s): @@ -2641,17 +2880,6 @@ def can_possibly_be_index_alias(self, s: AssignmentStmt) -> bool: # Something that looks like Foo = Bar[Baz, ...] return True - @contextmanager - def basic_type_applications_set(self, s: AssignmentStmt) -> Iterator[None]: - old = self.basic_type_applications - # As an optimization, only use the double visit logic if this - # can possibly be a recursive type alias. 
- self.basic_type_applications = self.can_possibly_be_index_alias(s) - try: - yield - finally: - self.basic_type_applications = old - def is_type_ref(self, rv: Expression, bare: bool = False) -> bool: """Does this expression refer to a type? @@ -2848,13 +3076,13 @@ def analyze_lvalues(self, s: AssignmentStmt) -> None: def apply_dynamic_class_hook(self, s: AssignmentStmt) -> None: if not isinstance(s.rvalue, CallExpr): return - fname = None + fname = "" call = s.rvalue while True: if isinstance(call.callee, RefExpr): fname = call.callee.fullname # check if method call - if fname is None and isinstance(call.callee, MemberExpr): + if not fname and isinstance(call.callee, MemberExpr): callee_expr = call.callee.expr if isinstance(callee_expr, RefExpr) and callee_expr.fullname: method_name = call.callee.name @@ -2957,7 +3185,8 @@ def store_final_status(self, s: AssignmentStmt) -> None: node = s.lvalues[0].node if isinstance(node, Var): node.is_final = True - node.final_value = self.unbox_literal(s.rvalue) + if s.type: + node.final_value = constant_fold_expr(s.rvalue, self.cur_mod_id) if self.is_class_scope() and ( isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs ): @@ -3017,13 +3246,6 @@ def flatten_lvalues(self, lvalues: list[Expression]) -> list[Expression]: res.append(lv) return res - def unbox_literal(self, e: Expression) -> int | float | bool | str | None: - if isinstance(e, (IntExpr, FloatExpr, StrExpr)): - return e.value - elif isinstance(e, NameExpr) and e.name in ("True", "False"): - return True if e.name == "True" else False - return None - def process_type_annotation(self, s: AssignmentStmt) -> None: """Analyze type annotation or infer simple literal type.""" if s.type: @@ -3078,77 +3300,85 @@ def is_annotated_protocol_member(self, s: AssignmentStmt) -> bool: def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Type | None: """Return builtins.int if rvalue is an int literal, etc. 
- If this is a 'Final' context, we return "Literal[...]" instead.""" - if self.options.semantic_analysis_only or self.function_stack: - # Skip this if we're only doing the semantic analysis pass. - # This is mostly to avoid breaking unit tests. - # Also skip inside a function; this is to avoid confusing + + If this is a 'Final' context, we return "Literal[...]" instead. + """ + if self.function_stack: + # Skip inside a function; this is to avoid confusing # the code that handles dead code due to isinstance() # inside type variables with value restrictions (like # AnyStr). return None - if isinstance(rvalue, FloatExpr): - return self.named_type_or_none("builtins.float") - - value: LiteralValue | None = None - type_name: str | None = None - if isinstance(rvalue, IntExpr): - value, type_name = rvalue.value, "builtins.int" - if isinstance(rvalue, StrExpr): - value, type_name = rvalue.value, "builtins.str" - if isinstance(rvalue, BytesExpr): - value, type_name = rvalue.value, "builtins.bytes" - - if type_name is not None: - assert value is not None - typ = self.named_type_or_none(type_name) - if typ and is_final: - return typ.copy_modified( - last_known_value=LiteralType( - value=value, fallback=typ, line=typ.line, column=typ.column - ) - ) - return typ - return None + value = constant_fold_expr(rvalue, self.cur_mod_id) + if value is None: + return None + + if isinstance(value, bool): + type_name = "builtins.bool" + elif isinstance(value, int): + type_name = "builtins.int" + elif isinstance(value, str): + type_name = "builtins.str" + elif isinstance(value, float): + type_name = "builtins.float" + + typ = self.named_type_or_none(type_name) + if typ and is_final: + return typ.copy_modified(last_known_value=LiteralType(value=value, fallback=typ)) + return typ def analyze_alias( - self, rvalue: Expression, allow_placeholder: bool = False - ) -> tuple[Type | None, list[str], set[str], list[str]]: + self, name: str, rvalue: Expression, allow_placeholder: bool = False + ) -> 
tuple[Type | None, list[TypeVarLikeType], set[str], list[str]]: """Check if 'rvalue' is a valid type allowed for aliasing (e.g. not a type variable). If yes, return the corresponding type, a list of qualified type variable names for generic aliases, a set of names the alias depends on, and a list of type variables if the alias is generic. - An schematic example for the dependencies: + A schematic example for the dependencies: A = int B = str analyze_alias(Dict[A, B])[2] == {'__main__.A', '__main__.B'} """ dynamic = bool(self.function_stack and self.function_stack[-1].is_dynamic()) global_scope = not self.type and not self.function_stack - res = analyze_type_alias( - rvalue, - self, - self.tvar_scope, - self.plugin, - self.options, - self.is_typeshed_stub_file, - allow_placeholder=allow_placeholder, - in_dynamic_func=dynamic, - global_scope=global_scope, - ) - typ: Type | None = None + try: + typ = expr_to_unanalyzed_type(rvalue, self.options, self.is_stub_file) + except TypeTranslationError: + self.fail( + "Invalid type alias: expression is not a valid type", rvalue, code=codes.VALID_TYPE + ) + return None, [], set(), [] + + found_type_vars = typ.accept(TypeVarLikeQuery(self, self.tvar_scope)) + tvar_defs: list[TypeVarLikeType] = [] + namespace = self.qualified_name(name) + with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): + for name, tvar_expr in found_type_vars: + tvar_def = self.tvar_scope.bind_new(name, tvar_expr) + tvar_defs.append(tvar_def) + + res = analyze_type_alias( + typ, + self, + self.tvar_scope, + self.plugin, + self.options, + self.is_typeshed_stub_file, + allow_placeholder=allow_placeholder, + in_dynamic_func=dynamic, + global_scope=global_scope, + allowed_alias_tvars=tvar_defs, + ) + analyzed: Type | None = None if res: - typ, depends_on = res - found_type_vars = typ.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) - alias_tvars = [name for (name, node) in found_type_vars] + analyzed, depends_on = res 
qualified_tvars = [node.fullname for (name, node) in found_type_vars] else: - alias_tvars = [] depends_on = set() qualified_tvars = [] - return typ, alias_tvars, depends_on, qualified_tvars + return analyzed, tvar_defs, depends_on, qualified_tvars def is_pep_613(self, s: AssignmentStmt) -> bool: if s.unanalyzed_type is not None and isinstance(s.unanalyzed_type, UnboundType): @@ -3228,13 +3458,13 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: res: Type | None = None if self.is_none_alias(rvalue): res = NoneType() - alias_tvars: list[str] = [] + alias_tvars: list[TypeVarLikeType] = [] depends_on: set[str] = set() qualified_tvars: list[str] = [] else: tag = self.track_incomplete_refs() res, alias_tvars, depends_on, qualified_tvars = self.analyze_alias( - rvalue, allow_placeholder=True + lvalue.name, rvalue, allow_placeholder=True ) if not res: return False @@ -3281,7 +3511,11 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: no_args=no_args, eager=eager, ) - if isinstance(s.rvalue, (IndexExpr, CallExpr)): # CallExpr is for `void = type(None)` + if isinstance(s.rvalue, (IndexExpr, CallExpr, OpExpr)) and ( + not isinstance(rvalue, OpExpr) + or (self.options.python_version >= (3, 10) or self.is_stub_file) + ): + # Note: CallExpr is for "void = type(None)" and OpExpr is for "X | Y" union syntax. s.rvalue.analyzed = TypeAliasExpr(alias_node) s.rvalue.analyzed.line = s.line # we use the column from resulting target, to get better location for errors @@ -3628,6 +3862,14 @@ def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None: var = lvalue.node var.type = typ var.is_ready = True + typ = get_proper_type(typ) + if ( + var.is_final + and isinstance(typ, Instance) + and typ.last_known_value + and (not self.type or not self.type.is_enum) + ): + var.final_value = typ.last_known_value.value # If node is not a variable, we'll catch it elsewhere. 
elif isinstance(lvalue, TupleExpr): typ = get_proper_type(typ) @@ -3655,7 +3897,7 @@ def process_typevar_declaration(self, s: AssignmentStmt) -> bool: Return True if this looks like a type variable declaration (but maybe with errors), otherwise return False. """ - call = self.get_typevarlike_declaration(s, ("typing.TypeVar",)) + call = self.get_typevarlike_declaration(s, ("typing.TypeVar", "typing_extensions.TypeVar")) if not call: return False @@ -3714,12 +3956,16 @@ def process_typevar_declaration(self, s: AssignmentStmt) -> bool: type_var = TypeVarExpr(name, self.qualified_name(name), values, upper_bound, variance) type_var.line = call.line call.analyzed = type_var + updated = True else: assert isinstance(call.analyzed, TypeVarExpr) + updated = values != call.analyzed.values or upper_bound != call.analyzed.upper_bound call.analyzed.upper_bound = upper_bound call.analyzed.values = values if any(has_placeholder(v) for v in values) or has_placeholder(upper_bound): - self.defer(force_progress=True) + self.process_placeholder( + None, f"TypeVar {'values' if values else 'upper bound'}", s, force_progress=updated + ) self.add_symbol(name, call.analyzed, s) return True @@ -3920,8 +4166,9 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool: # PEP 646 does not specify the behavior of variance, constraints, or bounds. 
if not call.analyzed: + tuple_fallback = self.named_type("builtins.tuple", [self.object_type()]) typevartuple_var = TypeVarTupleExpr( - name, self.qualified_name(name), self.object_type(), INVARIANT + name, self.qualified_name(name), self.object_type(), tuple_fallback, INVARIANT ) typevartuple_var.line = call.line call.analyzed = typevartuple_var @@ -3993,6 +4240,12 @@ def check_classvar(self, s: AssignmentStmt) -> None: # See https://github.com/python/mypy/issues/11538 self.fail(message_registry.CLASS_VAR_WITH_TYPEVARS, s) + if ( + analyzed is not None + and self.type.self_type in get_type_vars(analyzed) + and self.type.defn.type_vars + ): + self.fail(message_registry.CLASS_VAR_WITH_GENERIC_SELF, s) elif not isinstance(lvalue, MemberExpr) or self.is_self_member_ref(lvalue): # In case of member access, report error only when assigning to self # Other kinds of member assignments should be already reported @@ -4429,7 +4682,7 @@ def bind_name_expr(self, expr: NameExpr, sym: SymbolTableNode) -> None: else: expr.kind = sym.kind expr.node = sym.node - expr.fullname = sym.fullname + expr.fullname = sym.fullname or "" def visit_super_expr(self, expr: SuperExpr) -> None: if not self.type and not expr.call.args: @@ -4465,8 +4718,7 @@ def visit_dict_expr(self, expr: DictExpr) -> None: def visit_star_expr(self, expr: StarExpr) -> None: if not expr.valid: - # XXX TODO Change this error message - self.fail("Can use starred expression only as assignment target", expr) + self.fail("Can use starred expression only as assignment target", expr, blocker=True) else: expr.expr.accept(self) @@ -4655,7 +4907,7 @@ def visit_member_expr(self, expr: MemberExpr) -> None: self.process_placeholder(expr.name, "attribute", expr) return expr.kind = sym.kind - expr.fullname = sym.fullname + expr.fullname = sym.fullname or "" expr.node = sym.node elif isinstance(base, RefExpr): # This branch handles the case C.bar (or cls.bar or self.bar inside @@ -4687,7 +4939,7 @@ def visit_member_expr(self, expr: 
MemberExpr) -> None: if not n: return expr.kind = n.kind - expr.fullname = n.fullname + expr.fullname = n.fullname or "" expr.node = n.node def visit_op_expr(self, expr: OpExpr) -> None: @@ -4814,12 +5066,12 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: except TypeTranslationError: self.fail("Type expected within [...]", expr) return None - # We always allow unbound type variables in IndexExpr, since we - # may be analysing a type alias definition rvalue. The error will be - # reported elsewhere if it is not the case. analyzed = self.anal_type( typearg, - allow_unbound_tvars=True, + # The type application may appear in base class expression, + # where type variables are not bound yet. Or when accepting + # r.h.s. of type alias before we figured out it is a type alias. + allow_unbound_tvars=self.allow_unbound_tvars, allow_placeholder=True, allow_param_spec_literals=has_param_spec, ) @@ -4969,10 +5221,11 @@ def visit_yield_expr(self, e: YieldExpr) -> None: e.expr.accept(self) def visit_await_expr(self, expr: AwaitExpr) -> None: - if not self.is_func_scope(): - self.fail('"await" outside function', expr) + if not self.is_func_scope() or not self.function_stack: + # We check both because is_function_scope() returns True inside comprehensions. 
+ self.fail('"await" outside function', expr, serious=True, blocker=True) elif not self.function_stack[-1].is_coroutine: - self.fail('"await" outside coroutine ("async def")', expr) + self.fail('"await" outside coroutine ("async def")', expr, serious=True, blocker=True) expr.expr.accept(self) # @@ -5147,7 +5400,7 @@ def is_overloaded_item(self, node: SymbolNode, statement: Statement) -> bool: return False def is_defined_in_current_module(self, fullname: str | None) -> bool: - if fullname is None: + if not fullname: return False return module_prefix(self.modules, fullname) == self.cur_mod_id @@ -5517,24 +5770,6 @@ def add_local(self, node: Var | FuncDef | OverloadedFuncDef, context: Context) - node._fullname = name self.add_symbol(name, node, context) - def add_module_symbol( - self, id: str, as_id: str, context: Context, module_public: bool, module_hidden: bool - ) -> None: - """Add symbol that is a reference to a module object.""" - if id in self.modules: - node = self.modules[id] - self.add_symbol( - as_id, node, context, module_public=module_public, module_hidden=module_hidden - ) - else: - self.add_unknown_imported_symbol( - as_id, - context, - target_name=id, - module_public=module_public, - module_hidden=module_hidden, - ) - def _get_node_for_class_scoped_import( self, name: str, symbol_node: SymbolNode | None, context: Context ) -> SymbolNode | None: @@ -5581,13 +5816,23 @@ def add_imported_symbol( self, name: str, node: SymbolTableNode, - context: Context, + context: ImportBase, module_public: bool, module_hidden: bool, ) -> None: """Add an alias to an existing symbol through import.""" assert not module_hidden or not module_public + existing_symbol = self.lookup_current_scope(name) + if ( + existing_symbol + and not isinstance(existing_symbol.node, PlaceholderNode) + and not isinstance(node.node, PlaceholderNode) + ): + # Import can redefine a variable. They get special treatment. 
+ if self.process_import_over_existing_name(name, existing_symbol, node, context): + return + symbol_node: SymbolNode | None = node.node if self.is_class_scope(): @@ -5733,7 +5978,9 @@ def is_incomplete_namespace(self, fullname: str) -> bool: """ return fullname in self.incomplete_namespaces - def process_placeholder(self, name: str, kind: str, ctx: Context) -> None: + def process_placeholder( + self, name: str | None, kind: str, ctx: Context, force_progress: bool = False + ) -> None: """Process a reference targeting placeholder node. If this is not a final iteration, defer current node, @@ -5745,10 +5992,11 @@ def process_placeholder(self, name: str, kind: str, ctx: Context) -> None: if self.final_iteration: self.cannot_resolve_name(name, kind, ctx) else: - self.defer(ctx) + self.defer(ctx, force_progress=force_progress) - def cannot_resolve_name(self, name: str, kind: str, ctx: Context) -> None: - self.fail(f'Cannot resolve {kind} "{name}" (possible cyclic definition)', ctx) + def cannot_resolve_name(self, name: str | None, kind: str, ctx: Context) -> None: + name_format = f' "{name}"' if name else "" + self.fail(f"Cannot resolve {kind}{name_format} (possible cyclic definition)", ctx) if not self.options.disable_recursive_aliases and self.is_func_scope(): self.note("Recursive types are not allowed at function scope", ctx) @@ -5965,12 +6213,12 @@ def fail( return # In case it's a bug and we don't really have context assert ctx is not None, msg - self.errors.report(ctx.get_line(), ctx.get_column(), msg, blocker=blocker, code=code) + self.errors.report(ctx.line, ctx.column, msg, blocker=blocker, code=code) def note(self, msg: str, ctx: Context, code: ErrorCode | None = None) -> None: if not self.in_checked_function(): return - self.errors.report(ctx.get_line(), ctx.get_column(), msg, severity="note", code=code) + self.errors.report(ctx.line, ctx.column, msg, severity="note", code=code) def incomplete_feature_enabled(self, feature: str, ctx: Context) -> bool: if 
feature not in self.options.enable_incomplete_feature: @@ -5989,7 +6237,11 @@ def accept(self, node: Node) -> None: report_internal_error(err, self.errors.file, node.line, self.errors, self.options) def expr_to_analyzed_type( - self, expr: Expression, report_invalid_types: bool = True, allow_placeholder: bool = False + self, + expr: Expression, + report_invalid_types: bool = True, + allow_placeholder: bool = False, + allow_type_any: bool = False, ) -> Type | None: if isinstance(expr, CallExpr): # This is a legacy syntax intended mostly for Python 2, we keep it for @@ -6014,7 +6266,10 @@ def expr_to_analyzed_type( return TupleType(info.tuple_type.items, fallback=fallback) typ = self.expr_to_unanalyzed_type(expr) return self.anal_type( - typ, report_invalid_types=report_invalid_types, allow_placeholder=allow_placeholder + typ, + report_invalid_types=report_invalid_types, + allow_placeholder=allow_placeholder, + allow_type_any=allow_type_any, ) def analyze_type_expr(self, expr: Expression) -> None: @@ -6024,7 +6279,7 @@ def analyze_type_expr(self, expr: Expression) -> None: # them semantically analyzed, however, if they need to treat it as an expression # and not a type. (Which is to say, mypyc needs to do this.) Do the analysis # in a fresh tvar scope in order to suppress any errors about using type variables. 
- with self.tvar_scope_frame(TypeVarLikeScope()): + with self.tvar_scope_frame(TypeVarLikeScope()), self.allow_unbound_tvars_set(): expr.accept(self) def type_analyzer( @@ -6037,6 +6292,8 @@ def type_analyzer( allow_required: bool = False, allow_param_spec_literals: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + allow_type_any: bool = False, ) -> TypeAnalyser: if tvar_scope is None: tvar_scope = self.tvar_scope @@ -6052,6 +6309,8 @@ def type_analyzer( allow_placeholder=allow_placeholder, allow_required=allow_required, allow_param_spec_literals=allow_param_spec_literals, + prohibit_self_type=prohibit_self_type, + allow_type_any=allow_type_any, ) tpan.in_dynamic_func = bool(self.function_stack and self.function_stack[-1].is_dynamic()) tpan.global_scope = not self.type and not self.function_stack @@ -6071,6 +6330,8 @@ def anal_type( allow_required: bool = False, allow_param_spec_literals: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + allow_type_any: bool = False, third_pass: bool = False, ) -> Type | None: """Semantically analyze a type. @@ -6093,6 +6354,11 @@ def anal_type( NOTE: The caller shouldn't defer even if this returns None or a placeholder type. 
""" + has_self_type = find_self_type( + typ, lambda name: self.lookup_qualified(name, typ, suppress_errors=True) + ) + if has_self_type and self.type and prohibit_self_type is None: + self.setup_self_type() a = self.type_analyzer( tvar_scope=tvar_scope, allow_unbound_tvars=allow_unbound_tvars, @@ -6101,6 +6367,8 @@ def anal_type( allow_required=allow_required, allow_param_spec_literals=allow_param_spec_literals, report_invalid_types=report_invalid_types, + prohibit_self_type=prohibit_self_type, + allow_type_any=allow_type_any, ) tag = self.track_incomplete_refs() typ = typ.accept(a) @@ -6136,7 +6404,9 @@ def add_plugin_dependency(self, trigger: str, target: str | None = None) -> None target = self.scope.current_target() self.cur_mod_node.plugin_deps.setdefault(trigger, set()).add(target) - def add_type_alias_deps(self, aliases_used: Iterable[str], target: str | None = None) -> None: + def add_type_alias_deps( + self, aliases_used: Collection[str], target: str | None = None + ) -> None: """Add full names of type aliases on which the current node depends. This is used by fine-grained incremental mode to re-check the corresponding nodes. @@ -6362,3 +6632,10 @@ def halt(self, reason: str = ...) -> NoReturn: return isinstance(stmt, PassStmt) or ( isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr) ) + + +def is_dataclass_transform_decorator(node: Node | None) -> bool: + if isinstance(node, RefExpr): + return is_dataclass_transform_decorator(node.node) + + return isinstance(node, Decorator) and node.func.is_dataclass_transform diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index b5a702592144..ead80aed67b6 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -22,7 +22,7 @@ Var, ) from mypy.options import Options -from mypy.types import Instance, ProperType +from mypy.types import MYPYC_NATIVE_INT_NAMES, Instance, ProperType # Hard coded type promotions (shared between all Python versions). 
# These add extra ad-hoc edges to the subtyping relation. For example, @@ -165,6 +165,10 @@ def add_type_promotion( if not promote_targets: if defn.fullname in TYPE_PROMOTIONS: target_sym = module_names.get(TYPE_PROMOTIONS[defn.fullname]) + if defn.fullname == "builtins.bytearray" and options.disable_bytearray_promotion: + target_sym = None + elif defn.fullname == "builtins.memoryview" and options.disable_memoryview_promotion: + target_sym = None # With test stubs, the target may not exist. if target_sym: target_info = target_sym.node @@ -173,7 +177,7 @@ def add_type_promotion( # Special case the promotions between 'int' and native integer types. # These have promotions going both ways, such as from 'int' to 'i64' # and 'i64' to 'int', for convenience. - if defn.fullname == "mypy_extensions.i64" or defn.fullname == "mypy_extensions.i32": + if defn.fullname in MYPYC_NATIVE_INT_NAMES: int_sym = builtin_names["int"] assert isinstance(int_sym.node, TypeInfo) int_sym.node._promote.append(Instance(defn.info, [])) diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 9e3aeaa7fa4b..d2dd0e32398d 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -37,9 +37,11 @@ from mypy.nodes import Decorator, FuncDef, MypyFile, OverloadedFuncDef, TypeInfo, Var from mypy.options import Options from mypy.plugin import ClassDefContext +from mypy.plugins import dataclasses as dataclasses_plugin from mypy.semanal import ( SemanticAnalyzer, apply_semantic_analyzer_patches, + is_dataclass_transform_decorator, remove_imported_names_from_symtable, ) from mypy.semanal_classprop import ( @@ -66,7 +68,14 @@ # Number of passes over core modules before going on to the rest of the builtin SCC. 
CORE_WARMUP: Final = 2 -core_modules: Final = ["typing", "builtins", "abc", "collections"] +core_modules: Final = [ + "typing", + "_collections_abc", + "builtins", + "abc", + "collections", + "collections.abc", +] def semantic_analysis_for_scc(graph: Graph, scc: list[str], errors: Errors) -> None: @@ -450,11 +459,19 @@ def apply_hooks_to_class( ok = True for decorator in defn.decorators: with self.file_context(file_node, options, info): + hook = None + decorator_name = self.get_fullname_for_hook(decorator) if decorator_name: hook = self.plugin.get_class_decorator_hook_2(decorator_name) - if hook: - ok = ok and hook(ClassDefContext(defn, decorator, self)) + # Special case: if the decorator is itself decorated with + # typing.dataclass_transform, apply the hook for the dataclasses plugin + # TODO: remove special casing here + if hook is None and is_dataclass_transform_decorator(decorator): + hook = dataclasses_plugin.dataclass_class_maker_callback + + if hook: + ok = ok and hook(ClassDefContext(defn, decorator, self)) return ok diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 1727c18b6fd9..1194557836b1 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -32,6 +32,7 @@ NameExpr, PassStmt, RefExpr, + Statement, StrExpr, SymbolTable, SymbolTableNode, @@ -111,7 +112,7 @@ def analyze_namedtuple_classdef( if result is None: # This is a valid named tuple, but some types are incomplete. return True, None - items, types, default_items = result + items, types, default_items, statements = result if is_func_scope and "@" not in defn.name: defn.name += "@" + str(defn.line) existing_info = None @@ -123,6 +124,7 @@ def analyze_namedtuple_classdef( defn.analyzed = NamedTupleExpr(info, is_typed=True) defn.analyzed.line = defn.line defn.analyzed.column = defn.column + defn.defs.body = statements # All done: this is a valid named tuple with all types known. return True, info # This can't be a valid named tuple. 
@@ -130,24 +132,27 @@ def analyze_namedtuple_classdef( def check_namedtuple_classdef( self, defn: ClassDef, is_stub_file: bool - ) -> tuple[list[str], list[Type], dict[str, Expression]] | None: + ) -> tuple[list[str], list[Type], dict[str, Expression], list[Statement]] | None: """Parse and validate fields in named tuple class definition. - Return a three tuple: + Return a four tuple: * field names * field types * field default values + * valid statements or None, if any of the types are not ready. """ if self.options.python_version < (3, 6) and not is_stub_file: self.fail("NamedTuple class syntax is only supported in Python 3.6", defn) - return [], [], {} + return [], [], {}, [] if len(defn.base_type_exprs) > 1: self.fail("NamedTuple should be a single base", defn) items: list[str] = [] types: list[Type] = [] default_items: dict[str, Expression] = {} + statements: list[Statement] = [] for stmt in defn.defs.body: + statements.append(stmt) if not isinstance(stmt, AssignmentStmt): # Still allow pass or ... (for empty namedtuples). if isinstance(stmt, PassStmt) or ( @@ -160,9 +165,13 @@ def check_namedtuple_classdef( # And docstrings. if isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr): continue + statements.pop() + defn.removed_statements.append(stmt) self.fail(NAMEDTUP_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. + statements.pop() + defn.removed_statements.append(stmt) self.fail(NAMEDTUP_CLASS_ERROR, stmt) else: # Append name and type in this case... @@ -178,6 +187,7 @@ def check_namedtuple_classdef( stmt.type, allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="NamedTuple item type", ) if analyzed is None: # Something is incomplete. We need to defer this named tuple. 
@@ -198,7 +208,7 @@ def check_namedtuple_classdef( ) else: default_items[name] = stmt.rvalue - return items, types, default_items + return items, types, default_items, statements def check_namedtuple( self, node: Expression, var_name: str | None, is_func_scope: bool @@ -445,6 +455,7 @@ def parse_namedtuple_fields_with_types( type, allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="NamedTuple item type", ) # Workaround #4987 and avoid introducing a bogus UnboundType if isinstance(analyzed, UnboundType): @@ -470,13 +481,9 @@ def build_namedtuple_typeinfo( strtype = self.api.named_type("builtins.str") implicit_any = AnyType(TypeOfAny.special_form) basetuple_type = self.api.named_type("builtins.tuple", [implicit_any]) - dictype = self.api.named_type_or_none( - "builtins.dict", [strtype, implicit_any] - ) or self.api.named_type("builtins.object") + dictype = self.api.named_type("builtins.dict", [strtype, implicit_any]) # Actual signature should return OrderedDict[str, Union[types]] - ordereddictype = self.api.named_type_or_none( - "builtins.dict", [strtype, implicit_any] - ) or self.api.named_type("builtins.object") + ordereddictype = self.api.named_type("builtins.dict", [strtype, implicit_any]) fallback = self.api.named_type("builtins.tuple", [implicit_any]) # Note: actual signature should accept an invariant version of Iterable[UnionType[types]]. # but it can't be expressed. 'new' and 'len' should be callable types. @@ -490,7 +497,9 @@ def build_namedtuple_typeinfo( info.is_named_tuple = True tuple_base = TupleType(types, fallback) if info.special_alias and has_placeholder(info.special_alias.target): - self.api.defer(force_progress=True) + self.api.process_placeholder( + None, "NamedTuple item", info, force_progress=tuple_base != info.tuple_type + ) info.update_tuple_type(tuple_base) info.line = line # For use by mypyc. 
diff --git a/mypy/semanal_newtype.py b/mypy/semanal_newtype.py index b6fb64532e6e..cb1055a62186 100644 --- a/mypy/semanal_newtype.py +++ b/mypy/semanal_newtype.py @@ -249,10 +249,16 @@ def build_newtype_typeinfo( init_func = FuncDef("__init__", args, Block([]), typ=signature) init_func.info = info init_func._fullname = info.fullname + ".__init__" + if not existing_info: + updated = True + else: + previous_sym = info.names["__init__"].node + assert isinstance(previous_sym, FuncDef) + updated = old_type != previous_sym.arguments[1].variable.type info.names["__init__"] = SymbolTableNode(MDEF, init_func) - if has_placeholder(old_type) or info.tuple_type and has_placeholder(info.tuple_type): - self.api.defer(force_progress=True) + if has_placeholder(old_type): + self.api.process_placeholder(None, "NewType base", info, force_progress=updated) return info # Helpers diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index 63f4f5516f79..11c4af314a3b 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -21,7 +21,7 @@ TypeInfo, ) from mypy.tvar_scope import TypeVarLikeScope -from mypy.type_visitor import TypeQuery +from mypy.type_visitor import ANY_STRATEGY, BoolTypeQuery from mypy.types import ( TPDICT_FB_NAMES, FunctionLike, @@ -38,6 +38,11 @@ get_proper_type, ) +# Subclasses can override these Var attributes with incompatible types. This can also be +# set for individual attributes using 'allow_incompatible_override' of Var. 
+ALLOW_INCOMPATIBLE_OVERRIDE: Final = ("__slots__", "__deletable__", "__match_args__") + + # Priorities for ordering of patches within the "patch" phase of semantic analysis # (after the main pass): @@ -119,6 +124,11 @@ def is_stub_file(self) -> bool: def is_func_scope(self) -> bool: raise NotImplementedError + @property + @abstractmethod + def type(self) -> TypeInfo | None: + raise NotImplementedError + @trait class SemanticAnalyzerInterface(SemanticAnalyzerCoreInterface): @@ -162,6 +172,7 @@ def anal_type( allow_required: bool = False, allow_placeholder: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, ) -> Type | None: raise NotImplementedError @@ -226,6 +237,12 @@ def qualified_name(self, n: str) -> str: def is_typeshed_stub_file(self) -> bool: raise NotImplementedError + @abstractmethod + def process_placeholder( + self, name: str | None, kind: str, ctx: Context, force_progress: bool = False + ) -> None: + raise NotImplementedError + def set_callable_name(sig: Type, fdef: FuncDef) -> ProperType: sig = get_proper_type(sig) @@ -313,9 +330,9 @@ def paramspec_kwargs( ) -class HasPlaceholders(TypeQuery[bool]): +class HasPlaceholders(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_placeholder_type(self, t: PlaceholderType) -> bool: return True diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 161775ce8fd9..b9965236c379 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -7,23 +7,27 @@ from __future__ import annotations +from typing import Sequence + from mypy import errorcodes as codes, message_registry from mypy.errorcodes import ErrorCode from mypy.errors import Errors from mypy.messages import format_type from mypy.mixedtraverser import MixedTraverserVisitor -from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile, TypeInfo +from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, 
MypyFile from mypy.options import Options from mypy.scope import Scope from mypy.subtypes import is_same_type, is_subtype from mypy.types import ( AnyType, Instance, + Parameters, ParamSpecType, TupleType, Type, TypeAliasType, TypeOfAny, + TypeVarLikeType, TypeVarTupleType, TypeVarType, UnboundType, @@ -35,6 +39,7 @@ class TypeArgumentAnalyzer(MixedTraverserVisitor): def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None: + super().__init__() self.errors = errors self.options = options self.is_typeshed_file = is_typeshed_file @@ -77,7 +82,12 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None: # correct aliases. if t.alias and len(t.args) != len(t.alias.alias_tvars): t.args = [AnyType(TypeOfAny.from_error) for _ in t.alias.alias_tvars] - get_proper_type(t).accept(self) + assert t.alias is not None, f"Unfixed type alias {t.type_ref}" + is_error = self.validate_args(t.alias.name, t.args, t.alias.alias_tvars, t) + if not is_error: + # If there was already an error for the alias itself, there is no point in checking + # the expansion, most likely it will result in the same kind of error. + get_proper_type(t).accept(self) def visit_instance(self, t: Instance) -> None: # Type argument counts were checked in the main semantic analyzer pass. 
We assume @@ -85,36 +95,67 @@ def visit_instance(self, t: Instance) -> None: info = t.type if isinstance(info, FakeInfo): return # https://github.com/python/mypy/issues/11079 - for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars): + self.validate_args(info.name, t.args, info.defn.type_vars, t) + super().visit_instance(t) + + def validate_args( + self, name: str, args: Sequence[Type], type_vars: list[TypeVarLikeType], ctx: Context + ) -> bool: + is_error = False + for (i, arg), tvar in zip(enumerate(args), type_vars): if isinstance(tvar, TypeVarType): if isinstance(arg, ParamSpecType): # TODO: Better message - self.fail(f'Invalid location for ParamSpec "{arg.name}"', t) + is_error = True + self.fail(f'Invalid location for ParamSpec "{arg.name}"', ctx) + self.note( + "You can use ParamSpec as the first argument to Callable, e.g., " + "'Callable[{}, int]'".format(arg.name), + ctx, + ) continue if tvar.values: if isinstance(arg, TypeVarType): + if self.in_type_alias_expr: + # Type aliases are allowed to use unconstrained type variables + # error will be checked at substitution point. + continue arg_values = arg.values if not arg_values: + is_error = True self.fail( - message_registry.INVALID_TYPEVAR_AS_TYPEARG.format( - arg.name, info.name - ), - t, + message_registry.INVALID_TYPEVAR_AS_TYPEARG.format(arg.name, name), + ctx, code=codes.TYPE_VAR, ) continue else: arg_values = [arg] - self.check_type_var_values(info, arg_values, tvar.name, tvar.values, i + 1, t) + if self.check_type_var_values(name, arg_values, tvar.name, tvar.values, ctx): + is_error = True if not is_subtype(arg, tvar.upper_bound): + if self.in_type_alias_expr and isinstance(arg, TypeVarType): + # Type aliases are allowed to use unconstrained type variables + # error will be checked at substitution point. 
+ continue + is_error = True self.fail( message_registry.INVALID_TYPEVAR_ARG_BOUND.format( - format_type(arg), info.name, format_type(tvar.upper_bound) + format_type(arg), name, format_type(tvar.upper_bound) ), - t, + ctx, code=codes.TYPE_VAR, ) - super().visit_instance(t) + elif isinstance(tvar, ParamSpecType): + if not isinstance( + get_proper_type(arg), (ParamSpecType, Parameters, AnyType, UnboundType) + ): + self.fail( + "Can only replace ParamSpec with a parameter types list or" + f" another ParamSpec, got {format_type(arg)}", + ctx, + ) + return is_error def visit_unpack_type(self, typ: UnpackType) -> None: proper_type = get_proper_type(typ.type) @@ -132,28 +173,25 @@ def visit_unpack_type(self, typ: UnpackType) -> None: self.fail(message_registry.INVALID_UNPACK.format(proper_type), typ) def check_type_var_values( - self, - type: TypeInfo, - actuals: list[Type], - arg_name: str, - valids: list[Type], - arg_number: int, - context: Context, - ) -> None: + self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context + ) -> bool: + is_error = False for actual in get_proper_types(actuals): - # TODO: bind type variables in class bases/alias targets - # so we can safely check this, currently we miss some errors. + # We skip UnboundType here, since they may appear in defn.bases, + # the error will be caught when visiting info.bases, that have bound type + # variables. 
if not isinstance(actual, (AnyType, UnboundType)) and not any( is_same_type(actual, value) for value in valids ): + is_error = True if len(actuals) > 1 or not isinstance(actual, Instance): self.fail( - message_registry.INVALID_TYPEVAR_ARG_VALUE.format(type.name), + message_registry.INVALID_TYPEVAR_ARG_VALUE.format(name), context, code=codes.TYPE_VAR, ) else: - class_name = f'"{type.name}"' + class_name = f'"{name}"' actual_type_name = f'"{actual.type.name}"' self.fail( message_registry.INCOMPATIBLE_TYPEVAR_VALUE.format( @@ -162,6 +200,10 @@ def check_type_var_values( context, code=codes.TYPE_VAR, ) + return is_error def fail(self, msg: str, context: Context, *, code: ErrorCode | None = None) -> None: - self.errors.report(context.get_line(), context.get_column(), msg, code=code) + self.errors.report(context.line, context.column, msg, code=code) + + def note(self, msg: str, context: Context, *, code: ErrorCode | None = None) -> None: + self.errors.report(context.line, context.column, msg, severity="note", code=code) diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index fd6b1bbd2bbf..55618318c1e8 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -23,6 +23,7 @@ NameExpr, PassStmt, RefExpr, + Statement, StrExpr, TempNode, TupleExpr, @@ -93,7 +94,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N and defn.base_type_exprs[0].fullname in TPDICT_NAMES ): # Building a new TypedDict - fields, types, required_keys = self.analyze_typeddict_classdef_fields(defn) + fields, types, statements, required_keys = self.analyze_typeddict_classdef_fields(defn) if fields is None: return True, None # Defer info = self.build_typeddict_typeinfo( @@ -102,6 +103,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N defn.analyzed = TypedDictExpr(info) defn.analyzed.line = defn.line defn.analyzed.column = defn.column + defn.defs.body = statements return True, info # Extending/merging 
existing TypedDicts @@ -139,7 +141,12 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N # Iterate over bases in reverse order so that leftmost base class' keys take precedence for base in reversed(typeddict_bases): self.add_keys_and_types_from_base(base, keys, types, required_keys, defn) - new_keys, new_types, new_required_keys = self.analyze_typeddict_classdef_fields(defn, keys) + ( + new_keys, + new_types, + new_statements, + new_required_keys, + ) = self.analyze_typeddict_classdef_fields(defn, keys) if new_keys is None: return True, None # Defer keys.extend(new_keys) @@ -151,6 +158,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N defn.analyzed = TypedDictExpr(info) defn.analyzed.line = defn.line defn.analyzed.column = defn.column + defn.defs.body = new_statements return True, info def add_keys_and_types_from_base( @@ -181,7 +189,7 @@ def add_keys_and_types_from_base( valid_items = base_items.copy() # Always fix invalid bases to avoid crashes. - tvars = info.type_vars + tvars = info.defn.type_vars if len(base_args) != len(tvars): any_kind = TypeOfAny.from_omitted_generics if base_args: @@ -227,7 +235,7 @@ def analyze_base_args(self, base: IndexExpr, ctx: Context) -> list[Type] | None: return base_args def map_items_to_base( - self, valid_items: dict[str, Type], tvars: list[str], base_args: list[Type] + self, valid_items: dict[str, Type], tvars: list[TypeVarLikeType], base_args: list[Type] ) -> dict[str, Type]: """Map item types to how they would look in their base with type arguments applied. @@ -250,7 +258,7 @@ def map_items_to_base( def analyze_typeddict_classdef_fields( self, defn: ClassDef, oldfields: list[str] | None = None - ) -> tuple[list[str] | None, list[Type], set[str]]: + ) -> tuple[list[str] | None, list[Type], list[Statement], set[str]]: """Analyze fields defined in a TypedDict class definition. This doesn't consider inherited fields (if any). 
Also consider totality, @@ -259,20 +267,27 @@ def analyze_typeddict_classdef_fields( Return tuple with these items: * List of keys (or None if found an incomplete reference --> deferral) * List of types for each key + * List of statements from defn.defs.body that are legally allowed to be a + part of a TypedDict definition * Set of required keys """ fields: list[str] = [] types: list[Type] = [] + statements: list[Statement] = [] for stmt in defn.defs.body: if not isinstance(stmt, AssignmentStmt): - # Still allow pass or ... (for empty TypedDict's). - if not isinstance(stmt, PassStmt) and not ( + # Still allow pass or ... (for empty TypedDict's) and docstrings + if isinstance(stmt, PassStmt) or ( isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, (EllipsisExpr, StrExpr)) ): + statements.append(stmt) + else: + defn.removed_statements.append(stmt) self.fail(TPDICT_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. + defn.removed_statements.append(stmt) self.fail(TPDICT_CLASS_ERROR, stmt) else: name = stmt.lvalues[0].name @@ -281,8 +296,9 @@ def analyze_typeddict_classdef_fields( if name in fields: self.fail(f'Duplicate TypedDict key "{name}"', stmt) continue - # Append name and type in this case... + # Append stmt, name, and type in this case... fields.append(name) + statements.append(stmt) if stmt.type is None: types.append(AnyType(TypeOfAny.unannotated)) else: @@ -291,11 +307,12 @@ def analyze_typeddict_classdef_fields( allow_required=True, allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="TypedDict item type", ) if analyzed is None: - return None, [], set() # Need to defer + return None, [], [], set() # Need to defer types.append(analyzed) - # ...despite possible minor failures that allow further analyzis. + # ...despite possible minor failures that allow further analysis. 
if stmt.type is None or hasattr(stmt, "new_syntax") and not stmt.new_syntax: self.fail(TPDICT_CLASS_ERROR, stmt) elif not isinstance(stmt.rvalue, TempNode): @@ -317,7 +334,7 @@ def analyze_typeddict_classdef_fields( t.item if isinstance(t, RequiredType) else t for t in types ] - return fields, types, required_keys + return fields, types, statements, required_keys def check_typeddict( self, node: Expression, var_name: str | None, is_func_scope: bool @@ -486,6 +503,7 @@ def parse_typeddict_fields_with_types( allow_required=True, allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="TypedDict item type", ) if analyzed is None: return None @@ -517,7 +535,9 @@ def build_typeddict_typeinfo( info = existing_info or self.api.basic_new_typeinfo(name, fallback, line) typeddict_type = TypedDictType(dict(zip(items, types)), required_keys, fallback) if info.special_alias and has_placeholder(info.special_alias.target): - self.api.defer(force_progress=True) + self.api.process_placeholder( + None, "TypedDict item", info, force_progress=typeddict_type != info.typeddict_type + ) info.update_typeddict_type(typeddict_type) return info diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index 41a79db480c9..40b60f1a69d8 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -52,7 +52,7 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' from __future__ import annotations -from typing import Sequence, Tuple, cast +from typing import Sequence, Tuple, Union, cast from typing_extensions import TypeAlias as _TypeAlias from mypy.expandtype import expand_type @@ -109,11 +109,17 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' # snapshots are immutable). # # For example, the snapshot of the 'int' type is ('Instance', 'builtins.int', ()). -SnapshotItem: _TypeAlias = Tuple[object, ...] + +# Type snapshots are strict, they must be hashable and ordered (e.g. 
for Unions). +Primitive: _TypeAlias = Union[str, float, int, bool] # float is for Literal[3.14] support. +SnapshotItem: _TypeAlias = Tuple[Union[Primitive, "SnapshotItem"], ...] + +# Symbol snapshots can be more lenient. +SymbolSnapshot: _TypeAlias = Tuple[object, ...] def compare_symbol_table_snapshots( - name_prefix: str, snapshot1: dict[str, SnapshotItem], snapshot2: dict[str, SnapshotItem] + name_prefix: str, snapshot1: dict[str, SymbolSnapshot], snapshot2: dict[str, SymbolSnapshot] ) -> set[str]: """Return names that are different in two snapshots of a symbol table. @@ -155,7 +161,7 @@ def compare_symbol_table_snapshots( return triggers -def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, SnapshotItem]: +def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, SymbolSnapshot]: """Create a snapshot description that represents the state of a symbol table. The snapshot has a representation based on nested tuples and dicts @@ -165,7 +171,7 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, Sna things defined in other modules are represented just by the names of the targets. """ - result: dict[str, SnapshotItem] = {} + result: dict[str, SymbolSnapshot] = {} for name, symbol in table.items(): node = symbol.node # TODO: cross_ref? @@ -187,7 +193,7 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, Sna elif isinstance(node, TypeAlias): result[name] = ( "TypeAlias", - node.alias_tvars, + snapshot_types(node.alias_tvars), node.normalized, node.no_args, snapshot_optional_type(node.target), @@ -206,7 +212,7 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, Sna return result -def snapshot_definition(node: SymbolNode | None, common: tuple[object, ...]) -> tuple[object, ...]: +def snapshot_definition(node: SymbolNode | None, common: SymbolSnapshot) -> SymbolSnapshot: """Create a snapshot description of a symbol table node. 
The representation is nested tuples and dicts. Only externally @@ -255,6 +261,7 @@ def snapshot_definition(node: SymbolNode | None, common: tuple[object, ...]) -> node.is_enum, node.is_protocol, node.fallback_to_any, + node.meta_fallback_to_any, node.is_named_tuple, node.is_newtype, # We need this to e.g. trigger metaclass calculation in subclasses. @@ -289,11 +296,11 @@ def snapshot_type(typ: Type) -> SnapshotItem: return typ.accept(SnapshotTypeVisitor()) -def snapshot_optional_type(typ: Type | None) -> SnapshotItem | None: +def snapshot_optional_type(typ: Type | None) -> SnapshotItem: if typ: return snapshot_type(typ) else: - return None + return ("",) def snapshot_types(types: Sequence[Type]) -> SnapshotItem: @@ -395,7 +402,7 @@ def visit_parameters(self, typ: Parameters) -> SnapshotItem: "Parameters", snapshot_types(typ.arg_types), tuple(encode_optional_str(name) for name in typ.arg_names), - tuple(typ.arg_kinds), + tuple(k.value for k in typ.arg_kinds), ) def visit_callable_type(self, typ: CallableType) -> SnapshotItem: @@ -406,7 +413,7 @@ def visit_callable_type(self, typ: CallableType) -> SnapshotItem: snapshot_types(typ.arg_types), snapshot_type(typ.ret_type), tuple(encode_optional_str(name) for name in typ.arg_names), - tuple(typ.arg_kinds), + tuple(k.value for k in typ.arg_kinds), typ.is_type_obj(), typ.is_ellipsis_args, snapshot_types(typ.variables), @@ -463,7 +470,7 @@ def visit_type_alias_type(self, typ: TypeAliasType) -> SnapshotItem: return ("TypeAliasType", typ.alias.fullname, snapshot_types(typ.args)) -def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> tuple[object, ...]: +def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> SymbolSnapshot: """Create a snapshot of the signature of a function that has no explicit signature. 
If the arguments to a function without signature change, it must be @@ -475,7 +482,7 @@ def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> tuple[obje if isinstance(func, FuncItem): return (tuple(func.arg_names), tuple(func.arg_kinds)) else: - result = [] + result: list[SymbolSnapshot] = [] for item in func.items: if isinstance(item, Decorator): if item.var.type: diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 7a6b247c84f8..6ce737c42520 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -110,7 +110,7 @@ UnionType, UnpackType, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.util import get_prefix, replace_object_state @@ -213,8 +213,8 @@ def visit_mypy_file(self, node: MypyFile) -> None: super().visit_mypy_file(node) def visit_block(self, node: Block) -> None: - super().visit_block(node) node.body = self.replace_statements(node.body) + super().visit_block(node) def visit_func_def(self, node: FuncDef) -> None: node = self.fixup(node) @@ -331,6 +331,8 @@ def visit_var(self, node: Var) -> None: def visit_type_alias(self, node: TypeAlias) -> None: self.fixup_type(node.target) + for v in node.alias_tvars: + self.fixup_type(v) super().visit_type_alias(node) # Helpers @@ -358,7 +360,7 @@ def fixup_and_reset_typeinfo(self, node: TypeInfo) -> TypeInfo: # The subclass relationships may change, so reset all caches relevant to the # old MRO. 
new = cast(TypeInfo, self.replacements[node]) - TypeState.reset_all_subtype_caches_for(new) + type_state.reset_all_subtype_caches_for(new) return self.fixup(node) def fixup_type(self, typ: Type | None) -> None: diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index 1bfd820efb21..05af6a3d53a1 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -54,6 +54,7 @@ MypyFile, NameExpr, Node, + OpExpr, OverloadedFuncDef, RefExpr, StarExpr, @@ -65,7 +66,7 @@ ) from mypy.traverser import TraverserVisitor from mypy.types import CallableType -from mypy.typestate import TypeState +from mypy.typestate import type_state SavedAttributes: _TypeAlias = Dict[Tuple[ClassDef, str], SymbolTableNode] @@ -140,7 +141,9 @@ def visit_class_def(self, node: ClassDef) -> None: ] with self.enter_class(node.info): super().visit_class_def(node) - TypeState.reset_subtype_caches_for(node.info) + node.defs.body.extend(node.removed_statements) + node.removed_statements = [] + type_state.reset_subtype_caches_for(node.info) # Kill the TypeInfo, since there is none before semantic analysis. node.info = CLASSDEF_NO_INFO node.analyzed = None @@ -220,10 +223,14 @@ def visit_index_expr(self, node: IndexExpr) -> None: node.analyzed = None # May have been an alias or type application. super().visit_index_expr(node) + def visit_op_expr(self, node: OpExpr) -> None: + node.analyzed = None # May have been an alias + super().visit_op_expr(node) + def strip_ref_expr(self, node: RefExpr) -> None: node.kind = None node.node = None - node.fullname = None + node.fullname = "" node.is_new_def = False node.is_inferred_def = False diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 45d7947641da..50b66b70b8aa 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -172,7 +172,7 @@ class 'mod.Cls'. 
This can also refer to an attribute inherited from a UnpackType, get_proper_type, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.util import correct_relative_import @@ -289,13 +289,9 @@ def visit_decorator(self, o: Decorator) -> None: # all call sites, making them all `Any`. for d in o.decorators: tname: str | None = None - if isinstance(d, RefExpr) and d.fullname is not None: + if isinstance(d, RefExpr) and d.fullname: tname = d.fullname - if ( - isinstance(d, CallExpr) - and isinstance(d.callee, RefExpr) - and d.callee.fullname is not None - ): + if isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and d.callee.fullname: tname = d.callee.fullname if tname is not None: self.add_dependency(make_trigger(tname), make_trigger(o.func.fullname)) @@ -344,7 +340,7 @@ def process_type_info(self, info: TypeInfo) -> None: self.add_dependency( make_wildcard_trigger(base_info.fullname), target=make_trigger(target) ) - # More protocol dependencies are collected in TypeState._snapshot_protocol_deps + # More protocol dependencies are collected in type_state._snapshot_protocol_deps # after a full run or update is finished. 
self.add_type_alias_deps(self.scope.current_target()) @@ -500,7 +496,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: if ( isinstance(rvalue, CallExpr) and isinstance(rvalue.callee, RefExpr) - and rvalue.callee.fullname is not None + and rvalue.callee.fullname ): fname: str | None = None if isinstance(rvalue.callee.node, TypeInfo): @@ -510,7 +506,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: fname = init.node.fullname else: fname = rvalue.callee.fullname - if fname is None: + if not fname: return for lv in o.lvalues: if isinstance(lv, RefExpr) and lv.fullname and lv.is_new_def: @@ -638,7 +634,7 @@ def visit_del_stmt(self, o: DelStmt) -> None: # Expressions def process_global_ref_expr(self, o: RefExpr) -> None: - if o.fullname is not None: + if o.fullname: self.add_dependency(make_trigger(o.fullname)) # If this is a reference to a type, generate a dependency to its @@ -1123,7 +1119,7 @@ def dump_all_dependencies( deps = get_dependencies(node, type_map, python_version, options) for trigger, targets in deps.items(): all_deps.setdefault(trigger, set()).update(targets) - TypeState.add_all_protocol_deps(all_deps) + type_state.add_all_protocol_deps(all_deps) for trigger, targets in sorted(all_deps.items(), key=lambda x: x[0]): print(trigger) diff --git a/mypy/server/update.py b/mypy/server/update.py index 686068a4aad0..00b823c99dfd 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -151,13 +151,17 @@ semantic_analysis_for_scc, semantic_analysis_for_targets, ) -from mypy.server.astdiff import SnapshotItem, compare_symbol_table_snapshots, snapshot_symbol_table +from mypy.server.astdiff import ( + SymbolSnapshot, + compare_symbol_table_snapshots, + snapshot_symbol_table, +) from mypy.server.astmerge import merge_asts from mypy.server.aststrip import SavedAttributes, strip_target from mypy.server.deps import get_dependencies_of_target, merge_dependencies from mypy.server.target import trigger_to_target from mypy.server.trigger 
import WILDCARD_TAG, make_trigger -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.util import module_prefix, split_target MAX_ITER: Final = 1000 @@ -203,7 +207,10 @@ def __init__(self, result: BuildResult) -> None: self.processed_targets: list[str] = [] def update( - self, changed_modules: list[tuple[str, str]], removed_modules: list[tuple[str, str]] + self, + changed_modules: list[tuple[str, str]], + removed_modules: list[tuple[str, str]], + followed: bool = False, ) -> list[str]: """Update previous build result by processing changed modules. @@ -219,6 +226,7 @@ def update( Assume this is correct; it's not validated here. removed_modules: Modules that have been deleted since the previous update or removed from the build. + followed: If True, the modules were found through following imports Returns: A list of errors. @@ -256,7 +264,9 @@ def update( self.blocking_error = None while True: - result = self.update_one(changed_modules, initial_set, removed_set, blocking_error) + result = self.update_one( + changed_modules, initial_set, removed_set, blocking_error, followed + ) changed_modules, (next_id, next_path), blocker_messages = result if blocker_messages is not None: @@ -329,6 +339,7 @@ def update_one( initial_set: set[str], removed_set: set[str], blocking_error: str | None, + followed: bool, ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: """Process a module from the list of changed modules. 
@@ -355,7 +366,7 @@ def update_one( ) return changed_modules, (next_id, next_path), None - result = self.update_module(next_id, next_path, next_id in removed_set) + result = self.update_module(next_id, next_path, next_id in removed_set, followed) remaining, (next_id, next_path), blocker_messages = result changed_modules = [(id, path) for id, path in changed_modules if id != next_id] changed_modules = dedupe_modules(remaining + changed_modules) @@ -368,7 +379,7 @@ def update_one( return changed_modules, (next_id, next_path), blocker_messages def update_module( - self, module: str, path: str, force_removed: bool + self, module: str, path: str, force_removed: bool, followed: bool ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: """Update a single modified module. @@ -380,6 +391,7 @@ def update_module( path: File system path of the module force_removed: If True, consider module removed from the build even if path exists (used for removing an existing file from the build) + followed: Was this found via import following? Returns: Tuple with these items: @@ -409,7 +421,7 @@ def update_module( t0 = time.time() # Record symbol table snapshot of old version the changed module. 
- old_snapshots: dict[str, dict[str, SnapshotItem]] = {} + old_snapshots: dict[str, dict[str, SymbolSnapshot]] = {} if module in manager.modules: snapshot = snapshot_symbol_table(module, manager.modules[module].names) old_snapshots[module] = snapshot @@ -417,7 +429,7 @@ def update_module( manager.errors.reset() self.processed_targets.append(module) result = update_module_isolated( - module, path, manager, previous_modules, graph, force_removed + module, path, manager, previous_modules, graph, force_removed, followed ) if isinstance(result, BlockedUpdate): # Blocking error -- just give up @@ -552,6 +564,7 @@ def update_module_isolated( previous_modules: dict[str, str], graph: Graph, force_removed: bool, + followed: bool, ) -> UpdateResult: """Build a new version of one changed module only. @@ -575,7 +588,7 @@ def update_module_isolated( delete_module(module, path, graph, manager) return NormalUpdate(module, path, [], None) - sources = get_sources(manager.fscache, previous_modules, [(module, path)]) + sources = get_sources(manager.fscache, previous_modules, [(module, path)], followed) if module in manager.missing_modules: manager.missing_modules.remove(module) @@ -653,7 +666,7 @@ def restore(ids: list[str]) -> None: state.type_checker().reset() state.type_check_first_pass() state.type_check_second_pass() - state.detect_partially_defined_vars(state.type_map()) + state.detect_possibly_undefined_vars() t2 = time.time() state.finish_passes() t3 = time.time() @@ -728,18 +741,21 @@ def get_module_to_path_map(graph: Graph) -> dict[str, str]: def get_sources( - fscache: FileSystemCache, modules: dict[str, str], changed_modules: list[tuple[str, str]] + fscache: FileSystemCache, + modules: dict[str, str], + changed_modules: list[tuple[str, str]], + followed: bool, ) -> list[BuildSource]: sources = [] for id, path in changed_modules: if fscache.isfile(path): - sources.append(BuildSource(path, id, None)) + sources.append(BuildSource(path, id, None, followed=followed)) return 
sources def calculate_active_triggers( manager: BuildManager, - old_snapshots: dict[str, dict[str, SnapshotItem]], + old_snapshots: dict[str, dict[str, SymbolSnapshot]], new_modules: dict[str, MypyFile | None], ) -> set[str]: """Determine activated triggers by comparing old and new symbol tables. @@ -857,7 +873,7 @@ def propagate_changes_using_dependencies( # We need to do this to avoid false negatives if the protocol itself is # unchanged, but was marked stale because its sub- (or super-) type changed. for info in stale_protos: - TypeState.reset_subtype_caches_for(info) + type_state.reset_subtype_caches_for(info) # Then fully reprocess all targets. # TODO: Preserve order (set is not optimal) for id, nodes in sorted(todo.items(), key=lambda x: x[0]): @@ -1069,7 +1085,7 @@ def update_deps( for trigger, targets in new_deps.items(): deps.setdefault(trigger, set()).update(targets) # Merge also the newly added protocol deps (if any). - TypeState.update_protocol_deps(deps) + type_state.update_protocol_deps(deps) def lookup_target( diff --git a/mypy/solve.py b/mypy/solve.py index c9c7db1ae26c..b8304d29c1ce 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -18,7 +18,7 @@ UnionType, get_proper_type, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state def solve_constraints( @@ -54,7 +54,7 @@ def solve_constraints( if bottom is None: bottom = c.target else: - if TypeState.infer_unions: + if type_state.infer_unions: # This deviates from the general mypy semantics because # recursive types are union-heavy in 95% of cases. 
bottom = UnionType.make_union([bottom, c.target]) diff --git a/mypy/stats.py b/mypy/stats.py index af6c5fc14a50..b3a32c1ce72c 100644 --- a/mypy/stats.py +++ b/mypy/stats.py @@ -149,7 +149,7 @@ def visit_func_def(self, o: FuncDef) -> None: if o in o.expanded: print( "{}:{}: ERROR: cycle in function expansion; skipping".format( - self.filename, o.get_line() + self.filename, o.line ) ) return diff --git a/mypy/strconv.py b/mypy/strconv.py index 1acf7699316c..b2e9da5dbf6a 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -50,7 +50,7 @@ def dump(self, nodes: Sequence[object], obj: mypy.nodes.Context) -> str: number. See mypy.util.dump_tagged for a description of the nodes argument. """ - tag = short_type(obj) + ":" + str(obj.get_line()) + tag = short_type(obj) + ":" + str(obj.line) if self.show_ids: assert self.id_mapper is not None tag += f"<{self.get_id(obj)}>" @@ -276,6 +276,8 @@ def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> str: def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> str: a: list[Any] = [o.body] + if o.is_star: + a.append("*") for i in range(len(o.vars)): a.append(o.types[i]) @@ -365,7 +367,7 @@ def pretty_name( id = "" if isinstance(target_node, mypy.nodes.MypyFile) and name == fullname: n += id - elif kind == mypy.nodes.GDEF or (fullname != name and fullname is not None): + elif kind == mypy.nodes.GDEF or (fullname != name and fullname): # Append fully qualified name for global references. 
n += f" [{fullname}{id}]" elif kind == mypy.nodes.LDEF: @@ -411,6 +413,8 @@ def visit_call_expr(self, o: mypy.nodes.CallExpr) -> str: return self.dump(a + extra, o) def visit_op_expr(self, o: mypy.nodes.OpExpr) -> str: + if o.analyzed: + return o.analyzed.accept(self) return self.dump([o.op, o.left, o.right], o) def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> str: diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 28d9e69194a2..04865f816460 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -95,6 +95,7 @@ MemberExpr, MypyFile, NameExpr, + OpExpr, OverloadedFuncDef, Statement, StrExpr, @@ -404,6 +405,9 @@ def visit_list_expr(self, node: ListExpr) -> str: def visit_ellipsis(self, node: EllipsisExpr) -> str: return "..." + def visit_op_expr(self, o: OpExpr) -> str: + return f"{o.left.accept(self)} {o.op} {o.right.accept(self)}" + class ImportTracker: """Record necessary imports during stub generation.""" @@ -1323,10 +1327,7 @@ def is_private_name(self, name: str, fullname: str | None = None) -> bool: def is_private_member(self, fullname: str) -> bool: parts = fullname.split(".") - for part in parts: - if self.is_private_name(part): - return True - return False + return any(self.is_private_name(part) for part in parts) def get_str_type_of_node( self, rvalue: Expression, can_infer_optional: bool = False, can_be_any: bool = True diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py index b8dea5d0046b..15bd96d9f4b4 100644 --- a/mypy/stubinfo.py +++ b/mypy/stubinfo.py @@ -20,14 +20,11 @@ def stub_package_name(prefix: str) -> str: # Package name can have one or two components ('a' or 'a.b'). 
legacy_bundled_packages = { "aiofiles": "types-aiofiles", - "atomicwrites": "types-atomicwrites", - "attr": "types-attrs", "backports": "types-backports", "backports_abc": "types-backports_abc", "bleach": "types-bleach", "boto": "types-boto", "cachetools": "types-cachetools", - "chardet": "types-chardet", "click_spinner": "types-click-spinner", "contextvars": "types-contextvars", "croniter": "types-croniter", @@ -38,7 +35,6 @@ def stub_package_name(prefix: str) -> str: "decorator": "types-decorator", "deprecated": "types-Deprecated", "docutils": "types-docutils", - "emoji": "types-emoji", "first": "types-first", "geoip2": "types-geoip2", "gflags": "types-python-gflags", @@ -64,7 +60,6 @@ def stub_package_name(prefix: str) -> str: "six": "types-six", "slugify": "types-python-slugify", "tabulate": "types-tabulate", - "termcolor": "types-termcolor", "toml": "types-toml", "typed_ast": "types-typed-ast", "tzlocal": "types-tzlocal", @@ -83,10 +78,14 @@ def stub_package_name(prefix: str) -> str: # Note that these packages are omitted for now: # sqlalchemy: It's unclear which stub package to suggest. There's also # a mypy plugin available. +# pika: typeshed's stubs are on PyPI as types-pika-ts. +# types-pika already exists on PyPI, and is more complete in many ways, +# but is a non-typeshed stubs package. 
non_bundled_packages = { "MySQLdb": "types-mysqlclient", "PIL": "types-Pillow", "PyInstaller": "types-pyinstaller", + "Xlib": "types-python-xlib", "annoy": "types-annoy", "appdirs": "types-appdirs", "aws_xray_sdk": "types-aws-xray-sdk", @@ -100,9 +99,11 @@ def stub_package_name(prefix: str) -> str: "chevron": "types-chevron", "colorama": "types-colorama", "commonmark": "types-commonmark", - "cryptography": "types-cryptography", + "consolemenu": "types-console-menu", + "crontab": "types-python-crontab", "d3dshot": "types-D3DShot", "dj_database_url": "types-dj-database-url", + "dockerfile_parse": "types-dockerfile-parse", "docopt": "types-docopt", "editdistance": "types-editdistance", "entrypoints": "types-entrypoints", @@ -115,6 +116,7 @@ def stub_package_name(prefix: str) -> str: "flake8_simplify": "types-flake8-simplify", "flake8_typing_imports": "types-flake8-typing-imports", "flask_cors": "types-Flask-Cors", + "flask_migrate": "types-Flask-Migrate", "flask_sqlalchemy": "types-Flask-SQLAlchemy", "fpdf": "types-fpdf2", "gdb": "types-gdb", @@ -134,22 +136,28 @@ def stub_package_name(prefix: str) -> str: "oauthlib": "types-oauthlib", "openpyxl": "types-openpyxl", "opentracing": "types-opentracing", + "paho.mqtt": "types-paho-mqtt", "parsimonious": "types-parsimonious", "passlib": "types-passlib", "passpy": "types-passpy", + "peewee": "types-peewee", "pep8ext_naming": "types-pep8-naming", "playsound": "types-playsound", - "prettytable": "types-prettytable", "psutil": "types-psutil", "psycopg2": "types-psycopg2", "pyaudio": "types-pyaudio", "pyautogui": "types-PyAutoGUI", + "pycocotools": "types-pycocotools", "pyflakes": "types-pyflakes", "pygments": "types-Pygments", "pyi_splash": "types-pyinstaller", "pynput": "types-pynput", + "pythoncom": "types-pywin32", + "pythonwin": "types-pywin32", + "pyscreeze": "types-PyScreeze", "pysftp": "types-pysftp", "pytest_lazyfixture": "types-pytest-lazy-fixture", + "pywintypes": "types-pywin32", "regex": "types-regex", 
"send2trash": "types-Send2Trash", "slumber": "types-slumber", @@ -163,6 +171,12 @@ def stub_package_name(prefix: str) -> str: "urllib3": "types-urllib3", "vobject": "types-vobject", "whatthepatch": "types-whatthepatch", + "win32": "types-pywin32", + "win32api": "types-pywin32", + "win32con": "types-pywin32", + "win32com": "types-pywin32", + "win32comext": "types-pywin32", + "win32gui": "types-pywin32", "xmltodict": "types-xmltodict", "xxhash": "types-xxhash", "zxcvbn": "types-zxcvbn", diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 87ccbd3176df..774f03cbbdd0 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -15,6 +15,7 @@ import os import pkgutil import re +import symtable import sys import traceback import types @@ -29,11 +30,13 @@ import mypy.build import mypy.modulefinder +import mypy.nodes import mypy.state import mypy.types import mypy.version from mypy import nodes from mypy.config_parser import parse_config_file +from mypy.evalexpr import UNKNOWN, evaluate_expression from mypy.options import Options from mypy.util import FancyFormatter, bytes_to_human_readable_repr, is_dunder, plural_s @@ -203,7 +206,9 @@ def test_module(module_name: str) -> Iterator[Error]: try: runtime = silent_import_module(module_name) - except Exception as e: + except KeyboardInterrupt: + raise + except BaseException as e: yield Error([module_name], f"failed to import, {type(e).__name__}: {e}", stub, MISSING) return @@ -259,10 +264,10 @@ def _verify_exported_names( if not (names_in_runtime_not_stub or names_in_stub_not_runtime): return yield Error( - object_path, + object_path + ["__all__"], ( "names exported from the stub do not correspond to the names exported at runtime. " - "This is probably due to an inaccurate `__all__` in the stub or things being missing from the stub." 
+ "This is probably due to things being missing from the stub or an inaccurate `__all__` in the stub" ), # Pass in MISSING instead of the stub and runtime objects, as the line numbers aren't very # relevant here, and it makes for a prettier error message @@ -279,6 +284,36 @@ def _verify_exported_names( ) +def _get_imported_symbol_names(runtime: types.ModuleType) -> frozenset[str] | None: + """Retrieve the names in the global namespace which are known to be imported. + + 1). Use inspect to retrieve the source code of the module + 2). Use symtable to parse the source and retrieve names that are known to be imported + from other modules. + + If either of the above steps fails, return `None`. + + Note that if a set of names is returned, + it won't include names imported via `from foo import *` imports. + """ + try: + source = inspect.getsource(runtime) + except (OSError, TypeError, SyntaxError): + return None + + if not source.strip(): + # The source code for the module was an empty file, + # no point in parsing it with symtable + return frozenset() + + try: + module_symtable = symtable.symtable(source, runtime.__name__, "exec") + except SyntaxError: + return None + + return frozenset(sym.get_name() for sym in module_symtable.get_symbols() if sym.is_imported()) + + @verify.register(nodes.MypyFile) def verify_mypyfile( stub: nodes.MypyFile, runtime: MaybeMissing[types.ModuleType], object_path: list[str] @@ -308,15 +343,26 @@ def verify_mypyfile( if not o.module_hidden and (not is_probably_private(m) or hasattr(runtime, m)) } + imported_symbols = _get_imported_symbol_names(runtime) + def _belongs_to_runtime(r: types.ModuleType, attr: str) -> bool: + """Heuristics to determine whether a name originates from another module.""" obj = getattr(r, attr) - try: - obj_mod = getattr(obj, "__module__", None) - except Exception: + if isinstance(obj, types.ModuleType): return False - if obj_mod is not None: - return bool(obj_mod == r.__name__) - return not isinstance(obj, 
types.ModuleType) + if callable(obj): + # It's highly likely to be a class or a function if it's callable, + # so the __module__ attribute will give a good indication of which module it comes from + try: + obj_mod = obj.__module__ + except Exception: + pass + else: + if isinstance(obj_mod, str): + return bool(obj_mod == r.__name__) + if imported_symbols is not None: + return attr not in imported_symbols + return True runtime_public_contents = ( runtime_all_as_set @@ -325,8 +371,9 @@ def _belongs_to_runtime(r: types.ModuleType, attr: str) -> bool: m for m in dir(runtime) if not is_probably_private(m) - # Ensure that the object's module is `runtime`, since in the absence of __all__ we - # don't have a good way to detect re-exports at runtime. + # Filter out objects that originate from other modules (best effort). Note that in the + # absence of __all__, we don't have a way to detect explicit / intentional re-exports + # at runtime and _belongs_to_runtime(runtime, m) } ) @@ -354,7 +401,7 @@ def _verify_final( ) -> Iterator[Error]: try: - class SubClass(runtime): # type: ignore[misc,valid-type] + class SubClass(runtime): # type: ignore[misc] pass except TypeError: @@ -573,6 +620,23 @@ def _verify_arg_default_value( f"has a default value of type {runtime_type}, " f"which is incompatible with stub argument type {stub_type}" ) + if stub_arg.initializer is not None: + stub_default = evaluate_expression(stub_arg.initializer) + if ( + stub_default is not UNKNOWN + and stub_default is not ... + and ( + stub_default != runtime_arg.default + # We want the types to match exactly, e.g. in case the stub has + # True and the runtime has 1 (or vice versa). 
+ or type(stub_default) is not type(runtime_arg.default) # noqa: E721 + ) + ): + yield ( + f'runtime argument "{runtime_arg.name}" ' + f"has a default value of {runtime_arg.default!r}, " + f"which is different from stub argument default {stub_default!r}" + ) else: if stub_arg.kind.is_optional(): yield ( @@ -1083,7 +1147,7 @@ def apply_decorator_to_funcitem( ) -> nodes.FuncItem | None: if not isinstance(decorator, nodes.RefExpr): return None - if decorator.fullname is None: + if not decorator.fullname: # Happens with namedtuple return None if ( @@ -1224,6 +1288,8 @@ def verify_typealias( "__annotations__", "__path__", # mypy adds __path__ to packages, but C packages don't have it "__getattr__", # resulting behaviour might be typed explicitly + # Created by `warnings.warn`, does not make much sense to have in stubs: + "__warningregistry__", # TODO: remove the following from this list "__author__", "__version__", @@ -1479,7 +1545,9 @@ def build_stubs(modules: list[str], options: Options, find_submodules: bool = Fa for m in pkgutil.walk_packages(runtime.__path__, runtime.__name__ + ".") if m.name not in all_modules ) - except Exception: + except KeyboardInterrupt: + raise + except BaseException: pass if sources: @@ -1508,9 +1576,6 @@ def get_typeshed_stdlib_modules( stdlib_py_versions = mypy.modulefinder.load_stdlib_py_versions(custom_typeshed_dir) if version_info is None: version_info = sys.version_info[0:2] - # Typeshed's minimum supported Python 3 is Python 3.7 - if sys.version_info < (3, 7): - version_info = (3, 7) def exists_in_version(module: str) -> bool: assert version_info is not None diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 38fae16e7011..9b555480e59b 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -8,7 +8,7 @@ import mypy.constraints import mypy.typeops from mypy.erasetype import erase_type -from mypy.expandtype import expand_type_by_instance +from mypy.expandtype import expand_self_type, expand_type_by_instance from mypy.maptype import 
map_instance_to_supertype # Circular import; done in the function instead. @@ -27,6 +27,7 @@ from mypy.options import Options from mypy.state import state from mypy.types import ( + MYPYC_NATIVE_INT_NAMES, TUPLE_LIKE_INSTANCE_NAMES, TYPED_NAMEDTUPLE_NAMES, AnyType, @@ -57,10 +58,11 @@ UninhabitedType, UnionType, UnpackType, + _flattened, get_proper_type, is_named_instance, ) -from mypy.typestate import SubtypeKind, TypeState +from mypy.typestate import SubtypeKind, type_state from mypy.typevars import fill_typevars_with_any from mypy.typevartuples import extract_unpack, fully_split_with_mapped_and_template @@ -102,11 +104,7 @@ def check_context(self, proper_subtype: bool) -> None: # Historically proper and non-proper subtypes were defined using different helpers # and different visitors. Check if flag values are such that we definitely support. if proper_subtype: - assert ( - not self.ignore_type_params - and not self.ignore_pos_arg_names - and not self.ignore_declared_variance - ) + assert not self.ignore_pos_arg_names and not self.ignore_declared_variance else: assert not self.erase_instances and not self.keep_erased_types @@ -154,7 +152,7 @@ def is_subtype( options, } ), "Don't pass both context and individual flags" - if TypeState.is_assumed_subtype(left, right): + if type_state.is_assumed_subtype(left, right): return True if mypy.typeops.is_recursive_pair(left, right): # This case requires special care because it may cause infinite recursion. @@ -174,7 +172,7 @@ def is_subtype( # B = Union[int, Tuple[B, ...]] # When checking if A <: B we push pair (A, B) onto 'assuming' stack, then when after few # steps we come back to initial call is_subtype(A, B) and immediately return True. 
- with pop_on_exit(TypeState.get_assumptions(is_proper=False), left, right): + with pop_on_exit(type_state.get_assumptions(is_proper=False), left, right): return _is_subtype(left, right, subtype_context, proper_subtype=False) return _is_subtype(left, right, subtype_context, proper_subtype=False) @@ -215,11 +213,11 @@ def is_proper_subtype( ignore_uninhabited, } ), "Don't pass both context and individual flags" - if TypeState.is_assumed_proper_subtype(left, right): + if type_state.is_assumed_proper_subtype(left, right): return True if mypy.typeops.is_recursive_pair(left, right): # Same as for non-proper subtype, see detailed comment there for explanation. - with pop_on_exit(TypeState.get_assumptions(is_proper=True), left, right): + with pop_on_exit(type_state.get_assumptions(is_proper=True), left, right): return _is_subtype(left, right, subtype_context, proper_subtype=True) return _is_subtype(left, right, subtype_context, proper_subtype=True) @@ -289,18 +287,20 @@ def _is_subtype( # ErasedType as we do for non-proper subtyping. return True - def check_item(left: Type, right: Type, subtype_context: SubtypeContext) -> bool: - if proper_subtype: - return is_proper_subtype(left, right, subtype_context=subtype_context) - return is_subtype(left, right, subtype_context=subtype_context) - if isinstance(right, UnionType) and not isinstance(left, UnionType): # Normally, when 'left' is not itself a union, the only way # 'left' can be a subtype of the union 'right' is if it is a # subtype of one of the items making up the union. 
- is_subtype_of_item = any( - check_item(orig_left, item, subtype_context) for item in right.items - ) + if proper_subtype: + is_subtype_of_item = any( + is_proper_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + else: + is_subtype_of_item = any( + is_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) # Recombine rhs literal types, to make an enum type a subtype # of a union of all enum items as literal types. Only do it if # the previous check didn't succeed, since recombining can be @@ -312,9 +312,16 @@ def check_item(left: Type, right: Type, subtype_context: SubtypeContext) -> bool and (left.type.is_enum or left.type.fullname == "builtins.bool") ): right = UnionType(mypy.typeops.try_contracting_literals_in_union(right.items)) - is_subtype_of_item = any( - check_item(orig_left, item, subtype_context) for item in right.items - ) + if proper_subtype: + is_subtype_of_item = any( + is_proper_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + else: + is_subtype_of_item = any( + is_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) # However, if 'left' is a type variable T, T might also have # an upper bound which is itself a union. This case will be # handled below by the SubtypeVisitor. 
We have to check both @@ -330,34 +337,28 @@ def check_item(left: Type, right: Type, subtype_context: SubtypeContext) -> bool def check_type_parameter( - lefta: Type, righta: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext + left: Type, right: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext ) -> bool: - def check(left: Type, right: Type) -> bool: - return ( - is_proper_subtype(left, right, subtype_context=subtype_context) - if proper_subtype - else is_subtype(left, right, subtype_context=subtype_context) - ) - if variance == COVARIANT: - return check(lefta, righta) + if proper_subtype: + return is_proper_subtype(left, right, subtype_context=subtype_context) + else: + return is_subtype(left, right, subtype_context=subtype_context) elif variance == CONTRAVARIANT: - return check(righta, lefta) + if proper_subtype: + return is_proper_subtype(right, left, subtype_context=subtype_context) + else: + return is_subtype(right, left, subtype_context=subtype_context) else: if proper_subtype: # We pass ignore_promotions=False because it is a default for subtype checks. # The actual value will be taken from the subtype_context, and it is whatever # the original caller passed. 
return is_same_type( - lefta, righta, ignore_promotions=False, subtype_context=subtype_context + left, right, ignore_promotions=False, subtype_context=subtype_context ) - return is_equivalent(lefta, righta, subtype_context=subtype_context) - - -def ignore_type_parameter( - lefta: Type, righta: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext -) -> bool: - return True + else: + return is_equivalent(left, right, subtype_context=subtype_context) class SubtypeVisitor(TypeVisitor[bool]): @@ -366,9 +367,6 @@ def __init__(self, right: Type, subtype_context: SubtypeContext, proper_subtype: self.orig_right = right self.proper_subtype = proper_subtype self.subtype_context = subtype_context - self.check_type_parameter = ( - ignore_type_parameter if subtype_context.ignore_type_params else check_type_parameter - ) self.options = subtype_context.options self._subtype_kind = SubtypeVisitor.build_subtype_kind(subtype_context, proper_subtype) @@ -429,34 +427,30 @@ def visit_erased_type(self, left: ErasedType) -> bool: # This may be encountered during type inference. The result probably doesn't # matter much. # TODO: it actually does matter, figure out more principled logic about this. - if self.subtype_context.keep_erased_types: - return False - return True + return not self.subtype_context.keep_erased_types def visit_deleted_type(self, left: DeletedType) -> bool: return True def visit_instance(self, left: Instance) -> bool: if left.type.fallback_to_any and not self.proper_subtype: - if isinstance(self.right, NoneType): - # NOTE: `None` is a *non-subclassable* singleton, therefore no class - # can by a subtype of it, even with an `Any` fallback. - # This special case is needed to treat descriptors in classes with - # dynamic base classes correctly, see #5456. - return False - return True + # NOTE: `None` is a *non-subclassable* singleton, therefore no class + # can by a subtype of it, even with an `Any` fallback. 
+ # This special case is needed to treat descriptors in classes with + # dynamic base classes correctly, see #5456. + return not isinstance(self.right, NoneType) right = self.right if isinstance(right, TupleType) and mypy.typeops.tuple_fallback(right).type.is_enum: return self._is_subtype(left, mypy.typeops.tuple_fallback(right)) if isinstance(right, Instance): - if TypeState.is_cached_subtype_check(self._subtype_kind, left, right): + if type_state.is_cached_subtype_check(self._subtype_kind, left, right): return True if not self.subtype_context.ignore_promotions: for base in left.type.mro: if base._promote and any( self._is_subtype(p, self.right) for p in base._promote ): - TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) + type_state.record_subtype_cache_entry(self._subtype_kind, left, right) return True # Special case: Low-level integer types are compatible with 'int'. We can't # use promotions, since 'int' is already promoted to low-level integer types, @@ -485,7 +479,18 @@ def visit_instance(self, left: Instance) -> bool: t = erased nominal = True if right.type.has_type_var_tuple_type: - split_result = fully_split_with_mapped_and_template(left, right) + assert left.type.type_var_tuple_prefix is not None + assert left.type.type_var_tuple_suffix is not None + assert right.type.type_var_tuple_prefix is not None + assert right.type.type_var_tuple_suffix is not None + split_result = fully_split_with_mapped_and_template( + left.args, + left.type.type_var_tuple_prefix, + left.type.type_var_tuple_suffix, + right.args, + right.type.type_var_tuple_prefix, + right.type.type_var_tuple_suffix, + ) if split_result is None: return False @@ -513,11 +518,7 @@ def check_mixed( isinstance(unpacked_type, Instance) and unpacked_type.type.fullname == "builtins.tuple" ): - if not all( - is_equivalent(l, unpacked_type.args[0]) for l in compare_to - ): - return False - return True + return all(is_equivalent(l, unpacked_type.args[0]) for l in compare_to) if 
isinstance(unpacked_type, TypeVarTupleType): return False if isinstance(unpacked_type, AnyType): @@ -580,19 +581,24 @@ def check_mixed( ) else: type_params = zip(t.args, right.args, right.type.defn.type_vars) - for lefta, righta, tvar in type_params: - if isinstance(tvar, TypeVarType): - if not self.check_type_parameter( - lefta, righta, tvar.variance, self.proper_subtype, self.subtype_context - ): - nominal = False - else: - if not self.check_type_parameter( - lefta, righta, COVARIANT, self.proper_subtype, self.subtype_context - ): - nominal = False + if not self.subtype_context.ignore_type_params: + for lefta, righta, tvar in type_params: + if isinstance(tvar, TypeVarType): + if not check_type_parameter( + lefta, + righta, + tvar.variance, + self.proper_subtype, + self.subtype_context, + ): + nominal = False + else: + if not check_type_parameter( + lefta, righta, COVARIANT, self.proper_subtype, self.subtype_context + ): + nominal = False if nominal: - TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) + type_state.record_subtype_cache_entry(self._subtype_kind, left, right) return nominal if right.type.is_protocol and is_protocol_implementation( left, right, proper_subtype=self.proper_subtype @@ -639,6 +645,8 @@ def visit_param_spec(self, left: ParamSpecType) -> bool: and right.flavor == left.flavor ): return True + if isinstance(right, Parameters) and are_trivial_parameters(right): + return True return self._is_subtype(left.upper_bound, self.right) def visit_type_var_tuple(self, left: TypeVarTupleType) -> bool: @@ -686,13 +694,16 @@ def visit_callable_type(self, left: CallableType) -> bool: elif isinstance(right, Overloaded): return all(self._is_subtype(left, item) for item in right.items) elif isinstance(right, Instance): - if right.type.is_protocol and right.type.protocol_members == ["__call__"]: - # OK, a callable can implement a protocol with a single `__call__` member. 
+ if right.type.is_protocol and "__call__" in right.type.protocol_members: + # OK, a callable can implement a protocol with a `__call__` member. # TODO: we should probably explicitly exclude self-types in this case. call = find_member("__call__", right, left, is_operator=True) assert call is not None if self._is_subtype(left, call): - return True + if len(right.type.protocol_members) == 1: + return True + if is_protocol_implementation(left.fallback, right, skip=["__call__"]): + return True if right.type.is_protocol and left.is_type_obj(): ret_type = get_proper_type(left.ret_type) if isinstance(ret_type, TupleType): @@ -741,9 +752,8 @@ def visit_tuple_type(self, left: TupleType) -> bool: elif isinstance(right, TupleType): if len(left.items) != len(right.items): return False - for l, r in zip(left.items, right.items): - if not self._is_subtype(l, r): - return False + if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)): + return False rfallback = mypy.typeops.tuple_fallback(right) if is_named_instance(rfallback, "builtins.tuple"): # No need to verify fallback. This is useful since the calculated fallback @@ -752,9 +762,7 @@ def visit_tuple_type(self, left: TupleType) -> bool: # join(Union[int, C], Union[str, C]) == Union[int, str, C]. 
return True lfallback = mypy.typeops.tuple_fallback(left) - if not self._is_subtype(lfallback, rfallback): - return False - return True + return self._is_subtype(lfallback, rfallback) else: return False @@ -803,12 +811,15 @@ def visit_literal_type(self, left: LiteralType) -> bool: def visit_overloaded(self, left: Overloaded) -> bool: right = self.right if isinstance(right, Instance): - if right.type.is_protocol and right.type.protocol_members == ["__call__"]: + if right.type.is_protocol and "__call__" in right.type.protocol_members: # same as for CallableType call = find_member("__call__", right, left, is_operator=True) assert call is not None if self._is_subtype(left, call): - return True + if len(right.type.protocol_members) == 1: + return True + if is_protocol_implementation(left.fallback, right, skip=["__call__"]): + return True return self._is_subtype(left.fallback, right) elif isinstance(right, CallableType): for item in left.items: @@ -823,9 +834,8 @@ def visit_overloaded(self, left: Overloaded) -> bool: # Ensure each overload in the right side (the supertype) is accounted for. previous_match_left_index = -1 matched_overloads = set() - possible_invalid_overloads = set() - for right_index, right_item in enumerate(right.items): + for right_item in right.items: found_match = False for left_index, left_item in enumerate(left.items): @@ -834,43 +844,36 @@ def visit_overloaded(self, left: Overloaded) -> bool: # Order matters: we need to make sure that the index of # this item is at least the index of the previous one. if subtype_match and previous_match_left_index <= left_index: - if not found_match: - # Update the index of the previous match. 
- previous_match_left_index = left_index - found_match = True - matched_overloads.add(left_item) - possible_invalid_overloads.discard(left_item) + previous_match_left_index = left_index + found_match = True + matched_overloads.add(left_index) + break else: # If this one overlaps with the supertype in any way, but it wasn't # an exact match, then it's a potential error. strict_concat = self.options.strict_concatenate if self.options else True - if is_callable_compatible( - left_item, - right_item, - is_compat=self._is_subtype, - ignore_return=True, - ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, - strict_concatenate=strict_concat, - ) or is_callable_compatible( - right_item, - left_item, - is_compat=self._is_subtype, - ignore_return=True, - ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, - strict_concatenate=strict_concat, + if left_index not in matched_overloads and ( + is_callable_compatible( + left_item, + right_item, + is_compat=self._is_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) + or is_callable_compatible( + right_item, + left_item, + is_compat=self._is_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) ): - # If this is an overload that's already been matched, there's no - # problem. - if left_item not in matched_overloads: - possible_invalid_overloads.add(left_item) + return False if not found_match: return False - - if possible_invalid_overloads: - # There were potentially invalid overloads that were never matched to the - # supertype. 
- return False return True elif isinstance(right, UnboundType): return True @@ -897,6 +900,35 @@ def visit_union_type(self, left: UnionType) -> bool: if not self._is_subtype(item, self.orig_right): return False return True + + elif isinstance(self.right, UnionType): + # prune literals early to avoid nasty quadratic behavior which would otherwise arise when checking + # subtype relationships between slightly different narrowings of an Enum + # we achieve O(N+M) instead of O(N*M) + + fast_check: set[ProperType] = set() + + for item in _flattened(self.right.relevant_items()): + p_item = get_proper_type(item) + if isinstance(p_item, LiteralType): + fast_check.add(p_item) + elif isinstance(p_item, Instance): + if p_item.last_known_value is None: + fast_check.add(p_item) + else: + fast_check.add(p_item.last_known_value) + + for item in left.relevant_items(): + p_item = get_proper_type(item) + if p_item in fast_check: + continue + lit_type = mypy.typeops.simple_literal_type(p_item) + if lit_type in fast_check: + continue + if not self._is_subtype(item, self.orig_right): + return False + return True + return all(self._is_subtype(item, self.orig_right) for item in left.items) def visit_partial_type(self, left: PartialType) -> bool: @@ -957,7 +989,11 @@ def pop_on_exit(stack: list[tuple[T, T]], left: T, right: T) -> Iterator[None]: def is_protocol_implementation( - left: Instance, right: Instance, proper_subtype: bool = False, class_obj: bool = False + left: Instance, + right: Instance, + proper_subtype: bool = False, + class_obj: bool = False, + skip: list[str] | None = None, ) -> bool: """Check whether 'left' implements the protocol 'right'. @@ -977,10 +1013,13 @@ def f(self) -> A: ... as well. """ assert right.type.is_protocol + if skip is None: + skip = [] # We need to record this check to generate protocol fine-grained dependencies. 
- TypeState.record_protocol_subtype_check(left.type, right.type) + type_state.record_protocol_subtype_check(left.type, right.type) # nominal subtyping currently ignores '__init__' and '__new__' signatures members_not_to_check = {"__init__", "__new__"} + members_not_to_check.update(skip) # Trivial check that circumvents the bug described in issue 9771: if left.type.is_protocol: members_right = set(right.type.protocol_members) - members_not_to_check @@ -1011,6 +1050,10 @@ def named_type(fullname: str) -> Instance: subtype: ProperType | None = mypy.checkmember.type_object_type( left.type, named_type ) + elif member == "__call__" and left.type.is_metaclass(): + # Special case: we want to avoid falling back to metaclass __call__ + # if constructor signature didn't match, this can cause many false negatives. + subtype = None else: subtype = get_proper_type(find_member(member, left, left, class_obj=class_obj)) # Useful for debugging: @@ -1046,7 +1089,10 @@ def named_type(fullname: str) -> Instance: if not is_subtype(supertype, subtype): return False if not class_obj: - if (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): + if IS_SETTABLE not in superflags: + if IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags: + return False + elif (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): return False else: if IS_VAR in superflags and IS_CLASSVAR not in subflags: @@ -1070,7 +1116,7 @@ def named_type(fullname: str) -> Instance: subtype_context=SubtypeContext(ignore_pos_arg_names=ignore_names), proper_subtype=proper_subtype, ) - TypeState.record_subtype_cache_entry(subtype_kind, left, right) + type_state.record_subtype_cache_entry(subtype_kind, left, right) return True @@ -1122,7 +1168,7 @@ def find_member( if isinstance(getattr_type, CallableType): return getattr_type.ret_type return getattr_type - if itype.type.fallback_to_any: + if itype.type.fallback_to_any or class_obj and itype.type.meta_fallback_to_any: return AnyType(TypeOfAny.special_form) if 
isinstance(v, TypeInfo): # PEP 544 doesn't specify anything about such use cases. So we just try @@ -1202,6 +1248,8 @@ def find_node_type( ) else: typ = node.type + if typ is not None: + typ = expand_self_type(node, typ, subtype) p_typ = get_proper_type(typ) if typ is None: return AnyType(TypeOfAny.from_error) @@ -1358,7 +1406,7 @@ def g(x: int) -> int: ... ignore_pos_arg_names = True # Non-type cannot be a subtype of type. - if right.is_type_obj() and not left.is_type_obj(): + if right.is_type_obj() and not left.is_type_obj() and not allow_partial_overlap: return False # A callable L is a subtype of a generic callable R if L is a @@ -1376,8 +1424,7 @@ def g(x: int) -> int: ... unified = unify_generic_callable(left, right, ignore_return=ignore_return) if unified is None: return False - else: - left = unified + left = unified # If we allow partial overlaps, we don't need to leave R generic: # if we can find even just a single typevar assignment which @@ -1413,6 +1460,18 @@ def g(x: int) -> int: ... ) +def are_trivial_parameters(param: Parameters | NormalizedCallableType) -> bool: + param_star = param.var_arg() + param_star2 = param.kw_arg() + return ( + param.arg_kinds == [ARG_STAR, ARG_STAR2] + and param_star is not None + and isinstance(get_proper_type(param_star.typ), AnyType) + and param_star2 is not None + and isinstance(get_proper_type(param_star2.typ), AnyType) + ) + + def are_parameters_compatible( left: Parameters | NormalizedCallableType, right: Parameters | NormalizedCallableType, @@ -1433,13 +1492,7 @@ def are_parameters_compatible( right_star2 = right.kw_arg() # Treat "def _(*a: Any, **kw: Any) -> X" similarly to "Callable[..., X]" - if ( - right.arg_kinds == [ARG_STAR, ARG_STAR2] - and right_star - and isinstance(get_proper_type(right_star.typ), AnyType) - and right_star2 - and isinstance(get_proper_type(right_star2.typ), AnyType) - ): + if are_trivial_parameters(right): return True # Match up corresponding arguments and check them for compatibility. 
In @@ -1672,8 +1725,12 @@ def report(*args: Any) -> None: nonlocal had_errors had_errors = True + # This function may be called by the solver, so we need to allow erased types here. + # We anyway allow checking subtyping between other types containing + # (probably also because solver needs subtyping). See also comment in + # ExpandTypeVisitor.visit_erased_type(). applied = mypy.applytype.apply_generic_arguments( - type, non_none_inferred_vars, report, context=target + type, non_none_inferred_vars, report, context=target, allow_erased_callables=True ) if had_errors: return None @@ -1737,14 +1794,19 @@ def covers_at_runtime(item: Type, supertype: Type) -> bool: erase_type(item), supertype, ignore_promotions=True, erase_instances=True ): return True - if isinstance(supertype, Instance) and supertype.type.is_protocol: - # TODO: Implement more robust support for runtime isinstance() checks, see issue #3827. - if is_proper_subtype(item, supertype, ignore_promotions=True): - return True - if isinstance(item, TypedDictType) and isinstance(supertype, Instance): - # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). - if supertype.type.fullname == "builtins.dict": - return True + if isinstance(supertype, Instance): + if supertype.type.is_protocol: + # TODO: Implement more robust support for runtime isinstance() checks, see issue #3827. + if is_proper_subtype(item, supertype, ignore_promotions=True): + return True + if isinstance(item, TypedDictType): + # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). + if supertype.type.fullname == "builtins.dict": + return True + elif isinstance(item, Instance) and supertype.type.fullname == "builtins.int": + # "int" covers all native int types + if item.type.fullname in MYPYC_NATIVE_INT_NAMES: + return True # TODO: Add more special cases. 
return False diff --git a/mypy/test/data.py b/mypy/test/data.py index f4cb39818b4e..c6f671b2d401 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -81,13 +81,12 @@ def parse_test_case(case: DataDrivenTestCase) -> None: output_files.append((file_entry[0], re.compile(file_entry[1].rstrip(), re.S))) else: output_files.append(file_entry) - elif item.id in ("builtins", "builtins_py2"): + elif item.id == "builtins": # Use an alternative stub file for the builtins module. assert item.arg is not None mpath = join(os.path.dirname(case.file), item.arg) - fnam = "builtins.pyi" if item.id == "builtins" else "__builtin__.pyi" with open(mpath, encoding="utf8") as f: - files.append((join(base_path, fnam), f.read())) + files.append((join(base_path, "builtins.pyi"), f.read())) elif item.id == "typing": # Use an alternative stub file for the typing module. assert item.arg is not None diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index 8bee8073bd16..145027404ff7 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -282,8 +282,14 @@ def num_skipped_suffix_lines(a1: list[str], a2: list[str]) -> int: def testfile_pyversion(path: str) -> tuple[int, int]: - if path.endswith("python310.test"): + if path.endswith("python311.test"): + return 3, 11 + elif path.endswith("python310.test"): return 3, 10 + elif path.endswith("python39.test"): + return 3, 9 + elif path.endswith("python38.test"): + return 3, 8 else: return defaults.PYTHON3_VERSION diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 442e25b54ff2..4fe2ee6393c0 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -44,6 +44,8 @@ typecheck_files.remove("check-python39.test") if sys.version_info < (3, 10): typecheck_files.remove("check-python310.test") +if sys.version_info < (3, 11): + typecheck_files.remove("check-python311.test") # Special tests for platforms with case-insensitive filesystems. 
if sys.platform not in ("darwin", "win32"): diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py index 268b6bab1ec2..2e8b0dc9a1cd 100644 --- a/mypy/test/testcmdline.py +++ b/mypy/test/testcmdline.py @@ -69,12 +69,10 @@ def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: env["PYTHONPATH"] = PREFIX if os.path.isdir(extra_path): env["PYTHONPATH"] += os.pathsep + extra_path + cwd = os.path.join(test_temp_dir, custom_cwd or "") + args = [arg.replace("$CWD", os.path.abspath(cwd)) for arg in args] process = subprocess.Popen( - fixed + args, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=os.path.join(test_temp_dir, custom_cwd or ""), - env=env, + fixed + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env ) outb, errb = process.communicate() result = process.returncode diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py index 6b8f596dd605..b46f31327150 100644 --- a/mypy/test/testconstraints.py +++ b/mypy/test/testconstraints.py @@ -5,7 +5,7 @@ from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints from mypy.test.helpers import Suite from mypy.test.typefixture import TypeFixture -from mypy.types import Instance, TupleType, TypeList, UnpackType +from mypy.types import Instance, TupleType, UnpackType class ConstraintsSuite(Suite): @@ -27,13 +27,19 @@ def test_basic_type_var_tuple_subtype(self) -> None: fx = self.fx assert infer_constraints( Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUBTYPE_OF - ) == [Constraint(type_var=fx.ts, op=SUBTYPE_OF, target=TypeList([fx.a, fx.b]))] + ) == [ + Constraint(type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple)) + ] def test_basic_type_var_tuple(self) -> None: fx = self.fx assert infer_constraints( Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUPERTYPE_OF - ) == [Constraint(type_var=fx.ts, op=SUPERTYPE_OF, target=TypeList([fx.a, fx.b]))] + ) == [ + 
Constraint( + type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple) + ) + ] def test_type_var_tuple_with_prefix_and_suffix(self) -> None: fx = self.fx @@ -45,7 +51,9 @@ def test_type_var_tuple_with_prefix_and_suffix(self) -> None: ) ) == { Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), - Constraint(type_var=fx.ts, op=SUPERTYPE_OF, target=TypeList([fx.b, fx.c])), + Constraint( + type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.b, fx.c], fx.std_tuple) + ), Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.d), } @@ -143,3 +151,11 @@ def test_unpack_tuple_length_non_match(self) -> None: Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a), Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), } + + def test_var_length_tuple_with_fixed_length_tuple(self) -> None: + fx = self.fx + assert not infer_constraints( + TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])), + Instance(fx.std_tuplei, [fx.a]), + SUPERTYPE_OF, + ) diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py index ae1c613f7563..3343762cfaaf 100644 --- a/mypy/test/testdeps.py +++ b/mypy/test/testdeps.py @@ -15,7 +15,7 @@ from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, find_test_files, parse_options from mypy.types import Type -from mypy.typestate import TypeState +from mypy.typestate import type_state # Only dependencies in these modules are dumped dumped_modules = ["__main__", "pkg", "pkg.mod"] @@ -54,7 +54,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: for source in new_deps: deps[source].update(new_deps[source]) - TypeState.add_all_protocol_deps(deps) + type_state.add_all_protocol_deps(deps) for source, targets in sorted(deps.items()): if source.startswith((" None: for f in result.files.values(): for n in f.names.values(): if isinstance(n.node, TypeInfo): - assert n.fullname is not None + assert n.fullname typeinfos[n.fullname] = n.node # The output is the 
symbol table converted into a string. diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 5a6904bfaaf4..5e59d8efec63 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -302,7 +302,7 @@ def test_arg_kind(self) -> Iterator[Case]: ) @collect_cases - def test_default_value(self) -> Iterator[Case]: + def test_default_presence(self) -> Iterator[Case]: yield Case( stub="def f1(text: str = ...) -> None: ...", runtime="def f1(text = 'asdf'): pass", @@ -336,6 +336,59 @@ def f6(text: _T = ...) -> None: ... error="f6", ) + @collect_cases + def test_default_value(self) -> Iterator[Case]: + yield Case( + stub="def f1(text: str = 'x') -> None: ...", + runtime="def f1(text = 'y'): pass", + error="f1", + ) + yield Case( + stub='def f2(text: bytes = b"x\'") -> None: ...', + runtime='def f2(text = b"x\'"): pass', + error=None, + ) + yield Case( + stub='def f3(text: bytes = b"y\'") -> None: ...', + runtime='def f3(text = b"x\'"): pass', + error="f3", + ) + yield Case( + stub="def f4(text: object = 1) -> None: ...", + runtime="def f4(text = 1.0): pass", + error="f4", + ) + yield Case( + stub="def f5(text: object = True) -> None: ...", + runtime="def f5(text = 1): pass", + error="f5", + ) + yield Case( + stub="def f6(text: object = True) -> None: ...", + runtime="def f6(text = True): pass", + error=None, + ) + yield Case( + stub="def f7(text: object = not True) -> None: ...", + runtime="def f7(text = False): pass", + error=None, + ) + yield Case( + stub="def f8(text: object = not True) -> None: ...", + runtime="def f8(text = True): pass", + error="f8", + ) + yield Case( + stub="def f9(text: object = {1: 2}) -> None: ...", + runtime="def f9(text = {1: 3}): pass", + error="f9", + ) + yield Case( + stub="def f10(text: object = [1, 2]) -> None: ...", + runtime="def f10(text = [1, 2]): pass", + error=None, + ) + @collect_cases def test_static_class_method(self) -> Iterator[Case]: yield Case( @@ -975,7 +1028,7 @@ def 
test_all_in_stub_not_at_runtime(self) -> Iterator[Case]: @collect_cases def test_all_in_stub_different_to_all_at_runtime(self) -> Iterator[Case]: - # We *should* emit an error with the module name itself, + # We *should* emit an error with the module name itself + __all__, # if the stub *does* define __all__, # but the stub's __all__ is inconsistent with the runtime's __all__ yield Case( @@ -987,7 +1040,7 @@ def test_all_in_stub_different_to_all_at_runtime(self) -> Iterator[Case]: __all__ = [] foo = 'foo' """, - error="", + error="__all__", ) @collect_cases @@ -1029,6 +1082,9 @@ def test_missing_no_runtime_all(self) -> Iterator[Case]: yield Case(stub="", runtime="import sys", error=None) yield Case(stub="", runtime="def g(): ...", error="g") yield Case(stub="", runtime="CONSTANT = 0", error="CONSTANT") + yield Case(stub="", runtime="import re; constant = re.compile('foo')", error="constant") + yield Case(stub="", runtime="from json.scanner import NUMBER_RE", error=None) + yield Case(stub="", runtime="from string import ascii_letters", error=None) @collect_cases def test_non_public_1(self) -> Iterator[Case]: diff --git a/mypy/test/testtypegen.py b/mypy/test/testtypegen.py index db155a337980..22ef4272e933 100644 --- a/mypy/test/testtypegen.py +++ b/mypy/test/testtypegen.py @@ -7,7 +7,7 @@ from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource -from mypy.nodes import NameExpr +from mypy.nodes import NameExpr, TempNode from mypy.options import Options from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite @@ -54,6 +54,8 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: # Filter nodes that should be included in the output. 
keys = [] for node in nodes: + if isinstance(node, TempNode): + continue if node.line != -1 and map[node]: if ignore_node(node) or node in ignored: continue diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 31bdd6690a7a..ee0256e2057a 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -31,6 +31,7 @@ UninhabitedType, UnionType, get_proper_type, + has_recursive_types, ) @@ -157,6 +158,13 @@ def test_type_alias_expand_all(self) -> None: [self.fx.a, self.fx.a], Instance(self.fx.std_tuplei, [self.fx.a]) ) + def test_recursive_nested_in_non_recursive(self) -> None: + A, _ = self.fx.def_alias_1(self.fx.a) + T = TypeVarType("T", "T", -1, [], self.fx.o) + NA = self.fx.non_rec_alias(Instance(self.fx.gi, [T]), [T], [A]) + assert not NA.is_recursive + assert has_recursive_types(NA) + def test_indirection_no_infinite_recursion(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) visitor = TypeIndirectionVisitor() diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index 380da909893a..d12e7abab0e2 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -56,9 +56,6 @@ def make_type_var( ) -> TypeVarType: return TypeVarType(name, name, id, values, upper_bound, variance) - def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType: - return TypeVarTupleType(name, name, id, upper_bound) - self.t = make_type_var("T", 1, [], self.o, variance) # T`1 (type variable) self.tf = make_type_var("T", -1, [], self.o, variance) # T`-1 (type variable) self.tf2 = make_type_var("T", -2, [], self.o, variance) # T`-2 (type variable) @@ -68,10 +65,6 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy self.sf1 = make_type_var("S", -1, [], self.o, variance) # S`-1 (type variable) self.u = make_type_var("U", 3, [], self.o, variance) # U`3 (type variable) - self.ts = make_type_var_tuple("Ts", 1, self.o) # Ts`1 (type var tuple) - self.ss = make_type_var_tuple("Ss", 2, self.o) # Ss`2 (type 
var tuple) - self.us = make_type_var_tuple("Us", 3, self.o) # Us`3 (type var tuple) - # Simple types self.anyt = AnyType(TypeOfAny.special_form) self.nonet = NoneType() @@ -133,10 +126,6 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy bases=[Instance(self.gi, [self.s1])], ) - self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0) - self.gv2i = self.make_type_info( - "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 - ) # list[T] self.std_listi = self.make_type_info( "builtins.list", mro=[self.oi], typevars=["T"], variances=[variance] @@ -218,6 +207,18 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy self._add_bool_dunder(self.bool_type_info) self._add_bool_dunder(self.ai) + def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType: + return TypeVarTupleType(name, name, id, upper_bound, self.std_tuple) + + self.ts = make_type_var_tuple("Ts", 1, self.o) # Ts`1 (type var tuple) + self.ss = make_type_var_tuple("Ss", 2, self.o) # Ss`2 (type var tuple) + self.us = make_type_var_tuple("Us", 3, self.o) # Us`3 (type var tuple) + + self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0) + self.gv2i = self.make_type_info( + "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 + ) + def _add_bool_dunder(self, type_info: TypeInfo) -> None: signature = CallableType([], [], [], Instance(self.bool_type_info, []), self.function) bool_func = FuncDef("__bool__", [], Block([])) @@ -296,7 +297,7 @@ def make_type_info( v: list[TypeVarLikeType] = [] for id, n in enumerate(typevars, 1): if typevar_tuple_index is not None and id - 1 == typevar_tuple_index: - v.append(TypeVarTupleType(n, n, id, self.o)) + v.append(TypeVarTupleType(n, n, id, self.o, self.std_tuple)) else: if variances: variance = variances[id - 1] @@ -339,9 +340,16 @@ def def_alias_2(self, base: Instance) -> 
tuple[TypeAliasType, Type]: A.alias = AN return A, target - def non_rec_alias(self, target: Type) -> TypeAliasType: - AN = TypeAlias(target, "__main__.A", -1, -1) - return TypeAliasType(AN, []) + def non_rec_alias( + self, + target: Type, + alias_tvars: list[TypeVarLikeType] | None = None, + args: list[Type] | None = None, + ) -> TypeAliasType: + AN = TypeAlias(target, "__main__.A", -1, -1, alias_tvars=alias_tvars) + if args is None: + args = [] + return TypeAliasType(AN, args) class InterfaceTypeFixture(TypeFixture): diff --git a/mypy/traverser.py b/mypy/traverser.py index 3c4f21601b88..378d44c67f47 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -262,6 +262,8 @@ def visit_call_expr(self, o: CallExpr) -> None: def visit_op_expr(self, o: OpExpr) -> None: o.left.accept(self) o.right.accept(self) + if o.analyzed is not None: + o.analyzed.accept(self) def visit_comparison_expr(self, o: ComparisonExpr) -> None: for operand in o.operands: diff --git a/mypy/treetransform.py b/mypy/treetransform.py index d7f159d02a22..535f50d5cf5e 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -373,7 +373,7 @@ def visit_raise_stmt(self, node: RaiseStmt) -> RaiseStmt: return RaiseStmt(self.optional_expr(node.expr), self.optional_expr(node.from_expr)) def visit_try_stmt(self, node: TryStmt) -> TryStmt: - return TryStmt( + new = TryStmt( self.block(node.body), self.optional_names(node.vars), self.optional_expressions(node.types), @@ -381,6 +381,8 @@ def visit_try_stmt(self, node: TryStmt) -> TryStmt: self.optional_block(node.else_body), self.optional_block(node.finally_body), ) + new.is_star = node.is_star + return new def visit_with_stmt(self, node: WithStmt) -> WithStmt: new = WithStmt( @@ -517,7 +519,12 @@ def visit_call_expr(self, node: CallExpr) -> CallExpr: ) def visit_op_expr(self, node: OpExpr) -> OpExpr: - new = OpExpr(node.op, self.expr(node.left), self.expr(node.right)) + new = OpExpr( + node.op, + self.expr(node.left), + self.expr(node.right), + 
cast(Optional[TypeAliasExpr], self.optional_expr(node.analyzed)), + ) new.method_type = self.optional_type(node.method_type) return new @@ -548,7 +555,7 @@ def visit_super_expr(self, node: SuperExpr) -> SuperExpr: return new def visit_assignment_expr(self, node: AssignmentExpr) -> AssignmentExpr: - return AssignmentExpr(node.target, node.value) + return AssignmentExpr(self.expr(node.target), self.expr(node.value)) def visit_unary_expr(self, node: UnaryExpr) -> UnaryExpr: new = UnaryExpr(node.op, self.expr(node.expr)) @@ -646,7 +653,11 @@ def visit_paramspec_expr(self, node: ParamSpecExpr) -> ParamSpecExpr: def visit_type_var_tuple_expr(self, node: TypeVarTupleExpr) -> TypeVarTupleExpr: return TypeVarTupleExpr( - node.name, node.fullname, self.type(node.upper_bound), variance=node.variance + node.name, + node.fullname, + self.type(node.upper_bound), + node.tuple_fallback, + variance=node.variance, ) def visit_type_alias_expr(self, node: TypeAliasExpr) -> TypeAliasExpr: diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py index f926d0dfb883..9b432d8e68ec 100644 --- a/mypy/tvar_scope.py +++ b/mypy/tvar_scope.py @@ -115,6 +115,7 @@ def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeType: tvar_expr.fullname, i, upper_bound=tvar_expr.upper_bound, + tuple_fallback=tvar_expr.tuple_fallback, line=tvar_expr.line, column=tvar_expr.column, ) @@ -128,7 +129,7 @@ def bind_existing(self, tvar_def: TypeVarLikeType) -> None: def get_binding(self, item: str | SymbolTableNode) -> TypeVarLikeType | None: fullname = item.fullname if isinstance(item, SymbolTableNode) else item - assert fullname is not None + assert fullname if fullname in self.scope: return self.scope[fullname] elif self.parent is not None: diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index fe404cda0bec..c5324357117b 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -15,6 +15,7 @@ from abc import abstractmethod from typing import Any, Callable, Generic, Iterable, Sequence, 
TypeVar, cast +from typing_extensions import Final from mypy_extensions import mypyc_attr, trait @@ -314,7 +315,7 @@ class TypeQuery(SyntheticTypeVisitor[T]): # TODO: check that we don't have existing violations of this rule. """ - def __init__(self, strategy: Callable[[Iterable[T]], T]) -> None: + def __init__(self, strategy: Callable[[list[T]], T]) -> None: self.strategy = strategy # Keep track of the type aliases already visited. This is needed to avoid # infinite recursion on types like A = Union[int, List[A]]. @@ -404,24 +405,171 @@ def visit_placeholder_type(self, t: PlaceholderType) -> T: return self.query_types(t.args) def visit_type_alias_type(self, t: TypeAliasType) -> T: + # Skip type aliases already visited types to avoid infinite recursion. + # TODO: Ideally we should fire subvisitors here (or use caching) if we care + # about duplicates. + if t in self.seen_aliases: + return self.strategy([]) + self.seen_aliases.add(t) if self.skip_alias_target: return self.query_types(t.args) return get_proper_type(t).accept(self) def query_types(self, types: Iterable[Type]) -> T: - """Perform a query for a list of types. + """Perform a query for a list of types using the strategy to combine the results.""" + return self.strategy([t.accept(self) for t in types]) - Use the strategy to combine the results. - Skip type aliases already visited types to avoid infinite recursion. + +# Return True if at least one type component returns True +ANY_STRATEGY: Final = 0 +# Return True if no type component returns False +ALL_STRATEGY: Final = 1 + + +class BoolTypeQuery(SyntheticTypeVisitor[bool]): + """Visitor for performing recursive queries of types with a bool result. + + Use TypeQuery if you need non-bool results. + + 'strategy' is used to combine results for a series of types. It must + be ANY_STRATEGY or ALL_STRATEGY. 
+ + Note: This visitor keeps an internal state (tracks type aliases to avoid + recursion), so it should *never* be re-used for querying different types + unless you call reset() first. + """ + + def __init__(self, strategy: int) -> None: + self.strategy = strategy + if strategy == ANY_STRATEGY: + self.default = False + else: + assert strategy == ALL_STRATEGY + self.default = True + # Keep track of the type aliases already visited. This is needed to avoid + # infinite recursion on types like A = Union[int, List[A]]. An empty set is + # represented as None as a micro-optimization. + self.seen_aliases: set[TypeAliasType] | None = None + # By default, we eagerly expand type aliases, and query also types in the + # alias target. In most cases this is a desired behavior, but we may want + # to skip targets in some cases (e.g. when collecting type variables). + self.skip_alias_target = False + + def reset(self) -> None: + """Clear mutable state (but preserve strategy). + + This *must* be called if you want to reuse the visitor. """ - res: list[T] = [] - for t in types: - if isinstance(t, TypeAliasType): - # Avoid infinite recursion for recursive type aliases. - # TODO: Ideally we should fire subvisitors here (or use caching) if we care - # about duplicates. 
- if t in self.seen_aliases: - continue - self.seen_aliases.add(t) - res.append(t.accept(self)) - return self.strategy(res) + self.seen_aliases = None + + def visit_unbound_type(self, t: UnboundType) -> bool: + return self.query_types(t.args) + + def visit_type_list(self, t: TypeList) -> bool: + return self.query_types(t.items) + + def visit_callable_argument(self, t: CallableArgument) -> bool: + return t.typ.accept(self) + + def visit_any(self, t: AnyType) -> bool: + return self.default + + def visit_uninhabited_type(self, t: UninhabitedType) -> bool: + return self.default + + def visit_none_type(self, t: NoneType) -> bool: + return self.default + + def visit_erased_type(self, t: ErasedType) -> bool: + return self.default + + def visit_deleted_type(self, t: DeletedType) -> bool: + return self.default + + def visit_type_var(self, t: TypeVarType) -> bool: + return self.query_types([t.upper_bound] + t.values) + + def visit_param_spec(self, t: ParamSpecType) -> bool: + return self.default + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + return self.default + + def visit_unpack_type(self, t: UnpackType) -> bool: + return self.query_types([t.type]) + + def visit_parameters(self, t: Parameters) -> bool: + return self.query_types(t.arg_types) + + def visit_partial_type(self, t: PartialType) -> bool: + return self.default + + def visit_instance(self, t: Instance) -> bool: + return self.query_types(t.args) + + def visit_callable_type(self, t: CallableType) -> bool: + # FIX generics + # Avoid allocating any objects here as an optimization. 
+ args = self.query_types(t.arg_types) + ret = t.ret_type.accept(self) + if self.strategy == ANY_STRATEGY: + return args or ret + else: + return args and ret + + def visit_tuple_type(self, t: TupleType) -> bool: + return self.query_types(t.items) + + def visit_typeddict_type(self, t: TypedDictType) -> bool: + return self.query_types(list(t.items.values())) + + def visit_raw_expression_type(self, t: RawExpressionType) -> bool: + return self.default + + def visit_literal_type(self, t: LiteralType) -> bool: + return self.default + + def visit_star_type(self, t: StarType) -> bool: + return t.type.accept(self) + + def visit_union_type(self, t: UnionType) -> bool: + return self.query_types(t.items) + + def visit_overloaded(self, t: Overloaded) -> bool: + return self.query_types(t.items) # type: ignore[arg-type] + + def visit_type_type(self, t: TypeType) -> bool: + return t.item.accept(self) + + def visit_ellipsis_type(self, t: EllipsisType) -> bool: + return self.default + + def visit_placeholder_type(self, t: PlaceholderType) -> bool: + return self.query_types(t.args) + + def visit_type_alias_type(self, t: TypeAliasType) -> bool: + # Skip type aliases already visited types to avoid infinite recursion. + # TODO: Ideally we should fire subvisitors here (or use caching) if we care + # about duplicates. + if self.seen_aliases is None: + self.seen_aliases = set() + elif t in self.seen_aliases: + return self.default + self.seen_aliases.add(t) + if self.skip_alias_target: + return self.query_types(t.args) + return get_proper_type(t).accept(self) + + def query_types(self, types: list[Type] | tuple[Type, ...]) -> bool: + """Perform a query for a sequence of types using the strategy to combine the results.""" + # Special-case for lists and tuples to allow mypyc to produce better code. 
+ if isinstance(types, list): + if self.strategy == ANY_STRATEGY: + return any(t.accept(self) for t in types) + else: + return all(t.accept(self) for t in types) + else: + if self.strategy == ANY_STRATEGY: + return any(t.accept(self) for t in types) + else: + return all(t.accept(self) for t in types) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 35f60f54605a..07720afeff88 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -4,13 +4,11 @@ import itertools from contextlib import contextmanager -from itertools import chain from typing import Callable, Iterable, Iterator, List, Sequence, Tuple, TypeVar from typing_extensions import Final, Protocol from mypy import errorcodes as codes, message_registry, nodes from mypy.errorcodes import ErrorCode -from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type from mypy.messages import MessageBuilder, format_type_bare, quote_type_string, wrong_type_arg_count from mypy.nodes import ( ARG_NAMED, @@ -23,7 +21,6 @@ ArgKind, Context, Decorator, - Expression, MypyFile, ParamSpecExpr, PlaceholderNode, @@ -44,11 +41,13 @@ from mypy.tvar_scope import TypeVarLikeScope from mypy.types import ( ANNOTATED_TYPE_NAMES, + ANY_STRATEGY, FINAL_TYPE_NAMES, LITERAL_TYPE_NAMES, NEVER_NAMES, TYPE_ALIAS_NAMES, AnyType, + BoolTypeQuery, CallableArgument, CallableType, DeletedType, @@ -87,8 +86,10 @@ callable_with_ellipsis, flatten_nested_unions, get_proper_type, + has_type_vars, ) from mypy.typetraverser import TypeTraverserVisitor +from mypy.typevars import fill_typevars T = TypeVar("T") @@ -117,9 +118,11 @@ "asyncio.futures.Future", } +SELF_TYPE_NAMES: Final = {"typing.Self", "typing_extensions.Self"} + def analyze_type_alias( - node: Expression, + type: Type, api: SemanticAnalyzerCoreInterface, tvar_scope: TypeVarLikeScope, plugin: Plugin, @@ -128,6 +131,7 @@ def analyze_type_alias( allow_placeholder: bool = False, in_dynamic_func: bool = False, global_scope: bool = True, + allowed_alias_tvars: list[TypeVarLikeType] | 
None = None, ) -> tuple[Type, set[str]] | None: """Analyze r.h.s. of a (potential) type alias definition. @@ -135,11 +139,6 @@ def analyze_type_alias( full names of type aliases it depends on (directly or indirectly). Return None otherwise. 'node' must have been semantically analyzed. """ - try: - type = expr_to_unanalyzed_type(node, options, api.is_stub_file) - except TypeTranslationError: - api.fail("Invalid type alias: expression is not a valid type", node, code=codes.VALID_TYPE) - return None analyzer = TypeAnalyser( api, tvar_scope, @@ -148,6 +147,8 @@ def analyze_type_alias( is_typeshed_stub, defining_alias=True, allow_placeholder=allow_placeholder, + prohibit_self_type="type alias target", + allowed_alias_tvars=allowed_alias_tvars, ) analyzer.in_dynamic_func = in_dynamic_func analyzer.global_scope = global_scope @@ -196,10 +197,11 @@ def __init__( allow_required: bool = False, allow_param_spec_literals: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + allowed_alias_tvars: list[TypeVarLikeType] | None = None, + allow_type_any: bool = False, ) -> None: self.api = api - self.lookup_qualified = api.lookup_qualified - self.lookup_fqn_func = api.lookup_fully_qualified self.fail_func = api.fail self.note_func = api.note self.tvar_scope = tvar_scope @@ -213,8 +215,12 @@ def __init__( self.always_allow_new_syntax = self.api.is_stub_file or self.api.is_future_flag_set( "annotations" ) - # Should we accept unbound type variables (always OK in aliases)? - self.allow_unbound_tvars = allow_unbound_tvars or defining_alias + # Should we accept unbound type variables? This is currently used for class bases, + # and alias right hand sides (before they are analyzed as type aliases). + self.allow_unbound_tvars = allow_unbound_tvars + if allowed_alias_tvars is None: + allowed_alias_tvars = [] + self.allowed_alias_tvars = allowed_alias_tvars # If false, record incomplete ref if we generate PlaceholderType. 
self.allow_placeholder = allow_placeholder # Are we in a context where Required[] is allowed? @@ -231,6 +237,17 @@ def __init__( self.is_typeshed_stub = is_typeshed_stub # Names of type aliases encountered while analysing a type will be collected here. self.aliases_used: set[str] = set() + self.prohibit_self_type = prohibit_self_type + # Allow variables typed as Type[Any] and type (useful for base classes). + self.allow_type_any = allow_type_any + + def lookup_qualified( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + return self.api.lookup_qualified(name, ctx, suppress_errors) + + def lookup_fully_qualified(self, name: str) -> SymbolTableNode: + return self.api.lookup_fully_qualified(name) def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type: typ = self.visit_unbound_type_nonoptional(t, defining_literal) @@ -254,7 +271,15 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) self.api.defer() else: self.api.record_incomplete_ref() - return PlaceholderType(node.fullname, self.anal_array(t.args), t.line) + # Always allow ParamSpec for placeholders, if they are actually not valid, + # they will be reported later, after we resolve placeholders. 
+ return PlaceholderType( + node.fullname, + self.anal_array( + t.args, allow_param_spec=True, allow_param_spec_literals=True + ), + t.line, + ) else: if self.api.final_iteration: self.cannot_resolve_type(t) @@ -281,6 +306,8 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) tvar_def = self.tvar_scope.get_binding(sym) if isinstance(sym.node, ParamSpecExpr): if tvar_def is None: + if self.allow_unbound_tvars: + return t self.fail(f'ParamSpec "{t.name}" is unbound', t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) assert isinstance(tvar_def, ParamSpecType) @@ -298,7 +325,12 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) line=t.line, column=t.column, ) - if isinstance(sym.node, TypeVarExpr) and tvar_def is not None and self.defining_alias: + if ( + isinstance(sym.node, TypeVarExpr) + and self.defining_alias + and not defining_literal + and (tvar_def is None or tvar_def not in self.allowed_alias_tvars) + ): self.fail( f'Can\'t use bound type variable "{t.name}" to define generic alias', t, @@ -323,7 +355,9 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) column=t.column, ) if isinstance(sym.node, TypeVarTupleExpr) and ( - tvar_def is not None and self.defining_alias + tvar_def is not None + and self.defining_alias + and tvar_def not in self.allowed_alias_tvars ): self.fail( f'Can\'t use bound type variable "{t.name}" to define generic alias', @@ -340,12 +374,14 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) self.fail( f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE ) + # Change the line number return TypeVarTupleType( tvar_def.name, tvar_def.fullname, tvar_def.id, tvar_def.upper_bound, + sym.node.tuple_fallback, line=t.line, column=t.column, ) @@ -354,7 +390,14 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) return special if isinstance(node, TypeAlias): 
self.aliases_used.add(fullname) - an_args = self.anal_array(t.args) + an_args = self.anal_array( + t.args, + allow_param_spec=True, + allow_param_spec_literals=node.has_param_spec_type, + ) + if node.has_param_spec_type and len(node.alias_tvars) == 1: + an_args = self.pack_paramspec_args(an_args) + disallow_any = self.options.disallow_any_generics and not self.is_typeshed_stub res = expand_type_alias( node, @@ -397,6 +440,17 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) else: # sym is None return AnyType(TypeOfAny.special_form) + def pack_paramspec_args(self, an_args: Sequence[Type]) -> list[Type]: + # "Aesthetic" ParamSpec literals for single ParamSpec: C[int, str] -> C[[int, str]]. + # These do not support mypy_extensions VarArgs, etc. as they were already analyzed + # TODO: should these be re-analyzed to get rid of this inconsistency? + count = len(an_args) + if count > 0: + first_arg = get_proper_type(an_args[0]) + if not (count == 1 and isinstance(first_arg, (Parameters, ParamSpecType, AnyType))): + return [Parameters(an_args, [ARG_POS] * count, [None] * count)] + return list(an_args) + def cannot_resolve_type(self, t: UnboundType) -> None: # TODO: Move error message generation to messages.py. We'd first # need access to MessageBuilder here. 
Also move the similar @@ -413,6 +467,10 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type: # last argument has to be ParamSpec ps = self.anal_type(t.args[-1], allow_param_spec=True) if not isinstance(ps, ParamSpecType): + if isinstance(ps, UnboundType) and self.allow_unbound_tvars: + sym = self.lookup_qualified(ps.name, t) + if sym is not None and isinstance(sym.node, ParamSpecExpr): + return ps self.api.fail( "The last parameter to Concatenate needs to be a ParamSpec", t, @@ -444,7 +502,7 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ if fullname == "builtins.None": return NoneType() elif fullname == "typing.Any" or fullname == "builtins.Any": - return AnyType(TypeOfAny.explicit) + return AnyType(TypeOfAny.explicit, line=t.line, column=t.column) elif fullname in FINAL_TYPE_NAMES: self.fail( "Final can be only used as an outermost qualifier in a variable annotation", @@ -575,6 +633,24 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ self.fail("Unpack[...] requires exactly one type argument", t) return AnyType(TypeOfAny.from_error) return UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column) + elif fullname in SELF_TYPE_NAMES: + if t.args: + self.fail("Self type cannot have type arguments", t) + if self.prohibit_self_type is not None: + self.fail(f"Self type cannot be used in {self.prohibit_self_type}", t) + return AnyType(TypeOfAny.from_error) + if self.api.type is None: + self.fail("Self type is only allowed in annotations within class definition", t) + return AnyType(TypeOfAny.from_error) + if self.api.type.has_base("builtins.type"): + self.fail("Self type cannot be used in a metaclass", t) + if self.api.type.self_type is not None: + if self.api.type.is_final: + return fill_typevars(self.api.type) + return self.api.type.self_type.copy_modified(line=t.line, column=t.column) + # TODO: verify this is unreachable and replace with an assert? 
+ self.fail("Unexpected Self type", t) + return AnyType(TypeOfAny.from_error) return None def get_omitted_any(self, typ: Type, fullname: str | None = None) -> AnyType: @@ -595,36 +671,24 @@ def analyze_type_with_type_info( fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line) return TupleType(self.anal_array(args), fallback, ctx.line) - # This is a heuristic: it will be checked later anyways but the error - # message may be worse. - with self.set_allow_param_spec_literals(info.has_param_spec_type): - # Analyze arguments and (usually) construct Instance type. The - # number of type arguments and their values are - # checked only later, since we do not always know the - # valid count at this point. Thus we may construct an - # Instance with an invalid number of type arguments. - instance = Instance( - info, self.anal_array(args, allow_param_spec=True), ctx.line, ctx.column - ) - - # "aesthetic" paramspec literals - # these do not support mypy_extensions VarArgs, etc. as they were already analyzed - # TODO: should these be re-analyzed to get rid of this inconsistency? - # another inconsistency is with empty type args (Z[] is more possibly an error imo) - if len(info.type_vars) == 1 and info.has_param_spec_type and len(instance.args) > 0: - first_arg = get_proper_type(instance.args[0]) - - # TODO: can I use tuple syntax to isinstance multiple in 3.6? - if not ( - len(instance.args) == 1 - and ( - isinstance(first_arg, Parameters) - or isinstance(first_arg, ParamSpecType) - or isinstance(first_arg, AnyType) - ) - ): - args = instance.args - instance.args = (Parameters(args, [ARG_POS] * len(args), [None] * len(args)),) + # Analyze arguments and (usually) construct Instance type. The + # number of type arguments and their values are + # checked only later, since we do not always know the + # valid count at this point. Thus we may construct an + # Instance with an invalid number of type arguments. 
+ # + # We allow ParamSpec literals based on a heuristic: it will be + # checked later anyways but the error message may be worse. + instance = Instance( + info, + self.anal_array( + args, allow_param_spec=True, allow_param_spec_literals=info.has_param_spec_type + ), + ctx.line, + ctx.column, + ) + if len(info.type_vars) == 1 and info.has_param_spec_type: + instance.args = tuple(self.pack_paramspec_args(instance.args)) if info.has_type_var_tuple_type: # - 1 to allow for the empty type var tuple case. @@ -649,6 +713,7 @@ def analyze_type_with_type_info( if info.special_alias: return expand_type_alias( info.special_alias, + # TODO: should we allow NamedTuples generic in ParamSpec? self.anal_array(args), self.fail, False, @@ -663,6 +728,7 @@ def analyze_type_with_type_info( if info.special_alias: return expand_type_alias( info.special_alias, + # TODO: should we allow TypedDicts generic in ParamSpec? self.anal_array(args), self.fail, False, @@ -706,6 +772,11 @@ def analyze_unbound_type_without_type_info( return AnyType( TypeOfAny.from_unimported_type, missing_import_name=typ.missing_import_name ) + elif self.allow_type_any: + if isinstance(typ, Instance) and typ.type.fullname == "builtins.type": + return AnyType(TypeOfAny.special_form) + if isinstance(typ, TypeType) and isinstance(typ.item, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=typ.item) # Option 2: # Unbound type variable. Currently these may be still valid, # for example when defining a generic type alias. @@ -778,9 +849,11 @@ def analyze_unbound_type_without_type_info( ) else: message = 'Cannot interpret reference "{}" as a type' - self.fail(message.format(name), t, code=codes.VALID_TYPE) - for note in notes: - self.note(note, t, code=codes.VALID_TYPE) + if not defining_literal: + # Literal check already gives a custom error. Avoid duplicating errors. 
+ self.fail(message.format(name), t, code=codes.VALID_TYPE) + for note in notes: + self.note(note, t, code=codes.VALID_TYPE) # TODO: Would it be better to always return Any instead of UnboundType # in case of an error? On one hand, UnboundType has a name so error messages @@ -853,7 +926,7 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: if self.defining_alias: variables = t.variables else: - variables = self.bind_function_type_variables(t, t) + variables, _ = self.bind_function_type_variables(t, t) special = self.anal_type_guard(t.ret_type) arg_kinds = t.arg_kinds if len(arg_kinds) >= 2 and arg_kinds[-2] == ARG_STAR and arg_kinds[-1] == ARG_STAR2: @@ -1050,7 +1123,7 @@ def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(self.anal_type(t.item), line=t.line) def visit_placeholder_type(self, t: PlaceholderType) -> Type: - n = None if t.fullname is None else self.api.lookup_fully_qualified(t.fullname) + n = None if not t.fullname else self.api.lookup_fully_qualified(t.fullname) if not n or isinstance(n.node, PlaceholderNode): self.api.defer() # Still incomplete return t @@ -1070,6 +1143,16 @@ def analyze_callable_args_for_paramspec( return None tvar_def = self.tvar_scope.get_binding(sym) if not isinstance(tvar_def, ParamSpecType): + if ( + tvar_def is None + and self.allow_unbound_tvars + and isinstance(sym.node, ParamSpecExpr) + ): + # We are analyzing this type in runtime context (e.g. as type application). + # If it is not valid as a type in this position an error will be given later. 
+ return callable_with_ellipsis( + AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback + ) return None return CallableType( @@ -1105,6 +1188,14 @@ def analyze_callable_args_for_concatenate( tvar_def = self.anal_type(callable_args, allow_param_spec=True) if not isinstance(tvar_def, ParamSpecType): + if self.allow_unbound_tvars and isinstance(tvar_def, UnboundType): + sym = self.lookup_qualified(tvar_def.name, callable_args) + if sym is not None and isinstance(sym.node, ParamSpecExpr): + # We are analyzing this type in runtime context (e.g. as type application). + # If it is not valid as a type in this position an error will be given later. + return callable_with_ellipsis( + AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback + ) return None # ick, CallableType should take ParamSpecType @@ -1183,6 +1274,7 @@ def analyze_callable_args( args: list[Type] = [] kinds: list[ArgKind] = [] names: list[str | None] = [] + found_unpack = False for arg in arglist.items: if isinstance(arg, CallableArgument): args.append(arg.typ) @@ -1203,6 +1295,19 @@ def analyze_callable_args( if arg.name is not None and kind.is_star(): self.fail(f"{arg.constructor} arguments should not have names", arg) return None + elif isinstance(arg, UnboundType): + kind = ARG_POS + # Potentially a unpack. 
+ sym = self.lookup_qualified(arg.name, arg) + if sym is not None: + if sym.fullname == "typing_extensions.Unpack": + if found_unpack: + self.fail("Callables can only have a single unpack", arg) + found_unpack = True + kind = ARG_STAR + args.append(arg) + kinds.append(kind) + names.append(None) else: args.append(arg) kinds.append(ARG_POS) @@ -1322,14 +1427,17 @@ def tvar_scope_frame(self) -> Iterator[None]: yield self.tvar_scope = old_scope + def find_type_var_likes(self, t: Type, include_callables: bool = True) -> TypeVarLikeList: + return t.accept( + TypeVarLikeQuery(self.api, self.tvar_scope, include_callables=include_callables) + ) + def infer_type_variables(self, type: CallableType) -> list[tuple[str, TypeVarLikeExpr]]: """Return list of unique type variables referred to in a callable.""" names: list[str] = [] tvars: list[TypeVarLikeExpr] = [] for arg in type.arg_types: - for name, tvar_expr in arg.accept( - TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope) - ): + for name, tvar_expr in self.find_type_var_likes(arg): if name not in names: names.append(name) tvars.append(tvar_expr) @@ -1337,29 +1445,37 @@ def infer_type_variables(self, type: CallableType) -> list[tuple[str, TypeVarLik # look inside Callable types. Type variables only appearing in # functions in the return type belong to those functions, not the # function we're currently analyzing. 
- for name, tvar_expr in type.ret_type.accept( - TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope, include_callables=False) - ): + for name, tvar_expr in self.find_type_var_likes(type.ret_type, include_callables=False): if name not in names: names.append(name) tvars.append(tvar_expr) + + if not names: + return [] # Fast path return list(zip(names, tvars)) def bind_function_type_variables( self, fun_type: CallableType, defn: Context - ) -> Sequence[TypeVarLikeType]: + ) -> tuple[Sequence[TypeVarLikeType], bool]: """Find the type variables of the function type and bind them in our tvar_scope""" + has_self_type = False if fun_type.variables: defs = [] for var in fun_type.variables: + if self.api.type and self.api.type.self_type and var == self.api.type.self_type: + has_self_type = True + continue var_node = self.lookup_qualified(var.name, defn) assert var_node, "Binding for function type variable not found within function" var_expr = var_node.node assert isinstance(var_expr, TypeVarLikeExpr) binding = self.tvar_scope.bind_new(var.name, var_expr) defs.append(binding) - return defs + return defs, has_self_type typevars = self.infer_type_variables(fun_type) + has_self_type = find_self_type( + fun_type, lambda name: self.api.lookup_qualified(name, defn, suppress_errors=True) + ) # Do not define a new type variable if already defined in scope. 
typevars = [ (name, tvar) for name, tvar in typevars if not self.is_defined_type_var(name, defn) @@ -1375,7 +1491,7 @@ def bind_function_type_variables( binding = self.tvar_scope.bind_new(name, tvar) defs.append(binding) - return defs + return defs, has_self_type def is_defined_type_var(self, tvar: str, context: Context) -> bool: tvar_node = self.lookup_qualified(tvar, context) @@ -1384,11 +1500,19 @@ def is_defined_type_var(self, tvar: str, context: Context) -> bool: return self.tvar_scope.get_binding(tvar_node) is not None def anal_array( - self, a: Iterable[Type], nested: bool = True, *, allow_param_spec: bool = False + self, + a: Iterable[Type], + nested: bool = True, + *, + allow_param_spec: bool = False, + allow_param_spec_literals: bool = False, ) -> list[Type]: + old_allow_param_spec_literals = self.allow_param_spec_literals + self.allow_param_spec_literals = allow_param_spec_literals res: list[Type] = [] for t in a: res.append(self.anal_type(t, nested, allow_param_spec=allow_param_spec)) + self.allow_param_spec_literals = old_allow_param_spec_literals return self.check_unpacks_in_list(res) def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = False) -> Type: @@ -1445,7 +1569,7 @@ def named_type( line: int = -1, column: int = -1, ) -> Instance: - node = self.lookup_fqn_func(fully_qualified_name) + node = self.lookup_fully_qualified(fully_qualified_name) assert isinstance(node.node, TypeInfo) any_type = AnyType(TypeOfAny.special_form) if args is not None: @@ -1476,15 +1600,6 @@ def tuple_type(self, items: list[Type]) -> TupleType: any_type = AnyType(TypeOfAny.special_form) return TupleType(items, fallback=self.named_type("builtins.tuple", [any_type])) - @contextmanager - def set_allow_param_spec_literals(self, to: bool) -> Iterator[None]: - old = self.allow_param_spec_literals - try: - self.allow_param_spec_literals = to - yield - finally: - self.allow_param_spec_literals = old - TypeVarLikeList = List[Tuple[str, TypeVarLikeExpr]] @@ 
-1598,12 +1713,12 @@ def expand_type_alias( """Expand a (generic) type alias target following the rules outlined in TypeAlias docstring. Here: - target: original target type (contains unbound type variables) - alias_tvars: type variable names + target: original target type args: types to be substituted in place of type variables fail: error reporter callback no_args: whether original definition used a bare generic `A = List` ctx: context where expansion happens + unexpanded_type, disallow_any, use_standard_error: used to customize error messages """ exp_len = len(node.alias_tvars) act_len = len(args) @@ -1643,6 +1758,9 @@ def expand_type_alias( msg = f"Bad number of arguments for type alias, expected: {exp_len}, given: {act_len}" fail(msg, ctx, code=codes.TYPE_ARG) return set_any_tvars(node, ctx.line, ctx.column, from_error=True) + # TODO: we need to check args validity w.r.t alias.alias_tvars. + # Otherwise invalid instantiations will be allowed in runtime context. + # Note: in type context, these will be still caught by semanal_typeargs. typ = TypeAliasType(node, args, ctx.line, ctx.column) assert typ.alias is not None # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here. 
@@ -1690,7 +1808,9 @@ def set_any_tvars( return TypeAliasType(node, [any_type] * len(node.alias_tvars), newline, newcolumn) -def remove_dups(tvars: Iterable[T]) -> list[T]: +def remove_dups(tvars: list[T]) -> list[T]: + if len(tvars) <= 1: + return tvars # Get unique elements in order of appearance all_tvars: set[T] = set() new_tvars: list[T] = [] @@ -1701,8 +1821,13 @@ def remove_dups(tvars: Iterable[T]) -> list[T]: return new_tvars -def flatten_tvars(ll: Iterable[list[T]]) -> list[T]: - return remove_dups(chain.from_iterable(ll)) +def flatten_tvars(lists: list[list[T]]) -> list[T]: + result: list[T] = [] + for lst in lists: + for item in lst: + if item not in result: + result.append(item) + return result class TypeVarLikeQuery(TypeQuery[TypeVarLikeList]): @@ -1710,17 +1835,15 @@ class TypeVarLikeQuery(TypeQuery[TypeVarLikeList]): def __init__( self, - lookup: Callable[[str, Context], SymbolTableNode | None], + api: SemanticAnalyzerCoreInterface, scope: TypeVarLikeScope, *, include_callables: bool = True, - include_bound_tvars: bool = False, ) -> None: - self.include_callables = include_callables - self.lookup = lookup - self.scope = scope - self.include_bound_tvars = include_bound_tvars super().__init__(flatten_tvars) + self.api = api + self.scope = scope + self.include_callables = include_callables # Only include type variables in type aliases args. This would be anyway # that case if we expand (as target variables would be overridden with args) # and it may cause infinite recursion on invalid (diverging) recursive aliases. 
@@ -1729,9 +1852,7 @@ def __init__( def _seems_like_callable(self, type: UnboundType) -> bool: if not type.args: return False - if isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)): - return True - return False + return isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)) def visit_unbound_type(self, t: UnboundType) -> TypeVarLikeList: name = t.name @@ -1740,16 +1861,16 @@ def visit_unbound_type(self, t: UnboundType) -> TypeVarLikeList: if name.endswith("args"): if name.endswith(".args") or name.endswith(".kwargs"): base = ".".join(name.split(".")[:-1]) - n = self.lookup(base, t) + n = self.api.lookup_qualified(base, t) if n is not None and isinstance(n.node, ParamSpecExpr): node = n name = base if node is None: - node = self.lookup(name, t) + node = self.api.lookup_qualified(name, t) if ( node and isinstance(node.node, TypeVarLikeExpr) - and (self.include_bound_tvars or self.scope.get_binding(node) is None) + and self.scope.get_binding(node) is None ): assert isinstance(node.node, TypeVarLikeExpr) return [(name, node.node)] @@ -1785,26 +1906,11 @@ def __init__( self.scope = scope self.diverging = False - def is_alias_tvar(self, t: Type) -> bool: - # Generic type aliases use unbound type variables. - if not isinstance(t, UnboundType) or t.args: - return False - node = self.lookup(t.name, t) - if ( - node - and isinstance(node.node, TypeVarLikeExpr) - and self.scope.get_binding(node) is None - ): - return True - return False - def visit_type_alias_type(self, t: TypeAliasType) -> Type: assert t.alias is not None, f"Unfixed type alias {t.type_ref}" if t.alias in self.seen_nodes: for arg in t.args: - if not self.is_alias_tvar(arg) and bool( - arg.accept(TypeVarLikeQuery(self.lookup, self.scope)) - ): + if not isinstance(arg, TypeVarLikeType) and has_type_vars(arg): self.diverging = True return t # All clear for this expansion chain. 
@@ -1878,9 +1984,9 @@ def has_any_from_unimported_type(t: Type) -> bool: return t.accept(HasAnyFromUnimportedType()) -class HasAnyFromUnimportedType(TypeQuery[bool]): +class HasAnyFromUnimportedType(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_any(self, t: AnyType) -> bool: return t.type_of_any == TypeOfAny.from_unimported_type @@ -1961,3 +2067,19 @@ def visit_instance(self, typ: Instance) -> None: python_version=self.python_version, use_generic_error=True, ) + + +def find_self_type(typ: Type, lookup: Callable[[str], SymbolTableNode | None]) -> bool: + return typ.accept(HasSelfType(lookup)) + + +class HasSelfType(BoolTypeQuery): + def __init__(self, lookup: Callable[[str], SymbolTableNode | None]) -> None: + self.lookup = lookup + super().__init__(ANY_STRATEGY) + + def visit_unbound_type(self, t: UnboundType) -> bool: + sym = self.lookup(t.name) + if sym and sym.fullname in SELF_TYPE_NAMES: + return True + return super().visit_unbound_type(t) diff --git a/mypy/typeops.py b/mypy/typeops.py index 7eb1a67b46ea..8c01fb118076 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -33,6 +33,7 @@ ENUM_REMOVED_PROPS, AnyType, CallableType, + ExtraAttrs, FormalArgument, FunctionLike, Instance, @@ -41,6 +42,7 @@ Overloaded, Parameters, ParamSpecType, + PartialType, ProperType, TupleType, Type, @@ -71,13 +73,13 @@ def is_recursive_pair(s: Type, t: Type) -> bool: """ if isinstance(s, TypeAliasType) and s.is_recursive: return ( - isinstance(get_proper_type(t), Instance) + isinstance(get_proper_type(t), (Instance, UnionType)) or isinstance(t, TypeAliasType) and t.is_recursive ) if isinstance(t, TypeAliasType) and t.is_recursive: return ( - isinstance(get_proper_type(s), Instance) + isinstance(get_proper_type(s), (Instance, UnionType)) or isinstance(s, TypeAliasType) and s.is_recursive ) @@ -439,6 +441,7 @@ def make_simplified_union( * [int, int] -> int * [int, Any] -> Union[int, Any] (Any types are not simplified 
away!) * [Any, Any] -> Any + * [int, Union[bytes, str]] -> Union[int, bytes, str] Note: This must NOT be used during semantic analysis, since TypeInfos may not be fully initialized. @@ -453,10 +456,14 @@ def make_simplified_union( # Step 1: expand all nested unions items = flatten_nested_unions(items) - # Step 2: remove redundant unions + # Step 2: fast path for single item + if len(items) == 1: + return get_proper_type(items[0]) + + # Step 3: remove redundant unions simplified_set: Sequence[Type] = _remove_redundant_union_items(items, keep_erased) - # Step 3: If more than one literal exists in the union, try to simplify + # Step 4: If more than one literal exists in the union, try to simplify if ( contract_literals and sum(isinstance(get_proper_type(item), LiteralType) for item in simplified_set) > 1 @@ -465,16 +472,27 @@ def make_simplified_union( result = get_proper_type(UnionType.make_union(simplified_set, line, column)) - # Step 4: At last, we erase any (inconsistent) extra attributes on instances. - extra_attrs_set = set() - for item in items: - instance = try_getting_instance_fallback(item) - if instance and instance.extra_attrs: - extra_attrs_set.add(instance.extra_attrs) - - fallback = try_getting_instance_fallback(result) - if len(extra_attrs_set) > 1 and fallback: - fallback.extra_attrs = None + nitems = len(items) + if nitems > 1 and ( + nitems > 2 or not (type(items[0]) is NoneType or type(items[1]) is NoneType) + ): + # Step 5: At last, we erase any (inconsistent) extra attributes on instances. + + # Initialize with None instead of an empty set as a micro-optimization. The set + # is needed very rarely, so we try to avoid constructing it. 
+ extra_attrs_set: set[ExtraAttrs] | None = None + for item in items: + instance = try_getting_instance_fallback(item) + if instance and instance.extra_attrs: + if extra_attrs_set is None: + extra_attrs_set = {instance.extra_attrs} + else: + extra_attrs_set.add(instance.extra_attrs) + + if extra_attrs_set is not None and len(extra_attrs_set) > 1: + fallback = try_getting_instance_fallback(result) + if fallback: + fallback.extra_attrs = None return result @@ -1005,14 +1023,30 @@ def try_getting_instance_fallback(typ: Type) -> Instance | None: typ = get_proper_type(typ) if isinstance(typ, Instance): return typ - elif isinstance(typ, TupleType): - return typ.partial_fallback - elif isinstance(typ, TypedDictType): + elif isinstance(typ, LiteralType): return typ.fallback + elif isinstance(typ, NoneType): + return None # Fast path for None, which is common elif isinstance(typ, FunctionLike): return typ.fallback - elif isinstance(typ, LiteralType): + elif isinstance(typ, TupleType): + return typ.partial_fallback + elif isinstance(typ, TypedDictType): return typ.fallback elif isinstance(typ, TypeVarType): return try_getting_instance_fallback(typ.upper_bound) return None + + +def fixup_partial_type(typ: Type) -> Type: + """Convert a partial type that we couldn't resolve into something concrete. + + This means, for None we make it Optional[Any], and for anything else we + fill in all of the type arguments with Any. 
+ """ + if not isinstance(typ, PartialType): + return typ + if typ.type is None: + return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) + else: + return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) diff --git a/mypy/types.py b/mypy/types.py index e322cf02505f..0244f57847c5 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -7,6 +7,7 @@ from typing import ( TYPE_CHECKING, Any, + Callable, ClassVar, Dict, Iterable, @@ -29,6 +30,7 @@ ArgKind, FakeInfo, FuncDef, + FuncItem, SymbolNode, ) from mypy.state import state @@ -65,7 +67,10 @@ # Note: Although "Literal[None]" is a valid type, we internally always convert # such a type directly into "None". So, "None" is not a valid parameter of # LiteralType and is omitted from this list. -LiteralValue: _TypeAlias = Union[int, str, bool] +# +# Note: Float values are only used internally. They are not accepted within +# Literal[...]. +LiteralValue: _TypeAlias = Union[int, str, bool, float] # If we only import type_visitor in the middle of the file, mypy @@ -145,9 +150,20 @@ "typing_extensions.Never", ) +# Mypyc fixed-width native int types (compatible with builtins.int) +MYPYC_NATIVE_INT_NAMES: Final = ("mypy_extensions.i64", "mypy_extensions.i32") + +DATACLASS_TRANSFORM_NAMES: Final = ( + "typing.dataclass_transform", + "typing_extensions.dataclass_transform", +) + # A placeholder used for Bogus[...] parameters _dummy: Final[Any] = object() +# A placeholder for int parameters +_dummy_int: Final = -999999 + class TypeOfAny: """ @@ -193,7 +209,7 @@ def deserialize_type(data: JsonDict | str) -> Type: class Type(mypy.nodes.Context): """Abstract base class for all types.""" - __slots__ = ("can_be_true", "can_be_false") + __slots__ = ("_can_be_true", "_can_be_false") # 'can_be_true' and 'can_be_false' mean whether the value of the # expression can be true or false in a boolean context. They are useful # when inferring the type of logic expressions like `x and y`. 
@@ -206,8 +222,29 @@ class Type(mypy.nodes.Context): def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) - self.can_be_true = self.can_be_true_default() - self.can_be_false = self.can_be_false_default() + # Value of these can be -1 (use the default, lazy init), 0 (false) or 1 (true) + self._can_be_true = -1 + self._can_be_false = -1 + + @property + def can_be_true(self) -> bool: + if self._can_be_true == -1: # Lazy init helps mypyc + self._can_be_true = self.can_be_true_default() + return bool(self._can_be_true) + + @can_be_true.setter + def can_be_true(self, v: bool) -> None: + self._can_be_true = v + + @property + def can_be_false(self) -> bool: + if self._can_be_false == -1: # Lazy init helps mypyc + self._can_be_false = self.can_be_false_default() + return bool(self._can_be_false) + + @can_be_false.setter + def can_be_false(self, v: bool) -> None: + self._can_be_false = v def can_be_true_default(self) -> bool: return True @@ -256,10 +293,10 @@ def __init__( line: int = -1, column: int = -1, ) -> None: + super().__init__(line, column) self.alias = alias self.args = args self.type_ref: str | None = None - super().__init__(line, column) def _expand_once(self) -> Type: """Expand to the target type exactly once. @@ -278,30 +315,42 @@ def _expand_once(self) -> Type: self.alias.target, self.alias.alias_tvars, self.args, self.line, self.column ) - def _partial_expansion(self) -> tuple[ProperType, bool]: + def _partial_expansion(self, nothing_args: bool = False) -> tuple[ProperType, bool]: # Private method mostly for debugging and testing. 
unroller = UnrollAliasVisitor(set()) - unrolled = self.accept(unroller) + if nothing_args: + alias = self.copy_modified(args=[UninhabitedType()] * len(self.args)) + else: + alias = self + unrolled = alias.accept(unroller) assert isinstance(unrolled, ProperType) return unrolled, unroller.recursed - def expand_all_if_possible(self) -> ProperType | None: + def expand_all_if_possible(self, nothing_args: bool = False) -> ProperType | None: """Attempt a full expansion of the type alias (including nested aliases). If the expansion is not possible, i.e. the alias is (mutually-)recursive, - return None. + return None. If nothing_args is True, replace all type arguments with an + UninhabitedType() (used to detect recursively defined aliases). """ - unrolled, recursed = self._partial_expansion() + unrolled, recursed = self._partial_expansion(nothing_args=nothing_args) if recursed: return None return unrolled @property def is_recursive(self) -> bool: + """Whether this type alias is recursive. + + Note this doesn't check generic alias arguments, but only if this alias + *definition* is recursive. The property value thus can be cached on the + underlying TypeAlias node. If you want to include all nested types, use + has_recursive_types() function. + """ assert self.alias is not None, "Unfixed type alias" is_recursive = self.alias._is_recursive if is_recursive is None: - is_recursive = self.expand_all_if_possible() is None + is_recursive = self.expand_all_if_possible(nothing_args=True) is None # We cache the value on the underlying TypeAlias node as an optimization, # since the value is the same for all instances of the same alias. self.alias._is_recursive = is_recursive @@ -402,7 +451,8 @@ class TypeVarId: # For plain variables (type parameters of generic classes and # functions) raw ids are allocated by semantic analysis, using # positive ids 1, 2, ... for generic class parameters and negative - # ids -1, ... for generic function type arguments. This convention + # ids -1, ... 
for generic function type arguments. A special value 0 + # is reserved for Self type variable (autogenerated). This convention # is only used to keep type variable ids distinct when allocating # them; the type checker makes no distinction between class and # function type variables. @@ -437,14 +487,12 @@ def __repr__(self) -> str: return self.raw_id.__repr__() def __eq__(self, other: object) -> bool: - if isinstance(other, TypeVarId): - return ( - self.raw_id == other.raw_id - and self.meta_level == other.meta_level - and self.namespace == other.namespace - ) - else: - return False + return ( + isinstance(other, TypeVarId) + and self.raw_id == other.raw_id + and self.meta_level == other.meta_level + and self.namespace == other.namespace + ) def __ne__(self, other: object) -> bool: return not (self == other) @@ -524,6 +572,8 @@ def copy_modified( values: Bogus[list[Type]] = _dummy, upper_bound: Bogus[Type] = _dummy, id: Bogus[TypeVarId | int] = _dummy, + line: int = _dummy_int, + column: int = _dummy_int, ) -> TypeVarType: return TypeVarType( self.name, @@ -532,8 +582,8 @@ def copy_modified( self.values if values is _dummy else values, self.upper_bound if upper_bound is _dummy else upper_bound, self.variance, - self.line, - self.column, + self.line if line == _dummy_int else line, + self.column if column == _dummy_int else column, ) def accept(self, visitor: TypeVisitor[T]) -> T: @@ -640,14 +690,14 @@ def copy_modified( self, *, id: Bogus[TypeVarId | int] = _dummy, - flavor: Bogus[int] = _dummy, + flavor: int = _dummy_int, prefix: Bogus[Parameters] = _dummy, ) -> ParamSpecType: return ParamSpecType( self.name, self.fullname, id if id is not _dummy else self.id, - flavor if flavor is not _dummy else self.flavor, + flavor if flavor != _dummy_int else self.flavor, self.upper_bound, line=self.line, column=self.column, @@ -705,6 +755,20 @@ class TypeVarTupleType(TypeVarLikeType): See PEP646 for more information. 
""" + def __init__( + self, + name: str, + fullname: str, + id: TypeVarId | int, + upper_bound: Type, + tuple_fallback: Instance, + *, + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(name, fullname, id, upper_bound, line=line, column=column) + self.tuple_fallback = tuple_fallback + def serialize(self) -> JsonDict: assert not self.id.is_meta_var() return { @@ -713,13 +777,18 @@ def serialize(self) -> JsonDict: "fullname": self.fullname, "id": self.id.raw_id, "upper_bound": self.upper_bound.serialize(), + "tuple_fallback": self.tuple_fallback.serialize(), } @classmethod def deserialize(cls, data: JsonDict) -> TypeVarTupleType: assert data[".class"] == "TypeVarTupleType" return TypeVarTupleType( - data["name"], data["fullname"], data["id"], deserialize_type(data["upper_bound"]) + data["name"], + data["fullname"], + data["id"], + deserialize_type(data["upper_bound"]), + Instance.deserialize(data["tuple_fallback"]), ) def accept(self, visitor: TypeVisitor[T]) -> T: @@ -744,6 +813,7 @@ def copy_modified(self, id: Bogus[TypeVarId | int] = _dummy) -> TypeVarTupleType self.fullname, self.id if id is _dummy else id, self.upper_bound, + self.tuple_fallback, line=self.line, column=self.column, ) @@ -910,9 +980,7 @@ def __hash__(self) -> int: return hash(tuple(self.items)) def __eq__(self, other: object) -> bool: - if not isinstance(other, TypeList): - return False - return self.items == other.items + return isinstance(other, TypeList) and self.items == other.items class UnpackType(ProperType): @@ -988,10 +1056,10 @@ def accept(self, visitor: TypeVisitor[T]) -> T: def copy_modified( self, # Mark with Bogus because _dummy is just an object (with type Any) - type_of_any: Bogus[int] = _dummy, + type_of_any: int = _dummy_int, original_any: Bogus[AnyType | None] = _dummy, ) -> AnyType: - if type_of_any is _dummy: + if type_of_any == _dummy_int: type_of_any = self.type_of_any if original_any is _dummy: original_any = self.source_any @@ -1385,7 +1453,7 @@ class 
FunctionLike(ProperType): def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) - self.can_be_false = False + self._can_be_false = False @abstractmethod def is_type_obj(self) -> bool: @@ -1709,8 +1777,8 @@ def copy_modified( name: Bogus[str | None] = _dummy, definition: Bogus[SymbolNode] = _dummy, variables: Bogus[Sequence[TypeVarLikeType]] = _dummy, - line: Bogus[int] = _dummy, - column: Bogus[int] = _dummy, + line: int = _dummy_int, + column: int = _dummy_int, is_ellipsis_args: Bogus[bool] = _dummy, implicit: Bogus[bool] = _dummy, special_sig: Bogus[str | None] = _dummy, @@ -1721,7 +1789,7 @@ def copy_modified( from_concatenate: Bogus[bool] = _dummy, unpack_kwargs: Bogus[bool] = _dummy, ) -> CT: - return type(self)( + modified = CallableType( arg_types=arg_types if arg_types is not _dummy else self.arg_types, arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds, arg_names=arg_names if arg_names is not _dummy else self.arg_names, @@ -1730,8 +1798,8 @@ def copy_modified( name=name if name is not _dummy else self.name, definition=definition if definition is not _dummy else self.definition, variables=variables if variables is not _dummy else self.variables, - line=line if line is not _dummy else self.line, - column=column if column is not _dummy else self.column, + line=line if line != _dummy_int else self.line, + column=column if column != _dummy_int else self.column, is_ellipsis_args=( is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args ), @@ -1746,6 +1814,9 @@ def copy_modified( ), unpack_kwargs=unpack_kwargs if unpack_kwargs is not _dummy else self.unpack_kwargs, ) + # Optimization: Only NewTypes are supported as subtypes since + # the class is effectively final, so we can use a cast safely. 
+ return cast(CT, modified) def var_arg(self) -> FormalArgument | None: """The formal argument for *args.""" @@ -1940,7 +2011,7 @@ def expand_param_spec( def with_unpacked_kwargs(self) -> NormalizedCallableType: if not self.unpack_kwargs: - return NormalizedCallableType(self.copy_modified()) + return cast(NormalizedCallableType, self) last_type = get_proper_type(self.arg_types[-1]) assert isinstance(last_type, TypedDictType) extra_kinds = [ @@ -2090,7 +2161,9 @@ def get_name(self) -> str | None: return self._items[0].name def with_unpacked_kwargs(self) -> Overloaded: - return Overloaded([i.with_unpacked_kwargs() for i in self.items]) + if any(i.unpack_kwargs for i in self.items): + return Overloaded([i.with_unpacked_kwargs() for i in self.items]) + return self def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_overloaded(self) @@ -2139,10 +2212,10 @@ def __init__( column: int = -1, implicit: bool = False, ) -> None: + super().__init__(line, column) self.partial_fallback = fallback self.items = items self.implicit = implicit - super().__init__(line, column) def can_be_true_default(self) -> bool: if self.can_be_any_bool(): @@ -2263,16 +2336,19 @@ def __hash__(self) -> int: return hash((frozenset(self.items.items()), self.fallback, frozenset(self.required_keys))) def __eq__(self, other: object) -> bool: - if isinstance(other, TypedDictType): - if frozenset(self.items.keys()) != frozenset(other.items.keys()): - return False - for (_, left_item_type, right_item_type) in self.zip(other): - if not left_item_type == right_item_type: - return False - return self.fallback == other.fallback and self.required_keys == other.required_keys - else: + if not isinstance(other, TypedDictType): return NotImplemented + return ( + frozenset(self.items.keys()) == frozenset(other.items.keys()) + and all( + left_item_type == right_item_type + for (_, left_item_type, right_item_type) in self.zip(other) + ) + and self.fallback == other.fallback + and self.required_keys == 
other.required_keys + ) + def serialize(self) -> JsonDict: return { ".class": "TypedDictType", @@ -2290,6 +2366,10 @@ def deserialize(cls, data: JsonDict) -> TypedDictType: Instance.deserialize(data["fallback"]), ) + @property + def is_final(self) -> bool: + return self.fallback.type.is_final + def is_anonymous(self) -> bool: return self.fallback.type.fullname in TPDICT_FB_NAMES @@ -2444,8 +2524,8 @@ class LiteralType(ProperType): def __init__( self, value: LiteralValue, fallback: Instance, line: int = -1, column: int = -1 ) -> None: - self.value = value super().__init__(line, column) + self.value = value self.fallback = fallback self._hash = -1 # Cached hash value @@ -2551,13 +2631,17 @@ def __init__( # We must keep this false to avoid crashes during semantic analysis. # TODO: maybe switch this to True during type-checking pass? self.items = flatten_nested_unions(items, handle_type_alias_type=False) - self.can_be_true = any(item.can_be_true for item in items) - self.can_be_false = any(item.can_be_false for item in items) # is_evaluated should be set to false for type comments and string literals self.is_evaluated = is_evaluated # uses_pep604_syntax is True if Union uses OR syntax (X | Y) self.uses_pep604_syntax = uses_pep604_syntax + def can_be_true_default(self) -> bool: + return any(item.can_be_true for item in self.items) + + def can_be_false_default(self) -> bool: + return any(item.can_be_false for item in self.items) + def __hash__(self) -> int: return hash(frozenset(self.items)) @@ -2781,6 +2865,14 @@ def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) return cast(T, visitor.visit_placeholder_type(self)) + def __hash__(self) -> int: + return hash((self.fullname, tuple(self.args))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PlaceholderType): + return NotImplemented + return self.fullname == other.fullname and self.args == other.args + def serialize(self) -> str: # We should never get 
here since all placeholders should be replaced # during semantic analysis. @@ -2812,30 +2904,45 @@ def get_proper_type(typ: Type | None) -> ProperType | None: typ = typ.type_guard while isinstance(typ, TypeAliasType): typ = typ._expand_once() - assert isinstance(typ, ProperType), typ # TODO: store the name of original type alias on this type, so we can show it in errors. - return typ + return cast(ProperType, typ) @overload -def get_proper_types(it: Iterable[Type]) -> list[ProperType]: # type: ignore[misc] +def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: # type: ignore[misc] ... @overload -def get_proper_types(it: Iterable[Type | None]) -> list[ProperType | None]: +def get_proper_types( + types: list[Type | None] | tuple[Type | None, ...] +) -> list[ProperType | None]: ... -def get_proper_types(it: Iterable[Type | None]) -> list[ProperType] | list[ProperType | None]: - return [get_proper_type(t) for t in it] +def get_proper_types( + types: list[Type] | list[Type | None] | tuple[Type | None, ...] +) -> list[ProperType] | list[ProperType | None]: + if isinstance(types, list): + typelist = types + # Optimize for the common case so that we don't need to allocate anything + if not any( + isinstance(t, (TypeAliasType, TypeGuardedType)) for t in typelist # type: ignore[misc] + ): + return cast("list[ProperType]", typelist) + return [get_proper_type(t) for t in typelist] + else: + return [get_proper_type(t) for t in types] # We split off the type visitor base classes to another module # to make it easier to gradually get modules working with mypyc. # Import them here, after the types are defined. # This is intended as a re-export also. 
-from mypy.type_visitor import ( # noqa: F811 +from mypy.type_visitor import ( # noqa: F811,F401 + ALL_STRATEGY as ALL_STRATEGY, + ANY_STRATEGY as ANY_STRATEGY, + BoolTypeQuery as BoolTypeQuery, SyntheticTypeVisitor as SyntheticTypeVisitor, TypeQuery as TypeQuery, TypeTranslator as TypeTranslator, @@ -3186,24 +3293,45 @@ def is_named_instance(t: Type, fullnames: str | tuple[str, ...]) -> TypeGuard[In class InstantiateAliasVisitor(TrivialSyntheticTypeTranslator): - def __init__(self, vars: list[str], subs: list[Type]) -> None: - self.replacements = {v: s for (v, s) in zip(vars, subs)} + def __init__(self, vars: list[TypeVarLikeType], subs: list[Type]) -> None: + self.replacements = {v.id: s for (v, s) in zip(vars, subs)} def visit_type_alias_type(self, typ: TypeAliasType) -> Type: return typ.copy_modified(args=[t.accept(self) for t in typ.args]) - def visit_unbound_type(self, typ: UnboundType) -> Type: - # TODO: stop using unbound type variables for type aliases. - # Now that type aliases are very similar to TypeInfos we should - # make type variable tracking similar as well. Maybe we can even support - # upper bounds etc. for generic type aliases. - if typ.name in self.replacements: - return self.replacements[typ.name] + def visit_type_var(self, typ: TypeVarType) -> Type: + if typ.id in self.replacements: + return self.replacements[typ.id] return typ - def visit_type_var(self, typ: TypeVarType) -> Type: - if typ.name in self.replacements: - return self.replacements[typ.name] + def visit_callable_type(self, t: CallableType) -> Type: + param_spec = t.param_spec() + if param_spec is not None: + # TODO: this branch duplicates the one in expand_type(), find a way to reuse it + # without import cycle types <-> typeanal <-> expandtype. 
+ repl = get_proper_type(self.replacements.get(param_spec.id)) + if isinstance(repl, CallableType) or isinstance(repl, Parameters): + prefix = param_spec.prefix + t = t.expand_param_spec(repl, no_prefix=True) + return t.copy_modified( + arg_types=[t.accept(self) for t in prefix.arg_types] + t.arg_types, + arg_kinds=prefix.arg_kinds + t.arg_kinds, + arg_names=prefix.arg_names + t.arg_names, + ret_type=t.ret_type.accept(self), + type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), + ) + return super().visit_callable_type(t) + + def visit_param_spec(self, typ: ParamSpecType) -> Type: + if typ.id in self.replacements: + repl = get_proper_type(self.replacements[typ.id]) + # TODO: all the TODOs from same logic in expand_type() apply here. + if isinstance(repl, Instance): + return repl + elif isinstance(repl, (ParamSpecType, Parameters, CallableType)): + return expand_param_spec(typ, repl) + else: + return repl return typ @@ -3220,7 +3348,7 @@ def visit_instance(self, typ: Instance) -> None: def replace_alias_tvars( - tp: Type, vars: list[str], subs: list[Type], newline: int, newcolumn: int + tp: Type, vars: list[TypeVarLikeType], subs: list[Type], newline: int, newcolumn: int ) -> Type: """Replace type variables in a generic type alias tp with substitutions subs resetting context. Length of subs should be already checked. 
@@ -3233,39 +3361,68 @@ def replace_alias_tvars( return new_tp -class HasTypeVars(TypeQuery[bool]): +class HasTypeVars(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) + self.skip_alias_target = True def visit_type_var(self, t: TypeVarType) -> bool: return True + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + return True + + def visit_param_spec(self, t: ParamSpecType) -> bool: + return True + def has_type_vars(typ: Type) -> bool: """Check if a type contains any type variables (recursively).""" return typ.accept(HasTypeVars()) -class HasRecursiveType(TypeQuery[bool]): +class HasRecursiveType(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_type_alias_type(self, t: TypeAliasType) -> bool: - return t.is_recursive + return t.is_recursive or self.query_types(t.args) + + +# Use singleton since this is hot (note: call reset() before using) +_has_recursive_type: Final = HasRecursiveType() def has_recursive_types(typ: Type) -> bool: """Check if a type contains any recursive aliases (recursively).""" - return typ.accept(HasRecursiveType()) + _has_recursive_type.reset() + return typ.accept(_has_recursive_type) + + +def _flattened(types: Iterable[Type]) -> Iterable[Type]: + for t in types: + tp = get_proper_type(t) + if isinstance(tp, UnionType): + yield from _flattened(tp.items) + else: + yield t def flatten_nested_unions( - types: Iterable[Type], handle_type_alias_type: bool = True + types: Sequence[Type], handle_type_alias_type: bool = True ) -> list[Type]: """Flatten nested unions in a type list.""" + if not isinstance(types, list): + typelist = list(types) + else: + typelist = cast("list[Type]", types) + + # Fast path: most of the time there is nothing to flatten + if not any(isinstance(t, (TypeAliasType, UnionType)) for t in typelist): # type: ignore[misc] + return typelist + flat_items: list[Type] = [] - # TODO: avoid duplicate types in 
unions (e.g. using hash) - for t in types: + for t in typelist: tp = get_proper_type(t) if handle_type_alias_type else t if isinstance(tp, ProperType) and isinstance(tp, UnionType): flat_items.extend( @@ -3346,11 +3503,11 @@ def is_literal_type(typ: ProperType, fallback_fullname: str, value: LiteralValue """Check if this type is a LiteralType with the given fallback type and value.""" if isinstance(typ, Instance) and typ.last_known_value: typ = typ.last_known_value - if not isinstance(typ, LiteralType): - return False - if typ.fallback.type.fullname != fallback_fullname: - return False - return typ.value == value + return ( + isinstance(typ, LiteralType) + and typ.fallback.type.fullname == fallback_fullname + and typ.value == value + ) def is_self_type_like(typ: Type, *, is_classmethod: bool) -> bool: @@ -3382,3 +3539,100 @@ def callable_with_ellipsis(any_type: AnyType, ret_type: Type, fallback: Instance fallback=fallback, is_ellipsis_args=True, ) + + +def expand_param_spec( + t: ParamSpecType, repl: ParamSpecType | Parameters | CallableType +) -> ProperType: + """This is shared part of the logic w.r.t. ParamSpec instantiation. + + It is shared between type aliases and proper types, that currently use somewhat different + logic for instantiation.""" + if isinstance(repl, ParamSpecType): + return repl.copy_modified( + flavor=t.flavor, + prefix=t.prefix.copy_modified( + arg_types=t.prefix.arg_types + repl.prefix.arg_types, + arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, + arg_names=t.prefix.arg_names + repl.prefix.arg_names, + ), + ) + else: + # if the paramspec is *P.args or **P.kwargs: + if t.flavor != ParamSpecFlavor.BARE: + assert isinstance(repl, CallableType), "Should not be able to get here." + # Is this always the right thing to do? 
+ param_spec = repl.param_spec() + if param_spec: + return param_spec.with_flavor(t.flavor) + else: + return repl + else: + return Parameters( + t.prefix.arg_types + repl.arg_types, + t.prefix.arg_kinds + repl.arg_kinds, + t.prefix.arg_names + repl.arg_names, + variables=[*t.prefix.variables, *repl.variables], + ) + + +def store_argument_type( + defn: FuncItem, i: int, typ: CallableType, named_type: Callable[[str, list[Type]], Instance] +) -> None: + arg_type = typ.arg_types[i] + if typ.arg_kinds[i] == ARG_STAR: + if isinstance(arg_type, ParamSpecType): + pass + elif isinstance(arg_type, UnpackType): + unpacked_type = get_proper_type(arg_type.type) + if isinstance(unpacked_type, TupleType): + # Instead of using Tuple[Unpack[Tuple[...]]], just use + # Tuple[...] + arg_type = unpacked_type + elif ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + arg_type = unpacked_type + else: + arg_type = TupleType( + [arg_type], + fallback=named_type("builtins.tuple", [named_type("builtins.object", [])]), + ) + else: + # builtins.tuple[T] is typing.Tuple[T, ...] + arg_type = named_type("builtins.tuple", [arg_type]) + elif typ.arg_kinds[i] == ARG_STAR2: + if not isinstance(arg_type, ParamSpecType) and not typ.unpack_kwargs: + arg_type = named_type("builtins.dict", [named_type("builtins.str", []), arg_type]) + defn.arguments[i].variable.type = arg_type + + +def remove_trivial(types: Iterable[Type]) -> list[Type]: + """Make trivial simplifications on a list of types without calling is_subtype(). 
+ + This makes following simplifications: + * Remove bottom types (taking into account strict optional setting) + * Remove everything else if there is an `object` + * Remove strict duplicate types + """ + removed_none = False + new_types = [] + all_types = set() + for t in types: + p_t = get_proper_type(t) + if isinstance(p_t, UninhabitedType): + continue + if isinstance(p_t, NoneType) and not state.strict_optional: + removed_none = True + continue + if isinstance(p_t, Instance) and p_t.type.fullname == "builtins.object": + return [p_t] + if p_t not in all_types: + new_types.append(t) + all_types.add(p_t) + if new_types: + return new_types + if removed_none: + return [NoneType()] + return [UninhabitedType()] diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi index b7d081f6acb2..7bc47266d713 100644 --- a/mypy/typeshed/stdlib/_ast.pyi +++ b/mypy/typeshed/stdlib/_ast.pyi @@ -104,14 +104,14 @@ class Assign(stmt): class AugAssign(stmt): if sys.version_info >= (3, 10): __match_args__ = ("target", "op", "value") - target: expr + target: Name | Attribute | Subscript op: operator value: expr class AnnAssign(stmt): if sys.version_info >= (3, 10): __match_args__ = ("target", "annotation", "value", "simple") - target: expr + target: Name | Attribute | Subscript annotation: expr value: expr | None simple: int @@ -329,7 +329,7 @@ class JoinedStr(expr): if sys.version_info < (3, 8): class Num(expr): # Deprecated in 3.8; use Constant - n: complex + n: int | float | complex class Str(expr): # Deprecated in 3.8; use Constant s: str @@ -349,13 +349,13 @@ class Constant(expr): kind: str | None # Aliases for value, for backwards compatibility s: Any - n: complex + n: int | float | complex if sys.version_info >= (3, 8): class NamedExpr(expr): if sys.version_info >= (3, 10): __match_args__ = ("target", "value") - target: expr + target: Name value: expr class Attribute(expr): diff --git a/mypy/typeshed/stdlib/_codecs.pyi b/mypy/typeshed/stdlib/_codecs.pyi index 
9241ac6a7038..232256fbf614 100644 --- a/mypy/typeshed/stdlib/_codecs.pyi +++ b/mypy/typeshed/stdlib/_codecs.pyi @@ -1,5 +1,6 @@ import codecs import sys +from _typeshed import ReadableBuffer from collections.abc import Callable from typing import overload from typing_extensions import Literal, TypeAlias @@ -44,13 +45,13 @@ _BytesToBytesEncoding: TypeAlias = Literal[ _StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] @overload -def encode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... +def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... @overload def encode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... # type: ignore[misc] @overload def encode(obj: str, encoding: str = ..., errors: str = ...) -> bytes: ... @overload -def decode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] +def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] @overload def decode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... @@ -64,66 +65,72 @@ def decode( @overload def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = ...) -> bytes: ... @overload -def decode(obj: bytes, encoding: str = ..., errors: str = ...) -> str: ... +def decode(obj: ReadableBuffer, encoding: str = ..., errors: str = ...) -> str: ... def lookup(__encoding: str) -> codecs.CodecInfo: ... def charmap_build(__map: str) -> _CharMap: ... -def ascii_decode(__data: bytes, __errors: str | None = ...) -> tuple[str, int]: ... +def ascii_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def ascii_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def charmap_decode(__data: bytes, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[str, int]: ... 
+def charmap_decode(__data: ReadableBuffer, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[str, int]: ... def charmap_encode(__str: str, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[bytes, int]: ... -def escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... +def escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def escape_encode(__data: bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... -def latin_1_decode(__data: bytes, __errors: str | None = ...) -> tuple[str, int]: ... +def latin_1_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def latin_1_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.version_info >= (3, 9): - def raw_unicode_escape_decode(__data: str | bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def raw_unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ... + ) -> tuple[str, int]: ... else: - def raw_unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... + def raw_unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def raw_unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def readbuffer_encode(__data: str | bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... +def readbuffer_encode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.version_info >= (3, 9): - def unicode_escape_decode(__data: str | bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ... + ) -> tuple[str, int]: ... else: - def unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) 
-> tuple[str, int]: ... + def unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... def unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.version_info < (3, 8): - def unicode_internal_decode(__obj: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... - def unicode_internal_encode(__obj: str | bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... + def unicode_internal_decode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... + def unicode_internal_encode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_16_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_16_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_16_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_16_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... def utf_16_ex_decode( - __data: bytes, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... + __data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... ) -> tuple[str, int, int]: ... -def utf_16_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_16_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_32_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... 
+def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_32_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_32_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_32_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... def utf_32_ex_decode( - __data: bytes, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... + __data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... ) -> tuple[str, int, int]: ... -def utf_32_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_32_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_7_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_7_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_7_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_8_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_8_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def utf_8_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.platform == "win32": - def mbcs_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def mbcs_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def mbcs_encode(__str: str, __errors: str | None = ...) 
-> tuple[bytes, int]: ... - def code_page_decode(__codepage: int, __data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def code_page_decode( + __codepage: int, __data: ReadableBuffer, __errors: str | None = ..., __final: int = ... + ) -> tuple[str, int]: ... def code_page_encode(__code_page: int, __str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... - def oem_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def oem_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... def oem_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... diff --git a/mypy/typeshed/stdlib/_curses.pyi b/mypy/typeshed/stdlib/_curses.pyi index adb1ea84e45b..7053e85f7b7f 100644 --- a/mypy/typeshed/stdlib/_curses.pyi +++ b/mypy/typeshed/stdlib/_curses.pyi @@ -1,9 +1,10 @@ import sys -from _typeshed import SupportsRead +from _typeshed import ReadOnlyBuffer, SupportsRead from typing import IO, Any, NamedTuple, overload from typing_extensions import TypeAlias, final if sys.platform != "win32": + # Handled by PyCurses_ConvertToChtype in _cursesmodule.c. _ChType: TypeAlias = str | bytes | int # ACS codes are only initialized after initscr is called @@ -330,7 +331,7 @@ if sys.platform != "win32": def noraw() -> None: ... def pair_content(__pair_number: int) -> tuple[int, int]: ... def pair_number(__attr: int) -> int: ... - def putp(__string: bytes) -> None: ... + def putp(__string: ReadOnlyBuffer) -> None: ... def qiflush(__flag: bool = ...) -> None: ... def raw(__flag: bool = ...) -> None: ... def reset_prog_mode() -> None: ... @@ -352,7 +353,7 @@ if sys.platform != "win32": def tigetnum(__capname: str) -> int: ... def tigetstr(__capname: str) -> bytes | None: ... 
def tparm( - __str: bytes, + __str: ReadOnlyBuffer, __i1: int = ..., __i2: int = ..., __i3: int = ..., diff --git a/mypy/typeshed/stdlib/_decimal.pyi b/mypy/typeshed/stdlib/_decimal.pyi index 50c0f23734cd..ca97f69e2147 100644 --- a/mypy/typeshed/stdlib/_decimal.pyi +++ b/mypy/typeshed/stdlib/_decimal.pyi @@ -4,7 +4,7 @@ from _typeshed import Self from collections.abc import Container, Sequence from types import TracebackType from typing import Any, ClassVar, NamedTuple, Union, overload -from typing_extensions import TypeAlias +from typing_extensions import Literal, TypeAlias _Decimal: TypeAlias = Decimal | int _DecimalNew: TypeAlias = Union[Decimal, float, str, tuple[int, Sequence[int], int]] @@ -16,7 +16,7 @@ __libmpdec_version__: str class DecimalTuple(NamedTuple): sign: int digits: tuple[int, ...] - exponent: int + exponent: int | Literal["n", "N", "F"] ROUND_DOWN: str ROUND_HALF_UP: str diff --git a/mypy/typeshed/stdlib/_msi.pyi b/mypy/typeshed/stdlib/_msi.pyi index 9dda8a598549..1b86904d5ebc 100644 --- a/mypy/typeshed/stdlib/_msi.pyi +++ b/mypy/typeshed/stdlib/_msi.pyi @@ -12,11 +12,11 @@ if sys.platform == "win32": # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] - # Actual typename Summary, not exposed by the implementation - class _Summary: - def GetProperty(self, propid: int) -> str | bytes | None: ... + # Actual typename SummaryInformation, not exposed by the implementation + class _SummaryInformation: + def GetProperty(self, field: int) -> int | bytes | None: ... def GetPropertyCount(self) -> int: ... - def SetProperty(self, propid: int, value: str | bytes) -> None: ... + def SetProperty(self, field: int, value: int | str) -> None: ... def Persist(self) -> None: ... # Don't exist at runtime __new__: None # type: ignore[assignment] @@ -25,7 +25,7 @@ if sys.platform == "win32": class _Database: def OpenView(self, sql: str) -> _View: ... def Commit(self) -> None: ... 
- def GetSummaryInformation(self, updateCount: int) -> _Summary: ... + def GetSummaryInformation(self, updateCount: int) -> _SummaryInformation: ... def Close(self) -> None: ... # Don't exist at runtime __new__: None # type: ignore[assignment] diff --git a/mypy/typeshed/stdlib/_operator.pyi b/mypy/typeshed/stdlib/_operator.pyi index 92e04d0f499d..7488724caf74 100644 --- a/mypy/typeshed/stdlib/_operator.pyi +++ b/mypy/typeshed/stdlib/_operator.pyi @@ -1,5 +1,6 @@ import sys -from collections.abc import Callable, Container, Iterable, Mapping, MutableMapping, MutableSequence, Sequence +from _typeshed import SupportsGetItem +from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, TypeVar, overload from typing_extensions import ParamSpec, SupportsIndex, TypeAlias, final @@ -77,11 +78,9 @@ def delitem(__a: MutableSequence[Any], __b: slice) -> None: ... @overload def delitem(__a: MutableMapping[_K, Any], __b: _K) -> None: ... @overload -def getitem(__a: Sequence[_T], __b: SupportsIndex) -> _T: ... -@overload def getitem(__a: Sequence[_T], __b: slice) -> Sequence[_T]: ... @overload -def getitem(__a: Mapping[_K, _V], __b: _K) -> _V: ... +def getitem(__a: SupportsGetItem[_K, _V], __b: _K) -> _V: ... def indexOf(__a: Iterable[_T], __b: _T) -> int: ... @overload def setitem(__a: MutableSequence[_T], __b: SupportsIndex, __c: _T) -> None: ... @@ -106,17 +105,30 @@ class attrgetter(Generic[_T_co]): @final class itemgetter(Generic[_T_co]): + # mypy lacks support for PEP 646 https://github.com/python/mypy/issues/12280 + # So we have to define all of these overloads to simulate unpacking the arguments @overload - def __new__(cls, item: Any) -> itemgetter[Any]: ... + def __new__(cls, item: _T_co) -> itemgetter[_T_co]: ... @overload - def __new__(cls, item: Any, __item2: Any) -> itemgetter[tuple[Any, Any]]: ... 
+ def __new__(cls, item: _T_co, __item2: _T_co) -> itemgetter[tuple[_T_co, _T_co]]: ... @overload - def __new__(cls, item: Any, __item2: Any, __item3: Any) -> itemgetter[tuple[Any, Any, Any]]: ... + def __new__(cls, item: _T_co, __item2: _T_co, __item3: _T_co) -> itemgetter[tuple[_T_co, _T_co, _T_co]]: ... @overload - def __new__(cls, item: Any, __item2: Any, __item3: Any, __item4: Any) -> itemgetter[tuple[Any, Any, Any, Any]]: ... + def __new__( + cls, item: _T_co, __item2: _T_co, __item3: _T_co, __item4: _T_co + ) -> itemgetter[tuple[_T_co, _T_co, _T_co, _T_co]]: ... @overload - def __new__(cls, item: Any, *items: Any) -> itemgetter[tuple[Any, ...]]: ... - def __call__(self, obj: Any) -> _T_co: ... + def __new__( + cls, item: _T_co, __item2: _T_co, __item3: _T_co, __item4: _T_co, *items: _T_co + ) -> itemgetter[tuple[_T_co, ...]]: ... + # __key: _KT_contra in SupportsGetItem seems to be causing variance issues, ie: + # TypeVar "_KT_contra@SupportsGetItem" is contravariant + # "tuple[int, int]" is incompatible with protocol "SupportsIndex" + # preventing [_T_co, ...] instead of [Any, ...] + # + # A suspected mypy issue prevents using [..., _T] instead of [..., Any] here. + # https://github.com/python/mypy/issues/14032 + def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ... @final class methodcaller: diff --git a/mypy/typeshed/stdlib/_posixsubprocess.pyi b/mypy/typeshed/stdlib/_posixsubprocess.pyi index 2d221c4896f6..ca95336bb503 100644 --- a/mypy/typeshed/stdlib/_posixsubprocess.pyi +++ b/mypy/typeshed/stdlib/_posixsubprocess.pyi @@ -1,24 +1,32 @@ import sys +from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence +from typing_extensions import SupportsIndex if sys.platform != "win32": def cloexec_pipe() -> tuple[int, int]: ... 
def fork_exec( - args: Sequence[str], - executable_list: Sequence[bytes], - close_fds: bool, - fds_to_keep: Sequence[int], - cwd: str, - env_list: Sequence[bytes], - p2cread: int, - p2cwrite: int, - c2pred: int, - c2pwrite: int, - errread: int, - errwrite: int, - errpipe_read: int, - errpipe_write: int, - restore_signals: int, - start_new_session: int, - preexec_fn: Callable[[], None], + __process_args: Sequence[StrOrBytesPath] | None, + __executable_list: Sequence[bytes], + __close_fds: bool, + __fds_to_keep: tuple[int, ...], + __cwd_obj: str, + __env_list: Sequence[bytes] | None, + __p2cread: int, + __p2cwrite: int, + __c2pred: int, + __c2pwrite: int, + __errread: int, + __errwrite: int, + __errpipe_read: int, + __errpipe_write: int, + __restore_signals: int, + __call_setsid: int, + __pgid_to_set: int, + __gid_object: SupportsIndex | None, + __groups_list: list[int] | None, + __uid_object: SupportsIndex | None, + __child_umask: int, + __preexec_fn: Callable[[], None], + __allow_vfork: bool, ) -> int: ... diff --git a/mypy/typeshed/stdlib/_socket.pyi b/mypy/typeshed/stdlib/_socket.pyi index b2f77893d273..f7b0e6901bf4 100644 --- a/mypy/typeshed/stdlib/_socket.pyi +++ b/mypy/typeshed/stdlib/_socket.pyi @@ -15,10 +15,10 @@ _CMSG: TypeAlias = tuple[int, int, bytes] _CMSGArg: TypeAlias = tuple[int, int, ReadableBuffer] # Addresses can be either tuples of varying lengths (AF_INET, AF_INET6, -# AF_NETLINK, AF_TIPC) or strings (AF_UNIX). -_Address: TypeAlias = tuple[Any, ...] | str +# AF_NETLINK, AF_TIPC) or strings/buffers (AF_UNIX). +# See getsockaddrarg() in socketmodule.c. +_Address: TypeAlias = tuple[Any, ...] | str | ReadableBuffer _RetAddress: TypeAlias = Any -# TODO Most methods allow bytes as address objects # ----- Constants ----- # Some socket families are listed in the "Socket families" section of the docs, @@ -583,11 +583,15 @@ class socket: def proto(self) -> int: ... @property def timeout(self) -> float | None: ... 
- def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | None = ...) -> None: ... - def bind(self, __address: _Address | bytes) -> None: ... + if sys.platform == "win32": + def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | bytes | None = ...) -> None: ... + else: + def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | None = ...) -> None: ... + + def bind(self, __address: _Address) -> None: ... def close(self) -> None: ... - def connect(self, __address: _Address | bytes) -> None: ... - def connect_ex(self, __address: _Address | bytes) -> int: ... + def connect(self, __address: _Address) -> None: ... + def connect_ex(self, __address: _Address) -> int: ... def detach(self) -> int: ... def fileno(self) -> int: ... def getpeername(self) -> _RetAddress: ... @@ -634,7 +638,7 @@ class socket: def setblocking(self, __flag: bool) -> None: ... def settimeout(self, __value: float | None) -> None: ... @overload - def setsockopt(self, __level: int, __optname: int, __value: int | bytes) -> None: ... + def setsockopt(self, __level: int, __optname: int, __value: int | ReadableBuffer) -> None: ... @overload def setsockopt(self, __level: int, __optname: int, __value: None, __optlen: int) -> None: ... if sys.platform == "win32": @@ -671,9 +675,9 @@ def ntohs(__x: int) -> int: ... # param & ret val are 16-bit ints def htonl(__x: int) -> int: ... # param & ret val are 32-bit ints def htons(__x: int) -> int: ... # param & ret val are 16-bit ints def inet_aton(__ip_string: str) -> bytes: ... # ret val 4 bytes in length -def inet_ntoa(__packed_ip: bytes) -> str: ... +def inet_ntoa(__packed_ip: ReadableBuffer) -> str: ... def inet_pton(__address_family: int, __ip_string: str) -> bytes: ... -def inet_ntop(__address_family: int, __packed_ip: bytes) -> str: ... +def inet_ntop(__address_family: int, __packed_ip: ReadableBuffer) -> str: ... def getdefaulttimeout() -> float | None: ... 
def setdefaulttimeout(__timeout: float | None) -> None: ... diff --git a/mypy/typeshed/stdlib/_tkinter.pyi b/mypy/typeshed/stdlib/_tkinter.pyi index c2cf55505afb..fced8c95d2fa 100644 --- a/mypy/typeshed/stdlib/_tkinter.pyi +++ b/mypy/typeshed/stdlib/_tkinter.pyi @@ -17,8 +17,10 @@ from typing_extensions import Literal, final # (, ) @final class Tcl_Obj: - string: str | bytes - typename: str + @property + def string(self) -> str: ... + @property + def typename(self) -> str: ... __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, __other): ... def __ge__(self, __other): ... diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index b0ee1f4ad48a..68ac2a9b1900 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -36,6 +36,9 @@ AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True) # noqa: Y001 # "Incomplete | None" instead of "Any | None". Incomplete: TypeAlias = Any +# To describe a function parameter that is unused and will work with anything. +Unused: TypeAlias = object + # stable class IdentityFunction(Protocol): def __call__(self, __x: _T) -> _T: ... @@ -119,7 +122,7 @@ class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): # stable class SupportsGetItem(Protocol[_KT_contra, _VT_co]): - def __contains__(self, __x: object) -> bool: ... + def __contains__(self, __x: Any) -> bool: ... def __getitem__(self, __key: _KT_contra) -> _VT_co: ... # stable @@ -205,6 +208,7 @@ class HasFileno(Protocol): FileDescriptor: TypeAlias = int # stable FileDescriptorLike: TypeAlias = int | HasFileno # stable +FileDescriptorOrPath: TypeAlias = int | StrOrBytesPath # stable class SupportsRead(Protocol[_T_co]): @@ -234,6 +238,30 @@ else: WriteableBuffer: TypeAlias = bytearray | memoryview | array.array[Any] | mmap.mmap | ctypes._CData # stable # Same as _WriteableBuffer, but also includes read-only buffer types (like bytes). 
ReadableBuffer: TypeAlias = ReadOnlyBuffer | WriteableBuffer # stable +_BufferWithLen: TypeAlias = ReadableBuffer # not stable # noqa: Y047 + +# Anything that implements the read-write buffer interface, and can be sliced/indexed. +SliceableBuffer: TypeAlias = bytes | bytearray | memoryview | array.array[Any] | mmap.mmap +IndexableBuffer: TypeAlias = bytes | bytearray | memoryview | array.array[Any] | mmap.mmap +# https://github.com/python/typeshed/pull/9115#issuecomment-1304905864 +# Post PEP 688, they should be rewritten as such: +# from collections.abc import Sequence +# from typing import Sized, overload +# class SliceableBuffer(Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __getitem__(self, __slice: slice) -> Sequence[int]: ... +# class IndexableBuffer(Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __getitem__(self, __i: int) -> int: ... +# class SupportsGetItemBuffer(SliceableBuffer, IndexableBuffer, Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __contains__(self, __x: Any) -> bool: ... +# @overload +# def __getitem__(self, __slice: slice) -> Sequence[int]: ... +# @overload +# def __getitem__(self, __i: int) -> int: ... +# class SizedBuffer(Sized, Protocol): # instead of _BufferWithLen +# def __buffer__(self, __flags: int) -> memoryview: ... 
ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType] OptExcInfo: TypeAlias = Union[ExcInfo, tuple[None, None, None]] @@ -275,5 +303,4 @@ StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) # noqa: Y001 ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] # Objects suitable to be passed to sys.settrace, threading.settrace, and similar -# TODO: Ideally this would be a recursive type alias -TraceFunction: TypeAlias = Callable[[FrameType, str, Any], Callable[[FrameType, str, Any], Any] | None] +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi index ddea3d67ed14..4fbefc33abb1 100644 --- a/mypy/typeshed/stdlib/_winapi.pyi +++ b/mypy/typeshed/stdlib/_winapi.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import ReadableBuffer from collections.abc import Sequence from typing import Any, NoReturn, overload from typing_extensions import Literal, final @@ -183,6 +184,7 @@ if sys.platform == "win32": def PeekNamedPipe(__handle: int, __size: int = ...) -> tuple[int, int] | tuple[bytes, int, int]: ... if sys.version_info >= (3, 10): def LCMapStringEx(locale: str, flags: int, src: str) -> str: ... + def UnmapViewOfFile(__address: int) -> None: ... @overload def ReadFile(handle: int, size: int, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @@ -198,11 +200,11 @@ if sys.platform == "win32": def WaitForSingleObject(__handle: int, __milliseconds: int) -> int: ... def WaitNamedPipe(__name: str, __timeout: int) -> None: ... @overload - def WriteFile(handle: int, buffer: bytes, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @overload - def WriteFile(handle: int, buffer: bytes, overlapped: Literal[False] = ...) -> tuple[int, int]: ... 
+ def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[False] = ...) -> tuple[int, int]: ... @overload - def WriteFile(handle: int, buffer: bytes, overlapped: int | bool) -> tuple[Any, int]: ... + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ... @final class Overlapped: event: int diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi index f7f82333a362..7b39c88ed394 100644 --- a/mypy/typeshed/stdlib/abc.pyi +++ b/mypy/typeshed/stdlib/abc.pyi @@ -16,13 +16,10 @@ class ABCMeta(type): __mcls: type[Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwargs: Any ) -> Self: ... else: - # pyright doesn't like the first parameter being called mcls, hence the `pyright: ignore` - def __new__( - mcls: type[Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any # pyright: ignore - ) -> Self: ... + def __new__(mcls: type[Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any) -> Self: ... - def __instancecheck__(cls: ABCMeta, instance: Any) -> Any: ... - def __subclasscheck__(cls: ABCMeta, subclass: Any) -> Any: ... + def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... + def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = ...) -> None: ... def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... diff --git a/mypy/typeshed/stdlib/antigravity.pyi b/mypy/typeshed/stdlib/antigravity.pyi index e30917511030..3986e7d1c9f2 100644 --- a/mypy/typeshed/stdlib/antigravity.pyi +++ b/mypy/typeshed/stdlib/antigravity.pyi @@ -1 +1,3 @@ -def geohash(latitude: float, longitude: float, datedow: bytes) -> None: ... +from _typeshed import ReadableBuffer + +def geohash(latitude: float, longitude: float, datedow: ReadableBuffer) -> None: ... 
diff --git a/mypy/typeshed/stdlib/array.pyi b/mypy/typeshed/stdlib/array.pyi index 2d27cd72e8df..e84456049df6 100644 --- a/mypy/typeshed/stdlib/array.pyi +++ b/mypy/typeshed/stdlib/array.pyi @@ -21,15 +21,19 @@ class array(MutableSequence[_T], Generic[_T]): @property def itemsize(self) -> int: ... @overload - def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | Iterable[int] = ...) -> None: ... + def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | bytearray | Iterable[int] = ...) -> None: ... @overload - def __init__(self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | Iterable[float] = ...) -> None: ... + def __init__( + self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | bytearray | Iterable[float] = ... + ) -> None: ... @overload - def __init__(self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | Iterable[str] = ...) -> None: ... + def __init__( + self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | bytearray | Iterable[str] = ... + ) -> None: ... @overload def __init__(self, __typecode: str, __initializer: Iterable[_T]) -> None: ... @overload - def __init__(self, __typecode: str, __initializer: bytes = ...) -> None: ... + def __init__(self, __typecode: str, __initializer: bytes | bytearray = ...) -> None: ... def append(self, __v: _T) -> None: ... def buffer_info(self) -> tuple[int, int]: ... def byteswap(self) -> None: ... @@ -52,7 +56,7 @@ class array(MutableSequence[_T], Generic[_T]): def tolist(self) -> list[_T]: ... def tounicode(self) -> str: ... if sys.version_info < (3, 9): - def fromstring(self, __buffer: bytes) -> None: ... + def fromstring(self, __buffer: str | ReadableBuffer) -> None: ... def tostring(self) -> bytes: ... def __len__(self) -> int: ... 
diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi index 6c9dbd0162b8..9a5bf0a623fb 100644 --- a/mypy/typeshed/stdlib/ast.pyi +++ b/mypy/typeshed/stdlib/ast.pyi @@ -1,5 +1,7 @@ +import os import sys from _ast import * +from _typeshed import ReadableBuffer from collections.abc import Iterator from typing import Any, TypeVar, overload from typing_extensions import Literal @@ -10,7 +12,7 @@ if sys.version_info >= (3, 8): def __init__(cls, *args: object) -> None: ... class Num(Constant, metaclass=_ABC): - value: complex + value: int | float | complex class Str(Constant, metaclass=_ABC): value: str @@ -87,6 +89,7 @@ class NodeVisitor: def visit_Constant(self, node: Constant) -> Any: ... if sys.version_info >= (3, 8): def visit_NamedExpr(self, node: NamedExpr) -> Any: ... + def visit_TypeIgnore(self, node: TypeIgnore) -> Any: ... def visit_Attribute(self, node: Attribute) -> Any: ... def visit_Subscript(self, node: Subscript) -> Any: ... @@ -133,6 +136,19 @@ class NodeVisitor: def visit_keyword(self, node: keyword) -> Any: ... def visit_alias(self, node: alias) -> Any: ... def visit_withitem(self, node: withitem) -> Any: ... + if sys.version_info >= (3, 10): + def visit_Match(self, node: Match) -> Any: ... + def visit_MatchValue(self, node: MatchValue) -> Any: ... + def visit_MatchSequence(self, node: MatchSequence) -> Any: ... + def visit_MatchStar(self, node: MatchStar) -> Any: ... + def visit_MatchMapping(self, node: MatchMapping) -> Any: ... + def visit_MatchClass(self, node: MatchClass) -> Any: ... + def visit_MatchAs(self, node: MatchAs) -> Any: ... + def visit_MatchOr(self, node: MatchOr) -> Any: ... + + if sys.version_info >= (3, 11): + def visit_TryStar(self, node: TryStar) -> Any: ... + # visit methods for deprecated nodes def visit_ExtSlice(self, node: ExtSlice) -> Any: ... def visit_Index(self, node: Index) -> Any: ... 
@@ -157,8 +173,8 @@ _T = TypeVar("_T", bound=AST) if sys.version_info >= (3, 8): @overload def parse( - source: str | bytes, - filename: str | bytes = ..., + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: Literal["exec"] = ..., *, type_comments: bool = ..., @@ -166,8 +182,8 @@ if sys.version_info >= (3, 8): ) -> Module: ... @overload def parse( - source: str | bytes, - filename: str | bytes, + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["eval"], *, type_comments: bool = ..., @@ -175,8 +191,8 @@ if sys.version_info >= (3, 8): ) -> Expression: ... @overload def parse( - source: str | bytes, - filename: str | bytes, + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["func_type"], *, type_comments: bool = ..., @@ -184,8 +200,8 @@ if sys.version_info >= (3, 8): ) -> FunctionType: ... @overload def parse( - source: str | bytes, - filename: str | bytes, + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["single"], *, type_comments: bool = ..., @@ -193,7 +209,7 @@ if sys.version_info >= (3, 8): ) -> Interactive: ... @overload def parse( - source: str | bytes, + source: str | ReadableBuffer, *, mode: Literal["eval"], type_comments: bool = ..., @@ -201,7 +217,7 @@ if sys.version_info >= (3, 8): ) -> Expression: ... @overload def parse( - source: str | bytes, + source: str | ReadableBuffer, *, mode: Literal["func_type"], type_comments: bool = ..., @@ -209,7 +225,7 @@ if sys.version_info >= (3, 8): ) -> FunctionType: ... @overload def parse( - source: str | bytes, + source: str | ReadableBuffer, *, mode: Literal["single"], type_comments: bool = ..., @@ -217,8 +233,8 @@ if sys.version_info >= (3, 8): ) -> Interactive: ... 
@overload def parse( - source: str | bytes, - filename: str | bytes = ..., + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: str = ..., *, type_comments: bool = ..., @@ -227,17 +243,23 @@ if sys.version_info >= (3, 8): else: @overload - def parse(source: str | bytes, filename: str | bytes = ..., mode: Literal["exec"] = ...) -> Module: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: Literal["exec"] = ... + ) -> Module: ... @overload - def parse(source: str | bytes, filename: str | bytes, mode: Literal["eval"]) -> Expression: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["eval"] + ) -> Expression: ... @overload - def parse(source: str | bytes, filename: str | bytes, mode: Literal["single"]) -> Interactive: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["single"] + ) -> Interactive: ... @overload - def parse(source: str | bytes, *, mode: Literal["eval"]) -> Expression: ... + def parse(source: str | ReadableBuffer, *, mode: Literal["eval"]) -> Expression: ... @overload - def parse(source: str | bytes, *, mode: Literal["single"]) -> Interactive: ... + def parse(source: str | ReadableBuffer, *, mode: Literal["single"]) -> Interactive: ... @overload - def parse(source: str | bytes, filename: str | bytes = ..., mode: str = ...) -> AST: ... + def parse(source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any] = ..., mode: str = ...) -> AST: ... if sys.version_info >= (3, 9): def unparse(ast_obj: AST) -> str: ... @@ -253,7 +275,7 @@ else: def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... def fix_missing_locations(node: _T) -> _T: ... -def get_docstring(node: AST, clean: bool = ...) -> str | None: ... 
+def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = ...) -> str | None: ... def increment_lineno(node: _T, n: int = ...) -> _T: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi index 8697bfe306c4..83576ab6455e 100644 --- a/mypy/typeshed/stdlib/asyncio/base_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi @@ -1,11 +1,11 @@ import ssl import sys -from _typeshed import FileDescriptorLike, WriteableBuffer +from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle, _TaskFactory from asyncio.futures import Future from asyncio.protocols import BaseProtocol from asyncio.tasks import Task -from asyncio.transports import BaseTransport, ReadTransport, SubprocessTransport, WriteTransport +from asyncio.transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport from collections.abc import Awaitable, Callable, Coroutine, Generator, Iterable, Sequence from contextvars import Context from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket @@ -102,7 +102,7 @@ class BaseEventLoop(AbstractEventLoop): async def getaddrinfo( self, host: bytes | str | None, - port: str | int | None, + port: bytes | str | int | None, *, family: int = ..., type: int = ..., @@ -129,7 +129,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_shutdown_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... 
@overload async def create_connection( self, @@ -148,7 +148,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_shutdown_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 8): @overload async def create_connection( @@ -167,7 +167,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, @@ -185,7 +185,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... else: @overload async def create_connection( @@ -202,7 +202,7 @@ class BaseEventLoop(AbstractEventLoop): local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, @@ -218,7 +218,7 @@ class BaseEventLoop(AbstractEventLoop): local_addr: None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): @overload async def create_server( @@ -266,7 +266,7 @@ class BaseEventLoop(AbstractEventLoop): server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> BaseTransport: ... + ) -> Transport: ... 
async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -275,7 +275,7 @@ class BaseEventLoop(AbstractEventLoop): ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... else: @overload async def create_server( @@ -320,7 +320,7 @@ class BaseEventLoop(AbstractEventLoop): server_side: bool = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> BaseTransport: ... + ) -> Transport: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -328,13 +328,13 @@ class BaseEventLoop(AbstractEventLoop): *, ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... async def sock_sendfile( self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... ) -> int: ... async def sendfile( - self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + self, transport: WriteTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... ) -> int: ... if sys.version_info >= (3, 11): async def create_datagram_endpoint( # type: ignore[override] @@ -349,7 +349,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = ..., allow_broadcast: bool | None = ..., sock: socket | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[DatagramTransport, _ProtocolT]: ... else: async def create_datagram_endpoint( self, @@ -364,7 +364,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = ..., allow_broadcast: bool | None = ..., sock: socket | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. 
async def connect_read_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any @@ -411,13 +411,13 @@ class BaseEventLoop(AbstractEventLoop): # BaseEventLoop, only on subclasses. We list them here for now for convenience. async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... - async def sock_sendall(self, sock: socket, data: bytes) -> None: ... + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... async def sock_connect(self, sock: socket, address: _Address) -> None: ... async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... if sys.version_info >= (3, 11): async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... - async def sock_sendto(self, sock: socket, data: bytes, address: _Address) -> None: ... + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... # Signal handling. def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... def remove_signal_handler(self, sig: int) -> bool: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index 586116136c1a..7241d5a29f8d 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -1,6 +1,6 @@ import ssl import sys -from _typeshed import FileDescriptorLike, Self, StrPath, WriteableBuffer +from _typeshed import FileDescriptorLike, ReadableBuffer, Self, StrPath, WriteableBuffer from abc import ABCMeta, abstractmethod from collections.abc import Awaitable, Callable, Coroutine, Generator, Sequence from contextvars import Context @@ -12,7 +12,7 @@ from .base_events import Server from .futures import Future from .protocols import BaseProtocol from .tasks import Task -from .transports import BaseTransport, ReadTransport, SubprocessTransport, WriteTransport +from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport from .unix_events import AbstractChildWatcher if sys.version_info >= (3, 8): @@ -194,7 +194,7 @@ class AbstractEventLoop: async def getaddrinfo( self, host: bytes | str | None, - port: str | int | None, + port: bytes | str | int | None, *, family: int = ..., type: int = ..., @@ -223,7 +223,7 @@ class AbstractEventLoop: ssl_shutdown_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( @@ -243,7 +243,7 @@ class AbstractEventLoop: ssl_shutdown_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... 
elif sys.version_info >= (3, 8): @overload @abstractmethod @@ -263,7 +263,7 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( @@ -282,7 +282,7 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... else: @overload @abstractmethod @@ -300,7 +300,7 @@ class AbstractEventLoop: local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( @@ -317,7 +317,7 @@ class AbstractEventLoop: local_addr: None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): @overload @abstractmethod @@ -360,7 +360,7 @@ class AbstractEventLoop: @abstractmethod async def start_tls( self, - transport: BaseTransport, + transport: WriteTransport, protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, @@ -368,7 +368,7 @@ class AbstractEventLoop: server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> BaseTransport: ... + ) -> Transport: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -428,7 +428,7 @@ class AbstractEventLoop: server_side: bool = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> BaseTransport: ... + ) -> Transport: ... 
async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -449,7 +449,7 @@ class AbstractEventLoop: ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 10): async def connect_accepted_socket( self, @@ -458,7 +458,7 @@ class AbstractEventLoop: *, ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): async def create_unix_connection( self, @@ -470,7 +470,7 @@ class AbstractEventLoop: server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., ssl_shutdown_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... else: async def create_unix_connection( self, @@ -481,7 +481,7 @@ class AbstractEventLoop: sock: socket | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: ... @abstractmethod async def sock_sendfile( @@ -489,7 +489,7 @@ class AbstractEventLoop: ) -> int: ... @abstractmethod async def sendfile( - self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + self, transport: WriteTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... ) -> int: ... @abstractmethod async def create_datagram_endpoint( @@ -505,7 +505,7 @@ class AbstractEventLoop: reuse_port: bool | None = ..., allow_broadcast: bool | None = ..., sock: socket | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. 
@abstractmethod async def connect_read_pipe( @@ -562,7 +562,7 @@ class AbstractEventLoop: @abstractmethod async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... @abstractmethod - async def sock_sendall(self, sock: socket, data: bytes) -> None: ... + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... @abstractmethod async def sock_connect(self, sock: socket, address: _Address) -> None: ... @abstractmethod @@ -573,7 +573,7 @@ class AbstractEventLoop: @abstractmethod async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... @abstractmethod - async def sock_sendto(self, sock: socket, data: bytes, address: _Address) -> None: ... + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... # Signal handling. @abstractmethod def add_signal_handler(self, sig: int, callback: Callable[..., object], *args: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/runners.pyi b/mypy/typeshed/stdlib/asyncio/runners.pyi index 49d236bbee9e..74ed83ed8dc4 100644 --- a/mypy/typeshed/stdlib/asyncio/runners.pyi +++ b/mypy/typeshed/stdlib/asyncio/runners.pyi @@ -3,6 +3,7 @@ from _typeshed import Self from collections.abc import Callable, Coroutine from contextvars import Context from typing import Any, TypeVar +from typing_extensions import final from .events import AbstractEventLoop @@ -13,6 +14,7 @@ else: _T = TypeVar("_T") if sys.version_info >= (3, 11): + @final class Runner: def __init__(self, *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ...) -> None: ... def __enter__(self: Self) -> Self: ... @@ -21,7 +23,12 @@ if sys.version_info >= (3, 11): def get_loop(self) -> AbstractEventLoop: ... def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = ...) -> _T: ... 
-if sys.version_info >= (3, 8): +if sys.version_info >= (3, 12): + def run( + main: Coroutine[Any, Any, _T], *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ... + ) -> _T: ... + +elif sys.version_info >= (3, 8): def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = ...) -> _T: ... else: diff --git a/mypy/typeshed/stdlib/asyncio/sslproto.pyi b/mypy/typeshed/stdlib/asyncio/sslproto.pyi index 3bb4db69c123..3c1c7b2e4edb 100644 --- a/mypy/typeshed/stdlib/asyncio/sslproto.pyi +++ b/mypy/typeshed/stdlib/asyncio/sslproto.pyi @@ -74,7 +74,7 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): def get_extra_info(self, name: str, default: Any | None = ...) -> dict[str, Any]: ... @property def _protocol_paused(self) -> bool: ... - def write(self, data: bytes) -> None: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... def can_write_eof(self) -> Literal[False]: ... if sys.version_info >= (3, 11): def get_write_buffer_limits(self) -> tuple[int, int]: ... diff --git a/mypy/typeshed/stdlib/asyncio/streams.pyi b/mypy/typeshed/stdlib/asyncio/streams.pyi index 139d86b292c3..00d95d93f2ff 100644 --- a/mypy/typeshed/stdlib/asyncio/streams.pyi +++ b/mypy/typeshed/stdlib/asyncio/streams.pyi @@ -3,7 +3,7 @@ import sys from _typeshed import Self, StrPath from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence from typing import Any -from typing_extensions import TypeAlias +from typing_extensions import SupportsIndex, TypeAlias from . import events, protocols, transports from .base_events import Server @@ -139,8 +139,8 @@ class StreamWriter: ) -> None: ... @property def transport(self) -> transports.WriteTransport: ... - def write(self, data: bytes) -> None: ... - def writelines(self, data: Iterable[bytes]) -> None: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... 
+ def writelines(self, data: Iterable[bytes | bytearray | memoryview]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def close(self) -> None: ... @@ -160,9 +160,10 @@ class StreamReader(AsyncIterator[bytes]): def set_transport(self, transport: transports.BaseTransport) -> None: ... def feed_eof(self) -> None: ... def at_eof(self) -> bool: ... - def feed_data(self, data: bytes) -> None: ... + def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... async def readline(self) -> bytes: ... - async def readuntil(self, separator: bytes = ...) -> bytes: ... + # Can be any buffer that supports len(); consider changing to a Protocol if PEP 688 is accepted + async def readuntil(self, separator: bytes | bytearray | memoryview = ...) -> bytes: ... async def read(self, n: int = ...) -> bytes: ... async def readexactly(self, n: int) -> bytes: ... def __aiter__(self: Self) -> Self: ... diff --git a/mypy/typeshed/stdlib/asyncio/subprocess.pyi b/mypy/typeshed/stdlib/asyncio/subprocess.pyi index 32fcf1a65491..d483f57551b0 100644 --- a/mypy/typeshed/stdlib/asyncio/subprocess.pyi +++ b/mypy/typeshed/stdlib/asyncio/subprocess.pyi @@ -2,7 +2,7 @@ import subprocess import sys from _typeshed import StrOrBytesPath from asyncio import events, protocols, streams, transports -from collections.abc import Callable +from collections.abc import Callable, Collection from typing import IO, Any from typing_extensions import Literal, TypeAlias @@ -38,9 +38,9 @@ class Process: def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... - async def communicate(self, input: bytes | None = ...) -> tuple[bytes, bytes]: ... + async def communicate(self, input: bytes | bytearray | memoryview | None = ...) -> tuple[bytes, bytes]: ... 
-if sys.version_info >= (3, 10): +if sys.version_info >= (3, 11): async def create_subprocess_shell( cmd: str | bytes, stdin: int | IO[Any] | None = ..., @@ -65,7 +65,13 @@ if sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + process_group: int | None = ..., + pipesize: int = ..., ) -> Process: ... async def create_subprocess_exec( program: _ExecArg, @@ -91,10 +97,80 @@ if sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + process_group: int | None = ..., + pipesize: int = ..., ) -> Process: ... 
-else: +elif sys.version_info >= (3, 10): + async def create_subprocess_shell( + cmd: str | bytes, + stdin: int | IO[Any] | None = ..., + stdout: int | IO[Any] | None = ..., + stderr: int | IO[Any] | None = ..., + limit: int = ..., + *, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = ..., + shell: Literal[True] = ..., + bufsize: Literal[0] = ..., + encoding: None = ..., + errors: None = ..., + text: Literal[False, None] = ..., + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: subprocess._ENV | None = ..., + startupinfo: Any | None = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + pipesize: int = ..., + ) -> Process: ... 
+ async def create_subprocess_exec( + program: _ExecArg, + *args: _ExecArg, + stdin: int | IO[Any] | None = ..., + stdout: int | IO[Any] | None = ..., + stderr: int | IO[Any] | None = ..., + limit: int = ..., + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = ..., + shell: Literal[True] = ..., + bufsize: Literal[0] = ..., + encoding: None = ..., + errors: None = ..., + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + text: bool | None = ..., + executable: StrOrBytesPath | None = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: subprocess._ENV | None = ..., + startupinfo: Any | None = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + pipesize: int = ..., + ) -> Process: ... + +else: # >= 3.9 async def create_subprocess_shell( cmd: str | bytes, stdin: int | IO[Any] | None = ..., @@ -120,7 +196,11 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., ) -> Process: ... async def create_subprocess_exec( program: _ExecArg, @@ -147,5 +227,9 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., ) -> Process: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi index 67581eb6a5ad..43dd020fa99d 100644 --- a/mypy/typeshed/stdlib/asyncio/tasks.pyi +++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -270,7 +270,7 @@ else: # While this is true in general, here it's sort-of okay to have a covariant subclass, # since the only reason why `asyncio.Future` is invariant is the `set_result()` method, # and `asyncio.Task.set_result()` always raises. -class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] +class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: ignore[reportGeneralTypeIssues] if sys.version_info >= (3, 8): def __init__( self, diff --git a/mypy/typeshed/stdlib/asyncio/transports.pyi b/mypy/typeshed/stdlib/asyncio/transports.pyi index 3eb3d1ae3173..893292dd12b6 100644 --- a/mypy/typeshed/stdlib/asyncio/transports.pyi +++ b/mypy/typeshed/stdlib/asyncio/transports.pyi @@ -23,8 +23,8 @@ class WriteTransport(BaseTransport): def set_write_buffer_limits(self, high: int | None = ..., low: int | None = ...) -> None: ... def get_write_buffer_size(self) -> int: ... def get_write_buffer_limits(self) -> tuple[int, int]: ... - def write(self, data: bytes) -> None: ... - def writelines(self, list_of_data: Iterable[bytes]) -> None: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, list_of_data: Iterable[bytes | bytearray | memoryview]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def abort(self) -> None: ... @@ -32,7 +32,7 @@ class WriteTransport(BaseTransport): class Transport(ReadTransport, WriteTransport): ... class DatagramTransport(BaseTransport): - def sendto(self, data: bytes, addr: _Address | None = ...) -> None: ... + def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = ...) -> None: ... def abort(self) -> None: ... 
class SubprocessTransport(BaseTransport): diff --git a/mypy/typeshed/stdlib/asyncio/trsock.pyi b/mypy/typeshed/stdlib/asyncio/trsock.pyi index b8972e43d255..742216a84ccd 100644 --- a/mypy/typeshed/stdlib/asyncio/trsock.pyi +++ b/mypy/typeshed/stdlib/asyncio/trsock.pyi @@ -1,5 +1,6 @@ import socket import sys +from _typeshed import ReadableBuffer from builtins import type as Type # alias to avoid name clashes with property named "type" from collections.abc import Iterable from types import TracebackType @@ -7,7 +8,7 @@ from typing import Any, BinaryIO, NoReturn, overload from typing_extensions import TypeAlias # These are based in socket, maybe move them out into _typeshed.pyi or such -_Address: TypeAlias = tuple[Any, ...] | str +_Address: TypeAlias = socket._Address _RetAddress: TypeAlias = Any _WriteBuffer: TypeAlias = bytearray | memoryview _CMSG: TypeAlias = tuple[int, int, bytes] @@ -30,7 +31,7 @@ class TransportSocket: @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... @overload - def setsockopt(self, level: int, optname: int, value: int | bytes) -> None: ... + def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer) -> None: ... @overload def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ... def getpeername(self) -> _RetAddress: ... @@ -42,9 +43,9 @@ class TransportSocket: if sys.version_info < (3, 11): def _na(self, what: str) -> None: ... def accept(self) -> tuple[socket.socket, _RetAddress]: ... - def connect(self, address: _Address | bytes) -> None: ... - def connect_ex(self, address: _Address | bytes) -> int: ... - def bind(self, address: _Address | bytes) -> None: ... + def connect(self, address: _Address) -> None: ... + def connect_ex(self, address: _Address) -> int: ... + def bind(self, address: _Address) -> None: ... if sys.platform == "win32": def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> None: ... 
else: @@ -57,22 +58,26 @@ class TransportSocket: def detach(self) -> int: ... if sys.platform == "linux": def sendmsg_afalg( - self, msg: Iterable[bytes] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... ) -> int: ... else: def sendmsg_afalg( - self, msg: Iterable[bytes] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... ) -> NoReturn: ... def sendmsg( - self, __buffers: Iterable[bytes], __ancdata: Iterable[_CMSG] = ..., __flags: int = ..., __address: _Address = ... + self, + __buffers: Iterable[ReadableBuffer], + __ancdata: Iterable[_CMSG] = ..., + __flags: int = ..., + __address: _Address = ..., ) -> int: ... @overload - def sendto(self, data: bytes, address: _Address) -> int: ... + def sendto(self, data: ReadableBuffer, address: _Address) -> int: ... @overload - def sendto(self, data: bytes, flags: int, address: _Address) -> int: ... - def send(self, data: bytes, flags: int = ...) -> int: ... - def sendall(self, data: bytes, flags: int = ...) -> None: ... + def sendto(self, data: ReadableBuffer, flags: int, address: _Address) -> int: ... + def send(self, data: ReadableBuffer, flags: int = ...) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ... def set_inheritable(self, inheritable: bool) -> None: ... if sys.platform == "win32": def share(self, process_id: int) -> bytes: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/windows_events.pyi b/mypy/typeshed/stdlib/asyncio/windows_events.pyi index ffb487fff03a..dca06ea33b13 100644 --- a/mypy/typeshed/stdlib/asyncio/windows_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/windows_events.pyi @@ -1,6 +1,6 @@ import socket import sys -from _typeshed import WriteableBuffer +from _typeshed import Incomplete, WriteableBuffer from collections.abc import Callable from typing import IO, Any, ClassVar, NoReturn from typing_extensions import Literal @@ -50,10 +50,14 @@ if sys.platform == "win32": def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... def accept(self, listener: socket.socket) -> futures.Future[Any]: ... - def connect(self, conn: socket.socket, address: bytes) -> futures.Future[Any]: ... + def connect( + self, + conn: socket.socket, + address: tuple[Incomplete, Incomplete] | tuple[Incomplete, Incomplete, Incomplete, Incomplete], + ) -> futures.Future[Any]: ... def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... - async def connect_pipe(self, address: bytes) -> windows_utils.PipeHandle: ... + async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ... def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = ...) -> bool: ... def close(self) -> None: ... 
SelectorEventLoop = _WindowsSelectorEventLoop diff --git a/mypy/typeshed/stdlib/asyncore.pyi b/mypy/typeshed/stdlib/asyncore.pyi index 0025ec3f9b4e..565deb4d1cad 100644 --- a/mypy/typeshed/stdlib/asyncore.pyi +++ b/mypy/typeshed/stdlib/asyncore.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import FileDescriptorLike +from _typeshed import FileDescriptorLike, ReadableBuffer from socket import socket from typing import Any, overload from typing_extensions import TypeAlias @@ -45,7 +45,7 @@ class dispatcher: def bind(self, addr: tuple[Any, ...] | str) -> None: ... def connect(self, address: tuple[Any, ...] | str) -> None: ... def accept(self) -> tuple[_Socket, Any] | None: ... - def send(self, data: bytes) -> int: ... + def send(self, data: ReadableBuffer) -> int: ... def recv(self, buffer_size: int) -> bytes: ... def close(self) -> None: ... def log(self, message: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/base64.pyi b/mypy/typeshed/stdlib/base64.pyi index c2ec85cac40a..816622eeb071 100644 --- a/mypy/typeshed/stdlib/base64.pyi +++ b/mypy/typeshed/stdlib/base64.pyi @@ -42,7 +42,9 @@ if sys.version_info >= (3, 10): def b32hexdecode(s: str | ReadableBuffer, casefold: bool = ...) -> bytes: ... def a85encode(b: ReadableBuffer, *, foldspaces: bool = ..., wrapcol: int = ..., pad: bool = ..., adobe: bool = ...) -> bytes: ... -def a85decode(b: str | ReadableBuffer, *, foldspaces: bool = ..., adobe: bool = ..., ignorechars: str | bytes = ...) -> bytes: ... +def a85decode( + b: str | ReadableBuffer, *, foldspaces: bool = ..., adobe: bool = ..., ignorechars: bytearray | bytes = ... +) -> bytes: ... def b85encode(b: ReadableBuffer, pad: bool = ...) -> bytes: ... def b85decode(b: str | ReadableBuffer) -> bytes: ... def decode(input: IO[bytes], output: IO[bytes]) -> None: ... 
diff --git a/mypy/typeshed/stdlib/binhex.pyi b/mypy/typeshed/stdlib/binhex.pyi index 639d30d1d0de..e0993c840ce7 100644 --- a/mypy/typeshed/stdlib/binhex.pyi +++ b/mypy/typeshed/stdlib/binhex.pyi @@ -1,3 +1,4 @@ +from _typeshed import _BufferWithLen from typing import IO, Any from typing_extensions import Literal, TypeAlias @@ -27,9 +28,9 @@ class openrsrc: class BinHex: def __init__(self, name_finfo_dlen_rlen: _FileInfoTuple, ofp: _FileHandleUnion) -> None: ... - def write(self, data: bytes) -> None: ... + def write(self, data: _BufferWithLen) -> None: ... def close_data(self) -> None: ... - def write_rsrc(self, data: bytes) -> None: ... + def write_rsrc(self, data: _BufferWithLen) -> None: ... def close(self) -> None: ... def binhex(inp: str, out: str) -> None: ... diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index ed60a7c018e7..b2241bb60527 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1,9 +1,10 @@ +import _ast import sys import types -from _ast import AST from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import ( AnyStr_co, + FileDescriptorOrPath, OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, @@ -11,7 +12,6 @@ from _typeshed import ( OpenTextMode, ReadableBuffer, Self, - StrOrBytesPath, SupportsAdd, SupportsAiter, SupportsAnext, @@ -50,7 +50,6 @@ from typing import ( # noqa: Y027 SupportsComplex, SupportsFloat, SupportsInt, - SupportsRound, TypeVar, overload, type_check_only, @@ -299,7 +298,7 @@ class int: def __index__(self) -> int: ... class float: - def __new__(cls: type[Self], x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... + def __new__(cls: type[Self], __x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... def as_integer_ratio(self) -> tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... 
@@ -495,15 +494,11 @@ class str(Sequence[str]): class bytes(ByteString): @overload - def __new__(cls: type[Self], __ints: Iterable[SupportsIndex]) -> Self: ... + def __new__(cls: type[Self], __o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer) -> Self: ... @overload def __new__(cls: type[Self], __string: str, encoding: str, errors: str = ...) -> Self: ... @overload - def __new__(cls: type[Self], __length: SupportsIndex) -> Self: ... - @overload def __new__(cls: type[Self]) -> Self: ... - @overload - def __new__(cls: type[Self], __o: SupportsBytes) -> Self: ... def capitalize(self) -> bytes: ... def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytes: ... def count( @@ -589,7 +584,7 @@ class bytes(ByteString): def __rmul__(self, __n: SupportsIndex) -> bytes: ... def __mod__(self, __value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ - def __contains__(self, __o: SupportsIndex | bytes) -> bool: ... # type: ignore[override] + def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... # type: ignore[override] def __eq__(self, __x: object) -> bool: ... def __ne__(self, __x: object) -> bool: ... def __lt__(self, __x: bytes) -> bool: ... @@ -604,11 +599,9 @@ class bytearray(MutableSequence[int], ByteString): @overload def __init__(self) -> None: ... @overload - def __init__(self, __ints: Iterable[SupportsIndex]) -> None: ... + def __init__(self, __ints: Iterable[SupportsIndex] | SupportsIndex | ReadableBuffer) -> None: ... @overload def __init__(self, __string: str, encoding: str, errors: str = ...) -> None: ... - @overload - def __init__(self, __length: SupportsIndex) -> None: ... def append(self, __item: SupportsIndex) -> None: ... def capitalize(self) -> bytearray: ... def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytearray: ... @@ -712,14 +705,14 @@ class bytearray(MutableSequence[int], ByteString): def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... 
# type: ignore[override] def __eq__(self, __x: object) -> bool: ... def __ne__(self, __x: object) -> bool: ... - def __lt__(self, __x: bytes) -> bool: ... - def __le__(self, __x: bytes) -> bool: ... - def __gt__(self, __x: bytes) -> bool: ... - def __ge__(self, __x: bytes) -> bool: ... + def __lt__(self, __x: ReadableBuffer) -> bool: ... + def __le__(self, __x: ReadableBuffer) -> bool: ... + def __gt__(self, __x: ReadableBuffer) -> bool: ... + def __ge__(self, __x: ReadableBuffer) -> bool: ... def __alloc__(self) -> int: ... @final -class memoryview(Sized, Sequence[int]): +class memoryview(Sequence[int]): @property def format(self) -> str: ... @property @@ -735,7 +728,7 @@ class memoryview(Sized, Sequence[int]): @property def ndim(self) -> int: ... @property - def obj(self) -> bytes | bytearray: ... + def obj(self) -> ReadableBuffer: ... @property def c_contiguous(self) -> bool: ... @property @@ -1103,7 +1096,7 @@ class property: class _NotImplementedType(Any): # type: ignore[misc] # A little weird, but typing the __call__ as NotImplemented makes the error message # for NotImplemented() much better - __call__: NotImplemented # type: ignore[valid-type] + __call__: NotImplemented # type: ignore[valid-type] # pyright: ignore[reportGeneralTypeIssues] NotImplemented: _NotImplementedType @@ -1138,7 +1131,7 @@ if sys.version_info >= (3, 10): # TODO: `compile` has a more precise return type in reality; work on a way of expressing that? 
if sys.version_info >= (3, 8): def compile( - source: str | ReadableBuffer | AST, + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: int = ..., @@ -1150,7 +1143,7 @@ if sys.version_info >= (3, 8): else: def compile( - source: str | ReadableBuffer | AST, + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, filename: str | ReadableBuffer | _PathLike[Any], mode: str, flags: int = ..., @@ -1239,19 +1232,13 @@ def iter(__function: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: @overload def iter(__function: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ... -# We need recursive types to express the type of the second argument to `isinstance` properly, hence the use of `Any` if sys.version_info >= (3, 10): - def isinstance( - __obj: object, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] - ) -> bool: ... - def issubclass( - __cls: type, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] - ) -> bool: ... - + _ClassInfo: TypeAlias = type | types.UnionType | tuple[_ClassInfo, ...] else: - def isinstance(__obj: object, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... - def issubclass(__cls: type, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... + _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] +def isinstance(__obj: object, __class_or_tuple: _ClassInfo) -> bool: ... +def issubclass(__cls: type, __class_or_tuple: _ClassInfo) -> bool: ... def len(__obj: Sized) -> int: ... def license() -> None: ... def locals() -> dict[str, Any]: ... @@ -1333,13 +1320,12 @@ def next(__i: SupportsNext[_T]) -> _T: ... def next(__i: SupportsNext[_T], __default: _VT) -> _T | _VT: ... def oct(__number: int | SupportsIndex) -> str: ... 
-_OpenFile = StrOrBytesPath | int # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed _Opener: TypeAlias = Callable[[str, int], int] # Text mode: always returns a TextIOWrapper @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenTextMode = ..., buffering: int = ..., encoding: str | None = ..., @@ -1352,7 +1338,7 @@ def open( # Unbuffered binary mode: returns a FileIO @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = ..., @@ -1365,7 +1351,7 @@ def open( # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryModeUpdating, buffering: Literal[-1, 1] = ..., encoding: None = ..., @@ -1376,7 +1362,7 @@ def open( ) -> BufferedRandom: ... @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryModeWriting, buffering: Literal[-1, 1] = ..., encoding: None = ..., @@ -1387,7 +1373,7 @@ def open( ) -> BufferedWriter: ... @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryModeReading, buffering: Literal[-1, 1] = ..., encoding: None = ..., @@ -1400,7 +1386,7 @@ def open( # Buffering cannot be determined: fall back to BinaryIO @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryMode, buffering: int = ..., encoding: None = ..., @@ -1413,7 +1399,7 @@ def open( # Fallback if mode is not specified @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: str, buffering: int = ..., encoding: str | None = ..., @@ -1539,12 +1525,21 @@ class reversed(Iterator[_T], Generic[_T]): def __length_hint__(self) -> int: ... def repr(__obj: object) -> str: ... 
+ +# See https://github.com/python/typeshed/pull/9141 +# and https://github.com/python/typeshed/pull/9151 +# on why we don't use `SupportsRound` from `typing.pyi` + +class _SupportsRound1(Protocol[_T_co]): + def __round__(self) -> _T_co: ... + +class _SupportsRound2(Protocol[_T_co]): + def __round__(self, __ndigits: int) -> _T_co: ... + @overload -def round(number: SupportsRound[Any]) -> int: ... -@overload -def round(number: SupportsRound[Any], ndigits: None) -> int: ... +def round(number: _SupportsRound1[_T], ndigits: None = ...) -> _T: ... @overload -def round(number: SupportsRound[_T], ndigits: SupportsIndex) -> _T: ... +def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... # See https://github.com/python/typeshed/pull/6292#discussion_r748875189 # for why arg 3 of `setattr` should be annotated with `Any` and not `object` @@ -1569,11 +1564,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = ...) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], start: int = ...) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = ...) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], __start: int = ...) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... @@ -1586,8 +1581,12 @@ else: @overload def sum(__iterable: Iterable[_AddableT1], __start: _AddableT2) -> _AddableT1 | _AddableT2: ... 
-# The argument to `vars()` has to have a `__dict__` attribute, so can't be annotated with `object` +# The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object` # (A "SupportsDunderDict" protocol doesn't work) +# Use a type: ignore to make complaints about overlapping overloads go away +@overload +def vars(__object: type) -> types.MappingProxyType[str, Any]: ... # type: ignore[misc] +@overload def vars(__object: Any = ...) -> dict[str, Any]: ... class zip(Iterator[_T_co], Generic[_T_co]): @@ -1846,6 +1845,7 @@ if sys.version_info >= (3, 11): _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True) _ExceptionT = TypeVar("_ExceptionT", bound=Exception) + # See `check_exception_group.py` for use-cases and comments. class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> Self: ... @property @@ -1853,18 +1853,34 @@ if sys.version_info >= (3, 11): @property def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: ... @overload + def subgroup( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> ExceptionGroup[_ExceptionT] | None: ... + @overload def subgroup( self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] ) -> BaseExceptionGroup[_BaseExceptionT] | None: ... @overload - def subgroup(self: Self, __condition: Callable[[_BaseExceptionT_co], bool]) -> Self | None: ... + def subgroup( + self: Self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + ) -> BaseExceptionGroup[_BaseExceptionT_co] | None: ... + @overload + def split( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> tuple[ExceptionGroup[_ExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... 
+ @overload + def split( + self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] + ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... @overload def split( - self: Self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] - ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, Self | None]: ... + self: Self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + ) -> tuple[BaseExceptionGroup[_BaseExceptionT_co] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + # In reality it is `NonEmptySequence`: @overload - def split(self: Self, __condition: Callable[[_BaseExceptionT_co], bool]) -> tuple[Self | None, Self | None]: ... - def derive(self: Self, __excs: Sequence[_BaseExceptionT_co]) -> Self: ... + def derive(self, __excs: Sequence[_ExceptionT]) -> ExceptionGroup[_ExceptionT]: ... + @overload + def derive(self, __excs: Sequence[_BaseExceptionT]) -> BaseExceptionGroup[_BaseExceptionT]: ... def __class_getitem__(cls, __item: Any) -> GenericAlias: ... class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): @@ -1877,10 +1893,14 @@ if sys.version_info >= (3, 11): self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] ) -> ExceptionGroup[_ExceptionT] | None: ... @overload - def subgroup(self: Self, __condition: Callable[[_ExceptionT_co], bool]) -> Self | None: ... + def subgroup( + self: Self, __condition: Callable[[_ExceptionT_co | Self], bool] + ) -> ExceptionGroup[_ExceptionT_co] | None: ... @overload # type: ignore[override] def split( - self: Self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] - ) -> tuple[ExceptionGroup[_ExceptionT] | None, Self | None]: ... + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> tuple[ExceptionGroup[_ExceptionT] | None, ExceptionGroup[_ExceptionT_co] | None]: ... 
@overload - def split(self: Self, __condition: Callable[[_ExceptionT_co], bool]) -> tuple[Self | None, Self | None]: ... + def split( + self: Self, __condition: Callable[[_ExceptionT_co | Self], bool] + ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... diff --git a/mypy/typeshed/stdlib/bz2.pyi b/mypy/typeshed/stdlib/bz2.pyi index cea317e28037..295271d4a80b 100644 --- a/mypy/typeshed/stdlib/bz2.pyi +++ b/mypy/typeshed/stdlib/bz2.pyi @@ -19,8 +19,8 @@ class _WritableFileobj(Protocol): # def fileno(self) -> int: ... # def close(self) -> object: ... -def compress(data: bytes, compresslevel: int = ...) -> bytes: ... -def decompress(data: bytes) -> bytes: ... +def compress(data: ReadableBuffer, compresslevel: int = ...) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: ... _ReadBinaryMode: TypeAlias = Literal["", "r", "rb"] _WriteBinaryMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] @@ -132,12 +132,12 @@ class BZ2File(BaseStream, IO[bytes]): @final class BZ2Compressor: def __init__(self, compresslevel: int = ...) -> None: ... - def compress(self, __data: bytes) -> bytes: ... + def compress(self, __data: ReadableBuffer) -> bytes: ... def flush(self) -> bytes: ... @final class BZ2Decompressor: - def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... @property def eof(self) -> bool: ... 
@property diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi index a7b60e38df11..cd6ac0006c53 100644 --- a/mypy/typeshed/stdlib/codecs.pyi +++ b/mypy/typeshed/stdlib/codecs.pyi @@ -1,12 +1,11 @@ import types -from _typeshed import Self +from _codecs import * +from _typeshed import ReadableBuffer, Self from abc import abstractmethod from collections.abc import Callable, Generator, Iterable from typing import Any, BinaryIO, Protocol, TextIO from typing_extensions import Literal -from _codecs import * - __all__ = [ "register", "lookup", @@ -173,7 +172,7 @@ class IncrementalDecoder: errors: str def __init__(self, errors: str = ...) -> None: ... @abstractmethod - def decode(self, input: bytes, final: bool = ...) -> str: ... + def decode(self, input: ReadableBuffer, final: bool = ...) -> str: ... def reset(self) -> None: ... def getstate(self) -> tuple[bytes, int]: ... def setstate(self, state: tuple[bytes, int]) -> None: ... @@ -190,8 +189,8 @@ class BufferedIncrementalDecoder(IncrementalDecoder): buffer: bytes def __init__(self, errors: str = ...) -> None: ... @abstractmethod - def _buffer_decode(self, input: bytes, errors: str, final: bool) -> tuple[str, int]: ... - def decode(self, input: bytes, final: bool = ...) -> str: ... + def _buffer_decode(self, input: ReadableBuffer, errors: str, final: bool) -> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, final: bool = ...) -> str: ... # TODO: it is not possible to specify the requirement that all other # attributes and methods are passed-through from the stream. 
diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index 37505c256d9c..2955aa3b3cd0 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -327,16 +327,17 @@ class _OrderedDictValuesView(ValuesView[_VT_co], Reversible[_VT_co]): # The C implementations of the "views" classes # (At runtime, these are called `odict_keys`, `odict_items` and `odict_values`, # but they are not exposed anywhere) +# pyright doesn't have a specific error code for subclassing error! @final -class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] +class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore def __reversed__(self) -> Iterator[_KT_co]: ... @final -class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] +class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... @final -class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] +class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore def __reversed__(self) -> Iterator[_VT_co]: ... class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): diff --git a/mypy/typeshed/stdlib/compileall.pyi b/mypy/typeshed/stdlib/compileall.pyi index dd1de3f496e7..4621500eda96 100644 --- a/mypy/typeshed/stdlib/compileall.pyi +++ b/mypy/typeshed/stdlib/compileall.pyi @@ -8,7 +8,7 @@ __all__ = ["compile_dir", "compile_file", "compile_path"] class _SupportsSearch(Protocol): def search(self, string: str) -> Any: ... 
-if sys.version_info >= (3, 9): +if sys.version_info >= (3, 10): def compile_dir( dir: StrPath, maxlevels: int | None = ..., @@ -21,7 +21,7 @@ if sys.version_info >= (3, 9): workers: int = ..., invalidation_mode: PycInvalidationMode | None = ..., *, - stripdir: str | None = ..., # TODO: change to StrPath | None once https://bugs.python.org/issue40447 is resolved + stripdir: StrPath | None = ..., prependdir: StrPath | None = ..., limit_sl_dest: StrPath | None = ..., hardlink_dupes: bool = ..., @@ -36,7 +36,41 @@ if sys.version_info >= (3, 9): optimize: int = ..., invalidation_mode: PycInvalidationMode | None = ..., *, - stripdir: str | None = ..., # TODO: change to StrPath | None once https://bugs.python.org/issue40447 is resolved + stripdir: StrPath | None = ..., + prependdir: StrPath | None = ..., + limit_sl_dest: StrPath | None = ..., + hardlink_dupes: bool = ..., + ) -> int: ... + +elif sys.version_info >= (3, 9): + def compile_dir( + dir: StrPath, + maxlevels: int | None = ..., + ddir: StrPath | None = ..., + force: bool = ..., + rx: _SupportsSearch | None = ..., + quiet: int = ..., + legacy: bool = ..., + optimize: int = ..., + workers: int = ..., + invalidation_mode: PycInvalidationMode | None = ..., + *, + stripdir: str | None = ..., # https://bugs.python.org/issue40447 + prependdir: StrPath | None = ..., + limit_sl_dest: StrPath | None = ..., + hardlink_dupes: bool = ..., + ) -> int: ... 
+ def compile_file( + fullname: StrPath, + ddir: StrPath | None = ..., + force: bool = ..., + rx: _SupportsSearch | None = ..., + quiet: int = ..., + legacy: bool = ..., + optimize: int = ..., + invalidation_mode: PycInvalidationMode | None = ..., + *, + stripdir: str | None = ..., # https://bugs.python.org/issue40447 prependdir: StrPath | None = ..., limit_sl_dest: StrPath | None = ..., hardlink_dupes: bool = ..., diff --git a/mypy/typeshed/stdlib/contextlib.pyi b/mypy/typeshed/stdlib/contextlib.pyi index ca8830439538..1a6642b643e3 100644 --- a/mypy/typeshed/stdlib/contextlib.pyi +++ b/mypy/typeshed/stdlib/contextlib.pyi @@ -1,6 +1,6 @@ import abc import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import FileDescriptorOrPath, Self from abc import abstractmethod from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator from types import TracebackType @@ -193,7 +193,7 @@ else: def __exit__(self, *exctype: object) -> None: ... if sys.version_info >= (3, 11): - _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=int | StrOrBytesPath) + _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) class chdir(AbstractContextManager[None], Generic[_T_fd_or_any_path]): path: _T_fd_or_any_path diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 78f4ee4d5ab3..2e26a08f81f9 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -64,8 +64,8 @@ class _CDataMeta(type): # By default mypy complains about the following two methods, because strictly speaking cls # might not be a Type[_CT]. However this can never actually happen, because the only class that # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here. - def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] - def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... 
# type: ignore[misc] + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _CData(metaclass=_CDataMeta): _b_base: int @@ -194,7 +194,7 @@ class _SimpleCData(Generic[_T], _CData): class c_byte(_SimpleCData[int]): ... class c_char(_SimpleCData[bytes]): - def __init__(self, value: int | bytes = ...) -> None: ... + def __init__(self, value: int | bytes | bytearray = ...) -> None: ... class c_char_p(_PointerLike, _SimpleCData[bytes | None]): def __init__(self, value: int | bytes | None = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi index 5926ff0a808e..43f5902c3c06 100644 --- a/mypy/typeshed/stdlib/datetime.pyi +++ b/mypy/typeshed/stdlib/datetime.pyi @@ -1,7 +1,8 @@ import sys from _typeshed import Self +from abc import abstractmethod from time import struct_time -from typing import ClassVar, NamedTuple, NoReturn, SupportsAbs, TypeVar, overload +from typing import ClassVar, NamedTuple, NoReturn, TypeVar, overload from typing_extensions import Literal, TypeAlias, final if sys.version_info >= (3, 11): @@ -15,8 +16,11 @@ MINYEAR: Literal[1] MAXYEAR: Literal[9999] class tzinfo: + @abstractmethod def tzname(self, __dt: datetime | None) -> str | None: ... + @abstractmethod def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... + @abstractmethod def dst(self, __dt: datetime | None) -> timedelta | None: ... def fromutc(self, __dt: datetime) -> datetime: ... @@ -29,6 +33,9 @@ class timezone(tzinfo): min: ClassVar[timezone] max: ClassVar[timezone] def __init__(self, offset: timedelta, name: str = ...) -> None: ... + def tzname(self, __dt: datetime | None) -> str: ... + def utcoffset(self, __dt: datetime | None) -> timedelta: ... + def dst(self, __dt: datetime | None) -> None: ... 
if sys.version_info >= (3, 11): UTC: timezone @@ -63,7 +70,14 @@ class date: @property def day(self) -> int: ... def ctime(self) -> str: ... - def strftime(self, __format: str) -> str: ... + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, __format: str) -> str: ... + def __format__(self, __fmt: str) -> str: ... def isoformat(self) -> str: ... def timetuple(self) -> struct_time: ... @@ -133,7 +147,14 @@ class time: def isoformat(self, timespec: str = ...) -> str: ... @classmethod def fromisoformat(cls: type[Self], __time_string: str) -> Self: ... - def strftime(self, __format: str) -> str: ... + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, __format: str) -> str: ... + def __format__(self, __fmt: str) -> str: ... def utcoffset(self) -> timedelta | None: ... def tzname(self) -> str | None: ... @@ -152,7 +173,7 @@ class time: _Date: TypeAlias = date _Time: TypeAlias = time -class timedelta(SupportsAbs[timedelta]): +class timedelta: min: ClassVar[timedelta] max: ClassVar[timedelta] resolution: ClassVar[timedelta] @@ -226,11 +247,16 @@ class datetime(date): def tzinfo(self) -> _TzInfo | None: ... @property def fold(self) -> int: ... - # The first parameter in `fromtimestamp` is actually positional-or-keyword, - # but it is named "timestamp" in the C implementation and "t" in the Python implementation, - # so it is only truly *safe* to pass it as a positional argument. - @classmethod - def fromtimestamp(cls: type[Self], __timestamp: float, tz: _TzInfo | None = ...) 
-> Self: ... + # On <3.12, the name of the first parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + @classmethod + def fromtimestamp(cls: type[Self], timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + else: + @classmethod + def fromtimestamp(cls: type[Self], __timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + @classmethod def utcfromtimestamp(cls: type[Self], __t: float) -> Self: ... if sys.version_info >= (3, 8): diff --git a/mypy/typeshed/stdlib/dbm/__init__.pyi b/mypy/typeshed/stdlib/dbm/__init__.pyi index 9e99f0d5e74c..33b8aab96610 100644 --- a/mypy/typeshed/stdlib/dbm/__init__.pyi +++ b/mypy/typeshed/stdlib/dbm/__init__.pyi @@ -6,7 +6,7 @@ from typing_extensions import Literal, TypeAlias __all__ = ["open", "whichdb", "error"] _KeyType: TypeAlias = str | bytes -_ValueType: TypeAlias = str | bytes +_ValueType: TypeAlias = str | bytes | bytearray _TFlags: TypeAlias = Literal[ "r", "w", diff --git a/mypy/typeshed/stdlib/dbm/dumb.pyi b/mypy/typeshed/stdlib/dbm/dumb.pyi index 4fd199f19728..738e68968ca8 100644 --- a/mypy/typeshed/stdlib/dbm/dumb.pyi +++ b/mypy/typeshed/stdlib/dbm/dumb.pyi @@ -10,6 +10,9 @@ _ValueType: TypeAlias = str | bytes error = OSError +# This class doesn't exist at runtime. open() can return an instance of +# any of the three implementations of dbm (dumb, gnu, ndbm), and this +# class is intended to represent the common interface supported by all three. class _Database(MutableMapping[_KeyType, bytes]): def __init__(self, filebasename: str, mode: str, flag: str = ...) -> None: ... def sync(self) -> None: ... 
diff --git a/mypy/typeshed/stdlib/dbm/gnu.pyi b/mypy/typeshed/stdlib/dbm/gnu.pyi index 561206c4e0be..93b9df1077ce 100644 --- a/mypy/typeshed/stdlib/dbm/gnu.pyi +++ b/mypy/typeshed/stdlib/dbm/gnu.pyi @@ -1,13 +1,13 @@ import sys -from _typeshed import Self +from _typeshed import ReadOnlyBuffer, Self from types import TracebackType from typing import TypeVar, overload from typing_extensions import TypeAlias if sys.platform != "win32": _T = TypeVar("_T") - _KeyType: TypeAlias = str | bytes - _ValueType: TypeAlias = str | bytes + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer open_flags: str @@ -31,7 +31,7 @@ if sys.platform != "win32": @overload def get(self, k: _KeyType) -> bytes | None: ... @overload - def get(self, k: _KeyType, default: bytes | _T) -> bytes | _T: ... + def get(self, k: _KeyType, default: _T) -> bytes | _T: ... def keys(self) -> list[bytes]: ... def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... # Don't exist at runtime diff --git a/mypy/typeshed/stdlib/dbm/ndbm.pyi b/mypy/typeshed/stdlib/dbm/ndbm.pyi index f1032bf3cae7..ca658098bd5c 100644 --- a/mypy/typeshed/stdlib/dbm/ndbm.pyi +++ b/mypy/typeshed/stdlib/dbm/ndbm.pyi @@ -1,13 +1,13 @@ import sys -from _typeshed import Self +from _typeshed import ReadOnlyBuffer, Self from types import TracebackType from typing import TypeVar, overload from typing_extensions import TypeAlias if sys.platform != "win32": _T = TypeVar("_T") - _KeyType: TypeAlias = str | bytes - _ValueType: TypeAlias = str | bytes + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer class error(OSError): ... library: str @@ -27,7 +27,7 @@ if sys.platform != "win32": @overload def get(self, k: _KeyType) -> bytes | None: ... @overload - def get(self, k: _KeyType, default: bytes | _T) -> bytes | _T: ... + def get(self, k: _KeyType, default: _T) -> bytes | _T: ... def keys(self) -> list[bytes]: ... 
def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... # Don't exist at runtime diff --git a/mypy/typeshed/stdlib/difflib.pyi b/mypy/typeshed/stdlib/difflib.pyi index 854a53d433ae..df2f8be0168a 100644 --- a/mypy/typeshed/stdlib/difflib.pyi +++ b/mypy/typeshed/stdlib/difflib.pyi @@ -127,12 +127,12 @@ class HtmlDiff: def restore(delta: Iterable[str], which: int) -> Iterator[str]: ... def diff_bytes( dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], - a: Sequence[bytes], - b: Sequence[bytes], - fromfile: bytes = ..., - tofile: bytes = ..., - fromfiledate: bytes = ..., - tofiledate: bytes = ..., + a: Iterable[bytes | bytearray], + b: Iterable[bytes | bytearray], + fromfile: bytes | bytearray = ..., + tofile: bytes | bytearray = ..., + fromfiledate: bytes | bytearray = ..., + tofiledate: bytes | bytearray = ..., n: int = ..., - lineterm: bytes = ..., + lineterm: bytes | bytearray = ..., ) -> Iterator[bytes]: ... diff --git a/mypy/typeshed/stdlib/dis.pyi b/mypy/typeshed/stdlib/dis.pyi index dd31d981071f..73adba5c19f5 100644 --- a/mypy/typeshed/stdlib/dis.pyi +++ b/mypy/typeshed/stdlib/dis.pyi @@ -37,7 +37,6 @@ __all__ = [ # Strictly this should not have to include Callable, but mypy doesn't use FunctionType # for functions (python/mypy#3171) _HaveCodeType: TypeAlias = types.MethodType | types.FunctionType | types.CodeType | type | Callable[..., Any] -_HaveCodeOrStringType: TypeAlias = _HaveCodeType | str | bytes if sys.version_info >= (3, 11): class Positions(NamedTuple): @@ -75,7 +74,7 @@ class Bytecode: if sys.version_info >= (3, 11): def __init__( self, - x: _HaveCodeOrStringType, + x: _HaveCodeType | str, *, first_line: int | None = ..., current_offset: int | None = ..., @@ -87,9 +86,7 @@ class Bytecode: cls: type[Self], tb: types.TracebackType, *, show_caches: bool = ..., adaptive: bool = ... ) -> Self: ... 
else: - def __init__( - self, x: _HaveCodeOrStringType, *, first_line: int | None = ..., current_offset: int | None = ... - ) -> None: ... + def __init__(self, x: _HaveCodeType | str, *, first_line: int | None = ..., current_offset: int | None = ...) -> None: ... @classmethod def from_traceback(cls: type[Self], tb: types.TracebackType) -> Self: ... @@ -102,11 +99,11 @@ COMPILER_FLAG_NAMES: dict[int, str] def findlabels(code: _HaveCodeType) -> list[int]: ... def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ... def pretty_flags(flags: int) -> str: ... -def code_info(x: _HaveCodeOrStringType) -> str: ... +def code_info(x: _HaveCodeType | str) -> str: ... if sys.version_info >= (3, 11): def dis( - x: _HaveCodeOrStringType | None = ..., + x: _HaveCodeType | str | bytes | bytearray | None = ..., *, file: IO[str] | None = ..., depth: int | None = ..., @@ -115,7 +112,9 @@ if sys.version_info >= (3, 11): ) -> None: ... else: - def dis(x: _HaveCodeOrStringType | None = ..., *, file: IO[str] | None = ..., depth: int | None = ...) -> None: ... + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = ..., *, file: IO[str] | None = ..., depth: int | None = ... + ) -> None: ... if sys.version_info >= (3, 11): def disassemble( diff --git a/mypy/typeshed/stdlib/distutils/dist.pyi b/mypy/typeshed/stdlib/distutils/dist.pyi index ef47e4e4d15a..fc1bce261e57 100644 --- a/mypy/typeshed/stdlib/distutils/dist.pyi +++ b/mypy/typeshed/stdlib/distutils/dist.pyi @@ -1,10 +1,10 @@ -from _typeshed import StrOrBytesPath, SupportsWrite +from _typeshed import FileDescriptorOrPath, SupportsWrite from collections.abc import Iterable, Mapping from distutils.cmd import Command from typing import IO, Any class DistributionMetadata: - def __init__(self, path: int | StrOrBytesPath | None = ...) -> None: ... + def __init__(self, path: FileDescriptorOrPath | None = ...) -> None: ... 
name: str | None version: str | None author: str | None diff --git a/mypy/typeshed/stdlib/email/__init__.pyi b/mypy/typeshed/stdlib/email/__init__.pyi index 4591b2c3340e..6b59dc73d5cc 100644 --- a/mypy/typeshed/stdlib/email/__init__.pyi +++ b/mypy/typeshed/stdlib/email/__init__.pyi @@ -9,7 +9,7 @@ _ParamType: TypeAlias = Union[str, tuple[str | None, str | None, str]] # noqa: _ParamsType: TypeAlias = Union[str, None, tuple[str, str | None, str]] # noqa: Y047 def message_from_string(s: str, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... -def message_from_bytes(s: bytes, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... +def message_from_bytes(s: bytes | bytearray, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_file(fp: IO[str], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_binary_file(fp: IO[bytes], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... diff --git a/mypy/typeshed/stdlib/email/base64mime.pyi b/mypy/typeshed/stdlib/email/base64mime.pyi index e55658046f55..16118a879ad7 100644 --- a/mypy/typeshed/stdlib/email/base64mime.pyi +++ b/mypy/typeshed/stdlib/email/base64mime.pyi @@ -1,9 +1,13 @@ __all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encode", "header_length"] -def header_length(bytearray: str | bytes) -> int: ... -def header_encode(header_bytes: str | bytes, charset: str = ...) -> str: ... -def body_encode(s: bytes, maxlinelen: int = ..., eol: str = ...) -> str: ... -def decode(string: str | bytes) -> bytes: ... +from _typeshed import ReadableBuffer + +def header_length(bytearray: str | bytes | bytearray) -> int: ... +def header_encode(header_bytes: str | ReadableBuffer, charset: str = ...) -> str: ... + +# First argument should be a buffer that supports slicing and len(). +def body_encode(s: bytes | bytearray, maxlinelen: int = ..., eol: str = ...) 
-> str: ... +def decode(string: str | ReadableBuffer) -> bytes: ... body_decode = decode decodestring = decode diff --git a/mypy/typeshed/stdlib/email/feedparser.pyi b/mypy/typeshed/stdlib/email/feedparser.pyi index c535c353daad..809f0b0e112b 100644 --- a/mypy/typeshed/stdlib/email/feedparser.pyi +++ b/mypy/typeshed/stdlib/email/feedparser.pyi @@ -20,5 +20,5 @@ class BytesFeedParser(Generic[_MessageT]): def __init__(self: BytesFeedParser[Message], _factory: None = ..., *, policy: Policy = ...) -> None: ... @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: ... - def feed(self, data: bytes) -> None: ... + def feed(self, data: bytes | bytearray) -> None: ... def close(self) -> _MessageT: ... diff --git a/mypy/typeshed/stdlib/email/header.pyi b/mypy/typeshed/stdlib/email/header.pyi index 9248759168a9..58740bd1bdae 100644 --- a/mypy/typeshed/stdlib/email/header.pyi +++ b/mypy/typeshed/stdlib/email/header.pyi @@ -1,3 +1,4 @@ +from collections.abc import Iterable from email.charset import Charset from typing import Any @@ -6,14 +7,14 @@ __all__ = ["Header", "decode_header", "make_header"] class Header: def __init__( self, - s: bytes | str | None = ..., + s: bytes | bytearray | str | None = ..., charset: Charset | str | None = ..., maxlinelen: int | None = ..., header_name: str | None = ..., continuation_ws: str = ..., errors: str = ..., ) -> None: ... - def append(self, s: bytes | str, charset: Charset | str | None = ..., errors: str = ...) -> None: ... + def append(self, s: bytes | bytearray | str, charset: Charset | str | None = ..., errors: str = ...) -> None: ... def encode(self, splitchars: str = ..., maxlinelen: int | None = ..., linesep: str = ...) -> str: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, __other: object) -> bool: ... @@ -23,7 +24,7 @@ class Header: # contains at least one encoded part. def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: ... 
def make_header( - decoded_seq: list[tuple[bytes, str | None]], + decoded_seq: Iterable[tuple[bytes | bytearray | str, str | None]], maxlinelen: int | None = ..., header_name: str | None = ..., continuation_ws: str = ..., diff --git a/mypy/typeshed/stdlib/email/message.pyi b/mypy/typeshed/stdlib/email/message.pyi index 4e8f600f7ffd..58b1c1cd8f3d 100644 --- a/mypy/typeshed/stdlib/email/message.pyi +++ b/mypy/typeshed/stdlib/email/message.pyi @@ -5,14 +5,14 @@ from email.charset import Charset from email.contentmanager import ContentManager from email.errors import MessageDefect from email.policy import Policy -from typing import Any, TypeVar +from typing import Any, TypeVar, overload from typing_extensions import TypeAlias __all__ = ["Message", "EmailMessage"] _T = TypeVar("_T") -_PayloadType: TypeAlias = list[Message] | str | bytes +_PayloadType: TypeAlias = list[Message] | str | bytes | bytearray _CharsetType: TypeAlias = Charset | str | None _HeaderType: TypeAlias = Any @@ -54,7 +54,10 @@ class Message: def get_filename(self, failobj: _T = ...) -> _T | str: ... def get_boundary(self, failobj: _T = ...) -> _T | str: ... def set_boundary(self, boundary: str) -> None: ... - def get_content_charset(self, failobj: _T = ...) -> _T | str: ... + @overload + def get_content_charset(self) -> str | None: ... + @overload + def get_content_charset(self, failobj: _T) -> str | _T: ... def get_charsets(self, failobj: _T = ...) -> _T | list[str]: ... def walk(self: Self) -> Generator[Self, None, None]: ... def get_content_disposition(self) -> str | None: ... @@ -73,8 +76,8 @@ class Message: ) -> None: ... def __init__(self, policy: Policy = ...) -> None: ... # The following two methods are undocumented, but a source code comment states that they are public API - def set_raw(self, name: str, value: str) -> None: ... - def raw_items(self) -> Iterator[tuple[str, str]]: ... + def set_raw(self, name: str, value: _HeaderType) -> None: ... 
+ def raw_items(self) -> Iterator[tuple[str, _HeaderType]]: ... class MIMEPart(Message): def __init__(self, policy: Policy | None = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/application.pyi b/mypy/typeshed/stdlib/email/mime/application.pyi index dfff85265ade..5ff60bff6ad2 100644 --- a/mypy/typeshed/stdlib/email/mime/application.pyi +++ b/mypy/typeshed/stdlib/email/mime/application.pyi @@ -8,7 +8,7 @@ __all__ = ["MIMEApplication"] class MIMEApplication(MIMENonMultipart): def __init__( self, - _data: str | bytes, + _data: str | bytes | bytearray, _subtype: str = ..., _encoder: Callable[[MIMEApplication], object] = ..., *, diff --git a/mypy/typeshed/stdlib/email/mime/audio.pyi b/mypy/typeshed/stdlib/email/mime/audio.pyi index b355d55070ad..05e173f5c4a1 100644 --- a/mypy/typeshed/stdlib/email/mime/audio.pyi +++ b/mypy/typeshed/stdlib/email/mime/audio.pyi @@ -8,7 +8,7 @@ __all__ = ["MIMEAudio"] class MIMEAudio(MIMENonMultipart): def __init__( self, - _audiodata: str | bytes, + _audiodata: str | bytes | bytearray, _subtype: str | None = ..., _encoder: Callable[[MIMEAudio], object] = ..., *, diff --git a/mypy/typeshed/stdlib/email/mime/image.pyi b/mypy/typeshed/stdlib/email/mime/image.pyi index f575103de2d6..7e46b835b541 100644 --- a/mypy/typeshed/stdlib/email/mime/image.pyi +++ b/mypy/typeshed/stdlib/email/mime/image.pyi @@ -8,7 +8,7 @@ __all__ = ["MIMEImage"] class MIMEImage(MIMENonMultipart): def __init__( self, - _imagedata: str | bytes, + _imagedata: str | bytes | bytearray, _subtype: str | None = ..., _encoder: Callable[[MIMEImage], object] = ..., *, diff --git a/mypy/typeshed/stdlib/email/parser.pyi b/mypy/typeshed/stdlib/email/parser.pyi index bf51c45728fd..1afd8940f4ef 100644 --- a/mypy/typeshed/stdlib/email/parser.pyi +++ b/mypy/typeshed/stdlib/email/parser.pyi @@ -16,6 +16,6 @@ class HeaderParser(Parser): ... class BytesParser: def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... 
def parse(self, fp: BinaryIO, headersonly: bool = ...) -> Message: ... - def parsebytes(self, text: bytes, headersonly: bool = ...) -> Message: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = ...) -> Message: ... class BytesHeaderParser(BytesParser): ... diff --git a/mypy/typeshed/stdlib/email/quoprimime.pyi b/mypy/typeshed/stdlib/email/quoprimime.pyi index c5d324d17e13..ec0c799583bf 100644 --- a/mypy/typeshed/stdlib/email/quoprimime.pyi +++ b/mypy/typeshed/stdlib/email/quoprimime.pyi @@ -1,3 +1,5 @@ +from collections.abc import Iterable + __all__ = [ "body_decode", "body_encode", @@ -13,11 +15,11 @@ __all__ = [ def header_check(octet: int) -> bool: ... def body_check(octet: int) -> bool: ... -def header_length(bytearray: bytes) -> int: ... -def body_length(bytearray: bytes) -> int: ... -def unquote(s: str | bytes) -> str: ... -def quote(c: str | bytes) -> str: ... -def header_encode(header_bytes: bytes, charset: str = ...) -> str: ... +def header_length(bytearray: Iterable[int]) -> int: ... +def body_length(bytearray: Iterable[int]) -> int: ... +def unquote(s: str | bytes | bytearray) -> str: ... +def quote(c: str | bytes | bytearray) -> str: ... +def header_encode(header_bytes: bytes | bytearray, charset: str = ...) -> str: ... def body_encode(body: str, maxlinelen: int = ..., eol: str = ...) -> str: ... def decode(encoded: str, eol: str = ...) -> str: ... def header_decode(s: str) -> str: ... diff --git a/mypy/typeshed/stdlib/encodings/__init__.pyi b/mypy/typeshed/stdlib/encodings/__init__.pyi index d86466762268..2e83f0f65a71 100644 --- a/mypy/typeshed/stdlib/encodings/__init__.pyi +++ b/mypy/typeshed/stdlib/encodings/__init__.pyi @@ -1,5 +1,5 @@ +from _typeshed import Incomplete from codecs import CodecInfo -from typing import Any class CodecRegistryError(LookupError, SystemError): ... @@ -7,4 +7,4 @@ def normalize_encoding(encoding: str | bytes) -> str: ... def search_function(encoding: str) -> CodecInfo | None: ... 
# Needed for submodules -def __getattr__(name: str) -> Any: ... # incomplete +def __getattr__(name: str) -> Incomplete: ... diff --git a/mypy/typeshed/stdlib/encodings/utf_8.pyi b/mypy/typeshed/stdlib/encodings/utf_8.pyi index 568fa6013373..8e73756199c1 100644 --- a/mypy/typeshed/stdlib/encodings/utf_8.pyi +++ b/mypy/typeshed/stdlib/encodings/utf_8.pyi @@ -1,11 +1,12 @@ import codecs +from _typeshed import ReadableBuffer class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input: str, final: bool = ...) -> bytes: ... class IncrementalDecoder(codecs.BufferedIncrementalDecoder): @staticmethod - def _buffer_decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def _buffer_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... class StreamWriter(codecs.StreamWriter): @staticmethod @@ -13,8 +14,8 @@ class StreamWriter(codecs.StreamWriter): class StreamReader(codecs.StreamReader): @staticmethod - def decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def decode(__data: ReadableBuffer, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... def getregentry() -> codecs.CodecInfo: ... def encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def decode(input: bytes, errors: str | None = ...) -> tuple[str, int]: ... +def decode(input: ReadableBuffer, errors: str | None = ...) -> tuple[str, int]: ... diff --git a/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi b/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi index ad0d5bdc4fc7..27171063f53f 100644 --- a/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi +++ b/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi @@ -1,4 +1,5 @@ import codecs +from _typeshed import ReadableBuffer class IncrementalEncoder(codecs.IncrementalEncoder): def __init__(self, errors: str = ...) -> None: ... 
@@ -8,14 +9,14 @@ class IncrementalEncoder(codecs.IncrementalEncoder): class IncrementalDecoder(codecs.BufferedIncrementalDecoder): def __init__(self, errors: str = ...) -> None: ... - def _buffer_decode(self, input: bytes, errors: str | None, final: bool) -> tuple[str, int]: ... + def _buffer_decode(self, input: ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... class StreamWriter(codecs.StreamWriter): def encode(self, input: str, errors: str | None = ...) -> tuple[bytes, int]: ... class StreamReader(codecs.StreamReader): - def decode(self, input: bytes, errors: str | None = ...) -> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, errors: str | None = ...) -> tuple[str, int]: ... def getregentry() -> codecs.CodecInfo: ... def encode(input: str, errors: str | None = ...) -> tuple[bytes, int]: ... -def decode(input: bytes, errors: str | None = ...) -> tuple[str, int]: ... +def decode(input: ReadableBuffer, errors: str | None = ...) -> tuple[str, int]: ... diff --git a/mypy/typeshed/stdlib/fcntl.pyi b/mypy/typeshed/stdlib/fcntl.pyi index 69863bf580fa..2df16083c0b7 100644 --- a/mypy/typeshed/stdlib/fcntl.pyi +++ b/mypy/typeshed/stdlib/fcntl.pyi @@ -103,7 +103,7 @@ if sys.platform != "win32": @overload def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = ...) -> int: ... @overload - def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: bytes) -> bytes: ... + def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: str | ReadOnlyBuffer) -> bytes: ... @overload def ioctl(__fd: FileDescriptorLike, __request: int, __arg: int = ..., __mutate_flag: bool = ...) -> int: ... 
@overload diff --git a/mypy/typeshed/stdlib/genericpath.pyi b/mypy/typeshed/stdlib/genericpath.pyi index 911d582fd538..46426b63c852 100644 --- a/mypy/typeshed/stdlib/genericpath.pyi +++ b/mypy/typeshed/stdlib/genericpath.pyi @@ -1,5 +1,5 @@ import os -from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRichComparisonT +from _typeshed import BytesPath, FileDescriptorOrPath, StrPath, SupportsRichComparisonT from collections.abc import Sequence from typing import overload from typing_extensions import Literal, LiteralString @@ -31,16 +31,16 @@ def commonprefix(m: Sequence[BytesPath]) -> bytes | Literal[""]: ... def commonprefix(m: Sequence[list[SupportsRichComparisonT]]) -> Sequence[SupportsRichComparisonT]: ... @overload def commonprefix(m: Sequence[tuple[SupportsRichComparisonT, ...]]) -> Sequence[SupportsRichComparisonT]: ... -def exists(path: StrOrBytesPath | int) -> bool: ... -def getsize(filename: StrOrBytesPath | int) -> int: ... -def isfile(path: StrOrBytesPath | int) -> bool: ... -def isdir(s: StrOrBytesPath | int) -> bool: ... +def exists(path: FileDescriptorOrPath) -> bool: ... +def getsize(filename: FileDescriptorOrPath) -> int: ... +def isfile(path: FileDescriptorOrPath) -> bool: ... +def isdir(s: FileDescriptorOrPath) -> bool: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. -def getatime(filename: StrOrBytesPath | int) -> float: ... -def getmtime(filename: StrOrBytesPath | int) -> float: ... -def getctime(filename: StrOrBytesPath | int) -> float: ... -def samefile(f1: StrOrBytesPath | int, f2: StrOrBytesPath | int) -> bool: ... +def getatime(filename: FileDescriptorOrPath) -> float: ... +def getmtime(filename: FileDescriptorOrPath) -> float: ... +def getctime(filename: FileDescriptorOrPath) -> float: ... +def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: ... 
diff --git a/mypy/typeshed/stdlib/gzip.pyi b/mypy/typeshed/stdlib/gzip.pyi index 75a70a5e7a07..580e605b6b38 100644 --- a/mypy/typeshed/stdlib/gzip.pyi +++ b/mypy/typeshed/stdlib/gzip.pyi @@ -1,9 +1,9 @@ import _compression import sys import zlib -from _typeshed import ReadableBuffer, StrOrBytesPath +from _typeshed import ReadableBuffer, StrOrBytesPath, _BufferWithLen from io import FileIO -from typing import Any, Protocol, TextIO, overload +from typing import Protocol, TextIO, overload from typing_extensions import Literal, TypeAlias if sys.version_info >= (3, 8): @@ -26,15 +26,15 @@ FCOMMENT: int # actually Literal[16] # undocumented class _ReadableFileobj(Protocol): def read(self, __n: int) -> bytes: ... - def seek(self, __n: int) -> Any: ... + def seek(self, __n: int) -> object: ... # The following attributes and methods are optional: # name: str # mode: str # def fileno() -> int: ... class _WritableFileobj(Protocol): - def write(self, __b: bytes) -> Any: ... - def flush(self) -> Any: ... + def write(self, __b: bytes) -> object: ... + def flush(self) -> object: ... # The following attributes and methods are optional: # name: str # mode: str @@ -159,9 +159,9 @@ class _GzipReader(_compression.DecompressReader): def __init__(self, fp: _ReadableFileobj) -> None: ... if sys.version_info >= (3, 8): - def compress(data: bytes, compresslevel: int = ..., *, mtime: float | None = ...) -> bytes: ... + def compress(data: _BufferWithLen, compresslevel: int = ..., *, mtime: float | None = ...) -> bytes: ... else: - def compress(data: bytes, compresslevel: int = ...) -> bytes: ... + def compress(data: _BufferWithLen, compresslevel: int = ...) -> bytes: ... -def decompress(data: bytes) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: ... 
diff --git a/mypy/typeshed/stdlib/hmac.pyi b/mypy/typeshed/stdlib/hmac.pyi index af69fc7ea46d..dc29836b6b87 100644 --- a/mypy/typeshed/stdlib/hmac.pyi +++ b/mypy/typeshed/stdlib/hmac.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer +from _typeshed import ReadableBuffer, _BufferWithLen from collections.abc import Callable from types import ModuleType from typing import Any, AnyStr, overload @@ -18,19 +18,19 @@ if sys.version_info >= (3, 8): # In reality digestmod has a default value, but the function always throws an error # if the argument is not given, so we pretend it is a required argument. @overload - def new(key: bytes, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... + def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... @overload - def new(key: bytes, *, digestmod: _DigestMod) -> HMAC: ... + def new(key: bytes | bytearray, *, digestmod: _DigestMod) -> HMAC: ... else: - def new(key: bytes, msg: ReadableBuffer | None = ..., digestmod: _DigestMod | None = ...) -> HMAC: ... + def new(key: bytes | bytearray, msg: ReadableBuffer | None = ..., digestmod: _DigestMod | None = ...) -> HMAC: ... class HMAC: digest_size: int block_size: int @property def name(self) -> str: ... - def __init__(self, key: bytes, msg: ReadableBuffer | None = ..., digestmod: _DigestMod = ...) -> None: ... + def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = ..., digestmod: _DigestMod = ...) -> None: ... def update(self, msg: ReadableBuffer) -> None: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... @@ -40,4 +40,4 @@ class HMAC: def compare_digest(__a: ReadableBuffer, __b: ReadableBuffer) -> bool: ... @overload def compare_digest(__a: AnyStr, __b: AnyStr) -> bool: ... -def digest(key: bytes, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... +def digest(key: _BufferWithLen, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... 
diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi index 2ce52eac9ad9..53cefc0a33d1 100644 --- a/mypy/typeshed/stdlib/http/client.pyi +++ b/mypy/typeshed/stdlib/http/client.pyi @@ -2,10 +2,10 @@ import email.message import io import ssl import types -from _typeshed import Self, WriteableBuffer +from _typeshed import ReadableBuffer, Self, SupportsRead, WriteableBuffer from collections.abc import Callable, Iterable, Iterator, Mapping from socket import socket -from typing import IO, Any, BinaryIO, TypeVar, overload +from typing import Any, BinaryIO, TypeVar, overload from typing_extensions import TypeAlias __all__ = [ @@ -30,7 +30,7 @@ __all__ = [ "HTTPSConnection", ] -_DataType: TypeAlias = bytes | IO[Any] | Iterable[bytes] | str +_DataType: TypeAlias = SupportsRead[bytes] | Iterable[ReadableBuffer] | ReadableBuffer _T = TypeVar("_T") HTTP_PORT: int @@ -154,7 +154,13 @@ class HTTPConnection: blocksize: int = ..., ) -> None: ... def request( - self, method: str, url: str, body: _DataType | None = ..., headers: Mapping[str, str] = ..., *, encode_chunked: bool = ... + self, + method: str, + url: str, + body: _DataType | str | None = ..., + headers: Mapping[str, str] = ..., + *, + encode_chunked: bool = ..., ) -> None: ... def getresponse(self) -> HTTPResponse: ... def set_debuglevel(self, level: int) -> None: ... @@ -164,7 +170,7 @@ class HTTPConnection: def putrequest(self, method: str, url: str, skip_host: bool = ..., skip_accept_encoding: bool = ...) -> None: ... def putheader(self, header: str, *argument: str) -> None: ... def endheaders(self, message_body: _DataType | None = ..., *, encode_chunked: bool = ...) -> None: ... - def send(self, data: _DataType) -> None: ... + def send(self, data: _DataType | str) -> None: ... 
class HTTPSConnection(HTTPConnection): # Can be `None` if `.connect()` was not called: diff --git a/mypy/typeshed/stdlib/http/server.pyi b/mypy/typeshed/stdlib/http/server.pyi index 40c94bf62f30..04ac28c3278e 100644 --- a/mypy/typeshed/stdlib/http/server.pyi +++ b/mypy/typeshed/stdlib/http/server.pyi @@ -1,6 +1,8 @@ +import _socket import email.message import io import socketserver +import sys from _typeshed import StrPath, SupportsRead, SupportsWrite from collections.abc import Mapping, Sequence from typing import Any, AnyStr, BinaryIO, ClassVar @@ -31,7 +33,6 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): default_request_version: str # undocumented weekdayname: ClassVar[Sequence[str]] # undocumented monthname: ClassVar[Sequence[str | None]] # undocumented - def __init__(self, request: bytes, client_address: tuple[str, int], server: socketserver.BaseServer) -> None: ... def handle_one_request(self) -> None: ... def handle_expect_100(self) -> bool: ... def send_error(self, code: int, message: str | None = ..., explain: str | None = ...) -> None: ... @@ -51,8 +52,15 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): extensions_map: dict[str, str] + if sys.version_info >= (3, 12): + index_pages: ClassVar[tuple[str, ...]] def __init__( - self, request: bytes, client_address: tuple[str, int], server: socketserver.BaseServer, directory: str | None = ... + self, + request: socketserver._RequestType, + client_address: _socket._RetAddress, + server: socketserver.BaseServer, + *, + directory: str | None = ..., ) -> None: ... def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... 
diff --git a/mypy/typeshed/stdlib/imaplib.pyi b/mypy/typeshed/stdlib/imaplib.pyi index bd3d0777db15..f13e1c9b656c 100644 --- a/mypy/typeshed/stdlib/imaplib.pyi +++ b/mypy/typeshed/stdlib/imaplib.pyi @@ -1,7 +1,7 @@ import subprocess import sys import time -from _typeshed import Self +from _typeshed import ReadableBuffer, Self, _BufferWithLen from builtins import list as _list # conflicts with a method named "list" from collections.abc import Callable from datetime import datetime @@ -9,7 +9,7 @@ from re import Pattern from socket import socket as _socket from ssl import SSLContext, SSLSocket from types import TracebackType -from typing import IO, Any +from typing import IO, Any, SupportsAbs, SupportsInt from typing_extensions import Literal, TypeAlias __all__ = ["IMAP4", "IMAP4_stream", "Internaldate2tuple", "Int2AP", "ParseFlags", "Time2Internaldate", "IMAP4_SSL"] @@ -54,12 +54,12 @@ class IMAP4: file: IO[str] | IO[bytes] def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... - def send(self, data: bytes) -> None: ... + def send(self, data: ReadableBuffer) -> None: ... def shutdown(self) -> None: ... def socket(self) -> _socket: ... def recent(self) -> _CommandResults: ... def response(self, code: str) -> _CommandResults: ... - def append(self, mailbox: str, flags: str, date_time: str, message: bytes) -> str: ... + def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: ... def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: ... def capability(self) -> _CommandResults: ... def check(self) -> _CommandResults: ... @@ -151,13 +151,13 @@ class IMAP4_stream(IMAP4): def open(self, host: str | None = ..., port: int | None = ...) -> None: ... class _Authenticator: - mech: Callable[[bytes], bytes] - def __init__(self, mechinst: Callable[[bytes], bytes]) -> None: ... 
+ mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] + def __init__(self, mechinst: Callable[[bytes], bytes | bytearray | memoryview | str | None]) -> None: ... def process(self, data: str) -> str: ... - def encode(self, inp: bytes) -> str: ... - def decode(self, inp: str) -> bytes: ... + def encode(self, inp: bytes | bytearray | memoryview) -> str: ... + def decode(self, inp: str | _BufferWithLen) -> bytes: ... -def Internaldate2tuple(resp: bytes) -> time.struct_time: ... -def Int2AP(num: int) -> str: ... -def ParseFlags(resp: bytes) -> tuple[bytes, ...]: ... +def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: ... +def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: ... +def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: ... def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: ... diff --git a/mypy/typeshed/stdlib/imp.pyi b/mypy/typeshed/stdlib/imp.pyi index 3054a4465f99..889f0cac4f9f 100644 --- a/mypy/typeshed/stdlib/imp.pyi +++ b/mypy/typeshed/stdlib/imp.pyi @@ -1,9 +1,4 @@ import types -from _typeshed import StrPath -from os import PathLike -from types import TracebackType -from typing import IO, Any, Protocol - from _imp import ( acquire_lock as acquire_lock, create_dynamic as create_dynamic, @@ -15,6 +10,10 @@ from _imp import ( lock_held as lock_held, release_lock as release_lock, ) +from _typeshed import StrPath +from os import PathLike +from types import TracebackType +from typing import IO, Any, Protocol SEARCH_ERROR: int PY_SOURCE: int diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi index 708037305c67..c961fb2e1f9e 100644 --- a/mypy/typeshed/stdlib/importlib/abc.pyi +++ b/mypy/typeshed/stdlib/importlib/abc.pyi @@ -1,12 +1,19 @@ import sys import types -from _typeshed import OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, OpenBinaryModeWriting, OpenTextMode +from _typeshed import ( + OpenBinaryMode, + 
OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, +) from abc import ABCMeta, abstractmethod from collections.abc import Iterator, Mapping, Sequence from importlib.machinery import ModuleSpec from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from typing import IO, Any, BinaryIO, NoReturn, Protocol, overload, runtime_checkable -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal if sys.version_info >= (3, 11): __all__ = [ @@ -24,8 +31,6 @@ if sys.version_info >= (3, 11): "TraversableResources", ] -_Path: TypeAlias = bytes | str - class Finder(metaclass=ABCMeta): ... class Loader(metaclass=ABCMeta): @@ -38,7 +43,7 @@ class Loader(metaclass=ABCMeta): class ResourceLoader(Loader): @abstractmethod - def get_data(self, path: _Path) -> bytes: ... + def get_data(self, path: str) -> bytes: ... class InspectLoader(Loader): def is_package(self, fullname: str) -> bool: ... @@ -47,40 +52,40 @@ class InspectLoader(Loader): def get_source(self, fullname: str) -> str | None: ... def exec_module(self, module: types.ModuleType) -> None: ... @staticmethod - def source_to_code(data: bytes | str, path: str = ...) -> types.CodeType: ... + def source_to_code(data: ReadableBuffer | str, path: str = ...) -> types.CodeType: ... class ExecutionLoader(InspectLoader): @abstractmethod - def get_filename(self, fullname: str) -> _Path: ... + def get_filename(self, fullname: str) -> str: ... class SourceLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): - def path_mtime(self, path: _Path) -> float: ... - def set_data(self, path: _Path, data: bytes) -> None: ... + def path_mtime(self, path: str) -> float: ... + def set_data(self, path: str, data: bytes) -> None: ... def get_source(self, fullname: str) -> str | None: ... - def path_stats(self, path: _Path) -> Mapping[str, Any]: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... 
# Please keep in sync with sys._MetaPathFinder class MetaPathFinder(Finder): - def find_module(self, fullname: str, path: Sequence[_Path] | None) -> Loader | None: ... + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec( - self, fullname: str, path: Sequence[_Path] | None, target: types.ModuleType | None = ... + self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... class PathEntryFinder(Finder): def find_module(self, fullname: str) -> Loader | None: ... - def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[_Path]]: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... class FileLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): name: str - path: _Path - def __init__(self, fullname: str, path: _Path) -> None: ... - def get_data(self, path: _Path) -> bytes: ... - def get_filename(self, name: str | None = ...) -> _Path: ... + path: str + def __init__(self, fullname: str, path: str) -> None: ... + def get_data(self, path: str) -> bytes: ... + def get_filename(self, name: str | None = ...) -> str: ... def load_module(self, name: str | None = ...) -> types.ModuleType: ... class ResourceReader(metaclass=ABCMeta): @@ -174,6 +179,7 @@ if sys.version_info >= (3, 9): self, mode: str, buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... ) -> IO[Any]: ... @property + @abstractmethod def name(self) -> str: ... @abstractmethod def __truediv__(self, child: str) -> Traversable: ... 
diff --git a/mypy/typeshed/stdlib/importlib/machinery.pyi b/mypy/typeshed/stdlib/importlib/machinery.pyi index ba6ed30629e0..6e253521bc0f 100644 --- a/mypy/typeshed/stdlib/importlib/machinery.pyi +++ b/mypy/typeshed/stdlib/importlib/machinery.pyi @@ -1,6 +1,7 @@ import importlib.abc import sys import types +from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable, Sequence from typing import Any @@ -31,10 +32,10 @@ class ModuleSpec: class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod - def find_module(cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... # InspectLoader @classmethod @@ -62,10 +63,10 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod - def find_module(cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... 
# InspectLoader @classmethod @@ -91,10 +92,10 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): class WindowsRegistryFinder(importlib.abc.MetaPathFinder): @classmethod - def find_module(cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... class PathFinder: @@ -113,10 +114,10 @@ class PathFinder: @classmethod def find_spec( - cls, fullname: str, path: Sequence[bytes | str] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = ..., target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... @classmethod - def find_module(cls, fullname: str, path: Sequence[bytes | str] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = ...) -> importlib.abc.Loader | None: ... SOURCE_SUFFIXES: list[str] DEBUG_BYTECODE_SUFFIXES: list[str] @@ -135,13 +136,13 @@ class FileFinder(importlib.abc.PathEntryFinder): ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... class SourceFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): - def set_data(self, path: importlib.abc._Path, data: bytes, *, _mode: int = ...) -> None: ... + def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = ...) -> None: ... class SourcelessFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... class ExtensionFileLoader(importlib.abc.ExecutionLoader): - def __init__(self, name: str, path: importlib.abc._Path) -> None: ... - def get_filename(self, name: str | None = ...) -> importlib.abc._Path: ... 
+ def __init__(self, name: str, path: str) -> None: ... + def get_filename(self, name: str | None = ...) -> str: ... def get_source(self, fullname: str) -> None: ... def create_module(self, spec: ModuleSpec) -> types.ModuleType: ... def exec_module(self, module: types.ModuleType) -> None: ... diff --git a/mypy/typeshed/stdlib/importlib/util.pyi b/mypy/typeshed/stdlib/importlib/util.pyi index 4d75032ab44a..e9c08aeccf87 100644 --- a/mypy/typeshed/stdlib/importlib/util.pyi +++ b/mypy/typeshed/stdlib/importlib/util.pyi @@ -1,7 +1,7 @@ import importlib.abc import importlib.machinery import types -from _typeshed import StrOrBytesPath +from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Callable from typing import Any from typing_extensions import ParamSpec @@ -17,7 +17,7 @@ MAGIC_NUMBER: bytes def cache_from_source(path: str, debug_override: bool | None = ..., *, optimization: Any | None = ...) -> str: ... def source_from_cache(path: str) -> str: ... -def decode_source(source_bytes: bytes) -> str: ... +def decode_source(source_bytes: ReadableBuffer) -> str: ... def find_spec(name: str, package: str | None = ...) -> importlib.machinery.ModuleSpec | None: ... def spec_from_loader( name: str, loader: importlib.abc.Loader | None, *, origin: str | None = ..., is_package: bool | None = ... @@ -37,4 +37,4 @@ class LazyLoader(importlib.abc.Loader): def factory(cls, loader: importlib.abc.Loader) -> Callable[..., LazyLoader]: ... def exec_module(self, module: types.ModuleType) -> None: ... -def source_hash(source_bytes: bytes) -> int: ... +def source_hash(source_bytes: ReadableBuffer) -> int: ... 
diff --git a/mypy/typeshed/stdlib/inspect.pyi b/mypy/typeshed/stdlib/inspect.pyi index b97bc601271a..ad68aa93c894 100644 --- a/mypy/typeshed/stdlib/inspect.pyi +++ b/mypy/typeshed/stdlib/inspect.pyi @@ -585,7 +585,7 @@ _Object: TypeAlias = object class Attribute(NamedTuple): name: str - kind: str + kind: Literal["class method", "static method", "property", "method", "data"] defining_class: type object: _Object diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi index 3e9a6cd6861d..c1889300f981 100644 --- a/mypy/typeshed/stdlib/io.pyi +++ b/mypy/typeshed/stdlib/io.pyi @@ -2,7 +2,7 @@ import abc import builtins import codecs import sys -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer +from _typeshed import FileDescriptorOrPath, ReadableBuffer, Self, WriteableBuffer from collections.abc import Callable, Iterable, Iterator from os import _Opener from types import TracebackType @@ -92,9 +92,9 @@ class BufferedIOBase(IOBase): class FileIO(RawIOBase, BinaryIO): mode: str - name: StrOrBytesPath | int # type: ignore[assignment] + name: FileDescriptorOrPath # type: ignore[assignment] def __init__( - self, file: StrOrBytesPath | int, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ... + self, file: FileDescriptorOrPath, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ... ) -> None: ... @property def closefd(self) -> bool: ... @@ -103,7 +103,7 @@ class FileIO(RawIOBase, BinaryIO): def __enter__(self: Self) -> Self: ... class BytesIO(BufferedIOBase, BinaryIO): - def __init__(self, initial_bytes: bytes = ...) -> None: ... + def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ... # BytesIO does not contain a "name" field. This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. 
@@ -190,7 +190,7 @@ class StringIO(TextIOWrapper): class IncrementalNewlineDecoder(codecs.IncrementalDecoder): def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = ...) -> None: ... - def decode(self, input: bytes | str, final: bool = ...) -> str: ... + def decode(self, input: ReadableBuffer | str, final: bool = ...) -> str: ... @property def newlines(self) -> str | tuple[str, ...] | None: ... def setstate(self, __state: tuple[bytes, int]) -> None: ... diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi index 2c0292d6fbae..6580ba4f1ac4 100644 --- a/mypy/typeshed/stdlib/ipaddress.pyi +++ b/mypy/typeshed/stdlib/ipaddress.pyi @@ -18,7 +18,9 @@ def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: ... def ip_network( address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = ... ) -> IPv4Network | IPv6Network: ... -def ip_interface(address: _RawIPAddress | _RawNetworkPart) -> IPv4Interface | IPv6Interface: ... +def ip_interface( + address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int] +) -> IPv4Interface | IPv6Interface: ... class _IPAddressBase: @property diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi index 7299ee8200db..3cc1bd00de79 100644 --- a/mypy/typeshed/stdlib/itertools.pyi +++ b/mypy/typeshed/stdlib/itertools.pyi @@ -271,3 +271,9 @@ if sys.version_info >= (3, 10): def __new__(cls, __iterable: Iterable[_T]) -> pairwise[tuple[_T, _T]]: ... def __iter__(self: Self) -> Self: ... def __next__(self) -> _T_co: ... + +if sys.version_info >= (3, 12): + class batched(Iterator[_T_co], Generic[_T_co]): + def __new__(cls: type[Self], iterable: Iterable[_T_co], n: int) -> Self: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> tuple[_T_co, ...]: ... 
diff --git a/mypy/typeshed/stdlib/json/__init__.pyi b/mypy/typeshed/stdlib/json/__init__.pyi index 2fd87622e1fe..73bb5e8b4c1a 100644 --- a/mypy/typeshed/stdlib/json/__init__.pyi +++ b/mypy/typeshed/stdlib/json/__init__.pyi @@ -1,6 +1,6 @@ -from _typeshed import SupportsRead +from _typeshed import SupportsRead, SupportsWrite from collections.abc import Callable -from typing import IO, Any +from typing import Any from .decoder import JSONDecodeError as JSONDecodeError, JSONDecoder as JSONDecoder from .encoder import JSONEncoder as JSONEncoder @@ -23,7 +23,7 @@ def dumps( ) -> str: ... def dump( obj: Any, - fp: IO[str], + fp: SupportsWrite[str], *, skipkeys: bool = ..., ensure_ascii: bool = ..., @@ -37,7 +37,7 @@ def dump( **kwds: Any, ) -> None: ... def loads( - s: str | bytes, + s: str | bytes | bytearray, *, cls: type[JSONDecoder] | None = ..., object_hook: Callable[[dict[Any, Any]], Any] | None = ..., @@ -58,4 +58,4 @@ def load( object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = ..., **kwds: Any, ) -> Any: ... -def detect_encoding(b: bytes) -> str: ... # undocumented +def detect_encoding(b: bytes | bytearray) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/json/encoder.pyi b/mypy/typeshed/stdlib/json/encoder.pyi index 60e82061946b..0444ae477a96 100644 --- a/mypy/typeshed/stdlib/json/encoder.pyi +++ b/mypy/typeshed/stdlib/json/encoder.pyi @@ -20,7 +20,7 @@ class JSONEncoder: check_circular: bool allow_nan: bool sort_keys: bool - indent: int + indent: int | str def __init__( self, *, @@ -29,7 +29,7 @@ class JSONEncoder: check_circular: bool = ..., allow_nan: bool = ..., sort_keys: bool = ..., - indent: int | None = ..., + indent: int | str | None = ..., separators: tuple[str, str] | None = ..., default: Callable[..., Any] | None = ..., ) -> None: ... 
diff --git a/mypy/typeshed/stdlib/logging/handlers.pyi b/mypy/typeshed/stdlib/logging/handlers.pyi index eec4ed96953a..f01c67d13fe9 100644 --- a/mypy/typeshed/stdlib/logging/handlers.pyi +++ b/mypy/typeshed/stdlib/logging/handlers.pyi @@ -2,7 +2,7 @@ import datetime import http.client import ssl import sys -from _typeshed import StrPath +from _typeshed import ReadableBuffer, StrPath from collections.abc import Callable from logging import FileHandler, Handler, LogRecord from queue import Queue, SimpleQueue @@ -125,7 +125,7 @@ class SocketHandler(Handler): def __init__(self, host: str, port: int | None) -> None: ... def makeSocket(self, timeout: float = ...) -> socket: ... # timeout is undocumented def makePickle(self, record: LogRecord) -> bytes: ... - def send(self, s: bytes) -> None: ... + def send(self, s: ReadableBuffer) -> None: ... def createSocket(self) -> None: ... class DatagramHandler(SocketHandler): diff --git a/mypy/typeshed/stdlib/lzma.pyi b/mypy/typeshed/stdlib/lzma.pyi index 868da0f05567..9d75c627f76d 100644 --- a/mypy/typeshed/stdlib/lzma.pyi +++ b/mypy/typeshed/stdlib/lzma.pyi @@ -83,7 +83,7 @@ PRESET_EXTREME: int # v big number @final class LZMADecompressor: def __init__(self, format: int | None = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> None: ... - def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... @property def check(self) -> int: ... @property @@ -99,7 +99,7 @@ class LZMACompressor: def __init__( self, format: int | None = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... ) -> None: ... - def compress(self, __data: bytes) -> bytes: ... + def compress(self, __data: ReadableBuffer) -> bytes: ... def flush(self) -> bytes: ... class LZMAError(Exception): ... @@ -189,7 +189,9 @@ def open( newline: str | None = ..., ) -> LZMAFile | TextIO: ... 
def compress( - data: bytes, format: int = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... + data: ReadableBuffer, format: int = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... +) -> bytes: ... +def decompress( + data: ReadableBuffer, format: int = ..., memlimit: int | None = ..., filters: _FilterChain | None = ... ) -> bytes: ... -def decompress(data: bytes, format: int = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> bytes: ... def is_check_supported(__check_id: int) -> bool: ... diff --git a/mypy/typeshed/stdlib/mailbox.pyi b/mypy/typeshed/stdlib/mailbox.pyi index 3169e8cfa689..29cea5cadbb0 100644 --- a/mypy/typeshed/stdlib/mailbox.pyi +++ b/mypy/typeshed/stdlib/mailbox.pyi @@ -1,6 +1,7 @@ import email.message +import io import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import Self, StrPath, SupportsNoArgReadline, SupportsRead from abc import ABCMeta, abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence from types import TracebackType @@ -32,7 +33,10 @@ __all__ = [ _T = TypeVar("_T") _MessageT = TypeVar("_MessageT", bound=Message) -_MessageData: TypeAlias = email.message.Message | bytes | str | IO[str] | IO[bytes] + +class _SupportsReadAndReadline(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... + +_MessageData: TypeAlias = email.message.Message | bytes | str | io.StringIO | _SupportsReadAndReadline class _HasIteritems(Protocol): def iteritems(self) -> Iterator[tuple[str, _MessageData]]: ... @@ -43,13 +47,12 @@ class _HasItems(Protocol): linesep: bytes class Mailbox(Generic[_MessageT]): - - _path: bytes | str # undocumented + _path: str # undocumented _factory: Callable[[IO[Any]], _MessageT] | None # undocumented @overload - def __init__(self, path: StrOrBytesPath, factory: Callable[[IO[Any]], _MessageT], create: bool = ...) -> None: ... 
+ def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = ...) -> None: ... @overload - def __init__(self, path: StrOrBytesPath, factory: None = ..., create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: None = ..., create: bool = ...) -> None: ... @abstractmethod def add(self, message: _MessageData) -> str: ... @abstractmethod @@ -105,7 +108,7 @@ class Maildir(Mailbox[MaildirMessage]): colon: str def __init__( - self, dirname: StrOrBytesPath, factory: Callable[[IO[Any]], MaildirMessage] | None = ..., create: bool = ... + self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = ..., create: bool = ... ) -> None: ... def add(self, message: _MessageData) -> str: ... def remove(self, key: str) -> None: ... @@ -146,19 +149,13 @@ class _mboxMMDF(_singlefileMailbox[_MessageT]): def get_string(self, key: str, from_: bool = ...) -> str: ... class mbox(_mboxMMDF[mboxMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], mboxMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = ..., create: bool = ...) -> None: ... class MMDF(_mboxMMDF[MMDFMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], MMDFMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = ..., create: bool = ...) -> None: ... class MH(Mailbox[MHMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], MHMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = ..., create: bool = ...) -> None: ... def add(self, message: _MessageData) -> str: ... def remove(self, key: str) -> None: ... def __setitem__(self, key: str, message: _MessageData) -> None: ... 
@@ -173,17 +170,15 @@ class MH(Mailbox[MHMessage]): def unlock(self) -> None: ... def close(self) -> None: ... def list_folders(self) -> list[str]: ... - def get_folder(self, folder: StrOrBytesPath) -> MH: ... - def add_folder(self, folder: StrOrBytesPath) -> MH: ... - def remove_folder(self, folder: StrOrBytesPath) -> None: ... + def get_folder(self, folder: StrPath) -> MH: ... + def add_folder(self, folder: StrPath) -> MH: ... + def remove_folder(self, folder: StrPath) -> None: ... def get_sequences(self) -> dict[str, list[int]]: ... def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: ... def pack(self) -> None: ... class Babyl(_singlefileMailbox[BabylMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], BabylMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = ..., create: bool = ...) -> None: ... def get_message(self, key: str) -> BabylMessage: ... def get_bytes(self, key: str) -> bytes: ... def get_file(self, key: str) -> IO[bytes]: ... diff --git a/mypy/typeshed/stdlib/marshal.pyi b/mypy/typeshed/stdlib/marshal.pyi index b2fde674a647..d46d9c10483d 100644 --- a/mypy/typeshed/stdlib/marshal.pyi +++ b/mypy/typeshed/stdlib/marshal.pyi @@ -1,8 +1,33 @@ -from typing import IO, Any +import builtins +import types +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite +from typing import Any, Union +from typing_extensions import TypeAlias version: int -def dump(__value: Any, __file: IO[Any], __version: int = ...) -> None: ... -def load(__file: IO[Any]) -> Any: ... -def dumps(__value: Any, __version: int = ...) -> bytes: ... -def loads(__bytes: bytes) -> Any: ... 
+_Marshallable: TypeAlias = Union[ + # handled in w_object() in marshal.c + None, + type[StopIteration], + builtins.ellipsis, + bool, + # handled in w_complex_object() in marshal.c + int, + float, + complex, + bytes, + str, + tuple[_Marshallable, ...], + list[Any], + dict[Any, Any], + set[Any], + frozenset[_Marshallable], + types.CodeType, + ReadableBuffer, +] + +def dump(__value: _Marshallable, __file: SupportsWrite[bytes], __version: int = ...) -> None: ... +def load(__file: SupportsRead[bytes]) -> Any: ... +def dumps(__value: _Marshallable, __version: int = ...) -> bytes: ... +def loads(__bytes: ReadableBuffer) -> Any: ... diff --git a/mypy/typeshed/stdlib/math.pyi b/mypy/typeshed/stdlib/math.pyi index 58eda98d8977..ca30acd7e97d 100644 --- a/mypy/typeshed/stdlib/math.pyi +++ b/mypy/typeshed/stdlib/math.pyi @@ -1,9 +1,11 @@ import sys -from _typeshed import SupportsTrunc from collections.abc import Iterable -from typing import SupportsFloat, overload +from typing import Protocol, SupportsFloat, TypeVar, overload from typing_extensions import SupportsIndex, TypeAlias +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + if sys.version_info >= (3, 8): _SupportsFloatOrIndex: TypeAlias = SupportsFloat | SupportsIndex else: @@ -26,6 +28,12 @@ def atanh(__x: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 11): def cbrt(__x: _SupportsFloatOrIndex) -> float: ... +class _SupportsCeil(Protocol[_T_co]): + def __ceil__(self) -> _T_co: ... + +@overload +def ceil(__x: _SupportsCeil[_T]) -> _T: ... +@overload def ceil(__x: _SupportsFloatOrIndex) -> int: ... if sys.version_info >= (3, 8): @@ -55,6 +63,12 @@ if sys.version_info >= (3, 8): else: def factorial(__x: int) -> int: ... +class _SupportsFloor(Protocol[_T_co]): + def __floor__(self) -> _T_co: ... + +@overload +def floor(__x: _SupportsFloor[_T]) -> _T: ... +@overload def floor(__x: _SupportsFloatOrIndex) -> int: ... def fmod(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... 
def frexp(__x: _SupportsFloatOrIndex) -> tuple[float, int]: ... @@ -119,7 +133,12 @@ def sinh(__x: _SupportsFloatOrIndex) -> float: ... def sqrt(__x: _SupportsFloatOrIndex) -> float: ... def tan(__x: _SupportsFloatOrIndex) -> float: ... def tanh(__x: _SupportsFloatOrIndex) -> float: ... -def trunc(__x: SupportsTrunc) -> int: ... + +# Is different from `_typeshed.SupportsTrunc`, which is not generic +class _SupportsTrunc(Protocol[_T_co]): + def __trunc__(self) -> _T_co: ... + +def trunc(__x: _SupportsTrunc[_T]) -> _T: ... if sys.version_info >= (3, 9): def ulp(__x: _SupportsFloatOrIndex) -> float: ... diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi index 8dbec2388838..30084b85bc51 100644 --- a/mypy/typeshed/stdlib/mmap.pyi +++ b/mypy/typeshed/stdlib/mmap.pyi @@ -67,8 +67,11 @@ class mmap(Iterable[int], Sized): def __setitem__(self, __index: int, __object: int) -> None: ... @overload def __setitem__(self, __index: slice, __object: ReadableBuffer) -> None: ... - # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and - # __len__, so we claim that there is also an __iter__ to help type checkers. + # Doesn't actually exist, but the object actually supports "in" because it has __getitem__, + # so we claim that there is also a __contains__ to help type checkers. + def __contains__(self, __o: object) -> bool: ... + # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and __len__, + # so we claim that there is also an __iter__ to help type checkers. def __iter__(self) -> Iterator[int]: ... def __enter__(self: Self) -> Self: ... def __exit__(self, *args: object) -> None: ... diff --git a/mypy/typeshed/stdlib/msvcrt.pyi b/mypy/typeshed/stdlib/msvcrt.pyi index 0bea8ce22b06..5849b9b00ca0 100644 --- a/mypy/typeshed/stdlib/msvcrt.pyi +++ b/mypy/typeshed/stdlib/msvcrt.pyi @@ -21,8 +21,8 @@ if sys.platform == "win32": def getwch() -> str: ... def getche() -> bytes: ... 
def getwche() -> str: ... - def putch(__char: bytes) -> None: ... + def putch(__char: bytes | bytearray) -> None: ... def putwch(__unicode_char: str) -> None: ... - def ungetch(__char: bytes) -> None: ... + def ungetch(__char: bytes | bytearray) -> None: ... def ungetwch(__unicode_char: str) -> None: ... def heapmin() -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/connection.pyi index cc9f5cf8f890..5036f0ef222b 100644 --- a/mypy/typeshed/stdlib/multiprocessing/connection.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/connection.pyi @@ -1,7 +1,7 @@ import socket import sys import types -from _typeshed import Self +from _typeshed import ReadableBuffer, Self from collections.abc import Iterable from typing import Any, Union from typing_extensions import SupportsIndex, TypeAlias @@ -21,7 +21,7 @@ class _ConnectionBase: def writable(self) -> bool: ... # undocumented def fileno(self) -> int: ... def close(self) -> None: ... - def send_bytes(self, buf: bytes, offset: int = ..., size: int | None = ...) -> None: ... + def send_bytes(self, buf: ReadableBuffer, offset: int = ..., size: int | None = ...) -> None: ... def send(self, obj: Any) -> None: ... def recv_bytes(self, maxlength: int | None = ...) -> bytes: ... def recv_bytes_into(self, buf: Any, offset: int = ...) -> int: ... 
diff --git a/mypy/typeshed/stdlib/multiprocessing/context.pyi b/mypy/typeshed/stdlib/multiprocessing/context.pyi index f6380e2cfcbf..6622dca19ade 100644 --- a/mypy/typeshed/stdlib/multiprocessing/context.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/context.pyi @@ -2,7 +2,7 @@ import ctypes import sys from collections.abc import Callable, Iterable, Sequence from ctypes import _CData -from logging import Logger +from logging import Logger, _Level as _LoggingLevel from multiprocessing import popen_fork, popen_forkserver, popen_spawn_posix, popen_spawn_win32, queues, synchronize from multiprocessing.managers import SyncManager from multiprocessing.pool import Pool as _Pool @@ -107,7 +107,7 @@ class BaseContext: ) -> Any: ... def freeze_support(self) -> None: ... def get_logger(self) -> Logger: ... - def log_to_stderr(self, level: str | None = ...) -> Logger: ... + def log_to_stderr(self, level: _LoggingLevel | None = ...) -> Logger: ... def allow_connection_pickling(self) -> None: ... def set_executable(self, executable: str) -> None: ... def set_forkserver_preload(self, module_names: list[str]) -> None: ... 
diff --git a/mypy/typeshed/stdlib/multiprocessing/reduction.pyi b/mypy/typeshed/stdlib/multiprocessing/reduction.pyi index cab86d866bab..d6b70aefa48d 100644 --- a/mypy/typeshed/stdlib/multiprocessing/reduction.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/reduction.pyi @@ -1,9 +1,14 @@ import pickle import sys -from _typeshed import HasFileno, Incomplete +from _typeshed import HasFileno, SupportsWrite from abc import ABCMeta +from builtins import type as Type # alias to avoid name clash +from collections.abc import Callable from copyreg import _DispatchTableType +from multiprocessing import connection +from pickle import _ReducedType from socket import socket +from typing import Any from typing_extensions import Literal if sys.platform == "win32": @@ -11,18 +16,20 @@ if sys.platform == "win32": else: __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupFd", "sendfds", "recvfds"] +HAVE_SEND_HANDLE: bool + class ForkingPickler(pickle.Pickler): dispatch_table: _DispatchTableType - def __init__(self, *args) -> None: ... + def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... @classmethod - def register(cls, type, reduce) -> None: ... + def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: ... @classmethod - def dumps(cls, obj, protocol: Incomplete | None = ...): ... + def dumps(cls, obj: Any, protocol: int | None = ...) -> memoryview: ... loads = pickle.loads register = ForkingPickler.register -def dump(obj, file, protocol: Incomplete | None = ...) -> None: ... +def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... if sys.platform == "win32": if sys.version_info >= (3, 8): @@ -32,13 +39,13 @@ if sys.platform == "win32": else: def duplicate(handle: int, target_process: int | None = ..., inheritable: bool = ...) -> int: ... - def steal_handle(source_pid, handle): ... - def send_handle(conn, handle, destination_pid) -> None: ... 
- def recv_handle(conn): ... + def steal_handle(source_pid: int, handle: int) -> int: ... + def send_handle(conn: connection.PipeConnection, handle: int, destination_pid: int) -> None: ... + def recv_handle(conn: connection.PipeConnection) -> int: ... class DupHandle: - def __init__(self, handle, access, pid: Incomplete | None = ...) -> None: ... - def detach(self): ... + def __init__(self, handle: int, access: int, pid: int | None = ...) -> None: ... + def detach(self) -> int: ... else: if sys.platform == "darwin": @@ -47,10 +54,11 @@ else: ACKNOWLEDGE: Literal[False] def recvfds(sock: socket, size: int) -> list[int]: ... - def send_handle(conn, handle, destination_pid) -> None: ... + # destination_pid is unused + def send_handle(conn: HasFileno, handle: int, destination_pid: object) -> None: ... def recv_handle(conn: HasFileno) -> int: ... - def sendfds(sock, fds) -> None: ... - def DupFd(fd): ... + def sendfds(sock: socket, fds: list[int]) -> None: ... + def DupFd(fd: int) -> Any: ... # Return type is really hard to get right # These aliases are to work around pyright complaints. # Pyright doesn't like it when a class object is defined as an alias @@ -84,4 +92,5 @@ class AbstractReducer(metaclass=ABCMeta): sendfds = _sendfds recvfds = _recvfds DupFd = _DupFd - def __init__(self, *args) -> None: ... + # *args are unused + def __init__(self, *args: object) -> None: ... 
diff --git a/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi b/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi index 50f3db67467b..e2b940796126 100644 --- a/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete, StrOrBytesPath +from _typeshed import FileDescriptorOrPath, Incomplete from collections.abc import Sized __all__ = ["ensure_running", "register", "unregister"] @@ -15,4 +15,4 @@ register = _resource_tracker.register unregister = _resource_tracker.unregister getfd = _resource_tracker.getfd -def main(fd: StrOrBytesPath | int) -> None: ... +def main(fd: FileDescriptorOrPath) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/util.pyi b/mypy/typeshed/stdlib/multiprocessing/util.pyi index 4b93b7a6a472..263781da9432 100644 --- a/mypy/typeshed/stdlib/multiprocessing/util.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/util.pyi @@ -1,7 +1,7 @@ import threading from _typeshed import Incomplete, ReadableBuffer, SupportsTrunc from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence -from logging import Logger +from logging import Logger, _Level as _LoggingLevel from typing import Any, SupportsInt from typing_extensions import SupportsIndex @@ -37,7 +37,7 @@ def debug(msg: object, *args: object) -> None: ... def info(msg: object, *args: object) -> None: ... def sub_warning(msg: object, *args: object) -> None: ... def get_logger() -> Logger: ... -def log_to_stderr(level: int | None = ...) -> Logger: ... +def log_to_stderr(level: _LoggingLevel | None = ...) -> Logger: ... def is_abstract_socket_namespace(address: str | bytes | None) -> bool: ... 
abstract_sockets_supported: bool diff --git a/mypy/typeshed/stdlib/netrc.pyi b/mypy/typeshed/stdlib/netrc.pyi index 803c78073348..217c0eb542d0 100644 --- a/mypy/typeshed/stdlib/netrc.pyi +++ b/mypy/typeshed/stdlib/netrc.pyi @@ -1,3 +1,4 @@ +import sys from _typeshed import StrOrBytesPath from typing_extensions import TypeAlias @@ -10,7 +11,10 @@ class NetrcParseError(Exception): def __init__(self, msg: str, filename: StrOrBytesPath | None = ..., lineno: int | None = ...) -> None: ... # (login, account, password) tuple -_NetrcTuple: TypeAlias = tuple[str, str | None, str | None] +if sys.version_info >= (3, 11): + _NetrcTuple: TypeAlias = tuple[str, str, str] +else: + _NetrcTuple: TypeAlias = tuple[str, str | None, str | None] class netrc: hosts: dict[str, _NetrcTuple] diff --git a/mypy/typeshed/stdlib/operator.pyi b/mypy/typeshed/stdlib/operator.pyi index c3fc4b0a8503..a0e5df7977da 100644 --- a/mypy/typeshed/stdlib/operator.pyi +++ b/mypy/typeshed/stdlib/operator.pyi @@ -1,5 +1,4 @@ import sys - from _operator import * __all__ = [ diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index 6f51d4e7aa50..ec31cc5e2a76 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -3,6 +3,7 @@ from _typeshed import ( AnyStr_co, BytesPath, FileDescriptorLike, + FileDescriptorOrPath, GenericPath, OpenBinaryMode, OpenBinaryModeReading, @@ -370,9 +371,6 @@ def listdir(path: StrPath | None = ...) -> list[str]: ... def listdir(path: BytesPath) -> list[bytes]: ... @overload def listdir(path: int) -> list[str]: ... - -_FdOrAnyPath: TypeAlias = int | StrOrBytesPath - @final class DirEntry(Generic[AnyStr]): # This is what the scandir iterator yields @@ -503,11 +501,14 @@ if sys.platform != "win32": def getenvb(key: bytes) -> bytes | None: ... @overload def getenvb(key: bytes, default: _T) -> bytes | _T: ... + def putenv(__name: StrOrBytesPath, __value: StrOrBytesPath) -> None: ... 
+ def unsetenv(__name: StrOrBytesPath) -> None: ... -def putenv(__name: bytes | str, __value: bytes | str) -> None: ... +else: + def putenv(__name: str, __value: str) -> None: ... -if sys.platform != "win32" or sys.version_info >= (3, 9): - def unsetenv(__name: bytes | str) -> None: ... + if sys.version_info >= (3, 9): + def unsetenv(__name: str) -> None: ... _Opener: TypeAlias = Callable[[str, int], int] @@ -622,7 +623,7 @@ if sys.platform != "win32": def posix_fadvise(__fd: int, __offset: int, __length: int, __advice: int) -> None: ... def pread(__fd: int, __length: int, __offset: int) -> bytes: ... - def pwrite(__fd: int, __buffer: bytes, __offset: int) -> int: ... + def pwrite(__fd: int, __buffer: ReadableBuffer, __offset: int) -> int: ... # In CI, stubtest sometimes reports that these are available on MacOS, sometimes not def preadv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer], __offset: int, __flags: int = ...) -> int: ... def pwritev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer], __offset: int, __flags: int = ...) -> int: ... @@ -641,8 +642,8 @@ if sys.platform != "win32": in_fd: int, offset: int, count: int, - headers: Sequence[bytes] = ..., - trailers: Sequence[bytes] = ..., + headers: Sequence[ReadableBuffer] = ..., + trailers: Sequence[ReadableBuffer] = ..., flags: int = ..., ) -> int: ... # FreeBSD and Mac OS X only def readv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer]) -> int: ... @@ -671,18 +672,18 @@ if sys.platform != "win32": def tcsetpgrp(__fd: int, __pgid: int) -> None: ... def ttyname(__fd: int) -> str: ... -def write(__fd: int, __data: bytes) -> int: ... +def write(__fd: int, __data: ReadableBuffer) -> int: ... def access( - path: _FdOrAnyPath, mode: int, *, dir_fd: int | None = ..., effective_ids: bool = ..., follow_symlinks: bool = ... + path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = ..., effective_ids: bool = ..., follow_symlinks: bool = ... ) -> bool: ... 
-def chdir(path: _FdOrAnyPath) -> None: ... +def chdir(path: FileDescriptorOrPath) -> None: ... if sys.platform != "win32": def fchdir(fd: FileDescriptorLike) -> None: ... def getcwd() -> str: ... def getcwdb() -> bytes: ... -def chmod(path: _FdOrAnyPath, mode: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... +def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... if sys.platform != "win32" and sys.platform != "linux": def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = ...) -> None: ... # some flavors of Unix @@ -691,7 +692,9 @@ if sys.platform != "win32" and sys.platform != "linux": if sys.platform != "win32": def chroot(path: StrOrBytesPath) -> None: ... - def chown(path: _FdOrAnyPath, uid: int, gid: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... + def chown( + path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ... + ) -> None: ... def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... def link( @@ -715,7 +718,7 @@ if sys.platform != "win32": def major(__device: int) -> int: ... def minor(__device: int) -> int: ... def makedev(__major: int, __minor: int) -> int: ... - def pathconf(path: _FdOrAnyPath, name: str | int) -> int: ... # Unix only + def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: ... # Unix only def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = ...) -> AnyStr: ... def remove(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... @@ -736,20 +739,20 @@ def scandir(path: None = ...) -> _ScandirIterator[str]: ... def scandir(path: int) -> _ScandirIterator[str]: ... @overload def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... -def stat(path: _FdOrAnyPath, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> stat_result: ... 
+def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> stat_result: ... if sys.platform != "win32": - def statvfs(path: _FdOrAnyPath) -> statvfs_result: ... # Unix only + def statvfs(path: FileDescriptorOrPath) -> statvfs_result: ... # Unix only def symlink(src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = ..., *, dir_fd: int | None = ...) -> None: ... if sys.platform != "win32": def sync() -> None: ... # Unix only -def truncate(path: _FdOrAnyPath, length: int) -> None: ... # Unix only up to version 3.4 +def truncate(path: FileDescriptorOrPath, length: int) -> None: ... # Unix only up to version 3.4 def unlink(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... def utime( - path: _FdOrAnyPath, + path: FileDescriptorOrPath, times: tuple[int, int] | tuple[float, float] | None = ..., *, ns: tuple[int, int] = ..., @@ -775,14 +778,24 @@ if sys.platform != "win32": ) -> Iterator[tuple[str, list[str], list[str], int]]: ... @overload def fwalk( - top: bytes, topdown: bool = ..., onerror: _OnError | None = ..., *, follow_symlinks: bool = ..., dir_fd: int | None = ... + top: BytesPath, + topdown: bool = ..., + onerror: _OnError | None = ..., + *, + follow_symlinks: bool = ..., + dir_fd: int | None = ..., ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... if sys.platform == "linux": - def getxattr(path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> bytes: ... - def listxattr(path: _FdOrAnyPath | None = ..., *, follow_symlinks: bool = ...) -> list[str]: ... - def removexattr(path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... + def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> bytes: ... + def listxattr(path: FileDescriptorOrPath | None = ..., *, follow_symlinks: bool = ...) -> list[str]: ... 
+ def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... def setxattr( - path: _FdOrAnyPath, attribute: StrOrBytesPath, value: bytes, flags: int = ..., *, follow_symlinks: bool = ... + path: FileDescriptorOrPath, + attribute: StrOrBytesPath, + value: ReadableBuffer, + flags: int = ..., + *, + follow_symlinks: bool = ..., ) -> None: ... def abort() -> NoReturn: ... @@ -810,10 +823,14 @@ _ExecVArgs: TypeAlias = ( | list[str | PathLike[Any]] | list[bytes | str | PathLike[Any]] ) +# Depending on the OS, the keys and values are passed either to +# PyUnicode_FSDecoder (which accepts str | ReadableBuffer) or to +# PyUnicode_FSConverter (which accepts StrOrBytesPath). For simplicity, +# we limit to str | bytes. _ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] def execv(__path: StrOrBytesPath, __argv: _ExecVArgs) -> NoReturn: ... -def execve(path: _FdOrAnyPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: ... def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... def _exit(status: int) -> NoReturn: ... diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index 2a0f1760cae5..79c2352a0f85 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -5,10 +5,12 @@ from _typeshed import ( OpenBinaryModeUpdating, OpenBinaryModeWriting, OpenTextMode, + ReadableBuffer, Self, + StrOrBytesPath, StrPath, ) -from collections.abc import Generator, Sequence +from collections.abc import Callable, Generator, Iterator, Sequence from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from os import PathLike, stat_result from types import TracebackType @@ -188,16 +190,20 @@ class Path(PurePath): def expanduser(self: Self) -> Self: ... 
def read_bytes(self) -> bytes: ... def read_text(self, encoding: str | None = ..., errors: str | None = ...) -> str: ... - def samefile(self, other_path: str | bytes | int | Path) -> bool: ... - def write_bytes(self, data: bytes) -> int: ... + def samefile(self, other_path: StrPath) -> bool: ... + def write_bytes(self, data: ReadableBuffer) -> int: ... if sys.version_info >= (3, 10): def write_text( self, data: str, encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... ) -> int: ... else: def write_text(self, data: str, encoding: str | None = ..., errors: str | None = ...) -> int: ... - if sys.version_info >= (3, 8): - def link_to(self, target: StrPath | bytes) -> None: ... + if sys.version_info >= (3, 8) and sys.version_info < (3, 12): + def link_to(self, target: StrOrBytesPath) -> None: ... + if sys.version_info >= (3, 12): + def walk( + self: Self, top_down: bool = ..., on_error: Callable[[OSError], object] | None = ..., follow_symlinks: bool = ... + ) -> Iterator[tuple[Self, list[str], list[str]]]: ... class PosixPath(Path, PurePosixPath): ... class WindowsPath(Path, PureWindowsPath): ... diff --git a/mypy/typeshed/stdlib/pickle.pyi b/mypy/typeshed/stdlib/pickle.pyi index 9a94e9eced3c..f393452069a3 100644 --- a/mypy/typeshed/stdlib/pickle.pyi +++ b/mypy/typeshed/stdlib/pickle.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer +from _typeshed import ReadableBuffer, SupportsWrite from collections.abc import Callable, Iterable, Iterator, Mapping from typing import Any, ClassVar, Protocol, SupportsBytes, Union from typing_extensions import SupportsIndex, TypeAlias, final @@ -97,9 +97,6 @@ class _ReadableFileobj(Protocol): def read(self, __n: int) -> bytes: ... def readline(self) -> bytes: ... -class _WritableFileobj(Protocol): - def write(self, __b: bytes) -> Any: ... 
- if sys.version_info >= (3, 8): @final class PickleBuffer: @@ -109,7 +106,7 @@ if sys.version_info >= (3, 8): _BufferCallback: TypeAlias = Callable[[PickleBuffer], Any] | None def dump( obj: Any, - file: _WritableFileobj, + file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ..., @@ -136,7 +133,7 @@ if sys.version_info >= (3, 8): ) -> Any: ... else: - def dump(obj: Any, file: _WritableFileobj, protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... + def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... def dumps(obj: Any, protocol: int | None = ..., *, fix_imports: bool = ...) -> bytes: ... def load(file: _ReadableFileobj, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... def loads(data: ReadableBuffer, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... @@ -162,7 +159,7 @@ class Pickler: if sys.version_info >= (3, 8): def __init__( self, - file: _WritableFileobj, + file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ..., @@ -170,7 +167,7 @@ class Pickler: ) -> None: ... def reducer_override(self, obj: Any) -> Any: ... else: - def __init__(self, file: _WritableFileobj, protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... + def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... def dump(self, __obj: Any) -> None: ... def clear_memo(self) -> None: ... diff --git a/mypy/typeshed/stdlib/pickletools.pyi b/mypy/typeshed/stdlib/pickletools.pyi index c78848464237..2f0d5f12f8a3 100644 --- a/mypy/typeshed/stdlib/pickletools.pyi +++ b/mypy/typeshed/stdlib/pickletools.pyi @@ -156,10 +156,10 @@ class OpcodeInfo: opcodes: list[OpcodeInfo] -def genops(pickle: bytes | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... -def optimize(p: bytes | IO[bytes]) -> bytes: ... 
+def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... +def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: ... def dis( - pickle: bytes | IO[bytes], + pickle: bytes | bytearray | IO[bytes], out: IO[str] | None = ..., memo: MutableMapping[int, Any] | None = ..., indentlevel: int = ..., diff --git a/mypy/typeshed/stdlib/plistlib.pyi b/mypy/typeshed/stdlib/plistlib.pyi index 9dcfcdb126cb..4ec9cbd5a31c 100644 --- a/mypy/typeshed/stdlib/plistlib.pyi +++ b/mypy/typeshed/stdlib/plistlib.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self +from _typeshed import ReadableBuffer, Self from collections.abc import Mapping, MutableMapping from datetime import datetime from enum import Enum @@ -48,7 +48,9 @@ FMT_BINARY = PlistFormat.FMT_BINARY if sys.version_info >= (3, 9): def load(fp: IO[bytes], *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... - def loads(value: bytes, *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def loads( + value: ReadableBuffer, *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ... + ) -> Any: ... else: def load( @@ -59,7 +61,7 @@ else: dict_type: type[MutableMapping[str, Any]] = ..., ) -> Any: ... def loads( - value: bytes, + value: ReadableBuffer, *, fmt: PlistFormat | None = ..., use_builtin_types: bool = ..., @@ -67,7 +69,7 @@ else: ) -> Any: ... def dump( - value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | datetime, + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, fp: IO[bytes], *, fmt: PlistFormat = ..., @@ -75,7 +77,7 @@ def dump( skipkeys: bool = ..., ) -> None: ... def dumps( - value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | datetime, + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] 
| str | bool | float | bytes | bytearray | datetime, *, fmt: PlistFormat = ..., skipkeys: bool = ..., @@ -85,7 +87,7 @@ def dumps( if sys.version_info < (3, 9): def readPlist(pathOrFile: str | IO[bytes]) -> Any: ... def writePlist(value: Mapping[str, Any], pathOrFile: str | IO[bytes]) -> None: ... - def readPlistFromBytes(data: bytes) -> Any: ... + def readPlistFromBytes(data: ReadableBuffer) -> Any: ... def writePlistToBytes(value: Mapping[str, Any]) -> bytes: ... if sys.version_info < (3, 9): diff --git a/mypy/typeshed/stdlib/posixpath.pyi b/mypy/typeshed/stdlib/posixpath.pyi index 8d880a072dfb..ff9c2482ace5 100644 --- a/mypy/typeshed/stdlib/posixpath.pyi +++ b/mypy/typeshed/stdlib/posixpath.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import AnyOrLiteralStr, BytesPath, StrOrBytesPath, StrPath +from _typeshed import AnyOrLiteralStr, BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath from collections.abc import Sequence from genericpath import ( commonprefix as commonprefix, @@ -147,6 +147,6 @@ def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload def splitext(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... def isabs(s: StrOrBytesPath) -> bool: ... -def islink(path: StrOrBytesPath | int) -> bool: ... -def ismount(path: StrOrBytesPath | int) -> bool: ... -def lexists(path: StrOrBytesPath | int) -> bool: ... +def islink(path: FileDescriptorOrPath) -> bool: ... +def ismount(path: FileDescriptorOrPath) -> bool: ... +def lexists(path: FileDescriptorOrPath) -> bool: ... 
diff --git a/mypy/typeshed/stdlib/pyexpat/__init__.pyi b/mypy/typeshed/stdlib/pyexpat/__init__.pyi index 6a4ed891fe10..7e635c58c933 100644 --- a/mypy/typeshed/stdlib/pyexpat/__init__.pyi +++ b/mypy/typeshed/stdlib/pyexpat/__init__.pyi @@ -1,7 +1,6 @@ -import pyexpat.errors as errors -import pyexpat.model as model -from _typeshed import SupportsRead +from _typeshed import ReadableBuffer, SupportsRead from collections.abc import Callable +from pyexpat import errors as errors, model as model from typing import Any from typing_extensions import TypeAlias, final @@ -25,7 +24,7 @@ _Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] @final class XMLParserType: - def Parse(self, __data: str | bytes, __isfinal: bool = ...) -> int: ... + def Parse(self, __data: str | ReadableBuffer, __isfinal: bool = ...) -> int: ... def ParseFile(self, __file: SupportsRead[bytes]) -> int: ... def SetBase(self, __base: str) -> None: ... def GetBase(self) -> str | None: ... diff --git a/mypy/typeshed/stdlib/quopri.pyi b/mypy/typeshed/stdlib/quopri.pyi index b8dc0787fd1a..549413226bdb 100644 --- a/mypy/typeshed/stdlib/quopri.pyi +++ b/mypy/typeshed/stdlib/quopri.pyi @@ -1,8 +1,11 @@ -from typing import BinaryIO +from _typeshed import ReadableBuffer, SupportsNoArgReadline, SupportsRead, SupportsWrite +from typing import Protocol __all__ = ["encode", "decode", "encodestring", "decodestring"] -def encode(input: BinaryIO, output: BinaryIO, quotetabs: int, header: int = ...) -> None: ... -def encodestring(s: bytes, quotetabs: int = ..., header: int = ...) -> bytes: ... -def decode(input: BinaryIO, output: BinaryIO, header: int = ...) -> None: ... -def decodestring(s: bytes, header: int = ...) -> bytes: ... +class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... + +def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: int = ...) -> None: ... +def encodestring(s: ReadableBuffer, quotetabs: int = ..., header: int = ...) -> bytes: ... 
+def decode(input: _Input, output: SupportsWrite[bytes], header: int = ...) -> None: ... +def decodestring(s: str | ReadableBuffer, header: int = ...) -> bytes: ... diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi index 13c706de1cf4..6dbfbcc06998 100644 --- a/mypy/typeshed/stdlib/shutil.pyi +++ b/mypy/typeshed/stdlib/shutil.pyi @@ -1,8 +1,8 @@ import os import sys -from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite +from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite from collections.abc import Callable, Iterable, Sequence -from typing import Any, AnyStr, NamedTuple, TypeVar, overload +from typing import Any, AnyStr, NamedTuple, Protocol, TypeVar, overload from typing_extensions import TypeAlias __all__ = [ @@ -84,13 +84,22 @@ else: _OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], Any, Any], object] -if sys.version_info >= (3, 11): - def rmtree( - path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ..., *, dir_fd: int | None = ... - ) -> None: ... +class _RmtreeType(Protocol): + avoids_symlink_attacks: bool + if sys.version_info >= (3, 11): + def __call__( + self, + path: StrOrBytesPath, + ignore_errors: bool = ..., + onerror: _OnErrorCallback | None = ..., + *, + dir_fd: int | None = ..., + ) -> None: ... -else: - def rmtree(path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ...) -> None: ... + else: + def __call__(self, path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ...) -> None: ... + +rmtree: _RmtreeType _CopyFn: TypeAlias = Callable[[str, str], object] | Callable[[StrPath, StrPath], object] @@ -109,7 +118,7 @@ class _ntuple_diskusage(NamedTuple): used: int free: int -def disk_usage(path: int | StrOrBytesPath) -> _ntuple_diskusage: ... +def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: ... 
# While chown can be imported on Windows, it doesn't actually work; # see https://bugs.python.org/issue33140. We keep it here because it's diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi index 2d03b60e7bb4..9fedd6f316d1 100644 --- a/mypy/typeshed/stdlib/smtplib.pyi +++ b/mypy/typeshed/stdlib/smtplib.pyi @@ -1,5 +1,6 @@ import sys -from _typeshed import Self +from _socket import _Address as _SourceAddress +from _typeshed import ReadableBuffer, Self, _BufferWithLen from collections.abc import Sequence from email.message import Message as _Message from re import Pattern @@ -28,8 +29,6 @@ __all__ = [ _Reply: TypeAlias = tuple[int, bytes] _SendErrs: TypeAlias = dict[str, _Reply] -# Should match source_address for socket.create_connection -_SourceAddress: TypeAlias = tuple[bytearray | bytes | str, int] SMTP_PORT: int SMTP_SSL_PORT: int @@ -102,7 +101,7 @@ class SMTP: ) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... def connect(self, host: str = ..., port: int = ..., source_address: _SourceAddress | None = ...) -> _Reply: ... - def send(self, s: bytes | str) -> None: ... + def send(self, s: ReadableBuffer | str) -> None: ... def putcmd(self, cmd: str, args: str = ...) -> None: ... def getreply(self) -> _Reply: ... def docmd(self, cmd: str, args: str = ...) -> _Reply: ... @@ -114,7 +113,7 @@ class SMTP: def noop(self) -> _Reply: ... def mail(self, sender: str, options: Sequence[str] = ...) -> _Reply: ... def rcpt(self, recip: str, options: Sequence[str] = ...) -> _Reply: ... - def data(self, msg: bytes | str) -> _Reply: ... + def data(self, msg: ReadableBuffer | str) -> _Reply: ... def verify(self, address: str) -> _Reply: ... vrfy = verify def expn(self, address: str) -> _Reply: ... @@ -125,16 +124,16 @@ class SMTP: @overload def auth_cram_md5(self, challenge: None = ...) -> None: ... @overload - def auth_cram_md5(self, challenge: bytes) -> str: ... - def auth_plain(self, challenge: bytes | None = ...) -> str: ... 
- def auth_login(self, challenge: bytes | None = ...) -> str: ... + def auth_cram_md5(self, challenge: ReadableBuffer) -> str: ... + def auth_plain(self, challenge: ReadableBuffer | None = ...) -> str: ... + def auth_login(self, challenge: ReadableBuffer | None = ...) -> str: ... def login(self, user: str, password: str, *, initial_response_ok: bool = ...) -> _Reply: ... def starttls(self, keyfile: str | None = ..., certfile: str | None = ..., context: SSLContext | None = ...) -> _Reply: ... def sendmail( self, from_addr: str, to_addrs: str | Sequence[str], - msg: bytes | str, + msg: _BufferWithLen | str, mail_options: Sequence[str] = ..., rcpt_options: Sequence[str] = ..., ) -> _SendErrs: ... diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi index 89a6d059f165..678bdafb25f0 100644 --- a/mypy/typeshed/stdlib/socket.pyi +++ b/mypy/typeshed/stdlib/socket.pyi @@ -1,15 +1,8 @@ -import sys -from _typeshed import ReadableBuffer, Self, WriteableBuffer -from collections.abc import Iterable -from enum import IntEnum, IntFlag -from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper -from typing import Any, Protocol, overload -from typing_extensions import Literal - # Ideally, we'd just do "from _socket import *". Unfortunately, socket # overrides some definitions from _socket incompatibly. mypy incorrectly # prefers the definitions from _socket over those defined here. 
import _socket +import sys from _socket import ( _FD, EAI_AGAIN as EAI_AGAIN, @@ -119,6 +112,12 @@ from _socket import ( setdefaulttimeout as setdefaulttimeout, timeout as timeout, ) +from _typeshed import ReadableBuffer, Self, WriteableBuffer +from collections.abc import Iterable +from enum import IntEnum, IntFlag +from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper +from typing import Any, Protocol, overload +from typing_extensions import Literal if sys.platform != "darwin" or sys.version_info >= (3, 9): from _socket import ( @@ -738,7 +737,7 @@ if sys.platform != "win32": if sys.version_info >= (3, 9): # flags and address appear to be unused in send_fds and recv_fds def send_fds( - sock: socket, buffers: Iterable[bytes], fds: bytes | Iterable[int], flags: int = ..., address: None = ... + sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: int = ..., address: None = ... ) -> int: ... def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = ...) -> tuple[bytes, list[int], int, Any]: ... @@ -768,16 +767,14 @@ if sys.version_info >= (3, 11): def create_connection( address: tuple[str | None, int], timeout: float | None = ..., # noqa: F811 - source_address: tuple[bytearray | bytes | str, int] | None = ..., + source_address: _Address | None = ..., *, all_errors: bool = ..., ) -> socket: ... else: def create_connection( - address: tuple[str | None, int], - timeout: float | None = ..., # noqa: F811 - source_address: tuple[bytearray | bytes | str, int] | None = ..., + address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = ... # noqa: F811 ) -> socket: ... if sys.version_info >= (3, 8): @@ -788,5 +785,10 @@ if sys.version_info >= (3, 8): # the 5th tuple item is an address def getaddrinfo( - host: bytes | str | None, port: str | int | None, family: int = ..., type: int = ..., proto: int = ..., flags: int = ... 
+ host: bytes | str | None, + port: bytes | str | int | None, + family: int = ..., + type: int = ..., + proto: int = ..., + flags: int = ..., ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... diff --git a/mypy/typeshed/stdlib/socketserver.pyi b/mypy/typeshed/stdlib/socketserver.pyi index e597818ef7da..b5147d356ffe 100644 --- a/mypy/typeshed/stdlib/socketserver.pyi +++ b/mypy/typeshed/stdlib/socketserver.pyi @@ -1,6 +1,7 @@ import sys import types -from _typeshed import Self +from _socket import _Address, _RetAddress +from _typeshed import ReadableBuffer, Self from collections.abc import Callable from socket import socket as _socket from typing import Any, BinaryIO, ClassVar, Union @@ -29,38 +30,39 @@ if sys.platform != "win32": ] _RequestType: TypeAlias = Union[_socket, tuple[bytes, _socket]] -_AddressType: TypeAlias = Union[tuple[str, int], str] +_AfUnixAddress: TypeAlias = str | ReadableBuffer # adddress acceptable for an AF_UNIX socket +_AfInetAddress: TypeAlias = tuple[str | bytes | bytearray, int] # address acceptable for an AF_INET socket # This can possibly be generic at some point: class BaseServer: address_family: int - server_address: tuple[str, int] + server_address: _Address socket: _socket allow_reuse_address: bool request_queue_size: int socket_type: int timeout: float | None def __init__( - self: Self, server_address: Any, RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler] + self: Self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] ) -> None: ... # It is not actually a `@property`, but we need a `Self` type: @property - def RequestHandlerClass(self: Self) -> Callable[[Any, Any, Self], BaseRequestHandler]: ... + def RequestHandlerClass(self: Self) -> Callable[[Any, _RetAddress, Self], BaseRequestHandler]: ... 
@RequestHandlerClass.setter - def RequestHandlerClass(self: Self, val: Callable[[Any, Any, Self], BaseRequestHandler]) -> None: ... + def RequestHandlerClass(self: Self, val: Callable[[Any, _RetAddress, Self], BaseRequestHandler]) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... def serve_forever(self, poll_interval: float = ...) -> None: ... def shutdown(self) -> None: ... def server_close(self) -> None: ... - def finish_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def get_request(self) -> tuple[Any, Any]: ... - def handle_error(self, request: _RequestType, client_address: _AddressType) -> None: ... + def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ... def handle_timeout(self) -> None: ... - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_activate(self) -> None: ... def server_bind(self) -> None: ... - def verify_request(self, request: _RequestType, client_address: _AddressType) -> bool: ... + def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ... def __enter__(self: Self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None @@ -72,32 +74,35 @@ class BaseServer: class TCPServer(BaseServer): if sys.version_info >= (3, 11): allow_reuse_port: bool + server_address: _AfInetAddress # type: ignore[assignment] def __init__( self: Self, - server_address: tuple[str, int], - RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], + server_address: _AfInetAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = ..., ) -> None: ... 
- def get_request(self) -> tuple[_socket, Any]: ... + def get_request(self) -> tuple[_socket, _RetAddress]: ... class UDPServer(TCPServer): max_packet_size: ClassVar[int] - def get_request(self) -> tuple[tuple[bytes, _socket], Any]: ... # type: ignore[override] + def get_request(self) -> tuple[tuple[bytes, _socket], _RetAddress]: ... # type: ignore[override] if sys.platform != "win32": class UnixStreamServer(BaseServer): + server_address: _AfUnixAddress # type: ignore[assignment] def __init__( self: Self, - server_address: str | bytes, - RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = ..., ) -> None: ... class UnixDatagramServer(BaseServer): + server_address: _AfUnixAddress # type: ignore[assignment] def __init__( self: Self, - server_address: str | bytes, - RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = ..., ) -> None: ... @@ -110,14 +115,14 @@ if sys.platform != "win32": def collect_children(self, *, blocking: bool = ...) -> None: ... # undocumented def handle_timeout(self) -> None: ... # undocumented def service_actions(self) -> None: ... # undocumented - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_close(self) -> None: ... class ThreadingMixIn: daemon_threads: bool block_on_close: bool - def process_request_thread(self, request: _RequestType, client_address: _AddressType) -> None: ... # undocumented - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: ... 
# undocumented + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_close(self) -> None: ... if sys.platform != "win32": @@ -132,16 +137,16 @@ if sys.platform != "win32": class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... class BaseRequestHandler: - # Those are technically of types, respectively: - # * _RequestType - # * _AddressType - # But there are some concerns that having unions here would cause + # `request` is technically of type _RequestType, + # but there are some concerns that having a union here would cause # too much inconvenience to people using it (see # https://github.com/python/typeshed/pull/384#issuecomment-234649696) + # + # Note also that _RetAddress is also just an alias for `Any` request: Any - client_address: Any + client_address: _RetAddress server: BaseServer - def __init__(self, request: _RequestType, client_address: _AddressType, server: BaseServer) -> None: ... + def __init__(self, request: _RequestType, client_address: _RetAddress, server: BaseServer) -> None: ... def setup(self) -> None: ... def handle(self) -> None: ... def finish(self) -> None: ... diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index 189e796de109..efda3b671ed5 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -1,6 +1,6 @@ import sqlite3 import sys -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, SupportsLenAndGetItem +from _typeshed import Incomplete, ReadableBuffer, Self, StrOrBytesPath, SupportsLenAndGetItem from collections.abc import Callable, Generator, Iterable, Iterator, Mapping from datetime import date, datetime, time from types import TracebackType @@ -227,9 +227,9 @@ else: if sys.version_info < (3, 8): class Cache: - def __init__(self, *args, **kwargs) -> None: ... - def display(self, *args, **kwargs) -> None: ... - def get(self, *args, **kwargs) -> None: ... 
+ def __init__(self, *args: Incomplete, **kwargs: object) -> None: ... + def display(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... + def get(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... class _AggregateProtocol(Protocol): def step(self, __value: int) -> object: ... @@ -437,7 +437,7 @@ if sys.version_info >= (3, 8): else: @final class Statement: - def __init__(self, *args, **kwargs): ... + def __init__(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... _Statement: TypeAlias = Statement class Warning(Exception): ... @@ -447,7 +447,7 @@ if sys.version_info >= (3, 11): class Blob: def close(self) -> None: ... def read(self, __length: int = ...) -> bytes: ... - def write(self, __data: bytes) -> None: ... + def write(self, __data: ReadableBuffer) -> None: ... def tell(self) -> int: ... # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END def seek(self, __offset: int, __origin: int = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 6443a6ea61ba..6d7df5e1c202 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -11,7 +11,7 @@ _PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] _PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT] _PeerCertRetType: TypeAlias = _PeerCertRetDictType | bytes | None _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] -_PasswordType: TypeAlias = Union[Callable[[], str | bytes], str, bytes] +_PasswordType: TypeAlias = Union[Callable[[], str | bytes | bytearray], str, bytes, bytearray] _SrvnmeCbType: TypeAlias = Callable[[SSLSocket | SSLObject, str | None, SSLSocket], int | None] @@ -61,29 +61,48 @@ def create_default_context( *, cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., - cadata: str | bytes | None = ..., -) -> SSLContext: ... 
-def _create_unverified_context( - protocol: int = ..., - *, - cert_reqs: int = ..., - check_hostname: bool = ..., - purpose: Purpose = ..., - certfile: StrOrBytesPath | None = ..., - keyfile: StrOrBytesPath | None = ..., - cafile: StrOrBytesPath | None = ..., - capath: StrOrBytesPath | None = ..., - cadata: str | bytes | None = ..., + cadata: str | ReadableBuffer | None = ..., ) -> SSLContext: ... +if sys.version_info >= (3, 10): + def _create_unverified_context( + protocol: int | None = None, + *, + cert_reqs: int = ..., + check_hostname: bool = ..., + purpose: Purpose = ..., + certfile: StrOrBytesPath | None = ..., + keyfile: StrOrBytesPath | None = ..., + cafile: StrOrBytesPath | None = ..., + capath: StrOrBytesPath | None = ..., + cadata: str | ReadableBuffer | None = ..., + ) -> SSLContext: ... + +else: + def _create_unverified_context( + protocol: int = ..., + *, + cert_reqs: int = ..., + check_hostname: bool = ..., + purpose: Purpose = ..., + certfile: StrOrBytesPath | None = ..., + keyfile: StrOrBytesPath | None = ..., + cafile: StrOrBytesPath | None = ..., + capath: StrOrBytesPath | None = ..., + cadata: str | ReadableBuffer | None = ..., + ) -> SSLContext: ... + _create_default_https_context: Callable[..., SSLContext] def RAND_bytes(__num: int) -> bytes: ... def RAND_pseudo_bytes(__num: int) -> tuple[bytes, bool]: ... def RAND_status() -> bool: ... def RAND_egd(path: str) -> None: ... -def RAND_add(__s: bytes, __entropy: float) -> None: ... -def match_hostname(cert: _PeerCertRetType, hostname: str) -> None: ... +def RAND_add(__string: str | ReadableBuffer, __entropy: float) -> None: ... + +if sys.version_info < (3, 12): + def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ... + def cert_time_to_seconds(cert_time: str) -> int: ... if sys.version_info >= (3, 10): @@ -94,7 +113,7 @@ if sys.version_info >= (3, 10): else: def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = ...) -> str: ... 
-def DER_cert_to_PEM_cert(der_cert_bytes: bytes) -> str: ... +def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: ... def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... class DefaultVerifyPaths(NamedTuple): @@ -177,6 +196,8 @@ class Options(enum.IntFlag): OP_NO_RENEGOTIATION: int if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: int + if sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: int OP_ALL: Options OP_NO_SSLv2: Options @@ -193,6 +214,8 @@ OP_NO_TICKET: Options OP_NO_RENEGOTIATION: Options if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: Options + if sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: Options HAS_NEVER_CHECK_COMMON_NAME: bool HAS_SSLv2: bool @@ -290,8 +313,8 @@ class SSLSocket(socket.socket): @property def session_reused(self) -> bool | None: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def connect(self, addr: socket._Address | bytes) -> None: ... - def connect_ex(self, addr: socket._Address | bytes) -> int: ... + def connect(self, addr: socket._Address) -> None: ... + def connect_ex(self, addr: socket._Address) -> int: ... def recv(self, buflen: int = ..., flags: int = ...) -> bytes: ... def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = ..., flags: int = ...) -> int: ... def recvfrom(self, buflen: int = ..., flags: int = ...) -> tuple[bytes, socket._RetAddress]: ... @@ -301,12 +324,12 @@ class SSLSocket(socket.socket): def send(self, data: ReadableBuffer, flags: int = ...) -> int: ... def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ... @overload - def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address) -> int: ... + def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address, addr: None = ...) -> int: ... @overload - def sendto(self, data: ReadableBuffer, flags_or_addr: int | socket._Address, addr: socket._Address | None = ...) -> int: ... 
+ def sendto(self, data: ReadableBuffer, flags_or_addr: int, addr: socket._Address) -> int: ... def shutdown(self, how: int) -> None: ... def read(self, len: int = ..., buffer: bytearray | None = ...) -> bytes: ... - def write(self, data: bytes) -> int: ... + def write(self, data: ReadableBuffer) -> int: ... def do_handshake(self, block: bool = ...) -> None: ... # block is undocumented @overload def getpeercert(self, binary_form: Literal[False] = ...) -> _PeerCertRetDictType | None: ... @@ -362,7 +385,7 @@ class SSLContext: ) -> None: ... def load_default_certs(self, purpose: Purpose = ...) -> None: ... def load_verify_locations( - self, cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., cadata: str | bytes | None = ... + self, cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., cadata: str | ReadableBuffer | None = ... ) -> None: ... @overload def get_ca_certs(self, binary_form: Literal[False] = ...) -> list[_PeerCertRetDictType]: ... @@ -408,7 +431,7 @@ class SSLObject: def session_reused(self) -> bool: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... def read(self, len: int = ..., buffer: bytearray | None = ...) -> bytes: ... - def write(self, data: bytes) -> int: ... + def write(self, data: ReadableBuffer) -> int: ... @overload def getpeercert(self, binary_form: Literal[False] = ...) -> _PeerCertRetDictType | None: ... @overload @@ -433,16 +456,21 @@ class MemoryBIO: pending: int eof: bool def read(self, __size: int = ...) -> bytes: ... - def write(self, __buf: bytes) -> int: ... + def write(self, __buf: ReadableBuffer) -> int: ... def write_eof(self) -> None: ... @final class SSLSession: - id: bytes - time: int - timeout: int - ticket_lifetime_hint: int - has_ticket: bool + @property + def has_ticket(self) -> bool: ... + @property + def id(self) -> bytes: ... + @property + def ticket_lifetime_hint(self) -> int: ... + @property + def time(self) -> int: ... + @property + def timeout(self) -> int: ... 
class SSLErrorNumber(enum.IntEnum): SSL_ERROR_EOF: int diff --git a/mypy/typeshed/stdlib/string.pyi b/mypy/typeshed/stdlib/string.pyi index 6fb803fe53be..49802ce81019 100644 --- a/mypy/typeshed/stdlib/string.pyi +++ b/mypy/typeshed/stdlib/string.pyi @@ -64,11 +64,20 @@ class Formatter: ) -> LiteralString: ... @overload def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... + def _vformat( # undocumented + self, + format_string: str, + args: Sequence[Any], + kwargs: Mapping[str, Any], + used_args: set[int | str], + recursion_depth: int, + auto_arg_index: int = ..., + ) -> tuple[str, int]: ... def parse( self, format_string: StrOrLiteralStr ) -> Iterable[tuple[StrOrLiteralStr, StrOrLiteralStr | None, StrOrLiteralStr | None, StrOrLiteralStr | None]]: ... def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def get_value(self, key: int | str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... - def check_unused_args(self, used_args: Sequence[int | str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... + def check_unused_args(self, used_args: set[int | str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... def format_field(self, value: Any, format_spec: str) -> Any: ... def convert_field(self, value: Any, conversion: str) -> Any: ... diff --git a/mypy/typeshed/stdlib/struct.pyi b/mypy/typeshed/stdlib/struct.pyi index 74afddd74262..02097384e0f7 100644 --- a/mypy/typeshed/stdlib/struct.pyi +++ b/mypy/typeshed/stdlib/struct.pyi @@ -6,8 +6,8 @@ __all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpac class error(Exception): ... -def pack(fmt: str | bytes, *v: Any) -> bytes: ... -def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... +def pack(__fmt: str | bytes, *v: Any) -> bytes: ... +def pack_into(__fmt: str | bytes, __buffer: WriteableBuffer, __offset: int, *v: Any) -> None: ... 
def unpack(__format: str | bytes, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... def unpack_from(__format: str | bytes, buffer: ReadableBuffer, offset: int = ...) -> tuple[Any, ...]: ... def iter_unpack(__format: str | bytes, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi index 25b988adc52d..c0b10a7781c3 100644 --- a/mypy/typeshed/stdlib/subprocess.pyi +++ b/mypy/typeshed/stdlib/subprocess.pyi @@ -1,6 +1,6 @@ import sys -from _typeshed import Self, StrOrBytesPath -from collections.abc import Callable, Iterable, Mapping, Sequence +from _typeshed import ReadableBuffer, Self, StrOrBytesPath +from collections.abc import Callable, Collection, Iterable, Mapping, Sequence from types import TracebackType from typing import IO, Any, AnyStr, Generic, TypeVar, overload from typing_extensions import Literal, TypeAlias @@ -63,13 +63,13 @@ if sys.platform == "win32": # except TimeoutError as e: # reveal_type(e.cmd) # Any, but morally is _CMD _FILE: TypeAlias = None | int | IO[Any] -_TXT: TypeAlias = bytes | str +_InputString: TypeAlias = ReadableBuffer | str if sys.version_info >= (3, 8): _CMD: TypeAlias = StrOrBytesPath | Sequence[StrOrBytesPath] else: # Python 3.7 doesn't support _CMD being a single PathLike. # See: https://bugs.python.org/issue31961 - _CMD: TypeAlias = _TXT | Sequence[StrOrBytesPath] + _CMD: TypeAlias = str | bytes | Sequence[StrOrBytesPath] if sys.platform == "win32": _ENV: TypeAlias = Mapping[str, str] else: @@ -97,7 +97,7 @@ class CompletedProcess(Generic[_T]): args: _CMD, returncode: int, stdout: _T | None = ..., # pyright: ignore[reportInvalidTypeVarUse] - stderr: _T | None = ..., # pyright: ignore[reportInvalidTypeVarUse] + stderr: _T | None = ..., ) -> None: ... def check_returncode(self) -> None: ... 
if sys.version_info >= (3, 9): @@ -118,12 +118,12 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -152,12 +152,12 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -186,12 +186,12 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -226,7 +226,7 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start capture_output: bool = ..., check: bool = ..., @@ -255,18 +255,18 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, 
capture_output: bool = ..., check: bool = ..., encoding: None = ..., errors: None = ..., - input: bytes | None = ..., + input: ReadableBuffer | None = ..., text: Literal[None, False] = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -289,18 +289,18 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., text: bool | None = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -326,12 +326,12 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -359,12 +359,12 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -392,12 +392,12 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., 
restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -431,7 +431,7 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start capture_output: bool = ..., check: bool = ..., @@ -459,18 +459,18 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., encoding: None = ..., errors: None = ..., - input: bytes | None = ..., + input: ReadableBuffer | None = ..., text: Literal[None, False] = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -492,18 +492,18 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., text: bool | None = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -528,12 +528,12 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., 
restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -560,12 +560,12 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -592,12 +592,12 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -630,7 +630,7 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start capture_output: bool = ..., check: bool = ..., @@ -657,18 +657,18 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., encoding: None = ..., errors: None = ..., - input: bytes | None = ..., + input: ReadableBuffer | None = ..., text: Literal[None, False] = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -689,18 +689,18 @@ elif sys.version_info >= 
(3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., text: bool | None = ..., timeout: float | None = ..., user: str | int | None = ..., @@ -723,12 +723,12 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -751,12 +751,12 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -779,12 +779,12 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., @@ -813,7 +813,7 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start 
capture_output: bool = ..., check: bool = ..., @@ -836,18 +836,18 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., encoding: None = ..., errors: None = ..., - input: bytes | None = ..., + input: ReadableBuffer | None = ..., text: Literal[None, False] = ..., timeout: float | None = ..., ) -> CompletedProcess[bytes]: ... @@ -864,18 +864,18 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, capture_output: bool = ..., check: bool = ..., encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., text: bool | None = ..., timeout: float | None = ..., ) -> CompletedProcess[Any]: ... 
@@ -895,12 +895,12 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., text: bool | None = ..., @@ -926,12 +926,12 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., text: bool | None = ..., @@ -956,12 +956,12 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., text: bool | None = ..., @@ -984,12 +984,12 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., text: bool | None = ..., @@ -1010,12 +1010,12 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., 
start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1041,12 +1041,12 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1071,12 +1071,12 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1099,12 +1099,12 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1124,15 +1124,15 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1155,15 +1155,15 @@ if 
sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1186,15 +1186,15 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1223,10 +1223,10 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1249,15 +1249,15 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: None = ..., errors: None = ..., 
text: Literal[None, False] = ..., @@ -1280,15 +1280,15 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1298,7 +1298,7 @@ if sys.version_info >= (3, 11): umask: int = ..., pipesize: int = ..., process_group: int | None = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... # morally: -> str | bytes elif sys.version_info >= (3, 10): # 3.10 adds "pipesize" argument @@ -1314,15 +1314,15 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1344,15 +1344,15 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1374,15 +1374,15 @@ elif sys.version_info >= (3, 
10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1410,10 +1410,10 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1435,15 +1435,15 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: None = ..., errors: None = ..., text: Literal[None, False] = ..., @@ -1465,15 +1465,15 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., 
text: bool | None = ..., @@ -1482,7 +1482,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = ..., umask: int = ..., pipesize: int = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... # morally: -> str | bytes elif sys.version_info >= (3, 9): # 3.9 adds arguments "user", "group", "extra_groups" and "umask" @@ -1498,15 +1498,15 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1527,15 +1527,15 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1556,15 +1556,15 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1591,10 +1591,10 @@ elif 
sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1615,15 +1615,15 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: None = ..., errors: None = ..., text: Literal[None, False] = ..., @@ -1644,15 +1644,15 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1660,7 +1660,7 @@ elif sys.version_info >= (3, 9): group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., umask: int = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... 
# morally: -> str | bytes else: @overload @@ -1675,15 +1675,15 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1700,15 +1700,15 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1725,15 +1725,15 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1756,10 +1756,10 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1776,15 +1776,15 @@ 
else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: None = ..., errors: None = ..., text: Literal[None, False] = ..., @@ -1801,19 +1801,19 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, timeout: float | None = ..., - input: _TXT | None = ..., + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... # morally: -> str | bytes PIPE: int STDOUT: int @@ -1822,11 +1822,11 @@ DEVNULL: int class SubprocessError(Exception): ... class TimeoutExpired(SubprocessError): - def __init__(self, cmd: _CMD, timeout: float, output: _TXT | None = ..., stderr: _TXT | None = ...) -> None: ... + def __init__(self, cmd: _CMD, timeout: float, output: str | bytes | None = ..., stderr: str | bytes | None = ...) -> None: ... 
# morally: _CMD cmd: Any timeout: float - # morally: _TXT | None + # morally: str | bytes | None output: Any stdout: bytes | None stderr: bytes | None @@ -1835,13 +1835,15 @@ class CalledProcessError(SubprocessError): returncode: int # morally: _CMD cmd: Any - # morally: _TXT | None + # morally: str | bytes | None output: Any - # morally: _TXT | None + # morally: str | bytes | None stdout: Any stderr: Any - def __init__(self, returncode: int, cmd: _CMD, output: _TXT | None = ..., stderr: _TXT | None = ...) -> None: ... + def __init__( + self, returncode: int, cmd: _CMD, output: str | bytes | None = ..., stderr: str | bytes | None = ... + ) -> None: ... class Popen(Generic[AnyStr]): args: _CMD @@ -1868,12 +1870,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str, @@ -1899,12 +1901,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -1936,7 +1938,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start text: bool | None = ..., encoding: str | None = ..., @@ -1962,12 +1964,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - 
universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[True], encoding: str | None = ..., @@ -1993,12 +1995,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[None, False] = ..., encoding: None = ..., @@ -2024,12 +2026,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2057,12 +2059,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str, @@ -2087,12 +2089,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = 
..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2123,7 +2125,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start text: bool | None = ..., encoding: str | None = ..., @@ -2148,12 +2150,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[True], encoding: str | None = ..., @@ -2178,12 +2180,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[None, False] = ..., encoding: None = ..., @@ -2208,12 +2210,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2240,12 +2242,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = 
..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str, @@ -2269,12 +2271,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2304,7 +2306,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start text: bool | None = ..., encoding: str | None = ..., @@ -2328,12 +2330,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[True], encoding: str | None = ..., @@ -2357,12 +2359,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[None, False] = ..., encoding: None = ..., @@ -2386,12 +2388,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., 
restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2416,12 +2418,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str, @@ -2441,12 +2443,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2472,7 +2474,7 @@ class Popen(Generic[AnyStr]): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start text: bool | None = ..., encoding: str | None = ..., @@ -2492,12 +2494,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[True], encoding: str | None = ..., @@ -2517,12 +2519,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: 
Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: Literal[None, False] = ..., encoding: None = ..., @@ -2542,12 +2544,12 @@ class Popen(Generic[AnyStr]): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any | None = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, text: bool | None = ..., encoding: str | None = ..., @@ -2556,13 +2558,10 @@ class Popen(Generic[AnyStr]): def poll(self) -> int | None: ... def wait(self, timeout: float | None = ...) -> int: ... - # Return str/bytes - def communicate( - self, - input: AnyStr | None = ..., - timeout: float | None = ..., - # morally this should be optional - ) -> tuple[AnyStr, AnyStr]: ... + # morally the members of the returned tuple should be optional + # TODO this should allow ReadableBuffer for Popen[bytes], but adding + # overloads for that runs into a mypy bug (python/mypy#14070). + def communicate(self, input: AnyStr | None = ..., timeout: float | None = ...) -> tuple[AnyStr, AnyStr]: ... def send_signal(self, sig: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... @@ -2575,12 +2574,12 @@ class Popen(Generic[AnyStr]): # The result really is always a str. if sys.version_info >= (3, 11): - def getstatusoutput(cmd: _TXT, *, encoding: str | None = ..., errors: str | None = ...) -> tuple[int, str]: ... - def getoutput(cmd: _TXT, *, encoding: str | None = ..., errors: str | None = ...) -> str: ... + def getstatusoutput(cmd: str | bytes, *, encoding: str | None = ..., errors: str | None = ...) -> tuple[int, str]: ... + def getoutput(cmd: str | bytes, *, encoding: str | None = ..., errors: str | None = ...) -> str: ... 
else: - def getstatusoutput(cmd: _TXT) -> tuple[int, str]: ... - def getoutput(cmd: _TXT) -> str: ... + def getstatusoutput(cmd: str | bytes) -> tuple[int, str]: ... + def getoutput(cmd: str | bytes) -> str: ... if sys.version_info >= (3, 8): def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/sysconfig.pyi b/mypy/typeshed/stdlib/sysconfig.pyi index 895abc2cd047..4b6257b5f62e 100644 --- a/mypy/typeshed/stdlib/sysconfig.pyi +++ b/mypy/typeshed/stdlib/sysconfig.pyi @@ -32,7 +32,13 @@ def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = ..., ex def get_paths(scheme: str = ..., vars: dict[str, Any] | None = ..., expand: bool = ...) -> dict[str, str]: ... def get_python_version() -> str: ... def get_platform() -> str: ... -def is_python_build(check_home: bool = ...) -> bool: ... + +if sys.version_info >= (3, 11): + def is_python_build(check_home: object = None) -> bool: ... + +else: + def is_python_build(check_home: bool = False) -> bool: ... + def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = ...) -> dict[str, Any]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... diff --git a/mypy/typeshed/stdlib/tarfile.pyi b/mypy/typeshed/stdlib/tarfile.pyi index 8855e1a953db..5ad5af7f20bd 100644 --- a/mypy/typeshed/stdlib/tarfile.pyi +++ b/mypy/typeshed/stdlib/tarfile.pyi @@ -346,7 +346,7 @@ class TarInfo: pax_headers: Mapping[str, str] def __init__(self, name: str = ...) -> None: ... @classmethod - def frombuf(cls: Type[Self], buf: bytes, encoding: str, errors: str) -> Self: ... + def frombuf(cls: Type[Self], buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... @classmethod def fromtarfile(cls: Type[Self], tarfile: TarFile) -> Self: ... 
@property diff --git a/mypy/typeshed/stdlib/termios.pyi b/mypy/typeshed/stdlib/termios.pyi index 494162a49b38..bf8d7bee2473 100644 --- a/mypy/typeshed/stdlib/termios.pyi +++ b/mypy/typeshed/stdlib/termios.pyi @@ -4,9 +4,9 @@ from typing import Any from typing_extensions import TypeAlias if sys.platform != "win32": + # Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints. _Attr: TypeAlias = list[int | list[bytes | int]] - # TODO constants not really documented B0: int B1000000: int B110: int @@ -44,17 +44,22 @@ if sys.platform != "win32": BSDLY: int CBAUD: int CBAUDEX: int + CDEL: int CDSUSP: int CEOF: int CEOL: int + CEOL2: int CEOT: int CERASE: int + CESC: int CFLUSH: int CIBAUD: int CINTR: int CKILL: int CLNEXT: int CLOCAL: int + CNUL: int + COMMON: int CQUIT: int CR0: int CR1: int @@ -73,6 +78,7 @@ if sys.platform != "win32": CSTOP: int CSTOPB: int CSUSP: int + CSWTCH: int CWERASE: int ECHO: int ECHOCTL: int @@ -93,6 +99,7 @@ if sys.platform != "win32": FIONREAD: int FLUSHO: int HUPCL: int + IBSHIFT: int ICANON: int ICRNL: int IEXTEN: int @@ -100,6 +107,7 @@ if sys.platform != "win32": IGNCR: int IGNPAR: int IMAXBEL: int + INIT_C_CC: int INLCR: int INPCK: int IOCSIZE_MASK: int @@ -110,17 +118,18 @@ if sys.platform != "win32": IXANY: int IXOFF: int IXON: int + N_MOUSE: int + N_PPP: int + N_SLIP: int + N_STRIP: int + N_TTY: int NCC: int NCCS: int NL0: int NL1: int NLDLY: int NOFLSH: int - N_MOUSE: int - N_PPP: int - N_SLIP: int - N_STRIP: int - N_TTY: int + NSWTCH: int OCRNL: int OFDEL: int OFILL: int @@ -151,6 +160,7 @@ if sys.platform != "win32": TCSADRAIN: int TCSAFLUSH: int TCSANOW: int + TCSASOFT: int TCSBRK: int TCSBRKP: int TCSETA: int @@ -167,15 +177,11 @@ if sys.platform != "win32": TIOCGLCKTRMIOS: int TIOCGPGRP: int TIOCGSERIAL: int + TIOCGSIZE: int TIOCGSOFTCAR: int TIOCGWINSZ: int TIOCINQ: int TIOCLINUX: int - TIOCMBIC: int - TIOCMBIS: int - TIOCMGET: int - TIOCMIWAIT: int - TIOCMSET: int TIOCM_CAR: int TIOCM_CD: 
int TIOCM_CTS: int @@ -187,10 +193,14 @@ if sys.platform != "win32": TIOCM_RTS: int TIOCM_SR: int TIOCM_ST: int + TIOCMBIC: int + TIOCMBIS: int + TIOCMGET: int + TIOCMIWAIT: int + TIOCMSET: int TIOCNOTTY: int TIOCNXCL: int TIOCOUTQ: int - TIOCPKT: int TIOCPKT_DATA: int TIOCPKT_DOSTOP: int TIOCPKT_FLUSHREAD: int @@ -198,7 +208,9 @@ if sys.platform != "win32": TIOCPKT_NOSTOP: int TIOCPKT_START: int TIOCPKT_STOP: int + TIOCPKT: int TIOCSCTTY: int + TIOCSER_TEMT: int TIOCSERCONFIG: int TIOCSERGETLSR: int TIOCSERGETMULTI: int @@ -206,14 +218,15 @@ if sys.platform != "win32": TIOCSERGWILD: int TIOCSERSETMULTI: int TIOCSERSWILD: int - TIOCSER_TEMT: int TIOCSETD: int TIOCSLCKTRMIOS: int TIOCSPGRP: int TIOCSSERIAL: int + TIOCSSIZE: int TIOCSSOFTCAR: int TIOCSTI: int TIOCSWINSZ: int + TIOCTTYGSTRUCT: int TOSTOP: int VDISCARD: int VEOF: int @@ -238,7 +251,8 @@ if sys.platform != "win32": VWERASE: int XCASE: int XTABS: int - def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... + + def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... # Returns _Attr; we use Any to avoid a union in the return type def tcsetattr(__fd: FileDescriptorLike, __when: int, __attributes: _Attr) -> None: ... def tcsendbreak(__fd: FileDescriptorLike, __duration: int) -> None: ... def tcdrain(__fd: FileDescriptorLike) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/commondialog.pyi b/mypy/typeshed/stdlib/tkinter/commondialog.pyi index 49101c7e6089..edae62582237 100644 --- a/mypy/typeshed/stdlib/tkinter/commondialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/commondialog.pyi @@ -10,5 +10,5 @@ class Dialog: command: ClassVar[str | None] master: Incomplete | None options: Mapping[str, Incomplete] - def __init__(self, master: Incomplete | None = ..., **options) -> None: ... - def show(self, **options): ... + def __init__(self, master: Incomplete | None = ..., **options: Incomplete) -> None: ... + def show(self, **options: Incomplete) -> Incomplete: ... 
diff --git a/mypy/typeshed/stdlib/tkinter/dialog.pyi b/mypy/typeshed/stdlib/tkinter/dialog.pyi index ef7713f40994..032dac2c15a2 100644 --- a/mypy/typeshed/stdlib/tkinter/dialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/dialog.pyi @@ -12,5 +12,5 @@ DIALOG_ICON: str class Dialog(Widget): widgetName: str num: int - def __init__(self, master: Incomplete | None = ..., cnf: Mapping[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: Incomplete | None = ..., cnf: Mapping[str, Any] = ..., **kw: Incomplete) -> None: ... def destroy(self) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/dnd.pyi b/mypy/typeshed/stdlib/tkinter/dnd.pyi index e2cfc43f606a..ad7972968f81 100644 --- a/mypy/typeshed/stdlib/tkinter/dnd.pyi +++ b/mypy/typeshed/stdlib/tkinter/dnd.pyi @@ -16,4 +16,4 @@ class DndHandler: def on_motion(self, event: Event[Misc]) -> None: ... def on_release(self, event: Event[Misc]) -> None: ... -def dnd_start(source, event) -> DndHandler | None: ... +def dnd_start(source: _DndSource, event: Event[Misc]) -> DndHandler | None: ... diff --git a/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi b/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi index 72f6ca8c0687..4d8a7004c6b9 100644 --- a/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi +++ b/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi @@ -1,3 +1,4 @@ +from _typeshed import Incomplete from tkinter import Frame, Misc, Scrollbar, Text __all__ = ["ScrolledText"] @@ -6,4 +7,4 @@ __all__ = ["ScrolledText"] class ScrolledText(Text): frame: Frame vbar: Scrollbar - def __init__(self, master: Misc | None = ..., **kwargs) -> None: ... + def __init__(self, master: Misc | None = ..., **kwargs: Incomplete) -> None: ... 
diff --git a/mypy/typeshed/stdlib/tokenize.pyi b/mypy/typeshed/stdlib/tokenize.pyi index 6f242a6cd1ef..ba57402fb845 100644 --- a/mypy/typeshed/stdlib/tokenize.pyi +++ b/mypy/typeshed/stdlib/tokenize.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import StrOrBytesPath +from _typeshed import FileDescriptorOrPath from collections.abc import Callable, Generator, Iterable, Sequence from re import Pattern from token import * @@ -122,10 +122,10 @@ class Untokenizer: # the docstring says "returns bytes" but is incorrect -- # if the ENCODING token is missing, it skips the encode def untokenize(iterable: Iterable[_Token]) -> Any: ... -def detect_encoding(readline: Callable[[], bytes]) -> tuple[str, Sequence[bytes]]: ... -def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ... +def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ... +def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ... def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... # undocumented -def open(filename: StrOrBytesPath | int) -> TextIO: ... +def open(filename: FileDescriptorOrPath) -> TextIO: ... def group(*choices: str) -> str: ... # undocumented def any(*choices: str) -> str: ... # undocumented def maybe(*choices: str) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 16fe096d3117..e3e6418347b1 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -68,6 +68,9 @@ _V_co = TypeVar("_V_co", covariant=True) @final class _Cell: + if sys.version_info >= (3, 8): + def __init__(self, __contents: object = ...) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] cell_contents: Any @@ -307,6 +310,7 @@ class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): def __getitem__(self, __key: _KT) -> _VT_co: ... def __iter__(self) -> Iterator[_KT]: ... 
def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... def copy(self) -> dict[_KT, _VT_co]: ... def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT_co]: ... @@ -569,7 +573,7 @@ _P = ParamSpec("_P") # it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable # The type: ignore is due to overlapping overloads, not the use of ParamSpec @overload -def coroutine(func: Callable[_P, Generator[_R, Any, Any]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[misc] +def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[misc] @overload def coroutine(func: _Fn) -> _Fn: ... @@ -585,13 +589,15 @@ if sys.version_info >= (3, 9): @property def __parameters__(self) -> tuple[Any, ...]: ... def __init__(self, origin: type, args: Any) -> None: ... + def __getitem__(self, __typeargs: Any) -> GenericAlias: ... if sys.version_info >= (3, 11): @property def __unpacked__(self) -> bool: ... @property def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... - def __getattr__(self, name: str) -> Any: ... # incomplete + # GenericAlias delegates attr access to `__origin__` + def __getattr__(self, name: str) -> Any: ... if sys.version_info >= (3, 10): @final diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index 954f47d14502..71018003b6d9 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -566,7 +566,7 @@ class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]): def __xor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... def __rxor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... -class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]): +class ValuesView(MappingView, Collection[_VT_co], Generic[_VT_co]): def __init__(self, mapping: Mapping[Any, _VT_co]) -> None: ... # undocumented def __contains__(self, value: object) -> bool: ... 
def __iter__(self) -> Iterator[_VT_co]: ... @@ -621,6 +621,8 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): # -- os._Environ.__ior__ # -- collections.UserDict.__ior__ # -- collections.ChainMap.__ior__ + # -- peewee.attrdict.__add__ + # -- peewee.attrdict.__iadd__ # -- weakref.WeakValueDictionary.__ior__ # -- weakref.WeakKeyDictionary.__ior__ @overload @@ -638,7 +640,9 @@ TYPE_CHECKING: bool # This differs from runtime, but better reflects the fact that in reality # classes deriving from IO use different names for the arguments. class IO(Iterator[AnyStr], Generic[AnyStr]): - # TODO use abstract properties + # At runtime these are all abstract properties, + # but making them abstract in the stub is hugely disruptive, for not much gain. + # See #8726 @property def mode(self) -> str: ... @property @@ -691,7 +695,7 @@ class BinaryIO(IO[bytes]): def __enter__(self) -> BinaryIO: ... class TextIO(IO[str]): - # TODO use abstractproperty + # See comment regarding the @properties in the `IO` class @property def buffer(self) -> BinaryIO: ... @property diff --git a/mypy/typeshed/stdlib/unicodedata.pyi b/mypy/typeshed/stdlib/unicodedata.pyi index 7337ab8789b2..4569d6584fd6 100644 --- a/mypy/typeshed/stdlib/unicodedata.pyi +++ b/mypy/typeshed/stdlib/unicodedata.pyi @@ -1,6 +1,7 @@ import sys +from _typeshed import ReadOnlyBuffer from typing import Any, TypeVar -from typing_extensions import final +from typing_extensions import Literal, TypeAlias, final ucd_3_2_0: UCD unidata_version: str @@ -16,12 +17,15 @@ def combining(__chr: str) -> int: ... def decimal(__chr: str, __default: _T = ...) -> int | _T: ... def decomposition(__chr: str) -> str: ... def digit(__chr: str, __default: _T = ...) -> int | _T: ... -def east_asian_width(__chr: str) -> str: ... + +_EastAsianWidth: TypeAlias = Literal["F", "H", "W", "Na", "A", "N"] + +def east_asian_width(__chr: str) -> _EastAsianWidth: ... 
if sys.version_info >= (3, 8): def is_normalized(__form: str, __unistr: str) -> bool: ... -def lookup(__name: str | bytes) -> str: ... +def lookup(__name: str | ReadOnlyBuffer) -> str: ... def mirrored(__chr: str) -> int: ... def name(__chr: str, __default: _T = ...) -> str | _T: ... def normalize(__form: str, __unistr: str) -> str: ... @@ -37,11 +41,11 @@ class UCD: def decimal(self, __chr: str, __default: _T = ...) -> int | _T: ... def decomposition(self, __chr: str) -> str: ... def digit(self, __chr: str, __default: _T = ...) -> int | _T: ... - def east_asian_width(self, __chr: str) -> str: ... + def east_asian_width(self, __chr: str) -> _EastAsianWidth: ... if sys.version_info >= (3, 8): def is_normalized(self, __form: str, __unistr: str) -> bool: ... - def lookup(self, __name: str | bytes) -> str: ... + def lookup(self, __name: str | ReadOnlyBuffer) -> str: ... def mirrored(self, __chr: str) -> int: ... def name(self, __chr: str, __default: _T = ...) -> str | _T: ... def normalize(self, __form: str, __unistr: str) -> str: ... diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index 200f8dbaea23..42633ed13bb8 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -104,7 +104,7 @@ class TestCase: def tearDownClass(cls) -> None: ... def run(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... def __call__(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... - def skipTest(self, reason: Any) -> None: ... + def skipTest(self, reason: Any) -> NoReturn: ... def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... def debug(self) -> None: ... 
if sys.version_info < (3, 11): @@ -157,18 +157,14 @@ class TestCase: def assertRaisesRegex( # type: ignore[misc] self, expected_exception: type[BaseException] | tuple[type[BaseException], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], + expected_regex: str | Pattern[str], callable: Callable[..., Any], *args: Any, **kwargs: Any, ) -> None: ... @overload def assertRaisesRegex( - self, - expected_exception: type[_E] | tuple[type[_E], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - *, - msg: Any = ..., + self, expected_exception: type[_E] | tuple[type[_E], ...], expected_regex: str | Pattern[str], *, msg: Any = ... ) -> _AssertRaisesContext[_E]: ... @overload def assertWarns( # type: ignore[misc] @@ -186,18 +182,14 @@ class TestCase: def assertWarnsRegex( # type: ignore[misc] self, expected_warning: type[Warning] | tuple[type[Warning], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], + expected_regex: str | Pattern[str], callable: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs, ) -> None: ... @overload def assertWarnsRegex( - self, - expected_warning: type[Warning] | tuple[type[Warning], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - *, - msg: Any = ..., + self, expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | Pattern[str], *, msg: Any = ... ) -> _AssertWarnsContext: ... def assertLogs( self, logger: str | logging.Logger | None = ..., level: int | str | None = ... 
diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 133380fce334..47535499a9f2 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import Self -from collections.abc import Awaitable, Callable, Iterable, Mapping, Sequence +from collections.abc import Awaitable, Callable, Coroutine, Iterable, Mapping, Sequence from contextlib import _GeneratorContextManager from types import TracebackType from typing import Any, Generic, TypeVar, overload @@ -9,6 +9,8 @@ from typing_extensions import Literal, TypeAlias _T = TypeVar("_T") _TT = TypeVar("_TT", bound=type[Any]) _R = TypeVar("_R") +_F = TypeVar("_F", bound=Callable[..., Any]) +_AF = TypeVar("_AF", bound=Callable[..., Coroutine[Any, Any, Any]]) if sys.version_info >= (3, 8): __all__ = ( @@ -82,7 +84,7 @@ class _Call(tuple[Any, ...]): def __eq__(self, other: object) -> bool: ... def __ne__(self, __other: object) -> bool: ... def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... - def __getattr__(self, attr: Any) -> Any: ... + def __getattr__(self, attr: str) -> Any: ... def __getattribute__(self, attr: str) -> Any: ... if sys.version_info >= (3, 8): @property @@ -258,6 +260,10 @@ class _patch_dict: clear: Any def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ... def __call__(self, f: Any) -> Any: ... + if sys.version_info >= (3, 10): + def decorate_callable(self, f: _F) -> _F: ... + def decorate_async_callable(self, f: _AF) -> _AF: ... + def decorate_class(self, klass: Any) -> Any: ... def __enter__(self) -> Any: ... def __exit__(self, *args: object) -> Any: ... @@ -300,8 +306,8 @@ class _patcher: **kwargs: Any, ) -> _patch[_Mock]: ... @overload + @staticmethod def object( # type: ignore[misc] - self, target: Any, attribute: str, new: _T, @@ -313,8 +319,8 @@ class _patcher: **kwargs: Any, ) -> _patch[_T]: ... 
@overload + @staticmethod def object( - self, target: Any, attribute: str, *, @@ -325,8 +331,8 @@ class _patcher: new_callable: Any | None = ..., **kwargs: Any, ) -> _patch[_Mock]: ... + @staticmethod def multiple( - self, target: Any, spec: Any | None = ..., create: bool = ..., @@ -335,7 +341,8 @@ class _patcher: new_callable: Any | None = ..., **kwargs: Any, ) -> _patch[Any]: ... - def stopall(self) -> None: ... + @staticmethod + def stopall() -> None: ... patch: _patcher diff --git a/mypy/typeshed/stdlib/urllib/parse.pyi b/mypy/typeshed/stdlib/urllib/parse.pyi index 207a05e75a57..8fe5d8b37ac0 100644 --- a/mypy/typeshed/stdlib/urllib/parse.pyi +++ b/mypy/typeshed/stdlib/urllib/parse.pyi @@ -1,6 +1,6 @@ import sys -from collections.abc import Callable, Mapping, Sequence -from typing import Any, AnyStr, Generic, NamedTuple, overload +from collections.abc import Callable, Iterable, Mapping, Sequence +from typing import Any, AnyStr, Generic, NamedTuple, TypeVar, overload if sys.version_info >= (3, 9): from types import GenericAlias @@ -132,14 +132,14 @@ def parse_qsl( separator: str = ..., ) -> list[tuple[AnyStr, AnyStr]]: ... @overload -def quote(string: str, safe: str | bytes = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... +def quote(string: str, safe: str | Iterable[int] = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... @overload -def quote(string: bytes, safe: str | bytes = ...) -> str: ... -def quote_from_bytes(bs: bytes, safe: str | bytes = ...) -> str: ... +def quote(string: bytes | bytearray, safe: str | Iterable[int] = ...) -> str: ... +def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = ...) -> str: ... @overload -def quote_plus(string: str, safe: str | bytes = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... +def quote_plus(string: str, safe: str | Iterable[int] = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... 
@overload -def quote_plus(string: bytes, safe: str | bytes = ...) -> str: ... +def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = ...) -> str: ... if sys.version_info >= (3, 9): def unquote(string: str | bytes, encoding: str = ..., errors: str = ...) -> str: ... @@ -147,29 +147,43 @@ if sys.version_info >= (3, 9): else: def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ... -def unquote_to_bytes(string: str | bytes) -> bytes: ... +def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ... def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: ... @overload def urldefrag(url: str) -> DefragResult: ... @overload -def urldefrag(url: bytes | None) -> DefragResultBytes: ... +def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... + +_Q = TypeVar("_Q", bound=str | Iterable[int]) + def urlencode( query: Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]], - doseq: bool = ..., - safe: str | bytes = ..., - encoding: str = ..., - errors: str = ..., - quote_via: Callable[[AnyStr, str | bytes, str, str], str] = ..., + doseq: bool = False, + safe: _Q = ..., + encoding: str | None = None, + errors: str | None = None, + quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., ) -> str: ... def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = ...) -> AnyStr: ... @overload def urlparse(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> ParseResult: ... @overload -def urlparse(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> ParseResultBytes: ... +def urlparse( + url: bytes | bytearray | None, scheme: bytes | bytearray | None = ..., allow_fragments: bool = ... +) -> ParseResultBytes: ... @overload def urlsplit(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> SplitResult: ... 
-@overload -def urlsplit(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... + +if sys.version_info >= (3, 11): + @overload + def urlsplit(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... + +else: + @overload + def urlsplit( + url: bytes | bytearray | None, scheme: bytes | bytearray | None = ..., allow_fragments: bool = ... + ) -> SplitResultBytes: ... + @overload def urlunparse( components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None] diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi index 3cd5fc740fca..00c160293762 100644 --- a/mypy/typeshed/stdlib/urllib/request.pyi +++ b/mypy/typeshed/stdlib/urllib/request.pyi @@ -1,6 +1,6 @@ import ssl import sys -from _typeshed import StrOrBytesPath, SupportsRead +from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsRead from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence from email.message import Message from http.client import HTTPConnection, HTTPMessage, HTTPResponse @@ -50,7 +50,7 @@ __all__ = [ _T = TypeVar("_T") _UrlopenRet: TypeAlias = Any -_DataType: TypeAlias = bytes | SupportsRead[bytes] | Iterable[bytes] | None +_DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | None def urlopen( url: str | Request, @@ -214,7 +214,7 @@ class AbstractDigestAuthHandler: def get_cnonce(self, nonce: str) -> str: ... def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... def get_algorithm_impls(self, algorithm: str) -> tuple[Callable[[str], str], Callable[[str, str], str]]: ... - def get_entity_digest(self, data: bytes | None, chal: Mapping[str, str]) -> str | None: ... + def get_entity_digest(self, data: ReadableBuffer | None, chal: Mapping[str, str]) -> str | None: ... 
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): auth_header: ClassVar[str] # undocumented @@ -301,14 +301,14 @@ def urlcleanup() -> None: ... class URLopener: version: ClassVar[str] def __init__(self, proxies: dict[str, str] | None = ..., **x509: str) -> None: ... - def open(self, fullurl: str, data: bytes | None = ...) -> _UrlopenRet: ... - def open_unknown(self, fullurl: str, data: bytes | None = ...) -> _UrlopenRet: ... + def open(self, fullurl: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... def retrieve( self, url: str, filename: str | None = ..., reporthook: Callable[[int, int, int], object] | None = ..., - data: bytes | None = ..., + data: ReadableBuffer | None = ..., ) -> tuple[str, Message | None]: ... def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented def cleanup(self) -> None: ... # undocumented @@ -319,32 +319,32 @@ class URLopener: def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage ) -> _UrlopenRet: ... # undocumented - def open_data(self, url: str, data: bytes | None = ...) -> addinfourl: ... # undocumented + def open_data(self, url: str, data: ReadableBuffer | None = ...) -> addinfourl: ... # undocumented def open_file(self, url: str) -> addinfourl: ... # undocumented def open_ftp(self, url: str) -> addinfourl: ... # undocumented - def open_http(self, url: str, data: bytes | None = ...) -> _UrlopenRet: ... # undocumented - def open_https(self, url: str, data: bytes | None = ...) -> _UrlopenRet: ... # undocumented + def open_http(self, url: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... # undocumented + def open_https(self, url: str, data: ReadableBuffer | None = ...) -> _UrlopenRet: ... # undocumented def open_local_file(self, url: str) -> addinfourl: ... 
# undocumented - def open_unknown_proxy(self, proxy: str, fullurl: str, data: bytes | None = ...) -> None: ... # undocumented + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = ...) -> None: ... # undocumented class FancyURLopener(URLopener): def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... def get_user_passwd(self, host: str, realm: str, clear_cache: int = ...) -> tuple[str, str]: ... # undocumented def http_error_301( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_302( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_303( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_307( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... # undocumented if sys.version_info >= (3, 11): def http_error_308( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = ... ) -> _UrlopenRet | addinfourl | None: ... 
# undocumented def http_error_401( @@ -354,7 +354,7 @@ class FancyURLopener(URLopener): errcode: int, errmsg: str, headers: HTTPMessage, - data: bytes | None = ..., + data: ReadableBuffer | None = ..., retry: bool = ..., ) -> _UrlopenRet | None: ... # undocumented def http_error_407( @@ -364,20 +364,24 @@ class FancyURLopener(URLopener): errcode: int, errmsg: str, headers: HTTPMessage, - data: bytes | None = ..., + data: ReadableBuffer | None = ..., retry: bool = ..., ) -> _UrlopenRet | None: ... # undocumented def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage ) -> addinfourl: ... # undocumented def redirect_internal( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None + ) -> _UrlopenRet | None: ... # undocumented + def retry_http_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = ... + ) -> _UrlopenRet | None: ... # undocumented + def retry_https_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = ... ) -> _UrlopenRet | None: ... # undocumented - def retry_http_basic_auth(self, url: str, realm: str, data: bytes | None = ...) -> _UrlopenRet | None: ... # undocumented - def retry_https_basic_auth(self, url: str, realm: str, data: bytes | None = ...) -> _UrlopenRet | None: ... # undocumented def retry_proxy_http_basic_auth( - self, url: str, realm: str, data: bytes | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = ... ) -> _UrlopenRet | None: ... # undocumented def retry_proxy_https_basic_auth( - self, url: str, realm: str, data: bytes | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = ... ) -> _UrlopenRet | None: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/urllib/response.pyi b/mypy/typeshed/stdlib/urllib/response.pyi index 8c9a600f3c48..ca9781dbfbb4 100644 --- a/mypy/typeshed/stdlib/urllib/response.pyi +++ b/mypy/typeshed/stdlib/urllib/response.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import Self +from _typeshed import ReadableBuffer, Self from collections.abc import Callable, Iterable from email.message import Message from types import TracebackType @@ -33,8 +33,8 @@ class addbase(BinaryIO): def tell(self) -> int: ... def truncate(self, size: int | None = ...) -> int: ... def writable(self) -> bool: ... - def write(self, s: bytes) -> int: ... - def writelines(self, lines: Iterable[bytes]) -> None: ... + def write(self, s: ReadableBuffer) -> int: ... + def writelines(self, lines: Iterable[ReadableBuffer]) -> None: ... class addclosehook(addbase): closehook: Callable[..., object] diff --git a/mypy/typeshed/stdlib/venv/__init__.pyi b/mypy/typeshed/stdlib/venv/__init__.pyi index 2e34aed4c693..dfa0b69b0870 100644 --- a/mypy/typeshed/stdlib/venv/__init__.pyi +++ b/mypy/typeshed/stdlib/venv/__init__.pyi @@ -1,8 +1,11 @@ +import logging import sys from _typeshed import StrOrBytesPath from collections.abc import Sequence from types import SimpleNamespace +logger: logging.Logger + if sys.version_info >= (3, 9): CORE_VENV_DEPS: tuple[str, ...] 
diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi index af960391e85d..9a619235e689 100644 --- a/mypy/typeshed/stdlib/weakref.pyi +++ b/mypy/typeshed/stdlib/weakref.pyi @@ -1,10 +1,5 @@ import sys from _typeshed import Self, SupportsKeysAndGetItem -from _weakrefset import WeakSet as WeakSet -from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping -from typing import Any, Generic, TypeVar, overload -from typing_extensions import ParamSpec - from _weakref import ( CallableProxyType as CallableProxyType, ProxyType as ProxyType, @@ -14,6 +9,10 @@ from _weakref import ( proxy as proxy, ref as ref, ) +from _weakrefset import WeakSet as WeakSet +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping +from typing import Any, Generic, TypeVar, overload +from typing_extensions import ParamSpec __all__ = [ "ref", diff --git a/mypy/typeshed/stdlib/winsound.pyi b/mypy/typeshed/stdlib/winsound.pyi index 588bd5969e98..fd5a552cf9c1 100644 --- a/mypy/typeshed/stdlib/winsound.pyi +++ b/mypy/typeshed/stdlib/winsound.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import ReadableBuffer from typing import overload from typing_extensions import Literal @@ -21,7 +22,7 @@ if sys.platform == "win32": def Beep(frequency: int, duration: int) -> None: ... # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible @overload - def PlaySound(sound: bytes | None, flags: Literal[4]) -> None: ... + def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ... @overload - def PlaySound(sound: str | bytes | None, flags: int) -> None: ... + def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ... def MessageBeep(type: int = ...) -> None: ... 
diff --git a/mypy/typeshed/stdlib/xml/__init__.pyi b/mypy/typeshed/stdlib/xml/__init__.pyi index c524ac2b1cfc..a487d2467f41 100644 --- a/mypy/typeshed/stdlib/xml/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/__init__.pyi @@ -1 +1 @@ -import xml.parsers as parsers +from xml import parsers as parsers diff --git a/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi b/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi index 3ca885dbbaa0..e460d6b21afa 100644 --- a/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi +++ b/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi @@ -1,4 +1,4 @@ -from _typeshed import Incomplete +from _typeshed import Incomplete, ReadableBuffer, SupportsRead from typing import Any, NoReturn from xml.dom.minidom import Document, DOMImplementation, Node, TypeInfo from xml.dom.xmlbuilder import DOMBuilderFilter, Options @@ -30,8 +30,8 @@ class ExpatBuilder: def getParser(self): ... def reset(self) -> None: ... def install(self, parser) -> None: ... - def parseFile(self, file) -> Document: ... - def parseString(self, string: str) -> Document: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: ... + def parseString(self, string: str | ReadableBuffer) -> Document: ... def start_doctype_decl_handler(self, doctypeName, systemId, publicId, has_internal_subset) -> None: ... def end_doctype_decl_handler(self) -> None: ... def pi_handler(self, target, data) -> None: ... @@ -87,14 +87,14 @@ class ParseEscape(Exception): ... class InternalSubsetExtractor(ExpatBuilder): subset: Any | None def getSubset(self) -> Any | None: ... - def parseFile(self, file) -> None: ... # type: ignore[override] - def parseString(self, string: str) -> None: ... # type: ignore[override] + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> None: ... # type: ignore[override] + def parseString(self, string: str | ReadableBuffer) -> None: ... # type: ignore[override] def start_doctype_decl_handler(self, name, publicId, systemId, has_internal_subset) -> None: ... 
# type: ignore[override] def end_doctype_decl_handler(self) -> NoReturn: ... def start_element_handler(self, name, attrs) -> NoReturn: ... -def parse(file, namespaces: bool = ...): ... -def parseString(string: str, namespaces: bool = ...): ... +def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = ...): ... +def parseString(string: str | ReadableBuffer, namespaces: bool = ...): ... def parseFragment(file, context, namespaces: bool = ...): ... def parseFragmentString(string: str, context, namespaces: bool = ...): ... def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... diff --git a/mypy/typeshed/stdlib/xml/dom/minidom.pyi b/mypy/typeshed/stdlib/xml/dom/minidom.pyi index 04086fdc81b1..5997e031fd73 100644 --- a/mypy/typeshed/stdlib/xml/dom/minidom.pyi +++ b/mypy/typeshed/stdlib/xml/dom/minidom.pyi @@ -1,12 +1,12 @@ import sys import xml.dom -from _typeshed import Incomplete, Self, SupportsRead, SupportsWrite +from _typeshed import Incomplete, ReadableBuffer, Self, SupportsRead, SupportsWrite from typing_extensions import Literal from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS from xml.sax.xmlreader import XMLReader -def parse(file: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = ..., bufsize: int | None = ...): ... -def parseString(string: str | bytes, parser: XMLReader | None = ...): ... +def parse(file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = ..., bufsize: int | None = ...): ... +def parseString(string: str | ReadableBuffer, parser: XMLReader | None = ...): ... def getDOMImplementation(features=...) -> DOMImplementation | None: ... class Node(xml.dom.Node): @@ -213,7 +213,7 @@ class CDATASection(Text): class ReadOnlySequentialNamedNodeMap: def __init__(self, seq=...) -> None: ... - def __len__(self): ... + def __len__(self) -> int: ... def getNamedItem(self, name): ... def getNamedItemNS(self, namespaceURI: str, localName): ... 
def __getitem__(self, name_or_tuple): ... diff --git a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi index 7bb78d0628ce..43b394bd67ec 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import FileDescriptorOrPath from collections.abc import Callable from xml.etree.ElementTree import Element @@ -11,7 +12,7 @@ if sys.version_info >= (3, 9): class FatalIncludeError(SyntaxError): ... -def default_loader(href: str | bytes | int, parse: str, encoding: str | None = ...) -> str | Element: ... +def default_loader(href: FileDescriptorOrPath, parse: str, encoding: str | None = ...) -> str | Element: ... # TODO: loader is of type default_loader ie it takes a callable that has the # same signature as default_loader. But default_loader has a keyword argument diff --git a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi index 84059bc21a87..2b6191a395c3 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -1,6 +1,6 @@ import sys from _collections_abc import dict_keys -from _typeshed import FileDescriptor, StrOrBytesPath, SupportsRead, SupportsWrite +from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence from typing import Any, TypeVar, overload from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard @@ -38,8 +38,8 @@ if sys.version_info >= (3, 9): __all__ += ["indent"] _T = TypeVar("_T") -_FileRead: TypeAlias = StrOrBytesPath | FileDescriptor | SupportsRead[bytes] | SupportsRead[str] -_FileWriteC14N: TypeAlias = StrOrBytesPath | FileDescriptor | SupportsWrite[bytes] +_FileRead: TypeAlias = FileDescriptorOrPath | SupportsRead[bytes] | SupportsRead[str] 
+_FileWriteC14N: TypeAlias = FileDescriptorOrPath | SupportsWrite[bytes] _FileWrite: TypeAlias = _FileWriteC14N | SupportsWrite[str] VERSION: str @@ -54,7 +54,7 @@ def iselement(element: object) -> TypeGuard[Element]: ... if sys.version_info >= (3, 8): @overload def canonicalize( - xml_data: str | bytes | None = ..., + xml_data: str | ReadableBuffer | None = ..., *, out: None = ..., from_file: _FileRead | None = ..., @@ -68,7 +68,7 @@ if sys.version_info >= (3, 8): ) -> str: ... @overload def canonicalize( - xml_data: str | bytes | None = ..., + xml_data: str | ReadableBuffer | None = ..., *, out: SupportsWrite[str], from_file: _FileRead | None = ..., @@ -270,19 +270,19 @@ def iterparse( class XMLPullParser: def __init__(self, events: Sequence[str] | None = ..., *, _parser: XMLParser | None = ...) -> None: ... - def feed(self, data: str | bytes) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... def close(self) -> None: ... # Second element in the tuple could be `Element`, `tuple[str, str]` or `None`. # Use `Any` to avoid false-positive errors. def read_events(self) -> Iterator[tuple[str, Any]]: ... -def XML(text: str | bytes, parser: XMLParser | None = ...) -> Element: ... -def XMLID(text: str | bytes, parser: XMLParser | None = ...) -> tuple[Element, dict[str, Element]]: ... +def XML(text: str | ReadableBuffer, parser: XMLParser | None = ...) -> Element: ... +def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = ...) -> tuple[Element, dict[str, Element]]: ... # This is aliased to XML in the source. fromstring = XML -def fromstringlist(sequence: Sequence[str | bytes], parser: XMLParser | None = ...) -> Element: ... +def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = ...) -> Element: ... # This type is both not precise enough and too precise. 
The TreeBuilder # requires the elementfactory to accept tag and attrs in its args and produce @@ -313,9 +313,11 @@ class TreeBuilder: def __init__(self, element_factory: _ElementFactory | None = ...) -> None: ... def close(self) -> Element: ... - def data(self, __data: str | bytes) -> None: ... - def start(self, __tag: str | bytes, __attrs: dict[str | bytes, str | bytes]) -> Element: ... - def end(self, __tag: str | bytes) -> Element: ... + def data(self, __data: str) -> None: ... + # tag and attrs are passed to the element_factory, so they could be anything + # depending on what the particular factory supports. + def start(self, __tag: Any, __attrs: dict[Any, Any]) -> Element: ... + def end(self, __tag: str) -> Element: ... if sys.version_info >= (3, 8): # These two methods have pos-only parameters in the C implementation def comment(self, __text: str | None) -> Element: ... @@ -355,4 +357,4 @@ class XMLParser: def doctype(self, __name: str, __pubid: str, __system: str) -> None: ... def close(self) -> Any: ... - def feed(self, __data: str | bytes) -> None: ... + def feed(self, __data: str | ReadableBuffer) -> None: ... 
diff --git a/mypy/typeshed/stdlib/xml/parsers/__init__.pyi b/mypy/typeshed/stdlib/xml/parsers/__init__.pyi index cac086235cba..cebdb6a30014 100644 --- a/mypy/typeshed/stdlib/xml/parsers/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/parsers/__init__.pyi @@ -1 +1 @@ -import xml.parsers.expat as expat +from xml.parsers import expat as expat diff --git a/mypy/typeshed/stdlib/xml/sax/__init__.pyi b/mypy/typeshed/stdlib/xml/sax/__init__.pyi index af4ee052480f..b8ab4d439e74 100644 --- a/mypy/typeshed/stdlib/xml/sax/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/sax/__init__.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import SupportsRead, _T_co +from _typeshed import ReadableBuffer, SupportsRead, _T_co from collections.abc import Iterable from typing import Any, NoReturn, Protocol from xml.sax.handler import ContentHandler as ContentHandler, ErrorHandler as ErrorHandler @@ -15,7 +15,7 @@ class SAXException(Exception): def __getitem__(self, ix: Any) -> NoReturn: ... class SAXParseException(SAXException): - def __init__(self, msg: str, exception: Exception, locator: Locator) -> None: ... + def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: ... def getColumnNumber(self) -> int: ... def getLineNumber(self) -> int: ... def getPublicId(self): ... @@ -36,5 +36,5 @@ else: def parse( source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], handler: ContentHandler, errorHandler: ErrorHandler = ... ) -> None: ... -def parseString(string: bytes | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... +def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... def _create_parser(parser_name: str) -> XMLReader: ... 
diff --git a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi index 517c17072b87..4480f4098635 100644 --- a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi +++ b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -53,7 +53,7 @@ class AttributesImpl: def getQNameByName(self, name): ... def getNames(self): ... def getQNames(self): ... - def __len__(self): ... + def __len__(self) -> int: ... def __getitem__(self, name): ... def keys(self): ... def __contains__(self, name): ... diff --git a/mypy/typeshed/stdlib/xmlrpc/client.pyi b/mypy/typeshed/stdlib/xmlrpc/client.pyi index 150291009f54..0e048f57844d 100644 --- a/mypy/typeshed/stdlib/xmlrpc/client.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/client.pyi @@ -2,7 +2,7 @@ import gzip import http.client import sys import time -from _typeshed import Self, SupportsRead, SupportsWrite +from _typeshed import ReadableBuffer, Self, SupportsRead, SupportsWrite, _BufferWithLen from collections.abc import Callable, Iterable, Mapping from datetime import datetime from io import BytesIO @@ -15,7 +15,20 @@ class _SupportsTimeTuple(Protocol): _DateTimeComparable: TypeAlias = DateTime | datetime | str | _SupportsTimeTuple _Marshallable: TypeAlias = ( - bool | int | float | str | bytes | None | tuple[Any, ...] | list[Any] | dict[Any, Any] | datetime | DateTime | Binary + bool + | int + | float + | str + | bytes + | bytearray + | None + | tuple[_Marshallable, ...] + # Ideally we'd use _Marshallable for list and dict, but invariance makes that impractical + | list[Any] + | dict[str, Any] + | datetime + | DateTime + | Binary ) _XMLDate: TypeAlias = int | datetime | tuple[int, ...] | time.struct_time _HostType: TypeAlias = Union[tuple[str, dict[str, str]], str] @@ -83,18 +96,18 @@ def _datetime_type(data: str) -> datetime: ... # undocumented class Binary: data: bytes - def __init__(self, data: bytes | None = ...) -> None: ... - def decode(self, data: bytes) -> None: ... 
+ def __init__(self, data: bytes | bytearray | None = ...) -> None: ... + def decode(self, data: ReadableBuffer) -> None: ... def encode(self, out: SupportsWrite[str]) -> None: ... def __eq__(self, other: object) -> bool: ... -def _binary(data: bytes) -> Binary: ... # undocumented +def _binary(data: ReadableBuffer) -> Binary: ... # undocumented WRAPPERS: tuple[type[DateTime], type[Binary]] # undocumented class ExpatParser: # undocumented def __init__(self, target: Unmarshaller) -> None: ... - def feed(self, data: str | bytes) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... def close(self) -> None: ... _WriteCallback: TypeAlias = Callable[[str], object] @@ -115,7 +128,7 @@ class Marshaller: def dump_int(self, value: int, write: _WriteCallback) -> None: ... def dump_double(self, value: float, write: _WriteCallback) -> None: ... def dump_unicode(self, value: str, write: _WriteCallback, escape: Callable[[str], str] = ...) -> None: ... - def dump_bytes(self, value: bytes, write: _WriteCallback) -> None: ... + def dump_bytes(self, value: ReadableBuffer, write: _WriteCallback) -> None: ... def dump_array(self, value: Iterable[_Marshallable], write: _WriteCallback) -> None: ... def dump_struct( self, value: Mapping[str, _Marshallable], write: _WriteCallback, escape: Callable[[str], str] = ... @@ -196,13 +209,13 @@ def dumps( allow_none: bool = ..., ) -> str: ... def loads(data: str, use_datetime: bool = ..., use_builtin_types: bool = ...) -> tuple[tuple[_Marshallable, ...], str | None]: ... -def gzip_encode(data: bytes) -> bytes: ... # undocumented -def gzip_decode(data: bytes, max_decode: int = ...) -> bytes: ... # undocumented +def gzip_encode(data: ReadableBuffer) -> bytes: ... # undocumented +def gzip_decode(data: ReadableBuffer, max_decode: int = ...) -> bytes: ... # undocumented class GzipDecodedResponse(gzip.GzipFile): # undocumented io: BytesIO - def __init__(self, response: SupportsRead[bytes]) -> None: ... 
+ def __init__(self, response: SupportsRead[ReadableBuffer]) -> None: ... class _Method: # undocumented @@ -231,17 +244,21 @@ class Transport: else: def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ...) -> None: ... - def request(self, host: _HostType, handler: str, request_body: bytes, verbose: bool = ...) -> tuple[_Marshallable, ...]: ... + def request( + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = ... + ) -> tuple[_Marshallable, ...]: ... def single_request( - self, host: _HostType, handler: str, request_body: bytes, verbose: bool = ... + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = ... ) -> tuple[_Marshallable, ...]: ... def getparser(self) -> tuple[ExpatParser, Unmarshaller]: ... def get_host_info(self, host: _HostType) -> tuple[str, list[tuple[str, str]], dict[str, str]]: ... def make_connection(self, host: _HostType) -> http.client.HTTPConnection: ... def close(self) -> None: ... - def send_request(self, host: _HostType, handler: str, request_body: bytes, debug: bool) -> http.client.HTTPConnection: ... + def send_request( + self, host: _HostType, handler: str, request_body: _BufferWithLen, debug: bool + ) -> http.client.HTTPConnection: ... def send_headers(self, connection: http.client.HTTPConnection, headers: list[tuple[str, str]]) -> None: ... - def send_content(self, connection: http.client.HTTPConnection, request_body: bytes) -> None: ... + def send_content(self, connection: http.client.HTTPConnection, request_body: _BufferWithLen) -> None: ... def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... 
class SafeTransport(Transport): diff --git a/mypy/typeshed/stdlib/xmlrpc/server.pyi b/mypy/typeshed/stdlib/xmlrpc/server.pyi index c11d8d8e7a14..4d28974cbbed 100644 --- a/mypy/typeshed/stdlib/xmlrpc/server.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/server.pyi @@ -2,14 +2,10 @@ import http.server import pydoc import socketserver from collections.abc import Callable, Iterable, Mapping -from datetime import datetime from re import Pattern from typing import Any, ClassVar, Protocol from typing_extensions import TypeAlias -from xmlrpc.client import Fault - -# TODO: Recursive type on tuple, list, dict -_Marshallable: TypeAlias = None | bool | int | float | str | bytes | tuple[Any, ...] | list[Any] | dict[Any, Any] | datetime +from xmlrpc.client import Fault, _Marshallable # The dispatch accepts anywhere from 0 to N arguments, no easy way to allow this in mypy class _DispatchArity0(Protocol): diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index da1710787252..e964cd6eda87 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -1,6 +1,6 @@ import io import sys -from _typeshed import Self, StrOrBytesPath, StrPath +from _typeshed import Self, StrOrBytesPath, StrPath, _BufferWithLen from collections.abc import Callable, Iterable, Iterator from os import PathLike from types import TracebackType @@ -103,7 +103,7 @@ class ZipFile: compression: int # undocumented compresslevel: int | None # undocumented mode: _ZipFileMode # undocumented - pwd: str | None # undocumented + pwd: bytes | None # undocumented if sys.version_info >= (3, 11): @overload def __init__( @@ -173,7 +173,11 @@ class ZipFile: self, filename: StrPath, arcname: StrPath | None = ..., compress_type: int | None = ..., compresslevel: int | None = ... ) -> None: ... def writestr( - self, zinfo_or_arcname: str | ZipInfo, data: bytes | str, compress_type: int | None = ..., compresslevel: int | None = ... 
+ self, + zinfo_or_arcname: str | ZipInfo, + data: _BufferWithLen | str, + compress_type: int | None = ..., + compresslevel: int | None = ..., ) -> None: ... if sys.version_info >= (3, 11): def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/zipimport.pyi b/mypy/typeshed/stdlib/zipimport.pyi index db06544138ca..dc2f1aee0752 100644 --- a/mypy/typeshed/stdlib/zipimport.pyi +++ b/mypy/typeshed/stdlib/zipimport.pyi @@ -1,9 +1,8 @@ -import os import sys +from _typeshed import StrOrBytesPath from importlib.abc import ResourceReader from importlib.machinery import ModuleSpec from types import CodeType, ModuleType -from typing import Any if sys.version_info >= (3, 8): __all__ = ["ZipImportError", "zipimporter"] @@ -13,11 +12,15 @@ class ZipImportError(ImportError): ... class zipimporter: archive: str prefix: str - def __init__(self, path: str | bytes | os.PathLike[Any]) -> None: ... + if sys.version_info >= (3, 11): + def __init__(self, path: str) -> None: ... + else: + def __init__(self, path: StrOrBytesPath) -> None: ... + def find_loader(self, fullname: str, path: str | None = ...) -> tuple[zipimporter | None, list[str]]: ... # undocumented def find_module(self, fullname: str, path: str | None = ...) -> zipimporter | None: ... def get_code(self, fullname: str) -> CodeType: ... - def get_data(self, pathname: str) -> str: ... + def get_data(self, pathname: str) -> bytes: ... def get_filename(self, fullname: str) -> str: ... def get_resource_reader(self, fullname: str) -> ResourceReader | None: ... # undocumented def get_source(self, fullname: str) -> str | None: ... 
diff --git a/mypy/typeshed/stdlib/zlib.pyi b/mypy/typeshed/stdlib/zlib.pyi index cfd6784bb771..ea41567eefc5 100644 --- a/mypy/typeshed/stdlib/zlib.pyi +++ b/mypy/typeshed/stdlib/zlib.pyi @@ -1,6 +1,5 @@ import sys -from array import array -from typing import Any +from _typeshed import ReadableBuffer from typing_extensions import Literal DEFLATED: Literal[8] @@ -29,7 +28,7 @@ Z_TREES: Literal[6] class error(Exception): ... class _Compress: - def compress(self, data: bytes) -> bytes: ... + def compress(self, data: ReadableBuffer) -> bytes: ... def flush(self, mode: int = ...) -> bytes: ... def copy(self) -> _Compress: ... @@ -37,21 +36,26 @@ class _Decompress: unused_data: bytes unconsumed_tail: bytes eof: bool - def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... def flush(self, length: int = ...) -> bytes: ... def copy(self) -> _Decompress: ... -def adler32(__data: bytes, __value: int = ...) -> int: ... +def adler32(__data: ReadableBuffer, __value: int = ...) -> int: ... if sys.version_info >= (3, 11): - def compress(__data: bytes, level: int = ..., wbits: int = ...) -> bytes: ... + def compress(__data: ReadableBuffer, level: int = ..., wbits: int = ...) -> bytes: ... else: - def compress(__data: bytes, level: int = ...) -> bytes: ... + def compress(__data: ReadableBuffer, level: int = ...) -> bytes: ... def compressobj( - level: int = ..., method: int = ..., wbits: int = ..., memLevel: int = ..., strategy: int = ..., zdict: bytes | None = ... + level: int = ..., + method: int = ..., + wbits: int = ..., + memLevel: int = ..., + strategy: int = ..., + zdict: ReadableBuffer | None = ..., ) -> _Compress: ... -def crc32(__data: array[Any] | bytes, __value: int = ...) -> int: ... -def decompress(__data: bytes, wbits: int = ..., bufsize: int = ...) -> bytes: ... -def decompressobj(wbits: int = ..., zdict: bytes = ...) -> _Decompress: ... 
+def crc32(__data: ReadableBuffer, __value: int = ...) -> int: ... +def decompress(__data: ReadableBuffer, wbits: int = ..., bufsize: int = ...) -> bytes: ... +def decompressobj(wbits: int = ..., zdict: ReadableBuffer = ...) -> _Decompress: ... diff --git a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi index 7f22c07b32c0..8b9ba9e7023a 100644 --- a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi +++ b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi @@ -1,6 +1,6 @@ from _typeshed import Self, StrPath from collections.abc import Iterable, Sequence -from datetime import tzinfo +from datetime import datetime, timedelta, tzinfo from typing import Any, Protocol __all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] @@ -19,6 +19,9 @@ class ZoneInfo(tzinfo): def from_file(cls: type[Self], __fobj: _IOBytes, key: str | None = ...) -> Self: ... @classmethod def clear_cache(cls, *, only_keys: Iterable[str] | None = ...) -> None: ... + def tzname(self, __dt: datetime | None) -> str | None: ... + def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... + def dst(self, __dt: datetime | None) -> timedelta | None: ... # Note: Both here and in clear_cache, the types allow the use of `str` where # a sequence of strings is required. 
This should be remedied if a solution diff --git a/mypy/typestate.py b/mypy/typestate.py index a5d65c4b4ea3..9cbad17aa7bd 100644 --- a/mypy/typestate.py +++ b/mypy/typestate.py @@ -5,12 +5,12 @@ from __future__ import annotations -from typing import ClassVar, Dict, Set, Tuple +from typing import Dict, Set, Tuple from typing_extensions import Final, TypeAlias as _TypeAlias from mypy.nodes import TypeInfo from mypy.server.trigger import make_trigger -from mypy.types import Instance, Type, get_proper_type +from mypy.types import Instance, Type, TypeVarId, get_proper_type # Represents that the 'left' instance is a subtype of the 'right' instance SubtypeRelationship: _TypeAlias = Tuple[Instance, Instance] @@ -40,7 +40,7 @@ class TypeState: # was done in strict optional mode and of the specific *kind* of subtyping relationship, # which we represent as an arbitrary hashable tuple. # We need the caches, since subtype checks for structural types are very slow. - _subtype_caches: Final[SubtypeCache] = {} + _subtype_caches: Final[SubtypeCache] # This contains protocol dependencies generated after running a full build, # or after an update. These dependencies are special because: @@ -53,7 +53,7 @@ class TypeState: # A blocking error will be generated in this case, since we can't proceed safely. # For the description of kinds of protocol dependencies and corresponding examples, # see _snapshot_protocol_deps. - proto_deps: ClassVar[dict[str, set[str]] | None] = {} + proto_deps: dict[str, set[str]] | None # Protocols (full names) a given class attempted to implement. # Used to calculate fine grained protocol dependencies and optimize protocol @@ -61,13 +61,13 @@ class TypeState: # of type a.A to a function expecting something compatible with protocol p.P, # we'd have 'a.A' -> {'p.P', ...} in the map. This map is flushed after every incremental # update. 
- _attempted_protocols: Final[dict[str, set[str]]] = {} + _attempted_protocols: Final[dict[str, set[str]]] # We also snapshot protocol members of the above protocols. For example, if we pass # a value of type a.A to a function expecting something compatible with Iterable, we'd have # 'a.A' -> {'__iter__', ...} in the map. This map is also flushed after every incremental # update. This map is needed to only generate dependencies like -> # instead of a wildcard to avoid unnecessarily invalidating classes. - _checked_against_members: Final[dict[str, set[str]]] = {} + _checked_against_members: Final[dict[str, set[str]]] # TypeInfos that appeared as a left type (subtype) in a subtype check since latest # dependency snapshot update. This is an optimisation for fine grained mode; during a full # run we only take a dependency snapshot at the very end, so this set will contain all @@ -75,74 +75,78 @@ class TypeState: # dependencies generated from (typically) few TypeInfos that were subtype-checked # (i.e. appeared as r.h.s. in an assignment or an argument in a function call in # a re-checked target) during the update. - _rechecked_types: Final[set[TypeInfo]] = set() + _rechecked_types: Final[set[TypeInfo]] # The two attributes below are assumption stacks for subtyping relationships between # recursive type aliases. Normally, one would pass type assumptions as an additional # arguments to is_subtype(), but this would mean updating dozens of related functions # threading this through all callsites (see also comment for TypeInfo.assuming). - _assuming: Final[list[tuple[Type, Type]]] = [] - _assuming_proper: Final[list[tuple[Type, Type]]] = [] + _assuming: Final[list[tuple[Type, Type]]] + _assuming_proper: Final[list[tuple[Type, Type]]] # Ditto for inference of generic constraints against recursive type aliases. 
- inferring: Final[list[tuple[Type, Type]]] = [] + inferring: Final[list[tuple[Type, Type]]] # Whether to use joins or unions when solving constraints, see checkexpr.py for details. - infer_unions: ClassVar = False + infer_unions: bool # N.B: We do all of the accesses to these properties through # TypeState, instead of making these classmethods and accessing # via the cls parameter, since mypyc can optimize accesses to # Final attributes of a directly referenced type. - @staticmethod - def is_assumed_subtype(left: Type, right: Type) -> bool: - for (l, r) in reversed(TypeState._assuming): + def __init__(self) -> None: + self._subtype_caches = {} + self.proto_deps = {} + self._attempted_protocols = {} + self._checked_against_members = {} + self._rechecked_types = set() + self._assuming = [] + self._assuming_proper = [] + self.inferring = [] + self.infer_unions = False + + def is_assumed_subtype(self, left: Type, right: Type) -> bool: + for (l, r) in reversed(self._assuming): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): return True return False - @staticmethod - def is_assumed_proper_subtype(left: Type, right: Type) -> bool: - for (l, r) in reversed(TypeState._assuming_proper): + def is_assumed_proper_subtype(self, left: Type, right: Type) -> bool: + for (l, r) in reversed(self._assuming_proper): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): return True return False - @staticmethod - def get_assumptions(is_proper: bool) -> list[tuple[Type, Type]]: + def get_assumptions(self, is_proper: bool) -> list[tuple[Type, Type]]: if is_proper: - return TypeState._assuming_proper - return TypeState._assuming + return self._assuming_proper + return self._assuming - @staticmethod - def reset_all_subtype_caches() -> None: + def reset_all_subtype_caches(self) -> None: """Completely reset all known subtype caches.""" - TypeState._subtype_caches.clear() + 
self._subtype_caches.clear() - @staticmethod - def reset_subtype_caches_for(info: TypeInfo) -> None: + def reset_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo.""" - if info in TypeState._subtype_caches: - TypeState._subtype_caches[info].clear() + if info in self._subtype_caches: + self._subtype_caches[info].clear() - @staticmethod - def reset_all_subtype_caches_for(info: TypeInfo) -> None: + def reset_all_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo and its MRO.""" for item in info.mro: - TypeState.reset_subtype_caches_for(item) + self.reset_subtype_caches_for(item) - @staticmethod - def is_cached_subtype_check(kind: SubtypeKind, left: Instance, right: Instance) -> bool: + def is_cached_subtype_check(self, kind: SubtypeKind, left: Instance, right: Instance) -> bool: if left.last_known_value is not None or right.last_known_value is not None: # If there is a literal last known value, give up. There # will be an unbounded number of potential types to cache, # making caching less effective. return False info = right.type - cache = TypeState._subtype_caches.get(info) + cache = self._subtype_caches.get(info) if cache is None: return False subcache = cache.get(kind) @@ -150,36 +154,32 @@ def is_cached_subtype_check(kind: SubtypeKind, left: Instance, right: Instance) return False return (left, right) in subcache - @staticmethod - def record_subtype_cache_entry(kind: SubtypeKind, left: Instance, right: Instance) -> None: + def record_subtype_cache_entry( + self, kind: SubtypeKind, left: Instance, right: Instance + ) -> None: if left.last_known_value is not None or right.last_known_value is not None: # These are unlikely to match, due to the large space of # possible values. Avoid uselessly increasing cache sizes. 
return - cache = TypeState._subtype_caches.setdefault(right.type, dict()) + cache = self._subtype_caches.setdefault(right.type, dict()) cache.setdefault(kind, set()).add((left, right)) - @staticmethod - def reset_protocol_deps() -> None: + def reset_protocol_deps(self) -> None: """Reset dependencies after a full run or before a daemon shutdown.""" - TypeState.proto_deps = {} - TypeState._attempted_protocols.clear() - TypeState._checked_against_members.clear() - TypeState._rechecked_types.clear() + self.proto_deps = {} + self._attempted_protocols.clear() + self._checked_against_members.clear() + self._rechecked_types.clear() - @staticmethod - def record_protocol_subtype_check(left_type: TypeInfo, right_type: TypeInfo) -> None: + def record_protocol_subtype_check(self, left_type: TypeInfo, right_type: TypeInfo) -> None: assert right_type.is_protocol - TypeState._rechecked_types.add(left_type) - TypeState._attempted_protocols.setdefault(left_type.fullname, set()).add( - right_type.fullname - ) - TypeState._checked_against_members.setdefault(left_type.fullname, set()).update( + self._rechecked_types.add(left_type) + self._attempted_protocols.setdefault(left_type.fullname, set()).add(right_type.fullname) + self._checked_against_members.setdefault(left_type.fullname, set()).update( right_type.protocol_members ) - @staticmethod - def _snapshot_protocol_deps() -> dict[str, set[str]]: + def _snapshot_protocol_deps(self) -> dict[str, set[str]]: """Collect protocol attribute dependencies found so far from registered subtype checks. There are three kinds of protocol dependencies. For example, after a subtype check: @@ -209,8 +209,8 @@ def __iter__(self) -> Iterator[int]: 'subtypes.is_protocol_implementation'). 
""" deps: dict[str, set[str]] = {} - for info in TypeState._rechecked_types: - for attr in TypeState._checked_against_members[info.fullname]: + for info in self._rechecked_types: + for attr in self._checked_against_members[info.fullname]: # The need for full MRO here is subtle, during an update, base classes of # a concrete class may not be reprocessed, so not all -> deps # are added. @@ -220,7 +220,7 @@ def __iter__(self) -> Iterator[int]: # TODO: avoid everything from typeshed continue deps.setdefault(trigger, set()).add(make_trigger(info.fullname)) - for proto in TypeState._attempted_protocols[info.fullname]: + for proto in self._attempted_protocols[info.fullname]: trigger = make_trigger(info.fullname) if "typing" in trigger or "builtins" in trigger: continue @@ -233,45 +233,45 @@ def __iter__(self) -> Iterator[int]: deps.setdefault(trigger, set()).add(proto) return deps - @staticmethod - def update_protocol_deps(second_map: dict[str, set[str]] | None = None) -> None: + def update_protocol_deps(self, second_map: dict[str, set[str]] | None = None) -> None: """Update global protocol dependency map. We update the global map incrementally, using a snapshot only from recently type checked types. If second_map is given, update it as well. This is currently used by FineGrainedBuildManager that maintains normal (non-protocol) dependencies. 
""" - assert ( - TypeState.proto_deps is not None - ), "This should not be called after failed cache load" - new_deps = TypeState._snapshot_protocol_deps() + assert self.proto_deps is not None, "This should not be called after failed cache load" + new_deps = self._snapshot_protocol_deps() for trigger, targets in new_deps.items(): - TypeState.proto_deps.setdefault(trigger, set()).update(targets) + self.proto_deps.setdefault(trigger, set()).update(targets) if second_map is not None: for trigger, targets in new_deps.items(): second_map.setdefault(trigger, set()).update(targets) - TypeState._rechecked_types.clear() - TypeState._attempted_protocols.clear() - TypeState._checked_against_members.clear() + self._rechecked_types.clear() + self._attempted_protocols.clear() + self._checked_against_members.clear() - @staticmethod - def add_all_protocol_deps(deps: dict[str, set[str]]) -> None: + def add_all_protocol_deps(self, deps: dict[str, set[str]]) -> None: """Add all known protocol dependencies to deps. This is used by tests and debug output, and also when collecting all collected or loaded dependencies as part of build. """ - TypeState.update_protocol_deps() # just in case - if TypeState.proto_deps is not None: - for trigger, targets in TypeState.proto_deps.items(): + self.update_protocol_deps() # just in case + if self.proto_deps is not None: + for trigger, targets in self.proto_deps.items(): deps.setdefault(trigger, set()).update(targets) +type_state: Final = TypeState() + + def reset_global_state() -> None: """Reset most existing global state. Currently most of it is in this module. Few exceptions are strict optional status and and functools.lru_cache. 
""" - TypeState.reset_all_subtype_caches() - TypeState.reset_protocol_deps() + type_state.reset_all_subtype_caches() + type_state.reset_protocol_deps() + TypeVarId.next_raw_id = 1 diff --git a/mypy/typetraverser.py b/mypy/typetraverser.py index afe77efff78d..9c4a9157ad6a 100644 --- a/mypy/typetraverser.py +++ b/mypy/typetraverser.py @@ -131,6 +131,9 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> None: pass def visit_type_alias_type(self, t: TypeAliasType) -> None: + # TODO: sometimes we want to traverse target as well + # We need to find a way to indicate explicitly the intent, + # maybe make this method abstract (like for TypeTranslator)? self.traverse_types(t.args) def visit_unpack_type(self, t: UnpackType) -> None: diff --git a/mypy/typevars.py b/mypy/typevars.py index 9c813550d5ea..69c2eed37fa4 100644 --- a/mypy/typevars.py +++ b/mypy/typevars.py @@ -39,7 +39,15 @@ def fill_typevars(typ: TypeInfo) -> Instance | TupleType: ) elif isinstance(tv, TypeVarTupleType): tv = UnpackType( - TypeVarTupleType(tv.name, tv.fullname, tv.id, tv.upper_bound, line=-1, column=-1) + TypeVarTupleType( + tv.name, + tv.fullname, + tv.id, + tv.upper_bound, + tv.tuple_fallback, + line=-1, + column=-1, + ) ) else: assert isinstance(tv, ParamSpecType) diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py index e93f99d8a825..29b85dae72eb 100644 --- a/mypy/typevartuples.py +++ b/mypy/typevartuples.py @@ -4,7 +4,8 @@ from typing import Sequence, TypeVar -from mypy.types import Instance, ProperType, Type, UnpackType, get_proper_type +from mypy.nodes import ARG_POS, ARG_STAR +from mypy.types import CallableType, Instance, ProperType, Type, UnpackType, get_proper_type def find_unpack_in_list(items: Sequence[Type]) -> int | None: @@ -45,7 +46,12 @@ def split_with_instance( def split_with_mapped_and_template( - mapped: Instance, template: Instance + mapped: tuple[Type, ...], + mapped_prefix_len: int | None, + mapped_suffix_len: int | None, + template: tuple[Type, ...], + 
template_prefix_len: int, + template_suffix_len: int, ) -> tuple[ tuple[Type, ...], tuple[Type, ...], @@ -54,7 +60,14 @@ def split_with_mapped_and_template( tuple[Type, ...], tuple[Type, ...], ] | None: - split_result = fully_split_with_mapped_and_template(mapped, template) + split_result = fully_split_with_mapped_and_template( + mapped, + mapped_prefix_len, + mapped_suffix_len, + template, + template_prefix_len, + template_suffix_len, + ) if split_result is None: return None @@ -82,7 +95,12 @@ def split_with_mapped_and_template( def fully_split_with_mapped_and_template( - mapped: Instance, template: Instance + mapped: tuple[Type, ...], + mapped_prefix_len: int | None, + mapped_suffix_len: int | None, + template: tuple[Type, ...], + template_prefix_len: int, + template_suffix_len: int, ) -> tuple[ tuple[Type, ...], tuple[Type, ...], @@ -95,8 +113,19 @@ def fully_split_with_mapped_and_template( tuple[Type, ...], tuple[Type, ...], ] | None: - mapped_prefix, mapped_middle, mapped_suffix = split_with_instance(mapped) - template_prefix, template_middle, template_suffix = split_with_instance(template) + if mapped_prefix_len is not None: + assert mapped_suffix_len is not None + mapped_prefix, mapped_middle, mapped_suffix = split_with_prefix_and_suffix( + tuple(mapped), mapped_prefix_len, mapped_suffix_len + ) + else: + mapped_prefix = tuple() + mapped_suffix = tuple() + mapped_middle = mapped + + template_prefix, template_middle, template_suffix = split_with_prefix_and_suffix( + tuple(template), template_prefix_len, template_suffix_len + ) unpack_prefix = find_unpack_in_list(template_middle) if unpack_prefix is None: @@ -150,3 +179,20 @@ def extract_unpack(types: Sequence[Type]) -> ProperType | None: if isinstance(proper_type, UnpackType): return get_proper_type(proper_type.type) return None + + +def replace_starargs(callable: CallableType, types: list[Type]) -> CallableType: + star_index = callable.arg_kinds.index(ARG_STAR) + arg_kinds = ( + 
callable.arg_kinds[:star_index] + + [ARG_POS] * len(types) + + callable.arg_kinds[star_index + 1 :] + ) + arg_names = ( + callable.arg_names[:star_index] + + [None] * len(types) + + callable.arg_names[star_index + 1 :] + ) + arg_types = callable.arg_types[:star_index] + types + callable.arg_types[star_index + 1 :] + + return callable.copy_modified(arg_types=arg_types, arg_names=arg_names, arg_kinds=arg_kinds) diff --git a/mypy/util.py b/mypy/util.py index 1a0349e45220..65fe78614839 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -520,7 +520,11 @@ def parse_gray_color(cup: bytes) -> str: def should_force_color() -> bool: - return bool(int(os.getenv("MYPY_FORCE_COLOR", os.getenv("FORCE_COLOR", "0")))) + env_var = os.getenv("MYPY_FORCE_COLOR", os.getenv("FORCE_COLOR", "0")) + try: + return bool(int(env_var)) + except ValueError: + return bool(env_var) class FancyFormatter: @@ -803,18 +807,16 @@ def unnamed_function(name: str | None) -> bool: return name is not None and name == "_" -# TODO: replace with uses of perf_counter_ns when support for py3.6 is dropped -# (or when mypy properly handles alternate definitions based on python version check -time_ref = time.perf_counter +time_ref = time.perf_counter_ns -def time_spent_us(t0: float) -> int: - return int((time.perf_counter() - t0) * 1e6) +def time_spent_us(t0: int) -> int: + return int((time.perf_counter_ns() - t0) / 1000) def plural_s(s: int | Sized) -> str: count = s if isinstance(s, int) else len(s) - if count > 1: + if count != 1: return "s" else: return "" diff --git a/mypy/version.py b/mypy/version.py index 837206834e38..b125385f9b43 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -5,10 +5,10 @@ from mypy import git # Base version. -# - Release versions have the form "0.NNN". -# - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). -# - For 1.0 we'll switch back to 1.2.3 form. -__version__ = "0.990+dev" +# - Release versions have the form "1.2.3". 
+# - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). +# - Before 1.0 we had the form "0.NNN". +__version__ = "1.0.0+dev" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) diff --git a/mypy_self_check.ini b/mypy_self_check.ini index 719148240c89..d20fcd60a9cb 100644 --- a/mypy_self_check.ini +++ b/mypy_self_check.ini @@ -1,8 +1,6 @@ [mypy] strict = True -warn_no_return = True -strict_optional = True disallow_any_unimported = True show_traceback = True pretty = True diff --git a/mypyc/analysis/attrdefined.py b/mypyc/analysis/attrdefined.py index dc871a93eba1..02e02a82a4f9 100644 --- a/mypyc/analysis/attrdefined.py +++ b/mypyc/analysis/attrdefined.py @@ -91,7 +91,7 @@ def foo(self) -> int: SetMem, Unreachable, ) -from mypyc.ir.rtypes import RInstance, is_fixed_width_rtype +from mypyc.ir.rtypes import RInstance # If True, print out all always-defined attributes of native classes (to aid # debugging and testing) @@ -415,6 +415,9 @@ def update_always_defined_attrs_using_subclasses(cl: ClassIR, seen: set[ClassIR] def detect_undefined_bitmap(cl: ClassIR, seen: Set[ClassIR]) -> None: + if cl.is_trait: + return + if cl in seen: return seen.add(cl) @@ -424,5 +427,11 @@ def detect_undefined_bitmap(cl: ClassIR, seen: Set[ClassIR]) -> None: if len(cl.base_mro) > 1: cl.bitmap_attrs.extend(cl.base_mro[1].bitmap_attrs) for n, t in cl.attributes.items(): - if is_fixed_width_rtype(t) and not cl.is_always_defined(n): + if t.error_overlap and not cl.is_always_defined(n): cl.bitmap_attrs.append(n) + + for base in cl.mro[1:]: + if base.is_trait: + for n, t in base.attributes.items(): + if t.error_overlap and not cl.is_always_defined(n) and n not in cl.bitmap_attrs: + cl.bitmap_attrs.append(n) diff --git a/mypyc/analysis/ircheck.py b/mypyc/analysis/ircheck.py index c2cdd073f62e..e96c640fa8a1 100644 --- a/mypyc/analysis/ircheck.py +++ b/mypyc/analysis/ircheck.py @@ -129,7 +129,11 @@ def check_op_sources_valid(fn: 
FuncIR) -> list[FnError]: for block in fn.blocks: valid_ops.update(block.ops) - valid_registers.update([op.dest for op in block.ops if isinstance(op, BaseAssign)]) + for op in block.ops: + if isinstance(op, BaseAssign): + valid_registers.add(op.dest) + elif isinstance(op, LoadAddress) and isinstance(op.src, Register): + valid_registers.add(op.src) valid_registers.update(fn.arg_regs) @@ -150,7 +154,7 @@ def check_op_sources_valid(fn: FuncIR) -> list[FnError]: if source not in valid_registers: errors.append( FnError( - source=op, desc=f"Invalid op reference to register {source.name}" + source=op, desc=f"Invalid op reference to register {source.name!r}" ) ) @@ -248,6 +252,15 @@ def check_tuple_items_valid_literals(self, op: LoadLiteral, t: tuple[object, ... if isinstance(x, tuple): self.check_tuple_items_valid_literals(op, x) + def check_frozenset_items_valid_literals(self, op: LoadLiteral, s: frozenset[object]) -> None: + for x in s: + if x is None or isinstance(x, (str, bytes, bool, int, float, complex)): + pass + elif isinstance(x, tuple): + self.check_tuple_items_valid_literals(op, x) + else: + self.fail(op, f"Invalid type for item of frozenset literal: {type(x)})") + def visit_load_literal(self, op: LoadLiteral) -> None: expected_type = None if op.value is None: @@ -267,6 +280,11 @@ def visit_load_literal(self, op: LoadLiteral) -> None: elif isinstance(op.value, tuple): expected_type = "builtins.tuple" self.check_tuple_items_valid_literals(op, op.value) + elif isinstance(op.value, frozenset): + # There's no frozenset_rprimitive type since it'd be pretty useless so we just pretend + # it's a set (when it's really a frozenset). 
+ expected_type = "builtins.set" + self.check_frozenset_items_valid_literals(op, op.value) assert expected_type is not None, "Missed a case for LoadLiteral check" diff --git a/mypyc/build.py b/mypyc/build.py index 4f40a6cd0865..cc03eba95b4e 100644 --- a/mypyc/build.py +++ b/mypyc/build.py @@ -85,6 +85,15 @@ def fail(message: str) -> NoReturn: sys.exit(message) +def emit_messages(options: Options, messages: list[str], dt: float, serious: bool = False) -> None: + # ... you know, just in case. + if options.junit_xml: + py_version = f"{options.python_version[0]}_{options.python_version[1]}" + write_junit_xml(dt, serious, messages, options.junit_xml, py_version, options.platform) + if messages: + print("\n".join(messages)) + + def get_mypy_config( mypy_options: list[str], only_compile_paths: Iterable[str] | None, @@ -191,47 +200,35 @@ def generate_c( """ t0 = time.time() - # Do the actual work now - serious = False - result = None try: result = emitmodule.parse_and_typecheck( sources, options, compiler_options, groups, fscache ) - messages = result.errors except CompileError as e: - messages = e.messages - if not e.use_stdout: - serious = True + emit_messages(options, e.messages, time.time() - t0, serious=(not e.use_stdout)) + sys.exit(1) t1 = time.time() + if result.errors: + emit_messages(options, result.errors, t1 - t0) + sys.exit(1) + if compiler_options.verbose: print(f"Parsed and typechecked in {t1 - t0:.3f}s") - if not messages and result: - errors = Errors() - modules, ctext = emitmodule.compile_modules_to_c( - result, compiler_options=compiler_options, errors=errors, groups=groups - ) - - if errors.num_errors: - messages.extend(errors.new_messages()) - + errors = Errors() + modules, ctext = emitmodule.compile_modules_to_c( + result, compiler_options=compiler_options, errors=errors, groups=groups + ) t2 = time.time() + emit_messages(options, errors.new_messages(), t2 - t1) + if errors.num_errors: + # No need to stop the build if only warnings were emitted. 
+ sys.exit(1) + if compiler_options.verbose: print(f"Compiled to C in {t2 - t1:.3f}s") - # ... you know, just in case. - if options.junit_xml: - py_version = f"{options.python_version[0]}_{options.python_version[1]}" - write_junit_xml( - t2 - t0, serious, messages, options.junit_xml, py_version, options.platform - ) - - if messages: - print("\n".join(messages)) - sys.exit(1) - return ctext, "\n".join(format_modules(modules)) @@ -534,6 +531,10 @@ def mypycify( "-Wno-unused-command-line-argument", "-Wno-unknown-warning-option", "-Wno-unused-but-set-variable", + "-Wno-ignored-optimization-argument", + # Disables C Preprocessor (cpp) warnings + # See https://github.com/mypyc/mypyc/issues/956 + "-Wno-cpp", ] elif compiler.compiler_type == "msvc": # msvc doesn't have levels, '/O2' is full and '/Od' is disable diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 5d47636b4c1e..6e0c89dd0ecf 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -364,7 +364,8 @@ def _emit_attr_bitmap_update( self, value: str, obj: str, rtype: RType, cl: ClassIR, attr: str, clear: bool ) -> None: if value: - self.emit_line(f"if (unlikely({value} == {self.c_undefined_value(rtype)})) {{") + check = self.error_value_check(rtype, value, "==") + self.emit_line(f"if (unlikely({check})) {{") index = cl.bitmap_attrs.index(attr) mask = 1 << (index & (BITMAP_BITS - 1)) bitmap = self.attr_bitmap_expr(obj, cl, index) @@ -389,16 +390,10 @@ def emit_undefined_attr_check( *, unlikely: bool = False, ) -> None: - if isinstance(rtype, RTuple): - check = "{}".format( - self.tuple_undefined_check_cond(rtype, attr_expr, self.c_undefined_value, compare) - ) - else: - undefined = self.c_undefined_value(rtype) - check = f"{attr_expr} {compare} {undefined}" + check = self.error_value_check(rtype, attr_expr, compare) if unlikely: check = f"unlikely({check})" - if is_fixed_width_rtype(rtype): + if rtype.error_overlap: index = cl.bitmap_attrs.index(attr) bit = 1 << (index & (BITMAP_BITS - 1)) attr 
= self.bitmap_field(index) @@ -406,25 +401,47 @@ def emit_undefined_attr_check( check = f"{check} && !(({obj_expr})->{attr} & {bit})" self.emit_line(f"if ({check}) {{") + def error_value_check(self, rtype: RType, value: str, compare: str) -> str: + if isinstance(rtype, RTuple): + return self.tuple_undefined_check_cond( + rtype, value, self.c_error_value, compare, check_exception=False + ) + else: + return f"{value} {compare} {self.c_error_value(rtype)}" + def tuple_undefined_check_cond( self, rtuple: RTuple, tuple_expr_in_c: str, c_type_compare_val: Callable[[RType], str], compare: str, + *, + check_exception: bool = True, ) -> str: if len(rtuple.types) == 0: # empty tuple return "{}.empty_struct_error_flag {} {}".format( tuple_expr_in_c, compare, c_type_compare_val(int_rprimitive) ) - item_type = rtuple.types[0] + if rtuple.error_overlap: + i = 0 + item_type = rtuple.types[0] + else: + for i, typ in enumerate(rtuple.types): + if not typ.error_overlap: + item_type = rtuple.types[i] + break + else: + assert False, "not expecting tuple with error overlap" if isinstance(item_type, RTuple): return self.tuple_undefined_check_cond( - item_type, tuple_expr_in_c + ".f0", c_type_compare_val, compare + item_type, tuple_expr_in_c + f".f{i}", c_type_compare_val, compare ) else: - return f"{tuple_expr_in_c}.f0 {compare} {c_type_compare_val(item_type)}" + check = f"{tuple_expr_in_c}.f{i} {compare} {c_type_compare_val(item_type)}" + if rtuple.error_overlap and check_exception: + check += " && PyErr_Occurred()" + return check def tuple_undefined_value(self, rtuple: RTuple) -> str: return "tuple_undefined_" + rtuple.unique_id @@ -986,18 +1003,18 @@ def emit_box( def emit_error_check(self, value: str, rtype: RType, failure: str) -> None: """Emit code for checking a native function return value for uncaught exception.""" - if is_fixed_width_rtype(rtype): - # The error value is also valid as a normal value, so we need to also check - # for a raised exception. 
- self.emit_line(f"if ({value} == {self.c_error_value(rtype)} && PyErr_Occurred()) {{") - elif not isinstance(rtype, RTuple): - self.emit_line(f"if ({value} == {self.c_error_value(rtype)}) {{") - else: + if isinstance(rtype, RTuple): if len(rtype.types) == 0: return # empty tuples can't fail. else: cond = self.tuple_undefined_check_cond(rtype, value, self.c_error_value, "==") self.emit_line(f"if ({cond}) {{") + elif rtype.error_overlap: + # The error value is also valid as a normal value, so we need to also check + # for a raised exception. + self.emit_line(f"if ({value} == {self.c_error_value(rtype)} && PyErr_Occurred()) {{") + else: + self.emit_line(f"if ({value} == {self.c_error_value(rtype)}) {{") self.emit_lines(failure, "}") def emit_gc_visit(self, target: str, rtype: RType) -> None: diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 0fdb6e8a98c3..72e16345a325 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -20,7 +20,7 @@ from mypyc.common import BITMAP_BITS, BITMAP_TYPE, NATIVE_PREFIX, PREFIX, REG_PREFIX, use_fastcall from mypyc.ir.class_ir import ClassIR, VTableEntries from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD, FuncDecl, FuncIR -from mypyc.ir.rtypes import RTuple, RType, is_fixed_width_rtype, object_rprimitive +from mypyc.ir.rtypes import RTuple, RType, object_rprimitive from mypyc.namegen import NameGenerator from mypyc.sametype import is_same_type @@ -824,7 +824,10 @@ def generate_getseter_declarations(cl: ClassIR, emitter: Emitter) -> None: ) ) - for prop in cl.properties: + for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + # Generate getter declaration emitter.emit_line("static PyObject *") emitter.emit_line( @@ -834,7 +837,7 @@ def generate_getseter_declarations(cl: ClassIR, emitter: Emitter) -> None: ) # Generate property setter declaration if a setter exists - if cl.properties[prop][1]: + if setter: emitter.emit_line("static int") 
emitter.emit_line( "{}({} *self, PyObject *value, void *closure);".format( @@ -854,11 +857,13 @@ def generate_getseters_table(cl: ClassIR, name: str, emitter: Emitter) -> None: ) ) emitter.emit_line(" NULL, NULL},") - for prop in cl.properties: + for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + emitter.emit_line(f'{{"{prop}",') emitter.emit_line(f" (getter){getter_name(cl, prop, emitter.names)},") - setter = cl.properties[prop][1] if setter: emitter.emit_line(f" (setter){setter_name(cl, prop, emitter.names)},") emitter.emit_line("NULL, NULL},") @@ -878,6 +883,9 @@ def generate_getseters(cl: ClassIR, emitter: Emitter) -> None: if i < len(cl.attributes) - 1: emitter.emit_line("") for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + rtype = getter.sig.ret_type emitter.emit_line("") generate_readonly_getter(cl, prop, rtype, getter, emitter) @@ -960,13 +968,13 @@ def generate_setter(cl: ClassIR, attr: str, rtype: RType, emitter: Emitter) -> N emitter.emit_lines("if (!tmp)", " return -1;") emitter.emit_inc_ref("tmp", rtype) emitter.emit_line(f"self->{attr_field} = tmp;") - if is_fixed_width_rtype(rtype) and not always_defined: + if rtype.error_overlap and not always_defined: emitter.emit_attr_bitmap_set("tmp", "self", rtype, cl, attr) if deletable: emitter.emit_line("} else") emitter.emit_line(f" self->{attr_field} = {emitter.c_undefined_value(rtype)};") - if is_fixed_width_rtype(rtype): + if rtype.error_overlap: emitter.emit_attr_bitmap_clear("self", rtype, cl, attr) emitter.emit_line("return 0;") emitter.emit_line("}") diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index 2c096655f41e..56a22447eeac 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -60,7 +60,6 @@ RStruct, RTuple, RType, - is_fixed_width_rtype, is_int32_rprimitive, is_int64_rprimitive, is_int_rprimitive, @@ -331,7 +330,8 @@ def visit_get_attr(self, op: GetAttr) -> None: rtype = 
op.class_type cl = rtype.class_ir attr_rtype, decl_cl = cl.attr_details(op.attr) - if cl.get_method(op.attr): + prefer_method = cl.is_trait and attr_rtype.error_overlap + if cl.get_method(op.attr, prefer_method=prefer_method): # Properties are essentially methods, so use vtable access for them. version = "_TRAIT" if cl.is_trait else "" self.emit_line( @@ -442,7 +442,7 @@ def visit_set_attr(self, op: SetAttr) -> None: self.emitter.emit_dec_ref(attr_expr, attr_rtype) if not always_defined: self.emitter.emit_line("}") - elif is_fixed_width_rtype(attr_rtype) and not cl.is_always_defined(op.attr): + elif attr_rtype.error_overlap and not cl.is_always_defined(op.attr): # If there is overlap with the error value, update bitmap to mark # attribute as defined. self.emitter.emit_attr_bitmap_set(src, obj, attr_rtype, cl, op.attr) diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index 5dacaf6acab6..9f65aa77c47f 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -669,6 +669,9 @@ def generate_literal_tables(self) -> None: # Descriptions of tuple literals init_tuple = c_array_initializer(literals.encoded_tuple_values()) self.declare_global("const int []", "CPyLit_Tuple", initializer=init_tuple) + # Descriptions of frozenset literals + init_frozenset = c_array_initializer(literals.encoded_frozenset_values()) + self.declare_global("const int []", "CPyLit_FrozenSet", initializer=init_frozenset) def generate_export_table(self, decl_emitter: Emitter, code_emitter: Emitter) -> None: """Generate the declaration and definition of the group's export struct. 
@@ -839,7 +842,7 @@ def generate_globals_init(self, emitter: Emitter) -> None: for symbol, fixup in self.simple_inits: emitter.emit_line(f"{symbol} = {fixup};") - values = "CPyLit_Str, CPyLit_Bytes, CPyLit_Int, CPyLit_Float, CPyLit_Complex, CPyLit_Tuple" + values = "CPyLit_Str, CPyLit_Bytes, CPyLit_Int, CPyLit_Float, CPyLit_Complex, CPyLit_Tuple, CPyLit_FrozenSet" emitter.emit_lines( f"if (CPyStatics_Initialize(CPyStatics, {values}) < 0) {{", "return -1;", "}" ) diff --git a/mypyc/codegen/emitwrapper.py b/mypyc/codegen/emitwrapper.py index 1abab53bc39d..1fa1e8548e07 100644 --- a/mypyc/codegen/emitwrapper.py +++ b/mypyc/codegen/emitwrapper.py @@ -32,7 +32,6 @@ RInstance, RType, is_bool_rprimitive, - is_fixed_width_rtype, is_int_rprimitive, is_object_rprimitive, object_rprimitive, @@ -718,9 +717,10 @@ def generate_arg_check( """ error = error or AssignHandler() if typ.is_unboxed: - if is_fixed_width_rtype(typ) and optional: + if typ.error_overlap and optional: # Update bitmap is value is provided. 
- emitter.emit_line(f"{emitter.ctype(typ)} arg_{name} = 0;") + init = emitter.c_undefined_value(typ) + emitter.emit_line(f"{emitter.ctype(typ)} arg_{name} = {init};") emitter.emit_line(f"if (obj_{name} != NULL) {{") bitmap = bitmap_name(bitmap_arg_index // BITMAP_BITS) emitter.emit_line(f"{bitmap} |= 1 << {bitmap_arg_index & (BITMAP_BITS - 1)};") @@ -835,7 +835,7 @@ def emit_arg_processing( optional=optional, bitmap_arg_index=bitmap_arg_index, ) - if optional and is_fixed_width_rtype(typ): + if optional and typ.error_overlap: bitmap_arg_index += 1 def emit_call(self, not_implemented_handler: str = "") -> None: diff --git a/mypyc/codegen/literals.py b/mypyc/codegen/literals.py index 29957d52101c..784a8ed27c4e 100644 --- a/mypyc/codegen/literals.py +++ b/mypyc/codegen/literals.py @@ -1,12 +1,13 @@ from __future__ import annotations -from typing import Any, Tuple, Union, cast +from typing import Any, Dict, FrozenSet, List, Tuple, Union, cast from typing_extensions import Final -# Supported Python literal types. All tuple items must have supported +# Supported Python literal types. All tuple / frozenset items must have supported # literal types as well, but we can't represent the type precisely. 
-LiteralValue = Union[str, bytes, int, bool, float, complex, Tuple[object, ...], None] - +LiteralValue = Union[ + str, bytes, int, bool, float, complex, Tuple[object, ...], FrozenSet[object], None +] # Some literals are singletons and handled specially (None, False and True) NUM_SINGLETONS: Final = 3 @@ -23,6 +24,7 @@ def __init__(self) -> None: self.float_literals: dict[float, int] = {} self.complex_literals: dict[complex, int] = {} self.tuple_literals: dict[tuple[object, ...], int] = {} + self.frozenset_literals: dict[frozenset[object], int] = {} def record_literal(self, value: LiteralValue) -> None: """Ensure that the literal value is available in generated code.""" @@ -55,6 +57,12 @@ def record_literal(self, value: LiteralValue) -> None: for item in value: self.record_literal(cast(Any, item)) tuple_literals[value] = len(tuple_literals) + elif isinstance(value, frozenset): + frozenset_literals = self.frozenset_literals + if value not in frozenset_literals: + for item in value: + self.record_literal(cast(Any, item)) + frozenset_literals[value] = len(frozenset_literals) else: assert False, "invalid literal: %r" % value @@ -86,6 +94,9 @@ def literal_index(self, value: LiteralValue) -> int: n += len(self.complex_literals) if isinstance(value, tuple): return n + self.tuple_literals[value] + n += len(self.tuple_literals) + if isinstance(value, frozenset): + return n + self.frozenset_literals[value] assert False, "invalid literal: %r" % value def num_literals(self) -> int: @@ -98,6 +109,7 @@ def num_literals(self) -> int: + len(self.float_literals) + len(self.complex_literals) + len(self.tuple_literals) + + len(self.frozenset_literals) ) # The following methods return the C encodings of literal values @@ -119,24 +131,32 @@ def encoded_complex_values(self) -> list[str]: return _encode_complex_values(self.complex_literals) def encoded_tuple_values(self) -> list[str]: - """Encode tuple values into a C array. 
+ return self._encode_collection_values(self.tuple_literals) + + def encoded_frozenset_values(self) -> List[str]: + return self._encode_collection_values(self.frozenset_literals) + + def _encode_collection_values( + self, values: dict[tuple[object, ...], int] | dict[frozenset[object], int] + ) -> list[str]: + """Encode tuple/frozenset values into a C array. The format of the result is like this: - - + + ... - + ... """ - values = self.tuple_literals - value_by_index = {index: value for value, index in values.items()} + # FIXME: https://github.com/mypyc/mypyc/issues/965 + value_by_index = {index: value for value, index in cast(Dict[Any, int], values).items()} result = [] - num = len(values) - result.append(str(num)) - for i in range(num): + count = len(values) + result.append(str(count)) + for i in range(count): value = value_by_index[i] result.append(str(len(value))) for item in value: diff --git a/mypyc/common.py b/mypyc/common.py index 6b0bbcee5fc9..7412ebef4752 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -44,13 +44,6 @@ PLATFORM_SIZE = 4 if IS_32_BIT_PLATFORM else 8 -# Python 3.5 on macOS uses a hybrid 32/64-bit build that requires some workarounds. -# The same generated C will be compiled in both 32 and 64 bit modes when building mypy -# wheels (for an unknown reason). -# -# Note that we use "in ['darwin']" because of https://github.com/mypyc/mypyc/issues/761. -IS_MIXED_32_64_BIT_BUILD: Final = sys.platform in ["darwin"] and sys.version_info < (3, 6) - # Maximum value for a short tagged integer. MAX_SHORT_INT: Final = 2 ** (8 * int(SIZEOF_SIZE_T) - 2) - 1 @@ -59,9 +52,8 @@ # Maximum value for a short tagged integer represented as a C integer literal. # -# Note: Assume that the compiled code uses the same bit width as mypyc, except for -# Python 3.5 on macOS. 
-MAX_LITERAL_SHORT_INT: Final = MAX_SHORT_INT if not IS_MIXED_32_64_BIT_BUILD else 2**30 - 1 +# Note: Assume that the compiled code uses the same bit width as mypyc +MAX_LITERAL_SHORT_INT: Final = MAX_SHORT_INT MIN_LITERAL_SHORT_INT: Final = -MAX_LITERAL_SHORT_INT - 1 # Decription of the C type used to track the definedness of attributes and diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index 7f55decfd754..a1534780b79b 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -69,10 +69,11 @@ # placed in the class's shadow vtable (if it has one). -VTableMethod = NamedTuple( - "VTableMethod", - [("cls", "ClassIR"), ("name", str), ("method", FuncIR), ("shadow_method", Optional[FuncIR])], -) +class VTableMethod(NamedTuple): + cls: "ClassIR" + name: str + method: FuncIR + shadow_method: Optional[FuncIR] VTableEntries = List[VTableMethod] @@ -265,10 +266,7 @@ def has_attr(self, name: str) -> bool: return True def is_deletable(self, name: str) -> bool: - for ir in self.mro: - if name in ir.deletable: - return True - return False + return any(name in ir.deletable for ir in self.mro) def is_always_defined(self, name: str) -> bool: if self.is_deletable(name): @@ -281,17 +279,28 @@ def name_prefix(self, names: NameGenerator) -> str: def struct_name(self, names: NameGenerator) -> str: return f"{exported_name(self.fullname)}Object" - def get_method_and_class(self, name: str) -> tuple[FuncIR, ClassIR] | None: + def get_method_and_class( + self, name: str, *, prefer_method: bool = False + ) -> tuple[FuncIR, ClassIR] | None: for ir in self.mro: if name in ir.methods: - return ir.methods[name], ir + func_ir = ir.methods[name] + if not prefer_method and func_ir.decl.implicit: + # This is an implicit accessor, so there is also an attribute definition + # which the caller prefers. This happens if an attribute overrides a + # property. 
+ return None + return func_ir, ir return None - def get_method(self, name: str) -> FuncIR | None: - res = self.get_method_and_class(name) + def get_method(self, name: str, *, prefer_method: bool = False) -> FuncIR | None: + res = self.get_method_and_class(name, prefer_method=prefer_method) return res[0] if res else None + def has_method_decl(self, name: str) -> bool: + return any(name in ir.method_decls for ir in self.mro) + def subclasses(self) -> set[ClassIR] | None: """Return all subclasses of this class, both direct and indirect. diff --git a/mypyc/ir/func_ir.py b/mypyc/ir/func_ir.py index 1b82be278df6..dbb45fc7ec29 100644 --- a/mypyc/ir/func_ir.py +++ b/mypyc/ir/func_ir.py @@ -17,7 +17,7 @@ Register, Value, ) -from mypyc.ir.rtypes import RType, bitmap_rprimitive, deserialize_type, is_fixed_width_rtype +from mypyc.ir.rtypes import RType, bitmap_rprimitive, deserialize_type from mypyc.namegen import NameGenerator @@ -70,6 +70,8 @@ class FuncSignature: def __init__(self, args: Sequence[RuntimeArg], ret_type: RType) -> None: self.args = tuple(args) self.ret_type = ret_type + # Bitmap arguments are use to mark default values for arguments that + # have types with overlapping error values. 
self.num_bitmap_args = num_bitmap_args(self.args) if self.num_bitmap_args: extra = [ @@ -78,6 +80,12 @@ def __init__(self, args: Sequence[RuntimeArg], ret_type: RType) -> None: ] self.args = self.args + tuple(reversed(extra)) + def real_args(self) -> tuple[RuntimeArg, ...]: + """Return arguments without any synthetic bitmap arguments.""" + if self.num_bitmap_args: + return self.args[: -self.num_bitmap_args] + return self.args + def bound_sig(self) -> "FuncSignature": if self.num_bitmap_args: return FuncSignature(self.args[1 : -self.num_bitmap_args], self.ret_type) @@ -105,7 +113,7 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncSignature: def num_bitmap_args(args: tuple[RuntimeArg, ...]) -> int: n = 0 for arg in args: - if is_fixed_width_rtype(arg.type) and arg.kind.is_optional(): + if arg.type.error_overlap and arg.kind.is_optional(): n += 1 return (n + (BITMAP_BITS - 1)) // BITMAP_BITS @@ -131,6 +139,7 @@ def __init__( kind: int = FUNC_NORMAL, is_prop_setter: bool = False, is_prop_getter: bool = False, + implicit: bool = False, ) -> None: self.name = name self.class_name = class_name @@ -147,7 +156,11 @@ def __init__( else: self.bound_sig = sig.bound_sig() - # this is optional because this will be set to the line number when the corresponding + # If True, not present in the mypy AST and must be synthesized during irbuild + # Currently only supported for property getters/setters + self.implicit = implicit + + # This is optional because this will be set to the line number when the corresponding # FuncIR is created self._line: int | None = None @@ -190,6 +203,7 @@ def serialize(self) -> JsonDict: "kind": self.kind, "is_prop_setter": self.is_prop_setter, "is_prop_getter": self.is_prop_getter, + "implicit": self.implicit, } # TODO: move this to FuncIR? 
@@ -211,6 +225,7 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncDecl: data["kind"], data["is_prop_setter"], data["is_prop_getter"], + data["implicit"], ) diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 361221f5b710..51a0bffcf3f1 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -28,7 +28,6 @@ int_rprimitive, is_bit_rprimitive, is_bool_rprimitive, - is_fixed_width_rtype, is_int_rprimitive, is_none_rprimitive, is_pointer_rprimitive, @@ -40,6 +39,7 @@ ) if TYPE_CHECKING: + from mypyc.codegen.literals import LiteralValue from mypyc.ir.class_ir import ClassIR from mypyc.ir.func_ir import FuncDecl, FuncIR @@ -589,7 +589,7 @@ class LoadLiteral(RegisterOp): This is used to load a static PyObject * value corresponding to a literal of one of the supported types. - Tuple literals must contain only valid literal values as items. + Tuple / frozenset literals must contain only valid literal values as items. NOTE: You can use this to load boxed (Python) int objects. Use Integer to load unboxed, tagged integers or fixed-width, @@ -604,11 +604,7 @@ class LoadLiteral(RegisterOp): error_kind = ERR_NEVER is_borrowed = True - def __init__( - self, - value: None | str | bytes | bool | int | float | complex | tuple[object, ...], - rtype: RType, - ) -> None: + def __init__(self, value: LiteralValue, rtype: RType) -> None: self.value = value self.type = rtype @@ -632,7 +628,7 @@ def __init__(self, obj: Value, attr: str, line: int, *, borrow: bool = False) -> self.class_type = obj.type attr_type = obj.type.attr_type(attr) self.type = attr_type - if is_fixed_width_rtype(attr_type): + if attr_type.error_overlap: self.error_kind = ERR_MAGIC_OVERLAPPING self.is_borrowed = borrow and attr_type.is_refcounted @@ -785,7 +781,7 @@ class TupleGet(RegisterOp): error_kind = ERR_NEVER - def __init__(self, src: Value, index: int, line: int) -> None: + def __init__(self, src: Value, index: int, line: int = -1) -> None: super().__init__(line) self.src = src self.index = index @@ 
-1454,6 +1450,6 @@ def visit_keep_alive(self, op: KeepAlive) -> T: # # (Serialization and deserialization *will* be used for incremental # compilation but so far it is not hooked up to anything.) -DeserMaps = NamedTuple( - "DeserMaps", [("classes", Dict[str, "ClassIR"]), ("functions", Dict[str, "FuncIR"])] -) +class DeserMaps(NamedTuple): + classes: Dict[str, "ClassIR"] + functions: Dict[str, "FuncIR"] diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py index a9324a8608e4..cb9e4a2d2541 100644 --- a/mypyc/ir/pprint.py +++ b/mypyc/ir/pprint.py @@ -106,7 +106,18 @@ def visit_load_literal(self, op: LoadLiteral) -> str: # it explicit that this is a Python object. if isinstance(op.value, int): prefix = "object " - return self.format("%r = %s%s", op, prefix, repr(op.value)) + + rvalue = repr(op.value) + if isinstance(op.value, frozenset): + # We need to generate a string representation that won't vary + # run-to-run because sets are unordered, otherwise we may get + # spurious irbuild test failures. + # + # Sorting by the item's string representation is a bit of a + # hack, but it's stable and won't cause TypeErrors. + formatted_items = [repr(i) for i in sorted(op.value, key=str)] + rvalue = "frozenset({" + ", ".join(formatted_items) + "})" + return self.format("%r = %s%s", op, prefix, rvalue) def visit_get_attr(self, op: GetAttr) -> str: return self.format("%r = %s%r.%s", op, self.borrow_prefix(op), op.obj, op.attr) diff --git a/mypyc/ir/rtypes.py b/mypyc/ir/rtypes.py index 6db3f249ca9b..babfe0770f35 100644 --- a/mypyc/ir/rtypes.py +++ b/mypyc/ir/rtypes.py @@ -572,6 +572,7 @@ def __init__(self, types: list[RType]) -> None: # Nominally the max c length is 31 chars, but I'm not honestly worried about this. 
self.struct_name = f"tuple_{self.unique_id}" self._ctype = f"{self.struct_name}" + self.error_overlap = all(t.error_overlap for t in self.types) and bool(self.types) def accept(self, visitor: RTypeVisitor[T]) -> T: return visitor.visit_rtuple(self) @@ -796,6 +797,30 @@ def __init__(self, items: list[RType]) -> None: self.items_set = frozenset(items) self._ctype = "PyObject *" + @staticmethod + def make_simplified_union(items: list[RType]) -> RType: + """Return a normalized union that covers the given items. + + Flatten nested unions and remove duplicate items. + + Overlapping items are *not* simplified. For example, + [object, str] will not be simplified. + """ + items = flatten_nested_unions(items) + assert items + + # Remove duplicate items using set + list to preserve item order + seen = set() + new_items = [] + for item in items: + if item not in seen: + new_items.append(item) + seen.add(item) + if len(new_items) > 1: + return RUnion(new_items) + else: + return new_items[0] + def accept(self, visitor: RTypeVisitor[T]) -> T: return visitor.visit_runion(self) @@ -822,6 +847,19 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RUnion: return RUnion(types) +def flatten_nested_unions(types: list[RType]) -> list[RType]: + if not any(isinstance(t, RUnion) for t in types): + return types # Fast path + + flat_items: list[RType] = [] + for t in types: + if isinstance(t, RUnion): + flat_items.extend(flatten_nested_unions(t.items)) + else: + flat_items.append(t) + return flat_items + + def optional_value_type(rtype: RType) -> RType | None: """If rtype is the union of none_rprimitive and another type X, return X. 
diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index 443fa6886ea6..f2a70d4e8691 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -53,6 +53,7 @@ Type, TypeOfAny, UninhabitedType, + UnionType, get_proper_type, ) from mypy.util import split_target @@ -85,12 +86,12 @@ RInstance, RTuple, RType, + RUnion, bitmap_rprimitive, c_int_rprimitive, c_pyssize_t_rprimitive, dict_rprimitive, int_rprimitive, - is_fixed_width_rtype, is_list_rprimitive, is_none_rprimitive, is_object_rprimitive, @@ -117,7 +118,7 @@ AssignmentTargetRegister, AssignmentTargetTuple, ) -from mypyc.irbuild.util import is_constant +from mypyc.irbuild.util import bytes_from_str, is_constant from mypyc.options import CompilerOptions from mypyc.primitives.dict_ops import dict_get_item_op, dict_set_item_op from mypyc.primitives.generic_ops import iter_op, next_op, py_setattr_op @@ -295,8 +296,7 @@ def load_bytes_from_str_literal(self, value: str) -> Value: are stored in BytesExpr.value, whose type is 'str' not 'bytes'. Thus we perform a special conversion here. 
""" - bytes_value = bytes(value, "utf8").decode("unicode-escape").encode("raw-unicode-escape") - return self.builder.load_bytes(bytes_value) + return self.builder.load_bytes(bytes_from_str(value)) def load_int(self, value: int) -> Value: return self.builder.load_int(value) @@ -865,8 +865,15 @@ def extract_int(self, e: Expression) -> int | None: return None def get_sequence_type(self, expr: Expression) -> RType: - target_type = get_proper_type(self.types[expr]) - assert isinstance(target_type, Instance) + return self.get_sequence_type_from_type(self.types[expr]) + + def get_sequence_type_from_type(self, target_type: Type) -> RType: + target_type = get_proper_type(target_type) + if isinstance(target_type, UnionType): + return RUnion.make_simplified_union( + [self.get_sequence_type_from_type(item) for item in target_type.items] + ) + assert isinstance(target_type, Instance), target_type if target_type.type.fullname == "builtins.str": return str_rprimitive else: @@ -878,7 +885,7 @@ def get_dict_base_type(self, expr: Expression) -> Instance: This is useful for dict subclasses like SymbolTable. """ target_type = get_proper_type(self.types[expr]) - assert isinstance(target_type, Instance) + assert isinstance(target_type, Instance), target_type dict_base = next(base for base in target_type.type.mro if base.fullname == "builtins.dict") return map_instance_to_supertype(target_type, dict_base) @@ -994,7 +1001,7 @@ def call_refexpr_with_args( ) -> Value: # Handle data-driven special-cased primitive call ops. 
- if callee.fullname is not None and expr.arg_kinds == [ARG_POS] * len(arg_values): + if callee.fullname and expr.arg_kinds == [ARG_POS] * len(arg_values): call_c_ops_candidates = function_ops.get(callee.fullname, []) target = self.builder.matching_call_c( call_c_ops_candidates, arg_values, expr.line, self.node_type(expr) @@ -1019,7 +1026,7 @@ def call_refexpr_with_args( callee_node = callee_node.func if ( callee_node is not None - and callee.fullname is not None + and callee.fullname and callee_node in self.mapper.func_to_decl and all(kind in (ARG_POS, ARG_NAMED) for kind in expr.arg_kinds) ): @@ -1233,7 +1240,7 @@ def load_global(self, expr: NameExpr) -> Value: and isinstance(expr.node, TypeInfo) and not self.is_synthetic_type(expr.node) ): - assert expr.fullname is not None + assert expr.fullname return self.load_native_type_object(expr.fullname) return self.load_global_str(expr.name, expr.line) @@ -1308,7 +1315,7 @@ def get_default() -> Value: assert isinstance(target, AssignmentTargetRegister) reg = target.register - if not is_fixed_width_rtype(reg.type): + if not reg.type.error_overlap: builder.assign_if_null(target.register, get_default, arg.initializer.line) else: builder.assign_if_bitmap_unset( diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 2c412253ec71..59b1c05a0ddb 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -24,7 +24,8 @@ TypeInfo, is_class_var, ) -from mypy.types import ENUM_REMOVED_PROPS, Instance, get_proper_type +from mypy.types import ENUM_REMOVED_PROPS, Instance, UnboundType, get_proper_type +from mypyc.common import PROPSET_PREFIX from mypyc.ir.class_ir import ClassIR, NonExtClassInfo from mypyc.ir.func_ir import FuncDecl, FuncSignature from mypyc.ir.ops import ( @@ -53,7 +54,13 @@ object_rprimitive, ) from mypyc.irbuild.builder import IRBuilder -from mypyc.irbuild.function import handle_ext_method, handle_non_ext_method, load_type +from mypyc.irbuild.function import ( + 
gen_property_getter_ir, + gen_property_setter_ir, + handle_ext_method, + handle_non_ext_method, + load_type, +) from mypyc.irbuild.util import dataclass_type, get_func_def, is_constant, is_dataclass_decorator from mypyc.primitives.dict_ops import dict_new_op, dict_set_item_op from mypyc.primitives.generic_ops import py_hasattr_op, py_setattr_op @@ -84,7 +91,7 @@ def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None: # classes aren't necessarily populated yet at # prepare_class_def time. if any(ir.base_mro[i].base != ir.base_mro[i + 1] for i in range(len(ir.base_mro) - 1)): - builder.error("Non-trait MRO must be linear", cdef.line) + builder.error("Multiple inheritance is not supported (except for traits)", cdef.line) if ir.allow_interpreted_subclasses: for parent in ir.mro: @@ -151,6 +158,26 @@ def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None: else: builder.error("Unsupported statement in class body", stmt.line) + # Generate implicit property setters/getters + for name, decl in ir.method_decls.items(): + if decl.implicit and decl.is_prop_getter: + getter_ir = gen_property_getter_ir(builder, decl, cdef, ir.is_trait) + builder.functions.append(getter_ir) + ir.methods[getter_ir.decl.name] = getter_ir + + setter_ir = None + setter_name = PROPSET_PREFIX + name + if setter_name in ir.method_decls: + setter_ir = gen_property_setter_ir( + builder, ir.method_decls[setter_name], cdef, ir.is_trait + ) + builder.functions.append(setter_ir) + ir.methods[setter_name] = setter_ir + + ir.properties[name] = (getter_ir, setter_ir) + # TODO: Generate glue method if needed? + # TODO: Do we need interpreted glue methods? Maybe not? + cls_builder.finalize(ir) @@ -451,6 +478,7 @@ def populate_non_ext_bases(builder: IRBuilder, cdef: ClassDef) -> Value: "typing.Collection", "typing.Reversible", "typing.Container", + "typing.Sized", ): # HAX: Synthesized base classes added by mypy don't exist at runtime, so skip them. 
# This could break if they were added explicitly, though... @@ -482,7 +510,11 @@ def populate_non_ext_bases(builder: IRBuilder, cdef: ClassDef) -> Value: name = "_NamedTuple" base = builder.get_module_attr("typing", name, cdef.line) else: - base = builder.load_global_str(cls.name, cdef.line) + cls_module = cls.fullname.rsplit(".", 1)[0] + if cls_module == builder.current_module: + base = builder.load_global_str(cls.name, cdef.line) + else: + base = builder.load_module_attr_by_fullname(cls.fullname, cdef.line) bases.append(base) if cls.fullname in MAGIC_TYPED_DICT_CLASSES: # The remaining base classes are synthesized by mypy and should be ignored. @@ -551,6 +583,7 @@ def add_non_ext_class_attr_ann( get_type_info: Callable[[AssignmentStmt], TypeInfo | None] | None = None, ) -> None: """Add a class attribute to __annotations__ of a non-extension class.""" + # FIXME: try to better preserve the special forms and type parameters of generics. typ: Value | None = None if get_type_info is not None: type_info = get_type_info(stmt) @@ -560,7 +593,17 @@ def add_non_ext_class_attr_ann( if typ is None: # FIXME: if get_type_info is not provided, don't fall back to stmt.type? ann_type = get_proper_type(stmt.type) - if isinstance(ann_type, Instance): + if ( + isinstance(stmt.unanalyzed_type, UnboundType) + and stmt.unanalyzed_type.original_str_expr is not None + ): + # Annotation is a forward reference, so don't attempt to load the actual + # type and load the string instead. + # + # TODO: is it possible to determine whether a non-string annotation is + # actually a forward reference due to the __annotations__ future? 
+ typ = builder.load_str(stmt.unanalyzed_type.original_str_expr) + elif isinstance(ann_type, Instance): typ = load_type(builder, ann_type.type, stmt.line) else: typ = builder.add(LoadAddress(type_object_op.type, type_object_op.src, stmt.line)) diff --git a/mypyc/irbuild/constant_fold.py b/mypyc/irbuild/constant_fold.py index 8d0a7fea5d90..4e9eb53b9222 100644 --- a/mypyc/irbuild/constant_fold.py +++ b/mypyc/irbuild/constant_fold.py @@ -1,6 +1,11 @@ """Constant folding of IR values. For example, 3 + 5 can be constant folded into 8. + +This is mostly like mypy.constant_fold, but we can bind some additional +NameExpr and MemberExpr references here, since we have more knowledge +about which definitions can be trusted -- we constant fold only references +to other compiled modules in the same compilation unit. """ from __future__ import annotations @@ -8,6 +13,11 @@ from typing import Union from typing_extensions import Final +from mypy.constant_fold import ( + constant_fold_binary_int_op, + constant_fold_binary_str_op, + constant_fold_unary_int_op, +) from mypy.nodes import Expression, IntExpr, MemberExpr, NameExpr, OpExpr, StrExpr, UnaryExpr, Var from mypyc.irbuild.builder import IRBuilder @@ -51,52 +61,3 @@ def constant_fold_expr(builder: IRBuilder, expr: Expression) -> ConstantValue | if isinstance(value, int): return constant_fold_unary_int_op(expr.op, value) return None - - -def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | None: - if op == "+": - return left + right - if op == "-": - return left - right - elif op == "*": - return left * right - elif op == "//": - if right != 0: - return left // right - elif op == "%": - if right != 0: - return left % right - elif op == "&": - return left & right - elif op == "|": - return left | right - elif op == "^": - return left ^ right - elif op == "<<": - if right >= 0: - return left << right - elif op == ">>": - if right >= 0: - return left >> right - elif op == "**": - if right >= 0: - ret = 
left**right - assert isinstance(ret, int) - return ret - return None - - -def constant_fold_unary_int_op(op: str, value: int) -> int | None: - if op == "-": - return -value - elif op == "~": - return ~value - elif op == "+": - return value - return None - - -def constant_fold_binary_str_op(op: str, left: str, right: str) -> str | None: - if op == "+": - return left + right - return None diff --git a/mypyc/irbuild/env_class.py b/mypyc/irbuild/env_class.py index 416fba633482..ded8072deb63 100644 --- a/mypyc/irbuild/env_class.py +++ b/mypyc/irbuild/env_class.py @@ -21,7 +21,7 @@ def g() -> int: from mypyc.common import BITMAP_BITS, ENV_ATTR_NAME, SELF_NAME, bitmap_name from mypyc.ir.class_ir import ClassIR from mypyc.ir.ops import Call, GetAttr, SetAttr, Value -from mypyc.ir.rtypes import RInstance, bitmap_rprimitive, is_fixed_width_rtype, object_rprimitive +from mypyc.ir.rtypes import RInstance, bitmap_rprimitive, object_rprimitive from mypyc.irbuild.builder import IRBuilder, SymbolTarget from mypyc.irbuild.context import FuncInfo, GeneratorClass, ImplicitClass from mypyc.irbuild.targets import AssignmentTargetAttr @@ -163,7 +163,7 @@ def num_bitmap_args(builder: IRBuilder, args: list[Argument]) -> int: n = 0 for arg in args: t = builder.type_to_rtype(arg.variable.type) - if is_fixed_width_rtype(t) and arg.kind.is_optional(): + if t.error_overlap and arg.kind.is_optional(): n += 1 return (n + (BITMAP_BITS - 1)) // BITMAP_BITS diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index b7d093cde7ee..3f5b795a1436 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -6,7 +6,7 @@ from __future__ import annotations -from typing import Callable, cast +from typing import Callable, Sequence, cast from mypy.nodes import ( ARG_POS, @@ -55,6 +55,7 @@ ComparisonOp, Integer, LoadAddress, + LoadLiteral, RaiseStandardError, Register, TupleGet, @@ -63,12 +64,14 @@ ) from mypyc.ir.rtypes import ( RTuple, + bool_rprimitive, int_rprimitive, 
is_fixed_width_rtype, is_int_rprimitive, is_list_rprimitive, is_none_rprimitive, object_rprimitive, + set_rprimitive, ) from mypyc.irbuild.ast_helpers import is_borrow_friendly_expr, process_conditional from mypyc.irbuild.builder import IRBuilder, int_borrow_friendly_op @@ -86,6 +89,7 @@ tokenizer_printf_style, ) from mypyc.irbuild.specialize import apply_function_specialization, apply_method_specialization +from mypyc.irbuild.util import bytes_from_str from mypyc.primitives.bytes_ops import bytes_slice_op from mypyc.primitives.dict_ops import dict_get_item_op, dict_new_op, dict_set_item_op from mypyc.primitives.generic_ops import iter_op @@ -93,7 +97,7 @@ from mypyc.primitives.list_ops import list_append_op, list_extend_op, list_slice_op from mypyc.primitives.misc_ops import ellipsis_op, get_module_dict_op, new_slice_op, type_op from mypyc.primitives.registry import CFunctionDescription, builtin_names -from mypyc.primitives.set_ops import set_add_op, set_update_op +from mypyc.primitives.set_ops import set_add_op, set_in_op, set_update_op from mypyc.primitives.str_ops import str_slice_op from mypyc.primitives.tuple_ops import list_tuple_op, tuple_slice_op @@ -266,13 +270,20 @@ def transform_super_expr(builder: IRBuilder, o: SuperExpr) -> Value: def transform_call_expr(builder: IRBuilder, expr: CallExpr) -> Value: + callee = expr.callee if isinstance(expr.analyzed, CastExpr): return translate_cast_expr(builder, expr.analyzed) elif isinstance(expr.analyzed, AssertTypeExpr): # Compile to a no-op. return builder.accept(expr.analyzed.expr) + elif ( + isinstance(callee, (NameExpr, MemberExpr)) + and isinstance(callee.node, TypeInfo) + and callee.node.is_newtype + ): + # A call to a NewType type is a no-op at runtime. 
+ return builder.accept(expr.args[0]) - callee = expr.callee if isinstance(callee, IndexExpr) and isinstance(callee.analyzed, TypeApplication): callee = callee.analyzed.expr # Unwrap type application @@ -606,6 +617,54 @@ def transform_conditional_expr(builder: IRBuilder, expr: ConditionalExpr) -> Val return target +def set_literal_values(builder: IRBuilder, items: Sequence[Expression]) -> list[object] | None: + values: list[object] = [] + for item in items: + const_value = constant_fold_expr(builder, item) + if const_value is not None: + values.append(const_value) + continue + + if isinstance(item, RefExpr): + if item.fullname == "builtins.None": + values.append(None) + elif item.fullname == "builtins.True": + values.append(True) + elif item.fullname == "builtins.False": + values.append(False) + elif isinstance(item, (BytesExpr, FloatExpr, ComplexExpr)): + # constant_fold_expr() doesn't handle these (yet?) + v = bytes_from_str(item.value) if isinstance(item, BytesExpr) else item.value + values.append(v) + elif isinstance(item, TupleExpr): + tuple_values = set_literal_values(builder, item.items) + if tuple_values is not None: + values.append(tuple(tuple_values)) + + if len(values) != len(items): + # Bail if not all items can be converted into values. + return None + return values + + +def precompute_set_literal(builder: IRBuilder, s: SetExpr) -> Value | None: + """Try to pre-compute a frozenset literal during module initialization. + + Return None if it's not possible. + + Supported items: + - Anything supported by irbuild.constant_fold.constant_fold_expr() + - None, True, and False + - Float, byte, and complex literals + - Tuple literals with only items listed above + """ + values = set_literal_values(builder, s.items) + if values is not None: + return builder.add(LoadLiteral(frozenset(values), set_rprimitive)) + + return None + + def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: # x in (...)/[...] # x not in (...)/[...] 
@@ -659,6 +718,23 @@ def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: else: return builder.true() + # x in {...} + # x not in {...} + if ( + first_op in ("in", "not in") + and len(e.operators) == 1 + and isinstance(e.operands[1], SetExpr) + ): + set_literal = precompute_set_literal(builder, e.operands[1]) + if set_literal is not None: + lhs = e.operands[0] + result = builder.builder.call_c( + set_in_op, [builder.accept(lhs), set_literal], e.line, bool_rprimitive + ) + if first_op == "not in": + return builder.unary_op(result, "not", e.line) + return result + if len(e.operators) == 1: # Special some common simple cases if first_op in ("is", "is not"): diff --git a/mypyc/irbuild/for_helpers.py b/mypyc/irbuild/for_helpers.py index fc67178af5de..61dbbe960eb2 100644 --- a/mypyc/irbuild/for_helpers.py +++ b/mypyc/irbuild/for_helpers.py @@ -17,6 +17,7 @@ Lvalue, MemberExpr, RefExpr, + SetExpr, TupleExpr, TypeAlias, ) @@ -469,12 +470,22 @@ def make_for_loop_generator( for_dict_gen.init(expr_reg, target_type) return for_dict_gen + iterable_expr_reg: Value | None = None + if isinstance(expr, SetExpr): + # Special case "for x in ". + from mypyc.irbuild.expression import precompute_set_literal + + set_literal = precompute_set_literal(builder, expr) + if set_literal is not None: + iterable_expr_reg = set_literal + # Default to a generic for loop. 
- expr_reg = builder.accept(expr) + if iterable_expr_reg is None: + iterable_expr_reg = builder.accept(expr) for_obj = ForIterable(builder, index, body_block, loop_exit, line, nested) item_type = builder._analyze_iterable_item_type(expr) item_rtype = builder.type_to_rtype(item_type) - for_obj.init(expr_reg, item_rtype) + for_obj.init(iterable_expr_reg, item_rtype) return for_obj diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index ea8d86ff0468..5262b74e2853 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -28,7 +28,7 @@ Var, ) from mypy.types import CallableType, get_proper_type -from mypyc.common import LAMBDA_NAME, SELF_NAME +from mypyc.common import LAMBDA_NAME, PROPSET_PREFIX, SELF_NAME from mypyc.ir.class_ir import ClassIR, NonExtClassInfo from mypyc.ir.func_ir import ( FUNC_CLASSMETHOD, @@ -89,7 +89,7 @@ from mypyc.primitives.generic_ops import py_setattr_op from mypyc.primitives.misc_ops import register_function from mypyc.primitives.registry import builtin_names -from mypyc.sametype import is_same_method_signature +from mypyc.sametype import is_same_method_signature, is_same_type # Top-level transform functions @@ -548,7 +548,7 @@ def is_decorated(builder: IRBuilder, fdef: FuncDef) -> bool: def gen_glue( builder: IRBuilder, - sig: FuncSignature, + base_sig: FuncSignature, target: FuncIR, cls: ClassIR, base: ClassIR, @@ -566,9 +566,9 @@ def gen_glue( "shadow" glue methods that work with interpreted subclasses. 
""" if fdef.is_property: - return gen_glue_property(builder, sig, target, cls, base, fdef.line, do_py_ops) + return gen_glue_property(builder, base_sig, target, cls, base, fdef.line, do_py_ops) else: - return gen_glue_method(builder, sig, target, cls, base, fdef.line, do_py_ops) + return gen_glue_method(builder, base_sig, target, cls, base, fdef.line, do_py_ops) class ArgInfo(NamedTuple): @@ -594,7 +594,7 @@ def get_args(builder: IRBuilder, rt_args: Sequence[RuntimeArg], line: int) -> Ar def gen_glue_method( builder: IRBuilder, - sig: FuncSignature, + base_sig: FuncSignature, target: FuncIR, cls: ClassIR, base: ClassIR, @@ -626,16 +626,25 @@ def f(builder: IRBuilder, x: object) -> int: ... If do_pycall is True, then make the call using the C API instead of a native call. """ + check_native_override(builder, base_sig, target.decl.sig, line) + builder.enter() - builder.ret_types[-1] = sig.ret_type + builder.ret_types[-1] = base_sig.ret_type - rt_args = list(sig.args) + rt_args = list(base_sig.args) if target.decl.kind == FUNC_NORMAL: - rt_args[0] = RuntimeArg(sig.args[0].name, RInstance(cls)) + rt_args[0] = RuntimeArg(base_sig.args[0].name, RInstance(cls)) arg_info = get_args(builder, rt_args, line) args, arg_kinds, arg_names = arg_info.args, arg_info.arg_kinds, arg_info.arg_names + bitmap_args = None + if base_sig.num_bitmap_args: + args = args[: -base_sig.num_bitmap_args] + arg_kinds = arg_kinds[: -base_sig.num_bitmap_args] + arg_names = arg_names[: -base_sig.num_bitmap_args] + bitmap_args = builder.builder.args[-base_sig.num_bitmap_args :] + # We can do a passthrough *args/**kwargs with a native call, but if the # args need to get distributed out to arguments, we just let python handle it if any(kind.is_star() for kind in arg_kinds) and any( @@ -655,11 +664,15 @@ def f(builder: IRBuilder, x: object) -> int: ... 
first, target.name, args[st:], line, arg_kinds[st:], arg_names[st:] ) else: - retval = builder.builder.call(target.decl, args, arg_kinds, arg_names, line) - retval = builder.coerce(retval, sig.ret_type, line) + retval = builder.builder.call( + target.decl, args, arg_kinds, arg_names, line, bitmap_args=bitmap_args + ) + retval = builder.coerce(retval, base_sig.ret_type, line) builder.add(Return(retval)) arg_regs, _, blocks, ret_type, _ = builder.leave() + if base_sig.num_bitmap_args: + rt_args = rt_args[: -base_sig.num_bitmap_args] return FuncIR( FuncDecl( target.name + "__" + base.name + "_glue", @@ -673,6 +686,35 @@ def f(builder: IRBuilder, x: object) -> int: ... ) +def check_native_override( + builder: IRBuilder, base_sig: FuncSignature, sub_sig: FuncSignature, line: int +) -> None: + """Report an error if an override changes signature in unsupported ways. + + Glue methods can work around many signature changes but not all of them. + """ + for base_arg, sub_arg in zip(base_sig.real_args(), sub_sig.real_args()): + if base_arg.type.error_overlap: + if not base_arg.optional and sub_arg.optional and base_sig.num_bitmap_args: + # This would change the meanings of bits in the argument defaults + # bitmap, which we don't support. We'd need to do tricky bit + # manipulations to support this generally. + builder.error( + "An argument with type " + + f'"{base_arg.type}" cannot be given a default value in a method override', + line, + ) + if base_arg.type.error_overlap or sub_arg.type.error_overlap: + if not is_same_type(base_arg.type, sub_arg.type): + # This would change from signaling a default via an error value to + # signaling a default via bitmap, which we don't support. 
+ builder.error( + "Incompatible argument type " + + f'"{sub_arg.type}" (base class has type "{base_arg.type}")', + line, + ) + + def gen_glue_property( builder: IRBuilder, sig: FuncSignature, @@ -747,7 +789,7 @@ def load_type(builder: IRBuilder, typ: TypeInfo, line: int) -> Value: def load_func(builder: IRBuilder, func_name: str, fullname: str | None, line: int) -> Value: - if fullname is not None and not fullname.startswith(builder.current_module): + if fullname and not fullname.startswith(builder.current_module): # we're calling a function in a different module # We can't use load_module_attr_by_fullname here because we need to load the function using @@ -984,3 +1026,42 @@ def get_native_impl_ids(builder: IRBuilder, singledispatch_func: FuncDef) -> dic """ impls = builder.singledispatch_impls[singledispatch_func] return {impl: i for i, (typ, impl) in enumerate(impls) if not is_decorated(builder, impl)} + + +def gen_property_getter_ir( + builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef, is_trait: bool +) -> FuncIR: + """Generate an implicit trivial property getter for an attribute. + + These are used if an attribute can also be accessed as a property. + """ + name = func_decl.name + builder.enter(name) + self_reg = builder.add_argument("self", func_decl.sig.args[0].type) + if not is_trait: + value = builder.builder.get_attr(self_reg, name, func_decl.sig.ret_type, -1) + builder.add(Return(value)) + else: + builder.add(Unreachable()) + args, _, blocks, ret_type, fn_info = builder.leave() + return FuncIR(func_decl, args, blocks) + + +def gen_property_setter_ir( + builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef, is_trait: bool +) -> FuncIR: + """Generate an implicit trivial property setter for an attribute. + + These are used if an attribute can also be accessed as a property. 
+ """ + name = func_decl.name + builder.enter(name) + self_reg = builder.add_argument("self", func_decl.sig.args[0].type) + value_reg = builder.add_argument("value", func_decl.sig.args[1].type) + assert name.startswith(PROPSET_PREFIX) + attr_name = name[len(PROPSET_PREFIX) :] + if not is_trait: + builder.add(SetAttr(self_reg, attr_name, value_reg, -1)) + builder.add(Return(builder.none())) + args, _, blocks, ret_type, fn_info = builder.leave() + return FuncIR(func_decl, args, blocks) diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index 25561382fdec..019f709f0acc 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -326,6 +326,17 @@ def coerce( ): # Equivalent types return src + elif ( + is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type) + ) and is_int_rprimitive(target_type): + shifted = self.int_op( + bool_rprimitive, src, Integer(1, bool_rprimitive), IntOp.LEFT_SHIFT + ) + return self.add(Extend(shifted, int_rprimitive, signed=False)) + elif ( + is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type) + ) and is_fixed_width_rtype(target_type): + return self.add(Extend(src, target_type, signed=False)) else: # To go from one unboxed type to another, we go through a boxed # in-between value, for simplicity. @@ -935,10 +946,19 @@ def call( arg_kinds: list[ArgKind], arg_names: Sequence[str | None], line: int, + *, + bitmap_args: list[Register] | None = None, ) -> Value: - """Call a native function.""" + """Call a native function. + + If bitmap_args is given, they override the values of (some) of the bitmap + arguments used to track the presence of values for certain arguments. By + default, the values of the bitmap arguments are inferred from args. + """ # Normalize args to positionals. 
- args = self.native_args_to_positional(args, arg_kinds, arg_names, decl.sig, line) + args = self.native_args_to_positional( + args, arg_kinds, arg_names, decl.sig, line, bitmap_args=bitmap_args + ) return self.add(Call(decl, args, line)) def native_args_to_positional( @@ -948,6 +968,8 @@ def native_args_to_positional( arg_names: Sequence[str | None], sig: FuncSignature, line: int, + *, + bitmap_args: list[Register] | None = None, ) -> list[Value]: """Prepare arguments for a native call. @@ -1015,10 +1037,15 @@ def native_args_to_positional( output_args.append(output_arg) for i in reversed(range(n)): + if bitmap_args and i < len(bitmap_args): + # Use override provided by caller + output_args.append(bitmap_args[i]) + continue + # Infer values of bitmap args bitmap = 0 c = 0 for lst, arg in zip(formal_to_actual, sig_args): - if arg.kind.is_optional() and is_fixed_width_rtype(arg.type): + if arg.kind.is_optional() and arg.type.error_overlap: if i * BITMAP_BITS <= c < (i + 1) * BITMAP_BITS: if lst: bitmap |= 1 << (c & (BITMAP_BITS - 1)) @@ -1626,35 +1653,38 @@ def shortcircuit_helper( self.activate_block(next_block) return target - def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: - if is_runtime_subtype(value.type, int_rprimitive): + def bool_value(self, value: Value) -> Value: + """Return bool(value). + + The result type can be bit_rprimitive or bool_rprimitive. 
+ """ + if is_bool_rprimitive(value.type) or is_bit_rprimitive(value.type): + result = value + elif is_runtime_subtype(value.type, int_rprimitive): zero = Integer(0, short_int_rprimitive) - self.compare_tagged_condition(value, zero, "!=", true, false, value.line) - return + result = self.comparison_op(value, zero, ComparisonOp.NEQ, value.line) elif is_fixed_width_rtype(value.type): zero = Integer(0, value.type) - value = self.add(ComparisonOp(value, zero, ComparisonOp.NEQ)) + result = self.add(ComparisonOp(value, zero, ComparisonOp.NEQ)) elif is_same_type(value.type, str_rprimitive): - value = self.call_c(str_check_if_true, [value], value.line) + result = self.call_c(str_check_if_true, [value], value.line) elif is_same_type(value.type, list_rprimitive) or is_same_type( value.type, dict_rprimitive ): length = self.builtin_len(value, value.line) zero = Integer(0) - value = self.binary_op(length, zero, "!=", value.line) + result = self.binary_op(length, zero, "!=", value.line) elif ( isinstance(value.type, RInstance) and value.type.class_ir.is_ext_class and value.type.class_ir.has_method("__bool__") ): # Directly call the __bool__ method on classes that have it. 
- value = self.gen_method_call(value, "__bool__", [], bool_rprimitive, value.line) + result = self.gen_method_call(value, "__bool__", [], bool_rprimitive, value.line) else: value_type = optional_value_type(value.type) if value_type is not None: - is_none = self.translate_is_op(value, self.none_object(), "is not", value.line) - branch = Branch(is_none, true, false, Branch.BOOL) - self.add(branch) + not_none = self.translate_is_op(value, self.none_object(), "is not", value.line) always_truthy = False if isinstance(value_type, RInstance): # check whether X.__bool__ is always just the default (object.__bool__) @@ -1663,18 +1693,55 @@ def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> ) and value_type.class_ir.is_method_final("__bool__"): always_truthy = True - if not always_truthy: - # Optional[X] where X may be falsey and requires a check - branch.true = BasicBlock() - self.activate_block(branch.true) + if always_truthy: + result = not_none + else: + # "X | None" where X may be falsey and requires a check + result = Register(bit_rprimitive) + true, false, end = BasicBlock(), BasicBlock(), BasicBlock() + branch = Branch(not_none, true, false, Branch.BOOL) + self.add(branch) + self.activate_block(true) # unbox_or_cast instead of coerce because we want the # type to change even if it is a subtype. 
remaining = self.unbox_or_cast(value, value_type, value.line) - self.add_bool_branch(remaining, true, false) - return - elif not is_bool_rprimitive(value.type) and not is_bit_rprimitive(value.type): - value = self.call_c(bool_op, [value], value.line) - self.add(Branch(value, true, false, Branch.BOOL)) + as_bool = self.bool_value(remaining) + self.add(Assign(result, as_bool)) + self.goto(end) + self.activate_block(false) + self.add(Assign(result, Integer(0, bit_rprimitive))) + self.goto(end) + self.activate_block(end) + else: + result = self.call_c(bool_op, [value], value.line) + return result + + def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: + opt_value_type = optional_value_type(value.type) + if opt_value_type is None: + bool_value = self.bool_value(value) + self.add(Branch(bool_value, true, false, Branch.BOOL)) + else: + # Special-case optional types + is_none = self.translate_is_op(value, self.none_object(), "is not", value.line) + branch = Branch(is_none, true, false, Branch.BOOL) + self.add(branch) + always_truthy = False + if isinstance(opt_value_type, RInstance): + # check whether X.__bool__ is always just the default (object.__bool__) + if not opt_value_type.class_ir.has_method( + "__bool__" + ) and opt_value_type.class_ir.is_method_final("__bool__"): + always_truthy = True + + if not always_truthy: + # Optional[X] where X may be falsey and requires a check + branch.true = BasicBlock() + self.activate_block(branch.true) + # unbox_or_cast instead of coerce because we want the + # type to change even if it is a subtype. 
+ remaining = self.unbox_or_cast(value, opt_value_type, value.line) + self.add_bool_branch(remaining, true, false) def call_c( self, @@ -1779,7 +1846,7 @@ def matching_call_c( return target return None - def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int) -> Value: + def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int = -1) -> Value: """Generate a native integer binary op. Use native/C semantics, which sometimes differ from Python diff --git a/mypyc/irbuild/mapper.py b/mypyc/irbuild/mapper.py index 4364b2b6c511..dddb35230fd5 100644 --- a/mypyc/irbuild/mapper.py +++ b/mypyc/irbuild/mapper.py @@ -116,7 +116,7 @@ def type_to_rtype(self, typ: Type | None) -> RType: elif isinstance(typ, NoneTyp): return none_rprimitive elif isinstance(typ, UnionType): - return RUnion([self.type_to_rtype(item) for item in typ.items]) + return RUnion.make_simplified_union([self.type_to_rtype(item) for item in typ.items]) elif isinstance(typ, AnyType): return object_rprimitive elif isinstance(typ, TypeType): diff --git a/mypyc/irbuild/match.py b/mypyc/irbuild/match.py new file mode 100644 index 000000000000..a1e671911ea5 --- /dev/null +++ b/mypyc/irbuild/match.py @@ -0,0 +1,355 @@ +from contextlib import contextmanager +from typing import Generator, List, Optional, Tuple + +from mypy.nodes import MatchStmt, NameExpr, TypeInfo +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + Pattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) +from mypy.traverser import TraverserVisitor +from mypy.types import Instance, TupleType, get_proper_type +from mypyc.ir.ops import BasicBlock, Value +from mypyc.ir.rtypes import object_rprimitive +from mypyc.irbuild.builder import IRBuilder +from mypyc.primitives.dict_ops import ( + dict_copy, + dict_del_item, + mapping_has_key, + supports_mapping_protocol, +) +from mypyc.primitives.generic_ops import generic_ssize_t_len_op +from 
mypyc.primitives.list_ops import ( + sequence_get_item, + sequence_get_slice, + supports_sequence_protocol, +) +from mypyc.primitives.misc_ops import fast_isinstance_op, slow_isinstance_op + +# From: https://peps.python.org/pep-0634/#class-patterns +MATCHABLE_BUILTINS = { + "builtins.bool", + "builtins.bytearray", + "builtins.bytes", + "builtins.dict", + "builtins.float", + "builtins.frozenset", + "builtins.int", + "builtins.list", + "builtins.set", + "builtins.str", + "builtins.tuple", +} + + +class MatchVisitor(TraverserVisitor): + builder: IRBuilder + code_block: BasicBlock + next_block: BasicBlock + final_block: BasicBlock + subject: Value + match: MatchStmt + + as_pattern: Optional[AsPattern] = None + + def __init__(self, builder: IRBuilder, match_node: MatchStmt) -> None: + self.builder = builder + + self.code_block = BasicBlock() + self.next_block = BasicBlock() + self.final_block = BasicBlock() + + self.match = match_node + self.subject = builder.accept(match_node.subject) + + def build_match_body(self, index: int) -> None: + self.builder.activate_block(self.code_block) + + guard = self.match.guards[index] + + if guard: + self.code_block = BasicBlock() + + cond = self.builder.accept(guard) + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + self.builder.activate_block(self.code_block) + + self.builder.accept(self.match.bodies[index]) + self.builder.goto(self.final_block) + + def visit_match_stmt(self, m: MatchStmt) -> None: + for i, pattern in enumerate(m.patterns): + self.code_block = BasicBlock() + self.next_block = BasicBlock() + + pattern.accept(self) + + self.build_match_body(i) + self.builder.activate_block(self.next_block) + + self.builder.goto_and_activate(self.final_block) + + def visit_value_pattern(self, pattern: ValuePattern) -> None: + value = self.builder.accept(pattern.expr) + + cond = self.builder.binary_op(self.subject, value, "==", pattern.expr.line) + + self.bind_as_pattern(value) + + 
self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + def visit_or_pattern(self, pattern: OrPattern) -> None: + backup_block = self.next_block + self.next_block = BasicBlock() + + for p in pattern.patterns: + # Hack to ensure the as pattern is bound to each pattern in the + # "or" pattern, but not every subpattern + backup = self.as_pattern + p.accept(self) + self.as_pattern = backup + + self.builder.activate_block(self.next_block) + self.next_block = BasicBlock() + + self.next_block = backup_block + self.builder.goto(self.next_block) + + def visit_class_pattern(self, pattern: ClassPattern) -> None: + # TODO: use faster instance check for native classes (while still + # making sure to account for inheritence) + isinstance_op = ( + fast_isinstance_op + if self.builder.is_builtin_ref_expr(pattern.class_ref) + else slow_isinstance_op + ) + + cond = self.builder.call_c( + isinstance_op, [self.subject, self.builder.accept(pattern.class_ref)], pattern.line + ) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + self.bind_as_pattern(self.subject, new_block=True) + + if pattern.positionals: + if pattern.class_ref.fullname in MATCHABLE_BUILTINS: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + pattern.positionals[0].accept(self) + + return + + node = pattern.class_ref.node + assert isinstance(node, TypeInfo) + + ty = node.names.get("__match_args__") + assert ty + + match_args_type = get_proper_type(ty.type) + assert isinstance(match_args_type, TupleType) + + match_args: List[str] = [] + + for item in match_args_type.items: + proper_item = get_proper_type(item) + assert isinstance(proper_item, Instance) and proper_item.last_known_value + + match_arg = proper_item.last_known_value.value + assert isinstance(match_arg, str) + + match_args.append(match_arg) + + for i, expr in enumerate(pattern.positionals): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + # TODO: use 
faster "get_attr" method instead when calling on native or + # builtin objects + positional = self.builder.py_get_attr(self.subject, match_args[i], expr.line) + + with self.enter_subpattern(positional): + expr.accept(self) + + for key, value in zip(pattern.keyword_keys, pattern.keyword_values): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + # TODO: same as above "get_attr" comment + attr = self.builder.py_get_attr(self.subject, key, value.line) + + with self.enter_subpattern(attr): + value.accept(self) + + def visit_as_pattern(self, pattern: AsPattern) -> None: + if pattern.pattern: + old_pattern = self.as_pattern + self.as_pattern = pattern + pattern.pattern.accept(self) + self.as_pattern = old_pattern + + elif pattern.name: + target = self.builder.get_assignment_target(pattern.name) + + self.builder.assign(target, self.subject, pattern.line) + + self.builder.goto(self.code_block) + + def visit_singleton_pattern(self, pattern: SingletonPattern) -> None: + if pattern.value is None: + obj = self.builder.none_object() + elif pattern.value is True: + obj = self.builder.true() + else: + obj = self.builder.false() + + cond = self.builder.binary_op(self.subject, obj, "is", pattern.line) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + def visit_mapping_pattern(self, pattern: MappingPattern) -> None: + is_dict = self.builder.call_c(supports_mapping_protocol, [self.subject], pattern.line) + + self.builder.add_bool_branch(is_dict, self.code_block, self.next_block) + + keys: List[Value] = [] + + for key, value in zip(pattern.keys, pattern.values): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + key_value = self.builder.accept(key) + keys.append(key_value) + + exists = self.builder.call_c(mapping_has_key, [self.subject, key_value], pattern.line) + + self.builder.add_bool_branch(exists, self.code_block, self.next_block) + self.builder.activate_block(self.code_block) + 
self.code_block = BasicBlock() + + item = self.builder.gen_method_call( + self.subject, "__getitem__", [key_value], object_rprimitive, pattern.line + ) + + with self.enter_subpattern(item): + value.accept(self) + + if pattern.rest: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + rest = self.builder.call_c(dict_copy, [self.subject], pattern.rest.line) + + target = self.builder.get_assignment_target(pattern.rest) + + self.builder.assign(target, rest, pattern.rest.line) + + for i, key_name in enumerate(keys): + self.builder.call_c(dict_del_item, [rest, key_name], pattern.keys[i].line) + + self.builder.goto(self.code_block) + + def visit_sequence_pattern(self, seq_pattern: SequencePattern) -> None: + star_index, capture, patterns = prep_sequence_pattern(seq_pattern) + + is_list = self.builder.call_c(supports_sequence_protocol, [self.subject], seq_pattern.line) + + self.builder.add_bool_branch(is_list, self.code_block, self.next_block) + + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + actual_len = self.builder.call_c(generic_ssize_t_len_op, [self.subject], seq_pattern.line) + min_len = len(patterns) + + is_long_enough = self.builder.binary_op( + actual_len, + self.builder.load_int(min_len), + "==" if star_index is None else ">=", + seq_pattern.line, + ) + + self.builder.add_bool_branch(is_long_enough, self.code_block, self.next_block) + + for i, pattern in enumerate(patterns): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + if star_index is not None and i >= star_index: + current = self.builder.binary_op( + actual_len, self.builder.load_int(min_len - i), "-", pattern.line + ) + + else: + current = self.builder.load_int(i) + + item = self.builder.call_c(sequence_get_item, [self.subject, current], pattern.line) + + with self.enter_subpattern(item): + pattern.accept(self) + + if capture and star_index is not None: + self.builder.activate_block(self.code_block) + 
self.code_block = BasicBlock() + + capture_end = self.builder.binary_op( + actual_len, self.builder.load_int(min_len - star_index), "-", capture.line + ) + + rest = self.builder.call_c( + sequence_get_slice, + [self.subject, self.builder.load_int(star_index), capture_end], + capture.line, + ) + + target = self.builder.get_assignment_target(capture) + self.builder.assign(target, rest, capture.line) + + self.builder.goto(self.code_block) + + def bind_as_pattern(self, value: Value, new_block: bool = False) -> None: + if self.as_pattern and self.as_pattern.pattern and self.as_pattern.name: + if new_block: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + target = self.builder.get_assignment_target(self.as_pattern.name) + self.builder.assign(target, value, self.as_pattern.pattern.line) + + self.as_pattern = None + + if new_block: + self.builder.goto(self.code_block) + + @contextmanager + def enter_subpattern(self, subject: Value) -> Generator[None, None, None]: + old_subject = self.subject + self.subject = subject + yield + self.subject = old_subject + + +def prep_sequence_pattern( + seq_pattern: SequencePattern, +) -> Tuple[Optional[int], Optional[NameExpr], List[Pattern]]: + star_index: Optional[int] = None + capture: Optional[NameExpr] = None + patterns: List[Pattern] = [] + + for i, pattern in enumerate(seq_pattern.patterns): + if isinstance(pattern, StarredPattern): + star_index = i + capture = pattern.capture + + else: + patterns.append(pattern) + + return star_index, capture, patterns diff --git a/mypyc/irbuild/prebuildvisitor.py b/mypyc/irbuild/prebuildvisitor.py index 7d52dc8da57c..d99453955002 100644 --- a/mypyc/irbuild/prebuildvisitor.py +++ b/mypyc/irbuild/prebuildvisitor.py @@ -162,12 +162,10 @@ def visit_symbol_node(self, symbol: SymbolNode) -> None: def is_parent(self, fitem: FuncItem, child: FuncItem) -> bool: # Check if child is nested within fdef (possibly indirectly # within multiple nested functions). 
- if child in self.nested_funcs: - parent = self.nested_funcs[child] - if parent == fitem: - return True - return self.is_parent(fitem, parent) - return False + if child not in self.nested_funcs: + return False + parent = self.nested_funcs[child] + return parent == fitem or self.is_parent(fitem, parent) def add_free_variable(self, symbol: SymbolNode) -> None: # Find the function where the symbol was (likely) first declared, diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index 82162d1d0d0e..3c519c3d1c33 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -50,7 +50,7 @@ RuntimeArg, ) from mypyc.ir.ops import DeserMaps -from mypyc.ir.rtypes import RInstance, dict_rprimitive, tuple_rprimitive +from mypyc.ir.rtypes import RInstance, RType, dict_rprimitive, none_rprimitive, tuple_rprimitive from mypyc.irbuild.mapper import Mapper from mypyc.irbuild.util import ( get_func_def, @@ -98,6 +98,12 @@ def build_type_map( else: prepare_non_ext_class_def(module.path, module.fullname, cdef, errors, mapper) + # Prepare implicit attribute accessors as needed if an attribute overrides a property. + for module, cdef in classes: + class_ir = mapper.type_to_ir[cdef.info] + if class_ir.is_ext_class: + prepare_implicit_property_accessors(cdef.info, class_ir, module.fullname, mapper) + # Collect all the functions also. We collect from the symbol table # so that we can easily pick out the right copy of a function that # is conditionally defined. @@ -168,6 +174,8 @@ def prepare_method_def( # works correctly. 
decl.name = PROPSET_PREFIX + decl.name decl.is_prop_setter = True + # Making the argument implicitly positional-only avoids unnecessary glue methods + decl.sig.args[1].pos_only = True ir.method_decls[PROPSET_PREFIX + node.name] = decl if node.func.is_property: @@ -178,15 +186,20 @@ def prepare_method_def( def is_valid_multipart_property_def(prop: OverloadedFuncDef) -> bool: # Checks to ensure supported property decorator semantics - if len(prop.items) == 2: - getter = prop.items[0] - setter = prop.items[1] - if isinstance(getter, Decorator) and isinstance(setter, Decorator): - if getter.func.is_property and len(setter.decorators) == 1: - if isinstance(setter.decorators[0], MemberExpr): - if setter.decorators[0].name == "setter": - return True - return False + if len(prop.items) != 2: + return False + + getter = prop.items[0] + setter = prop.items[1] + + return ( + isinstance(getter, Decorator) + and isinstance(setter, Decorator) + and getter.func.is_property + and len(setter.decorators) == 1 + and isinstance(setter.decorators[0], MemberExpr) + and setter.decorators[0].name == "setter" + ) def can_subclass_builtin(builtin_base: str) -> bool: @@ -207,6 +220,11 @@ def can_subclass_builtin(builtin_base: str) -> bool: def prepare_class_def( path: str, module_name: str, cdef: ClassDef, errors: Errors, mapper: Mapper ) -> None: + """Populate the interface-level information in a class IR. + + This includes attribute and method declarations, and the MRO, among other things, but + method bodies are generated in a later pass. + """ ir = mapper.type_to_ir[cdef.info] info = cdef.info @@ -218,8 +236,68 @@ def prepare_class_def( # Supports copy.copy and pickle (including subclasses) ir._serializable = True - # We sort the table for determinism here on Python 3.5 - for name, node in sorted(info.names.items()): + # Check for subclassing from builtin types + for cls in info.mro: + # Special case exceptions and dicts + # XXX: How do we handle *other* things?? 
+ if cls.fullname == "builtins.BaseException": + ir.builtin_base = "PyBaseExceptionObject" + elif cls.fullname == "builtins.dict": + ir.builtin_base = "PyDictObject" + elif cls.fullname.startswith("builtins."): + if not can_subclass_builtin(cls.fullname): + # Note that if we try to subclass a C extension class that + # isn't in builtins, bad things will happen and we won't + # catch it here! But this should catch a lot of the most + # common pitfalls. + errors.error( + "Inheriting from most builtin types is unimplemented", path, cdef.line + ) + + # Set up the parent class + bases = [mapper.type_to_ir[base.type] for base in info.bases if base.type in mapper.type_to_ir] + if not all(c.is_trait for c in bases[1:]): + errors.error("Non-trait bases must appear first in parent list", path, cdef.line) + ir.traits = [c for c in bases if c.is_trait] + + mro = [] # All mypyc base classes + base_mro = [] # Non-trait mypyc base classes + for cls in info.mro: + if cls not in mapper.type_to_ir: + if cls.fullname != "builtins.object": + ir.inherits_python = True + continue + base_ir = mapper.type_to_ir[cls] + if not base_ir.is_trait: + base_mro.append(base_ir) + mro.append(base_ir) + + if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class: + ir.inherits_python = True + + base_idx = 1 if not ir.is_trait else 0 + if len(base_mro) > base_idx: + ir.base = base_mro[base_idx] + ir.mro = mro + ir.base_mro = base_mro + + prepare_methods_and_attributes(cdef, ir, path, module_name, errors, mapper) + prepare_init_method(cdef, ir, module_name, mapper) + + for base in bases: + if base.children is not None: + base.children.append(ir) + + if is_dataclass(cdef): + ir.is_augmented = True + + +def prepare_methods_and_attributes( + cdef: ClassDef, ir: ClassIR, path: str, module_name: str, errors: Errors, mapper: Mapper +) -> None: + """Populate attribute and method declarations.""" + info = cdef.info + for name, node in info.names.items(): # Currently all plugin generated methods are 
dummies and not included. if node.plugin_generated: continue @@ -227,7 +305,15 @@ def prepare_class_def( if isinstance(node.node, Var): assert node.node.type, "Class member %s missing type" % name if not node.node.is_classvar and name not in ("__slots__", "__deletable__"): - ir.attributes[name] = mapper.type_to_rtype(node.node.type) + attr_rtype = mapper.type_to_rtype(node.node.type) + if ir.is_trait and attr_rtype.error_overlap: + # Traits don't have attribute definedness bitmaps, so use + # property accessor methods to access attributes that need them. + # We will generate accessor implementations that use the class bitmap + # for any concrete subclasses. + add_getter_declaration(ir, name, attr_rtype, module_name) + add_setter_declaration(ir, name, attr_rtype, module_name) + ir.attributes[name] = attr_rtype elif isinstance(node.node, (FuncDef, Decorator)): prepare_method_def(ir, module_name, cdef, mapper, node.node) elif isinstance(node.node, OverloadedFuncDef): @@ -244,27 +330,86 @@ def prepare_class_def( assert node.node.impl prepare_method_def(ir, module_name, cdef, mapper, node.node.impl) - # Check for subclassing from builtin types - for cls in info.mro: - # Special case exceptions and dicts - # XXX: How do we handle *other* things?? - if cls.fullname == "builtins.BaseException": - ir.builtin_base = "PyBaseExceptionObject" - elif cls.fullname == "builtins.dict": - ir.builtin_base = "PyDictObject" - elif cls.fullname.startswith("builtins."): - if not can_subclass_builtin(cls.fullname): - # Note that if we try to subclass a C extension class that - # isn't in builtins, bad things will happen and we won't - # catch it here! But this should catch a lot of the most - # common pitfalls. 
- errors.error( - "Inheriting from most builtin types is unimplemented", path, cdef.line - ) - if ir.builtin_base: ir.attributes.clear() + +def prepare_implicit_property_accessors( + info: TypeInfo, ir: ClassIR, module_name: str, mapper: Mapper +) -> None: + concrete_attributes = set() + for base in ir.base_mro: + for name, attr_rtype in base.attributes.items(): + concrete_attributes.add(name) + add_property_methods_for_attribute_if_needed( + info, ir, name, attr_rtype, module_name, mapper + ) + for base in ir.mro[1:]: + if base.is_trait: + for name, attr_rtype in base.attributes.items(): + if name not in concrete_attributes: + add_property_methods_for_attribute_if_needed( + info, ir, name, attr_rtype, module_name, mapper + ) + + +def add_property_methods_for_attribute_if_needed( + info: TypeInfo, + ir: ClassIR, + attr_name: str, + attr_rtype: RType, + module_name: str, + mapper: Mapper, +) -> None: + """Add getter and/or setter for attribute if defined as property in a base class. + + Only add declarations. The body IR will be synthesized later during irbuild. 
+ """ + for base in info.mro[1:]: + if base in mapper.type_to_ir: + base_ir = mapper.type_to_ir[base] + n = base.names.get(attr_name) + if n is None: + continue + node = n.node + if isinstance(node, Decorator) and node.name not in ir.method_decls: + # Defined as a read-only property in base class/trait + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + elif isinstance(node, OverloadedFuncDef) and is_valid_multipart_property_def(node): + # Defined as a read-write property in base class/trait + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + add_setter_declaration(ir, attr_name, attr_rtype, module_name) + elif base_ir.is_trait and attr_rtype.error_overlap: + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + add_setter_declaration(ir, attr_name, attr_rtype, module_name) + + +def add_getter_declaration( + ir: ClassIR, attr_name: str, attr_rtype: RType, module_name: str +) -> None: + self_arg = RuntimeArg("self", RInstance(ir), pos_only=True) + sig = FuncSignature([self_arg], attr_rtype) + decl = FuncDecl(attr_name, ir.name, module_name, sig, FUNC_NORMAL) + decl.is_prop_getter = True + decl.implicit = True # Triggers synthesization + ir.method_decls[attr_name] = decl + ir.property_types[attr_name] = attr_rtype # TODO: Needed?? 
+ + +def add_setter_declaration( + ir: ClassIR, attr_name: str, attr_rtype: RType, module_name: str +) -> None: + self_arg = RuntimeArg("self", RInstance(ir), pos_only=True) + value_arg = RuntimeArg("value", attr_rtype, pos_only=True) + sig = FuncSignature([self_arg, value_arg], none_rprimitive) + setter_name = PROPSET_PREFIX + attr_name + decl = FuncDecl(setter_name, ir.name, module_name, sig, FUNC_NORMAL) + decl.is_prop_setter = True + decl.implicit = True # Triggers synthesization + ir.method_decls[setter_name] = decl + + +def prepare_init_method(cdef: ClassDef, ir: ClassIR, module_name: str, mapper: Mapper) -> None: # Set up a constructor decl init_node = cdef.info["__init__"].node if not ir.is_trait and not ir.builtin_base and isinstance(init_node, FuncDef): @@ -293,40 +438,6 @@ def prepare_class_def( ir.ctor = FuncDecl(cdef.name, None, module_name, ctor_sig) mapper.func_to_decl[cdef.info] = ir.ctor - # Set up the parent class - bases = [mapper.type_to_ir[base.type] for base in info.bases if base.type in mapper.type_to_ir] - if not all(c.is_trait for c in bases[1:]): - errors.error("Non-trait bases must appear first in parent list", path, cdef.line) - ir.traits = [c for c in bases if c.is_trait] - - mro = [] - base_mro = [] - for cls in info.mro: - if cls not in mapper.type_to_ir: - if cls.fullname != "builtins.object": - ir.inherits_python = True - continue - base_ir = mapper.type_to_ir[cls] - if not base_ir.is_trait: - base_mro.append(base_ir) - mro.append(base_ir) - - if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class: - ir.inherits_python = True - - base_idx = 1 if not ir.is_trait else 0 - if len(base_mro) > base_idx: - ir.base = base_mro[base_idx] - ir.mro = mro - ir.base_mro = base_mro - - for base in bases: - if base.children is not None: - base.children.append(ir) - - if is_dataclass(cdef): - ir.is_augmented = True - def prepare_non_ext_class_def( path: str, module_name: str, cdef: ClassDef, errors: Errors, mapper: Mapper diff --git 
a/mypyc/irbuild/specialize.py b/mypyc/irbuild/specialize.py index 5810482cd43d..8cb24c5b47da 100644 --- a/mypyc/irbuild/specialize.py +++ b/mypyc/irbuild/specialize.py @@ -51,7 +51,10 @@ dict_rprimitive, int32_rprimitive, int64_rprimitive, + int_rprimitive, + is_bool_rprimitive, is_dict_rprimitive, + is_fixed_width_rtype, is_int32_rprimitive, is_int64_rprimitive, is_int_rprimitive, @@ -154,14 +157,26 @@ def translate_globals(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Va @specialize_function("builtins.abs") -def translate_abs(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: - """Specialize calls on native classes that implement __abs__.""" - if len(expr.args) == 1 and expr.arg_kinds == [ARG_POS]: +@specialize_function("builtins.int") +@specialize_function("builtins.float") +@specialize_function("builtins.complex") +@specialize_function("mypy_extensions.i64") +@specialize_function("mypy_extensions.i32") +def translate_builtins_with_unary_dunder( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Specialize calls on native classes that implement the associated dunder.""" + if len(expr.args) == 1 and expr.arg_kinds == [ARG_POS] and isinstance(callee, NameExpr): arg = expr.args[0] arg_typ = builder.node_type(arg) - if isinstance(arg_typ, RInstance) and arg_typ.class_ir.has_method("__abs__"): + shortname = callee.fullname.split(".")[1] + if shortname in ("i64", "i32"): + method = "__int__" + else: + method = f"__{shortname}__" + if isinstance(arg_typ, RInstance) and arg_typ.class_ir.has_method(method): obj = builder.accept(arg) - return builder.gen_method_call(obj, "__abs__", [], None, expr.line) + return builder.gen_method_call(obj, method, [], None, expr.line) return None @@ -667,7 +682,7 @@ def translate_i64(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value elif is_int32_rprimitive(arg_type): val = builder.accept(arg) return builder.add(Extend(val, int64_rprimitive, signed=True, line=expr.line)) 
- elif is_int_rprimitive(arg_type): + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): val = builder.accept(arg) return builder.coerce(val, int64_rprimitive, expr.line) return None @@ -684,7 +699,32 @@ def translate_i32(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value elif is_int64_rprimitive(arg_type): val = builder.accept(arg) return builder.add(Truncate(val, int32_rprimitive, line=expr.line)) - elif is_int_rprimitive(arg_type): + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): val = builder.accept(arg) return builder.coerce(val, int32_rprimitive, expr.line) return None + + +@specialize_function("builtins.int") +def translate_int(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if ( + is_bool_rprimitive(arg_type) + or is_int_rprimitive(arg_type) + or is_fixed_width_rtype(arg_type) + ): + src = builder.accept(arg) + return builder.coerce(src, int_rprimitive, expr.line) + return None + + +@specialize_function("builtins.bool") +def translate_bool(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + src = builder.accept(arg) + return builder.builder.bool_value(src) diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 371a305e67b9..b9754ba1a147 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -28,6 +28,7 @@ ImportFrom, ListExpr, Lvalue, + MatchStmt, OperatorAssignmentStmt, RaiseStmt, ReturnStmt, @@ -49,6 +50,7 @@ Integer, LoadAddress, LoadErrorValue, + MethodCall, RaiseStandardError, Register, Return, @@ -60,6 +62,7 @@ RInstance, exc_rtuple, is_tagged, + none_rprimitive, object_pointer_rprimitive, object_rprimitive, ) @@ -99,6 +102,8 @@ yield_from_except_op, ) +from .match import MatchVisitor + GenFunc = 
Callable[[], None] ValueGenFunc = Callable[[], Value] @@ -616,6 +621,8 @@ def transform_try_stmt(builder: IRBuilder, t: TryStmt) -> None: # constructs that we compile separately. When we have a # try/except/else/finally, we treat the try/except/else as the # body of a try/finally block. + if t.is_star: + builder.error("Exception groups and except* cannot be compiled yet", t.line) if t.finally_body: def transform_try_body() -> None: @@ -652,14 +659,45 @@ def transform_with( al = "a" if is_async else "" mgr_v = builder.accept(expr) - typ = builder.call_c(type_op, [mgr_v], line) - exit_ = builder.maybe_spill(builder.py_get_attr(typ, f"__{al}exit__", line)) - value = builder.py_call(builder.py_get_attr(typ, f"__{al}enter__", line), [mgr_v], line) + is_native = isinstance(mgr_v.type, RInstance) + if is_native: + value = builder.add(MethodCall(mgr_v, f"__{al}enter__", args=[], line=line)) + exit_ = None + else: + typ = builder.call_c(type_op, [mgr_v], line) + exit_ = builder.maybe_spill(builder.py_get_attr(typ, f"__{al}exit__", line)) + value = builder.py_call(builder.py_get_attr(typ, f"__{al}enter__", line), [mgr_v], line) + mgr = builder.maybe_spill(mgr_v) exc = builder.maybe_spill_assignable(builder.true()) if is_async: value = emit_await(builder, value, line) + def maybe_natively_call_exit(exc_info: bool) -> Value: + if exc_info: + args = get_sys_exc_info(builder) + else: + none = builder.none_object() + args = [none, none, none] + + if is_native: + assert isinstance(mgr_v.type, RInstance) + exit_val = builder.gen_method_call( + builder.read(mgr), + f"__{al}exit__", + arg_values=args, + line=line, + result_type=none_rprimitive, + ) + else: + assert exit_ is not None + exit_val = builder.py_call(builder.read(exit_), [builder.read(mgr)] + args, line) + + if is_async: + return emit_await(builder, exit_val, line) + else: + return exit_val + def try_body() -> None: if target: builder.assign(builder.get_assignment_target(target), value, line) @@ -668,13 +706,7 @@ def 
try_body() -> None: def except_body() -> None: builder.assign(exc, builder.false(), line) out_block, reraise_block = BasicBlock(), BasicBlock() - exit_val = builder.py_call( - builder.read(exit_), [builder.read(mgr)] + get_sys_exc_info(builder), line - ) - if is_async: - exit_val = emit_await(builder, exit_val, line) - - builder.add_bool_branch(exit_val, out_block, reraise_block) + builder.add_bool_branch(maybe_natively_call_exit(exc_info=True), out_block, reraise_block) builder.activate_block(reraise_block) builder.call_c(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) builder.add(Unreachable()) @@ -684,13 +716,8 @@ def finally_body() -> None: out_block, exit_block = BasicBlock(), BasicBlock() builder.add(Branch(builder.read(exc), exit_block, out_block, Branch.BOOL)) builder.activate_block(exit_block) - none = builder.none_object() - exit_val = builder.py_call( - builder.read(exit_), [builder.read(mgr), none, none, none], line - ) - if is_async: - emit_await(builder, exit_val, line) + maybe_natively_call_exit(exc_info=False) builder.goto_and_activate(out_block) transform_try_finally_stmt( @@ -896,3 +923,7 @@ def transform_yield_from_expr(builder: IRBuilder, o: YieldFromExpr) -> Value: def transform_await_expr(builder: IRBuilder, o: AwaitExpr) -> Value: return emit_yield_from_or_await(builder, builder.accept(o.expr), o.line, is_await=True) + + +def transform_match_stmt(builder: IRBuilder, m: MatchStmt) -> None: + m.accept(MatchVisitor(builder, m)) diff --git a/mypyc/irbuild/util.py b/mypyc/irbuild/util.py index f50241b96cb3..ed01a59d1214 100644 --- a/mypyc/irbuild/util.py +++ b/mypyc/irbuild/util.py @@ -177,3 +177,13 @@ def is_constant(e: Expression) -> bool: ) ) ) + + +def bytes_from_str(value: str) -> bytes: + """Convert a string representing bytes into actual bytes. + + This is needed because the literal characters of BytesExpr (the + characters inside b'') are stored in BytesExpr.value, whose type is + 'str' not 'bytes'. 
+ """ + return bytes(value, "utf8").decode("unicode-escape").encode("raw-unicode-escape") diff --git a/mypyc/irbuild/visitor.py b/mypyc/irbuild/visitor.py index dc126d410409..d8725ee04dc5 100644 --- a/mypyc/irbuild/visitor.py +++ b/mypyc/irbuild/visitor.py @@ -131,6 +131,7 @@ transform_import, transform_import_all, transform_import_from, + transform_match_stmt, transform_operator_assignment_stmt, transform_raise_stmt, transform_return_stmt, @@ -242,7 +243,7 @@ def visit_nonlocal_decl(self, stmt: NonlocalDecl) -> None: pass def visit_match_stmt(self, stmt: MatchStmt) -> None: - self.bail("Match statements are not yet supported", stmt.line) + transform_match_stmt(self.builder, stmt) # Expressions diff --git a/mypyc/irbuild/vtable.py b/mypyc/irbuild/vtable.py index a02cd622cee1..2d4f7261e4ca 100644 --- a/mypyc/irbuild/vtable.py +++ b/mypyc/irbuild/vtable.py @@ -40,7 +40,7 @@ def compute_vtable(cls: ClassIR) -> None: for t in [cls] + cls.traits: for fn in itertools.chain(t.methods.values()): # TODO: don't generate a new entry when we overload without changing the type - if fn == cls.get_method(fn.name): + if fn == cls.get_method(fn.name, prefer_method=True): cls.vtable[fn.name] = len(entries) # If the class contains a glue method referring to itself, that is a # shadow glue method to support interpreted subclasses. @@ -60,9 +60,9 @@ def specialize_parent_vtable(cls: ClassIR, parent: ClassIR) -> VTableEntries: for entry in parent.vtable_entries: # Find the original method corresponding to this vtable entry. # (This may not be the method in the entry, if it was overridden.) 
- orig_parent_method = entry.cls.get_method(entry.name) + orig_parent_method = entry.cls.get_method(entry.name, prefer_method=True) assert orig_parent_method - method_cls = cls.get_method_and_class(entry.name) + method_cls = cls.get_method_and_class(entry.name, prefer_method=True) if method_cls: child_method, defining_cls = method_cls # TODO: emit a wrapper for __init__ that raises or something diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index cffbbb3e1666..befa397051ef 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -371,6 +371,7 @@ CPyTagged CPyList_Index(PyObject *list, PyObject *obj); PyObject *CPySequence_Multiply(PyObject *seq, CPyTagged t_size); PyObject *CPySequence_RMultiply(CPyTagged t_size, PyObject *seq); PyObject *CPyList_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); +int CPySequence_Check(PyObject *obj); // Dict operations @@ -402,6 +403,7 @@ PyObject *CPyDict_GetValuesIter(PyObject *dict); tuple_T3CIO CPyDict_NextKey(PyObject *dict_or_iter, CPyTagged offset); tuple_T3CIO CPyDict_NextValue(PyObject *dict_or_iter, CPyTagged offset); tuple_T4CIOO CPyDict_NextItem(PyObject *dict_or_iter, CPyTagged offset); +int CPyMapping_Check(PyObject *obj); // Check that dictionary didn't change size during iteration. static inline char CPyDict_CheckSize(PyObject *dict, CPyTagged size) { @@ -497,13 +499,8 @@ static inline bool CPy_KeepPropagating(void) { } // We want to avoid the public PyErr_GetExcInfo API for these because // it requires a bunch of spurious refcount traffic on the parts of -// the triple we don't care about. Unfortunately the layout of the -// data structure changed in 3.7 so we need to handle that. -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 7 +// the triple we don't care about. 
#define CPy_ExcState() PyThreadState_GET()->exc_info -#else -#define CPy_ExcState() PyThreadState_GET() -#endif void CPy_Raise(PyObject *exc); void CPy_Reraise(void); @@ -525,7 +522,7 @@ void CPy_AttributeError(const char *filename, const char *funcname, const char * // Misc operations -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 8 +#if PY_VERSION_HEX >= 0x03080000 #define CPy_TRASHCAN_BEGIN(op, dealloc) Py_TRASHCAN_BEGIN(op, dealloc) #define CPy_TRASHCAN_END(op) Py_TRASHCAN_END #else @@ -600,7 +597,8 @@ int CPyStatics_Initialize(PyObject **statics, const char * const *ints, const double *floats, const double *complex_numbers, - const int *tuples); + const int *tuples, + const int *frozensets); PyObject *CPy_Super(PyObject *builtins, PyObject *self); PyObject *CPy_CallReverseOpMethod(PyObject *left, PyObject *right, const char *op, _Py_Identifier *method); diff --git a/mypyc/lib-rt/dict_ops.c b/mypyc/lib-rt/dict_ops.c index b013a8a5f0b9..ba565257fd72 100644 --- a/mypyc/lib-rt/dict_ops.c +++ b/mypyc/lib-rt/dict_ops.c @@ -5,6 +5,10 @@ #include #include "CPy.h" +#ifndef Py_TPFLAGS_MAPPING +#define Py_TPFLAGS_MAPPING (1 << 6) +#endif + // Dict subclasses like defaultdict override things in interesting // ways, so we don't want to just directly use the dict methods. 
Not // sure if it is actually worth doing all this stuff, but it saves @@ -436,3 +440,7 @@ tuple_T4CIOO CPyDict_NextItem(PyObject *dict_or_iter, CPyTagged offset) { Py_INCREF(ret.f3); return ret; } + +int CPyMapping_Check(PyObject *obj) { + return Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MAPPING; +} diff --git a/mypyc/lib-rt/getargsfast.c b/mypyc/lib-rt/getargsfast.c index afb161e643c7..387deed4399b 100644 --- a/mypyc/lib-rt/getargsfast.c +++ b/mypyc/lib-rt/getargsfast.c @@ -18,9 +18,6 @@ #include #include "CPy.h" -/* None of this is supported on Python 3.6 or earlier */ -#if PY_VERSION_HEX >= 0x03070000 - #define PARSER_INITED(parser) ((parser)->kwtuple != NULL) /* Forward */ @@ -570,5 +567,3 @@ skipitem_fast(const char **p_format, va_list *p_va) *p_format = format; } - -#endif diff --git a/mypyc/lib-rt/list_ops.c b/mypyc/lib-rt/list_ops.c index cb72662e22ee..df87228a0d10 100644 --- a/mypyc/lib-rt/list_ops.c +++ b/mypyc/lib-rt/list_ops.c @@ -5,6 +5,10 @@ #include #include "CPy.h" +#ifndef Py_TPFLAGS_SEQUENCE +#define Py_TPFLAGS_SEQUENCE (1 << 5) +#endif + PyObject *CPyList_Build(Py_ssize_t len, ...) 
{ Py_ssize_t i; @@ -325,3 +329,7 @@ PyObject *CPyList_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { } return CPyObject_GetSlice(obj, start, end); } + +int CPySequence_Check(PyObject *obj) { + return Py_TYPE(obj)->tp_flags & Py_TPFLAGS_SEQUENCE; +} diff --git a/mypyc/lib-rt/misc_ops.c b/mypyc/lib-rt/misc_ops.c index 25f33c5f56c7..5fda78704bbc 100644 --- a/mypyc/lib-rt/misc_ops.c +++ b/mypyc/lib-rt/misc_ops.c @@ -535,7 +535,8 @@ int CPyStatics_Initialize(PyObject **statics, const char * const *ints, const double *floats, const double *complex_numbers, - const int *tuples) { + const int *tuples, + const int *frozensets) { PyObject **result = statics; // Start with some hard-coded values *result++ = Py_None; @@ -635,6 +636,24 @@ int CPyStatics_Initialize(PyObject **statics, *result++ = obj; } } + if (frozensets) { + int num = *frozensets++; + while (num-- > 0) { + int num_items = *frozensets++; + PyObject *obj = PyFrozenSet_New(NULL); + if (obj == NULL) { + return -1; + } + for (int i = 0; i < num_items; i++) { + PyObject *item = statics[*frozensets++]; + Py_INCREF(item); + if (PySet_Add(obj, item) == -1) { + return -1; + } + } + *result++ = obj; + } + } return 0; } diff --git a/mypyc/lib-rt/pythonsupport.h b/mypyc/lib-rt/pythonsupport.h index cd66c4cb4df8..8a1159a98853 100644 --- a/mypyc/lib-rt/pythonsupport.h +++ b/mypyc/lib-rt/pythonsupport.h @@ -22,7 +22,6 @@ extern "C" { ///////////////////////////////////////// // Adapted from bltinmodule.c in Python 3.7.0 -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 7 _Py_IDENTIFIER(__mro_entries__); static PyObject* update_bases(PyObject *bases) @@ -96,16 +95,8 @@ update_bases(PyObject *bases) Py_XDECREF(new_bases); return NULL; } -#else -static PyObject* -update_bases(PyObject *bases) -{ - return bases; -} -#endif // From Python 3.7's typeobject.c -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 6 _Py_IDENTIFIER(__init_subclass__); static int init_subclass(PyTypeObject *type, PyObject *kwds) @@ -134,14 +125,6 
@@ init_subclass(PyTypeObject *type, PyObject *kwds) return 0; } -#else -static int -init_subclass(PyTypeObject *type, PyObject *kwds) -{ - return 0; -} -#endif - // Adapted from longobject.c in Python 3.7.0 /* This function adapted from PyLong_AsLongLongAndOverflow, but with @@ -306,7 +289,7 @@ list_count(PyListObject *self, PyObject *value) return CPyTagged_ShortFromSsize_t(count); } -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 8 +#if PY_VERSION_HEX < 0x03080000 static PyObject * _PyDict_GetItemStringWithError(PyObject *v, const char *key) { @@ -321,13 +304,7 @@ _PyDict_GetItemStringWithError(PyObject *v, const char *key) } #endif -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 6 -/* _PyUnicode_EqualToASCIIString got added in 3.5.3 (argh!) so we can't actually know - * whether it will be present at runtime, so we just assume we don't have it in 3.5. */ -#define CPyUnicode_EqualToASCIIString(x, y) (PyUnicode_CompareWithASCIIString((x), (y)) == 0) -#elif PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 6 #define CPyUnicode_EqualToASCIIString(x, y) _PyUnicode_EqualToASCIIString(x, y) -#endif // Adapted from genobject.c in Python 3.7.2 // Copied because it wasn't in 3.5.2 and it is undocumented anyways. 
@@ -390,7 +367,7 @@ _CPyDictView_New(PyObject *dict, PyTypeObject *type) } #endif -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >=10 +#if PY_VERSION_HEX >= 0x030A0000 // 3.10 static int _CPyObject_HasAttrId(PyObject *v, _Py_Identifier *name) { PyObject *tmp = NULL; @@ -404,7 +381,7 @@ _CPyObject_HasAttrId(PyObject *v, _Py_Identifier *name) { #define _CPyObject_HasAttrId _PyObject_HasAttrId #endif -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 9 +#if PY_VERSION_HEX < 0x03090000 // OneArgs and NoArgs functions got added in 3.9 #define _PyObject_CallMethodIdNoArgs(self, name) \ _PyObject_CallMethodIdObjArgs((self), (name), NULL) diff --git a/mypyc/options.py b/mypyc/options.py index d554cbed164f..5f0cf12aeefe 100644 --- a/mypyc/options.py +++ b/mypyc/options.py @@ -13,6 +13,7 @@ def __init__( target_dir: str | None = None, include_runtime_files: bool | None = None, capi_version: tuple[int, int] | None = None, + python_version: tuple[int, int] | None = None, ) -> None: self.strip_asserts = strip_asserts self.multi_file = multi_file @@ -28,3 +29,4 @@ def __init__( # binaries are backward compatible even if no recent API # features are used. 
self.capi_version = capi_version or sys.version_info[:2] + self.python_version = python_version diff --git a/mypyc/primitives/dict_ops.py b/mypyc/primitives/dict_ops.py index d1dca5a79e63..9f477d0b7b90 100644 --- a/mypyc/primitives/dict_ops.py +++ b/mypyc/primitives/dict_ops.py @@ -63,7 +63,7 @@ ) # Generic one-argument dict constructor: dict(obj) -function_op( +dict_copy = function_op( name="builtins.dict", arg_types=[object_rprimitive], return_type=dict_rprimitive, @@ -301,3 +301,25 @@ c_function_name="PyDict_Size", error_kind=ERR_NEVER, ) + +# Delete an item from a dict +dict_del_item = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyDict_DelItem", + error_kind=ERR_NEG_INT, +) + +supports_mapping_protocol = custom_op( + arg_types=[object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyMapping_Check", + error_kind=ERR_NEVER, +) + +mapping_has_key = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyMapping_HasKey", + error_kind=ERR_NEVER, +) diff --git a/mypyc/primitives/int_ops.py b/mypyc/primitives/int_ops.py index 55ef16ef5466..7eda9bab7e3c 100644 --- a/mypyc/primitives/int_ops.py +++ b/mypyc/primitives/int_ops.py @@ -35,39 +35,43 @@ unary_op, ) -# These int constructors produce object_rprimitives that then need to be unboxed -# I guess unboxing ourselves would save a check and branch though? - -# Get the type object for 'builtins.int'. -# For ordinary calls to int() we use a load_address to the type -load_address_op(name="builtins.int", type=object_rprimitive, src="PyLong_Type") - -# int(float). We could do a bit better directly. -function_op( - name="builtins.int", - arg_types=[float_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromFloat", - error_kind=ERR_MAGIC, -) +# Constructors for builtins.int and native int types have the same behavior. 
In +# interpreted mode, native int types are just aliases to 'int'. +for int_name in ("builtins.int", "mypy_extensions.i64", "mypy_extensions.i32"): + # These int constructors produce object_rprimitives that then need to be unboxed + # I guess unboxing ourselves would save a check and branch though? + + # Get the type object for 'builtins.int' or a native int type. + # For ordinary calls to int() we use a load_address to the type. + # Native ints don't have a separate type object -- we just use 'builtins.int'. + load_address_op(name=int_name, type=object_rprimitive, src="PyLong_Type") + + # int(float). We could do a bit better directly. + function_op( + name=int_name, + arg_types=[float_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromFloat", + error_kind=ERR_MAGIC, + ) -# int(string) -function_op( - name="builtins.int", - arg_types=[str_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromStr", - error_kind=ERR_MAGIC, -) + # int(string) + function_op( + name=int_name, + arg_types=[str_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromStr", + error_kind=ERR_MAGIC, + ) -# int(string, base) -function_op( - name="builtins.int", - arg_types=[str_rprimitive, int_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromStrWithBase", - error_kind=ERR_MAGIC, -) + # int(string, base) + function_op( + name=int_name, + arg_types=[str_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromStrWithBase", + error_kind=ERR_MAGIC, + ) # str(int) int_to_str_op = function_op( @@ -160,15 +164,11 @@ def int_unary_op(name: str, c_function_name: str) -> CFunctionDescription: # c_func_description: the C function to call when operands are tagged integers # c_func_negated: whether to negate the C function call's result # c_func_swap_operands: whether to swap lhs and rhs when call the function -IntComparisonOpDescription = NamedTuple( - 
"IntComparisonOpDescription", - [ - ("binary_op_variant", int), - ("c_func_description", CFunctionDescription), - ("c_func_negated", bool), - ("c_func_swap_operands", bool), - ], -) +class IntComparisonOpDescription(NamedTuple): + binary_op_variant: int + c_func_description: CFunctionDescription + c_func_negated: bool + c_func_swap_operands: bool # Equals operation on two boxed tagged integers diff --git a/mypyc/primitives/list_ops.py b/mypyc/primitives/list_ops.py index c729e264fc14..7fe3157f3a38 100644 --- a/mypyc/primitives/list_ops.py +++ b/mypyc/primitives/list_ops.py @@ -277,3 +277,24 @@ c_function_name="CPyList_GetSlice", error_kind=ERR_MAGIC, ) + +supports_sequence_protocol = custom_op( + arg_types=[object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPySequence_Check", + error_kind=ERR_NEVER, +) + +sequence_get_item = custom_op( + arg_types=[object_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + c_function_name="PySequence_GetItem", + error_kind=ERR_NEVER, +) + +sequence_get_slice = custom_op( + arg_types=[object_rprimitive, c_pyssize_t_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + c_function_name="PySequence_GetSlice", + error_kind=ERR_MAGIC, +) diff --git a/mypyc/primitives/registry.py b/mypyc/primitives/registry.py index d7d171b72cca..1e2cf2695ee7 100644 --- a/mypyc/primitives/registry.py +++ b/mypyc/primitives/registry.py @@ -47,29 +47,27 @@ # is only used for primitives. We translate it away during IR building. 
ERR_NEG_INT: Final = 10 -CFunctionDescription = NamedTuple( - "CFunctionDescription", - [ - ("name", str), - ("arg_types", List[RType]), - ("return_type", RType), - ("var_arg_type", Optional[RType]), - ("truncated_type", Optional[RType]), - ("c_function_name", str), - ("error_kind", int), - ("steals", StealsDescription), - ("is_borrowed", bool), - ("ordering", Optional[List[int]]), - ("extra_int_constants", List[Tuple[int, RType]]), - ("priority", int), - ], -) + +class CFunctionDescription(NamedTuple): + name: str + arg_types: List[RType] + return_type: RType + var_arg_type: Optional[RType] + truncated_type: Optional[RType] + c_function_name: str + error_kind: int + steals: StealsDescription + is_borrowed: bool + ordering: Optional[List[int]] + extra_int_constants: List[Tuple[int, RType]] + priority: int # A description for C load operations including LoadGlobal and LoadAddress -LoadAddressDescription = NamedTuple( - "LoadAddressDescription", [("name", str), ("type", RType), ("src", str)] -) # name of the target to load +class LoadAddressDescription(NamedTuple): + name: str + type: RType + src: str # name of the target to load # CallC op for method call(such as 'str.join') diff --git a/mypyc/primitives/set_ops.py b/mypyc/primitives/set_ops.py index 801fdad34ea4..fcfb7847dc7d 100644 --- a/mypyc/primitives/set_ops.py +++ b/mypyc/primitives/set_ops.py @@ -54,7 +54,7 @@ ) # item in set -binary_op( +set_in_op = binary_op( name="in", arg_types=[object_rprimitive, set_rprimitive], return_type=c_int_rprimitive, diff --git a/mypyc/sametype.py b/mypyc/sametype.py index a3cfd5c08059..1b811d4e9041 100644 --- a/mypyc/sametype.py +++ b/mypyc/sametype.py @@ -35,7 +35,9 @@ def is_same_method_signature(a: FuncSignature, b: FuncSignature) -> bool: len(a.args) == len(b.args) and is_same_type(a.ret_type, b.ret_type) and all( - is_same_type(t1.type, t2.type) and t1.name == t2.name + is_same_type(t1.type, t2.type) + and ((t1.pos_only and t2.pos_only) or t1.name == t2.name) + and 
t1.optional == t2.optional for t1, t2 in zip(a.args[1:], b.args[1:]) ) ) diff --git a/mypyc/test-data/commandline.test b/mypyc/test-data/commandline.test index 6612df9e1886..e7ba11192d28 100644 --- a/mypyc/test-data/commandline.test +++ b/mypyc/test-data/commandline.test @@ -164,7 +164,7 @@ class Trait2(Concrete2): class NonExt(Concrete1): # E: Non-extension classes may not inherit from extension classes pass -class Nope(Trait1, Concrete2): # E: Non-trait bases must appear first in parent list # E: Non-trait MRO must be linear +class Nope(Trait1, Concrete2): # E: Non-trait bases must appear first in parent list # E: Multiple inheritance is not supported (except for traits) pass @decorator @@ -223,3 +223,9 @@ def h(arg: str) -> None: @a.register def i(arg: Foo) -> None: pass + +[case testOnlyWarningOutput] +# cmd: test.py + +[file test.py] +names = (str(v) for v in [1, 2, 3]) # W: Treating generator comprehension as list diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index 0e437f4597ea..2f3c18e9c731 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -221,12 +221,14 @@ def clear(self) -> None: pass def pop(self) -> T: pass def update(self, x: Iterable[S]) -> None: pass def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> Set[Union[T, S]]: ... + def __xor__(self, s: Union[Set[S], FrozenSet[S]]) -> Set[Union[T, S]]: ... class frozenset(Generic[T]): def __init__(self, i: Optional[Iterable[T]] = None) -> None: pass def __iter__(self) -> Iterator[T]: pass def __len__(self) -> int: pass def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> FrozenSet[Union[T, S]]: ... + def __xor__(self, s: Union[Set[S], FrozenSet[S]]) -> FrozenSet[Union[T, S]]: ... 
class slice: pass diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test index 8e54b25b673b..a06977d037b2 100644 --- a/mypyc/test-data/irbuild-basic.test +++ b/mypyc/test-data/irbuild-basic.test @@ -1108,7 +1108,9 @@ L0: return 1 [case testCallableTypes] -from typing import Callable +from typing import Callable, Any +from m import f + def absolute_value(x: int) -> int: return x if x > 0 else -x @@ -1116,7 +1118,7 @@ def call_native_function(x: int) -> int: return absolute_value(x) def call_python_function(x: int) -> int: - return int(x) + return f(x) def return_float() -> float: return 5.0 @@ -1127,6 +1129,9 @@ def return_callable_type() -> Callable[[], float]: def call_callable_type() -> float: f = return_callable_type() return f() +[file m.py] +def f(x: int) -> int: + return x [out] def absolute_value(x): x :: int @@ -1158,14 +1163,18 @@ L0: return r0 def call_python_function(x): x :: int - r0, r1, r2 :: object - r3 :: int + r0 :: dict + r1 :: str + r2, r3, r4 :: object + r5 :: int L0: - r0 = load_address PyLong_Type - r1 = box(int, x) - r2 = PyObject_CallFunctionObjArgs(r0, r1, 0) - r3 = unbox(int, r2) - return r3 + r0 = __main__.globals :: static + r1 = 'f' + r2 = CPyDict_GetItem(r0, r1) + r3 = box(int, x) + r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) + r5 = unbox(int, r4) + return r5 def return_float(): r0 :: float L0: @@ -2228,243 +2237,6 @@ L0: r1 = CPyTagged_Multiply(4, r0) return r1 -[case testPropertyDerivedGen] -from typing import Callable -class BaseProperty: - @property - def value(self) -> object: - return self._incrementer - - @property - def bad_value(self) -> object: - return self._incrementer - - @property - def next(self) -> BaseProperty: - return BaseProperty(self._incrementer + 1) - - def __init__(self, value: int) -> None: - self._incrementer = value - -class DerivedProperty(BaseProperty): - @property - def value(self) -> int: - return self._incrementer - - @property - def bad_value(self) -> object: - return 
self._incrementer - - @property - def next(self) -> DerivedProperty: - return DerivedProperty(self._incr_func, self._incr_func(self.value)) - - def __init__(self, incr_func: Callable[[int], int], value: int) -> None: - BaseProperty.__init__(self, value) - self._incr_func = incr_func - - -class AgainProperty(DerivedProperty): - @property - def next(self) -> AgainProperty: - return AgainProperty(self._incr_func, self._incr_func(self._incr_func(self.value))) - - @property - def bad_value(self) -> int: - return self._incrementer -[out] -def BaseProperty.value(self): - self :: __main__.BaseProperty - r0 :: int - r1 :: object -L0: - r0 = self._incrementer - r1 = box(int, r0) - return r1 -def BaseProperty.bad_value(self): - self :: __main__.BaseProperty - r0 :: int - r1 :: object -L0: - r0 = self._incrementer - r1 = box(int, r0) - return r1 -def BaseProperty.next(self): - self :: __main__.BaseProperty - r0, r1 :: int - r2 :: __main__.BaseProperty -L0: - r0 = borrow self._incrementer - r1 = CPyTagged_Add(r0, 2) - keep_alive self - r2 = BaseProperty(r1) - return r2 -def BaseProperty.__init__(self, value): - self :: __main__.BaseProperty - value :: int -L0: - self._incrementer = value - return 1 -def DerivedProperty.value(self): - self :: __main__.DerivedProperty - r0 :: int -L0: - r0 = self._incrementer - return r0 -def DerivedProperty.value__BaseProperty_glue(__mypyc_self__): - __mypyc_self__ :: __main__.DerivedProperty - r0 :: int - r1 :: object -L0: - r0 = __mypyc_self__.value - r1 = box(int, r0) - return r1 -def DerivedProperty.bad_value(self): - self :: __main__.DerivedProperty - r0 :: int - r1 :: object -L0: - r0 = self._incrementer - r1 = box(int, r0) - return r1 -def DerivedProperty.next(self): - self :: __main__.DerivedProperty - r0 :: object - r1 :: int - r2, r3, r4 :: object - r5 :: int - r6 :: __main__.DerivedProperty -L0: - r0 = self._incr_func - r1 = self.value - r2 = self._incr_func - r3 = box(int, r1) - r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) - r5 = 
unbox(int, r4) - r6 = DerivedProperty(r0, r5) - return r6 -def DerivedProperty.next__BaseProperty_glue(__mypyc_self__): - __mypyc_self__, r0 :: __main__.DerivedProperty -L0: - r0 = __mypyc_self__.next - return r0 -def DerivedProperty.__init__(self, incr_func, value): - self :: __main__.DerivedProperty - incr_func :: object - value :: int - r0 :: None -L0: - r0 = BaseProperty.__init__(self, value) - self._incr_func = incr_func - return 1 -def AgainProperty.next(self): - self :: __main__.AgainProperty - r0 :: object - r1 :: int - r2, r3, r4 :: object - r5 :: int - r6, r7, r8 :: object - r9 :: int - r10 :: __main__.AgainProperty -L0: - r0 = self._incr_func - r1 = self.value - r2 = self._incr_func - r3 = box(int, r1) - r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) - r5 = unbox(int, r4) - r6 = self._incr_func - r7 = box(int, r5) - r8 = PyObject_CallFunctionObjArgs(r6, r7, 0) - r9 = unbox(int, r8) - r10 = AgainProperty(r0, r9) - return r10 -def AgainProperty.next__DerivedProperty_glue(__mypyc_self__): - __mypyc_self__, r0 :: __main__.AgainProperty -L0: - r0 = __mypyc_self__.next - return r0 -def AgainProperty.next__BaseProperty_glue(__mypyc_self__): - __mypyc_self__, r0 :: __main__.AgainProperty -L0: - r0 = __mypyc_self__.next - return r0 -def AgainProperty.bad_value(self): - self :: __main__.AgainProperty - r0 :: int -L0: - r0 = self._incrementer - return r0 -def AgainProperty.bad_value__DerivedProperty_glue(__mypyc_self__): - __mypyc_self__ :: __main__.AgainProperty - r0 :: int - r1 :: object -L0: - r0 = __mypyc_self__.bad_value - r1 = box(int, r0) - return r1 -def AgainProperty.bad_value__BaseProperty_glue(__mypyc_self__): - __mypyc_self__ :: __main__.AgainProperty - r0 :: int - r1 :: object -L0: - r0 = __mypyc_self__.bad_value - r1 = box(int, r0) - return r1 - -[case testPropertyTraitSubclassing] -from mypy_extensions import trait -@trait -class SubclassedTrait: - @property - def this(self) -> SubclassedTrait: - return self - - @property - def boxed(self) -> object: - 
return 3 - -class DerivingObject(SubclassedTrait): - @property - def this(self) -> DerivingObject: - return self - - @property - def boxed(self) -> int: - return 5 -[out] -def SubclassedTrait.this(self): - self :: __main__.SubclassedTrait -L0: - return self -def SubclassedTrait.boxed(self): - self :: __main__.SubclassedTrait - r0 :: object -L0: - r0 = object 3 - return r0 -def DerivingObject.this(self): - self :: __main__.DerivingObject -L0: - return self -def DerivingObject.this__SubclassedTrait_glue(__mypyc_self__): - __mypyc_self__, r0 :: __main__.DerivingObject -L0: - r0 = __mypyc_self__.this - return r0 -def DerivingObject.boxed(self): - self :: __main__.DerivingObject -L0: - return 10 -def DerivingObject.boxed__SubclassedTrait_glue(__mypyc_self__): - __mypyc_self__ :: __main__.DerivingObject - r0 :: int - r1 :: object -L0: - r0 = __mypyc_self__.boxed - r1 = box(int, r0) - return r1 - [case testNativeIndex] from typing import List class A: @@ -2575,11 +2347,8 @@ def __top_level__(): r92, r93, r94, r95 :: ptr r96 :: dict r97 :: str - r98, r99 :: object - r100 :: dict - r101 :: str - r102 :: int32 - r103 :: bit + r98 :: int32 + r99 :: bit L0: r0 = builtins :: module r1 = load_address _Py_NoneStruct @@ -2691,13 +2460,9 @@ L2: set_mem r95, r91 :: builtins.object* keep_alive r88 r96 = __main__.globals :: static - r97 = 'Bar' - r98 = CPyDict_GetItem(r96, r97) - r99 = PyObject_CallFunctionObjArgs(r98, r88, 0) - r100 = __main__.globals :: static - r101 = 'y' - r102 = CPyDict_SetItem(r100, r101, r99) - r103 = r102 >= 0 :: signed + r97 = 'y' + r98 = CPyDict_SetItem(r96, r97, r88) + r99 = r98 >= 0 :: signed return 1 [case testChainedConditional] @@ -3312,8 +3077,7 @@ def call_sum(l, comparison): r1, r2 :: object r3, x :: int r4, r5 :: object - r6 :: bool - r7 :: object + r6, r7 :: bool r8, r9 :: int r10 :: bit L0: @@ -3328,8 +3092,8 @@ L2: r4 = box(int, x) r5 = PyObject_CallFunctionObjArgs(comparison, r4, 0) r6 = unbox(bool, r5) - r7 = box(bool, r6) - r8 = unbox(int, r7) 
+ r7 = r6 << 1 + r8 = extend r7: builtins.bool to builtins.int r9 = CPyTagged_Add(r0, r8) r0 = r9 L3: @@ -3510,7 +3274,7 @@ L2: [case testFinalStaticInt] from typing import Final -x: Final = 1 + 1 +x: Final = 1 + int() def f() -> int: return x - 1 @@ -3821,3 +3585,18 @@ L0: r3 = 0.0 i__redef____redef__ = r3 return 1 + +[case testNewType] +from typing import NewType + +class A: pass + +N = NewType("N", A) + +def f(arg: A) -> N: + return N(arg) +[out] +def f(arg): + arg :: __main__.A +L0: + return arg diff --git a/mypyc/test-data/irbuild-bool.test b/mypyc/test-data/irbuild-bool.test new file mode 100644 index 000000000000..407ab8bcda93 --- /dev/null +++ b/mypyc/test-data/irbuild-bool.test @@ -0,0 +1,144 @@ +[case testBoolToAndFromInt] +from mypy_extensions import i64 + +def bool_to_int(b: bool) -> int: + return b +def int_to_bool(n: int) -> bool: + return bool(n) +def bool_to_i64(b: bool) -> i64: + return b +def i64_to_bool(n: i64) -> bool: + return bool(n) +def bit_to_int(n1: i64, n2: i64) -> int: + return bool(n1 == n2) +def bit_to_i64(n1: i64, n2: i64) -> i64: + return bool(n1 == n2) +[out] +def bool_to_int(b): + b, r0 :: bool + r1 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + return r1 +def int_to_bool(n): + n :: int + r0 :: bit +L0: + r0 = n != 0 + return r0 +def bool_to_i64(b): + b :: bool + r0 :: int64 +L0: + r0 = extend b: builtins.bool to int64 + return r0 +def i64_to_bool(n): + n :: int64 + r0 :: bit +L0: + r0 = n != 0 + return r0 +def bit_to_int(n1, n2): + n1, n2 :: int64 + r0 :: bit + r1 :: bool + r2 :: int +L0: + r0 = n1 == n2 + r1 = r0 << 1 + r2 = extend r1: builtins.bool to builtins.int + return r2 +def bit_to_i64(n1, n2): + n1, n2 :: int64 + r0 :: bit + r1 :: int64 +L0: + r0 = n1 == n2 + r1 = extend r0: bit to int64 + return r1 + +[case testConversionToBool] +from typing import List, Optional + +class C: pass +class D: + def __bool__(self) -> bool: + return True + +def list_to_bool(l: List[str]) -> bool: + return bool(l) 
+ +def always_truthy_instance_to_bool(o: C) -> bool: + return bool(o) + +def instance_to_bool(o: D) -> bool: + return bool(o) + +def optional_truthy_to_bool(o: Optional[C]) -> bool: + return bool(o) + +def optional_maybe_falsey_to_bool(o: Optional[D]) -> bool: + return bool(o) +[out] +def D.__bool__(self): + self :: __main__.D +L0: + return 1 +def list_to_bool(l): + l :: list + r0 :: ptr + r1 :: native_int + r2 :: short_int + r3 :: bit +L0: + r0 = get_element_ptr l ob_size :: PyVarObject + r1 = load_mem r0 :: native_int* + keep_alive l + r2 = r1 << 1 + r3 = r2 != 0 + return r3 +def always_truthy_instance_to_bool(o): + o :: __main__.C + r0 :: int32 + r1 :: bit + r2 :: bool +L0: + r0 = PyObject_IsTrue(o) + r1 = r0 >= 0 :: signed + r2 = truncate r0: int32 to builtins.bool + return r2 +def instance_to_bool(o): + o :: __main__.D + r0 :: bool +L0: + r0 = o.__bool__() + return r0 +def optional_truthy_to_bool(o): + o :: union[__main__.C, None] + r0 :: object + r1 :: bit +L0: + r0 = load_address _Py_NoneStruct + r1 = o != r0 + return r1 +def optional_maybe_falsey_to_bool(o): + o :: union[__main__.D, None] + r0 :: object + r1 :: bit + r2 :: __main__.D + r3 :: bool + r4 :: bit +L0: + r0 = load_address _Py_NoneStruct + r1 = o != r0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = cast(__main__.D, o) + r3 = r2.__bool__() + r4 = r3 + goto L3 +L2: + r4 = 0 +L3: + return r4 diff --git a/mypyc/test-data/irbuild-classes.test b/mypyc/test-data/irbuild-classes.test index 5a574ac44354..700a529f9627 100644 --- a/mypyc/test-data/irbuild-classes.test +++ b/mypyc/test-data/irbuild-classes.test @@ -181,94 +181,6 @@ L0: o.x = r1; r2 = is_error return o -[case testSubclassSpecialize2] -class A: - def foo(self, x: int) -> object: - return str(x) -class B(A): - def foo(self, x: object) -> object: - return x -class C(B): - def foo(self, x: object) -> int: - return id(x) - -def use_a(x: A, y: int) -> object: - return x.foo(y) - -def use_b(x: B, y: object) -> object: - return x.foo(y) - -def 
use_c(x: C, y: object) -> int: - return x.foo(y) -[out] -def A.foo(self, x): - self :: __main__.A - x :: int - r0 :: str -L0: - r0 = CPyTagged_Str(x) - return r0 -def B.foo(self, x): - self :: __main__.B - x :: object -L0: - return x -def B.foo__A_glue(self, x): - self :: __main__.B - x :: int - r0, r1 :: object -L0: - r0 = box(int, x) - r1 = B.foo(self, r0) - return r1 -def C.foo(self, x): - self :: __main__.C - x :: object - r0 :: int -L0: - r0 = CPyTagged_Id(x) - return r0 -def C.foo__B_glue(self, x): - self :: __main__.C - x :: object - r0 :: int - r1 :: object -L0: - r0 = C.foo(self, x) - r1 = box(int, r0) - return r1 -def C.foo__A_glue(self, x): - self :: __main__.C - x :: int - r0 :: object - r1 :: int - r2 :: object -L0: - r0 = box(int, x) - r1 = C.foo(self, r0) - r2 = box(int, r1) - return r2 -def use_a(x, y): - x :: __main__.A - y :: int - r0 :: object -L0: - r0 = x.foo(y) - return r0 -def use_b(x, y): - x :: __main__.B - y, r0 :: object -L0: - r0 = x.foo(y) - return r0 -def use_c(x, y): - x :: __main__.C - y :: object - r0 :: int -L0: - r0 = x.foo(y) - return r0 - [case testSubclass_toplevel] from typing import TypeVar, Generic from mypy_extensions import trait diff --git a/mypyc/test-data/irbuild-constant-fold.test b/mypyc/test-data/irbuild-constant-fold.test index dd75c01443f1..7d9127887aa6 100644 --- a/mypyc/test-data/irbuild-constant-fold.test +++ b/mypyc/test-data/irbuild-constant-fold.test @@ -205,23 +205,13 @@ Y: Final = 2 + 4 def f() -> None: a = X + 1 - # TODO: Constant fold this as well a = Y + 1 [out] def f(): - a, r0 :: int - r1 :: bool - r2 :: int + a :: int L0: a = 12 - r0 = __main__.Y :: static - if is_error(r0) goto L1 else goto L2 -L1: - r1 = raise NameError('value for final name "Y" was not set') - unreachable -L2: - r2 = CPyTagged_Add(r0, 2) - a = r2 + a = 14 return 1 [case testIntConstantFoldingClassFinal] diff --git a/mypyc/test-data/irbuild-dunders.test b/mypyc/test-data/irbuild-dunders.test index 24e708913354..82f04dcdf687 100644 
--- a/mypyc/test-data/irbuild-dunders.test +++ b/mypyc/test-data/irbuild-dunders.test @@ -154,6 +154,12 @@ class C: def __abs__(self) -> int: return 6 + def __bool__(self) -> bool: + return False + + def __complex__(self) -> complex: + return 7j + def f(c: C) -> None: -c ~c @@ -161,6 +167,8 @@ def f(c: C) -> None: float(c) +c abs(c) + bool(c) + complex(c) [out] def C.__neg__(self): self :: __main__.C @@ -188,19 +196,31 @@ def C.__abs__(self): self :: __main__.C L0: return 12 +def C.__bool__(self): + self :: __main__.C +L0: + return 0 +def C.__complex__(self): + self :: __main__.C + r0 :: object +L0: + r0 = 7j + return r0 def f(c): c :: __main__.C - r0, r1 :: int - r2, r3, r4, r5 :: object - r6, r7 :: int + r0, r1, r2 :: int + r3 :: float + r4, r5 :: int + r6 :: bool + r7 :: object L0: r0 = c.__neg__() r1 = c.__invert__() - r2 = load_address PyLong_Type - r3 = PyObject_CallFunctionObjArgs(r2, c, 0) - r4 = load_address PyFloat_Type - r5 = PyObject_CallFunctionObjArgs(r4, c, 0) - r6 = c.__pos__() - r7 = c.__abs__() + r2 = c.__int__() + r3 = c.__float__() + r4 = c.__pos__() + r5 = c.__abs__() + r6 = c.__bool__() + r7 = c.__complex__() return 1 diff --git a/mypyc/test-data/irbuild-glue-methods.test b/mypyc/test-data/irbuild-glue-methods.test new file mode 100644 index 000000000000..6d749bf5dd84 --- /dev/null +++ b/mypyc/test-data/irbuild-glue-methods.test @@ -0,0 +1,437 @@ +# Test cases for glue methods. +# +# These are used when subclass method signature has a different representation +# compared to the base class. 
+ +[case testSubclassSpecialize2] +class A: + def foo(self, x: int) -> object: + return str(x) +class B(A): + def foo(self, x: object) -> object: + return x +class C(B): + def foo(self, x: object) -> int: + return id(x) + +def use_a(x: A, y: int) -> object: + return x.foo(y) + +def use_b(x: B, y: object) -> object: + return x.foo(y) + +def use_c(x: C, y: object) -> int: + return x.foo(y) +[out] +def A.foo(self, x): + self :: __main__.A + x :: int + r0 :: str +L0: + r0 = CPyTagged_Str(x) + return r0 +def B.foo(self, x): + self :: __main__.B + x :: object +L0: + return x +def B.foo__A_glue(self, x): + self :: __main__.B + x :: int + r0, r1 :: object +L0: + r0 = box(int, x) + r1 = B.foo(self, r0) + return r1 +def C.foo(self, x): + self :: __main__.C + x :: object + r0 :: int +L0: + r0 = CPyTagged_Id(x) + return r0 +def C.foo__B_glue(self, x): + self :: __main__.C + x :: object + r0 :: int + r1 :: object +L0: + r0 = C.foo(self, x) + r1 = box(int, r0) + return r1 +def C.foo__A_glue(self, x): + self :: __main__.C + x :: int + r0 :: object + r1 :: int + r2 :: object +L0: + r0 = box(int, x) + r1 = C.foo(self, r0) + r2 = box(int, r1) + return r2 +def use_a(x, y): + x :: __main__.A + y :: int + r0 :: object +L0: + r0 = x.foo(y) + return r0 +def use_b(x, y): + x :: __main__.B + y, r0 :: object +L0: + r0 = x.foo(y) + return r0 +def use_c(x, y): + x :: __main__.C + y :: object + r0 :: int +L0: + r0 = x.foo(y) + return r0 + +[case testPropertyDerivedGen] +from typing import Callable +class BaseProperty: + @property + def value(self) -> object: + return self._incrementer + + @property + def bad_value(self) -> object: + return self._incrementer + + @property + def next(self) -> BaseProperty: + return BaseProperty(self._incrementer + 1) + + def __init__(self, value: int) -> None: + self._incrementer = value + +class DerivedProperty(BaseProperty): + @property + def value(self) -> int: + return self._incrementer + + @property + def bad_value(self) -> object: + return 
self._incrementer + + @property + def next(self) -> DerivedProperty: + return DerivedProperty(self._incr_func, self._incr_func(self.value)) + + def __init__(self, incr_func: Callable[[int], int], value: int) -> None: + BaseProperty.__init__(self, value) + self._incr_func = incr_func + + +class AgainProperty(DerivedProperty): + @property + def next(self) -> AgainProperty: + return AgainProperty(self._incr_func, self._incr_func(self._incr_func(self.value))) + + @property + def bad_value(self) -> int: + return self._incrementer +[out] +def BaseProperty.value(self): + self :: __main__.BaseProperty + r0 :: int + r1 :: object +L0: + r0 = self._incrementer + r1 = box(int, r0) + return r1 +def BaseProperty.bad_value(self): + self :: __main__.BaseProperty + r0 :: int + r1 :: object +L0: + r0 = self._incrementer + r1 = box(int, r0) + return r1 +def BaseProperty.next(self): + self :: __main__.BaseProperty + r0, r1 :: int + r2 :: __main__.BaseProperty +L0: + r0 = borrow self._incrementer + r1 = CPyTagged_Add(r0, 2) + keep_alive self + r2 = BaseProperty(r1) + return r2 +def BaseProperty.__init__(self, value): + self :: __main__.BaseProperty + value :: int +L0: + self._incrementer = value + return 1 +def DerivedProperty.value(self): + self :: __main__.DerivedProperty + r0 :: int +L0: + r0 = self._incrementer + return r0 +def DerivedProperty.value__BaseProperty_glue(__mypyc_self__): + __mypyc_self__ :: __main__.DerivedProperty + r0 :: int + r1 :: object +L0: + r0 = __mypyc_self__.value + r1 = box(int, r0) + return r1 +def DerivedProperty.bad_value(self): + self :: __main__.DerivedProperty + r0 :: int + r1 :: object +L0: + r0 = self._incrementer + r1 = box(int, r0) + return r1 +def DerivedProperty.next(self): + self :: __main__.DerivedProperty + r0 :: object + r1 :: int + r2, r3, r4 :: object + r5 :: int + r6 :: __main__.DerivedProperty +L0: + r0 = self._incr_func + r1 = self.value + r2 = self._incr_func + r3 = box(int, r1) + r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) + r5 = 
unbox(int, r4) + r6 = DerivedProperty(r0, r5) + return r6 +def DerivedProperty.next__BaseProperty_glue(__mypyc_self__): + __mypyc_self__, r0 :: __main__.DerivedProperty +L0: + r0 = __mypyc_self__.next + return r0 +def DerivedProperty.__init__(self, incr_func, value): + self :: __main__.DerivedProperty + incr_func :: object + value :: int + r0 :: None +L0: + r0 = BaseProperty.__init__(self, value) + self._incr_func = incr_func + return 1 +def AgainProperty.next(self): + self :: __main__.AgainProperty + r0 :: object + r1 :: int + r2, r3, r4 :: object + r5 :: int + r6, r7, r8 :: object + r9 :: int + r10 :: __main__.AgainProperty +L0: + r0 = self._incr_func + r1 = self.value + r2 = self._incr_func + r3 = box(int, r1) + r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) + r5 = unbox(int, r4) + r6 = self._incr_func + r7 = box(int, r5) + r8 = PyObject_CallFunctionObjArgs(r6, r7, 0) + r9 = unbox(int, r8) + r10 = AgainProperty(r0, r9) + return r10 +def AgainProperty.next__DerivedProperty_glue(__mypyc_self__): + __mypyc_self__, r0 :: __main__.AgainProperty +L0: + r0 = __mypyc_self__.next + return r0 +def AgainProperty.next__BaseProperty_glue(__mypyc_self__): + __mypyc_self__, r0 :: __main__.AgainProperty +L0: + r0 = __mypyc_self__.next + return r0 +def AgainProperty.bad_value(self): + self :: __main__.AgainProperty + r0 :: int +L0: + r0 = self._incrementer + return r0 +def AgainProperty.bad_value__DerivedProperty_glue(__mypyc_self__): + __mypyc_self__ :: __main__.AgainProperty + r0 :: int + r1 :: object +L0: + r0 = __mypyc_self__.bad_value + r1 = box(int, r0) + return r1 +def AgainProperty.bad_value__BaseProperty_glue(__mypyc_self__): + __mypyc_self__ :: __main__.AgainProperty + r0 :: int + r1 :: object +L0: + r0 = __mypyc_self__.bad_value + r1 = box(int, r0) + return r1 + +[case testPropertyTraitSubclassing] +from mypy_extensions import trait +@trait +class SubclassedTrait: + @property + def this(self) -> SubclassedTrait: + return self + + @property + def boxed(self) -> object: + 
return 3 + +class DerivingObject(SubclassedTrait): + @property + def this(self) -> DerivingObject: + return self + + @property + def boxed(self) -> int: + return 5 +[out] +def SubclassedTrait.this(self): + self :: __main__.SubclassedTrait +L0: + return self +def SubclassedTrait.boxed(self): + self :: __main__.SubclassedTrait + r0 :: object +L0: + r0 = object 3 + return r0 +def DerivingObject.this(self): + self :: __main__.DerivingObject +L0: + return self +def DerivingObject.this__SubclassedTrait_glue(__mypyc_self__): + __mypyc_self__, r0 :: __main__.DerivingObject +L0: + r0 = __mypyc_self__.this + return r0 +def DerivingObject.boxed(self): + self :: __main__.DerivingObject +L0: + return 10 +def DerivingObject.boxed__SubclassedTrait_glue(__mypyc_self__): + __mypyc_self__ :: __main__.DerivingObject + r0 :: int + r1 :: object +L0: + r0 = __mypyc_self__.boxed + r1 = box(int, r0) + return r1 + +[case testI64GlueWithExtraDefaultArg] +from mypy_extensions import i64 + +class C: + def f(self) -> None: pass + +class D(C): + def f(self, x: i64 = 44) -> None: pass +[out] +def C.f(self): + self :: __main__.C +L0: + return 1 +def D.f(self, x, __bitmap): + self :: __main__.D + x :: int64 + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 44 +L2: + return 1 +def D.f__C_glue(self): + self :: __main__.D + r0 :: None +L0: + r0 = D.f(self, 0, 0) + return r0 + +[case testI64GlueWithSecondDefaultArg] +from mypy_extensions import i64 + +class C: + def f(self, x: i64 = 11) -> None: pass +class D(C): + def f(self, x: i64 = 12, y: i64 = 13) -> None: pass +[out] +def C.f(self, x, __bitmap): + self :: __main__.C + x :: int64 + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 11 +L2: + return 1 +def D.f(self, x, y, __bitmap): + self :: __main__.D + x, y :: int64 + __bitmap, r0 :: uint32 + r1 :: bit + r2 :: uint32 + r3 :: bit +L0: + r0 = __bitmap 
& 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 12 +L2: + r2 = __bitmap & 2 + r3 = r2 == 0 + if r3 goto L3 else goto L4 :: bool +L3: + y = 13 +L4: + return 1 +def D.f__C_glue(self, x, __bitmap): + self :: __main__.D + x :: int64 + __bitmap :: uint32 + r0 :: None +L0: + r0 = D.f(self, x, 0, __bitmap) + return r0 + +[case testI64GlueWithInvalidOverride] +from mypy_extensions import i64 + +class C: + def f(self, x: i64, y: i64 = 5) -> None: pass + def ff(self, x: int) -> None: pass +class CC(C): + def f(self, x: i64 = 12, y: i64 = 5) -> None: pass # Line 7 + def ff(self, x: int = 12) -> None: pass + +class D: + def f(self, x: int) -> None: pass +class DD(D): + def f(self, x: i64) -> None: pass # Line 13 + +class E: + def f(self, x: i64) -> None: pass +class EE(E): + def f(self, x: int) -> None: pass # Line 18 +[out] +main:7: error: An argument with type "int64" cannot be given a default value in a method override +main:13: error: Incompatible argument type "int64" (base class has type "int") +main:18: error: Incompatible argument type "int" (base class has type "int64") diff --git a/mypyc/test-data/irbuild-i32.test b/mypyc/test-data/irbuild-i32.test index 818c3138e4e3..7ea3c0864728 100644 --- a/mypyc/test-data/irbuild-i32.test +++ b/mypyc/test-data/irbuild-i32.test @@ -480,3 +480,55 @@ L0: y = 11 z = -3 return 1 + +[case testI32ExplicitConversionFromVariousTypes] +from mypy_extensions import i32 + +def bool_to_i32(b: bool) -> i32: + return i32(b) + +def str_to_i32(s: str) -> i32: + return i32(s) + +class C: + def __int__(self) -> i32: + return 5 + +def instance_to_i32(c: C) -> i32: + return i32(c) + +def float_to_i32(x: float) -> i32: + return i32(x) +[out] +def bool_to_i32(b): + b :: bool + r0 :: int32 +L0: + r0 = extend b: builtins.bool to int32 + return r0 +def str_to_i32(s): + s :: str + r0 :: object + r1 :: int32 +L0: + r0 = CPyLong_FromStr(s) + r1 = unbox(int32, r0) + return r1 +def C.__int__(self): + self :: __main__.C +L0: + return 5 +def 
instance_to_i32(c): + c :: __main__.C + r0 :: int32 +L0: + r0 = c.__int__() + return r0 +def float_to_i32(x): + x :: float + r0 :: object + r1 :: int32 +L0: + r0 = CPyLong_FromFloat(x) + r1 = unbox(int32, r0) + return r1 diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test index a04894913c33..6b8dd357421f 100644 --- a/mypyc/test-data/irbuild-i64.test +++ b/mypyc/test-data/irbuild-i64.test @@ -1559,6 +1559,32 @@ L2: L3: return r3 +[case testI64ExplicitConversionToInt_64bit] +from mypy_extensions import i64 + +def f(x: i64) -> int: + return int(x) +[out] +def f(x): + x :: int64 + r0, r1 :: bit + r2, r3, r4 :: int +L0: + r0 = x <= 4611686018427387903 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + r1 = x >= -4611686018427387904 :: signed + if r1 goto L3 else goto L2 :: bool +L2: + r2 = CPyTagged_FromInt64(x) + r3 = r2 + goto L4 +L3: + r4 = x << 1 + r3 = r4 +L4: + return r3 + [case testI64ExplicitConversionFromLiteral] from mypy_extensions import i64 @@ -1628,3 +1654,247 @@ L3: goto L1 L4: return 1 + +[case testI64MethodDefaultValueOverride] +from mypy_extensions import i64 + +class C: + def f(self, x: i64 = 11) -> None: pass +class D(C): + def f(self, x: i64 = 12) -> None: pass +[out] +def C.f(self, x, __bitmap): + self :: __main__.C + x :: int64 + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 11 +L2: + return 1 +def D.f(self, x, __bitmap): + self :: __main__.D + x :: int64 + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 12 +L2: + return 1 + +[case testI64FinalConstants] +from typing_extensions import Final +from mypy_extensions import i64 + +A: Final = -1 +B: Final = -(1 + 3*2) +C: Final = 0 +D: Final = A - B +E: Final[i64] = 1 + 3 + +def f1() -> i64: + return A + +def f2() -> i64: + return A + B + +def f3() -> i64: + return C + +def f4() -> i64: + return D + +def f5() -> i64: + 
return E +[out] +def f1(): +L0: + return -1 +def f2(): +L0: + return -8 +def f3(): +L0: + return 0 +def f4(): +L0: + return 6 +def f5(): +L0: + return 4 + +[case testI64Cast] +from typing import cast +from mypy_extensions import i64 + +def cast_object(o: object) -> i64: + return cast(i64, o) + +def cast_int(x: int) -> i64: + return cast(i64, x) +[out] +def cast_object(o): + o :: object + r0 :: int64 +L0: + r0 = unbox(int64, o) + return r0 +def cast_int(x): + x :: int + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6 :: int64 +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = x >> 1 + r3 = r2 + goto L3 +L2: + r4 = x ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive x +L3: + return r3 + +[case testI64ExplicitConversionFromVariousTypes] +from mypy_extensions import i64 + +def bool_to_i64(b: bool) -> i64: + return i64(b) + +def str_to_i64(s: str) -> i64: + return i64(s) + +def str_to_i64_with_base(s: str) -> i64: + return i64(s, 2) + +class C: + def __int__(self) -> i64: + return 5 + +def instance_to_i64(c: C) -> i64: + return i64(c) + +def float_to_i64(x: float) -> i64: + return i64(x) +[out] +def bool_to_i64(b): + b :: bool + r0 :: int64 +L0: + r0 = extend b: builtins.bool to int64 + return r0 +def str_to_i64(s): + s :: str + r0 :: object + r1 :: int64 +L0: + r0 = CPyLong_FromStr(s) + r1 = unbox(int64, r0) + return r1 +def str_to_i64_with_base(s): + s :: str + r0 :: object + r1 :: int64 +L0: + r0 = CPyLong_FromStrWithBase(s, 4) + r1 = unbox(int64, r0) + return r1 +def C.__int__(self): + self :: __main__.C +L0: + return 5 +def instance_to_i64(c): + c :: __main__.C + r0 :: int64 +L0: + r0 = c.__int__() + return r0 +def float_to_i64(x): + x :: float + r0 :: object + r1 :: int64 +L0: + r0 = CPyLong_FromFloat(x) + r1 = unbox(int64, r0) + return r1 + +[case testI64IsinstanceNarrowing] +from typing import Union +from mypy_extensions import i64 + +class C: + a: i64 + +def narrow1(x: Union[C, i64]) -> 
i64: + if isinstance(x, i64): + return x + return x.a + +def narrow2(x: Union[C, i64]) -> i64: + if isinstance(x, int): + return x + return x.a +[out] +def narrow1(x): + x :: union[__main__.C, int64] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: int64 + r5 :: __main__.C + r6 :: int64 +L0: + r0 = load_address PyLong_Type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L2 :: bool +L1: + r4 = unbox(int64, x) + return r4 +L2: + r5 = borrow cast(__main__.C, x) + r6 = r5.a + keep_alive x + return r6 +def narrow2(x): + x :: union[__main__.C, int64] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: int64 + r5 :: __main__.C + r6 :: int64 +L0: + r0 = load_address PyLong_Type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L2 :: bool +L1: + r4 = unbox(int64, x) + return r4 +L2: + r5 = borrow cast(__main__.C, x) + r6 = r5.a + keep_alive x + return r6 diff --git a/mypyc/test-data/irbuild-int.test b/mypyc/test-data/irbuild-int.test index 8bf43cfa4923..aebadce5650e 100644 --- a/mypyc/test-data/irbuild-int.test +++ b/mypyc/test-data/irbuild-int.test @@ -155,3 +155,70 @@ def divby8(x): L0: r0 = CPyTagged_Rshift(x, 6) return r0 + +[case testFinalConstantFolding] +from typing_extensions import Final + +X: Final = -1 +Y: Final = -(1 + 3*2) +Z: Final = Y + 1 + +class C: + A: Final = 1 + B: Final = -1 + +def f1() -> int: + return X + +def f2() -> int: + return X + Y + +def f3() -> int: + return Z + +def f4() -> int: + return C.A + +def f5() -> int: + return C.B +[out] +def C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C +L0: + __mypyc_self__.A = 2 + __mypyc_self__.B = -2 + return 1 +def f1(): +L0: + return -2 +def f2(): +L0: + return -16 +def f3(): +L0: + return -12 +def f4(): +L0: + return 2 +def f5(): +L0: + return -2 + +[case testConvertIntegralToInt] +def bool_to_int(b: 
bool) -> int: + return int(b) + +def int_to_int(n: int) -> int: + return int(n) +[out] +def bool_to_int(b): + b, r0 :: bool + r1 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + return r1 +def int_to_int(n): + n :: int +L0: + return n diff --git a/mypyc/test-data/irbuild-lists.test b/mypyc/test-data/irbuild-lists.test index 47f7ada709e3..cb9687a2f942 100644 --- a/mypyc/test-data/irbuild-lists.test +++ b/mypyc/test-data/irbuild-lists.test @@ -428,3 +428,104 @@ L4: L5: res = r8 return 1 + +[case testSimplifyListUnion] +from typing import List, Union, Optional + +def narrow(a: Union[List[str], List[bytes], int]) -> int: + if isinstance(a, list): + return len(a) + return a +def loop(a: Union[List[str], List[bytes]]) -> None: + for x in a: + pass +def nested_union(a: Union[List[str], List[Optional[str]]]) -> None: + for x in a: + pass +[out] +def narrow(a): + a :: union[list, int] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: list + r5 :: ptr + r6 :: native_int + r7 :: short_int + r8 :: int +L0: + r0 = load_address PyList_Type + r1 = PyObject_IsInstance(a, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L2 :: bool +L1: + r4 = borrow cast(list, a) + r5 = get_element_ptr r4 ob_size :: PyVarObject + r6 = load_mem r5 :: native_int* + keep_alive r4 + r7 = r6 << 1 + keep_alive a + return r7 +L2: + r8 = unbox(int, a) + return r8 +def loop(a): + a :: list + r0 :: short_int + r1 :: ptr + r2 :: native_int + r3 :: short_int + r4 :: bit + r5 :: object + r6, x :: union[str, bytes] + r7 :: short_int +L0: + r0 = 0 +L1: + r1 = get_element_ptr a ob_size :: PyVarObject + r2 = load_mem r1 :: native_int* + keep_alive a + r3 = r2 << 1 + r4 = r0 < r3 :: signed + if r4 goto L2 else goto L4 :: bool +L2: + r5 = CPyList_GetItemUnsafe(a, r0) + r6 = cast(union[str, bytes], r5) + x = r6 +L3: + r7 = r0 + 2 + r0 = r7 + goto L1 +L4: + return 1 +def nested_union(a): + a :: list + r0 :: short_int + r1 :: ptr + 
r2 :: native_int + r3 :: short_int + r4 :: bit + r5 :: object + r6, x :: union[str, None] + r7 :: short_int +L0: + r0 = 0 +L1: + r1 = get_element_ptr a ob_size :: PyVarObject + r2 = load_mem r1 :: native_int* + keep_alive a + r3 = r2 << 1 + r4 = r0 < r3 :: signed + if r4 goto L2 else goto L4 :: bool +L2: + r5 = CPyList_GetItemUnsafe(a, r0) + r6 = cast(union[str, None], r5) + x = r6 +L3: + r7 = r0 + 2 + r0 = r7 + goto L1 +L4: + return 1 diff --git a/mypyc/test-data/irbuild-match.test b/mypyc/test-data/irbuild-match.test new file mode 100644 index 000000000000..2afe3d862f51 --- /dev/null +++ b/mypyc/test-data/irbuild-match.test @@ -0,0 +1,1708 @@ +[case testMatchValuePattern_python3_10] +def f(): + match 123: + case 123: + print("matched") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7, r8 :: object +L0: + r0 = 246 == 246 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = 'matched' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L3 +L2: +L3: + r8 = box(None, 1) + return r8 +[case testMatchOrPattern_python3_10] +def f(): + match 123: + case 123 | 456: + print("matched") +[out] +def f(): + r0, r1 :: bit + r2 :: str + r3 :: object + r4 :: str + r5 :: object + r6 :: object[1] + r7 :: object_ptr + r8, r9 :: object +L0: + r0 = 246 == 246 + if r0 goto L3 else goto L1 :: bool +L1: + r1 = 246 == 912 + if r1 goto L3 else goto L2 :: bool +L2: + goto L4 +L3: + r2 = 'matched' + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = [r2] + r7 = load_address r6 + r8 = _PyObject_Vectorcall(r5, r7, 1, 0) + keep_alive r2 + goto L5 +L4: +L5: + r9 = box(None, 1) + return r9 +[case testMatchOrPatternManyPatterns_python3_10] +def f(): + match 1: + case 1 | 2 | 3 | 4: + print("matched") +[out] +def f(): + r0, r1, r2, r3 :: bit + r4 :: str + r5 :: object + r6 :: str + 
r7 :: object + r8 :: object[1] + r9 :: object_ptr + r10, r11 :: object +L0: + r0 = 2 == 2 + if r0 goto L5 else goto L1 :: bool +L1: + r1 = 2 == 4 + if r1 goto L5 else goto L2 :: bool +L2: + r2 = 2 == 6 + if r2 goto L5 else goto L3 :: bool +L3: + r3 = 2 == 8 + if r3 goto L5 else goto L4 :: bool +L4: + goto L6 +L5: + r4 = 'matched' + r5 = builtins :: module + r6 = 'print' + r7 = CPyObject_GetAttr(r5, r6) + r8 = [r4] + r9 = load_address r8 + r10 = _PyObject_Vectorcall(r7, r9, 1, 0) + keep_alive r4 + goto L7 +L6: +L7: + r11 = box(None, 1) + return r11 +[case testMatchClassPattern_python3_10] +def f(): + match 123: + case int(): + print("matched") +[out] +def f(): + r0, r1 :: object + r2 :: bool + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = load_address PyLong_Type + r1 = object 123 + r2 = CPy_TypeCheck(r1, r0) + if r2 goto L1 else goto L2 :: bool +L1: + r3 = 'matched' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L3 +L2: +L3: + r10 = box(None, 1) + return r10 +[case testMatchExaustivePattern_python3_10] +def f(): + match 123: + case _: + print("matched") +[out] +def f(): + r0 :: str + r1 :: object + r2 :: str + r3 :: object + r4 :: object[1] + r5 :: object_ptr + r6, r7 :: object +L0: +L1: + r0 = 'matched' + r1 = builtins :: module + r2 = 'print' + r3 = CPyObject_GetAttr(r1, r2) + r4 = [r0] + r5 = load_address r4 + r6 = _PyObject_Vectorcall(r3, r5, 1, 0) + keep_alive r0 + goto L3 +L2: +L3: + r7 = box(None, 1) + return r7 +[case testMatchMultipleBodies_python3_10] +def f(): + match 123: + case 123: + print("matched") + case 456: + print("no match") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7 :: object + r8 :: bit + r9 :: str + r10 :: object + r11 :: str + r12 :: object + r13 :: object[1] + 
r14 :: object_ptr + r15, r16 :: object +L0: + r0 = 246 == 246 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = 'matched' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L5 +L2: + r8 = 246 == 912 + if r8 goto L3 else goto L4 :: bool +L3: + r9 = 'no match' + r10 = builtins :: module + r11 = 'print' + r12 = CPyObject_GetAttr(r10, r11) + r13 = [r9] + r14 = load_address r13 + r15 = _PyObject_Vectorcall(r12, r14, 1, 0) + keep_alive r9 + goto L5 +L4: +L5: + r16 = box(None, 1) + return r16 +[case testMatchMultiBodyAndComplexOr_python3_10] +def f(): + match 123: + case 1: + print("here 1") + case 2 | 3: + print("here 2 | 3") + case 123: + print("here 123") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7 :: object + r8, r9 :: bit + r10 :: str + r11 :: object + r12 :: str + r13 :: object + r14 :: object[1] + r15 :: object_ptr + r16 :: object + r17 :: bit + r18 :: str + r19 :: object + r20 :: str + r21 :: object + r22 :: object[1] + r23 :: object_ptr + r24, r25 :: object +L0: + r0 = 246 == 2 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = 'here 1' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L9 +L2: + r8 = 246 == 4 + if r8 goto L5 else goto L3 :: bool +L3: + r9 = 246 == 6 + if r9 goto L5 else goto L4 :: bool +L4: + goto L6 +L5: + r10 = 'here 2 | 3' + r11 = builtins :: module + r12 = 'print' + r13 = CPyObject_GetAttr(r11, r12) + r14 = [r10] + r15 = load_address r14 + r16 = _PyObject_Vectorcall(r13, r15, 1, 0) + keep_alive r10 + goto L9 +L6: + r17 = 246 == 246 + if r17 goto L7 else goto L8 :: bool +L7: + r18 = 'here 123' + r19 = builtins :: module + r20 = 'print' + r21 = CPyObject_GetAttr(r19, r20) + r22 = [r18] + r23 = load_address r22 + r24 = 
_PyObject_Vectorcall(r21, r23, 1, 0) + keep_alive r18 + goto L9 +L8: +L9: + r25 = box(None, 1) + return r25 +[case testMatchWithGuard_python3_10] +def f(): + match 123: + case 123 if True: + print("matched") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7, r8 :: object +L0: + r0 = 246 == 246 + if r0 goto L1 else goto L3 :: bool +L1: + if 1 goto L2 else goto L3 :: bool +L2: + r1 = 'matched' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L4 +L3: +L4: + r8 = box(None, 1) + return r8 +[case testMatchSingleton_python3_10] +def f(): + match 123: + case True: + print("value is True") + case False: + print("value is False") + case None: + print("value is None") +[out] +def f(): + r0, r1 :: object + r2 :: bit + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10, r11 :: object + r12 :: bit + r13 :: str + r14 :: object + r15 :: str + r16 :: object + r17 :: object[1] + r18 :: object_ptr + r19, r20, r21 :: object + r22 :: bit + r23 :: str + r24 :: object + r25 :: str + r26 :: object + r27 :: object[1] + r28 :: object_ptr + r29, r30 :: object +L0: + r0 = object 123 + r1 = box(bool, 1) + r2 = r0 == r1 + if r2 goto L1 else goto L2 :: bool +L1: + r3 = 'value is True' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L7 +L2: + r10 = object 123 + r11 = box(bool, 0) + r12 = r10 == r11 + if r12 goto L3 else goto L4 :: bool +L3: + r13 = 'value is False' + r14 = builtins :: module + r15 = 'print' + r16 = CPyObject_GetAttr(r14, r15) + r17 = [r13] + r18 = load_address r17 + r19 = _PyObject_Vectorcall(r16, r18, 1, 0) + keep_alive r13 + goto L7 +L4: + r20 = load_address _Py_NoneStruct + r21 = object 123 + r22 = r21 == r20 
+ if r22 goto L5 else goto L6 :: bool +L5: + r23 = 'value is None' + r24 = builtins :: module + r25 = 'print' + r26 = CPyObject_GetAttr(r24, r25) + r27 = [r23] + r28 = load_address r27 + r29 = _PyObject_Vectorcall(r26, r28, 1, 0) + keep_alive r23 + goto L7 +L6: +L7: + r30 = box(None, 1) + return r30 +[case testMatchRecursiveOrPattern_python3_10] +def f(): + match 1: + case 1 | int(): + print("matched") +[out] +def f(): + r0 :: bit + r1, r2 :: object + r3 :: bool + r4 :: str + r5 :: object + r6 :: str + r7 :: object + r8 :: object[1] + r9 :: object_ptr + r10, r11 :: object +L0: + r0 = 2 == 2 + if r0 goto L3 else goto L1 :: bool +L1: + r1 = load_address PyLong_Type + r2 = object 1 + r3 = CPy_TypeCheck(r2, r1) + if r3 goto L3 else goto L2 :: bool +L2: + goto L4 +L3: + r4 = 'matched' + r5 = builtins :: module + r6 = 'print' + r7 = CPyObject_GetAttr(r5, r6) + r8 = [r4] + r9 = load_address r8 + r10 = _PyObject_Vectorcall(r7, r9, 1, 0) + keep_alive r4 + goto L5 +L4: +L5: + r11 = box(None, 1) + return r11 +[case testMatchAsPattern_python3_10] +def f(): + match 123: + case 123 as x: + print(x) +[out] +def f(): + r0 :: bit + r1, x, r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7, r8 :: object +L0: + r0 = 246 == 246 + r1 = object 123 + x = r1 + if r0 goto L1 else goto L2 :: bool +L1: + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [x] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive x + goto L3 +L2: +L3: + r8 = box(None, 1) + return r8 +[case testMatchAsPatternOnOrPattern_python3_10] +def f(): + match 1: + case (1 | 2) as x: + print(x) +[out] +def f(): + r0 :: bit + r1, x :: object + r2 :: bit + r3, r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = 2 == 2 + r1 = object 1 + x = r1 + if r0 goto L3 else goto L1 :: bool +L1: + r2 = 2 == 4 + r3 = object 2 + x = r3 + if r2 goto L3 else goto L2 :: bool +L2: + goto L4 +L3: + r4 = 
builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [x] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive x + goto L5 +L4: +L5: + r10 = box(None, 1) + return r10 +[case testMatchAsPatternOnClassPattern_python3_10] +def f(): + match 123: + case int() as i: + print(i) +[out] +def f(): + r0, r1 :: object + r2 :: bool + i :: int + r3 :: object + r4 :: str + r5, r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = load_address PyLong_Type + r1 = object 123 + r2 = CPy_TypeCheck(r1, r0) + if r2 goto L1 else goto L3 :: bool +L1: + i = 246 +L2: + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = box(int, i) + r7 = [r6] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r5, r8, 1, 0) + keep_alive r6 + goto L4 +L3: +L4: + r10 = box(None, 1) + return r10 +[case testMatchClassPatternWithPositionalArgs_python3_10] +class Position: + __match_args__ = ("x", "y", "z") + + x: int + y: int + z: int + +def f(x): + match x: + case Position(1, 2, 3): + print("matched") +[out] +def Position.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.Position + r0, r1, r2 :: str + r3 :: tuple[str, str, str] +L0: + r0 = 'x' + r1 = 'y' + r2 = 'z' + r3 = (r0, r1, r2) + __mypyc_self__.__match_args__ = r3 + return 1 +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: str + r12, r13, r14 :: object + r15 :: int32 + r16 :: bit + r17 :: bool + r18 :: str + r19, r20, r21 :: object + r22 :: int32 + r23 :: bit + r24 :: bool + r25 :: str + r26 :: object + r27 :: str + r28 :: object + r29 :: object[1] + r30 :: object_ptr + r31, r32 :: object +L0: + r0 = __main__.Position :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = 'x' + r5 = CPyObject_GetAttr(x, r4) + r6 = object 1 + r7 = 
PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L2 else goto L5 :: bool +L2: + r11 = 'y' + r12 = CPyObject_GetAttr(x, r11) + r13 = object 2 + r14 = PyObject_RichCompare(r12, r13, 2) + r15 = PyObject_IsTrue(r14) + r16 = r15 >= 0 :: signed + r17 = truncate r15: int32 to builtins.bool + if r17 goto L3 else goto L5 :: bool +L3: + r18 = 'z' + r19 = CPyObject_GetAttr(x, r18) + r20 = object 3 + r21 = PyObject_RichCompare(r19, r20, 2) + r22 = PyObject_IsTrue(r21) + r23 = r22 >= 0 :: signed + r24 = truncate r22: int32 to builtins.bool + if r24 goto L4 else goto L5 :: bool +L4: + r25 = 'matched' + r26 = builtins :: module + r27 = 'print' + r28 = CPyObject_GetAttr(r26, r27) + r29 = [r25] + r30 = load_address r29 + r31 = _PyObject_Vectorcall(r28, r30, 1, 0) + keep_alive r25 + goto L6 +L5: +L6: + r32 = box(None, 1) + return r32 +[case testMatchClassPatternWithKeywordPatterns_python3_10] +class Position: + x: int + y: int + z: int + +def f(x): + match x: + case Position(z=1, y=2, x=3): + print("matched") +[out] +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: str + r12, r13, r14 :: object + r15 :: int32 + r16 :: bit + r17 :: bool + r18 :: str + r19, r20, r21 :: object + r22 :: int32 + r23 :: bit + r24 :: bool + r25 :: str + r26 :: object + r27 :: str + r28 :: object + r29 :: object[1] + r30 :: object_ptr + r31, r32 :: object +L0: + r0 = __main__.Position :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = 'z' + r5 = CPyObject_GetAttr(x, r4) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L2 else goto L5 :: bool +L2: + r11 = 'y' + r12 = CPyObject_GetAttr(x, r11) + 
r13 = object 2 + r14 = PyObject_RichCompare(r12, r13, 2) + r15 = PyObject_IsTrue(r14) + r16 = r15 >= 0 :: signed + r17 = truncate r15: int32 to builtins.bool + if r17 goto L3 else goto L5 :: bool +L3: + r18 = 'x' + r19 = CPyObject_GetAttr(x, r18) + r20 = object 3 + r21 = PyObject_RichCompare(r19, r20, 2) + r22 = PyObject_IsTrue(r21) + r23 = r22 >= 0 :: signed + r24 = truncate r22: int32 to builtins.bool + if r24 goto L4 else goto L5 :: bool +L4: + r25 = 'matched' + r26 = builtins :: module + r27 = 'print' + r28 = CPyObject_GetAttr(r26, r27) + r29 = [r25] + r30 = load_address r29 + r31 = _PyObject_Vectorcall(r28, r30, 1, 0) + keep_alive r25 + goto L6 +L5: +L6: + r32 = box(None, 1) + return r32 +[case testMatchClassPatternWithNestedPattern_python3_10] +class C: + num: int + +def f(x): + match x: + case C(num=1 | 2): + print("matched") +[out] +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12 :: object + r13 :: int32 + r14 :: bit + r15 :: bool + r16 :: str + r17 :: object + r18 :: str + r19 :: object + r20 :: object[1] + r21 :: object_ptr + r22, r23 :: object +L0: + r0 = __main__.C :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = 'num' + r5 = CPyObject_GetAttr(x, r4) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L4 else goto L2 :: bool +L2: + r11 = object 2 + r12 = PyObject_RichCompare(r5, r11, 2) + r13 = PyObject_IsTrue(r12) + r14 = r13 >= 0 :: signed + r15 = truncate r13: int32 to builtins.bool + if r15 goto L4 else goto L3 :: bool +L3: + goto L5 +L4: + r16 = 'matched' + r17 = builtins :: module + r18 = 'print' + r19 = CPyObject_GetAttr(r17, r18) + r20 = [r16] + r21 = load_address r20 + r22 = _PyObject_Vectorcall(r19, r21, 1, 0) + 
keep_alive r16 + goto L6 +L5: +L6: + r23 = box(None, 1) + return r23 +[case testAsPatternDoesntBleedIntoSubPatterns_python3_10] +class C: + __match_args__ = ("a", "b") + a: int + b: int + +def f(x): + match x: + case C(1, 2) as y: + print("matched") +[out] +def C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C + r0, r1 :: str + r2 :: tuple[str, str] +L0: + r0 = 'a' + r1 = 'b' + r2 = (r0, r1) + __mypyc_self__.__match_args__ = r2 + return 1 +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4, y :: __main__.C + r5 :: str + r6, r7, r8 :: object + r9 :: int32 + r10 :: bit + r11 :: bool + r12 :: str + r13, r14, r15 :: object + r16 :: int32 + r17 :: bit + r18 :: bool + r19 :: str + r20 :: object + r21 :: str + r22 :: object + r23 :: object[1] + r24 :: object_ptr + r25, r26 :: object +L0: + r0 = __main__.C :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = cast(__main__.C, x) + y = r4 +L2: + r5 = 'a' + r6 = CPyObject_GetAttr(x, r5) + r7 = object 1 + r8 = PyObject_RichCompare(r6, r7, 2) + r9 = PyObject_IsTrue(r8) + r10 = r9 >= 0 :: signed + r11 = truncate r9: int32 to builtins.bool + if r11 goto L3 else goto L5 :: bool +L3: + r12 = 'b' + r13 = CPyObject_GetAttr(x, r12) + r14 = object 2 + r15 = PyObject_RichCompare(r13, r14, 2) + r16 = PyObject_IsTrue(r15) + r17 = r16 >= 0 :: signed + r18 = truncate r16: int32 to builtins.bool + if r18 goto L4 else goto L5 :: bool +L4: + r19 = 'matched' + r20 = builtins :: module + r21 = 'print' + r22 = CPyObject_GetAttr(r20, r21) + r23 = [r19] + r24 = load_address r23 + r25 = _PyObject_Vectorcall(r22, r24, 1, 0) + keep_alive r19 + goto L6 +L5: +L6: + r26 = box(None, 1) + return r26 +[case testMatchClassPatternPositionalCapture_python3_10] +class C: + __match_args__ = ("x",) + + x: int + +def f(x): + match x: + case C(num): + print("matched") +[out] +def 
C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C + r0 :: str + r1 :: tuple[str] +L0: + r0 = 'x' + r1 = (r0) + __mypyc_self__.__match_args__ = r1 + return 1 +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5 :: object + r6, num :: int + r7 :: str + r8 :: object + r9 :: str + r10 :: object + r11 :: object[1] + r12 :: object_ptr + r13, r14 :: object +L0: + r0 = __main__.C :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L3 :: bool +L1: + r4 = 'x' + r5 = CPyObject_GetAttr(x, r4) + r6 = unbox(int, r5) + num = r6 +L2: + r7 = 'matched' + r8 = builtins :: module + r9 = 'print' + r10 = CPyObject_GetAttr(r8, r9) + r11 = [r7] + r12 = load_address r11 + r13 = _PyObject_Vectorcall(r10, r12, 1, 0) + keep_alive r7 + goto L4 +L3: +L4: + r14 = box(None, 1) + return r14 +[case testMatchMappingEmpty_python3_10] +def f(x): + match x: + case {}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: str + r3 :: object + r4 :: str + r5 :: object + r6 :: object[1] + r7 :: object_ptr + r8, r9 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = 'matched' + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = [r2] + r7 = load_address r6 + r8 = _PyObject_Vectorcall(r5, r7, 1, 0) + keep_alive r2 + goto L3 +L2: +L3: + r9 = box(None, 1) + return r9 +[case testMatchMappingPatternWithKeys_python3_10] +def f(x): + match x: + case {"key": "value"}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: str + r3 :: int32 + r4 :: bit + r5 :: object + r6 :: str + r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: str + r12 :: object + r13 :: str + r14 :: object + r15 :: object[1] + r16 :: object_ptr + r17, r18 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L4 :: bool +L1: + 
r2 = 'key' + r3 = PyMapping_HasKey(x, r2) + r4 = r3 != 0 + if r4 goto L2 else goto L4 :: bool +L2: + r5 = PyObject_GetItem(x, r2) + r6 = 'value' + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L4 :: bool +L3: + r11 = 'matched' + r12 = builtins :: module + r13 = 'print' + r14 = CPyObject_GetAttr(r12, r13) + r15 = [r11] + r16 = load_address r15 + r17 = _PyObject_Vectorcall(r14, r16, 1, 0) + keep_alive r11 + goto L5 +L4: +L5: + r18 = box(None, 1) + return r18 +[case testMatchMappingPatternWithRest_python3_10] +def f(x): + match x: + case {**rest}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2, rest :: dict + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L3 :: bool +L1: + r2 = CPyDict_FromAny(x) + rest = r2 +L2: + r3 = 'matched' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L4 +L3: +L4: + r10 = box(None, 1) + return r10 +[case testMatchMappingPatternWithRestPopKeys_python3_10] +def f(x): + match x: + case {"key": "value", **rest}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: str + r3 :: int32 + r4 :: bit + r5 :: object + r6 :: str + r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, rest :: dict + r12 :: int32 + r13 :: bit + r14 :: str + r15 :: object + r16 :: str + r17 :: object + r18 :: object[1] + r19 :: object_ptr + r20, r21 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L5 :: bool +L1: + r2 = 'key' + r3 = PyMapping_HasKey(x, r2) + r4 = r3 != 0 + if r4 goto L2 else goto L5 :: bool +L2: + r5 = PyObject_GetItem(x, r2) + r6 = 'value' + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = 
PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L5 :: bool +L3: + r11 = CPyDict_FromAny(x) + rest = r11 + r12 = PyDict_DelItem(r11, r2) + r13 = r12 >= 0 :: signed +L4: + r14 = 'matched' + r15 = builtins :: module + r16 = 'print' + r17 = CPyObject_GetAttr(r15, r16) + r18 = [r14] + r19 = load_address r18 + r20 = _PyObject_Vectorcall(r17, r19, 1, 0) + keep_alive r14 + goto L6 +L5: +L6: + r21 = box(None, 1) + return r21 +[case testMatchEmptySequencePattern_python3_10] +def f(x): + match x: + case []: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: str + r6 :: object + r7 :: str + r8 :: object + r9 :: object[1] + r10 :: object_ptr + r11, r12 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L3 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 == 0 + if r4 goto L2 else goto L3 :: bool +L2: + r5 = 'matched' + r6 = builtins :: module + r7 = 'print' + r8 = CPyObject_GetAttr(r6, r7) + r9 = [r5] + r10 = load_address r9 + r11 = _PyObject_Vectorcall(r8, r10, 1, 0) + keep_alive r5 + goto L4 +L3: +L4: + r12 = box(None, 1) + return r12 +[case testMatchFixedLengthSequencePattern_python3_10] +def f(x): + match x: + case [1, 2]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12, r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: str + r18 :: object + r19 :: str + r20 :: object + r21 :: object[1] + r22 :: object_ptr + r23, r24 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L5 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 == 2 + if r4 goto L2 else goto L5 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 
= r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L5 :: bool +L3: + r11 = PySequence_GetItem(x, 1) + r12 = object 2 + r13 = PyObject_RichCompare(r11, r12, 2) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L4 else goto L5 :: bool +L4: + r17 = 'matched' + r18 = builtins :: module + r19 = 'print' + r20 = CPyObject_GetAttr(r18, r19) + r21 = [r17] + r22 = load_address r21 + r23 = _PyObject_Vectorcall(r20, r22, 1, 0) + keep_alive r17 + goto L6 +L5: +L6: + r24 = box(None, 1) + return r24 +[case testMatchSequencePatternWithTrailingUnboundStar_python3_10] +def f(x): + match x: + case [1, 2, *_]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12, r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: str + r18 :: object + r19 :: str + r20 :: object + r21 :: object[1] + r22 :: object_ptr + r23, r24 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L5 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L5 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L5 :: bool +L3: + r11 = PySequence_GetItem(x, 1) + r12 = object 2 + r13 = PyObject_RichCompare(r11, r12, 2) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L4 else goto L5 :: bool +L4: + r17 = 'matched' + r18 = builtins :: module + r19 = 'print' + r20 = CPyObject_GetAttr(r18, r19) + r21 = [r17] + r22 = load_address r21 + r23 = _PyObject_Vectorcall(r20, r22, 1, 0) + keep_alive r17 + goto L6 +L5: +L6: + r24 = box(None, 1) + return r24 +[case 
testMatchSequencePatternWithTrailingBoundStar_python3_10] +def f(x): + match x: + case [1, 2, *rest]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12, r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: native_int + r18, rest :: object + r19 :: str + r20 :: object + r21 :: str + r22 :: object + r23 :: object[1] + r24 :: object_ptr + r25, r26 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L6 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L6 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L6 :: bool +L3: + r11 = PySequence_GetItem(x, 1) + r12 = object 2 + r13 = PyObject_RichCompare(r11, r12, 2) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L4 else goto L6 :: bool +L4: + r17 = r2 - 0 + r18 = PySequence_GetSlice(x, 2, r17) + rest = r18 +L5: + r19 = 'matched' + r20 = builtins :: module + r21 = 'print' + r22 = CPyObject_GetAttr(r20, r21) + r23 = [r19] + r24 = load_address r23 + r25 = _PyObject_Vectorcall(r22, r24, 1, 0) + keep_alive r19 + goto L7 +L6: +L7: + r26 = box(None, 1) + return r26 +[case testMatchSequenceWithStarPatternInTheMiddle_python3_10] +def f(x): + match x: + case ["start", *rest, "end"]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: object + r6 :: str + r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: native_int + r12 :: object + r13 :: str + r14 :: object + r15 :: int32 + r16 :: bit + r17 :: bool + r18 :: native_int + r19, rest :: object + r20 :: str + r21 :: object + r22 :: str 
+ r23 :: object + r24 :: object[1] + r25 :: object_ptr + r26, r27 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L6 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L6 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = 'start' + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L6 :: bool +L3: + r11 = r2 - 1 + r12 = PySequence_GetItem(x, r11) + r13 = 'end' + r14 = PyObject_RichCompare(r12, r13, 2) + r15 = PyObject_IsTrue(r14) + r16 = r15 >= 0 :: signed + r17 = truncate r15: int32 to builtins.bool + if r17 goto L4 else goto L6 :: bool +L4: + r18 = r2 - 1 + r19 = PySequence_GetSlice(x, 1, r18) + rest = r19 +L5: + r20 = 'matched' + r21 = builtins :: module + r22 = 'print' + r23 = CPyObject_GetAttr(r21, r22) + r24 = [r20] + r25 = load_address r24 + r26 = _PyObject_Vectorcall(r23, r25, 1, 0) + keep_alive r20 + goto L7 +L6: +L7: + r27 = box(None, 1) + return r27 +[case testMatchSequenceWithStarPatternAtTheStart_python3_10] +def f(x): + match x: + case [*rest, 1, 2]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: native_int + r6, r7, r8 :: object + r9 :: int32 + r10 :: bit + r11 :: bool + r12 :: native_int + r13, r14, r15 :: object + r16 :: int32 + r17 :: bit + r18 :: bool + r19 :: native_int + r20, rest :: object + r21 :: str + r22 :: object + r23 :: str + r24 :: object + r25 :: object[1] + r26 :: object_ptr + r27, r28 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L6 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L6 :: bool +L2: + r5 = r2 - 2 + r6 = PySequence_GetItem(x, r5) + r7 = object 1 + r8 = PyObject_RichCompare(r6, r7, 2) + r9 = PyObject_IsTrue(r8) + r10 = r9 >= 0 :: signed + r11 = 
truncate r9: int32 to builtins.bool + if r11 goto L3 else goto L6 :: bool +L3: + r12 = r2 - 1 + r13 = PySequence_GetItem(x, r12) + r14 = object 2 + r15 = PyObject_RichCompare(r13, r14, 2) + r16 = PyObject_IsTrue(r15) + r17 = r16 >= 0 :: signed + r18 = truncate r16: int32 to builtins.bool + if r18 goto L4 else goto L6 :: bool +L4: + r19 = r2 - 2 + r20 = PySequence_GetSlice(x, 0, r19) + rest = r20 +L5: + r21 = 'matched' + r22 = builtins :: module + r23 = 'print' + r24 = CPyObject_GetAttr(r22, r23) + r25 = [r21] + r26 = load_address r25 + r27 = _PyObject_Vectorcall(r24, r26, 1, 0) + keep_alive r21 + goto L7 +L6: +L7: + r28 = box(None, 1) + return r28 +[case testMatchBuiltinClassPattern_python3_10] +def f(x): + match x: + case int(y): + print("matched") +[out] +def f(x): + x, r0 :: object + r1 :: bool + r2, y :: int + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = load_address PyLong_Type + r1 = CPy_TypeCheck(x, r0) + if r1 goto L1 else goto L3 :: bool +L1: + r2 = unbox(int, x) + y = r2 +L2: + r3 = 'matched' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L4 +L3: +L4: + r10 = box(None, 1) + return r10 +[case testMatchSequenceCaptureAll_python3_10] +def f(x): + match x: + case [*rest]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: native_int + r6, rest :: object + r7 :: str + r8 :: object + r9 :: str + r10 :: object + r11 :: object[1] + r12 :: object_ptr + r13, r14 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L4 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 0 :: signed + if r4 goto L2 else goto L4 :: bool +L2: + r5 = r2 - 0 + r6 = PySequence_GetSlice(x, 0, r5) + rest = r6 +L3: + r7 = 'matched' + r8 = builtins :: module + r9 = 'print' 
+ r10 = CPyObject_GetAttr(r8, r9) + r11 = [r7] + r12 = load_address r11 + r13 = _PyObject_Vectorcall(r10, r12, 1, 0) + keep_alive r7 + goto L5 +L4: +L5: + r14 = box(None, 1) + return r14 +[case testMatchTypeAnnotatedNativeClass_python3_10] +class A: + a: int + +def f(x: A | int) -> int: + match x: + case A(a=a): + return a + case int(): + return x +[out] +def f(x): + x :: union[__main__.A, int] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5 :: object + r6, a :: int + r7 :: object + r8 :: bool + r9 :: int +L0: + r0 = __main__.A :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L3 :: bool +L1: + r4 = 'a' + r5 = CPyObject_GetAttr(x, r4) + r6 = unbox(int, r5) + a = r6 +L2: + return a +L3: + r7 = load_address PyLong_Type + r8 = CPy_TypeCheck(x, r7) + if r8 goto L4 else goto L5 :: bool +L4: + r9 = unbox(int, x) + return r9 +L5: +L6: + unreachable diff --git a/mypyc/test-data/irbuild-optional.test b/mypyc/test-data/irbuild-optional.test index 4b1d3d1ffec2..e98cf1b19e2e 100644 --- a/mypyc/test-data/irbuild-optional.test +++ b/mypyc/test-data/irbuild-optional.test @@ -527,14 +527,10 @@ class B: [out] def f(o): - o :: union[object, object] - r0 :: object - r1 :: str - r2, r3 :: object + o :: object + r0 :: str + r1 :: object L0: - r0 = o - r1 = 'x' - r2 = CPyObject_GetAttr(r0, r1) - r3 = r2 -L1: + r0 = 'x' + r1 = CPyObject_GetAttr(o, r0) return 1 diff --git a/mypyc/test-data/irbuild-set.test b/mypyc/test-data/irbuild-set.test index fec76751c915..c567422abac7 100644 --- a/mypyc/test-data/irbuild-set.test +++ b/mypyc/test-data/irbuild-set.test @@ -655,3 +655,185 @@ L0: r12 = PySet_Add(r0, r11) r13 = r12 >= 0 :: signed return r0 + +[case testOperatorInSetLiteral] +from typing_extensions import Final + +CONST: Final = "daylily" +non_const = 10 + +def precomputed(i: object) -> bool: + return i in {1, 2.0, 1 +2, 4j, "foo", b"bar", CONST, (None, (27,)), (), False} +def 
not_precomputed_non_final_name(i: int) -> bool: + return i in {non_const} +def not_precomputed_nested_set(i: int) -> bool: + return i in {frozenset({1}), 2} +[out] +def precomputed(i): + i :: object + r0 :: set + r1 :: int32 + r2 :: bit + r3 :: bool +L0: + r0 = frozenset({(), (None, (27,)), 1, 2.0, 3, 4j, False, b'bar', 'daylily', 'foo'}) + r1 = PySet_Contains(r0, i) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + return r3 +def not_precomputed_non_final_name(i): + i :: int + r0 :: dict + r1 :: str + r2 :: object + r3 :: int + r4 :: set + r5 :: object + r6 :: int32 + r7 :: bit + r8 :: object + r9 :: int32 + r10 :: bit + r11 :: bool +L0: + r0 = __main__.globals :: static + r1 = 'non_const' + r2 = CPyDict_GetItem(r0, r1) + r3 = unbox(int, r2) + r4 = PySet_New(0) + r5 = box(int, r3) + r6 = PySet_Add(r4, r5) + r7 = r6 >= 0 :: signed + r8 = box(int, i) + r9 = PySet_Contains(r4, r8) + r10 = r9 >= 0 :: signed + r11 = truncate r9: int32 to builtins.bool + return r11 +def not_precomputed_nested_set(i): + i :: int + r0 :: set + r1 :: object + r2 :: int32 + r3 :: bit + r4 :: object + r5 :: set + r6 :: int32 + r7 :: bit + r8 :: object + r9 :: int32 + r10 :: bit + r11 :: object + r12 :: int32 + r13 :: bit + r14 :: bool +L0: + r0 = PySet_New(0) + r1 = object 1 + r2 = PySet_Add(r0, r1) + r3 = r2 >= 0 :: signed + r4 = PyFrozenSet_New(r0) + r5 = PySet_New(0) + r6 = PySet_Add(r5, r4) + r7 = r6 >= 0 :: signed + r8 = object 2 + r9 = PySet_Add(r5, r8) + r10 = r9 >= 0 :: signed + r11 = box(int, i) + r12 = PySet_Contains(r5, r11) + r13 = r12 >= 0 :: signed + r14 = truncate r12: int32 to builtins.bool + return r14 + +[case testForSetLiteral] +from typing_extensions import Final + +CONST: Final = 10 +non_const = 20 + +def precomputed() -> None: + for _ in {"None", "True", "False"}: + pass + +def precomputed2() -> None: + for _ in {None, False, 1, 2.0, "4", b"5", (6,), 7j, CONST, CONST + 1}: + pass + +def not_precomputed() -> None: + for not_optimized in {non_const}: + 
pass + +[out] +def precomputed(): + r0 :: set + r1, r2 :: object + r3 :: str + _ :: object + r4 :: bit +L0: + r0 = frozenset({'False', 'None', 'True'}) + r1 = PyObject_GetIter(r0) +L1: + r2 = PyIter_Next(r1) + if is_error(r2) goto L4 else goto L2 +L2: + r3 = cast(str, r2) + _ = r3 +L3: + goto L1 +L4: + r4 = CPy_NoErrOccured() +L5: + return 1 +def precomputed2(): + r0 :: set + r1, r2, _ :: object + r3 :: bit +L0: + r0 = frozenset({(6,), 1, 10, 11, 2.0, '4', 7j, False, None, b'5'}) + r1 = PyObject_GetIter(r0) +L1: + r2 = PyIter_Next(r1) + if is_error(r2) goto L4 else goto L2 +L2: + _ = r2 +L3: + goto L1 +L4: + r3 = CPy_NoErrOccured() +L5: + return 1 +def not_precomputed(): + r0 :: dict + r1 :: str + r2 :: object + r3 :: int + r4 :: set + r5 :: object + r6 :: int32 + r7 :: bit + r8, r9 :: object + r10, not_optimized :: int + r11 :: bit +L0: + r0 = __main__.globals :: static + r1 = 'non_const' + r2 = CPyDict_GetItem(r0, r1) + r3 = unbox(int, r2) + r4 = PySet_New(0) + r5 = box(int, r3) + r6 = PySet_Add(r4, r5) + r7 = r6 >= 0 :: signed + r8 = PyObject_GetIter(r4) +L1: + r9 = PyIter_Next(r8) + if is_error(r9) goto L4 else goto L2 +L2: + r10 = unbox(int, r9) + not_optimized = r10 +L3: + goto L1 +L4: + r11 = CPy_NoErrOccured() +L5: + return 1 + diff --git a/mypyc/test-data/irbuild-statements.test b/mypyc/test-data/irbuild-statements.test index ab947c956b74..090c7ed9f3df 100644 --- a/mypyc/test-data/irbuild-statements.test +++ b/mypyc/test-data/irbuild-statements.test @@ -1006,9 +1006,9 @@ L5: return 1 [case testForZip] -from typing import List, Iterable +from typing import List, Iterable, Sequence -def f(a: List[int], b: Iterable[bool]) -> None: +def f(a: List[int], b: Sequence[bool]) -> None: for x, y in zip(a, b): if b: x = 1 diff --git a/mypyc/test-data/irbuild-try.test b/mypyc/test-data/irbuild-try.test index d1119c5deefd..faf3fa1dbd2f 100644 --- a/mypyc/test-data/irbuild-try.test +++ b/mypyc/test-data/irbuild-try.test @@ -416,3 +416,108 @@ L19: L20: return 1 +[case 
testWithNativeSimple] +class DummyContext: + def __enter__(self) -> None: + pass + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + pass + +def foo(x: DummyContext) -> None: + with x: + print('hello') +[out] +def DummyContext.__enter__(self): + self :: __main__.DummyContext +L0: + return 1 +def DummyContext.__exit__(self, exc_type, exc_val, exc_tb): + self :: __main__.DummyContext + exc_type, exc_val, exc_tb :: object +L0: + return 1 +def foo(x): + x :: __main__.DummyContext + r0 :: None + r1 :: bool + r2 :: str + r3 :: object + r4 :: str + r5, r6 :: object + r7, r8 :: tuple[object, object, object] + r9, r10, r11 :: object + r12 :: None + r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: bit + r18, r19, r20 :: tuple[object, object, object] + r21 :: object + r22 :: None + r23 :: bit +L0: + r0 = x.__enter__() + r1 = 1 +L1: +L2: + r2 = 'hello' + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = PyObject_CallFunctionObjArgs(r5, r2, 0) + goto L8 +L3: (handler for L2) + r7 = CPy_CatchError() + r1 = 0 + r8 = CPy_GetExcInfo() + r9 = r8[0] + r10 = r8[1] + r11 = r8[2] + r12 = x.__exit__(r9, r10, r11) + r13 = box(None, r12) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L5 else goto L4 :: bool +L4: + CPy_Reraise() + unreachable +L5: +L6: + CPy_RestoreExcInfo(r7) + goto L8 +L7: (handler for L3, L4, L5) + CPy_RestoreExcInfo(r7) + r17 = CPy_KeepPropagating() + unreachable +L8: +L9: +L10: + r18 = :: tuple[object, object, object] + r19 = r18 + goto L12 +L11: (handler for L1, L6, L7, L8) + r20 = CPy_CatchError() + r19 = r20 +L12: + if r1 goto L13 else goto L14 :: bool +L13: + r21 = load_address _Py_NoneStruct + r22 = x.__exit__(r21, r21, r21) +L14: + if is_error(r19) goto L16 else goto L15 +L15: + CPy_Reraise() + unreachable +L16: + goto L20 +L17: (handler for L12, L13, L14, L15) + if is_error(r19) goto L19 else goto L18 +L18: + CPy_RestoreExcInfo(r19) +L19: + r23 
= CPy_KeepPropagating() + unreachable +L20: + return 1 diff --git a/mypyc/test-data/run-async.test b/mypyc/test-data/run-async.test index e664ed3bb55a..85ad172d61df 100644 --- a/mypyc/test-data/run-async.test +++ b/mypyc/test-data/run-async.test @@ -13,6 +13,9 @@ async def g() -> int: async def f() -> int: return await g() +[file asyncio/__init__.pyi] +async def sleep(t: float) -> None: ... + [typing fixtures/typing-full.pyi] [file driver.py] diff --git a/mypyc/test-data/run-bools.test b/mypyc/test-data/run-bools.test index a7afc5f2b1a2..e23b35d82fc5 100644 --- a/mypyc/test-data/run-bools.test +++ b/mypyc/test-data/run-bools.test @@ -15,6 +15,8 @@ True False [case testBoolOps] +from typing import Optional, Any + def f(x: bool) -> bool: if x: return False @@ -27,8 +29,8 @@ def test_if() -> None: def test_bitwise_and() -> None: # Use eval() to avoid constand folding - t = eval('True') # type: bool - f = eval('False') # type: bool + t: bool = eval('True') + f: bool = eval('False') assert t & t == True assert t & f == False assert f & t == False @@ -40,8 +42,8 @@ def test_bitwise_and() -> None: def test_bitwise_or() -> None: # Use eval() to avoid constand folding - t = eval('True') # type: bool - f = eval('False') # type: bool + t: bool = eval('True') + f: bool = eval('False') assert t | t == True assert t | f == True assert f | t == True @@ -53,8 +55,8 @@ def test_bitwise_or() -> None: def test_bitwise_xor() -> None: # Use eval() to avoid constand folding - t = eval('True') # type: bool - f = eval('False') # type: bool + t: bool = eval('True') + f: bool = eval('False') assert t ^ t == False assert t ^ f == True assert f ^ t == True @@ -66,7 +68,6 @@ def test_bitwise_xor() -> None: f ^= f assert f == False -[case testIsinstanceBool] def test_isinstance_bool() -> None: a = True b = 1.0 @@ -76,3 +77,45 @@ def test_isinstance_bool() -> None: assert isinstance(b, bool) == False assert isinstance(c, bool) == False assert isinstance(d, bool) == True + +class C: pass +class 
D: + def __init__(self, b: bool) -> None: + self.b = b + + def __bool__(self) -> bool: + return self.b + +class E: pass +class F(E): + def __init__(self, b: bool) -> None: + self.b = b + + def __bool__(self) -> bool: + return self.b + +def optional_to_bool1(o: Optional[C]) -> bool: + return bool(o) + +def optional_to_bool2(o: Optional[D]) -> bool: + return bool(o) + +def optional_to_bool3(o: Optional[E]) -> bool: + return bool(o) + +def test_optional_to_bool() -> None: + assert not optional_to_bool1(None) + assert optional_to_bool1(C()) + assert not optional_to_bool2(None) + assert not optional_to_bool2(D(False)) + assert optional_to_bool2(D(True)) + assert not optional_to_bool3(None) + assert optional_to_bool3(E()) + assert not optional_to_bool3(F(False)) + assert optional_to_bool3(F(True)) + +def test_any_to_bool() -> None: + a: Any = int() + b: Any = a + 1 + assert not bool(a) + assert bool(b) diff --git a/mypyc/test-data/run-classes.test b/mypyc/test-data/run-classes.test index d505bda2d705..92ec3873bf38 100644 --- a/mypyc/test-data/run-classes.test +++ b/mypyc/test-data/run-classes.test @@ -263,6 +263,16 @@ class TestEnum(Enum): assert TestEnum.test() == 3 +import enum + +class Pokemon(enum.Enum): + magikarp = 1 + squirtle = 2 + slowbro = 3 + +assert Pokemon.magikarp.value == 1 +assert Pokemon.squirtle.name == 'squirtle' + [file other.py] # Force a multi-module test to make sure we can compile multi-file with # non-extension classes @@ -1958,6 +1968,188 @@ import other_interpreted [out] +[case testAttributeOverridesProperty] +from typing import Any +from mypy_extensions import trait + +@trait +class T1: + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... 
+ +class C1(T1): + x: int = 1 + y: int = 4 + +def test_read_only_property_in_trait_implemented_as_attribute() -> None: + c = C1() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T1 = C1() + assert t.y == 4 + t = c + assert t.x == 5 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +class B2: + @property + def x(self) -> int: + return 11 + + @property + def y(self) -> int: + return 25 + +class C2(B2): + x: int = 1 + y: int = 4 + +def test_read_only_property_in_class_implemented_as_attribute() -> None: + c = C2() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + b: B2 = C2() + assert b.y == 4 + b = c + assert b.x == 5 + assert b.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +@trait +class T3: + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + +class B3: + x: int = 1 + y: int = 4 + +class C3(B3, T3): + pass + +def test_read_only_property_implemented_as_attribute_indirectly() -> None: + c = C3() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T3 = C3() + assert t.y == 4 + t = c + assert t.x == 5 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +@trait +class T4: + @property + def x(self) -> int: ... + @x.setter + def x(self, v1: int) -> None: ... + + @property + def y(self) -> int: ... + @y.setter + def y(self, v2: int) -> None: ... 
+ +class C4(T4): + x: int = 1 + y: int = 4 + +def test_read_write_property_implemented_as_attribute() -> None: + c = C4() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T4 = C4() + assert t.y == 4 + t.x = 5 + assert t.x == 5 + t.y = 6 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +@trait +class T5: + @property + def x(self) -> int: ... + @x.setter + def x(self, v1: int) -> None: ... + + @property + def y(self) -> int: ... + @y.setter + def y(self, v2: int) -> None: ... + +class B5: + x: int = 1 + y: int = 4 + +class BB5(B5): + pass + +class C5(BB5, T5): + pass + +def test_read_write_property_indirectly_implemented_as_attribute() -> None: + c = C5() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T5 = C5() + assert t.y == 4 + t.x = 5 + assert t.x == 5 + t.y = 6 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + [case testSubclassAttributeAccess] from mypy_extensions import trait @@ -2246,3 +2438,14 @@ class Derived(Base): pass assert Derived()() == 1 + +[case testClassWithFinalAttribute] +from typing_extensions import Final + +class C: + A: Final = -1 + a: Final = [A] + +def test_final_attribute() -> None: + assert C.A == -1 + assert C.a == [-1] diff --git a/mypyc/test-data/run-generators.test b/mypyc/test-data/run-generators.test index 0f2cbe152fc0..bcf9da1846ae 100644 --- a/mypyc/test-data/run-generators.test +++ b/mypyc/test-data/run-generators.test @@ -662,3 +662,20 @@ def list_comp() -> List[int]: [file driver.py] from native import list_comp assert list_comp() == [5] + +[case testWithNative] +class DummyContext: + def __init__(self) -> None: + self.x = 0 + + def __enter__(self) -> None: + self.x += 1 + + def __exit__(self, exc_type, exc_value, exc_tb) -> None: + self.x -= 1 + +def test_basic() -> None: + context = DummyContext() + with context: + 
assert context.x == 1 + assert context.x == 0 diff --git a/mypyc/test-data/run-i32.test b/mypyc/test-data/run-i32.test index 3d2f3e59e83c..384e6bd4f02c 100644 --- a/mypyc/test-data/run-i32.test +++ b/mypyc/test-data/run-i32.test @@ -306,6 +306,22 @@ def test_i32_truncate_from_i64() -> None: x = i32(small2) assert x == 2**31 - 1 +def from_float(x: float) -> i32: + return i32(x) + +def test_explicit_conversion_from_float() -> None: + assert from_float(0.0) == 0 + assert from_float(1.456) == 1 + assert from_float(-1234.567) == -1234 + assert from_float(2**31 - 1) == 2**31 - 1 + assert from_float(-2**31) == -2**31 + # The error message could be better, but this is acceptable + with assertRaises(OverflowError, "int too large to convert to i32"): + assert from_float(float(2**31)) + with assertRaises(OverflowError, "int too large to convert to i32"): + # One ulp below the lowest valid i64 value + from_float(float(-2**31 - 2048)) + def test_tuple_i32() -> None: a: i32 = 1 b: i32 = 2 diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test index 357a6b0811b6..ea94741dbd51 100644 --- a/mypyc/test-data/run-i64.test +++ b/mypyc/test-data/run-i64.test @@ -1,5 +1,5 @@ [case testI64BasicOps] -from typing import List, Any, Tuple +from typing import List, Any, Tuple, Union MYPY = False if MYPY: @@ -62,6 +62,37 @@ def test_comparisons() -> None: assert one != two assert not (one != one2) +def is_true(x: i64) -> bool: + if x: + return True + else: + return False + +def is_true2(x: i64) -> bool: + return bool(x) + +def is_false(x: i64) -> bool: + if not x: + return True + else: + return False + +def test_i64_as_bool() -> None: + assert not is_true(0) + assert not is_true2(0) + assert is_false(0) + for x in 1, 55, -1, -7, 1 << 40, -(1 << 50): + assert is_true(x) + assert is_true2(x) + assert not is_false(x) + +def bool_as_i64(b: bool) -> i64: + return b + +def test_bool_as_i64() -> None: + assert bool_as_i64(False) == 0 + assert bool_as_i64(True) == 1 + def div_by_3(x: 
i64) -> i64: return x // 3 @@ -229,6 +260,16 @@ def test_coerce_to_and_from_int() -> None: m: int = x assert m == n +def test_coerce_to_and_from_int2() -> None: + for shift in range(0, 64): + for sign in 1, -1: + for delta in range(-5, 5): + n = sign * (1 << shift) + delta + if -(1 << 63) <= n < (1 << 63): + x: i64 = i64(n) + m: int = int(x) + assert m == n + def test_explicit_conversion_to_i64() -> None: x = i64(5) assert x == 5 @@ -269,6 +310,68 @@ def test_i64_from_large_small_literal() -> None: x = i64(-2**63) assert x == -2**63 +def from_float(x: float) -> i64: + return i64(x) + +def test_explicit_conversion_from_float() -> None: + assert from_float(0.0) == 0 + assert from_float(1.456) == 1 + assert from_float(-1234.567) == -1234 + assert from_float(2**63 - 1) == 2**63 - 1 + assert from_float(-2**63) == -2**63 + # The error message could be better, but this is acceptable + with assertRaises(OverflowError, "int too large to convert to i64"): + assert from_float(float(2**63)) + with assertRaises(OverflowError, "int too large to convert to i64"): + # One ulp below the lowest valid i64 value + from_float(float(-2**63 - 2048)) + +def from_str(s: str) -> i64: + return i64(s) + +def test_explicit_conversion_from_str() -> None: + assert from_str("0") == 0 + assert from_str("1") == 1 + assert from_str("-1234") == -1234 + with assertRaises(ValueError): + from_str("1.2") + +def from_str_with_base(s: str, base: int) -> i64: + return i64(s, base) + +def test_explicit_conversion_from_str_with_base() -> None: + assert from_str_with_base("101", 2) == 5 + assert from_str_with_base("109", 10) == 109 + assert from_str_with_base("-f0A", 16) == -3850 + assert from_str_with_base("0x1a", 16) == 26 + assert from_str_with_base("0X1A", 16) == 26 + with assertRaises(ValueError): + from_str_with_base("1.2", 16) + +def from_bool(b: bool) -> i64: + return i64(b) + +def test_explicit_conversion_from_bool() -> None: + assert from_bool(True) == 1 + assert from_bool(False) == 0 + +class 
IntConv: + def __init__(self, x: i64) -> None: + self.x = x + + def __int__(self) -> i64: + return self.x + 1 + +def test_explicit_conversion_from_instance() -> None: + assert i64(IntConv(0)) == 1 + assert i64(IntConv(12345)) == 12346 + assert i64(IntConv(-23)) == -22 + +def test_explicit_conversion_from_any() -> None: + # This can't be specialized + a: Any = "101" + assert i64(a, base=2) == 5 + def test_tuple_i64() -> None: a: i64 = 1 b: i64 = 2 @@ -394,8 +497,24 @@ def test_for_loop() -> None: assert n == 9 assert sum([x * x for x in range(i64(4 + int()))]) == 1 + 4 + 9 +def narrow1(x: Union[str, i64]) -> i64: + if isinstance(x, i64): + return x + return len(x) + +def narrow2(x: Union[str, i64]) -> i64: + if isinstance(x, int): + return x + return len(x) + +def test_isinstance() -> None: + assert narrow1(123) == 123 + assert narrow1("foobar") == 6 + assert narrow2(123) == 123 + assert narrow2("foobar") == 6 + [case testI64ErrorValuesAndUndefined] -from typing import Any +from typing import Any, Tuple import sys from mypy_extensions import mypyc_attr @@ -430,6 +549,33 @@ def test_method_error_value() -> None: with assertRaises(ValueError): C().maybe_raise(0, True) +def maybe_raise_tuple(n: i64, error: bool) -> Tuple[i64, i64]: + if error: + raise ValueError() + return n, n+ 1 + +def test_tuple_error_value() -> None: + for i in range(-1000, 1000): + assert maybe_raise_tuple(i, False) == (i, i + 1) + with assertRaises(ValueError): + maybe_raise_tuple(0, True) + f: Any = maybe_raise_tuple + for i in range(-1000, 1000): + assert f(i, False) == (i, i + 1) + with assertRaises(ValueError): + f(0, True) + +def maybe_raise_tuple2(n: i64, error: bool) -> Tuple[i64, int]: + if error: + raise ValueError() + return n, n+ 1 + +def test_tuple_error_value_2() -> None: + for i in range(-1000, 1000): + assert maybe_raise_tuple2(i, False) == (i, i + 1) + with assertRaises(ValueError): + maybe_raise_tuple(0, True) + def test_unbox_int() -> None: for i in list(range(-1000, 1000)) + 
[-(1 << 63), (1 << 63) - 1]: o: Any = i @@ -733,8 +879,34 @@ def test_del() -> None: with assertRaises(AttributeError): o.x +class UndefinedTuple: + def __init__(self, x: i64, y: i64) -> None: + if x != 0: + self.t = (x, y) + +def test_undefined_native_int_tuple() -> None: + o = UndefinedTuple(MAGIC, MAGIC) + assert o.t[0] == MAGIC + assert o.t[1] == MAGIC + o = UndefinedTuple(0, 0) + with assertRaises(AttributeError): + o.t + o = UndefinedTuple(-13, 45) + assert o.t == (-13, 45) + +def test_undefined_native_int_tuple_via_any() -> None: + cls: Any = UndefinedTuple + o: Any = cls(MAGIC, MAGIC) + assert o.t[0] == MAGIC + assert o.t[1] == MAGIC + o = cls(0, 0) + with assertRaises(AttributeError): + o.t + o = UndefinedTuple(-13, 45) + assert o.t == (-13, 45) + [case testI64DefaultArgValues] -from typing import Any, Iterator +from typing import Any, Iterator, Tuple from typing_extensions import Final MAGIC: Final = -113 @@ -893,6 +1065,31 @@ def test_kw_only_default_args() -> None: assert kw_only2(a=2, c=4) == 12 assert kw_only2(c=4, a=2) == 12 +def tuples(t: Tuple[i64, i64] = (MAGIC, MAGIC)) -> i64: + return t[0] + t[1] + +def test_tuple_arg_defaults() -> None: + assert tuples() == 2 * MAGIC + assert tuples((1, 2)) == 3 + assert tuples((MAGIC, MAGIC)) == 2 * MAGIC + tuples2: Any = tuples + assert tuples2() == 2 * MAGIC + assert tuples2((1, 2)) == 3 + assert tuples2((MAGIC, MAGIC)) == 2 * MAGIC + +class TupleInit: + def __init__(self, t: Tuple[i64, i64] = (MAGIC, MAGIC)) -> None: + self.t = t[0] + t[1] + +def test_tuple_init_arg_defaults() -> None: + assert TupleInit().t == 2 * MAGIC + assert TupleInit((1, 2)).t == 3 + assert TupleInit((MAGIC, MAGIC)).t == 2 * MAGIC + o: Any = TupleInit + assert o().t == 2 * MAGIC + assert o((1, 2)).t == 3 + assert o((MAGIC, MAGIC)).t == 2 * MAGIC + def many_args( a1: i64 = 0, a2: i64 = 1, @@ -1060,6 +1257,24 @@ def test_assign_error_value_conditionally() -> None: assert y == MAGIC assert z == MAGIC +def tuple_case(x: i64, y: i64) -> 
None: + if not int(): + t = (x, y) + assert t == (x, y) + if int(): + t2 = (x, y) + try: + print(t2) + except NameError as e: + assert str(e) == 'local variable "t2" referenced before assignment' + else: + assert False + +def test_conditionally_undefined_tuple() -> None: + tuple_case(2, 3) + tuple_case(-2, -3) + tuple_case(MAGIC, MAGIC) + def test_many_locals() -> None: x = int() if x: @@ -1126,3 +1341,173 @@ def test_many_locals() -> None: assert a31 == 10 assert a32 == 55 assert a33 == 20 + +[case testI64GlueMethodsAndInheritance] +from typing import Any +from typing_extensions import Final + +MYPY = False +if MYPY: + from mypy_extensions import i64, trait + +from testutil import assertRaises + +MAGIC: Final = -113 + +class Base: + def foo(self) -> i64: + return 5 + + def bar(self, x: i64 = 2) -> i64: + return x + 1 + + def hoho(self, x: i64) -> i64: + return x - 1 + +class Derived(Base): + def foo(self, x: i64 = 5) -> i64: + return x + 10 + + def bar(self, x: i64 = 3, y: i64 = 20) -> i64: + return x + y + 2 + + def hoho(self, x: i64 = 7) -> i64: + return x - 2 + +def test_derived_adds_bitmap() -> None: + b: Base = Derived() + assert b.foo() == 15 + +def test_derived_adds_another_default_arg() -> None: + b: Base = Derived() + assert b.bar() == 25 + assert b.bar(1) == 23 + assert b.bar(MAGIC) == MAGIC + 22 + +def test_derived_switches_arg_to_have_default() -> None: + b: Base = Derived() + assert b.hoho(5) == 3 + assert b.hoho(MAGIC) == MAGIC - 2 + +@trait +class T: + @property + def x(self) -> i64: ... + @property + def y(self) -> i64: ... 
+ +class C(T): + x: i64 = 1 + y: i64 = 4 + +def test_read_only_property_in_trait_implemented_as_attribute() -> None: + c = C() + c.x = 5 + assert c.x == 5 + c.x = MAGIC + assert c.x == MAGIC + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T = C() + assert t.y == 4 + t = c + assert t.x == MAGIC + c.x = 55 + assert t.x == 55 + assert t.y == 6 + a: Any = c + assert a.x == 55 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +class D(T): + xx: i64 + + @property + def x(self) -> i64: + return self.xx + + @property + def y(self) -> i64: + raise TypeError + +def test_read_only_property_in_trait_implemented_as_property() -> None: + d = D() + d.xx = 5 + assert d.x == 5 + d.xx = MAGIC + assert d.x == MAGIC + with assertRaises(TypeError): + d.y + t: T = d + assert t.x == MAGIC + d.xx = 6 + assert t.x == 6 + with assertRaises(TypeError): + t.y + +@trait +class T2: + x: i64 + y: i64 + +class C2(T2): + pass + +def test_inherit_trait_attribute() -> None: + c = C2() + c.x = 5 + assert c.x == 5 + c.x = MAGIC + assert c.x == MAGIC + with assertRaises(AttributeError): + c.y + c.y = 6 + assert c.y == 6 + t: T2 = C2() + with assertRaises(AttributeError): + t.y + t = c + assert t.x == MAGIC + c.x = 55 + assert t.x == 55 + assert t.y == 6 + a: Any = c + assert a.x == 55 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +class D2(T2): + x: i64 + y: i64 = 4 + +def test_implement_trait_attribute() -> None: + d = D2() + d.x = 5 + assert d.x == 5 + d.x = MAGIC + assert d.x == MAGIC + assert d.y == 4 + d.y = 6 + assert d.y == 6 + t: T2 = D2() + assert t.y == 4 + t = d + assert t.x == MAGIC + d.x = 55 + assert t.x == 55 + assert t.y == 6 + a: Any = d + assert a.x == 55 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 diff --git a/mypyc/test-data/run-integers.test b/mypyc/test-data/run-integers.test index 74e7cd6b8fb7..c65f36110b46 100644 --- a/mypyc/test-data/run-integers.test +++ b/mypyc/test-data/run-integers.test 
@@ -353,6 +353,9 @@ def is_true(x: int) -> bool: else: return False +def is_true2(x: int) -> bool: + return bool(x) + def is_false(x: int) -> bool: if not x: return True @@ -361,11 +364,32 @@ def is_false(x: int) -> bool: def test_int_as_bool() -> None: assert not is_true(0) + assert not is_true2(0) assert is_false(0) for x in 1, 55, -1, -7, 1 << 50, 1 << 101, -(1 << 50), -(1 << 101): assert is_true(x) + assert is_true2(x) assert not is_false(x) +def bool_as_int(b: bool) -> int: + return b + +def bool_as_int2(b: bool) -> int: + return int(b) + +def test_bool_as_int() -> None: + assert bool_as_int(False) == 0 + assert bool_as_int(True) == 1 + assert bool_as_int2(False) == 0 + assert bool_as_int2(True) == 1 + +def no_op_conversion(n: int) -> int: + return int(n) + +def test_no_op_conversion() -> None: + for x in 1, 55, -1, -7, 1 << 50, 1 << 101, -(1 << 50), -(1 << 101): + assert no_op_conversion(x) == x + def test_divide() -> None: for x in range(-100, 100): for y in range(-100, 100): diff --git a/mypyc/test-data/run-match.test b/mypyc/test-data/run-match.test new file mode 100644 index 000000000000..7b7ad9a4342c --- /dev/null +++ b/mypyc/test-data/run-match.test @@ -0,0 +1,283 @@ +[case testTheBigMatch_python3_10] +class Person: + __match_args__ = ("name", "age") + + name: str + age: int + + def __init__(self, name: str, age: int) -> None: + self.name = name + self.age = age + + def __str__(self) -> str: + return f"Person(name={self.name!r}, age={self.age})" + + +def f(x: object) -> None: + match x: + case 123: + print("test 1") + + case 456 | 789: + print("test 2") + + case True | False | None: + print("test 3") + + case Person("bob" as name, age): + print(f"test 4 ({name=}, {age=})") + + case num if num == 5: + print("test 5") + + case 6 as num: + print(f"test 6 ({num=})") + + case (7 | "7") as value: + print(f"test 7 ({value=})") + + case Person("alice", age=123): + print("test 8") + + case Person("charlie", age=123 | 456): + print("test 9") + + case 
Person("dave", 123) as dave: + print(f"test 10 {dave}") + + case {"test": 11}: + print("test 11") + + case {"test": 12, **rest}: + print(f"test 12 (rest={rest})") + + case {}: + print("test map final") + + case ["test", 13]: + print("test 13") + + case ["test", 13, _]: + print("test 13b") + + case ["test", 14, *_]: + print("test 14") + + # TODO: Fix "rest" being used here coliding with above "rest" + case ["test", 15, *rest2]: + print(f"test 15 ({rest2})") + + case ["test", *rest3, 16]: + print(f"test 16 ({rest3})") + + case [*rest4, "test", 17]: + print(f"test 17 ({rest4})") + + case [*rest4, "test", 18, "some", "fluff"]: + print(f"test 18 ({rest4})") + + case str("test 19"): + print("test 19") + + case str(test_20) if test_20.startswith("test 20"): + print(f"test 20 ({test_20[7:]!r})") + + case ("test 21" as value) | ("test 21 as well" as value): + print(f"test 21 ({value[7:]!r})") + + case []: + print("test sequence final") + + case _: + print("test final") +[file driver.py] +from native import f, Person + +# test 1 +f(123) + +# test 2 +f(456) +f(789) + +# test 3 +f(True) +f(False) +f(None) + +# test 4 +f(Person("bob", 123)) + +# test 5 +f(5) + +# test 6 +f(6) + +# test 7 +f(7) +f("7") + +# test 8 +f(Person("alice", 123)) + +# test 9 +f(Person("charlie", 123)) +f(Person("charlie", 456)) + +# test 10 +f(Person("dave", 123)) + +# test 11 +f({"test": 11}) +f({"test": 11, "some": "key"}) + +# test 12 +f({"test": 12}) +f({"test": 12, "key": "value"}) +f({"test": 12, "key": "value", "abc": "123"}) + +# test map final +f({}) + +# test 13 +f(["test", 13]) + +# test 13b +f(["test", 13, "fail"]) + +# test 14 +f(["test", 14]) +f(["test", 14, "something"]) + +# test 15 +f(["test", 15]) +f(["test", 15, "something"]) + +# test 16 +f(["test", 16]) +f(["test", "filler", 16]) +f(["test", "more", "filler", 16]) + +# test 17 +f(["test", 17]) +f(["stuff", "test", 17]) +f(["more", "stuff", "test", 17]) + +# test 18 +f(["test", 18, "some", "fluff"]) +f(["stuff", "test", 18, "some", 
"fluff"]) +f(["more", "stuff", "test", 18, "some", "fluff"]) + +# test 19 +f("test 19") + +# test 20 +f("test 20") +f("test 20 something else") + +# test 21 +f("test 21") +f("test 21 as well") + +# test sequence final +f([]) + +# test final +f("") + +[out] +test 1 +test 2 +test 2 +test 3 +test 3 +test 3 +test 4 (name='bob', age=123) +test 5 +test 6 (num=6) +test 7 (value=7) +test 7 (value='7') +test 8 +test 9 +test 9 +test 10 Person(name='dave', age=123) +test 11 +test 11 +test 12 (rest={}) +test 12 (rest={'key': 'value'}) +test 12 (rest={'key': 'value', 'abc': '123'}) +test map final +test 13 +test 13b +test 14 +test 14 +test 15 ([]) +test 15 (['something']) +test 16 ([]) +test 16 (['filler']) +test 16 (['more', 'filler']) +test 17 ([]) +test 17 (['stuff']) +test 17 (['more', 'stuff']) +test 18 ([]) +test 18 (['stuff']) +test 18 (['more', 'stuff']) +test 19 +test 20 ('') +test 20 (' something else') +test 21 ('') +test 21 (' as well') +test sequence final +test final +[case testCustomMappingAndSequenceObjects_python3_10] +def f(x: object) -> None: + match x: + case {"key": "value", **rest}: + print(rest, type(rest)) + + case [1, 2, *rest2]: + print(rest2, type(rest2)) + +[file driver.py] +from collections.abc import Mapping, Sequence + +from native import f + +class CustomMapping(Mapping): + inner: dict + + def __init__(self, inner: dict) -> None: + self.inner = inner + + def __getitem__(self, key): + return self.inner[key] + + def __iter__(self): + return iter(self.inner) + + def __len__(self) -> int: + return len(self.inner) + + +class CustomSequence(Sequence): + inner: list + + def __init__(self, inner: list) -> None: + self.inner = inner + + def __getitem__(self, index: int) -> None: + return self.inner[index] + + def __len__(self) -> int: + return len(self.inner) + +mapping = CustomMapping({"key": "value", "some": "data"}) +sequence = CustomSequence([1, 2, 3]) + +f(mapping) +f(sequence) + +[out] +{'some': 'data'} +[3] diff --git 
a/mypyc/test-data/run-misc.test b/mypyc/test-data/run-misc.test index 001e0aa41b25..267a3441808f 100644 --- a/mypyc/test-data/run-misc.test +++ b/mypyc/test-data/run-misc.test @@ -1116,3 +1116,33 @@ i = b"foo" def test_redefinition() -> None: assert i == b"foo" + +[case testWithNative] +class DummyContext: + def __init__(self): + self.c = 0 + def __enter__(self) -> None: + self.c += 1 + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + self.c -= 1 + +def test_dummy_context() -> None: + c = DummyContext() + with c: + assert c.c == 1 + assert c.c == 0 + +[case testWithNativeVarArgs] +class DummyContext: + def __init__(self): + self.c = 0 + def __enter__(self) -> None: + self.c += 1 + def __exit__(self, *args: object) -> None: + self.c -= 1 + +def test_dummy_context() -> None: + c = DummyContext() + with c: + assert c.c == 1 + assert c.c == 0 diff --git a/mypyc/test-data/run-sets.test b/mypyc/test-data/run-sets.test index 98ac92d569b7..56c946933fac 100644 --- a/mypyc/test-data/run-sets.test +++ b/mypyc/test-data/run-sets.test @@ -115,3 +115,36 @@ from native import update s = {1, 2, 3} update(s, [5, 4, 3]) assert s == {1, 2, 3, 4, 5} + +[case testPrecomputedFrozenSets] +from typing import Any +from typing_extensions import Final + +CONST: Final = "CONST" +non_const = "non_const" + +def main_set(item: Any) -> bool: + return item in {None, False, 1, 2.0, "3", b"4", 5j, (6,), ((7,),), (), CONST} + +def main_negated_set(item: Any) -> bool: + return item not in {None, False, 1, 2.0, "3", b"4", 5j, (6,), ((7,),), (), CONST} + +def non_final_name_set(item: Any) -> bool: + return item in {non_const} + +s = set() +for i in {None, False, 1, 2.0, "3", b"4", 5j, (6,), CONST}: + s.add(i) + +def test_in_set() -> None: + for item in (None, False, 1, 2.0, "3", b"4", 5j, (6,), ((7,),), (), CONST): + assert main_set(item), f"{item!r} should be in set_main" + assert not main_negated_set(item), item + + assert non_final_name_set(non_const) + global non_const + non_const = 
"updated" + assert non_final_name_set("updated") + +def test_for_set() -> None: + assert not s ^ {None, False, 1, 2.0, "3", b"4", 5j, (6,), CONST}, s diff --git a/mypyc/test-data/run-strings.test b/mypyc/test-data/run-strings.test index c2b010bdb2bd..4a20c13ce789 100644 --- a/mypyc/test-data/run-strings.test +++ b/mypyc/test-data/run-strings.test @@ -136,6 +136,9 @@ def is_true(x: str) -> bool: else: return False +def is_true2(x: str) -> bool: + return bool(x) + def is_false(x: str) -> bool: if not x: return True @@ -145,8 +148,10 @@ def is_false(x: str) -> bool: def test_str_to_bool() -> None: assert is_false('') assert not is_true('') + assert not is_true2('') for x in 'a', 'foo', 'bar', 'some string': assert is_true(x) + assert is_true2(x) assert not is_false(x) def test_str_min_max() -> None: diff --git a/mypyc/test-data/run-tuples.test b/mypyc/test-data/run-tuples.test index 26b039320844..f6c92b9c720f 100644 --- a/mypyc/test-data/run-tuples.test +++ b/mypyc/test-data/run-tuples.test @@ -99,8 +99,6 @@ assert f(Sub(3, 2)) == 3 [case testNamedTupleClassSyntax] from typing import Dict, List, NamedTuple, Optional, Tuple, Union -class ClassIR: pass - class FuncIR: pass StealsDescription = Union[bool, List[bool]] @@ -119,8 +117,12 @@ class Record(NamedTuple): ordering: Optional[List[int]] extra_int_constants: List[Tuple[int]] +# Make sure mypyc loads the annotation string for this forward reference. 
+# Ref: https://github.com/mypyc/mypyc/issues/938 +class ClassIR: pass + [file driver.py] -from typing import Optional +from typing import ForwardRef, Optional from native import ClassIR, FuncIR, Record assert Record.__annotations__ == { @@ -129,7 +131,7 @@ assert Record.__annotations__ == { 'is_borrowed': bool, 'hash': str, 'python_path': tuple, - 'type': ClassIR, + 'type': ForwardRef('ClassIR'), 'method': FuncIR, 'shadow_method': type, 'classes': dict, diff --git a/mypyc/test/test_commandline.py b/mypyc/test/test_commandline.py index aafe1e4adc1b..f66ca2ec8ff0 100644 --- a/mypyc/test/test_commandline.py +++ b/mypyc/test/test_commandline.py @@ -58,6 +58,11 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: ) if "ErrorOutput" in testcase.name or cmd.returncode != 0: out += cmd.stdout + elif "WarningOutput" in testcase.name: + # Strip out setuptools build related output since we're only + # interested in the messages emitted during compilation. + messages, _, _ = cmd.stdout.partition(b"running build_ext") + out += messages if cmd.returncode == 0: # Run main program diff --git a/mypyc/test/test_irbuild.py b/mypyc/test/test_irbuild.py index bfce57c97903..cb5e690eed55 100644 --- a/mypyc/test/test_irbuild.py +++ b/mypyc/test/test_irbuild.py @@ -3,6 +3,7 @@ from __future__ import annotations import os.path +import sys from mypy.errors import CompileError from mypy.test.config import test_temp_dir @@ -23,6 +24,7 @@ files = [ "irbuild-basic.test", "irbuild-int.test", + "irbuild-bool.test", "irbuild-lists.test", "irbuild-tuple.test", "irbuild-dict.test", @@ -45,8 +47,12 @@ "irbuild-dunders.test", "irbuild-singledispatch.test", "irbuild-constant-fold.test", + "irbuild-glue-methods.test", ] +if sys.version_info >= (3, 10): + files.append("irbuild-match.test") + class TestGenOps(MypycDataSuite): files = files diff --git a/mypyc/test/test_ircheck.py b/mypyc/test/test_ircheck.py index 30ddd39fef0d..008963642272 100644 --- a/mypyc/test/test_ircheck.py +++ 
b/mypyc/test/test_ircheck.py @@ -5,7 +5,17 @@ from mypyc.analysis.ircheck import FnError, can_coerce_to, check_func_ir from mypyc.ir.class_ir import ClassIR from mypyc.ir.func_ir import FuncDecl, FuncIR, FuncSignature -from mypyc.ir.ops import Assign, BasicBlock, Goto, Integer, LoadLiteral, Op, Register, Return +from mypyc.ir.ops import ( + Assign, + BasicBlock, + Goto, + Integer, + LoadAddress, + LoadLiteral, + Op, + Register, + Return, +) from mypyc.ir.pprint import format_func from mypyc.ir.rtypes import ( RInstance, @@ -16,6 +26,7 @@ int64_rprimitive, none_rprimitive, object_rprimitive, + pointer_rprimitive, str_rprimitive, ) @@ -88,7 +99,7 @@ def test_invalid_register_source(self) -> None: ret = Return(value=Register(type=none_rprimitive, name="r1")) block = self.basic_block([ret]) fn = FuncIR(decl=self.func_decl(name="func_1"), arg_regs=[], blocks=[block]) - assert_has_error(fn, FnError(source=ret, desc="Invalid op reference to register r1")) + assert_has_error(fn, FnError(source=ret, desc="Invalid op reference to register 'r1'")) def test_invalid_op_source(self) -> None: ret = Return(value=LoadLiteral(value="foo", rtype=str_rprimitive)) @@ -170,3 +181,19 @@ def test_pprint(self) -> None: " goto L1", " ERR: Invalid control operation target: 1", ] + + def test_load_address_declares_register(self) -> None: + rx = Register(str_rprimitive, "x") + ry = Register(pointer_rprimitive, "y") + load_addr = LoadAddress(pointer_rprimitive, rx) + assert_no_errors( + FuncIR( + decl=self.func_decl(name="func_1"), + arg_regs=[], + blocks=[ + self.basic_block( + ops=[load_addr, Assign(ry, load_addr), Return(value=NONE_VALUE)] + ) + ], + ) + ) diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index 351caf7c93ed..c867c9d37dac 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -68,6 +68,9 @@ if sys.version_info >= (3, 8): files.append("run-python38.test") +if sys.version_info >= (3, 10): + files.append("run-match.test") + setup_format = """\ from 
setuptools import setup from mypyc.build import mypycify @@ -309,6 +312,9 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> stderr=subprocess.STDOUT, env=env, ) + if sys.version_info >= (3, 12): + # TODO: testDecorators1 hangs on 3.12, remove this once fixed + proc.wait(timeout=30) output = proc.communicate()[0].decode("utf8") outlines = output.splitlines() diff --git a/mypyc/test/test_subtype.py b/mypyc/test/test_typeops.py similarity index 64% rename from mypyc/test/test_subtype.py rename to mypyc/test/test_typeops.py index 4a0d8737c852..f414edd1a2bb 100644 --- a/mypyc/test/test_subtype.py +++ b/mypyc/test/test_typeops.py @@ -1,16 +1,19 @@ -"""Test cases for is_subtype and is_runtime_subtype.""" +"""Test cases for various RType operations.""" from __future__ import annotations import unittest from mypyc.ir.rtypes import ( + RUnion, bit_rprimitive, bool_rprimitive, int32_rprimitive, int64_rprimitive, int_rprimitive, + object_rprimitive, short_int_rprimitive, + str_rprimitive, ) from mypyc.rt_subtype import is_runtime_subtype from mypyc.subtype import is_subtype @@ -50,3 +53,24 @@ def test_bit(self) -> None: def test_bool(self) -> None: assert not is_runtime_subtype(bool_rprimitive, bit_rprimitive) assert not is_runtime_subtype(bool_rprimitive, int_rprimitive) + + +class TestUnionSimplification(unittest.TestCase): + def test_simple_type_result(self) -> None: + assert RUnion.make_simplified_union([int_rprimitive]) == int_rprimitive + + def test_remove_duplicate(self) -> None: + assert RUnion.make_simplified_union([int_rprimitive, int_rprimitive]) == int_rprimitive + + def test_cannot_simplify(self) -> None: + assert RUnion.make_simplified_union( + [int_rprimitive, str_rprimitive, object_rprimitive] + ) == RUnion([int_rprimitive, str_rprimitive, object_rprimitive]) + + def test_nested(self) -> None: + assert RUnion.make_simplified_union( + [int_rprimitive, RUnion([str_rprimitive, int_rprimitive])] + ) == RUnion([int_rprimitive, 
str_rprimitive]) + assert RUnion.make_simplified_union( + [int_rprimitive, RUnion([str_rprimitive, RUnion([int_rprimitive])])] + ) == RUnion([int_rprimitive, str_rprimitive]) diff --git a/mypyc/test/testutil.py b/mypyc/test/testutil.py index 8339889fa9f5..609ffc27385e 100644 --- a/mypyc/test/testutil.py +++ b/mypyc/test/testutil.py @@ -108,7 +108,7 @@ def build_ir_for_single_file2( options.hide_error_codes = True options.use_builtins_fixtures = True options.strict_optional = True - options.python_version = (3, 6) + options.python_version = compiler_options.python_version or (3, 6) options.export_types = True options.preserve_asts = True options.allow_empty_bodies = True @@ -277,6 +277,7 @@ def infer_ir_build_options_from_test_name(name: str) -> CompilerOptions | None: m = re.search(r"_python([3-9]+)_([0-9]+)(_|\b)", name) if m: options.capi_version = (int(m.group(1)), int(m.group(2))) + options.python_version = options.capi_version elif "_py" in name or "_Python" in name: assert False, f"Invalid _py* suffix (should be _pythonX_Y): {name}" return options diff --git a/mypyc/transform/exceptions.py b/mypyc/transform/exceptions.py index cc638142c397..2851955ff38f 100644 --- a/mypyc/transform/exceptions.py +++ b/mypyc/transform/exceptions.py @@ -26,12 +26,14 @@ GetAttr, Integer, LoadErrorValue, + Op, RegisterOp, Return, SetAttr, + TupleGet, Value, ) -from mypyc.ir.rtypes import bool_rprimitive +from mypyc.ir.rtypes import RTuple, bool_rprimitive from mypyc.primitives.exc_ops import err_occurred_op from mypyc.primitives.registry import CFunctionDescription @@ -100,9 +102,7 @@ def split_blocks_at_errors( # semantics, using a temporary bool with value false target = Integer(0, bool_rprimitive) elif op.error_kind == ERR_MAGIC_OVERLAPPING: - errvalue = Integer(int(target.type.c_undefined), rtype=op.type) - comp = ComparisonOp(target, errvalue, ComparisonOp.EQ) - cur_block.ops.append(comp) + comp = insert_overlapping_error_value_check(cur_block.ops, target) new_block2 = 
BasicBlock() new_blocks.append(new_block2) branch = Branch( @@ -163,3 +163,17 @@ def adjust_error_kinds(block: BasicBlock) -> None: if isinstance(op, SetAttr): if op.class_type.class_ir.is_always_defined(op.attr): op.error_kind = ERR_NEVER + + +def insert_overlapping_error_value_check(ops: list[Op], target: Value) -> ComparisonOp: + """Append to ops to check for an overlapping error value.""" + typ = target.type + if isinstance(typ, RTuple): + item = TupleGet(target, 0) + ops.append(item) + return insert_overlapping_error_value_check(ops, item) + else: + errvalue = Integer(int(typ.c_undefined), rtype=typ) + op = ComparisonOp(target, errvalue, ComparisonOp.EQ) + ops.append(op) + return op diff --git a/mypyc/transform/uninit.py b/mypyc/transform/uninit.py index 041dd2545dff..6bf71ac4a8bc 100644 --- a/mypyc/transform/uninit.py +++ b/mypyc/transform/uninit.py @@ -20,7 +20,7 @@ Unreachable, Value, ) -from mypyc.ir.rtypes import bitmap_rprimitive, is_fixed_width_rtype +from mypyc.ir.rtypes import bitmap_rprimitive def insert_uninit_checks(ir: FuncIR) -> None: @@ -77,7 +77,7 @@ def split_blocks_at_uninits( init_registers.append(src) init_registers_set.add(src) - if not is_fixed_width_rtype(src.type): + if not src.type.error_overlap: cur_block.ops.append( Branch( src, diff --git a/pyproject.toml b/pyproject.toml index fe41bbccb6a5..1348b9463639 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,9 +19,9 @@ build-backend = "setuptools.build_meta" [tool.black] line-length = 99 -target-version = ['py37'] +target-version = ["py37", "py38", "py39", "py310", "py311"] skip-magic-trailing-comma = true -extend-exclude = ''' +force-exclude = ''' ^/mypy/typeshed| ^/mypyc/test-data| ^/test-data diff --git a/runtests.py b/runtests.py index be4ad4add08a..ade0a8adee5e 100755 --- a/runtests.py +++ b/runtests.py @@ -50,7 +50,7 @@ # Self type check "self": [executable, "-m", "mypy", "--config-file", "mypy_self_check.ini", "-p", "mypy"], # Lint - "lint": ["flake8", "-j0"], + "lint": 
["flake8", "-j3"], "format-black": ["black", "."], "format-isort": ["isort", "."], # Fast test cases only (this is the bulk of the test suite) diff --git a/setup.py b/setup.py index 669e0cc4b615..a148237f0b95 100644 --- a/setup.py +++ b/setup.py @@ -171,7 +171,7 @@ def run(self): classifiers = [ - "Development Status :: 4 - Beta", + "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", diff --git a/test-data/packages/modulefinder/nsx-pkg3/nsx/c/c b/test-data/packages/modulefinder/nsx-pkg3/nsx/c/c new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/packages/modulefinder/pkg1/a b/test-data/packages/modulefinder/pkg1/a new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test index f67d9859397e..98be314b9c27 100644 --- a/test-data/unit/check-abstract.test +++ b/test-data/unit/check-abstract.test @@ -102,16 +102,16 @@ class B(A, I): pass from abc import abstractmethod, ABCMeta +class I(metaclass=ABCMeta): + @abstractmethod + def f(self): pass + o = None # type: object t = None # type: type o = I t = I -class I(metaclass=ABCMeta): - @abstractmethod - def f(self): pass - [case testAbstractClassInCasts] from typing import cast from abc import abstractmethod, ABCMeta diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 195e70cf5880..40efe2d2cece 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -925,3 +925,33 @@ async def f() -> AsyncGenerator[int, None]: [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] + +[case testAwaitUnion] +from typing import overload, Union + +class A: ... +class B: ... + +@overload +async def foo(x: A) -> B: ... +@overload +async def foo(x: B) -> A: ... +async def foo(x): ... 
+ +async def bar(x: Union[A, B]) -> None: + reveal_type(await foo(x)) # N: Revealed type is "Union[__main__.B, __main__.A]" + +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] + +[case testInvalidComprehensionNoCrash] +async def foo(x: int) -> int: ... + +crasher = [await foo(x) for x in [1, 2, 3]] # E: "await" outside function + +def bad() -> None: + y = [await foo(x) for x in [1, 2, 3]] # E: "await" outside coroutine ("async def") +async def good() -> None: + y = [await foo(x) for x in [1, 2, 3]] # OK +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-attr.test index fe123acfa001..f555f2ea7011 100644 --- a/test-data/unit/check-attr.test +++ b/test-data/unit/check-attr.test @@ -1788,3 +1788,82 @@ class C: c = C(x=[C.D()]) reveal_type(c.x) # N: Revealed type is "builtins.list[__main__.C.D]" [builtins fixtures/list.pyi] + +[case testRedefinitionInFrozenClassNoCrash] +import attr + +@attr.s +class MyData: + is_foo: bool = attr.ib() + + @staticmethod # E: Name "is_foo" already defined on line 5 + def is_foo(string: str) -> bool: ... +[builtins fixtures/classmethod.pyi] + +[case testOverrideWithPropertyInFrozenClassNoCrash] +from attrs import frozen + +@frozen(kw_only=True) +class Base: + name: str + +@frozen(kw_only=True) +class Sub(Base): + first_name: str + last_name: str + + @property + def name(self) -> str: ... +[builtins fixtures/property.pyi] + +[case testOverrideWithPropertyInFrozenClassChecked] +from attrs import frozen + +@frozen(kw_only=True) +class Base: + name: str + +@frozen(kw_only=True) +class Sub(Base): + first_name: str + last_name: str + + @property + def name(self) -> int: ... 
# E: Signature of "name" incompatible with supertype "Base" + +# This matches runtime semantics +reveal_type(Sub) # N: Revealed type is "def (*, name: builtins.str, first_name: builtins.str, last_name: builtins.str) -> __main__.Sub" +[builtins fixtures/property.pyi] + +[case testFinalInstanceAttribute] +from attrs import define +from typing import Final + +@define +class C: + a: Final[int] + +reveal_type(C) # N: Revealed type is "def (a: builtins.int) -> __main__.C" + +C(1).a = 2 # E: Cannot assign to final attribute "a" + +[builtins fixtures/property.pyi] + +[case testFinalInstanceAttributeInheritance] +from attrs import define +from typing import Final + +@define +class C: + a: Final[int] + +@define +class D(C): + b: Final[str] + +reveal_type(D) # N: Revealed type is "def (a: builtins.int, b: builtins.str) -> __main__.D" + +D(1, "").a = 2 # E: Cannot assign to final attribute "a" +D(1, "").b = "2" # E: Cannot assign to final attribute "b" + +[builtins fixtures/property.pyi] \ No newline at end of file diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test index a4056c8cb576..c16b9e40122d 100644 --- a/test-data/unit/check-basic.test +++ b/test-data/unit/check-basic.test @@ -12,25 +12,26 @@ class A: pass class B: pass [case testConstructionAndAssignment] -x = None # type: A -x = A() -if int(): - x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: def __init__(self): pass class B: def __init__(self): pass +x = None # type: A +x = A() +if int(): + x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testInheritInitFromObject] +class A(object): pass +class B(object): pass x = None # type: A if int(): x = A() if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A(object): pass -class B(object): pass - [case testImplicitInheritInitFromObject] +class A: pass +class B: pass x = None # type: A 
o = None # type: object if int(): @@ -39,10 +40,6 @@ if int(): x = A() if int(): o = x -class A: pass -class B: pass -[out] - [case testTooManyConstructorArgs] import typing object(object()) @@ -51,21 +48,15 @@ main:2: error: Too many arguments for "object" [case testVarDefWithInit] import typing -a = A() # type: A -b = object() # type: A class A: pass -[out] -main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") - +a = A() # type: A +b = object() # type: A # E: Incompatible types in assignment (expression has type "object", variable has type "A") [case testInheritanceBasedSubtyping] import typing -x = B() # type: A -y = A() # type: B # Fail class A: pass class B(A): pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") - +x = B() # type: A +y = A() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testDeclaredVariableInParentheses] (x) = None # type: int @@ -101,32 +92,22 @@ w = 1 # E: Incompatible types in assignment (expression has type "int", variabl [case testFunction] import typing -def f(x: 'A') -> None: pass -f(A()) -f(B()) # Fail class A: pass class B: pass -[out] -main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A" - +def f(x: 'A') -> None: pass +f(A()) +f(B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" [case testNotCallable] import typing -A()() class A: pass -[out] -main:2: error: "A" not callable - +A()() # E: "A" not callable [case testSubtypeArgument] import typing -def f(x: 'A', y: 'B') -> None: pass -f(B(), A()) # Fail -f(B(), B()) - class A: pass class B(A): pass -[out] -main:3: error: Argument 2 to "f" has incompatible type "A"; expected "B" - +def f(x: 'A', y: 'B') -> None: pass +f(B(), A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B" +f(B(), B()) [case testInvalidArgumentCount] import typing def f(x, y) -> None: pass @@ -194,12 
+175,10 @@ main:4: error: Incompatible types in assignment (expression has type "B", variab [case testVariableInitializationWithSubtype] import typing -x = B() # type: A -y = A() # type: B # Fail class A: pass class B(A): pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") +x = B() # type: A +y = A() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") -- Misc @@ -217,15 +196,11 @@ main:3: error: Incompatible return value type (got "B", expected "A") [case testTopLevelContextAndInvalidReturn] import typing -def f() -> 'A': - return B() -a = B() # type: A class A: pass class B: pass -[out] -main:3: error: Incompatible return value type (got "B", expected "A") -main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") - +def f() -> 'A': + return B() # E: Incompatible return value type (got "B", expected "A") +a = B() # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testEmptyReturnInAnyTypedFunction] from typing import Any def f() -> Any: @@ -252,6 +227,8 @@ reveal_type(__annotations__) # N: Revealed type is "builtins.dict[builtins.str, [case testLocalVariableShadowing] +class A: pass +class B: pass a = None # type: A if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") @@ -263,10 +240,6 @@ def f() -> None: a = B() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = A() - -class A: pass -class B: pass - [case testGlobalDefinedInBlockWithType] class A: pass while A: diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index 8e0545953bd8..8ae7f6555f9d 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -393,8 +393,6 @@ class X(typing.NamedTuple): [out] main:6: error: Invalid 
statement in NamedTuple definition; expected "field_name: field_type [= default]" main:7: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" -main:7: error: Type cannot be declared in assignment to non-self attribute -main:7: error: "int" has no attribute "x" main:9: error: Non-default NamedTuple fields cannot follow default fields [builtins fixtures/list.pyi] diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index b16387f194d4..f1af13923fd7 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -3,64 +3,56 @@ [case testMethodCall] +class A: + def foo(self, x: 'A') -> None: pass +class B: + def bar(self, x: 'B', y: A) -> None: pass a = None # type: A b = None # type: B -a.foo(B()) # Fail -a.bar(B(), A()) # Fail +a.foo(B()) # E: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A" +a.bar(B(), A()) # E: "A" has no attribute "bar" a.foo(A()) b.bar(B(), A()) +[case testMethodCallWithSubtype] class A: def foo(self, x: 'A') -> None: pass -class B: - def bar(self, x: 'B', y: A) -> None: pass -[out] -main:5: error: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A" -main:6: error: "A" has no attribute "bar" - -[case testMethodCallWithSubtype] + def bar(self, x: 'B') -> None: pass +class B(A): pass a = None # type: A a.foo(A()) a.foo(B()) -a.bar(A()) # Fail +a.bar(A()) # E: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B" a.bar(B()) +[case testInheritingMethod] class A: - def foo(self, x: 'A') -> None: pass - def bar(self, x: 'B') -> None: pass + def foo(self, x: 'B') -> None: pass class B(A): pass -[out] -main:5: error: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B" - -[case testInheritingMethod] a = None # type: B a.foo(A()) # Fail a.foo(B()) -class A: - def foo(self, x: 'B') -> None: pass -class B(A): pass -[targets __main__, __main__, __main__.A.foo] +[targets __main__, __main__.A.foo] [out] 
-main:3: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B" +main:6: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B" [case testMethodCallWithInvalidNumberOfArguments] +class A: + def foo(self, x: 'A') -> None: pass a = None # type: A a.foo() # Fail a.foo(object(), A()) # Fail - -class A: - def foo(self, x: 'A') -> None: pass [out] -main:3: error: Missing positional argument "x" in call to "foo" of "A" -main:4: error: Too many arguments for "foo" of "A" -main:4: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A" +main:5: error: Missing positional argument "x" in call to "foo" of "A" +main:6: error: Too many arguments for "foo" of "A" +main:6: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A" [case testMethodBody] import typing @@ -216,13 +208,11 @@ main:11: error: "B" has no attribute "a" [case testExplicitAttributeInBody] -a = None # type: A -a.x = object() # Fail -a.x = A() class A: x = None # type: A -[out] -main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") +a = None # type: A +a.x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") +a.x = A() [case testAttributeDefinedInNonInitMethod] import typing @@ -629,64 +619,50 @@ class B(A): [case testTrivialConstructor] -import typing -a = A() # type: A -b = A() # type: B # Fail class A: def __init__(self) -> None: pass -class B: pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") +a = A() # type: A +b = A() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") +class B: pass [case testConstructor] -import typing -a = A(B()) # type: A -aa = A(object()) # type: A # Fail -b = A(B()) # type: B # Fail class A: def __init__(self, x: 'B') -> None: pass class B: pass -[out] -main:3: error: Argument 1 to "A" has incompatible 
type "object"; expected "B" -main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") -[case testConstructorWithTwoArguments] -import typing -a = A(C(), B()) # type: A # Fail +a = A(B()) # type: A +aa = A(object()) # type: A # E: Argument 1 to "A" has incompatible type "object"; expected "B" +b = A(B()) # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") +[case testConstructorWithTwoArguments] class A: def __init__(self, x: 'B', y: 'C') -> None: pass class B: pass class C(B): pass -[out] -main:2: error: Argument 2 to "A" has incompatible type "B"; expected "C" + +a = A(C(), B()) # type: A # E: Argument 2 to "A" has incompatible type "B"; expected "C" [case testInheritedConstructor] -import typing -b = B(C()) # type: B -a = B(D()) # type: A # Fail -class A: - def __init__(self, x: 'C') -> None: pass class B(A): pass class C: pass class D: pass -[out] -main:3: error: Argument 1 to "B" has incompatible type "D"; expected "C" + +b = B(C()) # type: B +a = B(D()) # type: A # E: Argument 1 to "B" has incompatible type "D"; expected "C" +class A: + def __init__(self, x: 'C') -> None: pass [case testOverridingWithIncompatibleConstructor] -import typing -A() # Fail -B(C()) # Fail -A(C()) -B() class A: def __init__(self, x: 'C') -> None: pass class B(A): def __init__(self) -> None: pass class C: pass -[out] -main:2: error: Missing positional argument "x" in call to "A" -main:3: error: Too many arguments for "B" + +A() # E: Missing positional argument "x" in call to "A" +B(C()) # E: Too many arguments for "B" +A(C()) +B() [case testConstructorWithReturnValueType] import typing @@ -826,15 +802,12 @@ class Foo: pass [case testGlobalFunctionInitWithReturnType] -import typing -a = __init__() # type: A -b = __init__() # type: B # Fail -def __init__() -> 'A': pass class A: pass class B: pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") +def 
__init__() -> 'A': pass +a = __init__() # type: A +b = __init__() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testAccessingInit] from typing import Any, cast class A: @@ -844,7 +817,12 @@ a.__init__(a) # E: Accessing "__init__" on an instance is unsound, since instan (cast(Any, a)).__init__(a) [case testDeepInheritanceHierarchy] -import typing +class A: pass +class B(A): pass +class C(B): pass +class D(C): pass +class D2(C): pass + d = C() # type: D # E: Incompatible types in assignment (expression has type "C", variable has type "D") if int(): d = B() # E: Incompatible types in assignment (expression has type "B", variable has type "D") @@ -859,12 +837,6 @@ b = D() # type: B if int(): b = D2() -class A: pass -class B(A): pass -class C(B): pass -class D(C): pass -class D2(C): pass - [case testConstructorJoinsWithCustomMetaclass] # flags: --strict-optional from typing import TypeVar @@ -1030,7 +1002,7 @@ A.B = None # E: Cannot assign to a type [targets __main__] [case testAccessingClassAttributeWithTypeInferenceIssue] -x = C.x # E: Cannot determine type of "x" +x = C.x # E: Cannot determine type of "x" # E: Name "C" is used before definition def f() -> int: return 1 class C: x = f() @@ -1209,13 +1181,9 @@ class A: [case testMultipleClassDefinition] -import typing -A() class A: pass -class A: pass -[out] -main:4: error: Name "A" already defined on line 3 - +class A: pass # E: Name "A" already defined on line 1 +A() [case testDocstringInClass] import typing class A: @@ -2351,7 +2319,7 @@ reveal_type(Fraction() + Fraction()) # N: Revealed type is "builtins.str" [case testReverseOperatorTypeVar2b] from typing import TypeVar -T = TypeVar("T", Real, Fraction) +T = TypeVar("T", "Real", "Fraction") class Real: def __add__(self, other: Fraction) -> str: ... 
class Fraction(Real): @@ -2901,7 +2869,7 @@ b.bad = 'a' # E: Incompatible types in assignment (expression has type "str", v from typing import Any class Test: - def __setattr__() -> None: ... # E: Method must have at least one argument # E: Invalid signature "Callable[[], None]" for "__setattr__" + def __setattr__() -> None: ... # E: Method must have at least one argument. Did you forget the "self" argument? # E: Invalid signature "Callable[[], None]" for "__setattr__" t = Test() t.crash = 'test' # E: "Test" has no attribute "crash" @@ -2955,7 +2923,11 @@ c.__setattr__("x", 42, p=True) [case testCallableObject] -import typing +class A: + def __call__(self, x: 'A') -> 'A': + pass +class B: pass + a = A() b = B() @@ -2968,11 +2940,6 @@ if int(): if int(): b = a(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") -class A: - def __call__(self, x: A) -> A: - pass -class B: pass - -- __new__ -- -------- @@ -3872,28 +3839,59 @@ class Super: def foo(self, a: C) -> C: pass class Sub(Super): - @overload # Fail + @overload def foo(self, a: A) -> A: pass @overload def foo(self, a: B) -> C: pass # Fail @overload def foo(self, a: C) -> C: pass + +class Sub2(Super): + @overload + def foo(self, a: B) -> C: pass # Fail + @overload + def foo(self, a: A) -> A: pass + @overload + def foo(self, a: C) -> C: pass + +class Sub3(Super): + @overload + def foo(self, a: A) -> int: pass + @overload + def foo(self, a: A) -> A: pass + @overload + def foo(self, a: C) -> C: pass [builtins fixtures/classmethod.pyi] [out] -tmp/foo.pyi:16: error: Signature of "foo" incompatible with supertype "Super" -tmp/foo.pyi:16: note: Superclass: -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: A) -> A -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: C) -> C -tmp/foo.pyi:16: note: Subclass: -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: A) -> A -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: 
def foo(self, a: B) -> C -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: C) -> C tmp/foo.pyi:19: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader +tmp/foo.pyi:24: error: Signature of "foo" incompatible with supertype "Super" +tmp/foo.pyi:24: note: Superclass: +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: A) -> A +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: C) -> C +tmp/foo.pyi:24: note: Subclass: +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: B) -> C +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: A) -> A +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: C) -> C +tmp/foo.pyi:25: error: Overloaded function signatures 1 and 2 overlap with incompatible return types +tmp/foo.pyi:32: error: Signature of "foo" incompatible with supertype "Super" +tmp/foo.pyi:32: note: Superclass: +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> A +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: C) -> C +tmp/foo.pyi:32: note: Subclass: +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> int +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> A +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: C) -> C +tmp/foo.pyi:35: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader [case testTypeTypeOverlapsWithObjectAndType] from foo import * @@ -4441,7 +4439,7 @@ def f(TB: Type[B]): reveal_type(TB.x) # N: Revealed type is "builtins.int" [case testMetaclassAsAny] -from typing import Any, ClassVar +from typing import Any, ClassVar, Type MyAny: Any class WithMeta(metaclass=MyAny): @@ -4453,13 +4451,15 @@ reveal_type(WithMeta.x) # N: Revealed type is "builtins.int" reveal_type(WithMeta().x) # 
N: Revealed type is "builtins.int" WithMeta().m # E: "WithMeta" has no attribute "m" WithMeta().a # E: "WithMeta" has no attribute "a" +t: Type[WithMeta] +t.unknown # OK [case testMetaclassAsAnyWithAFlag] # flags: --disallow-subclassing-any -from typing import Any, ClassVar +from typing import Any, ClassVar, Type MyAny: Any -class WithMeta(metaclass=MyAny): # E: Class cannot use "__main__.MyAny" as a metaclass (has type "Any") +class WithMeta(metaclass=MyAny): # E: Class cannot use "MyAny" as a metaclass (has type "Any") x: ClassVar[int] reveal_type(WithMeta.a) # N: Revealed type is "Any" @@ -4468,6 +4468,8 @@ reveal_type(WithMeta.x) # N: Revealed type is "builtins.int" reveal_type(WithMeta().x) # N: Revealed type is "builtins.int" WithMeta().m # E: "WithMeta" has no attribute "m" WithMeta().a # E: "WithMeta" has no attribute "a" +t: Type[WithMeta] +t.unknown # OK [case testMetaclassIterable] from typing import Iterable, Iterator @@ -4902,7 +4904,7 @@ reveal_type(x.frob) # N: Revealed type is "def (foos: builtins.dict[Any, __main_ [case testNewTypeFromForwardNamedTuple] from typing import NewType, NamedTuple, Tuple -NT = NewType('NT', N) +NT = NewType('NT', 'N') class N(NamedTuple): x: int @@ -4916,7 +4918,7 @@ x = NT(N(1)) from typing import NewType, Tuple from mypy_extensions import TypedDict -NT = NewType('NT', N) # E: Argument 2 to NewType(...) must be subclassable (got "N") +NT = NewType('NT', 'N') # E: Argument 2 to NewType(...) 
must be subclassable (got "N") class N(TypedDict): x: int [builtins fixtures/dict.pyi] @@ -5029,7 +5031,7 @@ def foo(node: Node) -> Node: [case testForwardReferencesInNewTypeMRORecomputed] from typing import NewType x: Foo -Foo = NewType('Foo', B) +Foo = NewType('Foo', 'B') class A: x: int class B(A): @@ -5295,6 +5297,19 @@ class F(six.with_metaclass(t.M)): pass class G: pass [builtins fixtures/tuple.pyi] +[case testSixMetaclassGenericBase] +import six +import abc +from typing import TypeVar, Generic + +T = TypeVar("T") + +class C(six.with_metaclass(abc.ABCMeta, Generic[T])): + pass +class D(six.with_metaclass(abc.ABCMeta, C[T])): + pass +[builtins fixtures/tuple.pyi] + -- Special support for future.utils -- -------------------------------- @@ -5414,7 +5429,7 @@ class F: [case testCorrectEnclosingClassPushedInDeferred2] from typing import TypeVar -T = TypeVar('T', bound=C) +T = TypeVar('T', bound='C') class C: def m(self: T) -> T: class Inner: @@ -7028,7 +7043,7 @@ reveal_type(C.__new__) # N: Revealed type is "def (cls: Type[__main__.C]) -> An [case testOverrideGenericSelfClassMethod] from typing import Generic, TypeVar, Type, List -T = TypeVar('T', bound=A) +T = TypeVar('T', bound='A') class A: @classmethod @@ -7089,7 +7104,7 @@ reveal_type(Foo().y) # N: Revealed type is "builtins.list[Any]" # flags: --check-untyped-defs class Foo: - def bad(): # E: Method must have at least one argument + def bad(): # E: Method must have at least one argument. Did you forget the "self" argument? self.x = 0 # E: Name "self" is not defined [case testTypeAfterAttributeAccessWithDisallowAnyExpr] @@ -7178,7 +7193,7 @@ from typing import Callable class C: x: Callable[[C], int] = lambda x: x.y.g() # E: "C" has no attribute "y" -[case testOpWithInheritedFromAny] +[case testOpWithInheritedFromAny-xfail] from typing import Any C: Any class D(C): @@ -7414,8 +7429,7 @@ class Foo: def meth1(self, a: str) -> str: ... 
# E: Name "meth1" already defined on line 5 def meth2(self, a: str) -> str: ... - from mod1 import meth2 # E: Unsupported class scoped import \ - # E: Name "meth2" already defined on line 8 + from mod1 import meth2 # E: Incompatible import of "meth2" (imported name has type "Callable[[int], int]", local name has type "Callable[[Foo, str], str]") class Bar: from mod1 import foo # E: Unsupported class scoped import @@ -7634,3 +7648,33 @@ class C(B): def foo(self) -> int: # E: Signature of "foo" incompatible with supertype "B" ... [builtins fixtures/property.pyi] + +[case testAllowArgumentAsBaseClass] +from typing import Any, Type + +def e(b) -> None: + class D(b): ... + +def f(b: Any) -> None: + class D(b): ... + +def g(b: Type[Any]) -> None: + class D(b): ... + +def h(b: type) -> None: + class D(b): ... + +[case testNoCrashOnSelfWithForwardRefGenericClass] +from typing import Generic, Sequence, TypeVar, Self + +_T = TypeVar('_T', bound="Foo") + +class Foo: + foo: int + +class Element(Generic[_T]): + elements: Sequence[Self] + +class Bar(Foo): ... 
+e: Element[Bar] +reveal_type(e.elements) # N: Revealed type is "typing.Sequence[__main__.Element[__main__.Bar]]" diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test index 7c4681c7a709..6748646b65aa 100644 --- a/test-data/unit/check-columns.test +++ b/test-data/unit/check-columns.test @@ -211,7 +211,7 @@ y: Dict[int, int] = { [builtins fixtures/dict.pyi] [case testColumnCannotDetermineType] -(x) # E:2: Cannot determine type of "x" +(x) # E:2: Cannot determine type of "x" # E:2: Name "x" is used before definition x = None [case testColumnInvalidIndexing] diff --git a/test-data/unit/check-ctypes.test b/test-data/unit/check-ctypes.test index 5a350256f8e9..beb1afd779c0 100644 --- a/test-data/unit/check-ctypes.test +++ b/test-data/unit/check-ctypes.test @@ -20,6 +20,7 @@ a[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches ar for x in a: reveal_type(x) # N: Revealed type is "builtins.int" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayCustomElementType] import ctypes @@ -52,6 +53,7 @@ myu: Union[ctypes.Array[ctypes.c_int], List[str]] for myi in myu: reveal_type(myi) # N: Revealed type is "Union[builtins.int, builtins.str]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayUnionElementType] import ctypes @@ -76,6 +78,7 @@ mya[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches for myx in mya: reveal_type(myx) # N: Revealed type is "Union[__main__.MyCInt, builtins.int]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharArrayAttrs] import ctypes @@ -84,6 +87,7 @@ ca = (ctypes.c_char * 4)(b'a', b'b', b'c', b'\x00') reveal_type(ca.value) # N: Revealed type is "builtins.bytes" reveal_type(ca.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharPArrayDoesNotCrash] import ctypes @@ -91,6 +95,7 
@@ import ctypes # The following line used to crash with "Could not find builtin symbol 'NoneType'" ca = (ctypes.c_char_p * 0)() [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesWcharArrayAttrs] import ctypes @@ -99,6 +104,7 @@ wca = (ctypes.c_wchar * 4)('a', 'b', 'c', '\x00') reveal_type(wca.value) # N: Revealed type is "builtins.str" wca.raw # E: Array attribute "raw" is only available with element type "c_char", not "c_wchar" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharUnionArrayAttrs] import ctypes @@ -108,6 +114,7 @@ cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_wchar]] reveal_type(cua.value) # N: Revealed type is "Union[builtins.bytes, builtins.str]" cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_wchar]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesAnyUnionArrayAttrs] import ctypes @@ -117,6 +124,7 @@ caa: ctypes.Array[Union[ctypes.c_char, Any]] reveal_type(caa.value) # N: Revealed type is "Union[builtins.bytes, Any]" reveal_type(caa.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesOtherUnionArrayAttrs] import ctypes @@ -126,6 +134,7 @@ cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_int]] cua.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "Union[c_char, c_int]" cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_int]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesAnyArrayAttrs] import ctypes @@ -134,6 +143,7 @@ aa: ctypes.Array[Any] reveal_type(aa.value) # N: Revealed type is "Any" reveal_type(aa.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesOtherArrayAttrs] import ctypes @@ 
-142,6 +152,7 @@ oa = (ctypes.c_int * 4)(1, 2, 3, 4) oa.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "c_int" oa.raw # E: Array attribute "raw" is only available with element type "c_char", not "c_int" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayConstructorStarargs] import ctypes @@ -154,6 +165,7 @@ reveal_type(intarr4(*int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_ reveal_type(intarr4(*c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" reveal_type(intarr6(1, ctypes.c_int(2), *int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" reveal_type(intarr6(1, ctypes.c_int(2), *c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" +[typing fixtures/typing-medium.pyi] float_values = [1.0, 2.0, 3.0, 4.0] intarr4(*float_values) # E: Array constructor argument 1 of type "List[float]" is not convertible to the array element type "Iterable[c_int]" @@ -167,3 +179,4 @@ x = {"a": 1, "b": 2} intarr4(**x) [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index a716109d345e..d7beea0390e7 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -163,6 +163,15 @@ reveal_type(f()) # N: Revealed type is "builtins.int" \[mypy] plugins=/test-data/unit/plugins/customentry.py:register +[case testCustomPluginEntryPointFileTrailingComma] +# flags: --config-file tmp/mypy.ini +def f() -> str: ... +reveal_type(f()) # N: Revealed type is "builtins.int" +[file mypy.ini] +\[mypy] +plugins = + /test-data/unit/plugins/customentry.py:register, + [case testCustomPluginEntryPoint] # flags: --config-file tmp/mypy.ini def f() -> str: ... 
diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test new file mode 100644 index 000000000000..4f907e3186b6 --- /dev/null +++ b/test-data/unit/check-dataclass-transform.test @@ -0,0 +1,46 @@ +[case testDataclassTransformReusesDataclassLogic] +# flags: --python-version 3.7 +from typing import dataclass_transform, Type + +@dataclass_transform() +def my_dataclass(cls: Type) -> Type: + return cls + +@my_dataclass +class Person: + name: str + age: int + + def summary(self): + return "%s is %d years old." % (self.name, self.age) + +reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builtins.int) -> __main__.Person" +Person('John', 32) +Person('Jonh', 21, None) # E: Too many arguments for "Person" + +[typing fixtures/typing-medium.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformIsFoundInTypingExtensions] +# flags: --python-version 3.7 +from typing import Type +from typing_extensions import dataclass_transform + +@dataclass_transform() +def my_dataclass(cls: Type) -> Type: + return cls + +@my_dataclass +class Person: + name: str + age: int + + def summary(self): + return "%s is %d years old." 
% (self.name, self.age) + +reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builtins.int) -> __main__.Person" +Person('John', 32) +Person('Jonh', 21, None) # E: Too many arguments for "Person" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 3ec4c60e6929..631a92f9963b 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -633,8 +633,9 @@ class Two: c = Two() x = c.S -reveal_type(x) # N: Revealed type is "builtins.object" +reveal_type(x) # N: Revealed type is "typing._SpecialForm" [builtins fixtures/dataclasses.pyi] +[typing fixtures/typing-medium.pyi] [case testDataclassOrdering] # flags: --python-version 3.7 @@ -1928,7 +1929,75 @@ reveal_type(D) # N: Revealed type is "def (x: builtins.list[b.C]) -> a.D" [file b.py] from typing import List import a -B = List[C] -class C(CC): ... class CC: ... +class C(CC): ... +B = List[C] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassSelfType] +# flags: --strict-optional +from dataclasses import dataclass +from typing import Self, TypeVar, Generic, Optional + +T = TypeVar("T") + +@dataclass +class LinkedList(Generic[T]): + value: T + next: Optional[Self] = None + + def meth(self) -> None: + reveal_type(self.next) # N: Revealed type is "Union[Self`0, None]" + +l_int: LinkedList[int] = LinkedList(1, LinkedList("no", None)) # E: Argument 1 to "LinkedList" has incompatible type "str"; expected "int" + +@dataclass +class SubLinkedList(LinkedList[int]): ... 
+ +lst = SubLinkedList(1, LinkedList(2)) # E: Argument 2 to "SubLinkedList" has incompatible type "LinkedList[int]"; expected "Optional[SubLinkedList]" +reveal_type(lst.next) # N: Revealed type is "Union[__main__.SubLinkedList, None]" +reveal_type(SubLinkedList) # N: Revealed type is "def (value: builtins.int, next: Union[__main__.SubLinkedList, None] =) -> __main__.SubLinkedList" +[builtins fixtures/dataclasses.pyi] + +[case testNoCrashOnNestedGenericCallable] +from dataclasses import dataclass +from typing import Generic, TypeVar, Callable + +T = TypeVar('T') +R = TypeVar('R') +X = TypeVar('X') + +@dataclass +class Box(Generic[T]): + inner: T + +@dataclass +class Cont(Generic[R]): + run: Box[Callable[[X], R]] + +def const_two(x: T) -> str: + return "two" + +c = Cont(Box(const_two)) +reveal_type(c) # N: Revealed type is "__main__.Cont[builtins.str]" +[builtins fixtures/dataclasses.pyi] + +[case testNoCrashOnSelfWithForwardRefGenericDataclass] +from typing import Generic, Sequence, TypeVar, Self +from dataclasses import dataclass + +_T = TypeVar('_T', bound="Foo") + +@dataclass +class Foo: + foo: int + +@dataclass +class Element(Generic[_T]): + elements: Sequence[Self] + +@dataclass +class Bar(Foo): ... 
+e: Element[Bar] +reveal_type(e.elements) # N: Revealed type is "typing.Sequence[__main__.Element[__main__.Bar]]" [builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-dynamic-typing.test b/test-data/unit/check-dynamic-typing.test index 7b016c342e95..dd4cc1579639 100644 --- a/test-data/unit/check-dynamic-typing.test +++ b/test-data/unit/check-dynamic-typing.test @@ -47,6 +47,10 @@ class B: pass [case testCallingFunctionWithDynamicArgumentTypes] from typing import Any + +def f(x: Any) -> 'A': + pass + a, b = None, None # type: (A, B) if int(): @@ -61,15 +65,16 @@ if int(): if int(): a = f(f) -def f(x: Any) -> 'A': - pass - class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testCallingWithDynamicReturnType] from typing import Any + +def f(x: 'A') -> Any: + pass + a, b = None, None # type: (A, B) a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" @@ -77,9 +82,6 @@ a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" a = f(a) b = f(a) -def f(x: 'A') -> Any: - pass - class A: pass class B: pass [builtins fixtures/tuple.pyi] @@ -145,6 +147,7 @@ class int: pass class type: pass class function: pass class str: pass +class dict: pass [case testBinaryOperationsWithDynamicAsRightOperand] from typing import Any @@ -217,6 +220,7 @@ class int: pass class type: pass class function: pass class str: pass +class dict: pass [case testDynamicWithUnaryExpressions] from typing import Any @@ -283,6 +287,8 @@ class A: pass from typing import Any, cast class A: pass class B: pass +def f() -> None: pass + d = None # type: Any a = None # type: A b = None # type: B @@ -294,10 +300,15 @@ if int(): b = cast(Any, d) if int(): a = cast(Any, f()) -def f() -> None: pass - [case testCompatibilityOfDynamicWithOtherTypes] from typing import Any, Tuple + +def g(a: 'A') -> None: + pass + +class A: pass +class B: pass + d = None # type: Any t = None # type: Tuple[A, A] # TODO: callable types, overloaded functions @@ -308,12 +319,6 @@ d 
= g d = A t = d f = d - -def g(a: 'A') -> None: - pass - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] @@ -362,6 +367,8 @@ a = None # type: A g = None # type: Callable[[], None] h = None # type: Callable[[A], None] +def f(x): pass + f() # E: Missing positional argument "x" in call to "f" f(x, x) # E: Too many arguments for "f" if int(): @@ -373,8 +380,6 @@ if int(): if int(): h = f -def f(x): pass - class A: pass [case testImplicitGlobalFunctionSignatureWithDifferentArgCounts] @@ -384,6 +389,9 @@ g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] a = None # type: A +def f0(): pass +def f2(x, y): pass + if int(): g1 = f0 # E: Incompatible types in assignment (expression has type "Callable[[], Any]", variable has type "Callable[[A], None]") if int(): @@ -400,16 +408,18 @@ if int(): f0() f2(a, a) -def f0(): pass - -def f2(x, y): pass - class A: pass [case testImplicitGlobalFunctionSignatureWithDefaultArgs] from typing import Callable +class A: pass +class B: pass + a, b = None, None # type: (A, B) +def f01(x = b): pass +def f13(x, y = b, z = b): pass + g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] @@ -443,11 +453,6 @@ if int(): if int(): g3 = f13 -def f01(x = b): pass -def f13(x, y = b, z = b): pass - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] [case testSkipTypeCheckingWithImplicitSignature] @@ -550,6 +555,10 @@ f(o, o, o) [case testInitMethodWithImplicitSignature] from typing import Callable + +class A: + def __init__(self, a, b): pass + f1 = None # type: Callable[[A], A] f2 = None # type: Callable[[A, A], A] a = None # type: A @@ -562,20 +571,14 @@ A(a, a) if int(): f2 = A -class A: - def __init__(self, a, b): pass - [case testUsingImplicitTypeObjectWithIs] - -t = None # type: type -t = A -t = B - class A: pass class B: def __init__(self): pass - +t = None # type: type +t = A +t = B -- Type compatibility -- ------------------ diff --git 
a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 039ddd1621cd..9343e8d5c562 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -549,7 +549,7 @@ reveal_type(list(Color)) # N: Revealed type is "builtins.list[__main__.Color]" [case testEnumWorkWithForward] from enum import Enum -a: E = E.x +a: E = E.x # type: ignore[used-before-def] class E(Enum): x = 1 y = 2 @@ -2100,3 +2100,30 @@ class Some: class A(Some, Enum): __labels__ = {1: "1"} [builtins fixtures/dict.pyi] + +[case testEnumWithPartialTypes] +from enum import Enum + +class Mixed(Enum): + a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") + b = None + + def check(self) -> None: + reveal_type(Mixed.a.value) # N: Revealed type is "builtins.list[Any]" + reveal_type(Mixed.b.value) # N: Revealed type is "None" + + # Inferring Any here instead of a union seems to be a deliberate + # choice; see the testEnumValueInhomogenous case above. + reveal_type(self.value) # N: Revealed type is "Any" + + for field in Mixed: + reveal_type(field.value) # N: Revealed type is "Any" + if field.value is None: + pass + +class AllPartialList(Enum): + a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") + b = [] # E: Need type annotation for "b" (hint: "b: List[] = ...") + + def check(self) -> None: + reveal_type(self.value) # N: Revealed type is "builtins.list[Any]" diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 81b8948be14a..8c6a446d101e 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -77,8 +77,8 @@ a = 'x'.foobar(b) # type: ignore[xyz, w, attr-defined] # E: Name "b" is not de # N: Error code "name-defined" not covered by "type: ignore" comment [case testErrorCodeIgnoreMultiple2] -a = 'x'.foobar(b) # type: int # type: ignore[name-defined, attr-defined] -b = 'x'.foobar(b) # type: int # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" 
[attr-defined] \ +a = 'x'.foobar(c) # type: int # type: ignore[name-defined, attr-defined] +b = 'x'.foobar(c) # type: int # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" [attr-defined] \ # N: Error code "attr-defined" not covered by "type: ignore" comment [case testErrorCodeWarnUnusedIgnores1] @@ -455,11 +455,15 @@ class E(TypedDict): y: int a: D = {'x': ''} # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [typeddict-item] -b: D = {'y': ''} # E: Extra key "y" for TypedDict "D" [typeddict-item] +b: D = {'y': ''} # E: Missing key "x" for TypedDict "D" [typeddict-item] \ + # E: Extra key "y" for TypedDict "D" [typeddict-unknown-key] c = D(x=0) if int() else E(x=0, y=0) c = {} # E: Expected TypedDict key "x" but found no keys [typeddict-item] +d: D = {'x': '', 'y': 1} # E: Extra key "y" for TypedDict "D" [typeddict-unknown-key] \ + # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [typeddict-item] -a['y'] = 1 # E: TypedDict "D" has no key "y" [typeddict-item] + +a['y'] = 1 # E: TypedDict "D" has no key "y" [typeddict-unknown-key] a['x'] = 'x' # E: Value of "x" has incompatible type "str"; expected "int" [typeddict-item] a['y'] # E: TypedDict "D" has no key "y" [typeddict-item] [builtins fixtures/dict.pyi] @@ -472,12 +476,13 @@ class A(TypedDict): two_commonparts: int a: A = {'one_commonpart': 1, 'two_commonparts': 2} -a['other_commonpart'] = 3 # type: ignore[typeddict-item] +a['other_commonpart'] = 3 # type: ignore[typeddict-unknown-key] +not_exist = a['not_exist'] # type: ignore[typeddict-item] [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] [case testErrorCodeCannotDetermineType] -y = x # E: Cannot determine type of "x" [has-type] +y = x # E: Cannot determine type of "x" [has-type] # E: Name "x" is used before definition [used-before-def] reveal_type(y) # N: Revealed type is "Any" x = None @@ -803,12 +808,15 @@ from typing_extensions import TypedDict 
Foo = TypedDict("Bar", {}) # E: First argument "Bar" to TypedDict() does not match variable name "Foo" [name-match] [builtins fixtures/dict.pyi] + [case testTruthyBool] # flags: --enable-error-code truthy-bool -from typing import List, Union +from typing import List, Union, Any class Foo: pass +class Bar: + pass foo = Foo() if foo: # E: "__main__.foo" has type "Foo" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] @@ -836,15 +844,30 @@ if good_union: if not good_union: pass -bad_union: Union[Foo, object] = Foo() -if bad_union: # E: "__main__.bad_union" has type "Union[Foo, object]" of which no members implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] +bad_union: Union[Foo, Bar] = Foo() +if bad_union: # E: "__main__.bad_union" has type "Union[Foo, Bar]" of which no members implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] + pass +if not bad_union: # E: "__main__.bad_union" has type "Union[Foo, Bar]" of which no members implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] + pass + +# 'object' is special and is treated as potentially falsy +obj: object = Foo() +if obj: pass -if not bad_union: # E: "__main__.bad_union" has type "object" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] +if not obj: pass lst: List[int] = [] if lst: pass + +a: Any +if a: + pass + +any_or_object: Union[object, Any] +if any_or_object: + pass [builtins fixtures/list.pyi] [case testTruthyFunctions] @@ -857,6 +880,13 @@ if not f: # E: Function "Callable[[], Any]" could always be true in boolean con pass conditional_result = 'foo' if f else 'bar' # E: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] +[case testTruthyIterable] +# flags: --strict-optional --enable-error-code truthy-iterable +from typing import Iterable +def func(var: 
Iterable[str]) -> None: + if var: # E: "var" has type "Iterable[str]" which can always be true in boolean context. Consider using "Collection[str]" instead. [truthy-iterable] + ... + [case testNoOverloadImplementation] from typing import overload diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index f7aa43d43f3e..49a3f0d4aaa7 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -692,6 +692,7 @@ tmp/m.py:8: error: Invalid index type "int" for "A"; expected type "str" [case testDivmod] +# flags: --disable-error-code=used-before-def from typing import Tuple, Union, SupportsInt _Decimal = Union[Decimal, int] class Decimal(SupportsInt): @@ -991,6 +992,15 @@ assert_type(reduce_it(True), Scalar) [case testNoneReturnTypeBasics] +def f() -> None: + pass + +class A: + def g(self, x: object) -> None: + pass + def __call__(self) -> None: + pass + a, o = None, None # type: (A, object) if int(): a = f() # E: "f" does not return a value @@ -1004,40 +1014,30 @@ A().g(f()) # E: "f" does not return a value x: A = f() # E: "f" does not return a value f() A().g(a) - -def f() -> None: - pass - -class A: - def g(self, x: object) -> None: - pass - def __call__(self) -> None: - pass [builtins fixtures/tuple.pyi] [case testNoneReturnTypeWithStatements] import typing -if f(): # Fail +def f() -> None: pass + +if f(): # E: "f" does not return a value pass -elif f(): # Fail +elif f(): # E: "f" does not return a value pass -while f(): # Fail +while f(): # E: "f" does not return a value pass def g() -> object: - return f() # Fail -raise f() # Fail - -def f() -> None: pass + return f() # E: "f" does not return a value +raise f() # E: "f" does not return a value [builtins fixtures/exception.pyi] -[out] -main:2: error: "f" does not return a value -main:4: error: "f" does not return a value -main:6: error: "f" does not return a value -main:9: error: "f" does not return a value -main:10: error: "f" does not return 
a value [case testNoneReturnTypeWithExpressions] from typing import cast + +def f() -> None: pass +class A: + def __add__(self, x: 'A') -> 'A': pass + a = None # type: A [f()] # E: "f" does not return a value f() + a # E: "f" does not return a value @@ -1046,15 +1046,16 @@ f() == a # E: "f" does not return a value a != f() # E: "f" does not return a value cast(A, f()) f().foo # E: "f" does not return a value - -def f() -> None: pass -class A: - def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/list.pyi] [case testNoneReturnTypeWithExpressions2] import typing +def f() -> None: pass +class A: + def __add__(self, x: 'A') -> 'A': + pass + a, b = None, None # type: (A, bool) f() in a # E: "f" does not return a value # E: Unsupported right operand type for in ("A") a < f() # E: "f" does not return a value @@ -1064,11 +1065,6 @@ a in f() # E: "f" does not return a value not f() # E: "f" does not return a value f() and b # E: "f" does not return a value b or f() # E: "f" does not return a value - -def f() -> None: pass -class A: - def __add__(self, x: 'A') -> 'A': - pass [builtins fixtures/bool.pyi] @@ -1424,19 +1420,13 @@ z = x.append(y) if bool() else x.append(y) # E: "append" of "list" does not retu from typing import cast class A: def __add__(self, a: 'A') -> 'A': pass -a = None # type: A -None + a # Fail -f + a # Fail -a + f # Fail -cast(A, f) - def f() -> None: pass -[out] -main:5: error: Unsupported left operand type for + ("None") -main:6: error: Unsupported left operand type for + ("Callable[[], None]") -main:7: error: Unsupported operand types for + ("A" and "Callable[[], None]") - +a = None # type: A +None + a # E: Unsupported left operand type for + ("None") +f + a # E: Unsupported left operand type for + ("Callable[[], None]") +a + f # E: Unsupported operand types for + ("A" and "Callable[[], None]") +cast(A, f) [case testOperatorMethodWithInvalidArgCount] a = None # type: A @@ -1724,7 +1714,7 @@ reveal_type = 1 [case testRevealForward] def f() -> 
None: reveal_type(x) -x = 1 + 1 +x = 1 + int() [out] main:2: note: Revealed type is "builtins.int" @@ -1995,6 +1985,24 @@ class B: A() == B() # E: Unsupported operand types for == ("A" and "B") [builtins fixtures/bool.pyi] +[case testStrictEqualitySequenceAndCustomEq] +# flags: --strict-equality +from typing import Tuple + +class C: pass +class D: + def __eq__(self, other): return True + +a = [C()] +b = [D()] +a == b +b == a +t1: Tuple[C, ...] +t2: Tuple[D, ...] +t1 == t2 +t2 == t1 +[builtins fixtures/bool.pyi] + [case testCustomEqCheckStrictEqualityOKInstance] # flags: --strict-equality class A: @@ -2213,6 +2221,32 @@ int == y y == int [builtins fixtures/bool.pyi] +[case testStrictEqualityAndEnumWithCustomEq] +# flags: --strict-equality +from enum import Enum + +class E1(Enum): + X = 0 + Y = 1 + +class E2(Enum): + X = 0 + Y = 1 + + def __eq__(self, other: object) -> bool: + return bool() + +E1.X == E1.Y # E: Non-overlapping equality check (left operand type: "Literal[E1.X]", right operand type: "Literal[E1.Y]") +E2.X == E2.Y +[builtins fixtures/bool.pyi] + +[case testStrictEqualityWithBytesContains] +# flags: --strict-equality +data = b"xy" +b"x" in data +[builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] + [case testUnimportedHintAny] def f(x: Any) -> None: # E: Name "Any" is not defined \ # N: Did you forget to import it from "typing"? 
(Suggestion: "from typing import Any") diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index d7ce4c8848e3..ebb3744e9f08 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -1152,13 +1152,13 @@ from typing import Any def f(s): yield s +def g(x) -> Any: + yield x # E: Expression has type "Any" + x = f(0) # E: Expression has type "Any" for x in f(0): # E: Expression has type "Any" g(x) # E: Expression has type "Any" -def g(x) -> Any: - yield x # E: Expression has type "Any" - l = [1, 2, 3] l[f(0)] # E: Expression has type "Any" f(l) @@ -1606,14 +1606,19 @@ strict_equality = false [case testNoImplicitReexport] -# flags: --no-implicit-reexport -from other_module_2 import a +# flags: --no-implicit-reexport --show-error-codes +from other_module_2 import a # E: Module "other_module_2" does not explicitly export attribute "a" [attr-defined] +reveal_type(a) # N: Revealed type is "builtins.int" + +import other_module_2 +# TODO: this should also reveal builtins.int, see #13965 +reveal_type(other_module_2.a) # E: "object" does not explicitly export attribute "a" [attr-defined] \ + # N: Revealed type is "Any" + [file other_module_1.py] a = 5 [file other_module_2.py] from other_module_1 import a -[out] -main:2: error: Module "other_module_2" does not explicitly export attribute "a"; implicit reexport disabled [case testNoImplicitReexportRespectsAll] # flags: --no-implicit-reexport @@ -1627,7 +1632,7 @@ from other_module_1 import a, b __all__ = ('b',) [builtins fixtures/tuple.pyi] [out] -main:2: error: Module "other_module_2" does not explicitly export attribute "a"; implicit reexport disabled +main:2: error: Module "other_module_2" does not explicitly export attribute "a" [case testNoImplicitReexportStarConsideredExplicit] # flags: --no-implicit-reexport @@ -1643,7 +1648,7 @@ __all__ = ('b',) [case testNoImplicitReexportGetAttr] # flags: --no-implicit-reexport --python-version 3.7 -from other_module_2 import a # E: 
Module "other_module_2" does not explicitly export attribute "a"; implicit reexport disabled +from other_module_2 import a # E: Module "other_module_2" does not explicitly export attribute "a" [file other_module_1.py] from typing import Any def __getattr__(name: str) -> Any: ... @@ -1661,7 +1666,7 @@ attr_2 = 6 [file other_module_2.py] from other_module_1 import attr_1, attr_2 [out] -main:2: error: Module "other_module_2" does not explicitly export attribute "attr_1"; implicit reexport disabled +main:2: error: Module "other_module_2" does not explicitly export attribute "attr_1" [case testNoImplicitReexportMypyIni] # flags: --config-file tmp/mypy.ini @@ -1679,7 +1684,7 @@ implicit_reexport = True \[mypy-other_module_2] implicit_reexport = False [out] -main:2: error: Module "other_module_2" has no attribute "a" +main:2: error: Module "other_module_2" does not explicitly export attribute "a" [case testNoImplicitReexportPyProjectTOML] @@ -1700,7 +1705,7 @@ module = 'other_module_2' implicit_reexport = false [out] -main:2: error: Module "other_module_2" has no attribute "a" +main:2: error: Module "other_module_2" does not explicitly export attribute "a" [case testImplicitAnyOKForNoArgs] @@ -2105,7 +2110,7 @@ if foo: ... # E: Function "Callable[[], int]" could always be true in boolean c 42 + "no" # type: ignore [file mypy.ini] \[mypy] -enable_error_code = ignore-without-code, truthy-bool +enable_error_code = ignore-without-code, truthy-bool, used-before-def \[mypy-tests.*] disable_error_code = ignore-without-code @@ -2128,3 +2133,18 @@ Ts = TypeVarTuple("Ts") # E: "TypeVarTuple" support is experimental, use --enab from typing_extensions import TypeVarTuple Ts = TypeVarTuple("Ts") # OK [builtins fixtures/tuple.pyi] + + +[case testDisableBytearrayPromotion] +# flags: --disable-bytearray-promotion +def f(x: bytes) -> None: ... 
+f(bytearray(b"asdf")) # E: Argument 1 to "f" has incompatible type "bytearray"; expected "bytes" +f(memoryview(b"asdf")) +[builtins fixtures/primitives.pyi] + +[case testDisableMemoryviewPromotion] +# flags: --disable-memoryview-promotion +def f(x: bytes) -> None: ... +f(bytearray(b"asdf")) +f(memoryview(b"asdf")) # E: Argument 1 to "f" has incompatible type "memoryview"; expected "bytes" +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index bb36b65f35de..c23bbb77f643 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -337,6 +337,11 @@ class A: pass [out] [case testCompatibilityOfSimpleTypeObjectWithStdType] +class A: + def __init__(self, a: 'A') -> None: pass + +def f() -> None: pass + t = None # type: type a = None # type: A @@ -347,11 +352,6 @@ if int(): if int(): t = A -class A: - def __init__(self, a: 'A') -> None: pass - -def f() -> None: pass - [case testFunctionTypesWithOverloads] from foo import * [file foo.pyi] @@ -466,6 +466,11 @@ if int(): [case testCallingFunctionsWithDefaultArgumentValues] # flags: --implicit-optional --no-strict-optional +class A: pass +class AA(A): pass +class B: pass + +def f(x: 'A' = None) -> 'B': pass a, b = None, None # type: (A, B) if int(): @@ -482,11 +487,6 @@ if int(): if int(): b = f(AA()) -def f(x: 'A' = None) -> 'B': pass - -class A: pass -class AA(A): pass -class B: pass [builtins fixtures/tuple.pyi] [case testDefaultArgumentExpressions] @@ -1133,6 +1133,7 @@ def dec(f: T) -> T: [out] [case testForwardReferenceToFunctionWithMultipleDecorators] +# flags: --disable-error-code=used-before-def def f(self) -> None: g() g(1) @@ -1167,6 +1168,7 @@ def dec(f): return f [builtins fixtures/staticmethod.pyi] [case testForwardRefereceToDecoratedFunctionWithCallExpressionDecorator] +# flags: --disable-error-code=used-before-def def f(self) -> None: g() g(1) @@ -1475,6 +1477,20 @@ else: @dec def f(): pass +[case 
testConditionalFunctionDefinitionUnreachable] +def bar() -> None: + if False: + foo = 1 + else: + def foo(obj): ... + +def baz() -> None: + if False: + foo: int = 1 + else: + def foo(obj): ... # E: Incompatible redefinition (redefinition with type "Callable[[Any], Any]", original type "int") +[builtins fixtures/tuple.pyi] + [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1] from typing import Any def f(x: str) -> None: pass @@ -2594,9 +2610,9 @@ import p def f() -> int: ... [case testLambdaDefaultTypeErrors] -lambda a=nonsense: a # E: Name "nonsense" is not defined lambda a=(1 + 'asdf'): a # E: Unsupported operand types for + ("int" and "str") -def f(x: int = i): # E: Name "i" is not defined +lambda a=nonsense: a # E: Name "nonsense" is not defined +def f(x: int = i): # E: Name "i" is not defined # E: Name "i" is used before definition i = 42 [case testRevealTypeOfCallExpressionReturningNoneWorks] @@ -2694,7 +2710,7 @@ class A: @dec def e(self) -> int: pass @property - def g() -> int: pass # E: Method must have at least one argument + def g() -> int: pass # E: Method must have at least one argument. Did you forget the "self" argument? 
@property def h(self, *args, **kwargs) -> int: pass # OK [builtins fixtures/property.pyi] diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index b7d98a783a49..a62028ca94ea 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -20,21 +20,19 @@ class C: pass [case testGenericMethodArgument] from typing import TypeVar, Generic T = TypeVar('T') -a.f(c) # Fail -a.f(b) + +class A(Generic[T]): + def f(self, a: T) -> None: pass a = None # type: A[B] b = None # type: B c = None # type: C -class A(Generic[T]): - def f(self, a: T) -> None: pass +a.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" +a.f(b) class B: pass class C: pass -[out] -main:3: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" - [case testGenericMemberVariable] from typing import TypeVar, Generic T = TypeVar('T') @@ -655,16 +653,16 @@ a: other.Array[float] reveal_type(a) # N: Revealed type is "other.array[Any, other.dtype[builtins.float]]" [out] -main:3: error: Type argument "float" of "dtype" must be a subtype of "generic" [type-var] +main:3: error: Type argument "float" of "Array" must be a subtype of "generic" [type-var] a: other.Array[float] ^ [file other.py] from typing import Any, Generic, TypeVar -DT = TypeVar("DT", covariant=True, bound=dtype[Any]) -DTS = TypeVar("DTS", covariant=True, bound=generic) +DT = TypeVar("DT", covariant=True, bound='dtype[Any]') +DTS = TypeVar("DTS", covariant=True, bound='generic') S = TypeVar("S", bound=Any) -ST = TypeVar("ST", bound=generic, covariant=True) +ST = TypeVar("ST", bound='generic', covariant=True) class common: pass class generic(common): pass @@ -1031,8 +1029,9 @@ IntNode[int](1, 1) IntNode[int](1, 'a') # E: Argument 2 to "Node" has incompatible type "str"; expected "int" SameNode = Node[T, T] -# TODO: fix https://github.com/python/mypy/issues/7084. 
-ff = SameNode[T](1, 1) +ff = SameNode[T](1, 1) # E: Type variable "__main__.T" is unbound \ + # N: (Hint: Use "Generic[T]" or "Protocol[T]" base class to bind "T" inside a class) \ + # N: (Hint: Use "T" in function signature to bind "T" inside a function) a = SameNode(1, 'x') reveal_type(a) # N: Revealed type is "__main__.Node[Any, Any]" b = SameNode[int](1, 1) @@ -1049,20 +1048,20 @@ CA = Callable[[T], int] TA = Tuple[T, int] UA = Union[T, int] -cs = CA + 1 # E: Unsupported left operand type for + ("object") +cs = CA + 1 # E: Unsupported left operand type for + ("") reveal_type(cs) # N: Revealed type is "Any" -ts = TA() # E: "object" not callable +ts = TA() # E: "" not callable reveal_type(ts) # N: Revealed type is "Any" -us = UA.x # E: "object" has no attribute "x" +us = UA.x # E: "" has no attribute "x" reveal_type(us) # N: Revealed type is "Any" xx = CA[str] + 1 # E: Type application is only supported for generic classes yy = TA[str]() # E: Type application is only supported for generic classes zz = UA[str].x # E: Type application is only supported for generic classes [builtins fixtures/tuple.pyi] - +[typing fixtures/typing-medium.pyi] [out] [case testGenericTypeAliasesTypeVarBinding] @@ -1101,13 +1100,12 @@ BadA = A[str, T] # One error here SameA = A[T, T] x = None # type: SameA[int] -y = None # type: SameA[str] # Two errors here, for both args of A +y = None # type: SameA[str] # Another error here [builtins fixtures/list.pyi] [out] main:9:8: error: Value of type variable "T" of "A" cannot be "str" -main:13:1: error: Value of type variable "T" of "A" cannot be "str" -main:13:1: error: Value of type variable "S" of "A" cannot be "str" +main:13:1: error: Value of type variable "T" of "SameA" cannot be "str" [case testGenericTypeAliasesIgnoredPotentialAlias] class A: ... 
@@ -1333,6 +1331,7 @@ class type: pass class tuple: pass class function: pass class str: pass +class dict: pass [case testMultipleAssignmentWithIterable] from typing import Iterable, TypeVar @@ -1383,10 +1382,11 @@ Z = TypeVar('Z') class OO: pass a = None # type: A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object] -f(a) # E: Argument 1 to "f" has incompatible type "A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]"; expected "OO" - def f(a: OO) -> None: pass + +f(a) # E: Argument 1 to "f" has incompatible type "A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]"; expected "OO" + class A(Generic[B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z]): pass [case testErrorWithShorterGenericTypeName] @@ -1394,9 +1394,10 @@ from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, B] +def f(a: 'B') -> None: pass + f(a) # E: Argument 1 to "f" has incompatible type "A[object, B]"; expected "B" -def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass @@ -1405,9 +1406,10 @@ from typing import Callable, TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, Callable[[], None]] +def f(a: 'B') -> None: pass + f(a) # E: Argument 1 to "f" has incompatible type "A[object, Callable[[], None]]"; expected "B" -def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass @@ -1544,7 +1546,7 @@ class C(Generic[T]): reveal_type(C.F(17).foo()) # N: Revealed type is "builtins.int" reveal_type(C("").F(17).foo()) # N: Revealed type is 
"builtins.int" reveal_type(C.F) # N: Revealed type is "def [K] (k: K`1) -> __main__.C.F[K`1]" -reveal_type(C("").F) # N: Revealed type is "def [K] (k: K`1) -> __main__.C.F[K`1]" +reveal_type(C("").F) # N: Revealed type is "def [K] (k: K`6) -> __main__.C.F[K`6]" -- Callable subtyping with generic functions @@ -2143,7 +2145,7 @@ from typing import Generic, TypeVar, Any, Tuple, Type T = TypeVar('T') S = TypeVar('S') -Q = TypeVar('Q', bound=A[Any]) +Q = TypeVar('Q', bound='A[Any]') class A(Generic[T]): @classmethod @@ -2580,3 +2582,119 @@ class Bar(Foo[AnyStr]): [out] main:10: error: Argument 1 to "method1" of "Foo" has incompatible type "str"; expected "AnyStr" main:10: error: Argument 2 to "method1" of "Foo" has incompatible type "bytes"; expected "AnyStr" + +[case testTypeVariableClashVar] +from typing import Generic, TypeVar, Callable + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + x: Callable[[T], R] + +def func(x: C[R]) -> R: + return x.x(42) # OK + +[case testTypeVariableClashVarTuple] +from typing import Generic, TypeVar, Callable, Tuple + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + x: Callable[[T], Tuple[R, T]] + +def func(x: C[R]) -> R: + if bool(): + return x.x(42)[0] # OK + else: + return x.x(42)[1] # E: Incompatible return value type (got "int", expected "R") +[builtins fixtures/tuple.pyi] + +[case testTypeVariableClashMethod] +from typing import Generic, TypeVar, Callable + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + def x(self) -> Callable[[T], R]: ... + +def func(x: C[R]) -> R: + return x.x()(42) # OK + +[case testTypeVariableClashMethodTuple] +from typing import Generic, TypeVar, Callable, Tuple + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + def x(self) -> Callable[[T], Tuple[R, T]]: ... 
+ +def func(x: C[R]) -> R: + if bool(): + return x.x()(42)[0] # OK + else: + return x.x()(42)[1] # E: Incompatible return value type (got "int", expected "R") +[builtins fixtures/tuple.pyi] + +[case testTypeVariableClashVarSelf] +from typing import Self, TypeVar, Generic, Callable + +T = TypeVar("T") +S = TypeVar("S") + +class C(Generic[T]): + x: Callable[[S], Self] + y: T + +def foo(x: C[T]) -> T: + return x.x(42).y # OK + +[case testNestedGenericFunctionTypeApplication] +from typing import TypeVar, Generic, List + +A = TypeVar("A") +B = TypeVar("B") + +class C(Generic[A]): + x: A + +def foo(x: A) -> A: + def bar() -> List[A]: + y = C[List[A]]() + z = C[List[B]]() # E: Type variable "__main__.B" is unbound \ + # N: (Hint: Use "Generic[B]" or "Protocol[B]" base class to bind "B" inside a class) \ + # N: (Hint: Use "B" in function signature to bind "B" inside a function) + return y.x + return bar()[0] + + +-- TypeVar imported from typing_extensions +-- --------------------------------------- + +[case testTypeVarTypingExtensionsSimpleGeneric] +from typing import Generic +from typing_extensions import TypeVar + +T = TypeVar("T") + +class A(Generic[T]): + def __init__(self, value: T) -> None: + self.value = value + +a: A = A(8) +b: A[str] = A("") + +reveal_type(A(1.23)) # N: Revealed type is "__main__.A[builtins.float]" + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTypingExtensionsSimpleBound] +from typing_extensions import TypeVar + +T= TypeVar("T") + +def func(var: T) -> T: + return var + +reveal_type(func(1)) # N: Revealed type is "builtins.int" + +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-incomplete-fixture.test b/test-data/unit/check-incomplete-fixture.test index f06dad293184..146494df1bd6 100644 --- a/test-data/unit/check-incomplete-fixture.test +++ b/test-data/unit/check-incomplete-fixture.test @@ -12,14 +12,6 @@ import m m.x # E: "object" has no attribute "x" [file m.py] -[case testDictMissingFromStubs] -from typing import Dict -def 
f(x: Dict[int]) -> None: pass -[out] -main:1: error: Module "typing" has no attribute "Dict" -main:1: note: Maybe your test fixture does not define "builtins.dict"? -main:1: note: Consider adding [builtins fixtures/dict.pyi] to your test description - [case testSetMissingFromStubs] from typing import Set def f(x: Set[int]) -> None: pass diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index d4e6779403b4..1aff1ba2862f 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -1025,10 +1025,7 @@ import a.b [file a/b.py] -[rechecked b] -[stale] -[out2] -tmp/b.py:4: error: Name "a" already defined on line 3 +[stale b] [case testIncrementalSilentImportsAndImportsInClass] # flags: --ignore-missing-imports @@ -2510,7 +2507,7 @@ A = Tuple[int] [case testNewTypeFromForwardNamedTupleIncremental] from typing import NewType, NamedTuple, Tuple -NT = NewType('NT', N) +NT = NewType('NT', 'N') class N(NamedTuple): x: int @@ -2594,8 +2591,8 @@ class C(NamedTuple): # type: ignore from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') -IntNode = Node[int, S] -AnyNode = Node[S, T] +IntNode = Node[int, S] # type: ignore[used-before-def] +AnyNode = Node[S, T] # type: ignore[used-before-def] class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: @@ -2645,8 +2642,8 @@ class G(Generic[T]): x: T yg: G[M] -z: int = G[M]().x.x -z = G[M]().x[0] +z: int = G[M]().x.x # type: ignore[used-before-def] +z = G[M]().x[0] # type: ignore[used-before-def] M = NamedTuple('M', [('x', int)]) [builtins fixtures/tuple.pyi] [out] @@ -6289,3 +6286,76 @@ class C: ... 
[out] [out2] [out3] + +[case testNoCrashOnPartialLambdaInference] +import m +[file m.py] +from typing import TypeVar, Callable + +V = TypeVar("V") +def apply(val: V, func: Callable[[V], None]) -> None: + return func(val) + +xs = [] +apply(0, lambda a: xs.append(a)) +[file m.py.2] +from typing import TypeVar, Callable + +V = TypeVar("V") +def apply(val: V, func: Callable[[V], None]) -> None: + return func(val) + +xs = [] +apply(0, lambda a: xs.append(a)) +reveal_type(xs) +[builtins fixtures/list.pyi] +[out] +[out2] +tmp/m.py:9: note: Revealed type is "builtins.list[builtins.int]" + +[case testTypingSelfCoarse] +import m +[file lib.py] +from typing import Self + +class C: + def meth(self, other: Self) -> Self: ... + +[file m.py] +import lib +class D: ... +[file m.py.2] +import lib +class D(lib.C): ... + +reveal_type(D.meth) +reveal_type(D().meth) +[out] +[out2] +tmp/m.py:4: note: Revealed type is "def [Self <: lib.C] (self: Self`0, other: Self`0) -> Self`0" +tmp/m.py:5: note: Revealed type is "def (other: m.D) -> m.D" + +[case testIncrementalNestedGenericCallableCrash] +from typing import TypeVar, Callable + +T = TypeVar("T") + +class B: + def foo(self) -> Callable[[T], T]: ... + +class C(B): + def __init__(self) -> None: + self.x = self.foo() +[out] +[out2] + +[case testNoCrashIncrementalMetaAny] +import a +[file a.py] +from m import Foo +[file a.py.2] +from m import Foo +# touch +[file m.py] +from missing_module import Meta # type: ignore[import] +class Foo(metaclass=Meta): ... 
diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index 2e26f54c6e93..625ab091a6a9 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -7,6 +7,12 @@ [case testBasicContextInference] from typing import TypeVar, Generic T = TypeVar('T') + +def f() -> 'A[T]': pass + +class A(Generic[T]): pass +class B: pass + ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -17,15 +23,11 @@ if int(): ab = f() if int(): b = f() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") - -def f() -> 'A[T]': pass - -class A(Generic[T]): pass -class B: pass - [case testBasicContextInferenceForConstructor] from typing import TypeVar, Generic T = TypeVar('T') +class A(Generic[T]): pass +class B: pass ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -36,13 +38,16 @@ if int(): ab = A() if int(): b = A() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") - -class A(Generic[T]): pass -class B: pass - [case testIncompatibleContextInference] from typing import TypeVar, Generic T = TypeVar('T') +def f(a: T) -> 'A[T]': + pass + +class A(Generic[T]): pass + +class B: pass +class C: pass b = None # type: B c = None # type: C ab = None # type: A[B] @@ -63,14 +68,6 @@ if int(): if int(): ac = f(c) -def f(a: T) -> 'A[T]': - pass - -class A(Generic[T]): pass - -class B: pass -class C: pass - -- Local variables -- --------------- @@ -159,6 +156,12 @@ class B: pass [case testInferenceWithTypeVariableTwiceInReturnType] from typing import TypeVar, Tuple, Generic T = TypeVar('T') + +def f(a: T) -> 'Tuple[A[T], A[T]]': pass + +class A(Generic[T]): pass +class B: pass + b = None # type: B o = None # type: object ab = None # type: A[B] @@ -175,17 +178,20 @@ if int(): ab, ab = f(b) if int(): ao, ao = f(o) - -def f(a: T) -> 'Tuple[A[T], A[T]]': pass - -class A(Generic[T]): pass -class B: 
pass [builtins fixtures/tuple.pyi] [case testInferenceWithTypeVariableTwiceInReturnTypeAndMultipleVariables] from typing import TypeVar, Tuple, Generic S = TypeVar('S') T = TypeVar('T') + +def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass +def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass +def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass + +class A(Generic[T]): pass +class B: pass + b = None # type: B o = None # type: object ab = None # type: A[B] @@ -206,13 +212,6 @@ if int(): ab, ab, ao = g(b, b) if int(): ab, ab, ab, ab = h(b, b) - -def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass -def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass -def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass - -class A(Generic[T]): pass -class B: pass [builtins fixtures/tuple.pyi] @@ -223,6 +222,13 @@ class B: pass [case testMultipleTvatInstancesInArgs] from typing import TypeVar, Generic T = TypeVar('T') + +def f(a: T, b: T) -> 'A[T]': pass + +class A(Generic[T]): pass +class B: pass +class C(B): pass + ac = None # type: A[C] ab = None # type: A[B] ao = None # type: A[object] @@ -246,12 +252,6 @@ if int(): if int(): ab = f(c, b) -def f(a: T, b: T) -> 'A[T]': pass - -class A(Generic[T]): pass -class B: pass -class C(B): pass - -- Nested generic function calls -- ----------------------------- @@ -260,6 +260,12 @@ class C(B): pass [case testNestedGenericFunctionCall1] from typing import TypeVar, Generic T = TypeVar('T') + +def f(a: T) -> 'A[T]': pass + +class A(Generic[T]): pass +class B: pass + aab = None # type: A[A[B]] aao = None # type: A[A[object]] ao = None # type: A[object] @@ -273,15 +279,16 @@ if int(): aab = f(f(b)) aao = f(f(b)) ao = f(f(b)) +[case testNestedGenericFunctionCall2] +from typing import TypeVar, Generic +T = TypeVar('T') -def f(a: T) -> 'A[T]': pass +def f(a: T) -> T: pass +def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass -[case testNestedGenericFunctionCall2] -from typing import TypeVar, Generic -T = TypeVar('T') ab 
= None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -293,17 +300,16 @@ if int(): if int(): ab = f(g(b)) ao = f(g(b)) - -def f(a: T) -> T: pass +[case testNestedGenericFunctionCall3] +from typing import TypeVar, Generic +T = TypeVar('T') +def f(a: T, b: T) -> T: + pass def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass - -[case testNestedGenericFunctionCall3] -from typing import TypeVar, Generic -T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -320,14 +326,6 @@ if int(): if int(): ao = f(g(o), g(b)) -def f(a: T, b: T) -> T: - pass - -def g(a: T) -> 'A[T]': pass - -class A(Generic[T]): pass -class B: pass - -- Method calls -- ------------ @@ -339,6 +337,13 @@ T = TypeVar('T') o = None # type: object b = None # type: B c = None # type: C +def f(a: T) -> 'A[T]': pass + +class A(Generic[T]): + def g(self, a: 'A[T]') -> 'A[T]': pass + +class B: pass +class C(B): pass ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] @@ -353,14 +358,6 @@ if int(): ab = f(b).g(f(c)) ab.g(f(c)) -def f(a: T) -> 'A[T]': pass - -class A(Generic[T]): - def g(self, a: 'A[T]') -> 'A[T]': pass - -class B: pass -class C(B): pass - -- List expressions -- ---------------- @@ -461,8 +458,8 @@ class B: pass [case testParenthesesAndContext] from typing import List -l = ([A()]) # type: List[object] class A: pass +l = ([A()]) # type: List[object] [builtins fixtures/list.pyi] [case testComplexTypeInferenceWithTuple] @@ -470,14 +467,15 @@ from typing import TypeVar, Tuple, Generic k = TypeVar('k') t = TypeVar('t') v = TypeVar('v') -def f(x: Tuple[k]) -> 'A[k]': pass - -d = f((A(),)) # type: A[A[B]] class A(Generic[t]): pass class B: pass class C: pass class D(Generic[k, v]): pass + +def f(x: Tuple[k]) -> 'A[k]': pass + +d = f((A(),)) # type: A[A[B]] [builtins fixtures/list.pyi] @@ -505,12 +503,12 @@ d = {A() : a_c, [case testInitializationWithInferredGenericType] from typing import TypeVar, Generic T = 
TypeVar('T') -c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected "C[A]" def f(x: T) -> T: pass class C(Generic[T]): pass class A: pass +c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected "C[A]" [case testInferredGenericTypeAsReturnValue] from typing import TypeVar, Generic T = TypeVar('T') @@ -544,9 +542,6 @@ class B: pass from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') -x = A() # type: I[int] -a_object = A() # type: A[object] -y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type "A[object]", variable has type "I[int]") class I(Generic[t]): @abstractmethod @@ -554,16 +549,20 @@ class I(Generic[t]): class A(I[t], Generic[t]): def f(self): pass +x = A() # type: I[int] +a_object = A() # type: A[object] +y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type "A[object]", variable has type "I[int]") + [case testInferenceWithAbstractClassContext2] from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') -a = f(A()) # type: A[int] -a_int = A() # type: A[int] -aa = f(a_int) class I(Generic[t]): pass class A(I[t], Generic[t]): pass def f(i: I[t]) -> A[t]: pass +a = f(A()) # type: A[int] +a_int = A() # type: A[int] +aa = f(a_int) [case testInferenceWithAbstractClassContext3] from typing import TypeVar, Generic, Iterable @@ -585,9 +584,9 @@ if int(): from typing import Any, TypeVar, Generic s = TypeVar('s') t = TypeVar('t') +class C(Generic[s, t]): pass x = [] # type: Any y = C() # type: Any -class C(Generic[s, t]): pass [builtins fixtures/list.pyi] @@ -737,6 +736,9 @@ a = m # type: List[A] # E: Incompatible types in assignment (expression has type [case testOrOperationInferredFromContext] from typing import List +class A: pass +class B: pass +class C(B): pass a, b, c = None, None, None # type: (List[A], List[B], List[C]) if int(): a = a or [] @@ -748,10 +750,6 @@ if 
int(): a = a or b # E: Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type "List[A]") if int(): b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type "List[B]") - -class A: pass -class B: pass -class C(B): pass [builtins fixtures/list.pyi] @@ -765,38 +763,38 @@ t = TypeVar('t') s = TypeVar('s') # Some type variables can be inferred using context, but not all of them. a = None # type: List[A] +def f(a: s, b: t) -> List[s]: pass +class A: pass +class B: pass if int(): a = f(A(), B()) if int(): a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" -def f(a: s, b: t) -> List[s]: pass -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testSomeTypeVarsInferredFromContext2] from typing import List, TypeVar s = TypeVar('s') t = TypeVar('t') +def f(a: s, b: t) -> List[s]: pass +class A: pass +class B: pass # Like testSomeTypeVarsInferredFromContext, but tvars in different order. a = None # type: List[A] if int(): a = f(A(), B()) if int(): a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" -def f(a: s, b: t) -> List[s]: pass -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -map( - [lambda x: x], []) def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass class A: pass +map( + [lambda x: x], []) [builtins fixtures/list.pyi] [out] @@ -1419,3 +1417,60 @@ def bar(x: Union[Mapping[Any, Any], Dict[Any, Sequence[Any]]]) -> None: ... 
bar({1: 2}) [builtins fixtures/dict.pyi] + +[case testOptionalTypeNarrowedByGenericCall] +# flags: --strict-optional +from typing import Dict, Optional + +d: Dict[str, str] = {} + +def foo(arg: Optional[str] = None) -> None: + if arg is None: + arg = d.get("a", "b") + reveal_type(arg) # N: Revealed type is "builtins.str" +[builtins fixtures/dict.pyi] + +[case testOptionalTypeNarrowedByGenericCall2] +# flags: --strict-optional +from typing import Dict, Optional + +d: Dict[str, str] = {} +x: Optional[str] +if x: + reveal_type(x) # N: Revealed type is "builtins.str" + x = d.get(x, x) + reveal_type(x) # N: Revealed type is "builtins.str" +[builtins fixtures/dict.pyi] + +[case testOptionalTypeNarrowedByGenericCall3] +# flags: --strict-optional +from typing import Generic, TypeVar, Union + +T = TypeVar("T") +def bar(arg: Union[str, T]) -> Union[str, T]: ... + +def foo(arg: Union[str, int]) -> None: + if isinstance(arg, int): + arg = bar("default") + reveal_type(arg) # N: Revealed type is "builtins.str" +[builtins fixtures/isinstance.pyi] + +[case testOptionalTypeNarrowedByGenericCall4] +# flags: --strict-optional +from typing import Optional, List, Generic, TypeVar + +T = TypeVar("T", covariant=True) +class C(Generic[T]): ... 
+ +x: Optional[C[int]] = None +y = x = C() +reveal_type(y) # N: Revealed type is "__main__.C[builtins.int]" + +[case testOptionalTypeNarrowedByGenericCall5] +from typing import Any, Tuple, Union + +i: Union[Tuple[Any, ...], int] +b: Any +i = i if isinstance(i, int) else b +reveal_type(i) # N: Revealed type is "Union[Any, builtins.int]" +[builtins fixtures/isinstance.pyi] diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 6767f1c7995c..fc8113766f1a 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3,7 +3,9 @@ [case testInferSimpleGvarType] -import typing +class A: pass +class B: pass + x = A() y = B() if int(): @@ -14,9 +16,6 @@ if int(): x = y # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): x = x -class A: pass -class B: pass - [case testInferSimpleLvarType] import typing def f() -> None: @@ -401,6 +400,8 @@ a = None # type: A b = None # type: B c = None # type: Tuple[A, object] +def id(a: T) -> T: pass + if int(): b = id(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = id(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") @@ -412,8 +413,6 @@ if int(): b = id(b) c = id(c) -def id(a: T) -> T: pass - class A: pass class B: pass [builtins fixtures/tuple.pyi] @@ -444,20 +443,26 @@ def ff() -> None: x = f() # E: Need type annotation for "x" reveal_type(x) # N: Revealed type is "Any" +def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same TypeVar +def g(a: T) -> None: pass + g(None) # Ok f() # Ok because not used to infer local variable type g(a) - -def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same TypeVar -def g(a: T) -> None: pass [out] [case testInferenceWithMultipleConstraints] from typing import TypeVar + +class A: pass +class B(A): pass 
+ T = TypeVar('T') a = None # type: A b = None # type: B +def f(a: T, b: T) -> T: pass + if int(): b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): @@ -467,15 +472,16 @@ if int(): if int(): a = f(b, a) -def f(a: T, b: T) -> T: pass - -class A: pass -class B(A): pass - [case testInferenceWithMultipleVariables] from typing import Tuple, TypeVar T = TypeVar('T') S = TypeVar('S') + +def f(a: T, b: S) -> Tuple[T, S]: pass + +class A: pass +class B: pass + a, b = None, None # type: (A, B) taa = None # type: Tuple[A, A] tab = None # type: Tuple[A, B] @@ -493,11 +499,6 @@ if int(): tab = f(a, b) if int(): tba = f(b, a) - -def f(a: T, b: S) -> Tuple[T, S]: pass - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] [case testConstraintSolvingWithSimpleGenerics] @@ -507,6 +508,14 @@ ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] +def f(a: 'A[T]') -> 'A[T]': pass + +def g(a: T) -> T: pass + +class A(Generic[T]): pass +class B: pass +class C: pass + if int(): ab = f(ao) # E: Argument 1 to "f" has incompatible type "A[object]"; expected "A[B]" ao = f(ab) # E: Argument 1 to "f" has incompatible type "A[B]"; expected "A[object]" @@ -524,37 +533,33 @@ if int(): if int(): ab = g(ab) ao = g(ao) - -def f(a: 'A[T]') -> 'A[T]': pass - -def g(a: T) -> T: pass - -class A(Generic[T]): pass -class B: pass -class C: pass - [case testConstraintSolvingFailureWithSimpleGenerics] from typing import TypeVar, Generic T = TypeVar('T') ao = None # type: A[object] ab = None # type: A[B] -f(ao, ab) # E: Cannot infer type argument 1 of "f" -f(ab, ao) # E: Cannot infer type argument 1 of "f" -f(ao, ao) -f(ab, ab) - def f(a: 'A[T]', b: 'A[T]') -> None: pass class A(Generic[T]): pass class B: pass + +f(ao, ab) # E: Cannot infer type argument 1 of "f" +f(ab, ao) # E: Cannot infer type argument 1 of "f" +f(ao, ao) +f(ab, ab) [case testTypeInferenceWithCalleeDefaultArgs] from typing import TypeVar T = TypeVar('T') 
a = None # type: A o = None # type: object +def f(a: T = None) -> T: pass +def g(a: T, b: T = None) -> T: pass + +class A: pass + if int(): a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): @@ -569,11 +574,6 @@ if int(): if int(): a = g(a) -def f(a: T = None) -> T: pass -def g(a: T, b: T = None) -> T: pass - -class A: pass - -- Generic function inference with multiple inheritance -- ---------------------------------------------------- @@ -655,6 +655,12 @@ g(c) [case testPrecedenceOfFirstBaseAsInferenceResult] from typing import TypeVar from abc import abstractmethod, ABCMeta +class A: pass +class B(A, I, J): pass +class C(A, I, J): pass + +def f(a: T, b: T) -> T: pass + T = TypeVar('T') a, i, j = None, None, None # type: (A, I, J) @@ -663,11 +669,7 @@ a = f(B(), C()) class I(metaclass=ABCMeta): pass class J(metaclass=ABCMeta): pass -def f(a: T, b: T) -> T: pass -class A: pass -class B(A, I, J): pass -class C(A, I, J): pass [builtins fixtures/tuple.pyi] @@ -966,6 +968,9 @@ list_2 = [f, h] [case testInferenceOfFor1] a, b = None, None # type: (A, B) +class A: pass +class B: pass + for x in [A()]: b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x @@ -973,40 +978,32 @@ for x in [A()]: for y in []: # E: Need type annotation for "y" a = y reveal_type(y) # N: Revealed type is "Any" - -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testInferenceOfFor2] +class A: pass +class B: pass +class C: pass a, b, c = None, None, None # type: (A, B, C) for x, (y, z) in [(A(), (B(), C()))]: - b = x # Fail - c = y # Fail - a = z # Fail + b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B") + c = y # E: Incompatible types in assignment (expression has type "B", variable has type "C") + a = z # E: Incompatible types in assignment (expression has type "C", variable has type "A") a = x b = y c = z -for xx, yy, zz in [(A(), 
B())]: # Fail +for xx, yy, zz in [(A(), B())]: # E: Need more than 2 values to unpack (3 expected) pass -for xx, (yy, zz) in [(A(), B())]: # Fail +for xx, (yy, zz) in [(A(), B())]: # E: "B" object is not iterable pass for xxx, yyy in [(None, None)]: pass - -class A: pass -class B: pass -class C: pass [builtins fixtures/for.pyi] -[out] -main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") -main:5: error: Incompatible types in assignment (expression has type "B", variable has type "C") -main:6: error: Incompatible types in assignment (expression has type "C", variable has type "A") -main:10: error: Need more than 2 values to unpack (3 expected) -main:12: error: "B" object is not iterable [case testInferenceOfFor3] +class A: pass +class B: pass a, b = None, None # type: (A, B) @@ -1021,19 +1018,21 @@ for e, f in [[]]: # E: Need type annotation for "e" \ reveal_type(e) # N: Revealed type is "Any" reveal_type(f) # N: Revealed type is "Any" -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testForStatementInferenceWithVoid] -import typing +def f() -> None: pass + for x in f(): # E: "f" does not return a value pass -def f() -> None: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex] import typing + +class A: pass +class B: pass + for a in [A()]: pass a = A() if int(): @@ -1041,8 +1040,6 @@ if int(): for a in []: pass a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex2] @@ -1128,15 +1125,15 @@ if int(): class A: pass [case testInferGlobalDefinedInBlock] -import typing +class A: pass +class B: pass + if A: a = A() if int(): a = A() if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A: pass -class B: pass [case testAssigningAnyStrToNone] from typing import Tuple, TypeVar @@ -1314,7 +1311,7 @@ class 
A: pass [case testAccessGlobalVarBeforeItsTypeIsAvailable] import typing -x.y # E: Cannot determine type of "x" +x.y # E: Cannot determine type of "x" # E: Name "x" is used before definition x = object() x.y # E: "object" has no attribute "y" @@ -1954,6 +1951,13 @@ class A: [out] main:4: error: "None" has no attribute "__iter__" (not iterable) +[case testPartialTypeErrorSpecialCase4] +# This used to crash. +arr = [] +arr.append(arr.append(1)) +[builtins fixtures/list.pyi] +[out] +main:3: error: "append" of "list" does not return a value -- Multipass -- --------- @@ -1977,9 +1981,9 @@ class A: [out] [case testMultipassAndTopLevelVariable] -y = x # E: Cannot determine type of "x" +y = x # E: Cannot determine type of "x" # E: Name "x" is used before definition y() -x = 1+0 +x = 1+int() [out] [case testMultipassAndDecoratedMethod] @@ -2160,7 +2164,7 @@ from typing import TypeVar, Callable T = TypeVar('T') def dec() -> Callable[[T], T]: pass -A.g # E: Cannot determine type of "g" +A.g # E: Cannot determine type of "g" # E: Name "A" is used before definition class A: @classmethod @@ -2990,13 +2994,14 @@ class C: [case testUnionGenericWithBoundedVariable] from typing import Generic, TypeVar, Union +class A: ... +class B(A): ... + T = TypeVar('T', bound=A) class Z(Generic[T]): def __init__(self, y: T) -> None: self.y = y -class A: ... -class B(A): ... F = TypeVar('F', bound=A) def q1(x: Union[F, Z[F]]) -> F: @@ -3382,3 +3387,15 @@ class A: T = TypeVar("T") def type_or_callable(value: T, tp: Union[Type[T], Callable[[int], T]]) -> T: ... reveal_type(type_or_callable(A("test"), A)) # N: Revealed type is "__main__.A" + +[case testJoinWithAnyFallback] +from unknown import X # type: ignore[import] + +class A: ... +class B(X, A): ... +class C(B): ... +class D(C): ... +class E(D): ... 
+ +reveal_type([E(), D()]) # N: Revealed type is "builtins.list[__main__.D]" +reveal_type([D(), E()]) # N: Revealed type is "builtins.list[__main__.D]" diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 046a4fc43537..0722ee8d91e5 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -1750,11 +1750,8 @@ def f(cls: Type[object]) -> None: [case testIsinstanceTypeArgs] from typing import Iterable, TypeVar x = 1 -T = TypeVar('T') - isinstance(x, Iterable) isinstance(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks -isinstance(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, (str, Iterable[int]))) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstancelist.pyi] @@ -1783,10 +1780,8 @@ isinstance(x, It2) # E: Parameterized generics cannot be used with class or ins [case testIssubclassTypeArgs] from typing import Iterable, TypeVar x = int -T = TypeVar('T') issubclass(x, Iterable) issubclass(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks -issubclass(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks issubclass(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] @@ -2392,7 +2387,7 @@ class B: x1: Literal[1] = self.f() def t2(self) -> None: - if isinstance(self, (A0, A1)): # E: Subclass of "B" and "A0" cannot exist: would have incompatible method signatures + if isinstance(self, (A0, A1)): reveal_type(self) # N: Revealed type is "__main__.1" x0: Literal[0] = self.f() # E: Incompatible types in assignment (expression has type "Literal[1]", 
variable has type "Literal[0]") x1: Literal[1] = self.f() diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test index e59c295b58ac..e0fe389bbbd9 100644 --- a/test-data/unit/check-kwargs.test +++ b/test-data/unit/check-kwargs.test @@ -8,24 +8,27 @@ f(o=None()) # E: "None" not callable [case testSimpleKeywordArgument] import typing +class A: pass def f(a: 'A') -> None: pass f(a=A()) f(a=object()) # E: Argument "a" to "f" has incompatible type "object"; expected "A" -class A: pass [case testTwoKeywordArgumentsNotInOrder] import typing +class A: pass +class B: pass def f(a: 'A', b: 'B') -> None: pass f(b=A(), a=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "B" f(b=B(), a=B()) # E: Argument "a" to "f" has incompatible type "B"; expected "A" f(a=A(), b=B()) f(b=B(), a=A()) -class A: pass -class B: pass [case testOneOfSeveralOptionalKeywordArguments] # flags: --implicit-optional import typing +class A: pass +class B: pass +class C: pass def f(a: 'A' = None, b: 'B' = None, c: 'C' = None) -> None: pass f(a=A()) f(b=B()) @@ -35,39 +38,34 @@ f(a=B()) # E: Argument "a" to "f" has incompatible type "B"; expected "Optional[ f(b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" f(c=B()) # E: Argument "c" to "f" has incompatible type "B"; expected "Optional[C]" f(b=B(), c=A()) # E: Argument "c" to "f" has incompatible type "A"; expected "Optional[C]" -class A: pass -class B: pass -class C: pass - [case testBothPositionalAndKeywordArguments] import typing +class A: pass +class B: pass def f(a: 'A', b: 'B') -> None: pass f(A(), b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "B" f(A(), b=B()) -class A: pass -class B: pass [case testContextSensitiveTypeInferenceForKeywordArg] from typing import List +class A: pass def f(a: 'A', b: 'List[A]') -> None: pass f(b=[], a=A()) -class A: pass [builtins fixtures/list.pyi] [case testGivingArgumentAsPositionalAndKeywordArg] import typing -def f(a: 
'A', b: 'B' = None) -> None: pass -f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass +def f(a: 'A', b: 'B' = None) -> None: pass +f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" [case testGivingArgumentAsPositionalAndKeywordArg2] import typing -def f(a: 'A' = None, b: 'B' = None) -> None: pass -f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass - +def f(a: 'A' = None, b: 'B' = None) -> None: pass +f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" [case testPositionalAndKeywordForSameArg] # This used to crash in check_argument_count(). See #1095. def f(a: int): pass @@ -81,57 +79,57 @@ f(b=object()) # E: Unexpected keyword argument "b" for "f" class A: pass [case testKeywordMisspelling] +class A: pass def f(other: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? -class A: pass [case testMultipleKeywordsForMisspelling] -def f(thing : 'A', other: 'A', atter: 'A', btter: 'B') -> None: pass # N: "f" defined here -f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other" or "atter"? class A: pass class B: pass +def f(thing : 'A', other: 'A', atter: 'A', btter: 'B') -> None: pass # N: "f" defined here +f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "other"? [case testKeywordMisspellingDifferentType] -def f(other: 'A') -> None: pass # N: "f" defined here -f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass class B: pass +def f(other: 'A') -> None: pass # N: "f" defined here +f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? 
[case testKeywordMisspellingInheritance] -def f(atter: 'A', btter: 'B', ctter: 'C') -> None: pass # N: "f" defined here -f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "btter" or "atter"? class A: pass class B(A): pass class C: pass +def f(atter: 'A', btter: 'B', ctter: 'C') -> None: pass # N: "f" defined here +f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "btter"? [case testKeywordMisspellingFloatInt] def f(atter: float, btter: int) -> None: pass # N: "f" defined here x: int = 5 -f(otter=x) # E: Unexpected keyword argument "otter" for "f"; did you mean "btter" or "atter"? +f(otter=x) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "btter"? [case testKeywordMisspellingVarArgs] +class A: pass def f(other: 'A', *atter: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? -class A: pass [builtins fixtures/tuple.pyi] [case testKeywordMisspellingOnlyVarArgs] +class A: pass def f(*other: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f" -class A: pass [builtins fixtures/tuple.pyi] [case testKeywordMisspellingVarArgsDifferentTypes] -def f(other: 'B', *atter: 'A') -> None: pass # N: "f" defined here -f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass class B: pass +def f(other: 'B', *atter: 'A') -> None: pass # N: "f" defined here +f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? 
[builtins fixtures/tuple.pyi] [case testKeywordMisspellingVarKwargs] +class A: pass def f(other: 'A', **atter: 'A') -> None: pass f(otter=A()) # E: Missing positional argument "other" in call to "f" -class A: pass [builtins fixtures/dict.pyi] [case testKeywordArgumentsWithDynamicallyTypedCallable] @@ -143,18 +141,15 @@ f(x=None) [case testKeywordArgumentWithFunctionObject] from typing import Callable -f = None # type: Callable[[A, B], None] -f(a=A(), b=B()) -f(A(), b=B()) class A: pass class B: pass -[out] -main:3: error: Unexpected keyword argument "a" -main:3: error: Unexpected keyword argument "b" -main:4: error: Unexpected keyword argument "b" - +f = None # type: Callable[[A, B], None] +f(a=A(), b=B()) # E: Unexpected keyword argument "a" # E: Unexpected keyword argument "b" +f(A(), b=B()) # E: Unexpected keyword argument "b" [case testKeywordOnlyArguments] import typing +class A: pass +class B: pass def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass @@ -177,13 +172,12 @@ i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) +[case testKeywordOnlyArgumentsFastparse] +import typing class A: pass class B: pass -[case testKeywordOnlyArgumentsFastparse] - -import typing def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass @@ -206,10 +200,6 @@ i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) - -class A: pass -class B: pass - [case testKwargsAfterBareArgs] from typing import Tuple, Any def f(a, *, b=None) -> None: pass @@ -222,6 +212,8 @@ f(a, **b) [case testKeywordArgAfterVarArgs] # flags: --implicit-optional import typing +class A: pass +class B: pass def f(*a: 'A', b: 'B' = None) -> None: pass f() f(A()) @@ -232,13 +224,13 @@ f(A(), A(), b=B()) f(B()) # E: Argument 1 to "f" has 
incompatible type "B"; expected "A" f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testKeywordArgAfterVarArgsWithBothCallerAndCalleeVarArgs] # flags: --implicit-optional --no-strict-optional from typing import List +class A: pass +class B: pass def f(*a: 'A', b: 'B' = None) -> None: pass a = None # type: List[A] f(*a) @@ -249,18 +241,16 @@ f(A(), *a, b=B()) f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(A(), b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" f(*a, b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallingDynamicallyTypedFunctionWithKeywordArgs] import typing +class A: pass def f(x, y=A()): pass # N: "f" defined here f(x=A(), y=A()) f(y=A(), x=A()) f(y=A()) # E: Missing positional argument "x" in call to "f" f(A(), z=A()) # E: Unexpected keyword argument "z" for "f" -class A: pass [case testKwargsArgumentInFunctionBody] from typing import Dict, Any @@ -284,6 +274,8 @@ class A: pass [case testCallingFunctionThatAcceptsVarKwargs] import typing +class A: pass +class B: pass def f( **kwargs: 'A') -> None: pass f() f(x=A()) @@ -291,12 +283,12 @@ f(y=A(), z=A()) f(x=B()) # E: Argument "x" to "f" has incompatible type "B"; expected "A" f(A()) # E: Too many arguments for "f" # Perhaps a better message would be "Too many *positional* arguments..." 
-class A: pass -class B: pass [builtins fixtures/dict.pyi] [case testCallingFunctionWithKeywordVarArgs] from typing import Dict +class A: pass +class B: pass def f( **kwargs: 'A') -> None: pass d = None # type: Dict[str, A] f(**d) @@ -305,8 +297,6 @@ d2 = None # type: Dict[str, B] f(**d2) # E: Argument 1 to "f" has incompatible type "**Dict[str, B]"; expected "A" f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type "**Dict[str, B]"; expected "A" f(**{'x': B()}) # E: Argument 1 to "f" has incompatible type "**Dict[str, B]"; expected "A" -class A: pass -class B: pass [builtins fixtures/dict.pyi] [case testKwargsAllowedInDunderCall] @@ -355,11 +345,11 @@ class A: pass [case testInvalidTypeForKeywordVarArg] # flags: --strict-optional from typing import Dict, Any, Optional +class A: pass def f(**kwargs: 'A') -> None: pass d = {} # type: Dict[A, A] f(**d) # E: Keywords must be strings f(**A()) # E: Argument after ** must be a mapping, not "A" -class A: pass kwargs: Optional[Any] f(**kwargs) # E: Argument after ** must be a mapping, not "Optional[Any]" [builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index da8f1570a4f4..d523e5c08af8 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -1484,16 +1484,17 @@ Alias = Literal[3] isinstance(3, Literal[3]) # E: Cannot use isinstance() with Literal type isinstance(3, Alias) # E: Cannot use isinstance() with Literal type \ - # E: Argument 2 to "isinstance" has incompatible type "object"; expected "Union[type, Tuple[Any, ...]]" + # E: Argument 2 to "isinstance" has incompatible type ""; expected "Union[type, Tuple[Any, ...]]" isinstance(3, Renamed[3]) # E: Cannot use isinstance() with Literal type isinstance(3, indirect.Literal[3]) # E: Cannot use isinstance() with Literal type issubclass(int, Literal[3]) # E: Cannot use issubclass() with Literal type issubclass(int, Alias) # E: Cannot use issubclass() with Literal type \ - # 
E: Argument 2 to "issubclass" has incompatible type "object"; expected "Union[type, Tuple[Any, ...]]" + # E: Argument 2 to "issubclass" has incompatible type ""; expected "Union[type, Tuple[Any, ...]]" issubclass(int, Renamed[3]) # E: Cannot use issubclass() with Literal type issubclass(int, indirect.Literal[3]) # E: Cannot use issubclass() with Literal type [builtins fixtures/isinstancelist.pyi] +[typing fixtures/typing-medium.pyi] [out] [case testLiteralErrorsWhenSubclassed] @@ -2436,23 +2437,10 @@ b: Final = 3 c: Final[Literal[3]] = 3 d: Literal[3] -# TODO: Consider if we want to support cases 'b' and 'd' or not. -# Probably not: we want to mostly keep the 'types' and 'value' worlds distinct. -# However, according to final semantics, we ought to be able to substitute "b" with -# "3" wherever it's used and get the same behavior -- so maybe we do need to support -# at least case "b" for consistency? -a_wrap: Literal[4, a] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.a" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases -b_wrap: Literal[4, b] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.b" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases -c_wrap: Literal[4, c] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.c" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases -d_wrap: Literal[4, d] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.d" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +a_wrap: Literal[4, a] # E: Parameter 2 of Literal[...] is invalid +b_wrap: Literal[4, b] # E: Parameter 2 of Literal[...] is invalid +c_wrap: Literal[4, c] # E: Parameter 2 of Literal[...] 
is invalid +d_wrap: Literal[4, d] # E: Parameter 2 of Literal[...] is invalid [builtins fixtures/tuple.pyi] [out] @@ -2516,9 +2504,7 @@ r: Literal[Color.RED] g: Literal[Color.GREEN] b: Literal[Color.BLUE] bad1: Literal[Color] # E: Parameter 1 of Literal[...] is invalid -bad2: Literal[Color.func] # E: Function "__main__.Color.func" is not valid as a type \ - # N: Perhaps you need "Callable[...]" or a callback protocol? \ - # E: Parameter 1 of Literal[...] is invalid +bad2: Literal[Color.func] # E: Parameter 1 of Literal[...] is invalid bad3: Literal[Color.func()] # E: Invalid type: Literal[...] cannot contain arbitrary expressions def expects_color(x: Color) -> None: pass diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 9b41692e52e6..b11a959df4cc 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -651,10 +651,29 @@ try: from m import f, g # E: Incompatible import of "g" (imported name has type "Callable[[Any, Any], Any]", local name has type "Callable[[Any], Any]") except: pass + +import m as f # E: Incompatible import of "f" (imported name has type "object", local name has type "Callable[[Any], Any]") + [file m.py] def f(x): pass def g(x, y): pass +[case testRedefineTypeViaImport] +from typing import Type +import mod + +X: Type[mod.A] +Y: Type[mod.B] +from mod import B as X +from mod import A as Y # E: Incompatible import of "Y" (imported name has type "Type[A]", local name has type "Type[B]") + +import mod as X # E: Incompatible import of "X" (imported name has type "object", local name has type "Type[A]") + +[file mod.py] +class A: ... +class B(A): ... 
+ + [case testImportVariableAndAssignNone] try: from m import x @@ -1329,13 +1348,13 @@ import a import b def f() -> int: return b.x -y = 0 + 0 +y = 0 + int() [file b.py] import a def g() -> int: reveal_type(a.y) return a.y -x = 1 + 1 +x = 1 + int() [out] tmp/b.py:3: note: Revealed type is "builtins.int" @@ -1346,12 +1365,12 @@ import b def f() -> int: reveal_type(b.x) return b.x -y = 0 + 0 +y = 0 + int() [file b.py] import a def g() -> int: return a.y -x = 1 + 1 +x = 1 + int() [out] tmp/a.py:3: note: Revealed type is "builtins.int" @@ -1366,7 +1385,7 @@ class C: self.x2 = b.b [file b.py] import a -b = 1 + 1 +b = 1 + int() [out] tmp/a.py:4: error: Cannot determine type of "x2" @@ -1379,7 +1398,7 @@ def f() -> None: a + '' [file b.py] import a -x = 1 + 1 +x = 1 + int() [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") @@ -1392,7 +1411,7 @@ def f() -> None: a + '' [file b.py] import a -x = 1 + 1 +x = 1 + int() [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") @@ -1405,7 +1424,7 @@ def g() -> None: @b.deco def f(a: str) -> int: pass reveal_type(f) -x = 1 + 1 +x = 1 + int() [file b.py] from typing import Callable, TypeVar import a @@ -1787,14 +1806,14 @@ m = n # E: Cannot assign multiple modules to name "m" without explicit "types.M [builtins fixtures/module.pyi] [case testNoReExportFromStubs] -from stub import Iterable # E: Module "stub" has no attribute "Iterable" -from stub import D # E: Module "stub" has no attribute "D" +from stub import Iterable # E: Module "stub" does not explicitly export attribute "Iterable" +from stub import D # E: Module "stub" does not explicitly export attribute "D" from stub import C c = C() reveal_type(c.x) # N: Revealed type is "builtins.int" it: Iterable[int] -reveal_type(it) # N: Revealed type is "Any" +reveal_type(it) # N: Revealed type is "typing.Iterable[builtins.int]" [file stub.pyi] from typing import Iterable @@ -1884,7 +1903,7 @@ class C: from util import mod reveal_type(mod) # 
N: Revealed type is "def () -> package.mod.mod" -from util import internal_detail # E: Module "util" has no attribute "internal_detail" +from util import internal_detail # E: Module "util" does not explicitly export attribute "internal_detail" [file package/__init__.pyi] from .mod import mod as mod @@ -1899,7 +1918,7 @@ from package import mod as internal_detail [builtins fixtures/module.pyi] [case testNoReExportUnrelatedModule] -from mod2 import unrelated # E: Module "mod2" has no attribute "unrelated" +from mod2 import unrelated # E: Module "mod2" does not explicitly export attribute "unrelated" [file mod1/__init__.pyi] [file mod1/unrelated.pyi] @@ -1910,7 +1929,7 @@ from mod1 import unrelated [builtins fixtures/module.pyi] [case testNoReExportUnrelatedSiblingPrefix] -from pkg.unrel import unrelated # E: Module "pkg.unrel" has no attribute "unrelated" +from pkg.unrel import unrelated # E: Module "pkg.unrel" does not explicitly export attribute "unrelated" [file pkg/__init__.pyi] [file pkg/unrelated.pyi] @@ -1922,7 +1941,7 @@ from pkg import unrelated [case testNoReExportChildStubs] import mod -from mod import C, D # E: Module "mod" has no attribute "C" +from mod import C, D # E: Module "mod" does not explicitly export attribute "C" reveal_type(mod.x) # N: Revealed type is "mod.submod.C" mod.C # E: "Module mod" does not explicitly export attribute "C" @@ -1940,7 +1959,7 @@ class D: [builtins fixtures/module.pyi] [case testNoReExportNestedStub] -from stub import substub # E: Module "stub" has no attribute "substub" +from stub import substub # E: Module "stub" does not explicitly export attribute "substub" [file stub.pyi] import substub @@ -2852,7 +2871,7 @@ aaaaa: int [case testModuleAttributeThreeSuggestions] import m -m.aaaaa # E: Module has no attribute "aaaaa"; maybe "aabaa", "aaaba", or "aaaab"? +m.aaaaa # E: Module has no attribute "aaaaa"; maybe "aaaab", "aaaba", or "aabaa"? 
[file m.py] aaaab: int @@ -2887,10 +2906,10 @@ CustomDict = TypedDict( [builtins fixtures/tuple.pyi] [case testNoReExportFromMissingStubs] -from stub import a # E: Module "stub" has no attribute "a" +from stub import a # E: Module "stub" does not explicitly export attribute "a" from stub import b from stub import c # E: Module "stub" has no attribute "c" -from stub import d # E: Module "stub" has no attribute "d" +from stub import d # E: Module "stub" does not explicitly export attribute "d" [file stub.pyi] from mystery import a, b as b, c as d diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index 438e17a6ba0a..6b9f139f541c 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -561,6 +561,7 @@ A # E: Name "A" is not defined [builtins fixtures/tuple.pyi] [case testNamedTupleForwardAsUpperBound] +# flags: --disable-error-code=used-before-def from typing import NamedTuple, TypeVar, Generic T = TypeVar('T', bound='M') class G(Generic[T]): @@ -723,7 +724,7 @@ reveal_type(n.y[0]) # N: Revealed type is "Any" from typing import NamedTuple B = NamedTuple('B', [ - ('x', A), # E: Cannot resolve name "A" (possible cyclic definition) + ('x', A), # E: Cannot resolve name "A" (possible cyclic definition) # E: Name "A" is used before definition ('y', int), ]) A = NamedTuple('A', [ @@ -904,6 +905,7 @@ if not b: [builtins fixtures/tuple.pyi] [case testNamedTupleDoubleForward] +# flags: --disable-error-code=used-before-def from typing import Union, Mapping, NamedTuple class MyBaseTuple(NamedTuple): @@ -1306,3 +1308,32 @@ class C( [builtins fixtures/tuple.pyi] [typing fixtures/typing-namedtuple.pyi] + +[case testNamedTupleSelfItemNotAllowed] +from typing import Self, NamedTuple, Optional + +class NT(NamedTuple): + val: int + next: Optional[Self] # E: Self type cannot be used in NamedTuple item type +NTC = NamedTuple("NTC", [("val", int), ("next", Optional[Self])]) # E: Self type cannot be used in 
NamedTuple item type +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] + +[case testNamedTupleTypingSelfMethod] +from typing import Self, NamedTuple, TypeVar, Generic + +T = TypeVar("T") +class NT(NamedTuple, Generic[T]): + key: str + val: T + def meth(self) -> Self: + nt: NT[int] + if bool(): + return nt._replace() # E: Incompatible return value type (got "NT[int]", expected "Self") + else: + return self._replace() + +class SNT(NT[int]): ... +reveal_type(SNT("test", 42).meth()) # N: Revealed type is "Tuple[builtins.str, builtins.int, fallback=__main__.SNT]" +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] diff --git a/test-data/unit/check-native-int.test b/test-data/unit/check-native-int.test index 24bf0d99b145..1e945d0af27d 100644 --- a/test-data/unit/check-native-int.test +++ b/test-data/unit/check-native-int.test @@ -184,3 +184,47 @@ from mypy_extensions import i64, i32 reveal_type([a for a in range(i64(5))]) # N: Revealed type is "builtins.list[mypy_extensions.i64]" [reveal_type(a) for a in range(0, i32(5))] # N: Revealed type is "mypy_extensions.i32" [builtins fixtures/primitives.pyi] + +[case testNativeIntNarrowing] +from typing import Union +from mypy_extensions import i64, i32 + +def narrow_i64(x: Union[str, i64]) -> None: + if isinstance(x, i64): + reveal_type(x) # N: Revealed type is "mypy_extensions.i64" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i64]" + + if isinstance(x, str): + reveal_type(x) # N: Revealed type is "builtins.str" + else: + reveal_type(x) # N: Revealed type is "mypy_extensions.i64" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i64]" + + if isinstance(x, int): + reveal_type(x) # N: Revealed type is "mypy_extensions.i64" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i64]" + +def 
narrow_i32(x: Union[str, i32]) -> None: + if isinstance(x, i32): + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i32]" + + if isinstance(x, str): + reveal_type(x) # N: Revealed type is "builtins.str" + else: + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i32]" + + if isinstance(x, int): + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i32]" + +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test index 97cf1ef1494d..99f4141a4d64 100644 --- a/test-data/unit/check-newsemanal.test +++ b/test-data/unit/check-newsemanal.test @@ -434,7 +434,7 @@ def main() -> None: x # E: Name "x" is not defined [case testNewAnalyzerCyclicDefinitions] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases --disable-error-code used-before-def gx = gy # E: Cannot resolve name "gy" (possible cyclic definition) gy = gx def main() -> None: @@ -521,12 +521,6 @@ reveal_type(b.x) # N: Revealed type is "builtins.int" reveal_type(b.f()) # N: Revealed type is "builtins.str" [case testNewAnalyzerNestedClass2] -b: A.B -b = A.B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int" -reveal_type(b) # N: Revealed type is "__main__.A.B" -reveal_type(b.x) # N: Revealed type is "builtins.int" -reveal_type(b.f()) # N: Revealed type is "builtins.str" - class A: class B: x: int @@ -537,17 +531,14 @@ class A: def f(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") +b: A.B +b = A.B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int" +reveal_type(b) # N: Revealed 
type is "__main__.A.B" +reveal_type(b.x) # N: Revealed type is "builtins.int" +reveal_type(b.f()) # N: Revealed type is "builtins.str" [case testNewAnalyzerGenerics] from typing import TypeVar, Generic -c: C[int] -c2: C[int, str] # E: "C" expects 1 type argument, but 2 given -c3: C -c = C('') # E: Argument 1 to "C" has incompatible type "str"; expected "int" -reveal_type(c.get()) # N: Revealed type is "builtins.int" -reveal_type(c2) # N: Revealed type is "__main__.C[Any]" -reveal_type(c3) # N: Revealed type is "__main__.C[Any]" - T = TypeVar('T') class C(Generic[T]): @@ -557,6 +548,13 @@ class C(Generic[T]): def get(self) -> T: return self.x +c: C[int] +c2: C[int, str] # E: "C" expects 1 type argument, but 2 given +c3: C +c = C('') # E: Argument 1 to "C" has incompatible type "str"; expected "int" +reveal_type(c.get()) # N: Revealed type is "builtins.int" +reveal_type(c2) # N: Revealed type is "__main__.C[Any]" +reveal_type(c3) # N: Revealed type is "__main__.C[Any]" [case testNewAnalyzerGenericsTypeVarForwardRef] from typing import TypeVar, Generic @@ -577,6 +575,12 @@ reveal_type(c.get()) # N: Revealed type is "builtins.int" [case testNewAnalyzerTypeAlias] from typing import Union, TypeVar, Generic +T = TypeVar('T') +S = TypeVar('S') +class D(Generic[T, S]): pass + +class C: pass + C2 = C U = Union[C, int] G = D[T, C] @@ -587,13 +591,6 @@ u: U reveal_type(u) # N: Revealed type is "Union[__main__.C, builtins.int]" g: G[int] reveal_type(g) # N: Revealed type is "__main__.D[builtins.int, __main__.C]" - -class C: pass - -T = TypeVar('T') -S = TypeVar('S') -class D(Generic[T, S]): pass - [case testNewAnalyzerTypeAlias2] from typing import Union @@ -678,13 +675,14 @@ a.f(1.0) # E: No overload variant of "f" of "A" matches argument type "float" \ # N: def f(self, x: str) -> str [case testNewAnalyzerPromotion] +def f(x: float) -> None: pass y: int f(y) f(1) -def f(x: float) -> None: pass [builtins fixtures/primitives.pyi] [case testNewAnalyzerFunctionDecorator] +# flags: 
--disable-error-code used-before-def from typing import Callable @dec @@ -702,6 +700,7 @@ reveal_type(f1('')) # N: Revealed type is "builtins.str" f2(1) # E: Argument 1 to "f2" has incompatible type "int"; expected "str" [case testNewAnalyzerTypeVarForwardReference] +# flags: --disable-error-code used-before-def from typing import TypeVar, Generic T = TypeVar('T') @@ -721,7 +720,7 @@ y: D[Y] from typing import TypeVar, Generic T = TypeVar('T') -XY = TypeVar('XY', X, Y) +XY = TypeVar('XY', 'X', 'Y') class C(Generic[T]): pass @@ -737,7 +736,7 @@ y: D[Y] from typing import TypeVar, Generic T = TypeVar('T') -XY = TypeVar('XY', X, Y) +XY = TypeVar('XY', 'X', 'Y') class C(Generic[T]): pass @@ -755,7 +754,7 @@ y: D[Y] from typing import TypeVar, Generic T = TypeVar('T') -TY = TypeVar('TY', bound=Y) +TY = TypeVar('TY', bound='Y') class C(Generic[T]): pass @@ -775,7 +774,7 @@ class C(Generic[T]): def func(x: U) -> U: ... U = TypeVar('U', asdf, asdf) # E: Name "asdf" is not defined -T = TypeVar('T', bound=asdf) # E: Name "asdf" is not defined +T = TypeVar('T', bound='asdf') # E: Name "asdf" is not defined reveal_type(C) # N: Revealed type is "def [T <: Any] (x: T`1) -> __main__.C[T`1]" reveal_type(func) # N: Revealed type is "def [U in (Any, Any)] (x: U`-1) -> U`-1" @@ -799,16 +798,16 @@ T = TypeVar('T') class A(Generic[T]): pass -a1: A[C] = C() -a2: A[D] = C() \ - # E: Incompatible types in assignment (expression has type "C", variable has type "A[D]") - class C(A[C]): pass -class D(A[D]): +class D(A['D']): pass +a1: A[C] = C() +a2: A[D] = C() \ + # E: Incompatible types in assignment (expression has type "C", variable has type "A[D]") + [case testNewAnalyzerTypeVarBoundForwardRef] from typing import TypeVar @@ -855,19 +854,17 @@ def f(): pass [case testNewAnalyzerNamedTupleCall] from typing import NamedTuple -o: Out -i: In +class Other: pass +In = NamedTuple('In', [('s', str), ('t', Other)]) Out = NamedTuple('Out', [('x', In), ('y', Other)]) - +o: Out +i: In reveal_type(o) 
# N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]" reveal_type(o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other, fallback=__main__.In]" reveal_type(o.y) # N: Revealed type is "__main__.Other" reveal_type(o.x.t) # N: Revealed type is "__main__.Other" reveal_type(i.t) # N: Revealed type is "__main__.Other" - -In = NamedTuple('In', [('s', str), ('t', Other)]) -class Other: pass [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClass] @@ -936,29 +933,23 @@ class C: [case testNewAnalyzerNamedTupleCallNestedMethod] from typing import NamedTuple -c = C() -reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@11], __main__.Other@12, fallback=__main__.C.Out@10]" -reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@11]" - class C: def get_tuple(self) -> None: - self.o: Out - Out = NamedTuple('Out', [('x', In), ('y', Other)]) - In = NamedTuple('In', [('s', str), ('t', Other)]) + Out = NamedTuple('Out', [('x', 'In'), ('y', 'Other')]) + In = NamedTuple('In', [('s', str), ('t', 'Other')]) class Other: pass + self.o: Out + +c = C() +reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@7, fallback=__main__.C.In@6], __main__.Other@7, fallback=__main__.C.Out@5]" +reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@7, fallback=__main__.C.In@6]" [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClassNestedMethod] from typing import NamedTuple -c = C() -reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15], __main__.Other@18, fallback=__main__.C.Out@11]" -reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15]" -reveal_type(c.o.method()) # N: Revealed type is "Tuple[builtins.str, __main__.Other@18, 
fallback=__main__.C.In@15]" - class C: def get_tuple(self) -> None: - self.o: Out class Out(NamedTuple): x: In y: Other @@ -967,6 +958,12 @@ class C: s: str t: Other class Other: pass + self.o: Out + +c = C() +reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@9], __main__.Other@12, fallback=__main__.C.Out@5]" +reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@9]" +reveal_type(c.o.method()) # N: Revealed type is "Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@9]" [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClassForwardMethod] @@ -988,34 +985,31 @@ class Other(NamedTuple): [case testNewAnalyzerNamedTupleSpecialMethods] from typing import NamedTuple +class Other: pass +In = NamedTuple('In', [('s', str), ('t', Other)]) +Out = NamedTuple('Out', [('x', In), ('y', Other)]) +class SubO(Out): pass + o: SubO reveal_type(SubO._make) # N: Revealed type is "def (iterable: typing.Iterable[Any], *, new: Any =, len: Any =) -> Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.SubO]" reveal_type(o._replace(y=Other())) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.SubO]" - -class SubO(Out): pass - -Out = NamedTuple('Out', [('x', In), ('y', Other)]) -In = NamedTuple('In', [('s', str), ('t', Other)]) -class Other: pass [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleBaseClass] from typing import NamedTuple +class Other: pass +class In(NamedTuple): + s: str + t: Other +class Out(NamedTuple('Out', [('x', In), ('y', Other)])): + pass o: Out reveal_type(o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]" reveal_type(o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other, fallback=__main__.In]" reveal_type(o.x.t) # N: Revealed 
type is "__main__.Other" reveal_type(Out._make) # N: Revealed type is "def (iterable: typing.Iterable[Any], *, new: Any =, len: Any =) -> Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]" - -class Out(NamedTuple('Out', [('x', In), ('y', Other)])): - pass - -class In(NamedTuple): - s: str - t: Other -class Other: pass [builtins fixtures/tuple.pyi] [case testNewAnalyzerIncompleteRefShadowsBuiltin1] @@ -1134,7 +1128,11 @@ class B(type): reveal_type(A.f()) # N: Revealed type is "builtins.int" [case testNewAnalyzerMetaclass2] -reveal_type(A.f()) # N: Revealed type is "builtins.int" +class B(type): + def f(cls) -> int: + return 0 + +class C: pass class A(metaclass=B): pass @@ -1142,12 +1140,7 @@ class A(metaclass=B): class AA(metaclass=C): # E: Metaclasses not inheriting from "type" are not supported pass -class B(type): - def f(cls) -> int: - return 0 - -class C: pass - +reveal_type(A.f()) # N: Revealed type is "builtins.int" [case testNewAnalyzerMetaclassPlaceholder] class B(C): pass @@ -1211,14 +1204,14 @@ class B(type): def f(cls) -> int: return 0 -reveal_type(A.f()) # N: Revealed type is "builtins.int" -reveal_type(A.x) # N: Revealed type is "builtins.str" - class A(six.with_metaclass(B, Defer)): pass class Defer: x: str + +reveal_type(A.f()) # N: Revealed type is "builtins.int" +reveal_type(A.x) # N: Revealed type is "builtins.str" [builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassFuture1] @@ -1252,6 +1245,7 @@ reveal_type(A.x) # N: Revealed type is "builtins.str" [builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassFuture4] +# flags: --disable-error-code used-before-def import future.utils class B(type): @@ -1271,31 +1265,32 @@ class Defer: [case testNewAnalyzerFinalDefiningModuleVar] from typing import Final +class D(C): ... +class C: ... 
+ x: Final = C() y: Final[C] = D() bad: Final[D] = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D") reveal_type(x) # N: Revealed type is "__main__.C" reveal_type(y) # N: Revealed type is "__main__.C" -class D(C): ... -class C: ... - [case testNewAnalyzerFinalDefiningInstanceVar] from typing import Final +class D: ... +class E(C): ... + class C: def __init__(self, x: D) -> None: self.x: Final = x self.y: Final[C] = E(D()) reveal_type(C(D()).x) # N: Revealed type is "__main__.D" reveal_type(C(D()).y) # N: Revealed type is "__main__.C" - -class D: ... -class E(C): ... - [case testNewAnalyzerFinalReassignModuleVar] from typing import Final +class A: ... + x: Final = A() x = A() # E: Cannot assign to final name "x" @@ -1308,8 +1303,6 @@ def f2() -> None: def g() -> None: f() -class A: ... - [case testNewAnalyzerFinalReassignModuleReexport] import a [file a.py] @@ -1382,6 +1375,7 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.A" [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyClass3] +# flags: --disable-error-code used-before-def from typing import List x: B @@ -1461,13 +1455,13 @@ from typing import List, TypeVar, Union T = TypeVar('T') x: B[int] -B = A[List[T]] A = Union[int, T] +B = A[List[T]] class C(List[B[int]]): pass +y: C reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.list[builtins.int]]" reveal_type(y[0]) # N: Revealed type is "Union[builtins.int, builtins.list[builtins.int]]" -y: C [builtins fixtures/list.pyi] [case testNewAnalyzerForwardAliasFromUnion] @@ -1488,6 +1482,7 @@ class C: [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyTwoDeferrals] +# flags: --disable-error-code used-before-def from typing import List x: B @@ -1500,7 +1495,7 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.C" [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBase] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases 
--disable-error-code used-before-def from typing import List x: B @@ -1518,6 +1513,7 @@ main:8: note: Revealed type is "Any" main:9: note: Revealed type is "Any" [case testNewAnalyzerAliasToNotReadyTwoDeferralsFunction] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import List @@ -1534,7 +1530,7 @@ reveal_type(f) # N: Revealed type is "def (x: builtins.list[a.C]) -> builtins.l [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBaseFunction] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases --disable-error-code used-before-def import a [file a.py] from typing import List @@ -1558,11 +1554,11 @@ tmp/a.py:5: error: Cannot resolve name "C" (possible cyclic definition) from typing import List, Union x: A -A = Union[B, C] - class B(List[A]): pass class C(List[A]): pass +A = Union[B, C] + reveal_type(x) # N: Revealed type is "Union[__main__.B, __main__.C]" reveal_type(x[0]) # N: Revealed type is "Union[__main__.B, __main__.C]" [builtins fixtures/list.pyi] @@ -1578,19 +1574,18 @@ reveal_type(func()) # N: Revealed type is "builtins.list[Tuple[b.C, b.C]]" from typing import List, Tuple from a import func -B = List[Tuple[C, C]] - -class C(A): ... class A: ... +class C(A): ... +B = List[Tuple[C, C]] [builtins fixtures/list.pyi] [case testNewAnalyzerListComprehension] from typing import List +class A: pass +class B: pass a: List[A] a = [x for x in a] b: List[B] = [x for x in a] # E: List comprehension has incompatible type List[A]; expected List[B] -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testNewAnalyzerDictionaryComprehension] @@ -1796,23 +1791,26 @@ a.y = 1 # E: Incompatible types in assignment (expression has type "int", varia [case testNewAnalyzerAliasesFixedFew] from typing import List, Generic, TypeVar +T = TypeVar('T') +class C(Generic[T]): + ... +A = List[C] +x: A def func(x: List[C[T]]) -> T: ... 
-x: A -A = List[C] reveal_type(x) # N: Revealed type is "builtins.list[__main__.C[Any]]" reveal_type(func(x)) # N: Revealed type is "Any" -class C(Generic[T]): - ... - -T = TypeVar('T') [builtins fixtures/list.pyi] [case testNewAnalyzerAliasesFixedMany] from typing import List, Generic, TypeVar +T = TypeVar('T') +class C(Generic[T]): + ... + def func(x: List[C[T]]) -> T: ... @@ -1822,9 +1820,7 @@ A = List[C[int, str]] # E: "C" expects 1 type argument, but 2 given reveal_type(x) # N: Revealed type is "builtins.list[__main__.C[Any]]" reveal_type(func(x)) # N: Revealed type is "Any" -class C(Generic[T]): - ... -T = TypeVar('T') + [builtins fixtures/list.pyi] [case testNewAnalyzerBuiltinAliasesFixed] @@ -1973,7 +1969,7 @@ class A: pass class B: pass class C(B): pass -S = TypeVar('S', bound=Tuple[G[A], ...]) +S = TypeVar('S', bound='Tuple[G[A], ...]') class GG(Generic[S]): pass @@ -2060,12 +2056,12 @@ class C(Tuple[int, str]): class Meta(type): x = int() -y = C.x -reveal_type(y) # N: Revealed type is "builtins.int" - class C(metaclass=Meta): pass +y = C.x +reveal_type(y) # N: Revealed type is "builtins.int" + [case testNewAnalyzerFunctionError] def f(x: asdf) -> None: # E: Name "asdf" is not defined pass @@ -2089,7 +2085,7 @@ from typing import NewType, List x: C reveal_type(x[0]) # N: Revealed type is "__main__.C" -C = NewType('C', B) +C = NewType('C', 'B') class B(List[C]): pass @@ -2101,8 +2097,8 @@ from typing import NewType, List x: D reveal_type(x[0]) # N: Revealed type is "__main__.C" +C = NewType('C', 'B') D = C -C = NewType('C', B) class B(List[D]): pass @@ -2114,22 +2110,22 @@ from typing import NewType, List x: D reveal_type(x[0][0]) # N: Revealed type is "__main__.C" -D = C -C = NewType('C', List[B]) +D = C # E: Name "C" is used before definition +C = NewType('C', 'List[B]') class B(List[C]): pass [builtins fixtures/list.pyi] [case testNewAnalyzerNewTypeForwardClassAliasDirect] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases 
--disable-error-code used-before-def from typing import NewType, List x: D reveal_type(x[0][0]) D = List[C] -C = NewType('C', B) +C = NewType('C', 'B') class B(D): pass @@ -2178,9 +2174,9 @@ reveal_type(x) # N: Revealed type is "__main__.G[Tuple[builtins.int, fallback=_ [case testNewAnalyzerDuplicateTypeVar] from typing import TypeVar, Generic, Any -T = TypeVar('T', bound=B[Any]) +T = TypeVar('T', bound='B[Any]') # The "int" error is because of typing fixture. -T = TypeVar('T', bound=C) # E: Cannot redefine "T" as a type variable \ +T = TypeVar('T', bound='C') # E: Cannot redefine "T" as a type variable \ # E: Invalid assignment target \ # E: "int" not callable @@ -2193,6 +2189,7 @@ y: B[B[Any]] reveal_type(y.x) # N: Revealed type is "__main__.B[Any]" [case testNewAnalyzerDuplicateTypeVarImportCycle] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import TypeVar, Any @@ -2220,6 +2217,7 @@ tmp/a.py:5: error: Invalid assignment target tmp/a.py:5: error: "int" not callable [case testNewAnalyzerDuplicateTypeVarImportCycleWithAliases] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import TypeVar, Any @@ -2313,6 +2311,7 @@ C = NamedTuple('C', [('x', int)]) [builtins fixtures/tuple.pyi] [case testNewAnalyzerApplicationForward1] +# flags: --disable-error-code used-before-def from typing import Generic, TypeVar x = C[int]() @@ -2335,15 +2334,14 @@ class A: ... [case testNewAnalyzerApplicationForward3] from typing import Generic, TypeVar -x = C[A]() -reveal_type(x) # N: Revealed type is "__main__.C[__main__.A]" - +class A: ... T = TypeVar('T') class C(Generic[T]): ... - -class A: ... 
+x = C[A]() +reveal_type(x) # N: Revealed type is "__main__.C[__main__.A]" [case testNewAnalyzerApplicationForward4] +# flags: --disable-error-code used-before-def from typing import Generic, TypeVar x = C[A]() # E: Value of type variable "T" of "C" cannot be "A" @@ -2474,6 +2472,9 @@ else: y() # E: "str" not callable [case testNewAnalyzerFirstAliasTargetWins] +class DesiredTarget: + attr: int + if int(): Alias = DesiredTarget else: @@ -2483,12 +2484,8 @@ else: x: Alias reveal_type(x.attr) # N: Revealed type is "builtins.int" - -class DesiredTarget: - attr: int - [case testNewAnalyzerFirstVarDefinitionWins] -x = y +x = y # E: Name "y" is used before definition x = 1 # We want to check that the first definition creates the variable. diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index a5e6cefc2af0..4209f4ec9164 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -25,6 +25,9 @@ main:6: error: Name "f" already defined on line 2 [case testTypeCheckOverloadWithImplementation] from typing import overload, Any +class A: pass +class B: pass + @overload def f(x: 'A') -> 'B': ... @overload @@ -35,14 +38,14 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testTypingExtensionsOverload] from typing import Any from typing_extensions import overload +class A: pass +class B: pass + @overload def f(x: 'A') -> 'B': ... 
@overload @@ -53,13 +56,14 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadNeedsImplementation] from typing import overload, Any + +class A: pass +class B: pass + @overload # E: An overloaded function outside a stub file must have an implementation def f(x: 'A') -> 'B': ... @overload @@ -67,9 +71,6 @@ def f(x: 'B') -> 'A': ... reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testSingleOverloadNoImplementation] @@ -84,6 +85,9 @@ class B: pass [case testOverloadByAnyOtherName] from typing import overload as rose from typing import Any +class A: pass +class B: pass + @rose def f(x: 'A') -> 'B': ... @rose @@ -94,14 +98,14 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithDecoratedImplementation] from typing import overload, Any +class A: pass +class B: pass + def deco(fun): ... @overload @@ -115,9 +119,6 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadDecoratedImplementationNotLast] @@ -174,6 +175,9 @@ class B: pass [case testTypeCheckOverloadWithImplementationError] from typing import overload, Any +class A: pass +class B: pass + @overload def f(x: 'A') -> 'B': ... 
@overload @@ -196,9 +200,6 @@ def g(x): reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithUntypedImplAndMultipleVariants] diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 6af596fc1feb..463ba3e65466 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -429,7 +429,6 @@ class Z(Generic[P]): ... # literals can be applied n: Z[[int]] -# TODO: type aliases too nt1 = Z[[int]] nt2: TypeAlias = Z[[int]] @@ -506,8 +505,7 @@ def f2(x: X[int, Concatenate[int, P_2]]) -> str: ... # Accepted def f3(x: X[int, [int, bool]]) -> str: ... # Accepted # ellipsis only show up here, but I can assume it works like Callable[..., R] def f4(x: X[int, ...]) -> str: ... # Accepted -# TODO: this is not rejected: -# def f5(x: X[int, int]) -> str: ... # Rejected +def f5(x: X[int, int]) -> str: ... # E: Can only replace ParamSpec with a parameter types list or another ParamSpec, got "int" # CASE 3 def bar(x: int, *args: bool) -> int: ... @@ -844,9 +842,7 @@ class A: ... reveal_type(A.func) # N: Revealed type is "def [_P, _R] (self: __main__.A, action: def (*_P.args, **_P.kwargs) -> _R`-2, *_P.args, **_P.kwargs) -> _R`-2" - -# TODO: _R` keeps flip-flopping between 5 (?), 13, 14, 15. Spooky. -# reveal_type(A().func) $ N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`13, *_P.args, **_P.kwargs) -> _R`13" +reveal_type(A().func) # N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`5, *_P.args, **_P.kwargs) -> _R`5" def f(x: int) -> int: ... @@ -879,8 +875,7 @@ class A: ... reveal_type(A.func) # N: Revealed type is "def [_P] (self: __main__.A, action: __main__.Job[_P`-1, None]) -> __main__.Job[_P`-1, None]" -# TODO: flakey, _P`4 alternates around. 
-# reveal_type(A().func) $ N: Revealed type is "def [_P] (action: __main__.Job[_P`4, None]) -> __main__.Job[_P`4, None]" +reveal_type(A().func) # N: Revealed type is "def [_P] (action: __main__.Job[_P`3, None]) -> __main__.Job[_P`3, None]" reveal_type(A().func(Job(lambda x: x))) # N: Revealed type is "__main__.Job[[x: Any], None]" def f(x: int, y: int) -> None: ... @@ -1166,3 +1161,298 @@ def func3(callback: Callable[P1, str]) -> Callable[P1, str]: return "foo" return inner [builtins fixtures/paramspec.pyi] + + +[case testInvalidParamSpecDefinitionsWithArgsKwargs] +from typing import Callable, ParamSpec + +P = ParamSpec('P') + +def c1(f: Callable[P, int], *args: P.args, **kwargs: P.kwargs) -> int: ... +def c2(f: Callable[P, int]) -> int: ... +def c3(f: Callable[P, int], *args, **kwargs) -> int: ... + +# It is ok to define, +def c4(f: Callable[P, int], *args: int, **kwargs: str) -> int: + # but not ok to call: + f(*args, **kwargs) # E: Argument 1 has incompatible type "*Tuple[int, ...]"; expected "P.args" \ + # E: Argument 2 has incompatible type "**Dict[str, str]"; expected "P.kwargs" + return 1 + +def f1(f: Callable[P, int], *args, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f2(f: Callable[P, int], *args: P.args, **kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f3(f: Callable[P, int], *args: P.args) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f4(f: Callable[P, int], **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f5(f: Callable[P, int], *args: P.args, extra_keyword_arg: int, **kwargs: P.kwargs) -> int: ... 
# E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" + +# Error message test: +P1 = ParamSpec('P1') + +def m1(f: Callable[P1, int], *a, **k: P1.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +[builtins fixtures/paramspec.pyi] + + +[case testInvalidParamSpecAndConcatenateDefinitionsWithArgsKwargs] +from typing import Callable, ParamSpec +from typing_extensions import Concatenate + +P = ParamSpec('P') + +def c1(f: Callable[Concatenate[int, P], int], *args: P.args, **kwargs: P.kwargs) -> int: ... +def c2(f: Callable[Concatenate[int, P], int]) -> int: ... +def c3(f: Callable[Concatenate[int, P], int], *args, **kwargs) -> int: ... + +# It is ok to define, +def c4(f: Callable[Concatenate[int, P], int], *args: int, **kwargs: str) -> int: + # but not ok to call: + f(1, *args, **kwargs) # E: Argument 2 has incompatible type "*Tuple[int, ...]"; expected "P.args" \ + # E: Argument 3 has incompatible type "**Dict[str, str]"; expected "P.kwargs" + return 1 + +def f1(f: Callable[Concatenate[int, P], int], *args, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f2(f: Callable[Concatenate[int, P], int], *args: P.args, **kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f3(f: Callable[Concatenate[int, P], int], *args: P.args) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f4(f: Callable[Concatenate[int, P], int], **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f5(f: Callable[Concatenate[int, P], int], *args: P.args, extra_keyword_arg: int, **kwargs: P.kwargs) -> int: ... 
# E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +[builtins fixtures/paramspec.pyi] + + +[case testValidParamSpecInsideGenericWithoutArgsAndKwargs] +from typing import Callable, ParamSpec, Generic +from typing_extensions import Concatenate + +P = ParamSpec('P') + +class Some(Generic[P]): ... + +def create(s: Some[P], *args: int): ... +def update(s: Some[P], **kwargs: int): ... +def delete(s: Some[P]): ... + +def from_callable1(c: Callable[P, int], *args: int, **kwargs: int) -> Some[P]: ... +def from_callable2(c: Callable[P, int], **kwargs: int) -> Some[P]: ... +def from_callable3(c: Callable[P, int], *args: int) -> Some[P]: ... + +def from_extra1(c: Callable[Concatenate[int, P], int], *args: int, **kwargs: int) -> Some[P]: ... +def from_extra2(c: Callable[Concatenate[int, P], int], **kwargs: int) -> Some[P]: ... +def from_extra3(c: Callable[Concatenate[int, P], int], *args: int) -> Some[P]: ... +[builtins fixtures/paramspec.pyi] + + +[case testUnboundParamSpec] +from typing import Callable, ParamSpec + +P1 = ParamSpec('P1') +P2 = ParamSpec('P2') + +def f0(f: Callable[P1, int], *args: P1.args, **kwargs: P2.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" + +def f1(*args: P1.args): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f2(**kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f3(*args: P1.args, **kwargs: int): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f4(*args: int, **kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" + +# Error message is based on the `args` definition: +def f5(*args: P2.args, **kwargs: P1.kwargs): ... 
# E: ParamSpec must have "*args" typed as "P2.args" and "**kwargs" typed as "P2.kwargs" +def f6(*args: P1.args, **kwargs: P2.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" + +# Multiple `ParamSpec` variables can be found, they should not affect error message: +P3 = ParamSpec('P3') + +def f7(first: Callable[P3, int], *args: P1.args, **kwargs: P2.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f8(first: Callable[P3, int], *args: P2.args, **kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P2.args" and "**kwargs" typed as "P2.kwargs" +[builtins fixtures/paramspec.pyi] + + +[case testArgsKwargsWithoutParamSpecVar] +from typing import Generic, Callable, ParamSpec + +P = ParamSpec('P') + +# This must be allowed: +class Some(Generic[P]): + def call(self, *args: P.args, **kwargs: P.kwargs): ... + +# TODO: this probably should be reported. +def call(*args: P.args, **kwargs: P.kwargs): ... +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInferenceCrash] +from typing import Callable, Generic, ParamSpec, TypeVar + +def foo(x: int) -> int: ... +T = TypeVar("T") +def bar(x: T) -> T: ... + +P = ParamSpec("P") + +class C(Generic[P]): + def __init__(self, fn: Callable[P, int], *args: P.args, **kwargs: P.kwargs): ... 
+ +reveal_type(bar(C(fn=foo, x=1))) # N: Revealed type is "__main__.C[[x: builtins.int]]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecClassConstructor] +# flags: --strict-optional +from typing import ParamSpec, Callable + +P = ParamSpec("P") + +class SomeClass: + def __init__(self, a: str) -> None: + pass + +def func(t: Callable[P, SomeClass], val: Callable[P, SomeClass]) -> None: + pass + +def constructor(a: str) -> SomeClass: + return SomeClass(a) + +def wrong_constructor(a: bool) -> SomeClass: + return SomeClass("a") + +func(SomeClass, constructor) +func(SomeClass, wrong_constructor) # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[VarArg(), KwArg()], SomeClass]" \ + # E: Argument 2 to "func" has incompatible type "Callable[[bool], SomeClass]"; expected "Callable[[VarArg(), KwArg()], SomeClass]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInTypeAliasBasic] +from typing import ParamSpec, Callable + +P = ParamSpec("P") +C = Callable[P, int] +def f(n: C[P]) -> C[P]: ... + +@f +def bar(x: int) -> int: ... +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], str]"; expected "Callable[[int], int]" +def foo(x: int) -> str: ... + +x: C[[int, str]] +reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.str) -> builtins.int" +y: C[int, str] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.str) -> builtins.int" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInTypeAliasConcatenate] +from typing import ParamSpec, Callable +from typing_extensions import Concatenate + +P = ParamSpec("P") +C = Callable[Concatenate[int, P], int] +def f(n: C[P]) -> C[P]: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[], int]"; expected "Callable[[int], int]" +def bad() -> int: ... + +@f +def bar(x: int) -> int: ... + +@f +def bar2(x: int, y: str) -> int: ... 
+reveal_type(bar2) # N: Revealed type is "def (builtins.int, y: builtins.str) -> builtins.int" + +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], str]"; expected "Callable[[int], int]" \ + # N: This is likely because "foo" has named arguments: "x". Consider marking them positional-only +def foo(x: int) -> str: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[str, int], int]"; expected "Callable[[int, int], int]" \ + # N: This is likely because "foo2" has named arguments: "x". Consider marking them positional-only +def foo2(x: str, y: int) -> int: ... + +x: C[[int, str]] +reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.int, builtins.str) -> builtins.int" +y: C[int, str] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.int, builtins.str) -> builtins.int" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInTypeAliasRecursive] +from typing import ParamSpec, Callable, Union + +P = ParamSpec("P") +C = Callable[P, Union[int, C[P]]] +def f(n: C[P]) -> C[P]: ... + +@f +def bar(x: int) -> int: ... + +@f +def bar2(__x: int) -> Callable[[int], int]: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], str]"; expected "C[[int]]" +def foo(x: int) -> str: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], Callable[[int], str]]"; expected "C[[int]]" +def foo2(__x: int) -> Callable[[int], str]: ... + +x: C[[int, str]] +reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.str) -> Union[builtins.int, ...]" +y: C[int, str] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.str) -> Union[builtins.int, ...]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecAliasInRuntimeContext] +from typing import ParamSpec, Generic + +P = ParamSpec("P") +class C(Generic[P]): ... 
+ +c = C[int, str]() +reveal_type(c) # N: Revealed type is "__main__.C[[builtins.int, builtins.str]]" + +A = C[P] +a = A[int, str]() +reveal_type(a) # N: Revealed type is "__main__.C[[builtins.int, builtins.str]]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecAliasInvalidLocations] +from typing import ParamSpec, Generic, List, TypeVar, Callable + +P = ParamSpec("P") +T = TypeVar("T") +A = List[T] +def f(x: A[[int, str]]) -> None: ... # E: Bracketed expression "[...]" is not valid as a type \ + # N: Did you mean "List[...]"? +def g(x: A[P]) -> None: ... # E: Invalid location for ParamSpec "P" \ + # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' + +C = Callable[P, T] +x: C[int] # E: Bad number of arguments for type alias, expected: 2, given: 1 +y: C[int, str] # E: Can only replace ParamSpec with a parameter types list or another ParamSpec, got "int" +z: C[int, str, bytes] # E: Bad number of arguments for type alias, expected: 2, given: 3 +[builtins fixtures/paramspec.pyi] + +[case testTrivialParametersHandledCorrectly] +from typing import ParamSpec, Generic, TypeVar, Callable, Any +from typing_extensions import Concatenate + +P = ParamSpec("P") +T = TypeVar("T") +S = TypeVar("S") + +class C(Generic[S, P, T]): ... + +def foo(f: Callable[P, int]) -> None: + x: C[Any, ..., Any] + x1: C[int, Concatenate[int, str, P], str] + x = x1 # OK +[builtins fixtures/paramspec.pyi] + +[case testParamSpecAliasNested] +from typing import ParamSpec, Callable, List, TypeVar, Generic +from typing_extensions import Concatenate + +P = ParamSpec("P") +A = List[Callable[P, None]] +B = List[Callable[Concatenate[int, P], None]] + +fs: A[int, str] +reveal_type(fs) # N: Revealed type is "builtins.list[def (builtins.int, builtins.str)]" +gs: B[int, str] +reveal_type(gs) # N: Revealed type is "builtins.list[def (builtins.int, builtins.int, builtins.str)]" + +T = TypeVar("T") +class C(Generic[T]): ... 
+C[Callable[P, int]]() # E: The first argument to Callable must be a list of types, parameter specification, or "..." \ + # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas +[builtins fixtures/paramspec.pyi] diff --git a/test-data/unit/check-partially-defined.test b/test-data/unit/check-partially-defined.test deleted file mode 100644 index 6bb5a65232eb..000000000000 --- a/test-data/unit/check-partially-defined.test +++ /dev/null @@ -1,362 +0,0 @@ -[case testDefinedInOneBranch] -# flags: --enable-error-code partially-defined -if int(): - a = 1 -else: - x = 2 -z = a + 1 # E: Name "a" may be undefined -z = a + 1 # We only report the error on first occurrence. - -[case testElif] -# flags: --enable-error-code partially-defined -if int(): - a = 1 -elif int(): - a = 2 -else: - x = 3 - -z = a + 1 # E: Name "a" may be undefined - -[case testDefinedInAllBranches] -# flags: --enable-error-code partially-defined -if int(): - a = 1 -elif int(): - a = 2 -else: - a = 3 -z = a + 1 - -[case testOmittedElse] -# flags: --enable-error-code partially-defined -if int(): - a = 1 -z = a + 1 # E: Name "a" may be undefined - -[case testUpdatedInIf] -# flags: --enable-error-code partially-defined -# Variable a is already defined. Just updating it in an "if" is acceptable. 
-a = 1 -if int(): - a = 2 -z = a + 1 - -[case testNestedIf] -# flags: --enable-error-code partially-defined -if int(): - if int(): - a = 1 - x = 1 - x = x + 1 - else: - a = 2 - b = a + x # E: Name "x" may be undefined - b = b + 1 -else: - b = 2 -z = a + b # E: Name "a" may be undefined - -[case testVeryNestedIf] -# flags: --enable-error-code partially-defined -if int(): - if int(): - if int(): - a = 1 - else: - a = 2 - x = a - else: - a = 2 - b = a -else: - b = 2 -z = a + b # E: Name "a" may be undefined - -[case testTupleUnpack] -# flags: --enable-error-code partially-defined - -if int(): - (x, y) = (1, 2) -else: - [y, z] = [1, 2] -a = y + x # E: Name "x" may be undefined -a = y + z # E: Name "z" may be undefined - -[case testRedefined] -# flags: --enable-error-code partially-defined -y = 3 -if int(): - if int(): - y = 2 - x = y + 2 -else: - if int(): - y = 2 - x = y + 2 - -x = y + 2 - -[case testGenerator] -# flags: --enable-error-code partially-defined -if int(): - a = 3 -s = [a + 1 for a in [1, 2, 3]] -x = a # E: Name "a" may be undefined - -[case testScope] -# flags: --enable-error-code partially-defined -def foo() -> None: - if int(): - y = 2 - -if int(): - y = 3 -x = y # E: Name "y" may be undefined - -[case testFuncParams] -# flags: --enable-error-code partially-defined -def foo(a: int) -> None: - if int(): - a = 2 - x = a - -[case testWhile] -# flags: --enable-error-code partially-defined -while int(): - x = 1 - -y = x # E: Name "x" may be undefined - -while int(): - z = 1 -else: - z = 2 - -y = z # No error. - -while True: - k = 1 - if int(): - break -y = k # No error. 
- -[case testForLoop] -# flags: --enable-error-code partially-defined -for x in [1, 2, 3]: - if x: - x = 1 - y = x - z = 1 -else: - z = 2 - -a = z + y # E: Name "y" may be undefined - -[case testReturn] -# flags: --enable-error-code partially-defined -def f1() -> int: - if int(): - x = 1 - else: - return 0 - return x - -def f2() -> int: - if int(): - x = 1 - elif int(): - return 0 - else: - x = 2 - return x - -def f3() -> int: - if int(): - x = 1 - elif int(): - return 0 - else: - y = 2 - return x # E: Name "x" may be undefined - -def f4() -> int: - if int(): - x = 1 - elif int(): - return 0 - else: - return 0 - return x - -def f5() -> int: - # This is a test against crashes. - if int(): - return 1 - if int(): - return 2 - else: - return 3 - return 1 - -[case testAssert] -# flags: --enable-error-code partially-defined -def f1() -> int: - if int(): - x = 1 - else: - assert False, "something something" - return x - -def f2() -> int: - if int(): - x = 1 - elif int(): - assert False - else: - y = 2 - return x # E: Name "x" may be undefined - -[case testRaise] -# flags: --enable-error-code partially-defined -def f1() -> int: - if int(): - x = 1 - else: - raise BaseException("something something") - return x - -def f2() -> int: - if int(): - x = 1 - elif int(): - raise BaseException("something something") - else: - y = 2 - return x # E: Name "x" may be undefined -[builtins fixtures/exception.pyi] - -[case testContinue] -# flags: --enable-error-code partially-defined -def f1() -> int: - while int(): - if int(): - x = 1 - else: - continue - y = x - else: - x = 2 - return x - -def f2() -> int: - while int(): - if int(): - x = 1 - z = 1 - elif int(): - pass - else: - continue - y = x # E: Name "x" may be undefined - else: - x = 2 - z = 2 - return z # E: Name "z" may be undefined - -def f3() -> None: - while True: - if int(): - x = 2 - elif int(): - continue - else: - continue - y = x - -[case testBreak] -# flags: --enable-error-code partially-defined -def f1() -> None: - 
while int(): - if int(): - x = 1 - else: - break - y = x # No error -- x is always defined. - -def f2() -> None: - while int(): - if int(): - x = 1 - elif int(): - pass - else: - break - y = x # E: Name "x" may be undefined - -def f3() -> None: - while int(): - x = 1 - while int(): - if int(): - x = 2 - else: - break - y = x - z = x # E: Name "x" may be undefined - -[case testNoReturn] -# flags: --enable-error-code partially-defined - -from typing import NoReturn -def fail() -> NoReturn: - assert False - -def f() -> None: - if int(): - x = 1 - elif int(): - x = 2 - y = 3 - else: - # This has a NoReturn type, so we can skip it. - fail() - z = y # E: Name "y" may be undefined - z = x - -[case testDictComprehension] -# flags: --enable-error-code partially-defined - -def f() -> None: - for _ in [1, 2]: - key = 2 - val = 2 - - x = ( - key, # E: Name "key" may be undefined - val, # E: Name "val" may be undefined - ) - - d = [(0, "a"), (1, "b")] - {val: key for key, val in d} -[builtins fixtures/dict.pyi] - -[case testWithStmt] -# flags: --enable-error-code partially-defined -from contextlib import contextmanager - -@contextmanager -def ctx(*args): - yield 1 - -def f() -> None: - if int(): - a = b = 1 - x = 1 - - with ctx() as a, ctx(a) as b, ctx(x) as x: # E: Name "x" may be undefined - c = a - c = b - d = a - d = b -[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-possibly-undefined.test b/test-data/unit/check-possibly-undefined.test new file mode 100644 index 000000000000..29c4868e97af --- /dev/null +++ b/test-data/unit/check-possibly-undefined.test @@ -0,0 +1,970 @@ +[case testDefinedInOneBranch] +# flags: --enable-error-code possibly-undefined +if int(): + a = 1 +else: + x = 2 +z = a + 1 # E: Name "a" may be undefined +z = a + 1 # We only report the error on first occurrence. 
+ +[case testElif] +# flags: --enable-error-code possibly-undefined +if int(): + a = 1 +elif int(): + a = 2 +else: + x = 3 + +z = a + 1 # E: Name "a" may be undefined + +[case testUsedInIf] +# flags: --enable-error-code possibly-undefined +if int(): + y = 1 +if int(): + x = y # E: Name "y" may be undefined + +[case testDefinedInAllBranches] +# flags: --enable-error-code possibly-undefined +if int(): + a = 1 +elif int(): + a = 2 +else: + a = 3 +z = a + 1 + +[case testOmittedElse] +# flags: --enable-error-code possibly-undefined +if int(): + a = 1 +z = a + 1 # E: Name "a" may be undefined + +[case testUpdatedInIf] +# flags: --enable-error-code possibly-undefined +# Variable a is already defined. Just updating it in an "if" is acceptable. +a = 1 +if int(): + a = 2 +z = a + 1 + +[case testNestedIf] +# flags: --enable-error-code possibly-undefined +if int(): + if int(): + a = 1 + x = 1 + x = x + 1 + else: + a = 2 + b = a + x # E: Name "x" may be undefined + b = b + 1 +else: + b = 2 +z = a + b # E: Name "a" may be undefined + +[case testVeryNestedIf] +# flags: --enable-error-code possibly-undefined +if int(): + if int(): + if int(): + a = 1 + else: + a = 2 + x = a + else: + a = 2 + b = a +else: + b = 2 +z = a + b # E: Name "a" may be undefined + +[case testTupleUnpack] +# flags: --enable-error-code possibly-undefined + +if int(): + (x, y) = (1, 2) +else: + [y, z] = [1, 2] +a = y + x # E: Name "x" may be undefined +a = y + z # E: Name "z" may be undefined + +[case testIndexExpr] +# flags: --enable-error-code possibly-undefined + +if int(): + *x, y = (1, 2) +else: + x = [1, 2] +a = x # No error. 
+b = y # E: Name "y" may be undefined + +[case testRedefined] +# flags: --enable-error-code possibly-undefined +y = 3 +if int(): + if int(): + y = 2 + x = y + 2 +else: + if int(): + y = 2 + x = y + 2 + +x = y + 2 + +[case testFunction] +# flags: --enable-error-code possibly-undefined +def f0() -> None: + if int(): + def some_func() -> None: + pass + + some_func() # E: Name "some_func" may be undefined + +def f1() -> None: + if int(): + def some_func() -> None: + pass + else: + def some_func() -> None: + pass + + some_func() # No error. + +[case testLambda] +# flags: --enable-error-code possibly-undefined +def f0(b: bool) -> None: + if b: + fn = lambda: 2 + y = fn # E: Name "fn" may be undefined + +[case testUsedBeforeDefClass] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f(x: A): # No error here. + pass +y = A() # E: Name "A" is used before definition +class A: pass + +[case testClassScope] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +class C: + x = 0 + def f0(self) -> None: pass + + def f2(self) -> None: + f0() # No error. + self.f0() # No error. 
+ +f0() # E: Name "f0" is used before definition +def f0() -> None: pass +y = x # E: Name "x" is used before definition +x = 1 + +[case testClassInsideFunction] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f() -> None: + class C: pass + +c = C() # E: Name "C" is used before definition +class C: pass + +[case testUsedBeforeDefFunc] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +foo() # E: Name "foo" is used before definition +def foo(): pass +[case testGenerator] +# flags: --enable-error-code possibly-undefined +if int(): + a = 3 +s = [a + 1 for a in [1, 2, 3]] +x = a # E: Name "a" may be undefined + +[case testScope] +# flags: --enable-error-code possibly-undefined +def foo() -> None: + if int(): + y = 2 + +if int(): + y = 3 +x = y # E: Name "y" may be undefined + +[case testVarDefinedInOuterScopeUpdated] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f0() -> None: + global x + y = x + x = 1 # No error. + +x = 2 + +[case testNonlocalVar] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f0() -> None: + x = 2 + + def inner() -> None: + nonlocal x + y = x + x = 1 # No error. + + +[case testGlobalDeclarationAfterUsage] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f0() -> None: + y = x # E: Name "x" is used before definition + global x + x = 1 # No error. + +x = 2 +[case testVarDefinedInOuterScope] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f0() -> None: + global x + y = x # We do not detect such errors right now. 
+ +f0() +x = 1 +[case testDefinedInOuterScopeNoError] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def foo() -> None: + bar() + +def bar() -> None: + foo() +[case testFuncParams] +# flags: --enable-error-code possibly-undefined +def foo(a: int) -> None: + if int(): + a = 2 + x = a + +[case testWhile] +# flags: --enable-error-code possibly-undefined +while int(): + a = 1 + +x = a # E: Name "a" may be undefined + +while int(): + b = 1 +else: + b = 2 + +y = b # No error. + +while True: + c = 1 + if int(): + break +y = c # No error. + +# This while loop doesn't have a `break` inside, so we know that the else must always get executed. +while int(): + pass +else: + d = 1 +y = d # No error. + +while int(): + if int(): + break +else: + e = 1 +# If a while loop has a `break`, it's possible that the else didn't get executed. +y = e # E: Name "e" may be undefined + +while int(): + while int(): + if int(): + break + else: + f = 1 +else: + g = 2 + +y = f # E: Name "f" may be undefined +y = g + +[case testForLoop] +# flags: --enable-error-code possibly-undefined +for x in [1, 2, 3]: + if x: + x = 1 + y = x +else: + z = 2 + +a = z + y # E: Name "y" may be undefined + +[case testReturn] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + if int(): + x = 1 + else: + return 0 + return x + +def f2() -> int: + if int(): + x = 1 + elif int(): + return 0 + else: + x = 2 + return x + +def f3() -> int: + if int(): + x = 1 + elif int(): + return 0 + else: + y = 2 + return x # E: Name "x" may be undefined + +def f4() -> int: + if int(): + x = 1 + elif int(): + return 0 + else: + return 0 + return x + +def f5() -> int: + # This is a test against crashes. 
+ if int(): + return 1 + if int(): + return 2 + else: + return 3 + return 1 + +def f6() -> int: + if int(): + x = 0 + return x + return x # E: Name "x" may be undefined + +[case testDefinedDifferentBranchUsedBeforeDef] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def + +def f0() -> None: + if int(): + x = 0 + else: + y = x # E: Name "x" is used before definition + z = x # E: Name "x" is used before definition + +def f1() -> None: + x = 1 + if int(): + x = 0 + else: + y = x # No error. + +def f2() -> None: + if int(): + x = 0 + elif int(): + y = x # E: Name "x" is used before definition + else: + y = x # E: Name "x" is used before definition + if int(): + z = x # E: Name "x" is used before definition + x = 1 + else: + x = 2 + w = x # No error. + +[case testPossiblyUndefinedLoop] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def + +def f0() -> None: + first_iter = True + for i in [0, 1]: + if first_iter: + first_iter = False + x = 0 + elif int(): + # This is technically a false positive but mypy isn't smart enough for this yet. + y = x # E: Name "x" may be undefined + else: + y = x # E: Name "x" may be undefined + if int(): + z = x # E: Name "x" may be undefined + x = 1 + else: + x = 2 + w = x # No error. 
+ +def f1() -> None: + while True: + if int(): + x = 0 + else: + y = x # E: Name "x" may be undefined + z = x # E: Name "x" may be undefined + +def f2() -> None: + for i in [0, 1]: + x = i + else: + y = x # E: Name "x" may be undefined + +def f3() -> None: + while int(): + x = 1 + else: + y = x # E: Name "x" may be undefined + +def f4() -> None: + while int(): + y = x # E: Name "x" may be undefined + x: int = 1 + +[case testAssert] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + if int(): + x = 1 + else: + assert False, "something something" + return x + +def f2() -> int: + if int(): + x = 1 + elif int(): + assert False + else: + y = 2 + return x # E: Name "x" may be undefined + +[case testRaise] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + if int(): + x = 1 + else: + raise BaseException("something something") + return x + +def f2() -> int: + if int(): + x = 1 + elif int(): + raise BaseException("something something") + else: + y = 2 + return x # E: Name "x" may be undefined +[builtins fixtures/exception.pyi] + +[case testContinue] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + while int(): + if int(): + x = 1 + else: + continue + y = x + else: + x = 2 + return x + +def f2() -> int: + while int(): + if int(): + x = 1 + elif int(): + pass + else: + continue + y = x # E: Name "x" may be undefined + return x # E: Name "x" may be undefined + +def f3() -> None: + while True: + if int(): + x = 2 + elif int(): + continue + else: + continue + y = x + +[case testBreak] +# flags: --enable-error-code possibly-undefined +def f1() -> None: + while int(): + if int(): + x = 1 + else: + break + y = x # No error -- x is always defined. 
+ +def f2() -> None: + while int(): + if int(): + x = 1 + elif int(): + pass + else: + break + y = x # E: Name "x" may be undefined + +def f3() -> None: + while int(): + x = 1 + while int(): + if int(): + x = 2 + else: + break + y = x + z = x # E: Name "x" may be undefined + +[case testTryBasic] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f1() -> int: + try: + x = 1 + except: + pass + return x # E: Name "x" may be undefined + +def f2() -> int: + try: + pass + except: + x = 1 + return x # E: Name "x" may be undefined + +def f3() -> int: + try: + x = 1 + except: + y = x # E: Name "x" may be undefined + return x # E: Name "x" may be undefined + +def f4() -> int: + try: + x = 1 + except: + return 0 + return x + +def f5() -> int: + try: + x = 1 + except: + raise + return x + +def f6() -> None: + try: + pass + except BaseException as exc: + x = exc # No error. + exc = BaseException() + # This case is covered by the other check, not by possibly undefined check. 
+ y = exc # E: Trying to read deleted variable "exc" + +def f7() -> int: + try: + if int(): + x = 1 + assert False + except: + pass + return x # E: Name "x" may be undefined +[builtins fixtures/exception.pyi] + +[case testTryMultiExcept] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + try: + x = 1 + except BaseException: + x = 2 + except: + x = 3 + return x + +def f2() -> int: + try: + x = 1 + except BaseException: + pass + except: + x = 3 + return x # E: Name "x" may be undefined +[builtins fixtures/exception.pyi] + +[case testTryFinally] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f1() -> int: + try: + x = 1 + finally: + x = 2 + return x + +def f2() -> int: + try: + pass + except: + pass + finally: + x = 2 + return x + +def f3() -> int: + try: + x = 1 + except: + pass + finally: + y = x # E: Name "x" may be undefined + return x + +def f4() -> int: + try: + x = 0 + except BaseException: + raise + finally: + y = x # E: Name "x" may be undefined + return y + +def f5() -> int: + try: + if int(): + x = 1 + else: + return 0 + finally: + pass + return x # No error. 
+ +def f6() -> int: + try: + if int(): + x = 1 + else: + return 0 + finally: + a = x # E: Name "x" may be undefined + return a +[builtins fixtures/exception.pyi] + +[case testTryElse] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + try: + return 0 + except BaseException: + x = 1 + else: + x = 2 + finally: + y = x + return y + +def f2() -> int: + try: + pass + except: + x = 1 + else: + x = 2 + return x + +def f3() -> int: + try: + pass + except: + x = 1 + else: + pass + return x # E: Name "x" may be undefined + +def f4() -> int: + try: + x = 1 + except: + x = 2 + else: + pass + return x + +def f5() -> int: + try: + pass + except: + x = 1 + else: + return 1 + return x +[builtins fixtures/exception.pyi] + +[case testNoReturn] +# flags: --enable-error-code possibly-undefined + +from typing import NoReturn +def fail() -> NoReturn: + assert False + +def f() -> None: + if int(): + x = 1 + elif int(): + x = 2 + y = 3 + else: + # This has a NoReturn type, so we can skip it. + fail() + z = y # E: Name "y" may be undefined + z = x + +[case testDictComprehension] +# flags: --enable-error-code possibly-undefined + +def f() -> None: + for _ in [1, 2]: + key = 2 + val = 2 + + x = ( + key, # E: Name "key" may be undefined + val, # E: Name "val" may be undefined + ) + + d = [(0, "a"), (1, "b")] + {val: key for key, val in d} +[builtins fixtures/dict.pyi] + +[case testWithStmt] +# flags: --enable-error-code possibly-undefined +from contextlib import contextmanager + +@contextmanager +def ctx(*args): + yield 1 + +def f() -> None: + if int(): + a = b = 1 + x = 1 + + with ctx() as a, ctx(a) as b, ctx(x) as x: # E: Name "x" may be undefined + c = a + c = b + d = a + d = b +[builtins fixtures/tuple.pyi] + +[case testUnreachable] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +import typing + +def f0() -> None: + if typing.TYPE_CHECKING: + x = 1 + elif int(): + y = 1 + else: + y = 2 + a = x + +def f1() -> None: + if not 
typing.TYPE_CHECKING: + pass + else: + z = 1 + a = z + +def f2() -> None: + if typing.TYPE_CHECKING: + x = 1 + else: + y = x +[typing fixtures/typing-medium.pyi] + +[case testUsedBeforeDef] +# flags: --enable-error-code used-before-def + +def f0() -> None: + x = y # E: Name "y" is used before definition + y: int = 1 + +def f2() -> None: + if int(): + pass + else: + # No used-before-def error. + y = z # E: Name "z" is not defined + + def inner2() -> None: + z = 0 + +def f3() -> None: + if int(): + pass + else: + y = z # E: Name "z" is used before definition + z: int = 2 + +def f4() -> None: + if int(): + pass + else: + y = z # E: Name "z" is used before definition + x = z # E: Name "z" is used before definition + z: int = 2 + +[case testUsedBeforeDefImportsBasic] +# flags: --enable-error-code used-before-def +import foo # type: ignore +import x.y # type: ignore + +def f0() -> None: + a = foo # No error. + foo: int = 1 + +def f1() -> None: + a = y # E: Name "y" is used before definition + y: int = 1 + +def f2() -> None: + a = x # No error. + x: int = 1 + +def f3() -> None: + a = x.y # No error. + x: int = 1 + +[case testUsedBeforeDefImportBasicRename] +# flags: --enable-error-code used-before-def +import x.y as z # type: ignore +from typing import Any + +def f0() -> None: + a = z # No error. + z: int = 1 + +def f1() -> None: + a = x # E: Name "x" is used before definition + x: int = 1 + +def f2() -> None: + a = x.y # E: Name "x" is used before definition + x: Any = 1 + +def f3() -> None: + a = y # E: Name "y" is used before definition + y: int = 1 + +[case testUsedBeforeDefImportFrom] +# flags: --enable-error-code used-before-def +from foo import x # type: ignore + +def f0() -> None: + a = x # No error. + x: int = 1 + +[case testUsedBeforeDefImportFromRename] +# flags: --enable-error-code used-before-def +from foo import x as y # type: ignore + +def f0() -> None: + a = y # No error. 
+ y: int = 1 + +def f1() -> None: + a = x # E: Name "x" is used before definition + x: int = 1 + +[case testUsedBeforeDefFunctionDeclarations] +# flags: --enable-error-code used-before-def + +def f0() -> None: + def inner() -> None: + pass + + inner() # No error. + inner = lambda: None + +[case testUsedBeforeDefBuiltins] +# flags: --enable-error-code used-before-def + +def f0() -> None: + s = type(123) + type = "abc" + a = type + +[case testUsedBeforeDefBuiltinsMultipass] +# flags: --enable-error-code used-before-def + +# When doing multiple passes, mypy resolves references slightly differently. +# In this case, it would refer the earlier `type` call to the range class defined below. +_type = type # No error +_C = C # E: Name "C" is used before definition +class type: pass +class C: pass + +[case testUsedBeforeDefImplicitModuleAttrs] +# flags: --enable-error-code used-before-def +a = __name__ # No error. +__name__ = "abc" + +[case testUntypedDef] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def + +def f(): + if int(): + x = 0 + z = y # No used-before-def error because def is untyped. + y = x # No possibly-undefined error because def is untyped. + +[case testUntypedDefCheckUntypedDefs] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def --check-untyped-defs + +def f(): + if int(): + x = 0 + z = y # E: Name "y" is used before definition + y: int = x # E: Name "x" may be undefined + +[case testClassBody] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def + +class A: + # The following should not only trigger an error from semantic analyzer, but not the used-before-def check. + y = x + 1 # E: Name "x" is not defined + x = 0 + # Same as above but in a loop, which should trigger a possibly-undefined error. 
+ for _ in [1, 2, 3]: + b = a + 1 # E: Name "a" is not defined + a = 0 + + +class B: + if int(): + x = 0 + else: + # This type of check is not caught by the semantic analyzer. If we ever update it to catch such issues, + # we should make sure that errors are not double-reported. + y = x # E: Name "x" is used before definition + for _ in [1, 2, 3]: + if int(): + a = 0 + else: + # Same as above but in a loop. + b = a # E: Name "a" may be undefined diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 8cdfd2a3e0d9..96b3a484f56a 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -794,7 +794,7 @@ main:18: note: def attr2(self) -> str [case testSelfTypesWithProtocolsBehaveAsWithNominal] from typing import Protocol, TypeVar -T = TypeVar('T', bound=Shape) +T = TypeVar('T', bound='Shape') class Shape(Protocol): def combine(self: T, other: T) -> T: pass @@ -1153,6 +1153,25 @@ x2 = y2 # E: Incompatible types in assignment (expression has type "PP", variabl # N: Protocol member P.attr expected settable variable, got read-only attribute [builtins fixtures/property.pyi] +[case testClassVarProtocolImmutable] +from typing import Protocol, ClassVar + +class P(Protocol): + @property + def x(self) -> int: ... + +class C: + x: ClassVar[int] + +class Bad: + x: ClassVar[str] + +x: P = C() +y: P = Bad() # E: Incompatible types in assignment (expression has type "Bad", variable has type "P") \ + # N: Following member(s) of "Bad" have conflicts: \ + # N: x: expected "int", got "str" +[builtins fixtures/property.pyi] + [case testSettablePropertyInProtocols] from typing import Protocol @@ -2642,6 +2661,53 @@ reveal_type([b, a]) # N: Revealed type is "builtins.list[def (x: def (__main__. [builtins fixtures/list.pyi] [out] +[case testCallbackProtocolFunctionAttributesSubtyping] +from typing import Protocol + +class A(Protocol): + __name__: str + def __call__(self) -> str: ... 
+ +class B1(Protocol): + __name__: int + def __call__(self) -> str: ... + +class B2(Protocol): + __name__: str + def __call__(self) -> int: ... + +class B3(Protocol): + __name__: str + extra_stuff: int + def __call__(self) -> str: ... + +def f() -> str: ... + +reveal_type(f.__name__) # N: Revealed type is "builtins.str" +a: A = f # OK +b1: B1 = f # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "B1") \ + # N: Following member(s) of "function" have conflicts: \ + # N: __name__: expected "int", got "str" +b2: B2 = f # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "B2") \ + # N: "B2.__call__" has type "Callable[[], int]" +b3: B3 = f # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "B3") \ + # N: "function" is missing following "B3" protocol member: \ + # N: extra_stuff + +[case testCallbackProtocolFunctionAttributesInference] +from typing import Protocol, TypeVar, Generic, Tuple + +T = TypeVar("T") +S = TypeVar("S", covariant=True) +class A(Protocol[T, S]): + __name__: T + def __call__(self) -> S: ... + +def f() -> int: ... +def test(func: A[T, S]) -> Tuple[T, S]: ... +reveal_type(test(f)) # N: Revealed type is "Tuple[builtins.str, builtins.int]" +[builtins fixtures/tuple.pyi] + [case testProtocolsAlwaysABCs] from typing import Protocol @@ -3460,6 +3526,23 @@ test(B) # OK test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ # N: "C" has constructor incompatible with "__call__" of "P" +[case testProtocolClassObjectPureCallback] +from typing import Any, ClassVar, Protocol + +class P(Protocol): + def __call__(self, x: int, y: int) -> Any: ... + +class B: + def __init__(self, x: int, y: int) -> None: ... +class C: + def __init__(self, x: int, y: str) -> None: ... + +def test(arg: P) -> None: ... 
+test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: "C" has constructor incompatible with "__call__" of "P" +[builtins fixtures/type.pyi] + [case testProtocolTypeTypeAttribute] from typing import ClassVar, Protocol, Type @@ -3787,3 +3870,131 @@ from typing_extensions import Final a: Final = 1 [builtins fixtures/module.pyi] + + +[case testModuleAsProtocolRedefinitionTopLevel] +from typing import Protocol + +class P(Protocol): + def f(self) -> str: ... + +cond: bool +t: P +if cond: + import mod1 as t +else: + import mod2 as t + +import badmod as t # E: Incompatible import of "t" (imported name has type Module, local name has type "P") + +[file mod1.py] +def f() -> str: ... + +[file mod2.py] +def f() -> str: ... + +[file badmod.py] +def nothing() -> int: ... +[builtins fixtures/module.pyi] + +[case testModuleAsProtocolRedefinitionImportFrom] +from typing import Protocol + +class P(Protocol): + def f(self) -> str: ... + +cond: bool +t: P +if cond: + from package import mod1 as t +else: + from package import mod2 as t + +from package import badmod as t # E: Incompatible import of "t" (imported name has type Module, local name has type "P") + +package: int = 10 + +import package.mod1 as t +import package.mod1 # E: Incompatible import of "package" (imported name has type Module, local name has type "int") + +[file package/mod1.py] +def f() -> str: ... + +[file package/mod2.py] +def f() -> str: ... + +[file package/badmod.py] +def nothing() -> int: ... +[builtins fixtures/module.pyi] + +[case testProtocolSelfTypeNewSyntax] +from typing import Protocol, Self + +class P(Protocol): + @property + def next(self) -> Self: ... 
+ +class C: + next: C +class S: + next: Self + +x: P = C() +y: P = S() + +z: P +reveal_type(S().next) # N: Revealed type is "__main__.S" +reveal_type(z.next) # N: Revealed type is "__main__.P" +[builtins fixtures/property.pyi] + +[case testProtocolSelfTypeNewSyntaxSubProtocol] +from typing import Protocol, Self + +class P(Protocol): + @property + def next(self) -> Self: ... +class PS(P, Protocol): + @property + def other(self) -> Self: ... + +class C: + next: C + other: C +class S: + next: Self + other: Self + +x: PS = C() +y: PS = S() +[builtins fixtures/property.pyi] + +[case testProtocolClassVarSelfType] +from typing import ClassVar, Self, Protocol + +class P(Protocol): + DEFAULT: ClassVar[Self] +class C: + DEFAULT: ClassVar[C] + +x: P = C() + +[case testInferenceViaTypeTypeMetaclass] +from typing import Iterator, Iterable, TypeVar, Type + +M = TypeVar("M") + +class Meta(type): + def __iter__(self: Type[M]) -> Iterator[M]: ... +class Foo(metaclass=Meta): ... + +T = TypeVar("T") +def test(x: Iterable[T]) -> T: ... 
+ +reveal_type(test(Foo)) # N: Revealed type is "__main__.Foo" +t_foo: Type[Foo] +reveal_type(test(t_foo)) # N: Revealed type is "__main__.Foo" + +TF = TypeVar("TF", bound=Foo) +def outer(cls: Type[TF]) -> TF: + reveal_type(test(cls)) # N: Revealed type is "TF`-1" + return cls() diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 5ac34025384c..12fd2b43c80a 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1725,3 +1725,106 @@ def my_func(pairs: Iterable[tuple[S, S]]) -> None: reveal_type(pair) # N: Revealed type is "Tuple[builtins.int, builtins.int]" \ # N: Revealed type is "Tuple[builtins.str, builtins.str]" [builtins fixtures/tuple.pyi] + +[case testPossiblyUndefinedMatch] +# flags: --enable-error-code possibly-undefined +def f0(x: int | str) -> int: + match x: + case int(): + y = 1 + return y # E: Name "y" may be undefined + +def f1(a: object) -> None: + match a: + case [y]: pass + case _: + y = 1 + x = 2 + z = y + z = x # E: Name "x" may be undefined + +def f2(a: object) -> None: + match a: + case [[y] as x]: pass + case {"k1": 1, "k2": x, "k3": y}: pass + case [0, *x]: + y = 2 + case _: + y = 1 + x = [2] + z = x + z = y + +def f3(a: object) -> None: + y = 1 + match a: + case [x]: + y = 2 + # Note the missing `case _:` + z = x # E: Name "x" may be undefined + z = y + +def f4(a: object) -> None: + y = 1 + match a: + case [x]: + y = 2 + case _: + assert False, "unsupported" + z = x + z = y + +def f5(a: object) -> None: + match a: + case tuple(x): pass + case _: + return + y = x + +def f6(a: object) -> None: + if int(): + y = 1 + match a: + case _ if y is not None: # E: Name "y" may be undefined + pass +[builtins fixtures/tuple.pyi] + +[case testPossiblyUndefinedMatchUnreachable] +# flags: --enable-error-code possibly-undefined +import typing + +def f0(x: int) -> int: + match x: + case 1 if not typing.TYPE_CHECKING: + pass + case 2: + y = 2 + case _: + y = 3 + return y # No error. 
+ +def f1(x: int) -> int: + match x: + case 1 if not typing.TYPE_CHECKING: + pass + case 2: + y = 2 + return y # E: Name "y" may be undefined + +[typing fixtures/typing-medium.pyi] + +[case testTypeAliasWithNewUnionSyntaxAndNoneLeftOperand] +from typing import overload +class C: + @overload + def __init__(self) -> None: pass + @overload + def __init__(self, x: int) -> None: pass + def __init__(self, x=0): + pass + +class D: pass + +X = None | C +Y = None | D +[builtins fixtures/type.pyi] diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test new file mode 100644 index 000000000000..7196f10f8863 --- /dev/null +++ b/test-data/unit/check-python311.test @@ -0,0 +1,65 @@ +[case testTryStarSimple] +try: + pass +except* Exception as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.Exception]" +[builtins fixtures/exception.pyi] + +[case testTryStarMultiple] +try: + pass +except* Exception as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.Exception]" +except* RuntimeError as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.RuntimeError]" +[builtins fixtures/exception.pyi] + +[case testTryStarBase] +try: + pass +except* BaseException as e: + reveal_type(e) # N: Revealed type is "builtins.BaseExceptionGroup[builtins.BaseException]" +[builtins fixtures/exception.pyi] + +[case testTryStarTuple] +class Custom(Exception): ... + +try: + pass +except* (RuntimeError, Custom) as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Union[builtins.RuntimeError, __main__.Custom]]" +[builtins fixtures/exception.pyi] + +[case testTryStarInvalidType] +class Bad: ... 
+try: + pass +except* (RuntimeError, Bad) as e: # E: Exception type must be derived from BaseException + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Any]" +[builtins fixtures/exception.pyi] + +[case testTryStarGroupInvalid] +try: + pass +except* ExceptionGroup as e: # E: Exception type in except* cannot derive from BaseExceptionGroup + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Any]" +[builtins fixtures/exception.pyi] + +[case testTryStarGroupInvalidTuple] +try: + pass +except* (RuntimeError, ExceptionGroup) as e: # E: Exception type in except* cannot derive from BaseExceptionGroup + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Union[builtins.RuntimeError, Any]]" +[builtins fixtures/exception.pyi] + +[case testBasicTypeVarTupleGeneric] +from typing import Generic, TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + +class Variadic(Generic[Unpack[Ts]]): + ... + +variadic: Variadic[int, str] +reveal_type(variadic) # N: Revealed type is "__main__.Variadic[builtins.int, builtins.str]" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index 1922192c2877..7e5e0f3cf185 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -707,8 +707,8 @@ def foo(name: str, /, **kwargs: Unpack[Person]) -> None: # Allowed ... 
[builtins fixtures/dict.pyi] -[case testPartiallyDefinedWithAssignmentExpr] -# flags: --python-version 3.8 --enable-error-code partially-defined +[case testPossiblyUndefinedWithAssignmentExpr] +# flags: --python-version 3.8 --enable-error-code possibly-undefined def f1() -> None: d = {0: 1} if int(): @@ -718,3 +718,24 @@ def f1() -> None: y = x z = x [builtins fixtures/dict.pyi] + +[case testNarrowOnSelfInGeneric] +# flags: --strict-optional +from typing import Generic, TypeVar, Optional + +T = TypeVar("T", int, str) + +class C(Generic[T]): + x: Optional[T] + def meth(self) -> Optional[T]: + if (y := self.x) is not None: + reveal_type(y) + return None +[out] +main:10: note: Revealed type is "builtins.int" +main:10: note: Revealed type is "builtins.str" + +[case testNoCrashOnAssignmentExprClass] +class C: + [(j := i) for i in [1, 2, 3]] # E: Assignment expression within a comprehension cannot be used in a class body +[builtins fixtures/list.pyi] diff --git a/test-data/unit/check-python39.test b/test-data/unit/check-python39.test index d169f4001015..105051a840bb 100644 --- a/test-data/unit/check-python39.test +++ b/test-data/unit/check-python39.test @@ -4,9 +4,9 @@ # most important test, to deal with this we'll only run this test with Python 3.9 and later. 
import typing def f(a: 'A', b: 'B') -> None: pass -f(a=A(), b=B(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass +f(a=A(), b=B(), a=A()) # E: "f" gets multiple values for keyword argument "a" [case testPEP614] diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index a0875c60362c..b7b4372ecc12 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -88,6 +88,7 @@ A = int | list[A] -- Tests duplicating some existing type alias tests with recursive aliases enabled [case testRecursiveAliasesMutual] +# flags: --disable-error-code used-before-def from typing import Type, Callable, Union A = Union[B, int] @@ -120,6 +121,7 @@ B = List[A] [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClass] +# flags: --disable-error-code used-before-def from typing import List x: B @@ -131,6 +133,7 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.C" [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClass2] +# flags: --disable-error-code used-before-def from typing import NewType, List x: D @@ -162,6 +165,7 @@ reveal_type(x) # N: Revealed type is "__main__.G[Tuple[builtins.int, fallback=_ [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClassImported] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import List @@ -376,6 +380,7 @@ x: A y: str = x[0] # E: Incompatible types in assignment (expression has type "Optional[A]", variable has type "str") [case testRecursiveAliasesProhibitBadAliases] +# flags: --disable-error-code used-before-def from typing import Union, Type, List, TypeVar NR = List[int] @@ -502,6 +507,8 @@ reveal_type(bnt.y) # N: Revealed type is "builtins.int" -- Tests duplicating some existing named tuple tests with recursive aliases enabled [case testMutuallyRecursiveNamedTuples] +# flags: --disable-error-code used-before-def + from typing import Tuple, NamedTuple, 
TypeVar, Union A = NamedTuple('A', [('x', str), ('y', Tuple[B, ...])]) @@ -565,6 +572,7 @@ t = m # E: Incompatible types in assignment (expression has type "B", variable [builtins fixtures/tuple.pyi] [case testMutuallyRecursiveNamedTuplesCalls] +# flags: --disable-error-code used-before-def from typing import NamedTuple B = NamedTuple('B', [('x', A), ('y', int)]) @@ -808,3 +816,84 @@ def test2() -> Tree2: def test3() -> Tree3: return 42 # E: Incompatible return value type (got "int", expected "Union[str, Tuple[Tree3, Tree3, Tree3]]") [builtins fixtures/tuple.pyi] + +[case testRecursiveDoubleUnionNoCrash] +from typing import Tuple, Union, Callable, Sequence + +K = Union[int, Tuple[Union[int, K]]] +L = Union[int, Callable[[], Union[int, L]]] +M = Union[int, Sequence[Union[int, M]]] + +x: K +x = x +y: L +y = y +z: M +z = z + +x = y # E: Incompatible types in assignment (expression has type "L", variable has type "K") +z = x # OK +[builtins fixtures/tuple.pyi] + +[case testRecursiveInstanceInferenceNoCrash] +from typing import Sequence, TypeVar, Union + +class C(Sequence[C]): ... + +T = TypeVar("T") +def foo(x: T) -> C: ... + +Nested = Union[C, Sequence[Nested]] +x: Nested = foo(42) + +[case testNoRecursiveExpandInstanceUnionCrash] +from typing import List, Union + +class Tag(List[Union[Tag, List[Tag]]]): ... 
+Tag() + +[case testNoRecursiveExpandInstanceUnionCrashGeneric] +from typing import Generic, Iterable, TypeVar, Union + +ValueT = TypeVar("ValueT") +class Recursive(Iterable[Union[ValueT, Recursive[ValueT]]]): + pass + +class Base(Generic[ValueT]): + def __init__(self, element: ValueT): + pass +class Sub(Base[Union[ValueT, Recursive[ValueT]]]): + pass + +x: Iterable[str] +reveal_type(Sub) # N: Revealed type is "def [ValueT] (element: Union[ValueT`1, __main__.Recursive[ValueT`1]]) -> __main__.Sub[ValueT`1]" +reveal_type(Sub(x)) # N: Revealed type is "__main__.Sub[typing.Iterable[builtins.str]]" + +[case testNoRecursiveExpandInstanceUnionCrashInference] +# flags: --disable-error-code used-before-def +from typing import TypeVar, Union, Generic, List + +T = TypeVar("T") +InList = Union[T, InListRecurse[T]] +class InListRecurse(Generic[T], List[InList[T]]): ... + +def list_thing(transforming: InList[T]) -> T: + ... +reveal_type(list_thing([5])) # N: Revealed type is "builtins.list[builtins.int]" + +[case testRecursiveTypedDictWithList] +from typing import List +from typing_extensions import TypedDict + +Example = TypedDict("Example", {"rec": List["Example"]}) +e: Example +reveal_type(e) # N: Revealed type is "TypedDict('__main__.Example', {'rec': builtins.list[...]})" +[builtins fixtures/dict.pyi] + +[case testRecursiveNamedTupleWithList] +from typing import List, NamedTuple + +Example = NamedTuple("Example", [("rec", List["Example"])]) +e: Example +reveal_type(e) # N: Revealed type is "Tuple[builtins.list[...], fallback=__main__.Example]" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-redefine.test b/test-data/unit/check-redefine.test index e73f715c9ec0..e3f1b976d4e9 100644 --- a/test-data/unit/check-redefine.test +++ b/test-data/unit/check-redefine.test @@ -285,7 +285,7 @@ def f() -> None: import typing as m m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Module) n = 1 - import typing as n # E: Name "n" 
already defined on line 5 + import typing as n # E: Incompatible import of "n" (imported name has type Module, local name has type "int") [builtins fixtures/module.pyi] [case testRedefineLocalWithTypeAnnotation] diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 506e8bfe8ab1..2d45d28764a0 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -128,6 +128,120 @@ reveal_type(cast(A, C()).copy()) # N: Revealed type is "__main__.A" [builtins fixtures/bool.pyi] +[case testSelfTypeOverrideCompatibility] +from typing import overload, TypeVar, Generic + +T = TypeVar("T") + +class A(Generic[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + def f(self): ... + +class B(A[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + def f(self): ... + +class B2(A[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + @overload + def f(self: A[bytes]) -> bytes: ... + def f(self): ... + +class C(A[int]): + def f(self) -> int: ... + +class D(A[str]): + def f(self) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> int + +class E(A[T]): + def f(self) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> int + + +class F(A[bytes]): + # Note there's an argument to be made that this is actually compatible with the supertype + def f(self) -> bytes: ... 
# E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> bytes + +class G(A): + def f(self): ... + +class H(A[int]): + def f(self): ... + +class I(A[int]): + def f(*args): ... + +class J(A[int]): + def f(self, arg) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: Subclass: \ + # N: def f(self, arg: Any) -> int + +[builtins fixtures/tuple.pyi] + +[case testSelfTypeOverrideCompatibilityTypeVar-xfail] +from typing import overload, TypeVar, Union + +AT = TypeVar("AT", bound="A") + +class A: + @overload + def f(self: AT, x: int) -> AT: ... + @overload + def f(self, x: str) -> None: ... + @overload + def f(self: AT) -> bytes: ... + def f(*a, **kw): ... + +class B(A): + @overload # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self, x: int) -> B \ + # N: @overload \ + # N: def f(self, x: str) -> None \ + # N: @overload \ + # N: def f(self) -> bytes \ + # N: Subclass: \ + # N: @overload \ + # N: def f(self, x: int) -> B \ + # N: @overload \ + # N: def f(self, x: str) -> None + def f(self, x: int) -> B: ... + @overload + def f(self, x: str) -> None: ... + def f(*a, **kw): ... 
+[builtins fixtures/dict.pyi] + [case testSelfTypeSuper] from typing import TypeVar, cast @@ -314,7 +428,7 @@ class C: [case testSelfTypeNew] from typing import TypeVar, Type -T = TypeVar('T', bound=A) +T = TypeVar('T', bound='A') class A: def __new__(cls: Type[T]) -> T: return cls() @@ -417,15 +531,15 @@ reveal_type(B().ft()) # N: Revealed type is "Tuple[builtins.int, builtins.int, [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttributeMeta] -from typing import Callable, TypeVar, Type +from typing import Callable, TypeVar, Type, ClassVar T = TypeVar('T') class A(type): @property def g(cls: object) -> int: return 0 @property def gt(cls: T) -> T: return cls - f: Callable[[object], int] - ft: Callable[[T], T] + f: ClassVar[Callable[[object], int]] + ft: ClassVar[Callable[[T], T]] class B(A): pass @@ -678,6 +792,26 @@ reveal_type(f.copy()) # N: Revealed type is "__main__.File" b.copy() # E: Invalid self argument "Bad" to attribute function "copy" with type "Callable[[T], T]" [builtins fixtures/tuple.pyi] +[case testMixinProtocolSuper] +from typing import Protocol + +class Base(Protocol): + def func(self) -> int: + ... + +class TweakFunc: + def func(self: Base) -> int: + return reveal_type(super().func()) # N: Revealed type is "builtins.int" + +class Good: + def func(self) -> int: ... +class C(TweakFunc, Good): pass +C().func() # OK + +class Bad: + def func(self) -> str: ... 
+class CC(TweakFunc, Bad): pass # E: Definition of "func" in base class "TweakFunc" is incompatible with definition in base class "Bad" + [case testBadClassLevelDecoratorHack] from typing_extensions import Protocol from typing import TypeVar, Any @@ -864,7 +998,7 @@ reveal_type(ab.x) # N: Revealed type is "builtins.int" from typing import Generic, List, Optional, TypeVar, Any Q = TypeVar("Q") -T = TypeVar("T", bound=Super[Any]) +T = TypeVar("T", bound='Super[Any]') class Super(Generic[Q]): @classmethod @@ -1023,7 +1157,7 @@ from typing import Optional, Type, TypeVar, overload, Union Id = int -A = TypeVar("A", bound=AClass) +A = TypeVar("A", bound='AClass') class AClass: @overload @@ -1239,3 +1373,435 @@ class Test(Generic[T]): a: deque[List[T]] # previously this failed with 'Incompatible types in assignment (expression has type "deque[List[List[T]]]", variable has type "deque[List[T]]")' b: deque[List[T]] = a.copy() + +[case testTypingSelfBasic] +from typing import Self, List + +class C: + attr: List[Self] + def meth(self) -> List[Self]: ... + def test(self) -> Self: + if bool(): + return C() # E: Incompatible return value type (got "C", expected "Self") + else: + return self +class D(C): ... + +reveal_type(C.meth) # N: Revealed type is "def [Self <: __main__.C] (self: Self`0) -> builtins.list[Self`0]" +C.attr # E: Access to generic instance variables via class is ambiguous +reveal_type(D().meth()) # N: Revealed type is "builtins.list[__main__.D]" +reveal_type(D().attr) # N: Revealed type is "builtins.list[__main__.D]" + +[case testTypingSelfInvalidLocations] +from typing import Self, Callable + +var: Self # E: Self type is only allowed in annotations within class definition +reveal_type(var) # N: Revealed type is "Any" + +def foo() -> Self: ... 
# E: Self type is only allowed in annotations within class definition +reveal_type(foo) # N: Revealed type is "def () -> Any" + +bad: Callable[[Self], Self] # E: Self type is only allowed in annotations within class definition +reveal_type(bad) # N: Revealed type is "def (Any) -> Any" + +def func() -> None: + var: Self # E: Self type is only allowed in annotations within class definition + +class C(Self): ... # E: Self type is only allowed in annotations within class definition + +[case testTypingSelfInvalidArgs] +from typing import Self, List + +class C: + x: Self[int] # E: Self type cannot have type arguments + def meth(self) -> List[Self[int]]: # E: Self type cannot have type arguments + ... + +[case testTypingSelfConflict] +from typing import Self, TypeVar, Tuple + +T = TypeVar("T") +class C: + def meth(self: T) -> Tuple[Self, T]: ... # E: Method cannot have explicit self annotation and Self type +reveal_type(C().meth()) # N: Revealed type is "Tuple[, __main__.C]" +[builtins fixtures/property.pyi] + +[case testTypingSelfProperty] +from typing import Self, Tuple +class C: + @property + def attr(self) -> Tuple[Self, ...]: ... +class D(C): ... + +reveal_type(D().attr) # N: Revealed type is "builtins.tuple[__main__.D, ...]" +[builtins fixtures/property.pyi] + +[case testTypingSelfCallableVar] +from typing import Self, Callable + +class C: + x: Callable[[Self], Self] + def meth(self) -> Callable[[Self], Self]: ... +class D(C): ... + +reveal_type(C().x) # N: Revealed type is "def (__main__.C) -> __main__.C" +reveal_type(D().x) # N: Revealed type is "def (__main__.D) -> __main__.D" +reveal_type(D().meth()) # N: Revealed type is "def (__main__.D) -> __main__.D" + +[case testTypingSelfClassMethod] +from typing import Self + +class C: + @classmethod + def meth(cls) -> Self: ... + @staticmethod + def bad() -> Self: ... 
# E: Static methods cannot use Self type \ + # E: A function returning TypeVar should receive at least one argument containing the same TypeVar \ + # N: Consider using the upper bound "C" instead + +class D(C): ... +reveal_type(D.meth()) # N: Revealed type is "__main__.D" +reveal_type(D.bad()) # N: Revealed type is "" +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfOverload] +from typing import Self, overload, Union + +class C: + @overload + def foo(self, other: Self) -> Self: ... + @overload + def foo(self, other: int) -> int: ... + def foo(self, other: Union[Self, int]) -> Union[Self, int]: + return other +class D(C): ... +reveal_type(D().foo) # N: Revealed type is "Overload(def (other: __main__.D) -> __main__.D, def (other: builtins.int) -> builtins.int)" + +[case testTypingSelfNestedInAlias] +from typing import Generic, Self, TypeVar, List, Tuple + +T = TypeVar("T") +Pairs = List[Tuple[T, T]] + +class C(Generic[T]): + def pairs(self) -> Pairs[Self]: ... +class D(C[T]): ... +reveal_type(D[int]().pairs()) # N: Revealed type is "builtins.list[Tuple[__main__.D[builtins.int], __main__.D[builtins.int]]]" +[builtins fixtures/tuple.pyi] + +[case testTypingSelfOverrideVar] +from typing import Self, TypeVar, Generic + +T = TypeVar("T") +class C(Generic[T]): + x: Self + +class D(C[int]): + x: D +class Bad(C[int]): + x: C[int] # E: Incompatible types in assignment (expression has type "C[int]", base class "C" defined the type as "Bad") + +[case testTypingSelfOverrideVarMulti] +from typing import Self + +class C: + x: Self +class D: + x: C +class E: + x: Good + +class Bad(D, C): # E: Definition of "x" in base class "D" is incompatible with definition in base class "C" + ... +class Good(E, C): + ... + +[case testTypingSelfAlternativeGenericConstructor] +from typing import Self, Generic, TypeVar, Tuple + +T = TypeVar("T") +class C(Generic[T]): + def __init__(self, val: T) -> None: ... 
+ @classmethod + def pair(cls, val: T) -> Tuple[Self, Self]: + return (cls(val), C(val)) # E: Incompatible return value type (got "Tuple[Self, C[T]]", expected "Tuple[Self, Self]") + +class D(C[int]): pass +reveal_type(C.pair(42)) # N: Revealed type is "Tuple[__main__.C[builtins.int], __main__.C[builtins.int]]" +reveal_type(D.pair("no")) # N: Revealed type is "Tuple[__main__.D, __main__.D]" \ + # E: Argument 1 to "pair" of "C" has incompatible type "str"; expected "int" +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfMixedTypeVars] +from typing import Self, TypeVar, Generic, Tuple + +T = TypeVar("T") +S = TypeVar("S") + +class C(Generic[T]): + def meth(self, arg: S) -> Tuple[Self, S, T]: ... + +class D(C[int]): ... + +c: C[int] +d: D +reveal_type(c.meth("test")) # N: Revealed type is "Tuple[__main__.C[builtins.int], builtins.str, builtins.int]" +reveal_type(d.meth("test")) # N: Revealed type is "Tuple[__main__.D, builtins.str, builtins.int]" +[builtins fixtures/tuple.pyi] + +[case testTypingSelfRecursiveInit] +from typing import Self + +class C: + def __init__(self, other: Self) -> None: ... +class D(C): ... + +reveal_type(C) # N: Revealed type is "def (other: __main__.C) -> __main__.C" +reveal_type(D) # N: Revealed type is "def (other: __main__.D) -> __main__.D" + +[case testTypingSelfCorrectName] +from typing import Self, List + +class C: + Self = List[C] + def meth(self) -> Self: ... 
+reveal_type(C.meth) # N: Revealed type is "def (self: __main__.C) -> builtins.list[__main__.C]" + +[case testTypingSelfClassVar] +from typing import Self, ClassVar, Generic, TypeVar + +class C: + DEFAULT: ClassVar[Self] +reveal_type(C.DEFAULT) # N: Revealed type is "__main__.C" + +T = TypeVar("T") +class G(Generic[T]): + BAD: ClassVar[Self] # E: ClassVar cannot contain Self type in generic classes +reveal_type(G.BAD) # N: Revealed type is "__main__.G[Any]" + +[case testTypingSelfMetaClassDisabled] +from typing import Self + +class Meta(type): + def meth(cls) -> Self: ... # E: Self type cannot be used in a metaclass + +[case testTypingSelfNonAnnotationUses] +from typing import Self, List, cast + +class C: + A = List[Self] # E: Self type cannot be used in type alias target + B = cast(Self, ...) + def meth(self) -> A: ... + +class D(C): ... +reveal_type(D().meth()) # N: Revealed type is "builtins.list[Any]" +reveal_type(D().B) # N: Revealed type is "__main__.D" + +[case testTypingSelfInternalSafe] +from typing import Self + +class C: + x: Self + def __init__(self, x: C) -> None: + self.x = x # E: Incompatible types in assignment (expression has type "C", variable has type "Self") + +[case testTypingSelfRedundantAllowed] +from typing import Self, Type + +class C: + def f(self: Self) -> Self: + d: Defer + class Defer: ... + return self + + @classmethod + def g(cls: Type[Self]) -> Self: + d: DeferAgain + class DeferAgain: ... + return cls() +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfRedundantWarning] +# mypy: enable-error-code="redundant-self" + +from typing import Self, Type + +class C: + def copy(self: Self) -> Self: # E: Redundant Self annotation on method first argument + d: Defer + class Defer: ... + return self + + @classmethod + def g(cls: Type[Self]) -> Self: # E: Redundant Self annotation on method first argument + d: DeferAgain + class DeferAgain: ... 
+ return cls() +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfAssertType] +from typing import Self, assert_type + +class C: + def foo(self) -> None: + assert_type(self, Self) # E: Expression is of type "C", not "Self" + assert_type(C(), Self) # E: Expression is of type "C", not "Self" + + def bar(self) -> Self: + assert_type(self, Self) # OK + assert_type(C(), Self) # E: Expression is of type "C", not "Self" + return self + +[case testTypingSelfTypeVarClash] +from typing import Self, TypeVar, Tuple + +S = TypeVar("S") +class C: + def bar(self) -> Self: ... + def foo(self, x: S) -> Tuple[Self, S]: ... + +reveal_type(C.foo) # N: Revealed type is "def [Self <: __main__.C, S] (self: Self`0, x: S`-1) -> Tuple[Self`0, S`-1]" +reveal_type(C().foo(42)) # N: Revealed type is "Tuple[__main__.C, builtins.int]" +[builtins fixtures/tuple.pyi] + +[case testTypingSelfTypeVarClashAttr] +from typing import Self, TypeVar, Tuple, Callable + +class Defer(This): ... + +S = TypeVar("S") +class C: + def bar(self) -> Self: ... + foo: Callable[[S, Self], Tuple[Self, S]] + +reveal_type(C().foo) # N: Revealed type is "def [S] (S`1, __main__.C) -> Tuple[__main__.C, S`1]" +reveal_type(C().foo(42, C())) # N: Revealed type is "Tuple[__main__.C, builtins.int]" +class This: ... 
+[builtins fixtures/tuple.pyi] + +[case testTypingSelfAttrOldVsNewStyle] +from typing import Self, TypeVar + +T = TypeVar("T", bound='C') +class C: + x: Self + def foo(self: T) -> T: + return self.x + def bar(self: T) -> T: + self.x = self + return self + def baz(self: Self) -> None: + self.x = self + def bad(self) -> None: + # This is unfortunate, but required by PEP 484 + self.x = self # E: Incompatible types in assignment (expression has type "C", variable has type "Self") + +[case testTypingSelfClashInBodies] +from typing import Self, TypeVar + +T = TypeVar("T") +class C: + def very_bad(self, x: T) -> None: + self.x = x # E: Incompatible types in assignment (expression has type "T", variable has type "Self") + x: Self + def baz(self: Self, x: T) -> None: + y: T = x + +[case testTypingSelfClashUnrelated] +from typing import Self, Generic, TypeVar + +class B: ... + +T = TypeVar("T", bound=B) +class C(Generic[T]): + def __init__(self, val: T) -> None: + self.val = val + def foo(self) -> Self: ... + +def test(x: C[T]) -> T: + reveal_type(x.val) # N: Revealed type is "T`-1" + return x.val + +[case testTypingSelfGenericBound] +from typing import Self, Generic, TypeVar + +T = TypeVar("T") +class C(Generic[T]): + val: T + def foo(self) -> Self: + reveal_type(self.val) # N: Revealed type is "T`1" + return self + +[case testTypingSelfDifferentImport] +import typing as t + +class Foo: + def foo(self) -> t.Self: + return self + @classmethod + def bar(cls) -> t.Self: + return cls() +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfAllowAliasUseInFinalClasses] +from typing import Self, final + +@final +class C: + def meth(self) -> Self: + return C() # OK for final classes + +[case testTypingSelfCallableClassVar] +from typing import Self, ClassVar, Callable, TypeVar + +class C: + f: ClassVar[Callable[[Self], Self]] +class D(C): ... 
+ +reveal_type(D.f) # N: Revealed type is "def (__main__.D) -> __main__.D" +reveal_type(D().f) # N: Revealed type is "def () -> __main__.D" + +[case testSelfTypeCallableClassVarOldStyle] +from typing import ClassVar, Callable, TypeVar + +T = TypeVar("T") +class C: + f: ClassVar[Callable[[T], T]] + +class D(C): ... + +reveal_type(D.f) # N: Revealed type is "def [T] (T`-1) -> T`-1" +reveal_type(D().f) # N: Revealed type is "def () -> __main__.D" + +[case testTypingSelfOnSuperTypeVarValues] +from typing import Self, Generic, TypeVar + +T = TypeVar("T", int, str) + +class B: + def copy(self) -> Self: ... +class C(B, Generic[T]): + def copy(self) -> Self: + inst = super().copy() + reveal_type(inst) # N: Revealed type is "Self`0" + return inst + +[case testTypingSelfWithValuesExpansion] +from typing import Self, Generic, TypeVar + +class A: pass +class B: pass +T = TypeVar("T", A, B) + +class C(Generic[T]): + val: T + def foo(self, x: T) -> None: ... + def bar(self, x: T) -> Self: + reveal_type(self.foo) # N: Revealed type is "def (x: __main__.A)" \ + # N: Revealed type is "def (x: __main__.B)" + self.foo(x) + return self + def baz(self: Self, x: T) -> None: + reveal_type(self.val) # N: Revealed type is "__main__.A" \ + # N: Revealed type is "__main__.B" + self.val = x diff --git a/test-data/unit/check-slots.test b/test-data/unit/check-slots.test index 96e4eba3c966..8beb0d8bf3f7 100644 --- a/test-data/unit/check-slots.test +++ b/test-data/unit/check-slots.test @@ -517,3 +517,13 @@ class A: self.b = 2 self.missing = 3 [builtins fixtures/tuple.pyi] + +[case testSlotsWithClassVar] +from typing import ClassVar +class X: + __slots__ = ('a',) + a: int +x = X() +X.a # E: "a" in __slots__ conflicts with class variable access +x.a +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index 4be5060996e2..ed7349aaa296 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ 
-140,20 +140,15 @@ main:5: error: Incompatible types in assignment (expression has type "bool", var main:7: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testForStatement] +class A: pass a = None # type: A b = None # type: object for a in [A()]: - a = b # Fail + a = b # E: Incompatible types in assignment (expression has type "object", variable has type "A") else: - a = b # Fail - -class A: pass + a = b # E: Incompatible types in assignment (expression has type "object", variable has type "A") [builtins fixtures/list.pyi] -[out] -main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A") -main:7: error: Incompatible types in assignment (expression has type "object", variable has type "A") - [case testBreakStatement] import typing while None: @@ -520,15 +515,15 @@ class B: pass main:7: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") [case testTypeErrorInBlock] +class A: pass +class B: pass while object: x = None # type: A if int(): x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A: pass -class B: pass - [case testTypeErrorInvolvingBaseException] +class A: pass x, a = None, None # type: (BaseException, A) if int(): @@ -541,7 +536,6 @@ if int(): x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "BaseException") if int(): x = BaseException() -class A: pass [builtins fixtures/exception.pyi] [case testSimpleTryExcept2] @@ -557,49 +551,38 @@ main:5: error: Incompatible types in assignment (expression has type "object", v [case testBaseClassAsExceptionTypeInExcept] import typing +class Err(BaseException): pass try: pass except Err as e: - e = BaseException() # Fail + e = BaseException() # E: Incompatible types in assignment 
(expression has type "BaseException", variable has type "Err") e = Err() -class Err(BaseException): pass [builtins fixtures/exception.pyi] -[out] -main:5: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") - [case testMultipleExceptHandlers] import typing +class Err(BaseException): pass try: pass except BaseException as e: pass except Err as f: - f = BaseException() # Fail + f = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") f = Err() -class Err(BaseException): pass [builtins fixtures/exception.pyi] -[out] -main:7: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") - [case testTryExceptStatement] import typing +class A: pass +class B: pass +class Err(BaseException): pass try: - a = B() # type: A # Fail + a = B() # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") except BaseException as e: - e = A() # Fail + e = A() # E: Incompatible types in assignment (expression has type "A", variable has type "BaseException") e = Err() except Err as f: - f = BaseException() # Fail + f = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") f = Err() -class A: pass -class B: pass -class Err(BaseException): pass [builtins fixtures/exception.pyi] -[out] -main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") -main:5: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException") -main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") - [case testTryExceptWithinFunction] import typing def f() -> None: @@ -823,7 +806,7 @@ try: pass except E1 as e: pass try: pass except E2 as e: pass -e + 1 # E: Trying to read deleted variable "e" +e + 1 # E: Trying to read deleted variable 
"e" # E: Name "e" is used before definition e = E1() # E: Assignment to variable "e" outside except: block [builtins fixtures/exception.pyi] @@ -2052,16 +2035,12 @@ foo = int [case testTypeOfGlobalUsed] import typing +class A(): pass +class B(): pass g = A() def f() -> None: global g - g = B() - -class A(): pass -class B(): pass -[out] -main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") - + g = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeOfNonlocalUsed] import typing def f() -> None: @@ -2206,3 +2185,16 @@ def foo(): x: int = "no" # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs y = "no" # type: int # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs z: int # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs + +[case testGeneratorUnion] +from typing import Generator, Union + +class A: pass +class B: pass + +def foo(x: int) -> Union[Generator[A, None, None], Generator[B, None, None]]: + yield x # E: Incompatible types in "yield" (actual type "int", expected type "Union[A, B]") + +[case testNoCrashOnStarRightHandSide] +x = *(1, 2, 3) # E: Can use starred expression only as assignment target +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test index 0913f4f25126..b3379e505be7 100644 --- a/test-data/unit/check-super.test +++ b/test-data/unit/check-super.test @@ -365,7 +365,7 @@ class A: def f(self) -> None: pass class B(A): - def g() -> None: # E: Method must have at least one argument + def g() -> None: # E: Method must have at least one argument. Did you forget the "self" argument? 
super().f() # E: super() requires one or more positional arguments in enclosing function def h(self) -> None: def a() -> None: @@ -409,3 +409,10 @@ class B(A): reveal_type(super().foo()) # N: Revealed type is "T`-1" return super().foo() [builtins fixtures/classmethod.pyi] + +[case testWrongSuperOutsideMethodNoCrash] +class B: + x: int +class C1(B): ... +class C2(B): ... +super(C1, C2).x # E: Argument 2 for "super" not an instance of argument 1 diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 061a4bcfa48d..266bfbf97888 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -164,10 +164,10 @@ class C(B): pass [case testVoidValueInTuple] import typing +def f() -> None: pass + (None, f()) # E: "f" does not return a value (f(), None) # E: "f" does not return a value - -def f() -> None: pass [builtins fixtures/tuple.pyi] @@ -247,15 +247,16 @@ class B: pass [case testAssigningToTupleItems] from typing import Tuple + +class A: pass +class B: pass + t = None # type: Tuple[A, B] n = 0 t[0] = A() # E: Unsupported target for indexed assignment ("Tuple[A, B]") t[2] = A() # E: Unsupported target for indexed assignment ("Tuple[A, B]") t[n] = A() # E: Unsupported target for indexed assignment ("Tuple[A, B]") - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] @@ -532,13 +533,12 @@ if int(): [case testAssignmentToStarFromAny] from typing import Any, cast +class C: pass + a, c = cast(Any, 1), C() p, *q = a c = a c = q - -class C: pass - [case testAssignmentToComplexStar] from typing import List li = None # type: List[int] @@ -572,6 +572,7 @@ class A: pass [case testAssignmentToStarFromTupleInference] from typing import List +class A: pass li = None # type: List[int] la = None # type: List[A] a, *l = A(), A() @@ -579,13 +580,14 @@ if int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la - -class A: pass [builtins 
fixtures/list.pyi] [out] [case testAssignmentToStarFromListInference] from typing import List + +class A: pass + li = None # type: List[int] la = None # type: List[A] a, *l = [A(), A()] @@ -593,8 +595,6 @@ if int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la - -class A: pass [builtins fixtures/list.pyi] [out] @@ -710,6 +710,9 @@ class C: pass [case testTupleErrorMessages] +class A: + def __add__(self, x: 'A') -> 'A': pass +def f(x: 'A') -> None: pass a = None # type: A @@ -717,11 +720,6 @@ a = None # type: A a + (a, a) # E: Unsupported operand types for + ("A" and "Tuple[A, A]") f((a, a)) # E: Argument 1 to "f" has incompatible type "Tuple[A, A]"; expected "A" (a, a).foo # E: "Tuple[A, A]" has no attribute "foo" - -def f(x: 'A') -> None: pass - -class A: - def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/tuple.pyi] [case testLargeTuplesInErrorMessages] @@ -776,6 +774,7 @@ class str: pass class bool: pass class type: pass class function: pass +class dict: pass -- For loop over tuple @@ -972,6 +971,17 @@ b = (1, 'x') a = (0, *b, '') [builtins fixtures/tuple.pyi] +[case testUnpackSyntaxError] +*foo # E: Can use starred expression only as assignment target +[builtins fixtures/tuple.pyi] + +[case testUnpackBases] +class A: ... +class B: ... +bases = (A, B) +class C(*bases): ... 
# E: Invalid base class +[builtins fixtures/tuple.pyi] + [case testTupleMeetTupleAny] from typing import Union, Tuple class A: pass diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 8dafc8f47a6c..d7cccd2d6ba6 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -206,7 +206,7 @@ B = Callable[[B], int] # E: Cannot resolve name "B" (possible cyclic definition) C = Type[C] # E: Cannot resolve name "C" (possible cyclic definition) [case testRecursiveAliasesErrors2] -# flags: --disable-recursive-aliases +# flags: --disable-recursive-aliases --disable-error-code=used-before-def # Recursive aliases are not supported yet. from typing import Type, Callable, Union @@ -224,6 +224,7 @@ main:7: error: Cannot resolve name "C" (possible cyclic definition) main:9: note: Revealed type is "Union[Any, builtins.int]" [case testDoubleForwardAlias] +# flags: --disable-error-code=used-before-def from typing import List x: A A = List[B] @@ -233,6 +234,7 @@ reveal_type(x) # N: Revealed type is "builtins.list[builtins.list[builtins.int]] [out] [case testDoubleForwardAliasWithNamedTuple] +# flags: --disable-error-code=used-before-def from typing import List, NamedTuple x: A A = List[B] @@ -254,6 +256,7 @@ if isinstance(x, list): [out] [case testForwardRefToTypeVar] +# flags: --disable-error-code=used-before-def from typing import TypeVar, List reveal_type(a) # N: Revealed type is "builtins.list[builtins.int]" @@ -444,7 +447,7 @@ A = Union[None] [case testAliasToClassMethod] from typing import TypeVar, Generic, Union, Type -T = TypeVar('T', bound=C) +T = TypeVar('T', bound='C') MYPY = False if MYPY: @@ -821,28 +824,28 @@ c = Child() reveal_type(NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(SpecialExplicit) # N: Revealed type is 
"builtins.object" +reveal_type(SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(Parent.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(Parent.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(Parent.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(Parent.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(Parent.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(Parent.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(Child.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(Child.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(Child.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(Child.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(Child.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(Child.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(p.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(p.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(p.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(p.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(p.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(p.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" reveal_type(c.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" reveal_type(p.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" -reveal_type(c.SpecialImplicit) # N: Revealed type is "builtins.object" -reveal_type(c.SpecialExplicit) # N: Revealed type is "builtins.object" +reveal_type(c.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(c.SpecialExplicit) # N: Revealed type is 
"typing._SpecialForm" # Use type aliases in a type alias context in a plausible way @@ -895,6 +898,7 @@ reveal_type(weird_child_2) # N: Revealed type is "def () -> Any" reveal_type(weird_child_3) # N: Revealed type is "def () -> Any" reveal_type(weird_child_4) # N: Revealed type is "def () -> Any" [builtins fixtures/tuple.pyi] +[typing fixtures/typing-medium.pyi] [case testMalformedTypeAliasRuntimeReassignments] from typing import Union @@ -927,8 +931,8 @@ SpecialExplicit = 4 # E: Cannot assign multiple types to name "SpecialExplicit" Parent.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") Parent.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") -Parent.SpecialImplicit = 4 -Parent.SpecialExplicit = 4 +Parent.SpecialImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "") +Parent.SpecialExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "") Child.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") Child.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") @@ -945,3 +949,82 @@ c.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type c.SpecialImplicit = 4 c.SpecialExplicit = 4 [builtins fixtures/tuple.pyi] +[typing fixtures/typing-medium.pyi] + +[case testNewStyleUnionInTypeAliasWithMalformedInstance] +# flags: --python-version 3.10 +from typing import List + +A = List[int, str] | int # E: "list" expects 1 type argument, but 2 given +B = int | list[int, str] # E: "list" expects 1 type argument, but 2 given +a: A +b: B +reveal_type(a) # N: Revealed type is "Union[builtins.list[Any], builtins.int]" +reveal_type(b) # N: Revealed type is "Union[builtins.int, builtins.list[Any]]" + +[case 
testValidTypeAliasValues] +from typing import TypeVar, Generic, List + +T = TypeVar("T", int, str) +S = TypeVar("S", int, bytes) + +class C(Generic[T]): ... +class D(C[S]): ... # E: Invalid type argument value for "C" + +U = TypeVar("U") +A = List[C[U]] +x: A[bytes] # E: Value of type variable "T" of "C" cannot be "bytes" + +V = TypeVar("V", bound=int) +class E(Generic[V]): ... +B = List[E[U]] +y: B[str] # E: Type argument "str" of "E" must be a subtype of "int" + +[case testValidTypeAliasValuesMoreRestrictive] +from typing import TypeVar, Generic, List + +T = TypeVar("T") +S = TypeVar("S", int, str) +U = TypeVar("U", bound=int) + +class C(Generic[T]): ... + +A = List[C[S]] +x: A[int] +x_bad: A[bytes] # E: Value of type variable "S" of "A" cannot be "bytes" + +B = List[C[U]] +y: B[int] +y_bad: B[str] # E: Type argument "str" of "B" must be a subtype of "int" + +[case testTupleWithDifferentArgsPy38] +# flags: --python-version 3.8 +NotYet1 = tuple[float] # E: "tuple" is not subscriptable +NotYet2 = tuple[float, float] # E: "tuple" is not subscriptable +NotYet3 = tuple[float, ...] # E: Unexpected "..." \ + # E: "tuple" is not subscriptable +NotYet4 = tuple[float, float, ...] # E: Unexpected "..." \ + # E: "tuple" is not subscriptable +[builtins fixtures/tuple.pyi] + +[case testTupleWithDifferentArgsStub] +# https://github.com/python/mypy/issues/11098 +import tup + +[file tup.pyi] +Correct1 = str | tuple[float, float, str] +Correct2 = tuple[float] | str +Correct3 = tuple[float, ...] | str +Correct4 = tuple[float, str] | str +Correct5 = tuple[int, str] +Correct6 = tuple[int, ...] + +RHSAlias1: type = tuple[int, int] +RHSAlias2: type = tuple[int] +RHSAlias3: type = tuple[int, ...] + +# Wrong: + +WrongTypeElement = str | tuple[float, 1] # E: Invalid type: try using Literal[1] instead? +WrongEllipsis = str | tuple[float, float, ...] # E: Unexpected "..." 
+[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-type-promotion.test b/test-data/unit/check-type-promotion.test index f477a9f2b390..e66153726e7d 100644 --- a/test-data/unit/check-type-promotion.test +++ b/test-data/unit/check-type-promotion.test @@ -54,3 +54,136 @@ def f(x: Union[SupportsFloat, T]) -> Union[SupportsFloat, T]: pass f(0) # should not crash [builtins fixtures/primitives.pyi] [out] + +[case testIntersectionUsingPromotion1] +# flags: --warn-unreachable +from typing import Union + +x: complex = 1 +reveal_type(x) # N: Revealed type is "builtins.complex" +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "builtins.complex" +reveal_type(x) # N: Revealed type is "builtins.complex" + +y: Union[int, float] +if isinstance(y, float): + reveal_type(y) # N: Revealed type is "builtins.float" +else: + reveal_type(y) # N: Revealed type is "builtins.int" + +reveal_type(y) # N: Revealed type is "Union[builtins.int, builtins.float]" + +if isinstance(y, int): + reveal_type(y) # N: Revealed type is "builtins.int" +else: + reveal_type(y) # N: Revealed type is "builtins.float" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion2] +# flags: --warn-unreachable +x: complex = 1 +reveal_type(x) # N: Revealed type is "builtins.complex" +if isinstance(x, (int, float)): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" +else: + reveal_type(x) # N: Revealed type is "builtins.complex" + +# Note we make type precise, since type promotions are involved +reveal_type(x) # N: Revealed type is "Union[builtins.complex, builtins.int, builtins.float]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion3] +# flags: --warn-unreachable +x: object +if isinstance(x, int) and isinstance(x, complex): + reveal_type(x) # N: Revealed type is "builtins.int" +if isinstance(x, complex) and isinstance(x, int): + reveal_type(x) # N: Revealed type 
is "builtins.int" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion4] +# flags: --warn-unreachable +x: object +if isinstance(x, int): + if isinstance(x, complex): + reveal_type(x) # N: Revealed type is "builtins.int" + else: + reveal_type(x) # N: Revealed type is "builtins.int" +if isinstance(x, complex): + if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" + else: + reveal_type(x) # N: Revealed type is "builtins.complex" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion5] +# flags: --warn-unreachable +from typing import Union + +x: Union[float, complex] +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.float, builtins.complex]" +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion6] +# flags: --warn-unreachable +from typing import Union + +x: Union[str, complex] +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.str, builtins.complex]" +reveal_type(x) # N: Revealed type is "Union[builtins.str, builtins.int, builtins.complex]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion7] +# flags: --warn-unreachable +from typing import Union + +x: Union[int, float, complex] +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.float, builtins.complex]" + +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" + +if isinstance(x, float): + reveal_type(x) # N: Revealed type is "builtins.float" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.complex]" + +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, 
builtins.complex]" + +if isinstance(x, complex): + reveal_type(x) # N: Revealed type is "builtins.complex" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" + +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion8] +# flags: --warn-unreachable +from typing import Union + +x: Union[int, float, complex] +if isinstance(x, (int, float)): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" +else: + reveal_type(x) # N: Revealed type is "builtins.complex" +if isinstance(x, (int, complex)): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.complex]" +else: + reveal_type(x) # N: Revealed type is "builtins.float" +if isinstance(x, (float, complex)): + reveal_type(x) # N: Revealed type is "Union[builtins.float, builtins.complex]" +else: + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 4c68b7b692ff..1f200d168a55 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -221,6 +221,19 @@ reveal_type(d) # N: Revealed type is "TypedDict('__main__.D', {'y': builtins.in [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] +[case testCannotCreateTypedDictWithDecoratedFunction] +# flags: --disallow-any-expr +# https://github.com/python/mypy/issues/13066 +from typing import TypedDict +class D(TypedDict): + @classmethod # E: Invalid statement in TypedDict definition; expected "field_name: field_type" + def m(self) -> D: + pass +d = D() +reveal_type(d) # N: Revealed type is "TypedDict('__main__.D', {})" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + [case testTypedDictWithClassmethodAlternativeConstructorDoesNotCrash] # https://github.com/python/mypy/issues/5653 from typing import TypedDict 
@@ -880,17 +893,27 @@ B = TypedDict('B', {'@type': Literal['b-type'], 'b': int}) c: Union[A, B] = {'@type': 'a-type', 'a': 'Test'} reveal_type(c) # N: Revealed type is "Union[TypedDict('__main__.A', {'@type': Literal['a-type'], 'a': builtins.str}), TypedDict('__main__.B', {'@type': Literal['b-type'], 'b': builtins.int})]" -[builtins fixtures/tuple.pyi] +[builtins fixtures/dict.pyi] -[case testTypedDictUnionAmbiguousCase] +[case testTypedDictUnionAmbiguousCaseBothMatch] from typing import Union, Mapping, Any, cast from typing_extensions import TypedDict, Literal -A = TypedDict('A', {'@type': Literal['a-type'], 'a': str}) -B = TypedDict('B', {'@type': Literal['a-type'], 'a': str}) +A = TypedDict('A', {'@type': Literal['a-type'], 'value': str}) +B = TypedDict('B', {'@type': Literal['b-type'], 'value': str}) -c: Union[A, B] = {'@type': 'a-type', 'a': 'Test'} # E: Type of TypedDict is ambiguous, could be any of ("A", "B") \ - # E: Incompatible types in assignment (expression has type "Dict[str, str]", variable has type "Union[A, B]") +c: Union[A, B] = {'@type': 'a-type', 'value': 'Test'} +[builtins fixtures/dict.pyi] + +[case testTypedDictUnionAmbiguousCaseNoMatch] +from typing import Union, Mapping, Any, cast +from typing_extensions import TypedDict, Literal + +A = TypedDict('A', {'@type': Literal['a-type'], 'value': int}) +B = TypedDict('B', {'@type': Literal['b-type'], 'value': int}) + +c: Union[A, B] = {'@type': 'a-type', 'value': 'Test'} # E: Type of TypedDict is ambiguous, none of ("A", "B") matches cleanly \ + # E: Incompatible types in assignment (expression has type "Dict[str, str]", variable has type "Union[A, B]") [builtins fixtures/dict.pyi] -- Use dict literals @@ -1481,7 +1504,7 @@ class G(Generic[T]): yb: G[int] # E: Type argument "int" of "G" must be a subtype of "M" yg: G[M] -z: int = G[M]().x['x'] +z: int = G[M]().x['x'] # type: ignore[used-before-def] class M(TypedDict): x: int @@ -2007,7 +2030,193 @@ v = {union: 2} # E: Expected TypedDict key to be 
string literal num2: Literal['num'] v = {num2: 2} bad2: Literal['bad'] -v = {bad2: 2} # E: Extra key "bad" for TypedDict "Value" +v = {bad2: 2} # E: Missing key "num" for TypedDict "Value" \ + # E: Extra key "bad" for TypedDict "Value" + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_unionWithList] +from __future__ import annotations +from typing import assert_type, TypedDict, Union +from typing_extensions import final + +@final +class D(TypedDict): + foo: int + + +d_or_list: D | list[str] + +if 'foo' in d_or_list: + assert_type(d_or_list, Union[D, list[str]]) +elif 'bar' in d_or_list: + assert_type(d_or_list, list[str]) +else: + assert_type(d_or_list, list[str]) + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_total] +from __future__ import annotations +from typing import assert_type, Literal, TypedDict, TypeVar, Union +from typing_extensions import final + +@final +class D1(TypedDict): + foo: int + + +@final +class D2(TypedDict): + bar: int + + +d: D1 | D2 + +if 'foo' in d: + assert_type(d, D1) +else: + assert_type(d, D2) + +foo_or_bar: Literal['foo', 'bar'] +if foo_or_bar in d: + assert_type(d, Union[D1, D2]) +else: + assert_type(d, Union[D1, D2]) + +foo_or_invalid: Literal['foo', 'invalid'] +if foo_or_invalid in d: + assert_type(d, D1) + # won't narrow 'foo_or_invalid' + assert_type(foo_or_invalid, Literal['foo', 'invalid']) +else: + assert_type(d, Union[D1, D2]) + # won't narrow 'foo_or_invalid' + assert_type(foo_or_invalid, Literal['foo', 'invalid']) + +TD = TypeVar('TD', D1, D2) + +def f(arg: TD) -> None: + value: int + if 'foo' in arg: + assert_type(arg['foo'], int) + else: + assert_type(arg['bar'], int) + + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_final] +# flags: --warn-unreachable +from __future__ import annotations +from typing import 
assert_type, TypedDict, Union +from typing_extensions import final + +@final +class DFinal(TypedDict): + foo: int + + +class DNotFinal(TypedDict): + bar: int + + +d_not_final: DNotFinal + +if 'bar' in d_not_final: + assert_type(d_not_final, DNotFinal) +else: + spam = 'ham' # E: Statement is unreachable + +if 'spam' in d_not_final: + assert_type(d_not_final, DNotFinal) +else: + assert_type(d_not_final, DNotFinal) + +d_final: DFinal + +if 'spam' in d_final: + spam = 'ham' # E: Statement is unreachable +else: + assert_type(d_final, DFinal) + +d_union: DFinal | DNotFinal + +if 'foo' in d_union: + assert_type(d_union, Union[DFinal, DNotFinal]) +else: + assert_type(d_union, DNotFinal) + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_partialThroughTotalFalse] +from __future__ import annotations +from typing import assert_type, Literal, TypedDict, Union +from typing_extensions import final + +@final +class DTotal(TypedDict): + required_key: int + + +@final +class DNotTotal(TypedDict, total=False): + optional_key: int + + +d: DTotal | DNotTotal + +if 'required_key' in d: + assert_type(d, DTotal) +else: + assert_type(d, DNotTotal) + +if 'optional_key' in d: + assert_type(d, DNotTotal) +else: + assert_type(d, Union[DTotal, DNotTotal]) + +key: Literal['optional_key', 'required_key'] +if key in d: + assert_type(d, Union[DTotal, DNotTotal]) +else: + assert_type(d, Union[DTotal, DNotTotal]) + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_partialThroughNotRequired] +from __future__ import annotations +from typing import assert_type, Required, NotRequired, TypedDict, Union +from typing_extensions import final + +@final +class D1(TypedDict): + required_key: Required[int] + optional_key: NotRequired[int] + + +@final +class D2(TypedDict): + abc: int + xyz: int + + +d: D1 | D2 + +if 'required_key' in d: + assert_type(d, D1) +else: + assert_type(d, 
D2) + +if 'optional_key' in d: + assert_type(d, D1) +else: + assert_type(d, Union[D1, D2]) [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] @@ -2081,7 +2290,7 @@ reveal_type(foo['baz']) # N: Revealed type is "builtins.list[Any]" from mypy_extensions import TypedDict from typing import Any, List -Foo = TypedDict('Foo', {'bar': Bar, 'baz': Bar}) +Foo = TypedDict('Foo', {'bar': 'Bar', 'baz': 'Bar'}) Bar = List[Any] @@ -2577,3 +2786,90 @@ TD[str](key=0, value=0) # E: Incompatible types (expression has type "int", Typ TD[str]({"key": 0, "value": 0}) # E: Incompatible types (expression has type "int", TypedDict item "value" has type "str") [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] + +[case testTypedDictSelfItemNotAllowed] +from typing import Self, TypedDict, Optional + +class TD(TypedDict): + val: int + next: Optional[Self] # E: Self type cannot be used in TypedDict item type +TDC = TypedDict("TDC", {"val": int, "next": Optional[Self]}) # E: Self type cannot be used in TypedDict item type + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsInferred] +from typing import TypedDict, Dict + +D = TypedDict("D", {"foo": int}, total=False) + +def f(d: Dict[str, D]) -> None: + args = d["a"] + args.update(d.get("b", {})) # OK +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsDeclared] +from typing import TypedDict, Union + +class A(TypedDict, total=False): + name: str +class B(TypedDict, total=False): + name: str + +def foo(data: Union[A, B]) -> None: ... +foo({"name": "Robert"}) # OK +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsEmpty] +from typing import TypedDict, Union + +class Foo(TypedDict, total=False): + foo: str +class Bar(TypedDict, total=False): + bar: str + +def foo(body: Union[Foo, Bar] = {}) -> None: # OK + ... 
+[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsDistinct] +from typing import TypedDict, Union, Literal + +class A(TypedDict): + type: Literal['a'] + value: bool +class B(TypedDict): + type: Literal['b'] + value: str + +Response = Union[A, B] +def method(message: Response) -> None: ... + +method({'type': 'a', 'value': True}) # OK +method({'type': 'b', 'value': 'abc'}) # OK +method({'type': 'a', 'value': 'abc'}) # E: Type of TypedDict is ambiguous, none of ("A", "B") matches cleanly \ + # E: Argument 1 to "method" has incompatible type "Dict[str, str]"; expected "Union[A, B]" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsNested] +from typing import TypedDict, Union + +class A(TypedDict, total=False): + foo: C +class B(TypedDict, total=False): + foo: D +class C(TypedDict, total=False): + c: str +class D(TypedDict, total=False): + d: str + +def foo(data: Union[A, B]) -> None: ... 
+foo({"foo": {"c": "foo"}}) # OK +foo({"foo": {"e": "foo"}}) # E: Type of TypedDict is ambiguous, none of ("A", "B") matches cleanly \ + # E: Argument 1 to "foo" has incompatible type "Dict[str, Dict[str, str]]"; expected "Union[A, B]" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index d8f6cde10441..9afe709ed19b 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -178,7 +178,7 @@ Ts = TypeVarTuple("Ts") B = Ts # E: Type variable "__main__.Ts" is invalid as target for type alias [builtins fixtures/tuple.pyi] -[case testPep646ArrayExample] +[case testTypeVarTuplePep646ArrayExample] from typing import Generic, Tuple, TypeVar, Protocol, NewType from typing_extensions import TypeVarTuple, Unpack @@ -213,7 +213,7 @@ reveal_type(abs(x)) # N: Revealed type is "__main__.Array[__main__.Height, __ma reveal_type(x + x) # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]" [builtins fixtures/tuple.pyi] -[case testPep646ArrayExampleWithDType] +[case testTypeVarTuplePep646ArrayExampleWithDType] from typing import Generic, Tuple, TypeVar, Protocol, NewType from typing_extensions import TypeVarTuple, Unpack @@ -250,7 +250,7 @@ reveal_type(x + x) # N: Revealed type is "__main__.Array[builtins.float, __main [builtins fixtures/tuple.pyi] -[case testPep646ArrayExampleInfer] +[case testTypeVarTuplePep646ArrayExampleInfer] from typing import Generic, Tuple, TypeVar, NewType from typing_extensions import TypeVarTuple, Unpack @@ -265,7 +265,7 @@ class Array(Generic[Unpack[Shape]]): x: Array[float, Height, Width] = Array() [builtins fixtures/tuple.pyi] -[case testPep646TypeConcatenation] +[case testTypeVarTuplePep646TypeConcatenation] from typing import Generic, TypeVar, NewType from typing_extensions import TypeVarTuple, Unpack @@ -295,7 +295,7 @@ d = add_batch_channels(a) reveal_type(d) # N: Revealed 
type is "__main__.Array[__main__.Batch, __main__.Height, __main__.Width, __main__.Channels]" [builtins fixtures/tuple.pyi] -[case testPep646TypeVarConcatenation] +[case testTypeVarTuplePep646TypeVarConcatenation] from typing import Generic, TypeVar, NewType, Tuple from typing_extensions import TypeVarTuple, Unpack @@ -311,7 +311,7 @@ def prefix_tuple( z = prefix_tuple(x=0, y=(True, 'a')) reveal_type(z) # N: Revealed type is "Tuple[builtins.int, builtins.bool, builtins.str]" [builtins fixtures/tuple.pyi] -[case testPep646TypeVarTupleUnpacking] +[case testTypeVarTuplePep646TypeVarTupleUnpacking] from typing import Generic, TypeVar, NewType, Any, Tuple from typing_extensions import TypeVarTuple, Unpack @@ -367,7 +367,7 @@ reveal_type(bad2) # N: Revealed type is "def (x: Tuple[builtins.int, Unpack[bui [builtins fixtures/tuple.pyi] -[case testPep646TypeVarStarArgs] +[case testTypeVarTuplePep646TypeVarStarArgsBasic] from typing import Tuple from typing_extensions import TypeVarTuple, Unpack @@ -382,3 +382,133 @@ def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]: reveal_type(args_to_tuple(1, 'a')) # N: Revealed type is "Tuple[Literal[1]?, Literal['a']?]" [builtins fixtures/tuple.pyi] +[case testTypeVarTuplePep646TypeVarStarArgs] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + +def with_prefix_suffix(*args: Unpack[Tuple[bool, str, Unpack[Ts], int]]) -> Tuple[bool, str, Unpack[Ts], int]: + reveal_type(args) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" + return args + +reveal_type(with_prefix_suffix(True, "bar", "foo", 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]" +reveal_type(with_prefix_suffix(True, "bar", 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, builtins.int]" + +with_prefix_suffix(True, "bar", "foo", 1.0) # E: Argument 4 to "with_prefix_suffix" has incompatible type "float"; expected "int" 
+with_prefix_suffix(True, "bar") # E: Too few arguments for "with_prefix_suffix" + +t = (True, "bar", "foo", 5) +reveal_type(with_prefix_suffix(*t)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, builtins.str, builtins.int]" +reveal_type(with_prefix_suffix(True, *("bar", "foo"), 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]" + +# TODO: handle list case +#reveal_type(with_prefix_suffix(True, "bar", *["foo1", "foo2"], 5)) + +bad_t = (True, "bar") +with_prefix_suffix(*bad_t) # E: Too few arguments for "with_prefix_suffix" + +def foo(*args: Unpack[Ts]) -> None: + reveal_type(with_prefix_suffix(True, "bar", *args, 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" + + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646TypeVarStarArgsFixedLengthTuple] +from typing import Tuple +from typing_extensions import Unpack + +def foo(*args: Unpack[Tuple[int, str]]) -> None: + reveal_type(args) # N: Revealed type is "Tuple[builtins.int, builtins.str]" + +foo(0, "foo") +foo(0, 1) # E: Argument 2 to "foo" has incompatible type "int"; expected "Unpack[Tuple[int, str]]" +foo("foo", "bar") # E: Argument 1 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, str]]" +foo(0, "foo", 1) # E: Invalid number of arguments +foo(0) # E: Invalid number of arguments +foo() # E: Invalid number of arguments +foo(*(0, "foo")) + +# TODO: fix this case to do something sensible. 
+#def foo2(*args: Unpack[Tuple[bool, Unpack[Tuple[int, str]], bool]]) -> None: +# reveal_type(args) + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646TypeVarStarArgsVariableLengthTuple] +from typing import Tuple +from typing_extensions import Unpack + +def foo(*args: Unpack[Tuple[int, ...]]) -> None: + reveal_type(args) # N: Revealed type is "builtins.tuple[builtins.int, ...]" + +foo(0, 1, 2) +# TODO: this should say 'expected "int"' rather than the unpack +foo(0, 1, "bar") # E: Argument 3 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, ...]]" + + +def foo2(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]) -> None: + reveal_type(args) # N: Revealed type is "Tuple[builtins.str, Unpack[builtins.tuple[builtins.int, ...]], builtins.bool, builtins.bool]" + # TODO: generate an error + # reveal_type(args[1]) + +foo2("bar", 1, 2, 3, False, True) +foo2(0, 1, 2, 3, False, True) # E: Argument 1 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2("bar", "bar", 2, 3, False, True) # E: Argument 2 to "foo2" has incompatible type "str"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2("bar", 1, 2, 3, 4, True) # E: Argument 5 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2(*("bar", 1, 2, 3, False, True)) +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646Callable] +from typing import Tuple, Callable +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") + +def call( + target: Callable[[Unpack[Ts]], None], + args: Tuple[Unpack[Ts]], +) -> None: + pass + +def func(arg1: int, arg2: str) -> None: ... +def func2(arg1: int, arg2: int) -> None: ... +def func3(*args: int) -> None: ... + +vargs: Tuple[int, ...] +vargs_str: Tuple[str, ...] 
+ +call(target=func, args=(0, 'foo')) +call(target=func, args=('bar', 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[object, str], None]" +call(target=func, args=(True, 'foo', 0)) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func, args=(0, 0, 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func, args=vargs) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" + +# NOTE: This behavior may be a bit contentious, it is maybe inconsistent with our handling of +# PEP646 but consistent with our handling of callable constraints. +call(target=func2, args=vargs) # E: Argument "target" to "call" has incompatible type "Callable[[int, int], None]"; expected "Callable[[VarArg(int)], None]" +call(target=func3, args=vargs) +call(target=func3, args=(0,1)) +call(target=func3, args=(0,'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func3, args=vargs_str) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646CallableWithPrefixSuffix] +from typing import Tuple, Callable +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") + +def call_prefix( + target: Callable[[bytes, Unpack[Ts]], None], + args: Tuple[Unpack[Ts]], +) -> None: + pass + +def func_prefix(arg0: bytes, arg1: int, arg2: str) -> None: ... +def func2_prefix(arg0: str, arg1: int, arg2: str) -> None: ... 
+ +call_prefix(target=func_prefix, args=(0, 'foo')) +call_prefix(target=func2_prefix, args=(0, 'foo')) # E: Argument "target" to "call_prefix" has incompatible type "Callable[[str, int, str], None]"; expected "Callable[[bytes, int, str], None]" +[builtins fixtures/tuple.pyi] + diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index a561c29e54f7..cabc28e786b2 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -971,14 +971,14 @@ if x: [builtins fixtures/dict.pyi] [out] -[case testUnpackUnionNoCrashOnPartialNoneList] +[case testUnpackUnionNoCrashOnPartialList] # flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any d: Dict[str, Tuple[List[Tuple[str, str]], str]] -x, _ = d.get(a, ([], [])) -reveal_type(x) # N: Revealed type is "Union[builtins.list[Tuple[builtins.str, builtins.str]], builtins.list[]]" +x, _ = d.get(a, ([], "")) +reveal_type(x) # N: Revealed type is "builtins.list[Tuple[builtins.str, builtins.str]]" for y in x: pass [builtins fixtures/dict.pyi] @@ -1171,6 +1171,25 @@ def foo( foo([1]) [builtins fixtures/list.pyi] +[case testGenericUnionMemberWithTypeVarConstraints] + +from typing import Generic, TypeVar, Union + +T = TypeVar('T', str, int) + +class C(Generic[T]): ... + +def f(s: Union[T, C[T]]) -> T: ... 
+ +ci: C[int] +cs: C[str] + +reveal_type(f(1)) # N: Revealed type is "builtins.int" +reveal_type(f('')) # N: Revealed type is "builtins.str" +reveal_type(f(ci)) # N: Revealed type is "builtins.int" +reveal_type(f(cs)) # N: Revealed type is "builtins.str" + + [case testNestedInstanceTypeAliasUnsimplifiedUnion] from typing import TypeVar, Union, Iterator, List, Any T = TypeVar("T") diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index 00ac7df320d2..d598fe13b7e9 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -38,6 +38,13 @@ def test(*t: type) -> None: [case testCallingVarArgsFunction] +def f( *a: 'A') -> None: pass + +def g() -> None: pass + +class A: pass +class B(A): pass +class C: pass a = None # type: A b = None # type: B @@ -51,17 +58,14 @@ f() f(a) f(b) f(a, b, a, b) +[builtins fixtures/list.pyi] -def f( *a: 'A') -> None: pass - -def g() -> None: pass +[case testCallingVarArgsFunctionWithAlsoNormalArgs] +def f(a: 'C', *b: 'A') -> None: pass class A: pass class B(A): pass class C: pass -[builtins fixtures/list.pyi] - -[case testCallingVarArgsFunctionWithAlsoNormalArgs] a = None # type: A b = None # type: B @@ -73,16 +77,16 @@ f(c, a, b, c) # E: Argument 4 to "f" has incompatible type "C"; expected "A" f(c) f(c, a) f(c, b, b, a, b) +[builtins fixtures/list.pyi] -def f(a: 'C', *b: 'A') -> None: pass +[case testCallingVarArgsFunctionWithDefaultArgs] +# flags: --implicit-optional --no-strict-optional +def f(a: 'C' = None, *b: 'A') -> None: + pass class A: pass class B(A): pass class C: pass -[builtins fixtures/list.pyi] - -[case testCallingVarArgsFunctionWithDefaultArgs] -# flags: --implicit-optional --no-strict-optional a = None # type: A b = None # type: B @@ -95,13 +99,6 @@ f() f(c) f(c, a) f(c, b, b, a, b) - -def f(a: 'C' = None, *b: 'A') -> None: - pass - -class A: pass -class B(A): pass -class C: pass [builtins fixtures/list.pyi] [case testCallVarargsFunctionWithIterable] @@ -156,6 
+153,14 @@ f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [case testTypeInferenceWithCalleeVarArgs] from typing import TypeVar T = TypeVar('T') + +def f( *a: T) -> T: + pass + +class A: pass +class B(A): pass +class C: pass + a = None # type: A b = None # type: B c = None # type: C @@ -180,13 +185,6 @@ if int(): o = f(a, b, o) if int(): c = f(c) - -def f( *a: T) -> T: - pass - -class A: pass -class B(A): pass -class C: pass [builtins fixtures/list.pyi] [case testTypeInferenceWithCalleeVarArgsAndDefaultArgs] @@ -195,6 +193,11 @@ T = TypeVar('T') a = None # type: A o = None # type: object +def f(a: T, b: T = None, *c: T) -> T: + pass + +class A: pass + if int(): a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): @@ -210,11 +213,6 @@ if int(): a = f(a, a) if int(): a = f(a, a, a) - -def f(a: T, b: T = None, *c: T) -> T: - pass - -class A: pass [builtins fixtures/list.pyi] @@ -224,27 +222,31 @@ class A: pass [case testCallingWithListVarArgs] from typing import List, Any, cast + +def f(a: 'A', b: 'B') -> None: + pass + +class A: pass +class B: pass + aa = None # type: List[A] ab = None # type: List[B] a = None # type: A b = None # type: B -f(*aa) # Fail +f(*aa) # E: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" f(a, *ab) # Ok f(a, b) (cast(Any, f))(*aa) # IDEA: Move to check-dynamic? (cast(Any, f))(a, *ab) # IDEA: Move to check-dynamic? 
- -def f(a: 'A', b: 'B') -> None: - pass +[builtins fixtures/list.pyi] +[case testCallingWithTupleVarArgs] +def f(a: 'A', b: 'B', c: 'C') -> None: pass class A: pass class B: pass -[builtins fixtures/list.pyi] -[out] -main:7: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" - -[case testCallingWithTupleVarArgs] +class C: pass +class CC(C): pass a = None # type: A b = None # type: B @@ -262,27 +264,20 @@ f(*(a, b, c)) f(a, *(b, c)) f(a, b, *(c,)) f(a, *(b, cc)) - -def f(a: 'A', b: 'B', c: 'C') -> None: pass - -class A: pass -class B: pass -class C: pass -class CC(C): pass [builtins fixtures/tuple.pyi] [case testInvalidVarArg] +def f(a: 'A') -> None: + pass + +class A: pass + a = None # type: A f(*None) f(*a) # E: List or tuple expected as variadic arguments f(*(a,)) - -def f(a: 'A') -> None: - pass - -class A: pass [builtins fixtures/tuple.pyi] @@ -292,34 +287,33 @@ class A: pass [case testCallingVarArgsFunctionWithListVarArgs] from typing import List + +def f(a: 'A', *b: 'B') -> None: pass +def g(a: 'A', *b: 'A') -> None: pass +class A: pass +class B: pass + aa, ab, a, b = None, None, None, None # type: (List[A], List[B], A, B) -f(*aa) # Fail -f(a, *aa) # Fail -f(b, *ab) # Fail -f(a, a, *ab) # Fail -f(a, b, *aa) # Fail -f(b, b, *ab) # Fail -g(*ab) # Fail +f(*aa) # E: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" +f(a, *aa) # E: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" +f(b, *ab) # E: Argument 1 to "f" has incompatible type "B"; expected "A" +f(a, a, *ab) # E: Argument 2 to "f" has incompatible type "A"; expected "B" +f(a, b, *aa) # E: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" +f(b, b, *ab) # E: Argument 1 to "f" has incompatible type "B"; expected "A" +g(*ab) # E: Argument 1 to "g" has incompatible type "*List[B]"; expected "A" f(a, *ab) f(a, b, *ab) f(a, b, b, *ab) g(*aa) +[builtins fixtures/list.pyi] +[case testCallingVarArgsFunctionWithTupleVarArgs] +def f(a: 'A', *b: 'B') 
-> None: + pass -def f(a: 'A', *b: 'B') -> None: pass -def g(a: 'A', *b: 'A') -> None: pass class A: pass class B: pass -[builtins fixtures/list.pyi] -[out] -main:3: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" -main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" -main:5: error: Argument 1 to "f" has incompatible type "B"; expected "A" -main:6: error: Argument 2 to "f" has incompatible type "A"; expected "B" -main:7: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" -main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A" -main:9: error: Argument 1 to "g" has incompatible type "*List[B]"; expected "A" - -[case testCallingVarArgsFunctionWithTupleVarArgs] +class C: pass +class CC(C): pass a, b, c, cc = None, None, None, None # type: (A, B, C, CC) @@ -335,14 +329,6 @@ f(*()) # E: Too few arguments for "f" f(*(a, b, b)) f(a, *(b, b)) f(a, b, *(b,)) - -def f(a: 'A', *b: 'B') -> None: - pass - -class A: pass -class B: pass -class C: pass -class CC(C): pass [builtins fixtures/list.pyi] @@ -352,32 +338,21 @@ class CC(C): pass [case testDynamicVarArg] from typing import Any +def f(a: 'A') -> None: pass +def g(a: 'A', *b: 'A') -> None: pass +class A: pass + d, a = None, None # type: (Any, A) -f(a, a, *d) # Fail +f(a, a, *d) # E: Too many arguments for "f" f(a, *d) # Ok f(*d) # Ok g(*d) g(a, *d) g(a, a, *d) - -def f(a: 'A') -> None: pass -def g(a: 'A', *b: 'A') -> None: pass -class A: pass [builtins fixtures/list.pyi] -[out] -main:3: error: Too many arguments for "f" - [case testListVarArgsAndSubtyping] from typing import List -aa = None # type: List[A] -ab = None # type: List[B] - -g(*aa) # E: Argument 1 to "g" has incompatible type "*List[A]"; expected "B" -f(*aa) -f(*ab) -g(*ab) - def f( *a: 'A') -> None: pass @@ -386,11 +361,25 @@ def g( *a: 'B') -> None: class A: pass class B(A): pass + +aa = None # type: List[A] +ab = None # type: List[B] + +g(*aa) # E: Argument 1 to "g" has 
incompatible type "*List[A]"; expected "B" +f(*aa) +f(*ab) +g(*ab) [builtins fixtures/list.pyi] [case testCallerVarArgsAndDefaultArgs] # flags: --implicit-optional --no-strict-optional +def f(a: 'A', b: 'B' = None, *c: 'B') -> None: + pass + +class A: pass +class B: pass + a, b = None, None # type: (A, B) f(*()) # E: Too few arguments for "f" f(a, *[a]) # E: Argument 2 to "f" has incompatible type "*List[A]"; expected "Optional[B]" \ @@ -403,12 +392,6 @@ f(*(a, b, b, b)) f(a, *[]) f(a, *[b]) f(a, *[b, b]) - -def f(a: 'A', b: 'B' = None, *c: 'B') -> None: - pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testVarArgsAfterKeywordArgInCall1] @@ -528,6 +511,13 @@ def f(a: B, *b: B) -> B: pass from typing import List, TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') + +def f(a: S, *b: T) -> Tuple[S, T]: + pass + +class A: pass +class B: pass + a, b, aa = None, None, None # type: (A, B, List[A]) if int(): @@ -551,18 +541,18 @@ if int(): b, a = f(b, *aa) if int(): b, a = f(b, a, *aa) - -def f(a: S, *b: T) -> Tuple[S, T]: - pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallerVarArgsTupleWithTypeInference] from typing import TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') + +def f(a: S, b: T) -> Tuple[S, T]: pass + +class A: pass +class B: pass + a, b = None, None # type: (A, B) if int(): @@ -579,11 +569,6 @@ if int(): a, b = f(*(a, b)) if int(): a, b = f(a, *(b,)) - -def f(a: S, b: T) -> Tuple[S, T]: pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallerVarargsAndComplexTypeInference] @@ -595,6 +580,13 @@ ao = None # type: List[object] aa = None # type: List[A] ab = None # type: List[B] +class G(Generic[T]): + def f(self, *a: S) -> Tuple[List[S], List[T]]: + pass + +class A: pass +class B: pass + if int(): a, aa = G().f(*[a]) \ # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A") \ @@ -621,13 +613,6 @@ if int(): # E: Incompatible types in assignment 
(expression has type "List[]", variable has type "List[A]") \ # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant - -class G(Generic[T]): - def f(self, *a: S) -> Tuple[List[S], List[T]]: - pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallerTupleVarArgsAndGenericCalleeVarArg] diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 2ea7f07da3bc..c2e98cdb74f9 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1505,3 +1505,96 @@ def f(): [out] a.py:2: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs == Return code: 0 + +[case testCustomTypeshedDirFilePassedExplicitly] +# cmd: mypy --custom-typeshed-dir dir m.py dir/stdlib/foo.pyi +[file m.py] +1() +[file dir/stdlib/abc.pyi] +1() # Errors are not reported from typeshed by default +[file dir/stdlib/builtins.pyi] +class object: pass +class str(object): pass +class int(object): pass +class list: pass +class dict: pass +[file dir/stdlib/sys.pyi] +[file dir/stdlib/types.pyi] +[file dir/stdlib/typing.pyi] +[file dir/stdlib/mypy_extensions.pyi] +[file dir/stdlib/typing_extensions.pyi] +[file dir/stdlib/foo.pyi] +1() # Errors are reported if the file was explicitly passed on the command line +[file dir/stdlib/VERSIONS] +[out] +dir/stdlib/foo.pyi:1: error: "int" not callable +m.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly1] +# cmd: mypy $CWD/pypath/foo.py +[file pypath/foo.py] +1() +[out] +pypath/foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly2] +# cmd: mypy pypath/foo.py +[file pypath/foo.py] +1() +[out] +pypath/foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly3] +# cmd: mypy -p foo +# cwd: pypath +[file pypath/foo/__init__.py] +1() +[file pypath/foo/m.py] +1() +[out] +foo/m.py:1: error: "int" 
not callable +foo/__init__.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly4] +# cmd: mypy -m foo +# cwd: pypath +[file pypath/foo.py] +1() +[out] +foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly5] +# cmd: mypy -m foo.m +# cwd: pypath +[file pypath/foo/__init__.py] +1() # TODO: Maybe this should generate errors as well? But how would we decide? +[file pypath/foo/m.py] +1() +[out] +foo/m.py:1: error: "int" not callable + +[case testCmdlineCfgEnableErrorCodeTrailingComma] +# cmd: mypy . +[file mypy.ini] +\[mypy] +enable_error_code = + truthy-bool, + redundant-expr, +[out] + +[case testCmdlineCfgDisableErrorCodeTrailingComma] +# cmd: mypy . +[file mypy.ini] +\[mypy] +disable_error_code = + misc, + override, +[out] + +[case testCmdlineCfgAlwaysTrueTrailingComma] +# cmd: mypy . +[file mypy.ini] +\[mypy] +always_true = + MY_VAR, +[out] diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index 56966b2f740c..c72dc3a32bc7 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -214,6 +214,20 @@ mypy-daemon: error: Missing target module, package, files, or command. $ dmypy stop Daemon stopped +[case testDaemonWarningSuccessExitCode-posix] +$ dmypy run -- foo.py --follow-imports=error +Daemon started +foo.py:2: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs +Success: no issues found in 1 source file +$ echo $? +0 +$ dmypy stop +Daemon stopped +[file foo.py] +def foo(): + a: int = 1 + print(a + "2") + -- this is carefully constructed to be able to break if the quickstart system lets -- something through incorrectly. 
in particular, the files need to have the same size [case testDaemonQuickstart] diff --git a/test-data/unit/errorstream.test b/test-data/unit/errorstream.test index 8a73748d27ff..46af433f8916 100644 --- a/test-data/unit/errorstream.test +++ b/test-data/unit/errorstream.test @@ -36,14 +36,14 @@ import b def f() -> int: reveal_type(b.x) return b.x -y = 0 + 0 +y = 0 + int() [file b.py] import a def g() -> int: reveal_type(a.y) return a.y 1 / '' -x = 1 + 1 +x = 1 + int() [out] ==== Errors flushed ==== diff --git a/test-data/unit/fine-grained-attr.test b/test-data/unit/fine-grained-attr.test index fd7c97da0662..3fd40b774c7b 100644 --- a/test-data/unit/fine-grained-attr.test +++ b/test-data/unit/fine-grained-attr.test @@ -46,3 +46,37 @@ A.__attrs_attrs__.b [out] == + +[case magicAttributeConsistency2-only_when_cache] +[file c.py] +import attr + +@attr.s +class Entry: + var: int = attr.ib() +[builtins fixtures/attr.pyi] + +[file m.py] +from typing import Any, ClassVar, Protocol +from c import Entry + +class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + +def func(e: AttrsInstance) -> None: ... +func(Entry(2)) + +[file m.py.2] +from typing import Any, ClassVar, Protocol +from c import Entry + +class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + +def func(e: AttrsInstance) -> int: + return 2 # Change return type to force reanalysis + +func(Entry(2)) + +[out] +== diff --git a/test-data/unit/fine-grained-follow-imports.test b/test-data/unit/fine-grained-follow-imports.test index ebe8b86b37ab..22f2a7895cf9 100644 --- a/test-data/unit/fine-grained-follow-imports.test +++ b/test-data/unit/fine-grained-follow-imports.test @@ -769,3 +769,80 @@ from . 
import mod3 == main.py:1: error: Cannot find implementation or library stub for module named "pkg" main.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports + +[case testNewImportCycleTypeVarBound] +# flags: --follow-imports=normal +# cmd: mypy main.py +# cmd2: mypy other.py + +[file main.py] +# empty + +[file other.py.2] +import trio + +[file trio/__init__.py.2] +from typing import TypeVar +import trio +from . import abc as abc + +T = TypeVar("T", bound=trio.abc.A) + +[file trio/abc.py.2] +import trio +class A: ... +[out] +== + +[case testNewImportCycleTupleBase] +# flags: --follow-imports=normal +# cmd: mypy main.py +# cmd2: mypy other.py + +[file main.py] +# empty + +[file other.py.2] +import trio + +[file trio/__init__.py.2] +from typing import TypeVar, Tuple +import trio +from . import abc as abc + +class C(Tuple[trio.abc.A, trio.abc.A]): ... + +[file trio/abc.py.2] +import trio +class A: ... +[builtins fixtures/tuple.pyi] +[out] +== + +[case testNewImportCycleTypedDict] +# flags: --follow-imports=normal +# cmd: mypy main.py +# cmd2: mypy other.py + +[file main.py] +# empty + +[file other.py.2] +import trio + +[file trio/__init__.py.2] +from typing import TypeVar +from typing_extensions import TypedDict +import trio +from . import abc as abc + +class C(TypedDict): + x: trio.abc.A + y: trio.abc.A + +[file trio/abc.py.2] +import trio +class A: ... 
+[builtins fixtures/dict.pyi] +[out] +== diff --git a/test-data/unit/fine-grained-inspect.test b/test-data/unit/fine-grained-inspect.test index a52db3959633..8574477d8272 100644 --- a/test-data/unit/fine-grained-inspect.test +++ b/test-data/unit/fine-grained-inspect.test @@ -52,8 +52,8 @@ class Meta(type): == {"C": ["meth", "x"]} {"C": ["meth", "x"], "Meta": ["y"], "type": ["__init__"]} -{} -{"object": ["__init__"]} +{"function": ["__name__"]} +{"function": ["__name__"], "object": ["__init__"]} [case testInspectDefBasic] # inspect2: --show=definition foo.py:5:5 diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 32c4ff2eecf0..9f22dc9ab7ac 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -1586,11 +1586,11 @@ class A: [file b.py.3] 2 [out] -a.py:3: error: Method must have at least one argument +a.py:3: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:3: error: Method must have at least one argument +a.py:3: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:3: error: Method must have at least one argument +a.py:3: error: Method must have at least one argument. Did you forget the "self" argument? [case testBaseClassDeleted] import m @@ -1809,8 +1809,8 @@ def f() -> Iterator[None]: [typing fixtures/typing-medium.pyi] [builtins fixtures/list.pyi] [triggered] -2: , __main__ -3: , __main__, a +2: , , __main__ +3: , , __main__, a [out] main:2: note: Revealed type is "contextlib.GeneratorContextManager[None]" == @@ -2007,11 +2007,11 @@ class A: class A: def foo(self) -> int: pass [out] -a.py:2: error: Method must have at least one argument +a.py:2: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:2: error: Method must have at least one argument +a.py:2: error: Method must have at least one argument. Did you forget the "self" argument? 
== -a.py:2: error: Method must have at least one argument +a.py:2: error: Method must have at least one argument. Did you forget the "self" argument? == [case testPreviousErrorInMethodSemanal2] @@ -3124,7 +3124,6 @@ whatever: int [out] == b.py:2: error: Name "c.M" is not defined -a.py:3: error: "Type[B]" has no attribute "x" [case testFixMissingMetaclass] import a @@ -3143,7 +3142,6 @@ class M(type): x: int [out] b.py:2: error: Name "c.M" is not defined -a.py:3: error: "Type[B]" has no attribute "x" == [case testGoodMetaclassSpoiled] @@ -5525,11 +5523,13 @@ a.py:5: error: Argument 1 to "f" has incompatible type "C"; expected "int" import a from typing import Generic -Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass + +Alias = C[C[a.T]] + def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass @@ -5542,25 +5542,27 @@ def T() -> None: pass [out] == -main:4: error: "C" expects no type arguments, but 1 given -main:4: error: Function "a.T" is not valid as a type -main:4: note: Perhaps you need "Callable[...]" or a callback protocol? -main:6: error: Free type variable expected in Generic[...] -main:7: error: Function "a.T" is not valid as a type -main:7: note: Perhaps you need "Callable[...]" or a callback protocol? -main:10: error: Function "a.T" is not valid as a type -main:10: note: Perhaps you need "Callable[...]" or a callback protocol? -main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 +main:5: error: Free type variable expected in Generic[...] +main:6: error: Function "a.T" is not valid as a type +main:6: note: Perhaps you need "Callable[...]" or a callback protocol? +main:9: error: "C" expects no type arguments, but 1 given +main:9: error: Function "a.T" is not valid as a type +main:9: note: Perhaps you need "Callable[...]" or a callback protocol? +main:12: error: Function "a.T" is not valid as a type +main:12: note: Perhaps you need "Callable[...]" or a callback protocol? 
+main:12: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeTypeVarToModule] import a from typing import Generic -Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass + +Alias = C[C[a.T]] + def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass @@ -5574,15 +5576,15 @@ import T [out] == == -main:4: error: "C" expects no type arguments, but 1 given -main:4: error: Module "T" is not valid as a type -main:4: note: Perhaps you meant to use a protocol matching the module structure? -main:6: error: Free type variable expected in Generic[...] -main:7: error: Module "T" is not valid as a type -main:7: note: Perhaps you meant to use a protocol matching the module structure? -main:10: error: Module "T" is not valid as a type -main:10: note: Perhaps you meant to use a protocol matching the module structure? -main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 +main:5: error: Free type variable expected in Generic[...] +main:6: error: Module "T" is not valid as a type +main:6: note: Perhaps you meant to use a protocol matching the module structure? +main:9: error: "C" expects no type arguments, but 1 given +main:9: error: Module "T" is not valid as a type +main:9: note: Perhaps you meant to use a protocol matching the module structure? +main:12: error: Module "T" is not valid as a type +main:12: note: Perhaps you meant to use a protocol matching the module structure? +main:12: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeClassToModule] @@ -5614,11 +5616,13 @@ main:8: note: Perhaps you meant to use a protocol matching the module structure? 
import a from typing import Generic -Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass + +Alias = C[C[a.T]] + def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass @@ -5630,9 +5634,9 @@ from typing import TypeVar T = int [out] == -main:4: error: "C" expects no type arguments, but 1 given -main:6: error: Free type variable expected in Generic[...] -main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 +main:5: error: Free type variable expected in Generic[...] +main:9: error: "C" expects no type arguments, but 1 given +main:12: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeTypeAliasToModule] @@ -8201,6 +8205,7 @@ x = 1 == [case testIdLikeDecoForwardCrashAlias] +# flags: --disable-error-code used-before-def import b [file b.py] from typing import Callable, Any, TypeVar @@ -10130,3 +10135,209 @@ b.py:2: error: "int" not callable a.py:1: error: Unsupported operand types for + ("int" and "str") 1 + '' ^~ + +[case testTypingSelfFine] +import m +[file lib.py] +from typing import Any + +class C: + def meth(self, other: Any) -> C: ... +[file lib.py.2] +from typing import Self + +class C: + def meth(self, other: Self) -> Self: ... + +[file n.py] +import lib +class D(lib.C): ... +[file m.py] +from n import D +d = D() +def test() -> None: + d.meth(42) +[out] +== +m.py:4: error: Argument 1 to "meth" of "C" has incompatible type "int"; expected "D" + +[case testNoNestedDefinitionCrash] +import m +[file m.py] +from typing import Any, TYPE_CHECKING + +class C: + if TYPE_CHECKING: + def __init__(self, **kw: Any): ... + +C +[file m.py.2] +from typing import Any, TYPE_CHECKING + +class C: + if TYPE_CHECKING: + def __init__(self, **kw: Any): ... + +C +# change +[builtins fixtures/dict.pyi] +[out] +== + +[case testNoNestedDefinitionCrash2] +import m +[file m.py] +from typing import Any + +class C: + try: + def __init__(self, **kw: Any): ... 
+ except: + pass + +C +[file m.py.2] +from typing import Any + +class C: + try: + def __init__(self, **kw: Any): ... + except: + pass + +C +# change +[builtins fixtures/dict.pyi] +[out] +== + +[case testNamedTupleNestedCrash] +import m +[file m.py] +from typing import NamedTuple + +class NT(NamedTuple): + class C: ... + x: int + y: int + +[file m.py.2] +from typing import NamedTuple + +class NT(NamedTuple): + class C: ... + x: int + y: int +# change +[builtins fixtures/tuple.pyi] +[out] +m.py:4: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" +== +m.py:4: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" + +[case testNamedTupleNestedClassRecheck] +import n +[file n.py] +import m +x: m.NT +[file m.py] +from typing import NamedTuple +from f import A + +class NT(NamedTuple): + class C: ... + x: int + y: A + +[file f.py] +A = int +[file f.py.2] +A = str +[builtins fixtures/tuple.pyi] +[out] +m.py:5: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" +== +m.py:5: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" + +[case testTypedDictNestedClassRecheck] +import n +[file n.py] +import m +x: m.TD +[file m.py] +from typing_extensions import TypedDict +from f import A + +class TD(TypedDict): + class C: ... 
+ x: int + y: A + +[file f.py] +A = int +[file f.py.2] +A = str +[builtins fixtures/dict.pyi] +[out] +m.py:5: error: Invalid statement in TypedDict definition; expected "field_name: field_type" +== +m.py:5: error: Invalid statement in TypedDict definition; expected "field_name: field_type" + +[case testTypeAliasWithNewStyleUnionChangedToVariable] +# flags: --python-version 3.10 +import a + +[file a.py] +from b import C, D +A = C | D +a: A +reveal_type(a) + +[file b.py] +C = int +D = str + +[file b.py.2] +C = "x" +D = "y" + +[file b.py.3] +C = str +D = int +[out] +a.py:4: note: Revealed type is "Union[builtins.int, builtins.str]" +== +a.py:2: error: Unsupported left operand type for | ("str") +a.py:3: error: Variable "a.A" is not valid as a type +a.py:3: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +a.py:4: note: Revealed type is "A?" +== +a.py:4: note: Revealed type is "Union[builtins.str, builtins.int]" + +[case testUnionOfSimilarCallablesCrash] +import b + +[file b.py] +from a import x + +[file m.py] +from typing import Union, TypeVar + +T = TypeVar("T") +S = TypeVar("S") +def foo(x: T, y: S) -> Union[T, S]: ... +def f(x: int) -> int: ... +def g(*x: int) -> int: ... 
+ +[file a.py] +from m import f, g, foo +x = foo(f, g) + +[file a.py.2] +from m import f, g, foo +x = foo(f, g) +reveal_type(x) +[builtins fixtures/tuple.pyi] +[out] +== +a.py:3: note: Revealed type is "Union[def (x: builtins.int) -> builtins.int, def (*x: builtins.int) -> builtins.int]" diff --git a/test-data/unit/fixtures/__init_subclass__.pyi b/test-data/unit/fixtures/__init_subclass__.pyi index c5a17f60688e..b4618c28249e 100644 --- a/test-data/unit/fixtures/__init_subclass__.pyi +++ b/test-data/unit/fixtures/__init_subclass__.pyi @@ -11,3 +11,4 @@ class int: pass class bool: pass class str: pass class function: pass +class dict: pass diff --git a/test-data/unit/fixtures/__new__.pyi b/test-data/unit/fixtures/__new__.pyi index bb4788df8fe9..401de6fb9cd1 100644 --- a/test-data/unit/fixtures/__new__.pyi +++ b/test-data/unit/fixtures/__new__.pyi @@ -16,3 +16,4 @@ class int: pass class bool: pass class str: pass class function: pass +class dict: pass diff --git a/test-data/unit/fixtures/alias.pyi b/test-data/unit/fixtures/alias.pyi index 08b145f4efd1..2ec7703f00c4 100644 --- a/test-data/unit/fixtures/alias.pyi +++ b/test-data/unit/fixtures/alias.pyi @@ -12,3 +12,5 @@ class str: pass class function: pass bytes = str + +class dict: pass diff --git a/test-data/unit/fixtures/any.pyi b/test-data/unit/fixtures/any.pyi index d6d90b7b3e98..b1f8d83bf524 100644 --- a/test-data/unit/fixtures/any.pyi +++ b/test-data/unit/fixtures/any.pyi @@ -6,3 +6,5 @@ class int: pass class str: pass def any(i: Iterable[T]) -> bool: pass + +class dict: pass diff --git a/test-data/unit/fixtures/args.pyi b/test-data/unit/fixtures/args.pyi index 8d0ecc00f4b6..9985ccf84817 100644 --- a/test-data/unit/fixtures/args.pyi +++ b/test-data/unit/fixtures/args.pyi @@ -26,6 +26,7 @@ class list(Sequence[T], Generic[T]): pass class int: def __eq__(self, o: object) -> bool: pass +class float: pass class str: pass class bytes: pass class bool: pass diff --git a/test-data/unit/fixtures/attr.pyi 
b/test-data/unit/fixtures/attr.pyi index c209abfef0d9..3bd4f0ec7cbe 100644 --- a/test-data/unit/fixtures/attr.pyi +++ b/test-data/unit/fixtures/attr.pyi @@ -23,6 +23,7 @@ class complex: def __init__(self, real: str = ...) -> None: ... class str: pass -class unicode: pass class ellipsis: pass class tuple: pass +class list: pass +class dict: pass diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi index 245526d78907..bc58a22b952b 100644 --- a/test-data/unit/fixtures/bool.pyi +++ b/test-data/unit/fixtures/bool.pyi @@ -14,7 +14,7 @@ class int: pass class bool(int): pass class float: pass class str: pass -class unicode: pass class ellipsis: pass -class list: pass +class list(Generic[T]): pass class property: pass +class dict: pass diff --git a/test-data/unit/fixtures/bool_py2.pyi b/test-data/unit/fixtures/bool_py2.pyi deleted file mode 100644 index b2c935132d57..000000000000 --- a/test-data/unit/fixtures/bool_py2.pyi +++ /dev/null @@ -1,16 +0,0 @@ -# builtins stub used in boolean-related test cases. -from typing import Generic, TypeVar -import sys -T = TypeVar('T') - -class object: - def __init__(self) -> None: pass - -class type: pass -class tuple(Generic[T]): pass -class function: pass -class bool: pass -class int: pass -class str: pass -class unicode: pass -class ellipsis: pass diff --git a/test-data/unit/fixtures/callable.pyi b/test-data/unit/fixtures/callable.pyi index 4ad72bee93ec..44abf0691ceb 100644 --- a/test-data/unit/fixtures/callable.pyi +++ b/test-data/unit/fixtures/callable.pyi @@ -28,3 +28,4 @@ class str: def __eq__(self, other: 'str') -> bool: pass class ellipsis: pass class list: ... 
+class dict: pass diff --git a/test-data/unit/fixtures/classmethod.pyi b/test-data/unit/fixtures/classmethod.pyi index 03ad803890a3..97e018b1dc1c 100644 --- a/test-data/unit/fixtures/classmethod.pyi +++ b/test-data/unit/fixtures/classmethod.pyi @@ -26,3 +26,6 @@ class bool: pass class ellipsis: pass class tuple(typing.Generic[_T]): pass + +class list: pass +class dict: pass diff --git a/test-data/unit/fixtures/complex.pyi b/test-data/unit/fixtures/complex.pyi index bcd03a2562e5..880ec3dd4d9d 100644 --- a/test-data/unit/fixtures/complex.pyi +++ b/test-data/unit/fixtures/complex.pyi @@ -10,3 +10,4 @@ class int: pass class float: pass class complex: pass class str: pass +class dict: pass diff --git a/test-data/unit/fixtures/complex_tuple.pyi b/test-data/unit/fixtures/complex_tuple.pyi index 6be46ac34573..81f1d33d1207 100644 --- a/test-data/unit/fixtures/complex_tuple.pyi +++ b/test-data/unit/fixtures/complex_tuple.pyi @@ -13,3 +13,4 @@ class float: pass class complex: pass class str: pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/dataclasses.pyi b/test-data/unit/fixtures/dataclasses.pyi index 206843a88b24..7de40af9cfe7 100644 --- a/test-data/unit/fixtures/dataclasses.pyi +++ b/test-data/unit/fixtures/dataclasses.pyi @@ -37,7 +37,11 @@ class dict(Mapping[KT, VT]): def get(self, k: KT, default: Union[KT, _T]) -> Union[VT, _T]: pass def __len__(self) -> int: ... 
-class list(Generic[_T], Sequence[_T]): pass +class list(Generic[_T], Sequence[_T]): + def __contains__(self, item: object) -> int: pass + def __getitem__(self, key: int) -> _T: pass + def __iter__(self) -> Iterator[_T]: pass + class function: pass class classmethod: pass property = object() diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi index f4ec15e4fa9a..153832411f50 100644 --- a/test-data/unit/fixtures/dict.pyi +++ b/test-data/unit/fixtures/dict.pyi @@ -29,7 +29,7 @@ class dict(Mapping[KT, VT]): @overload def get(self, k: KT) -> Optional[VT]: pass @overload - def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass + def get(self, k: KT, default: Union[VT, T]) -> Union[VT, T]: pass def __len__(self) -> int: ... class int: # for convenience @@ -41,7 +41,6 @@ class int: # for convenience imag: int class str: pass # for keyword argument key type -class unicode: pass # needed for py2 docstrings class bytes: pass class list(Sequence[T]): # needed by some test cases diff --git a/test-data/unit/fixtures/divmod.pyi b/test-data/unit/fixtures/divmod.pyi index cf41c500f49b..4d81d8fb47a2 100644 --- a/test-data/unit/fixtures/divmod.pyi +++ b/test-data/unit/fixtures/divmod.pyi @@ -19,3 +19,5 @@ class ellipsis: pass _N = TypeVar('_N', int, float) def divmod(_x: _N, _y: _N) -> Tuple[_N, _N]: ... + +class dict: pass diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi index bf6d21c8716e..08496e4e5934 100644 --- a/test-data/unit/fixtures/exception.pyi +++ b/test-data/unit/fixtures/exception.pyi @@ -1,3 +1,4 @@ +import sys from typing import Generic, TypeVar T = TypeVar('T') @@ -5,19 +6,24 @@ class object: def __init__(self): pass class type: pass -class tuple(Generic[T]): pass +class tuple(Generic[T]): + def __ge__(self, other: object) -> bool: ... 
+class list: pass +class dict: pass class function: pass class int: pass class str: pass -class unicode: pass class bool: pass class ellipsis: pass -# Note: this is a slight simplification. In Python 2, the inheritance hierarchy -# is actually Exception -> StandardError -> RuntimeError -> ... class BaseException: def __init__(self, *args: object) -> None: ... class Exception(BaseException): pass class RuntimeError(Exception): pass class NotImplementedError(RuntimeError): pass +if sys.version_info >= (3, 11): + _BT_co = TypeVar("_BT_co", bound=BaseException, covariant=True) + _T_co = TypeVar("_T_co", bound=Exception, covariant=True) + class BaseExceptionGroup(BaseException, Generic[_BT_co]): ... + class ExceptionGroup(BaseExceptionGroup[_T_co], Exception): ... diff --git a/test-data/unit/fixtures/f_string.pyi b/test-data/unit/fixtures/f_string.pyi index 78d39aee85b8..328c666b7ece 100644 --- a/test-data/unit/fixtures/f_string.pyi +++ b/test-data/unit/fixtures/f_string.pyi @@ -34,3 +34,5 @@ class str: def format(self, *args) -> str: pass def join(self, l: List[str]) -> str: pass + +class dict: pass diff --git a/test-data/unit/fixtures/fine_grained.pyi b/test-data/unit/fixtures/fine_grained.pyi index b2e104ccfceb..e454a27a5ebd 100644 --- a/test-data/unit/fixtures/fine_grained.pyi +++ b/test-data/unit/fixtures/fine_grained.pyi @@ -27,3 +27,4 @@ class tuple(Generic[T]): pass class function: pass class ellipsis: pass class list(Generic[T]): pass +class dict: pass diff --git a/test-data/unit/fixtures/float.pyi b/test-data/unit/fixtures/float.pyi index 880b16a2321b..5db4525849c0 100644 --- a/test-data/unit/fixtures/float.pyi +++ b/test-data/unit/fixtures/float.pyi @@ -34,3 +34,5 @@ class float: def __int__(self) -> int: ... def __mul__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... 
+ +class dict: pass diff --git a/test-data/unit/fixtures/floatdict_python2.pyi b/test-data/unit/fixtures/floatdict_python2.pyi deleted file mode 100644 index f177355d5d4b..000000000000 --- a/test-data/unit/fixtures/floatdict_python2.pyi +++ /dev/null @@ -1,68 +0,0 @@ -from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union - -T = TypeVar('T') -KT = TypeVar('KT') -VT = TypeVar('VT') - -Any = 0 - -class object: - def __init__(self) -> None: pass - -class type: - def __init__(self, x: Any) -> None: pass - -class str: - def __add__(self, other: 'str') -> 'str': pass - def __rmul__(self, n: int) -> str: ... - -class unicode: pass - -class tuple(Generic[T]): pass -class slice: pass -class function: pass - -class ellipsis: pass - -class list(Iterable[T], Generic[T]): - @overload - def __init__(self) -> None: pass - @overload - def __init__(self, x: Iterable[T]) -> None: pass - def __iter__(self) -> Iterator[T]: pass - def __add__(self, x: list[T]) -> list[T]: pass - def __mul__(self, x: int) -> list[T]: pass - def __getitem__(self, x: int) -> T: pass - def append(self, x: T) -> None: pass - def extend(self, x: Iterable[T]) -> None: pass - -class dict(Mapping[KT, VT], Generic[KT, VT]): - @overload - def __init__(self, **kwargs: VT) -> None: pass - @overload - def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass - def __setitem__(self, k: KT, v: VT) -> None: pass - def __getitem__(self, k: KT) -> VT: pass - def __iter__(self) -> Iterator[KT]: pass - def update(self, a: Mapping[KT, VT]) -> None: pass - @overload - def get(self, k: KT) -> Optional[VT]: pass - @overload - def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass - - -class int: - def __float__(self) -> float: ... - def __int__(self) -> int: ... - def __mul__(self, x: int) -> int: ... - def __rmul__(self, x: int) -> int: ... - def __truediv__(self, x: int) -> int: ... - def __rtruediv__(self, x: int) -> int: ... 
- -class float: - def __float__(self) -> float: ... - def __int__(self) -> int: ... - def __mul__(self, x: float) -> float: ... - def __rmul__(self, x: float) -> float: ... - def __truediv__(self, x: float) -> float: ... - def __rtruediv__(self, x: float) -> float: ... diff --git a/test-data/unit/fixtures/for.pyi b/test-data/unit/fixtures/for.pyi index 31f6de78d486..694f83e940b2 100644 --- a/test-data/unit/fixtures/for.pyi +++ b/test-data/unit/fixtures/for.pyi @@ -18,3 +18,4 @@ class str: pass # for convenience class list(Iterable[t], Generic[t]): def __iter__(self) -> Iterator[t]: pass +class dict: pass diff --git a/test-data/unit/fixtures/function.pyi b/test-data/unit/fixtures/function.pyi index c00a7846628a..697d0d919d98 100644 --- a/test-data/unit/fixtures/function.pyi +++ b/test-data/unit/fixtures/function.pyi @@ -5,3 +5,4 @@ class type: pass class function: pass class int: pass class str: pass +class dict: pass diff --git a/test-data/unit/fixtures/isinstance.pyi b/test-data/unit/fixtures/isinstance.pyi index aa8bfce7fbe0..c1125c24b941 100644 --- a/test-data/unit/fixtures/isinstance.pyi +++ b/test-data/unit/fixtures/isinstance.pyi @@ -25,3 +25,5 @@ class str: class ellipsis: pass NotImplemented = cast(Any, None) + +class dict: pass diff --git a/test-data/unit/fixtures/isinstance_python3_10.pyi b/test-data/unit/fixtures/isinstance_python3_10.pyi index abb37ea81c00..7c919a216bfb 100644 --- a/test-data/unit/fixtures/isinstance_python3_10.pyi +++ b/test-data/unit/fixtures/isinstance_python3_10.pyi @@ -27,3 +27,5 @@ class str: class ellipsis: pass NotImplemented = cast(Any, None) + +class dict: pass diff --git a/test-data/unit/fixtures/list.pyi b/test-data/unit/fixtures/list.pyi index 31dc333b3d4f..90fbabe8bc92 100644 --- a/test-data/unit/fixtures/list.pyi +++ b/test-data/unit/fixtures/list.pyi @@ -36,3 +36,5 @@ class str: class bool(int): pass property = object() # Dummy definition. 
+ +class dict: pass diff --git a/test-data/unit/fixtures/module_all.pyi b/test-data/unit/fixtures/module_all.pyi index 87959fefbff5..b14152c7e98f 100644 --- a/test-data/unit/fixtures/module_all.pyi +++ b/test-data/unit/fixtures/module_all.pyi @@ -16,3 +16,4 @@ class list(Generic[_T], Sequence[_T]): def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass class tuple(Generic[_T]): pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/module_all_python2.pyi b/test-data/unit/fixtures/module_all_python2.pyi deleted file mode 100644 index 989333c5f41a..000000000000 --- a/test-data/unit/fixtures/module_all_python2.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Generic, Sequence, TypeVar -_T = TypeVar('_T') - -class object: - def __init__(self) -> None: pass -class type: pass -class function: pass -class int: pass -class str: pass -class unicode: pass -class list(Generic[_T], Sequence[_T]): - def append(self, x: _T): pass - def extend(self, x: Sequence[_T]): pass - def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass -class tuple(Generic[_T]): pass diff --git a/test-data/unit/fixtures/notimplemented.pyi b/test-data/unit/fixtures/notimplemented.pyi index e619a6c5ad85..2ca376ea0760 100644 --- a/test-data/unit/fixtures/notimplemented.pyi +++ b/test-data/unit/fixtures/notimplemented.pyi @@ -11,3 +11,4 @@ class bool: pass class int: pass class str: pass NotImplemented = cast(Any, None) +class dict: pass diff --git a/test-data/unit/fixtures/object_hashable.pyi b/test-data/unit/fixtures/object_hashable.pyi index 592cba808cbf..49b17991f01c 100644 --- a/test-data/unit/fixtures/object_hashable.pyi +++ b/test-data/unit/fixtures/object_hashable.pyi @@ -7,3 +7,4 @@ class float: ... class str: ... class ellipsis: ... class tuple: ... 
+class dict: pass diff --git a/test-data/unit/fixtures/ops.pyi b/test-data/unit/fixtures/ops.pyi index d5845aba43c6..9cc4d22eb0a7 100644 --- a/test-data/unit/fixtures/ops.pyi +++ b/test-data/unit/fixtures/ops.pyi @@ -33,8 +33,6 @@ class str: def startswith(self, x: 'str') -> bool: pass def strip(self) -> 'str': pass -class unicode: pass - class int: def __add__(self, x: 'int') -> 'int': pass def __radd__(self, x: 'int') -> 'int': pass @@ -74,3 +72,5 @@ def __print(a1: object = None, a2: object = None, a3: object = None, a4: object = None) -> None: pass class ellipsis: pass + +class dict: pass diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi index 9553df4b40c7..90d76b9d76dd 100644 --- a/test-data/unit/fixtures/primitives.pyi +++ b/test-data/unit/fixtures/primitives.pyi @@ -1,5 +1,5 @@ # builtins stub with non-generic primitive types -from typing import Generic, TypeVar, Sequence, Iterator, Mapping, Iterable, overload +from typing import Generic, TypeVar, Sequence, Iterator, Mapping, Iterable, Tuple, Union T = TypeVar('T') V = TypeVar('V') @@ -20,7 +20,9 @@ class int: def __rmul__(self, x: int) -> int: pass class float: def __float__(self) -> float: pass -class complex: pass + def __add__(self, x: float) -> float: pass +class complex: + def __add__(self, x: complex) -> complex: pass class bool(int): pass class str(Sequence[str]): def __add__(self, s: str) -> str: pass @@ -63,3 +65,5 @@ class range(Sequence[int]): def __getitem__(self, i: int) -> int: pass def __iter__(self) -> Iterator[int]: pass def __contains__(self, other: object) -> bool: pass + +def isinstance(x: object, t: Union[type, Tuple]) -> bool: pass diff --git a/test-data/unit/fixtures/property.pyi b/test-data/unit/fixtures/property.pyi index 9dca0d50a3be..2397c05c78d5 100644 --- a/test-data/unit/fixtures/property.pyi +++ b/test-data/unit/fixtures/property.pyi @@ -13,6 +13,7 @@ class function: pass property = object() # Dummy definition class classmethod: pass 
+class list: pass class dict: pass class int: pass class str: pass diff --git a/test-data/unit/fixtures/property_py2.pyi b/test-data/unit/fixtures/property_py2.pyi deleted file mode 100644 index 3b0ab69cf43f..000000000000 --- a/test-data/unit/fixtures/property_py2.pyi +++ /dev/null @@ -1,21 +0,0 @@ -import typing - -_T = typing.TypeVar('_T') - -class object: - def __init__(self) -> None: pass - -class type: - def __init__(self, x: typing.Any) -> None: pass - -class function: pass - -property = object() # Dummy definition - -class int: pass -class str: pass -class unicode: pass -class bool: pass -class ellipsis: pass - -class tuple(typing.Generic[_T]): pass diff --git a/test-data/unit/fixtures/python2.pyi b/test-data/unit/fixtures/python2.pyi deleted file mode 100644 index 51af59c8bd45..000000000000 --- a/test-data/unit/fixtures/python2.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from typing import Generic, Iterable, TypeVar, Sequence, Iterator - -class object: - def __init__(self) -> None: pass - def __eq__(self, other: object) -> bool: pass - def __ne__(self, other: object) -> bool: pass - -class type: - def __init__(self, x) -> None: pass - -class function: pass - -class int: pass -class float: pass -class str: - def format(self, *args, **kwars) -> str: ... -class unicode: - def format(self, *args, **kwars) -> unicode: ... 
-class bool(int): pass - -bytes = str - -T = TypeVar('T') -S = TypeVar('S') -class list(Iterable[T], Generic[T]): - def __iter__(self) -> Iterator[T]: pass - def __getitem__(self, item: int) -> T: pass -class tuple(Iterable[T]): - def __iter__(self) -> Iterator[T]: pass -class dict(Generic[T, S]): pass - -class bytearray(Sequence[int]): - def __init__(self, string: str) -> None: pass - def __contains__(self, item: object) -> bool: pass - def __iter__(self) -> Iterator[int]: pass - def __getitem__(self, item: int) -> int: pass - -# Definition of None is implicit diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi index 9852bbc9fcc6..71d3bd2eee18 100644 --- a/test-data/unit/fixtures/set.pyi +++ b/test-data/unit/fixtures/set.pyi @@ -6,6 +6,7 @@ T = TypeVar('T') class object: def __init__(self) -> None: pass + def __eq__(self, other): pass class type: pass class tuple(Generic[T]): pass @@ -24,3 +25,5 @@ class set(Iterable[T], Generic[T]): def add(self, x: T) -> None: pass def discard(self, x: T) -> None: pass def update(self, x: Set[T]) -> None: pass + +class dict: pass diff --git a/test-data/unit/fixtures/slice.pyi b/test-data/unit/fixtures/slice.pyi index 947d49ea09fb..b5a4549da068 100644 --- a/test-data/unit/fixtures/slice.pyi +++ b/test-data/unit/fixtures/slice.pyi @@ -14,3 +14,4 @@ class str: pass class slice: pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/staticmethod.pyi b/test-data/unit/fixtures/staticmethod.pyi index 7d5d98634e48..8a87121b2a71 100644 --- a/test-data/unit/fixtures/staticmethod.pyi +++ b/test-data/unit/fixtures/staticmethod.pyi @@ -16,6 +16,6 @@ class int: def from_bytes(bytes: bytes, byteorder: str) -> int: pass class str: pass -class unicode: pass class bytes: pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/transform.pyi b/test-data/unit/fixtures/transform.pyi index afdc2bf5b59a..7dbb8fa90dbe 100644 --- a/test-data/unit/fixtures/transform.pyi +++ 
b/test-data/unit/fixtures/transform.pyi @@ -28,3 +28,5 @@ def __print(a1=None, a2=None, a3=None, a4=None): # Do not use *args since this would require list and break many test # cases. pass + +class dict: pass diff --git a/test-data/unit/fixtures/tuple-simple.pyi b/test-data/unit/fixtures/tuple-simple.pyi index b195dfa59729..6c816c1c5b7a 100644 --- a/test-data/unit/fixtures/tuple-simple.pyi +++ b/test-data/unit/fixtures/tuple-simple.pyi @@ -18,3 +18,4 @@ class function: pass # We need int for indexing tuples. class int: pass class str: pass # For convenience +class dict: pass diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi index 5c69a4ad1eb5..0261731304b1 100644 --- a/test-data/unit/fixtures/tuple.pyi +++ b/test-data/unit/fixtures/tuple.pyi @@ -23,7 +23,8 @@ class tuple(Sequence[Tco], Generic[Tco]): def __rmul__(self, n: int) -> Tuple[Tco, ...]: pass def __add__(self, x: Tuple[Tco, ...]) -> Tuple[Tco, ...]: pass def count(self, obj: object) -> int: pass -class function: pass +class function: + __name__: str class ellipsis: pass class classmethod: pass @@ -36,7 +37,6 @@ class bool(int): pass class str: pass # For convenience class bytes: pass class bytearray: pass -class unicode: pass class list(Sequence[T], Generic[T]): @overload @@ -51,3 +51,5 @@ def isinstance(x: object, t: type) -> bool: pass def sum(iterable: Iterable[T], start: Optional[T] = None) -> T: pass class BaseException: pass + +class dict: pass diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi index 755b45ff0bb5..39357a693638 100644 --- a/test-data/unit/fixtures/type.pyi +++ b/test-data/unit/fixtures/type.pyi @@ -1,8 +1,9 @@ # builtins stub used in type-related test cases. 
-from typing import Generic, TypeVar, List, Union +from typing import Any, Generic, TypeVar, List, Union -T = TypeVar('T') +T = TypeVar("T") +S = TypeVar("S") class object: def __init__(self) -> None: pass @@ -12,12 +13,15 @@ class list(Generic[T]): pass class type(Generic[T]): __name__: str + def __call__(self, *args: Any, **kwargs: Any) -> Any: pass def __or__(self, other: Union[type, None]) -> type: pass + def __ror__(self, other: Union[type, None]) -> type: pass def mro(self) -> List['type']: pass class tuple(Generic[T]): pass +class dict(Generic[T, S]): pass class function: pass class bool: pass class int: pass class str: pass -class unicode: pass +class ellipsis: pass diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index c406da986818..04568f7c03f3 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -179,3 +179,5 @@ class _TypedDict(Mapping[str, object]): def pop(self, k: NoReturn, default: T = ...) -> object: ... def update(self: T, __m: T) -> None: ... def __delitem__(self, k: NoReturn) -> None: ... + +class _SpecialForm: pass diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi index 568fe057c4cf..0d0e13468013 100644 --- a/test-data/unit/fixtures/typing-medium.pyi +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -68,4 +68,8 @@ class ContextManager(Generic[T]): # Use Any because not all the precise types are in the fixtures. def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass +class _SpecialForm: pass + TYPE_CHECKING = 1 + +def dataclass_transform() -> Callable[[T], T]: ... 
diff --git a/test-data/unit/fixtures/typing-namedtuple.pyi b/test-data/unit/fixtures/typing-namedtuple.pyi index d51134ead599..1a31549463b6 100644 --- a/test-data/unit/fixtures/typing-namedtuple.pyi +++ b/test-data/unit/fixtures/typing-namedtuple.pyi @@ -5,6 +5,7 @@ overload = 0 Type = 0 Literal = 0 Optional = 0 +Self = 0 T_co = TypeVar('T_co', covariant=True) KT = TypeVar('KT') diff --git a/test-data/unit/fixtures/typing-typeddict.pyi b/test-data/unit/fixtures/typing-typeddict.pyi index 378570b4c19c..92ae402b9ea5 100644 --- a/test-data/unit/fixtures/typing-typeddict.pyi +++ b/test-data/unit/fixtures/typing-typeddict.pyi @@ -9,6 +9,7 @@ from abc import ABCMeta cast = 0 +assert_type = 0 overload = 0 Any = 0 Union = 0 @@ -25,6 +26,7 @@ TypedDict = 0 NoReturn = 0 Required = 0 NotRequired = 0 +Self = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) diff --git a/test-data/unit/fixtures/union.pyi b/test-data/unit/fixtures/union.pyi index 489e3ddb6ef9..350e145a6f8f 100644 --- a/test-data/unit/fixtures/union.pyi +++ b/test-data/unit/fixtures/union.pyi @@ -15,3 +15,4 @@ class tuple(Generic[T]): pass # We need int for indexing tuples. class int: pass class str: pass # For convenience +class dict: pass diff --git a/test-data/unit/lib-stub/__builtin__.pyi b/test-data/unit/lib-stub/__builtin__.pyi deleted file mode 100644 index e7109a179aac..000000000000 --- a/test-data/unit/lib-stub/__builtin__.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Generic, TypeVar -_T = TypeVar('_T') - -Any = 0 - -class object: - def __init__(self): - # type: () -> None - pass - -class type: - def __init__(self, x): - # type: (Any) -> None - pass - -# These are provided here for convenience. 
-class int: pass -class float: pass - -class str: pass -class unicode: pass - -class tuple(Generic[_T]): pass -class function: pass - -class ellipsis: pass - -def print(*args, end=''): pass - -# Definition of None is implicit diff --git a/test-data/unit/lib-stub/_decimal.pyi b/test-data/unit/lib-stub/_decimal.pyi new file mode 100644 index 000000000000..2c2c5bff11f7 --- /dev/null +++ b/test-data/unit/lib-stub/_decimal.pyi @@ -0,0 +1,4 @@ +# Very simplified decimal stubs for use in tests + +class Decimal: + def __new__(cls, value: str = ...) -> Decimal: ... diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi index 8c4f504fb2e7..c2ac78c41661 100644 --- a/test-data/unit/lib-stub/builtins.pyi +++ b/test-data/unit/lib-stub/builtins.pyi @@ -17,11 +17,17 @@ class float: pass class str: pass class bytes: pass -class function: pass +class function: + __name__: str class ellipsis: pass -from typing import Generic, Sequence, TypeVar +from typing import Generic, Iterator, Sequence, TypeVar _T = TypeVar('_T') -class list(Generic[_T], Sequence[_T]): pass +class list(Generic[_T], Sequence[_T]): + def __contains__(self, item: object) -> bool: pass + def __getitem__(self, key: int) -> _T: pass + def __iter__(self) -> Iterator[_T]: pass + +class dict: pass # Definition of None is implicit diff --git a/test-data/unit/lib-stub/datetime.pyi b/test-data/unit/lib-stub/datetime.pyi new file mode 100644 index 000000000000..7d71682d051d --- /dev/null +++ b/test-data/unit/lib-stub/datetime.pyi @@ -0,0 +1,16 @@ +# Very simplified datetime stubs for use in tests + +class datetime: + def __new__( + cls, + year: int, + month: int, + day: int, + hour: int = ..., + minute: int = ..., + second: int = ..., + microsecond: int = ..., + *, + fold: int = ..., + ) -> datetime: ... + def __format__(self, __fmt: str) -> str: ... 
diff --git a/test-data/unit/lib-stub/decimal.pyi b/test-data/unit/lib-stub/decimal.pyi new file mode 100644 index 000000000000..d2ab6eda9ff1 --- /dev/null +++ b/test-data/unit/lib-stub/decimal.pyi @@ -0,0 +1,3 @@ +# Very simplified decimal stubs for use in tests + +from _decimal import * diff --git a/test-data/unit/lib-stub/functools.pyi b/test-data/unit/lib-stub/functools.pyi new file mode 100644 index 000000000000..9e62a14c2f34 --- /dev/null +++ b/test-data/unit/lib-stub/functools.pyi @@ -0,0 +1,35 @@ +from typing import Generic, TypeVar, Callable, Any, Mapping + +_T = TypeVar("_T") + +class _SingleDispatchCallable(Generic[_T]): + registry: Mapping[Any, Callable[..., _T]] + def dispatch(self, cls: Any) -> Callable[..., _T]: ... + # @fun.register(complex) + # def _(arg, verbose=False): ... + @overload + def register(self, cls: type[Any], func: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + # @fun.register + # def _(arg: int, verbose=False): + @overload + def register(self, cls: Callable[..., _T], func: None = ...) -> Callable[..., _T]: ... + # fun.register(int, lambda x: x) + @overload + def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... + def _clear_cache(self) -> None: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... + +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... + +def total_ordering(cls: type[_T]) -> type[_T]: ... + +class cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + def __init__(self, func: Callable[[Any], _T]) -> None: ... + @overload + def __get__(self, instance: None, owner: type[Any] | None = ...) -> cached_property[_T]: ... + @overload + def __get__(self, instance: object, owner: type[Any] | None = ...) -> _T: ... + def __set_name__(self, owner: type[Any], name: str) -> None: ... + def __class_getitem__(cls, item: Any) -> Any: ... 
diff --git a/test-data/unit/lib-stub/mypy_extensions.pyi b/test-data/unit/lib-stub/mypy_extensions.pyi index 6274163c497d..d79be8719417 100644 --- a/test-data/unit/lib-stub/mypy_extensions.pyi +++ b/test-data/unit/lib-stub/mypy_extensions.pyi @@ -1,7 +1,7 @@ # NOTE: Requires fixtures/dict.pyi from typing import ( Any, Dict, Type, TypeVar, Optional, Any, Generic, Mapping, NoReturn as NoReturn, Iterator, - Union + Union, Protocol ) import sys @@ -51,10 +51,13 @@ mypyc_attr: Any class FlexibleAlias(Generic[_T, _U]): ... if sys.version_info >= (3, 0): + class __SupportsInt(Protocol[T_co]): + def __int__(self) -> int: pass + _Int = Union[int, i32, i64] class i32: - def __init__(self, x: _Int) -> None: ... + def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... def __add__(self, x: i32) -> i32: ... def __radd__(self, x: i32) -> i32: ... def __sub__(self, x: i32) -> i32: ... @@ -84,7 +87,7 @@ if sys.version_info >= (3, 0): def __gt__(self, x: i32) -> bool: ... class i64: - def __init__(self, x: _Int) -> None: ... + def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... def __add__(self, x: i64) -> i64: ... def __radd__(self, x: i64) -> i64: ... def __sub__(self, x: i64) -> i64: ... diff --git a/test-data/unit/lib-stub/traceback.pyi b/test-data/unit/lib-stub/traceback.pyi new file mode 100644 index 000000000000..83c1891f80f5 --- /dev/null +++ b/test-data/unit/lib-stub/traceback.pyi @@ -0,0 +1,3 @@ +# Very simplified traceback stubs for use in tests + +def print_tb(*args, **kwargs) -> None: ... 
diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index 23d97704d934..a306b70f74d7 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -27,6 +27,9 @@ NoReturn = 0 Never = 0 NewType = 0 ParamSpec = 0 +TypeVarTuple = 0 +Unpack = 0 +Self = 0 TYPE_CHECKING = 0 T = TypeVar('T') diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi index e92f7e913502..89f7108fe83c 100644 --- a/test-data/unit/lib-stub/typing_extensions.pyi +++ b/test-data/unit/lib-stub/typing_extensions.pyi @@ -1,10 +1,11 @@ -from typing import TypeVar, Any, Mapping, Iterator, NoReturn as NoReturn, Dict, Type +import typing +from typing import Any, Mapping, Iterator, NoReturn as NoReturn, Dict, Type from typing import TYPE_CHECKING as TYPE_CHECKING from typing import NewType as NewType, overload as overload import sys -_T = TypeVar('_T') +_T = typing.TypeVar('_T') class _SpecialForm: def __getitem__(self, typeargs: Any) -> Any: @@ -25,6 +26,8 @@ Literal: _SpecialForm = ... Annotated: _SpecialForm = ... +TypeVar: _SpecialForm + ParamSpec: _SpecialForm Concatenate: _SpecialForm @@ -54,3 +57,5 @@ class _TypedDict(Mapping[str, object]): def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ... def reveal_type(__obj: T) -> T: pass + +def dataclass_transform() -> Callable[[T], T]: ... diff --git a/test-data/unit/lib-stub/unannotated_lib.pyi b/test-data/unit/lib-stub/unannotated_lib.pyi new file mode 100644 index 000000000000..90bfb6fa47d6 --- /dev/null +++ b/test-data/unit/lib-stub/unannotated_lib.pyi @@ -0,0 +1 @@ +def f(x): ... 
diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index a593a064cbb2..144a095440f2 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -669,18 +669,18 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __match_args__<10> (Tuple[Literal['x']]) - __new__<11> - _asdict<12> - _field_defaults<13> (builtins.object<1>) - _field_types<14> (builtins.object<1>) - _fields<15> (Tuple[builtins.str<9>]) - _make<16> - _replace<17> - _source<18> (builtins.str<9>) - x<19> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __match_args__<11> (Tuple[Literal['x']]) + __new__<12> + _asdict<13> + _field_defaults<14> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<15> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<16> (Tuple[builtins.str<8>]) + _make<17> + _replace<18> + _source<19> (builtins.str<8>) + x<20> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) @@ -693,19 +693,19 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __match_args__<10> (Tuple[Literal['x'], Literal['y']]) - __new__<11> - _asdict<12> - _field_defaults<13> (builtins.object<1>) - _field_types<14> (builtins.object<1>) - _fields<15> (Tuple[builtins.str<9>, builtins.str<9>]) - _make<16> - _replace<17> - _source<18> (builtins.str<9>) - x<19> (target.A<0>) - y<20> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __match_args__<11> (Tuple[Literal['x'], Literal['y']]) + __new__<12> + _asdict<13> + _field_defaults<14> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<15> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<16> (Tuple[builtins.str<8>, 
builtins.str<8>]) + _make<17> + _replace<18> + _source<19> (builtins.str<8>) + x<20> (target.A<0>) + y<21> (target.A<0>))) [case testNamedTupleOldVersion_typeinfo] import target @@ -730,17 +730,17 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __new__<10> - _asdict<11> - _field_defaults<12> (builtins.object<1>) - _field_types<13> (builtins.object<1>) - _fields<14> (Tuple[builtins.str<9>]) - _make<15> - _replace<16> - _source<17> (builtins.str<9>) - x<18> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __new__<11> + _asdict<12> + _field_defaults<13> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<14> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<15> (Tuple[builtins.str<8>]) + _make<16> + _replace<17> + _source<18> (builtins.str<8>) + x<19> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) @@ -753,18 +753,18 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __new__<10> - _asdict<11> - _field_defaults<12> (builtins.object<1>) - _field_types<13> (builtins.object<1>) - _fields<14> (Tuple[builtins.str<9>, builtins.str<9>]) - _make<15> - _replace<16> - _source<17> (builtins.str<9>) - x<18> (target.A<0>) - y<19> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __new__<11> + _asdict<12> + _field_defaults<13> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<14> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<15> (Tuple[builtins.str<8>, builtins.str<8>]) + _make<16> + _replace<17> + _source<18> (builtins.str<8>) + x<19> (target.A<0>) + y<20> (target.A<0>))) [case testUnionType_types] import target diff --git 
a/test-data/unit/plugins/customentry.py b/test-data/unit/plugins/customentry.py index f8b86c33dcfc..b3dacfd4cf44 100644 --- a/test-data/unit/plugins/customentry.py +++ b/test-data/unit/plugins/customentry.py @@ -4,7 +4,7 @@ class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname == '__main__.f': return my_hook - assert fullname is not None + assert fullname return None def my_hook(ctx): diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 59ab586b17e6..b414eba9f679 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1636,3 +1636,244 @@ foo("") foo(list("")) foo(list((list(""), ""))) [out] + +[case testNarrowTypeForDictKeys] +# flags: --strict-optional +from typing import Dict, KeysView, Optional + +d: Dict[str, int] +key: Optional[str] +if key in d.keys(): + reveal_type(key) +else: + reveal_type(key) + +kv: KeysView[str] +k: Optional[str] +if k in kv: + reveal_type(k) +else: + reveal_type(k) + +[out] +_testNarrowTypeForDictKeys.py:7: note: Revealed type is "builtins.str" +_testNarrowTypeForDictKeys.py:9: note: Revealed type is "Union[builtins.str, None]" +_testNarrowTypeForDictKeys.py:14: note: Revealed type is "builtins.str" +_testNarrowTypeForDictKeys.py:16: note: Revealed type is "Union[builtins.str, None]" + +[case testTypeAliasWithNewStyleUnion] +# flags: --python-version 3.10 +from typing import Literal, Type, TypeAlias, TypeVar + +Foo = Literal[1, 2] +reveal_type(Foo) +Bar1 = Foo | Literal[3] +Bar2 = Literal[3] | Foo +Bar3 = Foo | Foo | Literal[3] | Foo + +U1 = int | str +U2 = U1 | bytes +U3 = bytes | U1 + +Opt1 = None | int +Opt2 = None | float +Opt3 = int | None +Opt4 = float | None + +A = Type[int] | str +B: TypeAlias = Type[int] | str +C = type[int] | str + +D = type[int] | str +x: D +reveal_type(x) +E: TypeAlias = type[int] | str +y: E +reveal_type(y) +F = list[type[int] | str] + +T = TypeVar("T", int, str) +def foo(x: T) -> T: + A = type[int] | str + a: A + return x 
+[out] +_testTypeAliasWithNewStyleUnion.py:5: note: Revealed type is "typing._SpecialForm" +_testTypeAliasWithNewStyleUnion.py:25: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnion.py:28: note: Revealed type is "Union[Type[builtins.int], builtins.str]" + +[case testTypeAliasWithNewStyleUnionInStub] +# flags: --python-version 3.7 +import m +a: m.A +reveal_type(a) +b: m.B +reveal_type(b) +c: m.C +reveal_type(c) +d: m.D +reveal_type(d) +e: m.E +reveal_type(e) +f: m.F +reveal_type(f) + +[file m.pyi] +from typing import Type, Callable +from typing_extensions import Literal, TypeAlias + +Foo = Literal[1, 2] +reveal_type(Foo) +Bar1 = Foo | Literal[3] +Bar2 = Literal[3] | Foo +Bar3 = Foo | Foo | Literal[3] | Foo + +U1 = int | str +U2 = U1 | bytes +U3 = bytes | U1 + +Opt1 = None | int +Opt2 = None | float +Opt3 = int | None +Opt4 = float | None + +A = Type[int] | str +B: TypeAlias = Type[int] | str +C = type[int] | str +reveal_type(C) +D: TypeAlias = type[int] | str +E = str | type[int] +F: TypeAlias = str | type[int] +G = list[type[int] | str] +H = list[str | type[int]] + +CU1 = int | Callable[[], str | bool] +CU2: TypeAlias = int | Callable[[], str | bool] +CU3 = int | Callable[[str | bool], str] +CU4: TypeAlias = int | Callable[[str | bool], str] +[out] +m.pyi:5: note: Revealed type is "typing._SpecialForm" +m.pyi:22: note: Revealed type is "typing._SpecialForm" +_testTypeAliasWithNewStyleUnionInStub.py:4: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:6: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:8: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:10: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:12: note: Revealed type is "Union[builtins.str, Type[builtins.int]]" 
+_testTypeAliasWithNewStyleUnionInStub.py:14: note: Revealed type is "Union[builtins.str, Type[builtins.int]]" + +[case testEnumNameWorkCorrectlyOn311] +# flags: --python-version 3.11 +import enum + +class E(enum.Enum): + X = 1 + Y = 2 + @enum.property + def foo(self) -> int: ... + +e: E +reveal_type(e.name) +reveal_type(e.value) +reveal_type(E.X.name) +reveal_type(e.foo) +reveal_type(E.Y.foo) +[out] +_testEnumNameWorkCorrectlyOn311.py:11: note: Revealed type is "builtins.str" +_testEnumNameWorkCorrectlyOn311.py:12: note: Revealed type is "Union[Literal[1]?, Literal[2]?]" +_testEnumNameWorkCorrectlyOn311.py:13: note: Revealed type is "Literal['X']?" +_testEnumNameWorkCorrectlyOn311.py:14: note: Revealed type is "builtins.int" +_testEnumNameWorkCorrectlyOn311.py:15: note: Revealed type is "builtins.int" + +[case testTypeAliasNotSupportedWithNewStyleUnion] +# flags: --python-version 3.9 +from typing_extensions import TypeAlias +A = type[int] | str +B = str | type[int] +C = str | int +D: TypeAlias = str | int +[out] +_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Value of type "Type[type]" is not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Value of type "Type[type]" is not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Unsupported left operand type for | ("Type[str]") +_testTypeAliasNotSupportedWithNewStyleUnion.py:6: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:6: error: Unsupported left operand type for | ("Type[str]") + +[case testTypedDictUnionGetFull] +from typing import Dict +from typing_extensions import TypedDict + +class 
TD(TypedDict, total=False): + x: int + y: int + +A = Dict[str, TD] +x: A +def foo(k: str) -> TD: + reveal_type(x.get(k, {})) + return x.get(k, {}) +[out] +_testTypedDictUnionGetFull.py:11: note: Revealed type is "TypedDict('_testTypedDictUnionGetFull.TD', {'x'?: builtins.int, 'y'?: builtins.int})" + +[case testTupleWithDifferentArgsPy310] +# https://github.com/python/mypy/issues/11098 +# flags: --python-version 3.10 +Correct1 = str | tuple[float, float, str] +Correct2 = tuple[float] | str +Correct3 = tuple[float, ...] | str +Correct4 = tuple[float, str] +Correct5 = tuple[float, ...] +Correct6 = list[tuple[int, str]] +c1: Correct1 +c2: Correct2 +c3: Correct3 +c4: Correct4 +c5: Correct5 +c6: Correct6 +reveal_type(c1) +reveal_type(c2) +reveal_type(c3) +reveal_type(c4) +reveal_type(c5) +reveal_type(c6) + +RHSAlias1: type = tuple[int, int] +RHSAlias2: type = tuple[int] +RHSAlias3: type = tuple[int, ...] + +WrongTypeElement = str | tuple[float, 1] # Error +WrongEllipsis = tuple[float, float, ...] | str # Error + +# TODO: This should produce a fixed-length tuple +reveal_type(tuple[int, str]((1, "x"))) +[out] +_testTupleWithDifferentArgsPy310.py:15: note: Revealed type is "Union[builtins.str, Tuple[builtins.float, builtins.float, builtins.str]]" +_testTupleWithDifferentArgsPy310.py:16: note: Revealed type is "Union[Tuple[builtins.float], builtins.str]" +_testTupleWithDifferentArgsPy310.py:17: note: Revealed type is "Union[builtins.tuple[builtins.float, ...], builtins.str]" +_testTupleWithDifferentArgsPy310.py:18: note: Revealed type is "Tuple[builtins.float, builtins.str]" +_testTupleWithDifferentArgsPy310.py:19: note: Revealed type is "builtins.tuple[builtins.float, ...]" +_testTupleWithDifferentArgsPy310.py:20: note: Revealed type is "builtins.list[Tuple[builtins.int, builtins.str]]" +_testTupleWithDifferentArgsPy310.py:26: error: Invalid type: try using Literal[1] instead? +_testTupleWithDifferentArgsPy310.py:27: error: Unexpected "..." 
+_testTupleWithDifferentArgsPy310.py:30: note: Revealed type is "builtins.tuple[builtins.object, ...]" + +[case testEnumIterMetaInference] +import socket +from enum import Enum +from typing import Iterable, Iterator, Type, TypeVar + +_E = TypeVar("_E", bound=Enum) + +def enum_iter(cls: Type[_E]) -> Iterable[_E]: + reveal_type(iter(cls)) + reveal_type(next(iter(cls))) + return iter(cls) + +for value in enum_iter(socket.SocketKind): + reveal_type(value) +[out] +_testEnumIterMetaInference.py:8: note: Revealed type is "typing.Iterator[_E`-1]" +_testEnumIterMetaInference.py:9: note: Revealed type is "_E`-1" +_testEnumIterMetaInference.py:13: note: Revealed type is "socket.SocketKind" diff --git a/test-data/unit/reports.test b/test-data/unit/reports.test index a7ab6d754b2c..50dabb1fdea9 100644 --- a/test-data/unit/reports.test +++ b/test-data/unit/reports.test @@ -103,6 +103,28 @@ class A(object): +[case testNoCrashRecursiveAliasInReport] +# cmd: mypy --any-exprs-report report n.py + +[file n.py] +from typing import Union, List, Any, TypeVar + +Nested = List[Union[Any, Nested]] +T = TypeVar("T") +NestedGen = List[Union[T, NestedGen[T]]] + +x: Nested +y: NestedGen[int] +z: NestedGen[Any] + +[file report/any-exprs.txt] +[outfile report/types-of-anys.txt] + Name Unannotated Explicit Unimported Omitted Generics Error Special Form Implementation Artifact +----------------------------------------------------------------------------------------------------------------- + n 0 4 0 8 0 0 0 +----------------------------------------------------------------------------------------------------------------- +Total 0 4 0 8 0 0 0 + [case testTypeVarTreatedAsEmptyLine] # cmd: mypy --html-report report n.py @@ -480,7 +502,7 @@ namespace_packages = True -

folder.subfolder.something

+

folder.subfolder.something

diff --git a/test-data/unit/semanal-basic.test b/test-data/unit/semanal-basic.test index 4b60ab99f869..20443517e03e 100644 --- a/test-data/unit/semanal-basic.test +++ b/test-data/unit/semanal-basic.test @@ -8,8 +8,9 @@ x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) ExpressionStmt:2( NameExpr(x [__main__.x]))) @@ -25,8 +26,9 @@ MypyFile:1( NameExpr(y* [__main__.y])) IntExpr(2)) AssignmentStmt:2( - NameExpr(z* [__main__.z]) - IntExpr(3)) + NameExpr(z [__main__.z]) + IntExpr(3) + builtins.int) ExpressionStmt:3( TupleExpr:3( NameExpr(x [__main__.x]) @@ -48,25 +50,27 @@ MypyFile:1( Args()))) [case testAccessingGlobalNameBeforeDefinition] +# flags: --disable-error-code used-before-def x f() x = 1 def f(): pass [out] MypyFile:1( - ExpressionStmt:1( - NameExpr(x [__main__.x])) ExpressionStmt:2( - CallExpr:2( + NameExpr(x [__main__.x])) + ExpressionStmt:3( + CallExpr:3( NameExpr(f [__main__.f]) Args())) - AssignmentStmt:3( - NameExpr(x* [__main__.x]) - IntExpr(1)) - FuncDef:4( + AssignmentStmt:4( + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) + FuncDef:5( f - Block:4( - PassStmt:4()))) + Block:5( + PassStmt:5()))) [case testFunctionArgs] def f(x, y): @@ -117,8 +121,9 @@ MypyFile:1( NameExpr(g [__main__.g]) Args())))) AssignmentStmt:4( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) FuncDef:5( g Block:5( @@ -134,8 +139,9 @@ def f(y): [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(2)) @@ -163,8 +169,9 @@ x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) FuncDef:2( f Block:2( @@ -498,17 +505,21 @@ MypyFile:1( ExpressionStmt:3( Ellipsis))) AssignmentStmt:4( - NameExpr(x* [__main__.x] = 1) - IntExpr(1)) + 
NameExpr(x [__main__.x] = 1) + IntExpr(1) + Literal[1]?) AssignmentStmt:5( - NameExpr(y* [__main__.y] = 1.0) - FloatExpr(1.0)) + NameExpr(y [__main__.y] = 1.0) + FloatExpr(1.0) + Literal[1.0]?) AssignmentStmt:6( - NameExpr(s* [__main__.s] = hi) - StrExpr(hi)) + NameExpr(s [__main__.s] = hi) + StrExpr(hi) + Literal['hi']?) AssignmentStmt:7( - NameExpr(t* [__main__.t] = True) - NameExpr(True [builtins.True])) + NameExpr(t [__main__.t] = True) + NameExpr(True [builtins.True]) + Literal[True]?) AssignmentStmt:8( NameExpr(n* [__main__.n] = None) CallExpr:8( diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test index 082a3fe69050..86f8b8656fb6 100644 --- a/test-data/unit/semanal-classes.test +++ b/test-data/unit/semanal-classes.test @@ -248,8 +248,9 @@ MypyFile:1( ClassDef:1( A AssignmentStmt:2( - NameExpr(x* [m]) - IntExpr(1)) + NameExpr(x [m]) + IntExpr(1) + builtins.int) AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x [__main__.A.x])))) @@ -287,8 +288,9 @@ MypyFile:1( NameExpr(A [__main__.A])) Then( AssignmentStmt:3( - NameExpr(x* [m]) - IntExpr(1))) + NameExpr(x [m]) + IntExpr(1) + builtins.int)) Else( AssignmentStmt:5( NameExpr(x [__main__.A.x]) @@ -541,8 +543,9 @@ MypyFile:1( ClassDef:2( A AssignmentStmt:3( - NameExpr(X* [m]) - IntExpr(1)) + NameExpr(X [m]) + IntExpr(1) + builtins.int) FuncDef:4( f Args( diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index 2b10beacbf97..a4ed905dcb9f 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -542,7 +542,7 @@ import typing class A: def f(): pass [out] -main:3: error: Method must have at least one argument +main:3: error: Method must have at least one argument. Did you forget the "self" argument? 
[case testInvalidBaseClass] import typing @@ -564,8 +564,8 @@ class A: def f() -> None: pass def g(): pass [out] -main:3: error: Method must have at least one argument -main:4: error: Method must have at least one argument +main:3: error: Method must have at least one argument. Did you forget the "self" argument? +main:4: error: Method must have at least one argument. Did you forget the "self" argument? [case testMultipleMethodDefinition] import typing @@ -1474,3 +1474,12 @@ y: Unpack[TVariadic] # E: TypeVarTuple "TVariadic" is unbound class Variadic(Generic[Unpack[TVariadic], Unpack[TVariadic2]]): # E: Can only use one type var tuple in a class def pass + +# TODO: this should generate an error +#def bad_args(*args: TVariadic): +# pass + +def bad_kwargs(**kwargs: Unpack[TVariadic]): # E: Unpack item in ** argument must be a TypedDict + pass + +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/semanal-expressions.test b/test-data/unit/semanal-expressions.test index 98bf32708f1b..fa07e533a842 100644 --- a/test-data/unit/semanal-expressions.test +++ b/test-data/unit/semanal-expressions.test @@ -15,8 +15,9 @@ x.y [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) ExpressionStmt:2( MemberExpr:2( NameExpr(x [__main__.x]) @@ -80,8 +81,9 @@ not x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) ExpressionStmt:2( UnaryExpr:2( - @@ -187,8 +189,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( ListComprehension:2( GeneratorExpr:2( @@ -223,8 +226,9 @@ b = [x for x in a if x] [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) AssignmentStmt:2( NameExpr(b* [__main__.b]) ListComprehension:2( @@ -240,8 +244,9 @@ a = 
0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( SetComprehension:2( GeneratorExpr:2( @@ -258,8 +263,9 @@ b = {x for x in a if x} [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) AssignmentStmt:2( NameExpr(b* [__main__.b]) SetComprehension:2( @@ -275,8 +281,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( DictionaryComprehension:2( NameExpr(x [l]) @@ -293,8 +300,9 @@ b = {x: x + 1 for x in a if x} [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) AssignmentStmt:2( NameExpr(b* [__main__.b]) DictionaryComprehension:2( @@ -313,8 +321,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) @@ -327,8 +336,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) @@ -345,8 +355,9 @@ lambda: x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(0)) + NameExpr(x [__main__.x]) + IntExpr(0) + builtins.int) ExpressionStmt:2( LambdaExpr:2( Block:2( diff --git a/test-data/unit/semanal-modules.test b/test-data/unit/semanal-modules.test index 16b9a9b18250..bc381293161f 100644 --- a/test-data/unit/semanal-modules.test +++ b/test-data/unit/semanal-modules.test @@ -16,8 +16,9 @@ MypyFile:1( MypyFile:1( tmp/x.py AssignmentStmt:1( - NameExpr(y* [x.y]) - IntExpr(1))) + NameExpr(y [x.y]) + IntExpr(1) + builtins.int)) [case testImportedNameInType] import m @@ -51,8 +52,9 @@ MypyFile:1( MypyFile:1( tmp/m.py 
AssignmentStmt:1( - NameExpr(y* [m.y]) - IntExpr(1))) + NameExpr(y [m.y]) + IntExpr(1) + builtins.int)) [case testImportFromType] from m import c @@ -342,8 +344,9 @@ MypyFile:1( MypyFile:1( tmp/m/n.py AssignmentStmt:1( - NameExpr(x* [m.n.x]) - IntExpr(1))) + NameExpr(x [m.n.x]) + IntExpr(1) + builtins.int)) [case testImportFromSubmodule] from m._n import x @@ -448,8 +451,9 @@ MypyFile:1( MypyFile:1( tmp/m/n/k.py AssignmentStmt:1( - NameExpr(x* [m.n.k.x]) - IntExpr(1))) + NameExpr(x [m.n.k.x]) + IntExpr(1) + builtins.int)) [case testImportInSubmodule] import m._n @@ -609,8 +613,9 @@ MypyFile:1( MypyFile:1( tmp/x.py AssignmentStmt:1( - NameExpr(y* [x.y]) - IntExpr(1))) + NameExpr(y [x.y]) + IntExpr(1) + builtins.int)) [case testRelativeImport0] import m.x @@ -637,8 +642,9 @@ MypyFile:1( MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(y* [m.z.y]) - IntExpr(1))) + NameExpr(y [m.z.y]) + IntExpr(1) + builtins.int)) [case testRelativeImport1] import m.t.b as b @@ -673,13 +679,15 @@ MypyFile:1( MypyFile:1( tmp/m/x.py AssignmentStmt:1( - NameExpr(y* [m.x.y]) - IntExpr(1))) + NameExpr(y [m.x.y]) + IntExpr(1) + builtins.int)) MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(y* [m.z.y]) - IntExpr(3))) + NameExpr(y [m.z.y]) + IntExpr(3) + builtins.int)) [case testRelativeImport2] import m.t.b as b @@ -712,13 +720,15 @@ MypyFile:1( MypyFile:1( tmp/m/x.py AssignmentStmt:1( - NameExpr(y* [m.x.y]) - IntExpr(1))) + NameExpr(y [m.x.y]) + IntExpr(1) + builtins.int)) MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(y* [m.z.y]) - IntExpr(3))) + NameExpr(y [m.z.y]) + IntExpr(3) + builtins.int)) [case testRelativeImport3] import m.t @@ -762,8 +772,9 @@ MypyFile:1( MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(zy* [m.z.zy]) - IntExpr(3))) + NameExpr(zy [m.z.zy]) + IntExpr(3) + builtins.int)) [case testRelativeImportFromSameModule] import m.x @@ -803,7 +814,7 @@ def somef_unction(): [file f.py] from m.x import somefunction [out] -tmp/f.py:1: error: Module "m.x" has no 
attribute "somefunction"; maybe "somef_unction" or "some_function"? +tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "some_function" or "somef_unction"? [case testImportMisspellingMultipleCandidatesTruncated] import f @@ -820,7 +831,7 @@ def somefun_ction(): [file f.py] from m.x import somefunction [out] -tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "somefun_ction", "somefu_nction", or "somef_unction"? +tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "some_function", "somef_unction", or "somefu_nction"? [case testFromImportAsInStub] from m import * @@ -914,5 +925,6 @@ MypyFile:1( MypyFile:1( tmp/x.py AssignmentStmt:1( - NameExpr(a* [x.a]) - IntExpr(1))) + NameExpr(a [x.a]) + IntExpr(1) + builtins.int)) diff --git a/test-data/unit/semanal-python310.test b/test-data/unit/semanal-python310.test index a009636575dc..9418ac2912b2 100644 --- a/test-data/unit/semanal-python310.test +++ b/test-data/unit/semanal-python310.test @@ -8,8 +8,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -28,8 +29,9 @@ a [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -49,8 +51,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -78,8 +81,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -99,8 +103,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) 
MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -121,11 +126,13 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( - NameExpr(a* [__main__.a]) - IntExpr(1)) + NameExpr(a [__main__.a]) + IntExpr(1) + builtins.int) MatchStmt:3( NameExpr(x [__main__.x]) Pattern( @@ -144,8 +151,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -164,8 +172,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -191,8 +200,9 @@ b = 1 MypyFile:1( Import:1(_a) AssignmentStmt:3( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:4( NameExpr(x [__main__.x]) Pattern( diff --git a/test-data/unit/semanal-statements.test b/test-data/unit/semanal-statements.test index fdc5ca2bbbdd..013452068cf1 100644 --- a/test-data/unit/semanal-statements.test +++ b/test-data/unit/semanal-statements.test @@ -272,8 +272,9 @@ else: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) IfStmt:2( If( NameExpr(x [__main__.x])) @@ -326,8 +327,9 @@ MypyFile:1( NameExpr(y* [__main__.y])) IntExpr(1)) AssignmentStmt:2( - NameExpr(xx* [__main__.xx]) - IntExpr(1)) + NameExpr(xx [__main__.xx]) + IntExpr(1) + builtins.int) AssignmentStmt:3( MemberExpr:3( NameExpr(x [__main__.x]) @@ -408,8 +410,9 @@ MypyFile:1( [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y])) @@ -436,8 +439,9 @@ y, x = 1 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - 
IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) @@ -450,8 +454,9 @@ y, (x, z) = 1 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) @@ -468,8 +473,9 @@ if x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) IfStmt:2( If( NameExpr(x [__main__.x])) @@ -510,8 +516,9 @@ del x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) DelStmt:2( NameExpr(x [__main__.x]))) @@ -782,6 +789,7 @@ MypyFile:1( Args()))))) [case testTryExceptWithMultipleHandlers] +class Err(BaseException): pass try: pass except BaseException as e: @@ -789,36 +797,34 @@ except BaseException as e: except Err as f: f = BaseException() # Fail f = Err() -class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] MypyFile:1( - TryStmt:1( - Block:1( - PassStmt:2()) + ClassDef:1( + Err + BaseType( + builtins.BaseException) + PassStmt:1()) + TryStmt:2( + Block:2( + PassStmt:3()) NameExpr(BaseException [builtins.BaseException]) NameExpr(e* [__main__.e]) - Block:3( - PassStmt:4()) + Block:4( + PassStmt:5()) NameExpr(Err [__main__.Err]) NameExpr(f* [__main__.f]) - Block:5( - AssignmentStmt:6( + Block:6( + AssignmentStmt:7( NameExpr(f [__main__.f]) - CallExpr:6( + CallExpr:7( NameExpr(BaseException [builtins.BaseException]) Args())) - AssignmentStmt:7( + AssignmentStmt:8( NameExpr(f [__main__.f]) - CallExpr:7( + CallExpr:8( NameExpr(Err [__main__.Err]) - Args())))) - ClassDef:8( - Err - BaseType( - builtins.BaseException) - PassStmt:8())) - + Args()))))) [case testMultipleAssignmentWithPartialNewDef] # flags: --allow-redefinition o = None @@ -961,16 +967,18 @@ MypyFile:1( Block:2( PassStmt:2())) AssignmentStmt:3( - 
NameExpr(x'* [__main__.x']) - IntExpr(0)) + NameExpr(x' [__main__.x']) + IntExpr(0) + builtins.int) ExpressionStmt:4( CallExpr:4( NameExpr(f [__main__.f]) Args( NameExpr(x' [__main__.x'])))) AssignmentStmt:5( - NameExpr(x* [__main__.x]) - StrExpr()) + NameExpr(x [__main__.x]) + StrExpr() + builtins.str) ExpressionStmt:6( CallExpr:6( NameExpr(f [__main__.f]) @@ -993,8 +1001,9 @@ MypyFile:1( Block:2( PassStmt:2())) AssignmentStmt:3( - NameExpr(x* [__main__.x]) - IntExpr(0)) + NameExpr(x [__main__.x]) + IntExpr(0) + builtins.int) ExpressionStmt:4( CallExpr:4( NameExpr(f [__main__.f]) @@ -1046,15 +1055,17 @@ x = '' [out] MypyFile:1( AssignmentStmt:2( - NameExpr(x* [__main__.x]) - IntExpr(0)) + NameExpr(x [__main__.x]) + IntExpr(0) + builtins.int) ExpressionStmt:3( NameExpr(x [__main__.x])) ClassDef:4( A AssignmentStmt:5( - NameExpr(x* [m]) - IntExpr(1))) + NameExpr(x [m]) + IntExpr(1) + builtins.int)) AssignmentStmt:6( NameExpr(x [__main__.x]) StrExpr())) @@ -1114,3 +1125,191 @@ MypyFile:1( AssignmentStmt:5( NameExpr(y [__main__.y]) IntExpr(1))))) + +[case testConstantFold1] +from typing_extensions import Final +add: Final = 15 + 47 +add_mul: Final = (2 + 3) * 5 +sub: Final = 7 - 11 +bit_and: Final = 6 & 10 +bit_or: Final = 6 | 10 +bit_xor: Final = 6 ^ 10 +lshift: Final = 5 << 2 +rshift: Final = 13 >> 2 +lshift0: Final = 5 << 0 +rshift0: Final = 13 >> 0 +[out] +MypyFile:1( + ImportFrom:1(typing_extensions, [Final]) + AssignmentStmt:2( + NameExpr(add [__main__.add] = 62) + OpExpr:2( + + + IntExpr(15) + IntExpr(47)) + Literal[62]?) + AssignmentStmt:3( + NameExpr(add_mul [__main__.add_mul] = 25) + OpExpr:3( + * + OpExpr:3( + + + IntExpr(2) + IntExpr(3)) + IntExpr(5)) + Literal[25]?) + AssignmentStmt:4( + NameExpr(sub [__main__.sub] = -4) + OpExpr:4( + - + IntExpr(7) + IntExpr(11)) + Literal[-4]?) + AssignmentStmt:5( + NameExpr(bit_and [__main__.bit_and] = 2) + OpExpr:5( + & + IntExpr(6) + IntExpr(10)) + Literal[2]?) 
+ AssignmentStmt:6( + NameExpr(bit_or [__main__.bit_or] = 14) + OpExpr:6( + | + IntExpr(6) + IntExpr(10)) + Literal[14]?) + AssignmentStmt:7( + NameExpr(bit_xor [__main__.bit_xor] = 12) + OpExpr:7( + ^ + IntExpr(6) + IntExpr(10)) + Literal[12]?) + AssignmentStmt:8( + NameExpr(lshift [__main__.lshift] = 20) + OpExpr:8( + << + IntExpr(5) + IntExpr(2)) + Literal[20]?) + AssignmentStmt:9( + NameExpr(rshift [__main__.rshift] = 3) + OpExpr:9( + >> + IntExpr(13) + IntExpr(2)) + Literal[3]?) + AssignmentStmt:10( + NameExpr(lshift0 [__main__.lshift0] = 5) + OpExpr:10( + << + IntExpr(5) + IntExpr(0)) + Literal[5]?) + AssignmentStmt:11( + NameExpr(rshift0 [__main__.rshift0] = 13) + OpExpr:11( + >> + IntExpr(13) + IntExpr(0)) + Literal[13]?)) + +[case testConstantFold2] +from typing_extensions import Final +neg1: Final = -5 +neg2: Final = --1 +neg3: Final = -0 +pos: Final = +5 +inverted1: Final = ~0 +inverted2: Final = ~5 +inverted3: Final = ~3 +p0: Final = 3**0 +p1: Final = 3**5 +p2: Final = (-5)**3 +p3: Final = 0**0 +s: Final = 'x' + 'y' +[out] +MypyFile:1( + ImportFrom:1(typing_extensions, [Final]) + AssignmentStmt:2( + NameExpr(neg1 [__main__.neg1] = -5) + UnaryExpr:2( + - + IntExpr(5)) + Literal[-5]?) + AssignmentStmt:3( + NameExpr(neg2 [__main__.neg2] = 1) + UnaryExpr:3( + - + UnaryExpr:3( + - + IntExpr(1))) + Literal[1]?) + AssignmentStmt:4( + NameExpr(neg3 [__main__.neg3] = 0) + UnaryExpr:4( + - + IntExpr(0)) + Literal[0]?) + AssignmentStmt:5( + NameExpr(pos [__main__.pos] = 5) + UnaryExpr:5( + + + IntExpr(5)) + Literal[5]?) + AssignmentStmt:6( + NameExpr(inverted1 [__main__.inverted1] = -1) + UnaryExpr:6( + ~ + IntExpr(0)) + Literal[-1]?) + AssignmentStmt:7( + NameExpr(inverted2 [__main__.inverted2] = -6) + UnaryExpr:7( + ~ + IntExpr(5)) + Literal[-6]?) + AssignmentStmt:8( + NameExpr(inverted3 [__main__.inverted3] = -4) + UnaryExpr:8( + ~ + IntExpr(3)) + Literal[-4]?) 
+ AssignmentStmt:9( + NameExpr(p0 [__main__.p0] = 1) + OpExpr:9( + ** + IntExpr(3) + IntExpr(0)) + Literal[1]?) + AssignmentStmt:10( + NameExpr(p1 [__main__.p1] = 243) + OpExpr:10( + ** + IntExpr(3) + IntExpr(5)) + Literal[243]?) + AssignmentStmt:11( + NameExpr(p2 [__main__.p2] = -125) + OpExpr:11( + ** + UnaryExpr:11( + - + IntExpr(5)) + IntExpr(3)) + Literal[-125]?) + AssignmentStmt:12( + NameExpr(p3 [__main__.p3] = 1) + OpExpr:12( + ** + IntExpr(0) + IntExpr(0)) + Literal[1]?) + AssignmentStmt:13( + NameExpr(s [__main__.s] = xy) + OpExpr:13( + + + StrExpr(x) + StrExpr(y)) + Literal['xy']?)) diff --git a/test-data/unit/semanal-symtable.test b/test-data/unit/semanal-symtable.test index bdf4f52ae5fc..c886080557b0 100644 --- a/test-data/unit/semanal-symtable.test +++ b/test-data/unit/semanal-symtable.test @@ -9,7 +9,7 @@ x = 1 [out] __main__: SymbolTable( - x : Gdef/Var (__main__.x)) + x : Gdef/Var (__main__.x) : builtins.int) [case testFuncDef] def f(): pass @@ -35,7 +35,7 @@ __main__: m : Gdef/MypyFile (m)) m: SymbolTable( - x : Gdef/Var (m.x)) + x : Gdef/Var (m.x) : builtins.int) [case testImportFromModule] from m import x @@ -49,7 +49,7 @@ __main__: m: SymbolTable( x : Gdef/TypeInfo (m.x) - y : Gdef/Var (m.y)) + y : Gdef/Var (m.y) : builtins.int) [case testImportAs] from m import x as xx @@ -63,7 +63,7 @@ __main__: m: SymbolTable( x : Gdef/TypeInfo (m.x) - y : Gdef/Var (m.y)) + y : Gdef/Var (m.y) : builtins.int) [case testFailingImports] from sys import non_existing1 # type: ignore @@ -80,7 +80,7 @@ __main__: non_existing4 : Gdef/Var (__main__.non_existing4) : Any) sys: SymbolTable( - platform : Gdef/Var (sys.platform) + platform : Gdef/Var (sys.platform) : builtins.str version_info : Gdef/Var (sys.version_info)) [case testDecorator] @@ -95,6 +95,6 @@ def g() -> None: [out] __main__: SymbolTable( - Callable : Gdef/Var (typing.Callable) + Callable : Gdef/Var (typing.Callable) : builtins.int dec : Gdef/FuncDef (__main__.dec) : def (f: def ()) -> def () g : 
Gdef/Decorator (__main__.g) : def ()) diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index d832772f5f81..494d701b758a 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -790,6 +790,7 @@ def f(x: int) -> None: pass def f(*args) -> None: pass x = f +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [overload]) @@ -1032,6 +1033,7 @@ MypyFile:1( [case testVarArgsAndKeywordArgs] def g(*x: int, y: str = ''): pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( FuncDef:1( @@ -1558,3 +1560,29 @@ MypyFile:1( AssignmentStmt:2( NameExpr(TV* [__main__.TV]) TypeVarTupleExpr:2())) + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleCallable] +from typing_extensions import TypeVarTuple, Unpack +from typing import Callable +Ts = TypeVarTuple("Ts") + +def foo(x: Callable[[Unpack[Ts]], None]) -> None: + pass +[out] +MypyFile:1( + ImportFrom:1(typing_extensions, [TypeVarTuple, Unpack]) + ImportFrom:2(typing, [Callable]) + AssignmentStmt:3( + NameExpr(Ts* [__main__.Ts]) + TypeVarTupleExpr:3()) + FuncDef:5( + foo + Args( + Var(x)) + def [Ts] (x: def (*Unpack[Ts`-1])) + Block:5( + PassStmt:6()))) + +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 71d31144d827..68f4dc0ab123 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -2813,3 +2813,12 @@ class Some: def __int__(self) -> int: ... def __float__(self) -> float: ... def __index__(self) -> int: ... 
+ + +[case testTypeVarPEP604Bound] +from typing import TypeVar +T = TypeVar("T", bound=str | None) +[out] +from typing import TypeVar + +T = TypeVar('T', bound=str | None) diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test index 5cbdf38d1b4f..cd4071eb14ee 100644 --- a/test-data/unit/typexport-basic.test +++ b/test-data/unit/typexport-basic.test @@ -21,15 +21,15 @@ [case testConstructorCall] import typing -A() -B() class A: pass class B: pass +A() +B() [out] -CallExpr(2) : A -NameExpr(2) : def () -> A -CallExpr(3) : B -NameExpr(3) : def () -> B +CallExpr(4) : A +NameExpr(4) : def () -> A +CallExpr(5) : B +NameExpr(5) : def () -> B [case testLiterals] import typing @@ -139,6 +139,8 @@ class float: def __sub__(self, x: int) -> float: pass class type: pass class str: pass +class list: pass +class dict: pass [out] OpExpr(3) : builtins.int OpExpr(4) : builtins.float @@ -165,6 +167,8 @@ class bool: pass class type: pass class function: pass class str: pass +class list: pass +class dict: pass [out] ComparisonExpr(3) : builtins.bool ComparisonExpr(4) : builtins.bool @@ -202,17 +206,17 @@ UnaryExpr(6) : builtins.bool [case testFunctionCall] ## CallExpr from typing import Tuple -f( - A(), - B()) class A: pass class B: pass def f(a: A, b: B) -> Tuple[A, B]: pass +f( + A(), + B()) [builtins fixtures/tuple-simple.pyi] [out] -CallExpr(3) : Tuple[A, B] -CallExpr(4) : A -CallExpr(5) : B +CallExpr(6) : Tuple[A, B] +CallExpr(7) : A +CallExpr(8) : B -- Statements @@ -602,28 +606,26 @@ NameExpr(4) : def [t] (x: t`-1) -> t`-1 ## CallExpr from typing import TypeVar, Generic T = TypeVar('T') -f(g()) -f(h(b)) -f(h(c)) - -b = None # type: B -c = None # type: C - +class A(Generic[T]): pass +class B: pass +class C(B): pass def f(a: 'A[B]') -> None: pass - def g() -> 'A[T]': pass def h(a: T) -> 'A[T]': pass -class A(Generic[T]): pass -class B: pass -class C(B): pass +b = None # type: B +c = None # type: C + +f(g()) +f(h(b)) +f(h(c)) [out] -CallExpr(4) : None 
-CallExpr(4) : A[B] -CallExpr(5) : None -CallExpr(5) : A[B] -CallExpr(6) : None -CallExpr(6) : A[B] +CallExpr(14) : None +CallExpr(14) : A[B] +CallExpr(15) : None +CallExpr(15) : A[B] +CallExpr(16) : None +CallExpr(16) : A[B] [case testInferGenericTypeForLocalVariable] from typing import TypeVar, Generic @@ -697,21 +699,21 @@ ListExpr(2) : builtins.list[Any] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -map( - f, - [A()]) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass +map( + f, + [A()]) [builtins fixtures/list.pyi] [out] -CallExpr(4) : builtins.list[B] -NameExpr(4) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -NameExpr(5) : def (a: A) -> B -CallExpr(6) : A -ListExpr(6) : builtins.list[A] -NameExpr(6) : def () -> A +CallExpr(8) : builtins.list[B] +NameExpr(8) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +NameExpr(9) : def (a: A) -> B +CallExpr(10) : A +ListExpr(10) : builtins.list[A] +NameExpr(10) : def () -> A -- Lambdas @@ -761,106 +763,106 @@ ListExpr(2) : builtins.list[A] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -l = None # type: List[A] -map( - lambda x: f(x), l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass +l = None # type: List[A] +map( + lambda x: f(x), l) [builtins fixtures/list.pyi] [out] -CallExpr(5) : builtins.list[B] -NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -CallExpr(6) : B -LambdaExpr(6) : def (A) -> B -NameExpr(6) : def (a: A) -> B -NameExpr(6) : builtins.list[A] -NameExpr(6) : A +CallExpr(9) : builtins.list[B] +NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +CallExpr(10) : B +LambdaExpr(10) : def (A) -> B +NameExpr(10) : def (a: A) -> B +NameExpr(10) : builtins.list[A] +NameExpr(10) : A [case testLambdaAndHigherOrderFunction2] ## 
LambdaExpr|NameExpr|ListExpr from typing import TypeVar, List, Callable t = TypeVar('t') s = TypeVar('s') -l = None # type: List[A] -map( - lambda x: [f(x)], l) def map(f: Callable[[t], List[s]], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass +l = None # type: List[A] +map( + lambda x: [f(x)], l) [builtins fixtures/list.pyi] [out] -NameExpr(6) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B] -LambdaExpr(7) : def (A) -> builtins.list[B] -ListExpr(7) : builtins.list[B] -NameExpr(7) : def (a: A) -> B -NameExpr(7) : builtins.list[A] -NameExpr(7) : A +NameExpr(10) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B] +LambdaExpr(11) : def (A) -> builtins.list[B] +ListExpr(11) : builtins.list[B] +NameExpr(11) : def (a: A) -> B +NameExpr(11) : builtins.list[A] +NameExpr(11) : A [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') +def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass +class A: pass l = None # type: List[A] map( [lambda x: x], l) -def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass -class A: pass [builtins fixtures/list.pyi] [out] -- TODO We probably should not silently infer 'Any' types in statically typed -- context. Perhaps just fail instead? 
-CallExpr(5) : builtins.list[Any] -NameExpr(5) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any] -LambdaExpr(6) : def (A) -> A -ListExpr(6) : builtins.list[def (A) -> Any] -NameExpr(6) : A -NameExpr(7) : builtins.list[A] +CallExpr(7) : builtins.list[Any] +NameExpr(7) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any] +LambdaExpr(8) : def (A) -> A +ListExpr(8) : builtins.list[def (A) -> Any] +NameExpr(8) : A +NameExpr(9) : builtins.list[A] [case testLambdaAndHigherOrderFunction3] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -l = None # type: List[A] -map( - lambda x: x.b, - l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: b = None # type: B class B: pass +l = None # type: List[A] +map( + lambda x: x.b, + l) [builtins fixtures/list.pyi] [out] -CallExpr(5) : builtins.list[B] -NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -LambdaExpr(6) : def (A) -> B -MemberExpr(6) : B -NameExpr(6) : A -NameExpr(7) : builtins.list[A] +CallExpr(9) : builtins.list[B] +NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +LambdaExpr(10) : def (A) -> B +MemberExpr(10) : B +NameExpr(10) : A +NameExpr(11) : builtins.list[A] [case testLambdaAndHigherOrderFunctionAndKeywordArgs] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') +def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass +class A: + b = None # type: B +class B: pass l = None # type: List[A] map( a=l, f=lambda x: x.b) -def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass -class A: - b = None # type: B -class B: pass [builtins fixtures/list.pyi] [out] -CallExpr(5) : builtins.list[B] -NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -NameExpr(6) : builtins.list[A] -LambdaExpr(7) : def (A) -> B -MemberExpr(7) : B -NameExpr(7) : A +CallExpr(9) : builtins.list[B] +NameExpr(9) : def (f: def (A) -> B, a: 
builtins.list[A]) -> builtins.list[B] +NameExpr(10) : builtins.list[A] +LambdaExpr(11) : def (A) -> B +MemberExpr(11) : B +NameExpr(11) : A -- Boolean operations diff --git a/test-requirements.txt b/test-requirements.txt index 574cb208b4ff..aec11e87e96f 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,13 +1,13 @@ -r mypy-requirements.txt -r build-requirements.txt attrs>=18.0 -black==22.6.0 # must match version in .pre-commit-config.yaml +black==22.12.0 # must match version in .pre-commit-config.yaml filelock>=3.3.0 flake8==5.0.4 # must match version in .pre-commit-config.yaml -flake8-bugbear==22.8.23 # must match version in .pre-commit-config.yaml -flake8-noqa==1.2.9 # must match version in .pre-commit-config.yaml -isort[colors]==5.10.1 # must match version in .pre-commit-config.yaml -lxml>=4.4.0; python_version<'3.11' +flake8-bugbear==22.12.6 # must match version in .pre-commit-config.yaml +flake8-noqa==1.3.0 # must match version in .pre-commit-config.yaml +isort[colors]==5.11.4 # must match version in .pre-commit-config.yaml +lxml>=4.9.1; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12' psutil>=4.0 # pytest 6.2.3 does not support Python 3.10 pytest>=6.2.4 @@ -15,5 +15,6 @@ pytest-xdist>=1.34.0 pytest-forked>=1.3.0,<2.0.0 pytest-cov>=2.10.0 py>=1.5.2 -setuptools!=50 +setuptools>=65.5.1 six +tomli>=1.1.0 diff --git a/tox.ini b/tox.ini index 92810bed9981..a155ec726386 100644 --- a/tox.ini +++ b/tox.ini @@ -2,7 +2,6 @@ minversion = 3.8.0 skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True} envlist = - py36, py37, py38, py39, @@ -14,7 +13,7 @@ isolated_build = true [testenv] description = run the test driver with {basepython} -passenv = PYTEST_XDIST_WORKER_COUNT PROGRAMDATA PROGRAMFILES(X86) +passenv = PYTEST_XDIST_WORKER_COUNT PROGRAMDATA PROGRAMFILES(X86) PYTEST_ADDOPTS deps = -rtest-requirements.txt commands = python -m pytest {posargs} @@ -27,6 +26,7 @@ commands = [testenv:type] description = type check 
ourselves +passenv = TERM MYPY_FORCE_COLOR MYPY_FORCE_TERMINAL_WIDTH commands = python -m mypy --config-file mypy_self_check.ini -p mypy -p mypyc python -m mypy --config-file mypy_self_check.ini misc --exclude misc/fix_annotate.py --exclude misc/async_matrix.py --exclude misc/sync-typeshed.py From 23a6a0f1a209a642b6cca976c52c466a783d8fa7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Sat, 12 Aug 2023 21:06:36 +0200 Subject: [PATCH 25/26] Fix whitespaces --- misc/test-stubgenc.sh | 4 ++-- .../pybind11_mypy_demo/basics.pyi | 20 +++++++++---------- test-data/unit/stubgen.test | 4 ++-- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/misc/test-stubgenc.sh b/misc/test-stubgenc.sh index 5beb26340e7c..7713e1b04e43 100755 --- a/misc/test-stubgenc.sh +++ b/misc/test-stubgenc.sh @@ -29,7 +29,7 @@ function stubgenc_test() { } # create stubs without docstrings -stubgenc_test stubgen -p pybind11_mypy_demo +stubgenc_test stubgen -p pybind11_mypy_demo # create stubs with docstrings -stubgenc_test stubgen-include-docs -p pybind11_mypy_demo --include-docstrings +stubgenc_test stubgen-include-docs -p pybind11_mypy_demo --include-docstrings exit $EXIT diff --git a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi index eab1439cbee0..676d7f6d3f15 100644 --- a/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi +++ b/test-data/pybind11_mypy_demo/stubgen-include-docs/pybind11_mypy_demo/basics.pyi @@ -67,46 +67,46 @@ class Point: def __init__(self) -> None: """__init__(*args, **kwargs) Overloaded function. - + 1. __init__(self: pybind11_mypy_demo.basics.Point) -> None - + 2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" @overload def __init__(self, x: float, y: float) -> None: """__init__(*args, **kwargs) Overloaded function. - + 1. 
__init__(self: pybind11_mypy_demo.basics.Point) -> None - + 2. __init__(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> None""" @overload def distance_to(self, x: float, y: float) -> float: """distance_to(*args, **kwargs) Overloaded function. - + 1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float - + 2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" @overload def distance_to(self, other: Point) -> float: """distance_to(*args, **kwargs) Overloaded function. - + 1. distance_to(self: pybind11_mypy_demo.basics.Point, x: float, y: float) -> float - + 2. distance_to(self: pybind11_mypy_demo.basics.Point, other: pybind11_mypy_demo.basics.Point) -> float""" @property def length(self) -> float: ... def answer() -> int: '''answer() -> int - + answer docstring, with end quote"''' def midpoint(left: float, right: float) -> float: """midpoint(left: float, right: float) -> float""" def sum(arg0: int, arg1: int) -> int: '''sum(arg0: int, arg1: int) -> int - + multiline docstring test, edge case quotes """\'\'\'''' def weighted_midpoint(left: float, right: float, alpha: float = ...) -> float: """weighted_midpoint(left: float, right: float, alpha: float = 0.5) -> float""" diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 68ad84fc2671..774a17b76161 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -3241,7 +3241,7 @@ class A: a multiline docstring""" def func(): """func docstring - + don't forget to indent""" def nodoc(): ... @@ -3252,7 +3252,7 @@ class B: ... def nodoc(): ... - + [out] class A: def func() -> None: ... 
From 7580e22dbd3fc106bbe1e05f303ee29dd42410d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Ch=C3=BDlek?= Date: Sun, 13 Aug 2023 12:48:43 +0200 Subject: [PATCH 26/26] Improve code style --- mypy/stubgenc.py | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 70ca3e4f7c1b..31487f9d0dcf 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -385,25 +385,19 @@ def generate_c_function_stub( # a sig generator indicates @classmethod by specifying the cls arg if class_name and signature.args and signature.args[0].name == "cls": output.append("@classmethod") + output_signature = "def {function}({args}) -> {ret}:".format( + function=name, + args=", ".join(args), + ret=strip_or_import(signature.ret_type, module, known_modules, imports), + ) if include_docstrings and docstr: - output.append( - "def {function}({args}) -> {ret}:".format( - function=name, - args=", ".join(args), - ret=strip_or_import(signature.ret_type, module, known_modules, imports), - ) - ) docstr_quoted = mypy.util.quote_docstring(docstr.strip()) docstr_indented = "\n ".join(docstr_quoted.split("\n")) + output.append(output_signature) output.extend(f" {docstr_indented}".split("\n")) else: - output.append( - "def {function}({args}) -> {ret}: ...".format( - function=name, - args=", ".join(args), - ret=strip_or_import(signature.ret_type, module, known_modules, imports), - ) - ) + output_signature += " ..." + output.append(output_signature) def strip_or_import(
folder/subfolder/something.py