diff --git a/ChangeLog b/ChangeLog
index 3da3ef5008..43efc8f454 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -16,6 +16,12 @@ Release date: TBA
 
     Closes PyCQA/pylint#4633
 
+* Fix unhandled StopIteration during inference, following the implementation
+  of PEP479 in python 3.7+
+
+    Closes PyCQA/pylint#4631
+    Closes #1080
+
 What's New in astroid 2.6.1?
 ============================
 Release date: 2021-06-29
diff --git a/astroid/arguments.py b/astroid/arguments.py
index bffcf2162c..f5e291450b 100644
--- a/astroid/arguments.py
+++ b/astroid/arguments.py
@@ -101,6 +101,8 @@ def _unpack_keywords(self, keywords, context=None):
                 except InferenceError:
                     values[name] = util.Uninferable
                     continue
+                except StopIteration:
+                    continue
 
                 if not isinstance(inferred, nodes.Dict):
                     # Not something we can work with.
@@ -113,6 +115,8 @@ def _unpack_keywords(self, keywords, context=None):
                     except InferenceError:
                         values[name] = util.Uninferable
                         continue
+                    except StopIteration:
+                        continue
                     if not isinstance(dict_key, nodes.Const):
                         values[name] = util.Uninferable
                         continue
@@ -140,6 +144,8 @@ def _unpack_args(self, args, context=None):
                 except InferenceError:
                     values.append(util.Uninferable)
                     continue
+                except StopIteration:
+                    continue
 
                 if inferred is util.Uninferable:
                     values.append(util.Uninferable)
diff --git a/astroid/bases.py b/astroid/bases.py
index c272b2a368..df6d196c36 100644
--- a/astroid/bases.py
+++ b/astroid/bases.py
@@ -169,8 +169,10 @@ def _infer_method_result_truth(instance, method_name, context):
             for value in meth.infer_call_result(instance, context=context):
                 if value is util.Uninferable:
                     return value
-
-                inferred = next(value.infer(context=context))
+                try:
+                    inferred = next(value.infer(context=context))
+                except StopIteration as e:
+                    raise InferenceError(context=context) from e
                 return inferred.bool_value()
     except InferenceError:
         pass
@@ -344,7 +346,7 @@ def getitem(self, index, context=None):
                 node=self,
                 context=context,
             )
-        return next(method.infer_call_result(self, new_context))
+        return next(method.infer_call_result(self, new_context), None)
 
 
 class UnboundMethod(Proxy):
@@ -434,7 +436,10 @@ def _infer_type_new_call(self, caller, context):
         from astroid import node_classes
 
         # Verify the metaclass
-        mcs = next(caller.args[0].infer(context=context))
+        try:
+            mcs = next(caller.args[0].infer(context=context))
+        except StopIteration as e:
+            raise InferenceError(context=context) from e
         if mcs.__class__.__name__ != "ClassDef":
             # Not a valid first argument.
             return None
@@ -443,7 +448,10 @@ def _infer_type_new_call(self, caller, context):
             return None
 
         # Verify the name
-        name = next(caller.args[1].infer(context=context))
+        try:
+            name = next(caller.args[1].infer(context=context))
+        except StopIteration as e:
+            raise InferenceError(context=context) from e
         if name.__class__.__name__ != "Const":
             # Not a valid name, needs to be a const.
             return None
@@ -452,24 +460,39 @@ def _infer_type_new_call(self, caller, context):
             return None
 
         # Verify the bases
-        bases = next(caller.args[2].infer(context=context))
+        try:
+            bases = next(caller.args[2].infer(context=context))
+        except StopIteration as e:
+            raise InferenceError(context=context) from e
         if bases.__class__.__name__ != "Tuple":
             # Needs to be a tuple.
             return None
-        inferred_bases = [next(elt.infer(context=context)) for elt in bases.elts]
+        try:
+            inferred_bases = [next(elt.infer(context=context)) for elt in bases.elts]
+        except StopIteration as e:
+            raise InferenceError(context=context) from e
         if any(base.__class__.__name__ != "ClassDef" for base in inferred_bases):
             # All the bases needs to be Classes
             return None
 
         # Verify the attributes.
-        attrs = next(caller.args[3].infer(context=context))
+        try:
+            attrs = next(caller.args[3].infer(context=context))
+        except StopIteration as e:
+            raise InferenceError(context=context) from e
         if attrs.__class__.__name__ != "Dict":
             # Needs to be a dictionary.
             return None
         cls_locals = collections.defaultdict(list)
         for key, value in attrs.items:
-            key = next(key.infer(context=context))
-            value = next(value.infer(context=context))
+            try:
+                key = next(key.infer(context=context))
+            except StopIteration as e:
+                raise InferenceError(context=context) from e
+            try:
+                value = next(value.infer(context=context))
+            except StopIteration as e:
+                raise InferenceError(context=context) from e
             # Ignore non string keys
             if key.__class__.__name__ == "Const" and isinstance(key.value, str):
                 cls_locals[key.value].append(value)
diff --git a/astroid/brain/brain_builtin_inference.py b/astroid/brain/brain_builtin_inference.py
index da01e44443..9d98cf9e68 100644
--- a/astroid/brain/brain_builtin_inference.py
+++ b/astroid/brain/brain_builtin_inference.py
@@ -329,7 +329,7 @@ def is_iterable(n):
 
     try:
         inferred = next(arg.infer(context))
-    except (InferenceError, NameInferenceError) as exc:
+    except (InferenceError, NameInferenceError, StopIteration) as exc:
         raise UseInferenceDefault from exc
     if isinstance(inferred, nodes.Dict):
         items = inferred.items
@@ -432,11 +432,11 @@ def infer_super(node, context=None):
     else:
         try:
             mro_pointer = next(node.args[0].infer(context=context))
-        except InferenceError as exc:
+        except (InferenceError, StopIteration) as exc:
             raise UseInferenceDefault from exc
         try:
             mro_type = next(node.args[1].infer(context=context))
-        except InferenceError as exc:
+        except (InferenceError, StopIteration) as exc:
             raise UseInferenceDefault from exc
 
     if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
@@ -458,7 +458,7 @@ def _infer_getattr_args(node, context):
     try:
         obj = next(node.args[0].infer(context=context))
         attr = next(node.args[1].infer(context=context))
-    except InferenceError as exc:
+    except (InferenceError, StopIteration) as exc:
         raise UseInferenceDefault from exc
 
     if obj is util.Uninferable or attr is util.Uninferable:
@@ -496,7 +496,7 @@ def infer_getattr(node, context=None):
             # Try to infer the default and return it instead.
             try:
                 return next(node.args[2].infer(context=context))
-            except InferenceError as exc:
+            except (StopIteration, InferenceError) as exc:
                 raise UseInferenceDefault from exc
 
     raise UseInferenceDefault
@@ -544,7 +544,7 @@ def infer_callable(node, context=None):
     argument = node.args[0]
     try:
         inferred = next(argument.infer(context=context))
-    except InferenceError:
+    except (InferenceError, StopIteration):
         return util.Uninferable
     if inferred is util.Uninferable:
         return util.Uninferable
@@ -564,7 +564,7 @@ def infer_property(node, context=None):
     getter = node.args[0]
     try:
         inferred = next(getter.infer(context=context))
-    except InferenceError as exc:
+    except (InferenceError, StopIteration) as exc:
         raise UseInferenceDefault from exc
 
     if not isinstance(inferred, (nodes.FunctionDef, nodes.Lambda)):
@@ -592,7 +592,7 @@ def infer_bool(node, context=None):
     argument = node.args[0]
     try:
         inferred = next(argument.infer(context=context))
-    except InferenceError:
+    except (InferenceError, StopIteration):
         return util.Uninferable
     if inferred is util.Uninferable:
         return util.Uninferable
@@ -682,7 +682,7 @@ def infer_issubclass(callnode, context=None):
 
     try:
         obj_type = next(obj_node.infer(context=context))
-    except InferenceError as exc:
+    except (InferenceError, StopIteration) as exc:
         raise UseInferenceDefault from exc
     if not isinstance(obj_type, nodes.ClassDef):
         raise UseInferenceDefault("TypeError: arg 1 must be class")
@@ -749,13 +749,19 @@ def _class_or_tuple_to_container(node, context=None):
     # Move inferences results into container
     # to simplify later logic
     # raises InferenceError if any of the inferences fall through
-    node_infer = next(node.infer(context=context))
+    try:
+        node_infer = next(node.infer(context=context))
+    except StopIteration as e:
+        raise InferenceError(node=node, context=context) from e
     # arg2 MUST be a type or a TUPLE of types
     # for isinstance
     if isinstance(node_infer, nodes.Tuple):
-        class_container = [
-            next(node.infer(context=context)) for node in node_infer.elts
-        ]
+        try:
+            class_container = [
+                next(node.infer(context=context)) for node in node_infer.elts
+            ]
+        except StopIteration as e:
+            raise InferenceError(node=node, context=context) from e
         class_container = [
             klass_node for klass_node in class_container if klass_node is not None
         ]
@@ -865,7 +871,7 @@ def _build_dict_with_elements(elements):
     values = call.positional_arguments[0]
     try:
         inferred_values = next(values.infer(context=context))
-    except InferenceError:
+    except (InferenceError, StopIteration):
         return _build_dict_with_elements([])
     if inferred_values is util.Uninferable:
         return _build_dict_with_elements([])
diff --git a/astroid/brain/brain_functools.py b/astroid/brain/brain_functools.py
index 37b2180848..248e0fb932 100644
--- a/astroid/brain/brain_functools.py
+++ b/astroid/brain/brain_functools.py
@@ -74,7 +74,7 @@ def _functools_partial_inference(node, context=None):
     partial_function = call.positional_arguments[0]
     try:
         inferred_wrapped_function = next(partial_function.infer(context=context))
-    except InferenceError as exc:
+    except (InferenceError, StopIteration) as exc:
         raise UseInferenceDefault from exc
     if inferred_wrapped_function is Uninferable:
         raise UseInferenceDefault("Cannot infer the wrapped function")
diff --git a/astroid/brain/brain_multiprocessing.py b/astroid/brain/brain_multiprocessing.py
index 6e0b91c8ae..5f68f8b7bd 100644
--- a/astroid/brain/brain_multiprocessing.py
+++ b/astroid/brain/brain_multiprocessing.py
@@ -36,7 +36,7 @@ def Manager():
     try:
         context = next(node["default"].infer())
         base = next(node["base"].infer())
-    except InferenceError:
+    except (InferenceError, StopIteration):
         return module
 
     for node in (context, base):
diff --git a/astroid/brain/brain_namedtuple_enum.py b/astroid/brain/brain_namedtuple_enum.py
index 7cd5efd1a4..8798483cb0 100644
--- a/astroid/brain/brain_namedtuple_enum.py
+++ b/astroid/brain/brain_namedtuple_enum.py
@@ -54,11 +54,11 @@ def _infer_first(node, context):
         raise UseInferenceDefault
     try:
         value = next(node.infer(context=context))
-        if value is util.Uninferable:
-            raise UseInferenceDefault()
-        return value
     except StopIteration as exc:
         raise InferenceError from exc
+    if value is util.Uninferable:
+        raise UseInferenceDefault()
+    return value
 
 
 def _find_func_form_arguments(node, context):
@@ -184,10 +184,15 @@ def infer_named_tuple(node, context=None):
         node, tuple_base_name, context=context
     )
     call_site = arguments.CallSite.from_call(node, context=context)
-    func = next(extract_node("import collections; collections.namedtuple").infer())
+    node = extract_node("import collections; collections.namedtuple")
+    try:
+
+        func = next(node.infer())
+    except StopIteration as e:
+        raise InferenceError(node=node) from e
     try:
         rename = next(call_site.infer_argument(func, "rename", context)).bool_value()
-    except InferenceError:
+    except (InferenceError, StopIteration):
         rename = False
 
     try:
@@ -471,7 +476,10 @@ def infer_typing_namedtuple_class(class_node, context=None):
         """
     ).format(typename=class_node.name, fields=",".join(annassigns_fields))
     node = extract_node(code)
-    generated_class_node = next(infer_named_tuple(node, context))
+    try:
+        generated_class_node = next(infer_named_tuple(node, context))
+    except StopIteration as e:
+        raise InferenceError(node=node, context=context) from e
     for method in class_node.mymethods():
         generated_class_node.locals[method.name] = [method]
 
@@ -507,7 +515,7 @@ def infer_typing_namedtuple(node, context=None):
     # so we extract the args and infer a named tuple.
     try:
         func = next(node.func.infer())
-    except InferenceError as exc:
+    except (InferenceError, StopIteration) as exc:
         raise UseInferenceDefault from exc
 
     if func.qname() != "typing.NamedTuple":
diff --git a/astroid/brain/brain_nose.py b/astroid/brain/brain_nose.py
index a4fbb4e21e..86e70a7be0 100644
--- a/astroid/brain/brain_nose.py
+++ b/astroid/brain/brain_nose.py
@@ -42,7 +42,7 @@ class Test(unittest.TestCase):
     )
     try:
         case = next(module["a"].infer())
-    except InferenceError:
+    except (InferenceError, StopIteration):
         return
     for method in case.methods():
         if method.name.startswith("assert") and "_" not in method.name:
diff --git a/astroid/brain/brain_six.py b/astroid/brain/brain_six.py
index ce357ae86e..c59d072c30 100644
--- a/astroid/brain/brain_six.py
+++ b/astroid/brain/brain_six.py
@@ -189,7 +189,7 @@ def transform_six_add_metaclass(node):  # pylint: disable=inconsistent-return-st
 
         try:
             func = next(decorator.func.infer())
-        except InferenceError:
+        except (InferenceError, StopIteration):
             continue
         if func.qname() == SIX_ADD_METACLASS and decorator.args:
             metaclass = decorator.args[0]
diff --git a/astroid/brain/brain_typing.py b/astroid/brain/brain_typing.py
index 39169dc367..f59c7e4c8e 100644
--- a/astroid/brain/brain_typing.py
+++ b/astroid/brain/brain_typing.py
@@ -115,7 +115,7 @@ def infer_typing_typevar_or_newtype(node, context_itton=None):
     """Infer a typing.TypeVar(...) or typing.NewType(...) call"""
     try:
         func = next(node.func.infer(context=context_itton))
-    except InferenceError as exc:
+    except (InferenceError, StopIteration) as exc:
         raise UseInferenceDefault from exc
 
     if func.qname() not in TYPING_TYPEVARS_QUALIFIED:
@@ -145,7 +145,7 @@ def infer_typing_attr(
     """Infer a typing.X[...] subscript"""
     try:
         value = next(node.value.infer())
-    except InferenceError as exc:
+    except (InferenceError, StopIteration) as exc:
         raise UseInferenceDefault from exc
 
     if (
@@ -269,7 +269,11 @@ def infer_typing_alias(
         or not isinstance(node.parent.targets[0], AssignName)
     ):
         return None
-    res = next(node.args[0].infer(context=ctx))
+    try:
+        res = next(node.args[0].infer(context=ctx))
+    except StopIteration as e:
+        raise InferenceError(node=node.args[0], context=context) from e
+
     assign_name = node.parent.targets[0]
 
     class_def = ClassDef(
@@ -333,7 +337,10 @@ def infer_tuple_alias(
     node: Call, ctx: context.InferenceContext = None
 ) -> typing.Iterator[ClassDef]:
     """Infer call to tuple alias as new subscriptable class typing.Tuple."""
-    res = next(node.args[0].infer(context=ctx))
+    try:
+        res = next(node.args[0].infer(context=ctx))
+    except StopIteration as e:
+        raise InferenceError(node=node.args[0], context=context) from e
     class_def = ClassDef(
         name="Tuple",
         parent=node.parent,
diff --git a/astroid/decorators.py b/astroid/decorators.py
index 81bc7d6204..7d0291208c 100644
--- a/astroid/decorators.py
+++ b/astroid/decorators.py
@@ -95,21 +95,22 @@ def wrapped(node, context=None, _func=func, **kwargs):
         yielded = set()
 
         generator = _func(node, context, **kwargs)
-        try:
-            while True:
+        while True:
+            try:
                 res = next(generator)
-                # unproxy only true instance, not const, tuple, dict...
-                if res.__class__.__name__ == "Instance":
-                    ares = res._proxied
-                else:
-                    ares = res
-                if ares not in yielded:
-                    yield res
-                    yielded.add(ares)
-        except StopIteration as error:
-            if error.args:
-                return error.args[0]
-            return None
+            except StopIteration as error:
+                if error.args:
+                    return error.args[0]
+                return None
+
+            # unproxy only true instance, not const, tuple, dict...
+            if res.__class__.__name__ == "Instance":
+                ares = res._proxied
+            else:
+                ares = res
+            if ares not in yielded:
+                yield res
+                yielded.add(ares)
 
     return wrapped
 
@@ -131,7 +132,6 @@ def yes_if_nothing_inferred(func, instance, args, kwargs):
 @wrapt.decorator
 def raise_if_nothing_inferred(func, instance, args, kwargs):
     generator = func(*args, **kwargs)
-
     try:
         yield next(generator)
     except StopIteration as error:
diff --git a/astroid/helpers.py b/astroid/helpers.py
index 3b6ddee264..7f86c6d94e 100644
--- a/astroid/helpers.py
+++ b/astroid/helpers.py
@@ -285,6 +285,8 @@ def object_len(node, context=None):
 
     try:
         len_call = next(node_type.igetattr("__len__", context=context))
+    except StopIteration as e:
+        raise AstroidTypeError(str(e)) from e
     except AttributeInferenceError as e:
         raise AstroidTypeError(
             f"object of type '{node_type.pytype()}' has no len()"
diff --git a/astroid/inference.py b/astroid/inference.py
index 9e42dc6ebb..9bff87738e 100644
--- a/astroid/inference.py
+++ b/astroid/inference.py
@@ -527,7 +527,7 @@ def _infer_unaryop(self, context=None):
                         continue
 
                     meth = methods[0]
-                    inferred = next(meth.infer(context=context))
+                    inferred = next(meth.infer(context=context), None)
                     if inferred is util.Uninferable or not inferred.callable():
                         continue
 
@@ -571,7 +571,10 @@ def _invoke_binop_inference(instance, opnode, op, other, context, method_name):
     methods = dunder_lookup.lookup(instance, method_name)
     context = contextmod.bind_context_to_node(context, instance)
     method = methods[0]
-    inferred = next(method.infer(context=context))
+    try:
+        inferred = next(method.infer(context=context))
+    except StopIteration as e:
+        raise InferenceError(node=method, context=context) from e
     if inferred is util.Uninferable:
         raise InferenceError
     return instance.infer_binary_op(opnode, op, other, context, inferred)
@@ -922,7 +925,7 @@ def infer_ifexp(self, context=None):
     rhs_context = contextmod.copy_context(context)
     try:
         test = next(self.test.infer(context=context.clone()))
-    except InferenceError:
+    except (InferenceError, StopIteration):
         both_branches = True
     else:
         if test is not util.Uninferable:
diff --git a/astroid/interpreter/objectmodel.py b/astroid/interpreter/objectmodel.py
index 0ab3b43376..b167e7ea91 100644
--- a/astroid/interpreter/objectmodel.py
+++ b/astroid/interpreter/objectmodel.py
@@ -308,7 +308,10 @@ def infer_call_result(self, caller, context=None):
                     )
 
                 context = contextmod.copy_context(context)
-                cls = next(caller.args[0].infer(context=context))
+                try:
+                    cls = next(caller.args[0].infer(context=context))
+                except StopIteration as e:
+                    raise InferenceError(context=context, node=caller.args[0]) from e
 
                 if cls is astroid.Uninferable:
                     raise InferenceError(
@@ -705,7 +708,7 @@ class DictMethodBoundMethod(astroid.BoundMethod):
             def infer_call_result(self, caller, context=None):
                 yield obj
 
-        meth = next(self._instance._proxied.igetattr(name))
+        meth = next(self._instance._proxied.igetattr(name), None)
         return DictMethodBoundMethod(proxy=meth, bound=self._instance)
 
     @property
diff --git a/astroid/node_classes.py b/astroid/node_classes.py
index 179bc17c6e..bfd8c4fb8d 100644
--- a/astroid/node_classes.py
+++ b/astroid/node_classes.py
@@ -79,7 +79,7 @@ def unpack_infer(stmt, context=None):
             yield from unpack_infer(elt, context)
         return dict(node=stmt, context=context)
     # if inferred is a final node, return it and stop
-    inferred = next(stmt.infer(context))
+    inferred = next(stmt.infer(context), util.Uninferable)
     if inferred is stmt:
         yield inferred
         return dict(node=stmt, context=context)
@@ -185,7 +185,7 @@ def _slice_value(index, context=None):
         # we'll stop at the first possible value.
         try:
             inferred = next(index.infer(context=context))
-        except InferenceError:
+        except (InferenceError, StopIteration):
             pass
         else:
             if isinstance(inferred, Const):
diff --git a/astroid/protocols.py b/astroid/protocols.py
index 749e1f6d76..651000cb90 100644
--- a/astroid/protocols.py
+++ b/astroid/protocols.py
@@ -460,7 +460,10 @@ def excepthandler_assigned_stmts(self, node=None, context=None, assign_path=None
 
 
 def _infer_context_manager(self, mgr, context):
-    inferred = next(mgr.infer(context=context))
+    try:
+        inferred = next(mgr.infer(context=context))
+    except StopIteration as e:
+        raise InferenceError(node=mgr) from e
     if isinstance(inferred, bases.Generator):
         # Check if it is decorated with contextlib.contextmanager.
         func = inferred.parent
@@ -470,7 +473,7 @@ def _infer_context_manager(self, mgr, context):
             )
 
         for decorator_node in func.decorators.nodes:
-            decorator = next(decorator_node.infer(context=context))
+            decorator = next(decorator_node.infer(context=context), None)
             if isinstance(decorator, nodes.FunctionDef):
                 if decorator.qname() == _CONTEXTLIB_MGR:
                     break
@@ -497,7 +500,7 @@ def _infer_context_manager(self, mgr, context):
     elif isinstance(inferred, bases.Instance):
         try:
             enter = next(inferred.igetattr("__enter__", context=context))
-        except (InferenceError, AttributeInferenceError) as exc:
+        except (InferenceError, AttributeInferenceError, StopIteration) as exc:
             raise InferenceError(node=inferred) from exc
         if not isinstance(enter, bases.BoundMethod):
             raise InferenceError(node=enter)
@@ -650,7 +653,7 @@ def _determine_starred_iteration_lookups(starred, target, lookups):
 
         try:
             rhs = next(value.infer(context))
-        except InferenceError:
+        except (InferenceError, StopIteration):
             yield util.Uninferable
             return
         if rhs is util.Uninferable or not hasattr(rhs, "itered"):
@@ -698,7 +701,7 @@ def _determine_starred_iteration_lookups(starred, target, lookups):
     if isinstance(stmt, nodes.For):
         try:
             inferred_iterable = next(stmt.iter.infer(context=context))
-        except InferenceError:
+        except (InferenceError, StopIteration):
             yield util.Uninferable
             return
         if inferred_iterable is util.Uninferable or not hasattr(
diff --git a/astroid/scoped_nodes.py b/astroid/scoped_nodes.py
index 10f90dc59c..1ba6c3a33b 100644
--- a/astroid/scoped_nodes.py
+++ b/astroid/scoped_nodes.py
@@ -750,7 +750,7 @@ def wildcard_import_names(self):
 
         try:
             explicit = next(all_values.assigned_stmts())
-        except InferenceError:
+        except (InferenceError, StopIteration):
             return default
         except AttributeError:
             # not an assignment node
@@ -761,7 +761,7 @@ def wildcard_import_names(self):
         inferred = []
         try:
             explicit = next(explicit.infer())
-        except InferenceError:
+        except (InferenceError, StopIteration):
             return default
         if not isinstance(explicit, (node_classes.Tuple, node_classes.List)):
             return default
@@ -775,7 +775,7 @@ def str_const(node):
             else:
                 try:
                     inferred_node = next(node.infer())
-                except InferenceError:
+                except (InferenceError, StopIteration):
                     continue
                 if str_const(inferred_node):
                     inferred.append(inferred_node.value)
@@ -1118,7 +1118,7 @@ def _infer_decorator_callchain(node):
     if not node.parent:
         return None
     try:
-        result = next(node.infer_call_result(node.parent))
+        result = next(node.infer_call_result(node.parent), None)
     except InferenceError:
         return None
     if isinstance(result, bases.Instance):
@@ -1538,7 +1538,7 @@ def type(
                 #
                 try:
                     current = next(node.func.infer())
-                except InferenceError:
+                except (InferenceError, StopIteration):
                     continue
                 _type = _infer_decorator_callchain(current)
                 if _type is not None:
@@ -1681,7 +1681,7 @@ def is_abstract(self, pass_is_abstract=True, any_raise_is_abstract=False):
             for node in self.decorators.nodes:
                 try:
                     inferred = next(node.infer())
-                except InferenceError:
+                except (InferenceError, StopIteration):
                     continue
                 if inferred and inferred.qname() in (
                     "abc.abstractproperty",
@@ -1733,9 +1733,12 @@ def infer_call_result(self, caller=None, context=None):
             and len(self.args.args) == 1
             and self.args.vararg is not None
         ):
-            metaclass = next(caller.args[0].infer(context))
+            metaclass = next(caller.args[0].infer(context), None)
             if isinstance(metaclass, ClassDef):
-                class_bases = [next(arg.infer(context)) for arg in caller.args[1:]]
+                try:
+                    class_bases = [next(arg.infer(context)) for arg in caller.args[1:]]
+                except StopIteration as e:
+                    raise InferenceError(node=caller.args[1:], context=context) from e
                 new_class = ClassDef(name="temporary_class")
                 new_class.hide = True
                 new_class.parent = self
@@ -2166,7 +2169,7 @@ def is_subtype_of(self, type_name, context=None):
         return False
 
     def _infer_type_call(self, caller, context):
-        name_node = next(caller.args[0].infer(context))
+        try:
+            name_node = next(caller.args[0].infer(context))
+        except StopIteration as e:
+            raise InferenceError(node=caller.args[0], context=context) from e
         if isinstance(name_node, node_classes.Const) and isinstance(
             name_node.value, str
         ):
@@ -2177,11 +2183,14 @@ def _infer_type_call(self, caller, context):
         result = ClassDef(name, None)
 
         # Get the bases of the class.
-        class_bases = next(caller.args[1].infer(context))
+        try:
+            class_bases = next(caller.args[1].infer(context))
+        except StopIteration as e:
+            raise InferenceError(node=caller.args[1], context=context) from e
         if isinstance(class_bases, (node_classes.Tuple, node_classes.List)):
             bases = []
             for base in class_bases.itered():
-                inferred = next(base.infer(context=context))
+                inferred = next(base.infer(context=context), None)
                 if inferred:
                     bases.append(
                         node_classes.EvaluatedObject(original=base, value=inferred)
@@ -2196,7 +2205,7 @@ def _infer_type_call(self, caller, context):
         # Get the members of the class
        try:
             members = next(caller.args[2].infer(context))
-        except InferenceError:
+        except (InferenceError, StopIteration):
             members = None
 
         if members and isinstance(members, node_classes.Dict):
@@ -2219,7 +2228,7 @@ def infer_call_result(self, caller, context=None):
             metaclass = self.metaclass(context=context)
             if metaclass is not None:
                 dunder_call = next(metaclass.igetattr("__call__", context))
-        except AttributeInferenceError:
+        except (AttributeInferenceError, StopIteration):
             pass
 
         if dunder_call and dunder_call.qname() != "builtins.type.__call__":
@@ -2674,7 +2683,7 @@ def getitem(self, index, context=None):
         new_context.callcontext = contextmod.CallContext(args=[index])
 
         try:
-            return next(method.infer_call_result(self, new_context))
+            return next(method.infer_call_result(self, new_context), util.Uninferable)
         except AttributeError:
             # Starting with python3.9, builtin types list, dict etc...
             # are subscriptable thanks to __class_getitem___ classmethod.
@@ -2923,7 +2932,7 @@ def _inferred_bases(self, context=None):
         for stmt in self.bases:
             try:
                 baseobj = next(stmt.infer(context=context.clone()))
-            except InferenceError:
+            except (InferenceError, StopIteration):
                 continue
             if isinstance(baseobj, bases.Instance):
                 baseobj = baseobj._proxied
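
Illustrative sketch (not part of the patch above): the diff repeatedly applies two patterns around
bare next() calls on inference generators. When inference yields nothing, next() raises
StopIteration, which under PEP 479 (Python 3.7+) can no longer be left to quietly terminate an
enclosing generator, so each call site either converts it into InferenceError or passes an explicit
default to next(). The helper names below are hypothetical; only InferenceError, util.Uninferable
and the nodes' .infer() method come from astroid itself.

    from astroid import util
    from astroid.exceptions import InferenceError


    def first_inferred_or_raise(node, context=None):
        # Pattern 1: convert an exhausted inference generator into an
        # InferenceError, chaining the original StopIteration.
        try:
            return next(node.infer(context=context))
        except StopIteration as error:
            raise InferenceError(node=node, context=context) from error


    def first_inferred_or_default(node, context=None):
        # Pattern 2: give next() a sentinel default so inference falls back
        # to Uninferable instead of raising.
        return next(node.infer(context=context), util.Uninferable)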