Fix 4689 Exclude ThreadPoolExecutor and ProcessPoolExecutor from ``consider-using-with`` (#4721)

* Do not immediately emit ``consider-using-with`` if a context manager is assigned to a variable.
  Instead, check whether it is used later on in a ``with`` block.

* Enhance the check to also work for tuple unpacking in assignments

* Remove ``ThreadPoolExecutor`` and ``ProcessPoolExecutor`` from list of callables that trigger the ``consider-using-with`` message
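
A minimal sketch of the behavior described above (illustrative only: the function names
are invented, and the ``# [consider-using-with]`` markers follow the convention used in
pylint's functional tests to mark where the message is expected):

import multiprocessing
from concurrent.futures import ThreadPoolExecutor


def pool_used_later():
    # Assigned to a variable and entered in a ``with`` block later on: no message.
    pool = multiprocessing.Pool()
    with pool:
        pass


def pool_never_entered():
    # Assigned but never used in a ``with`` block: the message is emitted for the
    # assignment once the enclosing scope is left.
    pool = multiprocessing.Pool()  # [consider-using-with]
    pool.close()


def tuple_unpacking():
    # Tuple unpacking is tracked per name: only the value that never reaches a
    # ``with`` block is reported.
    used, unused = multiprocessing.Pool(), multiprocessing.Pool()  # [consider-using-with] (for ``unused``)
    with used:
        pass
    unused.close()


def executor_kept_alive():
    # Executors are no longer flagged: keeping one alive as a long-lived worker
    # pool is a legitimate pattern.
    executor = ThreadPoolExecutor()
    executor.submit(print, "hello")
    executor.shutdown()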

Co-authored-by: Pierre Sassoulas <pierre.sassoulas@gmail.com>
DudeNr33 and Pierre-Sassoulas authored Jul 20, 2021
1 parent 3fe9954 commit dd54e55
Showing 5 changed files with 220 additions and 44 deletions.
5 changes: 5 additions & 0 deletions ChangeLog
@@ -82,6 +82,11 @@ Release date: TBA

  Closes #4668

* No longer emit ``consider-using-with`` for ``ThreadPoolExecutor`` and ``ProcessPoolExecutor``
  as they have legitimate use cases without a ``with`` block.

  Closes #4689

* Fix crash when inferring variables assigned in match patterns

  Closes #4685
3 changes: 3 additions & 0 deletions doc/whatsnew/2.9.rst
@@ -99,3 +99,6 @@ Other Changes
  functions/methods.

* Added various deprecated functions/methods for python 3.10, 3.7, 3.6 and 3.3

* No longer emit ``consider-using-with`` for ``ThreadPoolExecutor`` and ``ProcessPoolExecutor``
  as they have legitimate use cases without a ``with`` block.
135 changes: 124 additions & 11 deletions pylint/checkers/refactoring/refactoring_checker.py
@@ -6,9 +6,10 @@
import itertools
import tokenize
from functools import reduce
from typing import List, Optional, Tuple, Union, cast
from typing import Dict, Iterator, List, NamedTuple, Optional, Tuple, Union, cast

import astroid
from astroid.util import Uninferable

from pylint import checkers, interfaces
from pylint import utils as lint_utils
@@ -41,8 +42,6 @@
        "tarfile.TarFile",
        "tarfile.TarFile.open",
        "multiprocessing.context.BaseContext.Pool",
        "concurrent.futures.thread.ThreadPoolExecutor",
        "concurrent.futures.process.ProcessPoolExecutor",
        "subprocess.Popen",
    )
)
@@ -147,6 +146,33 @@ def _will_be_released_automatically(node: astroid.Call) -> bool:
    return func.qname() in callables_taking_care_of_exit


class ConsiderUsingWithStack(NamedTuple):
    """Stack for objects that may potentially trigger a R1732 message
    if they are not used in a ``with`` block later on."""

    module_scope: Dict[str, astroid.NodeNG] = {}
    class_scope: Dict[str, astroid.NodeNG] = {}
    function_scope: Dict[str, astroid.NodeNG] = {}

    def __iter__(self) -> Iterator[Dict[str, astroid.NodeNG]]:
        yield from (self.function_scope, self.class_scope, self.module_scope)

    def get_stack_for_frame(
        self, frame: Union[astroid.FunctionDef, astroid.ClassDef, astroid.Module]
    ):
        """Get the stack corresponding to the scope of the given frame."""
        if isinstance(frame, astroid.FunctionDef):
            return self.function_scope
        if isinstance(frame, astroid.ClassDef):
            return self.class_scope
        return self.module_scope

    def clear_all(self) -> None:
        """Convenience method to clear all stacks"""
        for stack in self:
            stack.clear()


class RefactoringChecker(checkers.BaseTokenChecker):
    """Looks for code which can be refactored
@@ -416,6 +442,7 @@ class RefactoringChecker(checkers.BaseTokenChecker):
    def __init__(self, linter=None):
        checkers.BaseTokenChecker.__init__(self, linter)
        self._return_nodes = {}
        self._consider_using_with_stack = ConsiderUsingWithStack()
        self._init()
        self._never_returning_functions = None

@@ -425,6 +452,7 @@ def _init(self):
        self._nested_blocks_msg = None
        self._reported_swap_nodes = set()
        self._can_simplify_bool_op = False
        self._consider_using_with_stack.clear_all()

    def open(self):
        # do this in open since config not fully initialized in __init__
@@ -543,7 +571,12 @@ def process_tokens(self, tokens):
                if self.linter.is_message_enabled("trailing-comma-tuple"):
                    self.add_message("trailing-comma-tuple", line=token.start[0])

    @utils.check_messages("consider-using-with")
    def leave_module(self, _):
        # check for context managers that have been created but not used
        self._emit_consider_using_with_if_needed(
            self._consider_using_with_stack.module_scope
        )
        self._init()

    @utils.check_messages("too-many-nested-blocks")
@@ -593,7 +626,14 @@ def visit_excepthandler(self, node):

    @utils.check_messages("redefined-argument-from-local")
    def visit_with(self, node):
        for _, names in node.items:
        for var, names in node.items:
            if isinstance(var, astroid.Name):
                for stack in self._consider_using_with_stack:
                    # We don't need to restrict the stacks we search to the current scope and outer scopes,
                    # as e.g. the function_scope stack will be empty when we check a ``with`` on the class level.
                    if var.name in stack:
                        del stack[var.name]
                        break
            if not names:
                continue
            for name in names.nodes_of_class(astroid.AssignName):
@@ -818,9 +858,12 @@ def _check_simplifiable_ifexp(self, node):
        self.add_message("simplifiable-if-expression", node=node, args=(reduced_to,))

    @utils.check_messages(
        "too-many-nested-blocks", "inconsistent-return-statements", "useless-return"
        "too-many-nested-blocks",
        "inconsistent-return-statements",
        "useless-return",
        "consider-using-with",
    )
    def leave_functiondef(self, node):
    def leave_functiondef(self, node: astroid.FunctionDef) -> None:
        # check left-over nested blocks stack
        self._emit_nested_blocks_message_if_needed(self._nested_blocks)
        # new scope = reinitialize the stack of nested blocks
@@ -830,6 +873,19 @@ def leave_functiondef(self, node):
        # check for single return or return None at the end
        self._check_return_at_the_end(node)
        self._return_nodes[node.name] = []
        # check for context managers that have been created but not used
        self._emit_consider_using_with_if_needed(
            self._consider_using_with_stack.function_scope
        )
        self._consider_using_with_stack.function_scope.clear()

    @utils.check_messages("consider-using-with")
    def leave_classdef(self, _: astroid.ClassDef) -> None:
        # check for context managers that have been created but not used
        self._emit_consider_using_with_if_needed(
            self._consider_using_with_stack.class_scope
        )
        self._consider_using_with_stack.class_scope.clear()

    @utils.check_messages("stop-iteration-return")
    def visit_raise(self, node):
@@ -1021,6 +1077,10 @@ def _emit_nested_blocks_message_if_needed(self, nested_blocks):
            args=(len(nested_blocks), self.config.max_nested_blocks),
        )

    def _emit_consider_using_with_if_needed(self, stack: Dict[str, astroid.NodeNG]):
        for node in stack.values():
            self.add_message("consider-using-with", node=node)

    @staticmethod
    def _duplicated_isinstance_types(node):
        """Get the duplicated types from the underlying isinstance calls.
@@ -1282,12 +1342,22 @@ def _check_swap_variables(self, node):
            message = "consider-swap-variables"
            self.add_message(message, node=node)

    @utils.check_messages(
        "simplify-boolean-expression",
        "consider-using-ternary",
        "consider-swap-variables",
        "consider-using-with",
    )
    def visit_assign(self, node: astroid.Assign) -> None:
        self._append_context_managers_to_stack(node)
        self.visit_return(node)  # remaining checks are identical as for return nodes

    @utils.check_messages(
        "simplify-boolean-expression",
        "consider-using-ternary",
        "consider-swap-variables",
    )
    def visit_assign(self, node):
    def visit_return(self, node: astroid.Return) -> None:
        self._check_swap_variables(node)
        if self._is_and_or_ternary(node.value):
            cond, truth_value, false_value = self._and_or_ternary_arguments(node.value)
@@ -1317,9 +1387,54 @@ def visit_assign(self, node):
                )
            self.add_message(message, node=node, args=(suggestion,))

    visit_return = visit_assign
    def _append_context_managers_to_stack(self, node: astroid.Assign) -> None:
        if _is_inside_context_manager(node):
            # if we are inside a context manager itself, we assume that it will handle the resource management itself.
            return
        if isinstance(node.targets[0], (astroid.Tuple, astroid.List, astroid.Set)):
            assignees = node.targets[0].elts
            value = utils.safe_infer(node.value)
            if value is None or not hasattr(value, "elts"):
                # We cannot deduce what values are assigned, so we have to skip this
                return
            values = value.elts
        else:
            assignees = [node.targets[0]]
            values = [node.value]
        if Uninferable in (assignees, values):
            return
        for assignee, value in zip(assignees, values):
            if not isinstance(value, astroid.Call):
                continue
            inferred = utils.safe_infer(value.func)
            if not inferred or inferred.qname() not in CALLS_RETURNING_CONTEXT_MANAGERS:
                continue
            stack = self._consider_using_with_stack.get_stack_for_frame(node.frame())
            varname = (
                assignee.name
                if isinstance(assignee, astroid.AssignName)
                else assignee.attrname
            )
            if varname in stack:
                # variable was redefined before it was used in a ``with`` block
                self.add_message(
                    "consider-using-with",
                    node=stack[varname],
                )
            stack[varname] = value

    def _check_consider_using_with(self, node: astroid.Call):
        if _is_inside_context_manager(node):
            # if we are inside a context manager itself, we assume that it will handle the resource management itself.
            return
        if (
            node
            in self._consider_using_with_stack.get_stack_for_frame(
                node.frame()
            ).values()
        ):
            # the result of this call was already assigned to a variable and will be checked when leaving the scope.
            return
        inferred = utils.safe_infer(node.func)
        if not inferred:
            return
@@ -1332,9 +1447,7 @@ def _check_consider_using_with(self, node: astroid.Call):
                and not _is_part_of_with_items(node)
            )
        )
        if could_be_used_in_with and not (
            _is_inside_context_manager(node) or _will_be_released_automatically(node)
        ):
        if could_be_used_in_with and not _will_be_released_automatically(node):
            self.add_message("consider-using-with", node=node)

    def _check_consider_using_join(self, aug_assign):
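
For orientation, a small sketch of how the new ``ConsiderUsingWithStack`` helper selects a
per-scope dictionary (illustrative only; it assumes pylint 2.9.x with astroid 2.x installed
and builds a throwaway module purely to obtain frame nodes):

import astroid

from pylint.checkers.refactoring.refactoring_checker import ConsiderUsingWithStack

module = astroid.parse(
    """
def some_function():
    pass


class SomeClass:
    pass
"""
)
function_node = module.body[0]
class_node = module.body[1]

stack = ConsiderUsingWithStack()
# Each kind of frame maps to its own dict, so pending context managers are
# tracked, and later reported, per scope (function, class, or module).
assert stack.get_stack_for_frame(function_node) is stack.function_scope
assert stack.get_stack_for_frame(class_node) is stack.class_scope
assert stack.get_stack_for_frame(module) is stack.module_scope
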
75 changes: 64 additions & 11 deletions tests/functional/c/consider/consider_using_with.py
@@ -8,7 +8,7 @@
import threading
import urllib
import zipfile
from concurrent import futures
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor


def test_codecs_open():
@@ -112,6 +112,7 @@ class MyLockContext:
    """
    The message must not be triggered if the resource allocation is done inside a context manager.
    """

    def __init__(self):
        self.lock = threading.Lock()

@@ -140,16 +141,6 @@ def test_multiprocessing():
        pass


def test_futures():
    _ = futures.ThreadPoolExecutor()  # [consider-using-with]
    with futures.ThreadPoolExecutor():
        pass

    _ = futures.ProcessPoolExecutor()  # [consider-using-with]
    with futures.ProcessPoolExecutor():
        pass


def test_popen():
    _ = subprocess.Popen("sh")  # [consider-using-with]
    with subprocess.Popen("sh"):
@@ -162,3 +153,65 @@ def test_suppress_in_exit_stack():
        _ = stack.enter_context(
            open("/sys/firmware/devicetree/base/hwid,location", "r")
        )  # must not trigger


def test_futures():
    """
    Regression test for issue #4689.
    ThreadPoolExecutor and ProcessPoolExecutor were formerly part of the callables that raised
    the R1732 message if used outside a with block, but there are legitimate use cases where
    Executor instances are used e.g. as a persistent background worker pool throughout the program.
    """
    thread_executor = ThreadPoolExecutor()
    thread_executor.submit(print, 1)
    process_executor = ProcessPoolExecutor()
    process_executor.submit(print, 2)
    thread_executor.shutdown()
    process_executor.shutdown()


pool = multiprocessing.Pool()  # must not trigger, as it is used later on
with pool:
    pass


global_pool = (
    multiprocessing.Pool()
)  # must not trigger, will be used in nested scope


def my_nested_function():
    with global_pool:
        pass


# this must also work for tuple unpacking
pool1, pool2 = (
    multiprocessing.Pool(),  # must not trigger
    multiprocessing.Pool(),  # must not trigger
)

with pool1:
    pass

with pool2:
    pass

unused_pool1, unused_pool2 = (
    multiprocessing.Pool(),  # [consider-using-with]
    multiprocessing.Pool(),  # [consider-using-with]
)

used_pool, unused_pool = (
    multiprocessing.Pool(),  # must not trigger
    multiprocessing.Pool(),  # [consider-using-with]
)
with used_pool:
    pass

unused_pool, used_pool = (
    multiprocessing.Pool(),  # [consider-using-with]
    multiprocessing.Pool(),  # must not trigger
)
with used_pool:
    pass