lru_cache: Adds # type: ignore to usages of ParamSpec to fix mypy errors
Tomaz-Vieira committed Nov 2, 2021
1 parent e642aa8 commit 850b2b7
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions stdlib/functools.pyi
@@ -24,20 +24,20 @@ class _CacheInfo(NamedTuple):
     maxsize: int
     currsize: int

-class _lru_cache_wrapper(Generic[_P, _T]):
-    __wrapped__: Callable[_P, _T]
-    def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _T: ...
+class _lru_cache_wrapper(Generic[_P, _T]):  # type: ignore
+    __wrapped__: Callable[_P, _T]  # type: ignore
+    def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _T: ...  # type: ignore
     def cache_info(self) -> _CacheInfo: ...
     def cache_clear(self) -> None: ...

 if sys.version_info >= (3, 8):
     @overload
-    def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[_P, _T]], _lru_cache_wrapper[_P, _T]]: ...
+    def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[_P, _T]], _lru_cache_wrapper[_P, _T]]: ...  # type: ignore
     @overload
-    def lru_cache(maxsize: Callable[_P, _T], typed: bool = ...) -> _lru_cache_wrapper[_P, _T]: ...
+    def lru_cache(maxsize: Callable[_P, _T], typed: bool = ...) -> _lru_cache_wrapper[_P, _T]: ...  # type: ignore

 else:
-    def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[_P, _T]], _lru_cache_wrapper[_P, _T]]: ...
+    def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[_P, _T]], _lru_cache_wrapper[_P, _T]]: ...  # type: ignore

 WRAPPER_ASSIGNMENTS: Sequence[str]
 WRAPPER_UPDATES: Sequence[str]
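For context, a minimal sketch of what the ParamSpec-based stub describes (the function name and arguments below are illustrative, not part of the diff): the wrapper's __call__ is typed with *args: _P.args and **kwargs: _P.kwargs, so a ParamSpec-aware checker validates call sites against the wrapped function's own signature. The # type: ignore comments added above suppress errors from checkers that do not yet understand ParamSpec, which the commit message attributes to mypy at the time.

from functools import lru_cache

@lru_cache(maxsize=None)
def area(width: float, height: float) -> float:
    # Illustrative function; returns the product of its two float arguments.
    return width * height

print(area(3.0, 4.0))   # accepted: arguments match the wrapped (float, float) signature
# area("3", "4")        # a ParamSpec-aware checker would reject these str arguments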
