bump mypy 0.982 → 0.991; fix typing issues
BurnzZ committed Nov 21, 2022
1 parent 4a8807a commit 4881d6f
Showing 5 changed files with 30 additions and 19 deletions.
10 changes: 10 additions & 0 deletions pyproject.toml
@@ -4,3 +4,13 @@ line-length = 88
 [tool.isort]
 profile = "black"
 multi_line_output = 3
+
+[[tool.mypy.overrides]]
+module = [
+    "tests.test_downloader.*",
+    "tests.test_scrapy_dependencies.*",
+]
+# Ignore this type of error since mypy expects an Iterable return
+# when test cases are decorated with @inlineCallbacks. However, the
+# tests don't return anything at all.
+disable_error_code = "misc"
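
For context, the pattern this override targets looks roughly like the sketch below (the test name and Deferred value are hypothetical, not from this repository): an @inlineCallbacks test is a generator function, so once it is annotated "-> None", mypy raises a misc error expecting a Generator/Iterable return type, even though the tests only yield Deferreds and return nothing.

# Minimal sketch, assuming a twisted-style test decorated with @inlineCallbacks.
from twisted.internet.defer import inlineCallbacks, succeed


@inlineCallbacks
def test_example() -> None:  # mypy flags this annotation with [misc]:
    # a generator function "should return Generator or one of its supertypes",
    # but the test only yields a Deferred and returns nothing.
    result = yield succeed(42)
    assert result == 42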
2 changes: 1 addition & 1 deletion scrapy_poet/injection.py
@@ -359,7 +359,7 @@ def is_provider_requiring_scrapy_response(provider):

 def get_injector_for_testing(
     providers: Mapping,
-    additional_settings: Dict = None,
+    additional_settings: Optional[Dict] = None,
     overrides_registry: Optional[OverridesRegistryBase] = None,
 ) -> Injector:
     """
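
This one-line fix is most likely driven by mypy 0.990 turning no_implicit_optional on by default, so a None default no longer implies an Optional parameter. A minimal sketch under that assumption (function names are hypothetical):

from typing import Dict, Optional


def before(settings: Dict = None):
    # mypy >= 0.990 rejects this: incompatible default (implicit Optional).
    ...


def after(settings: Optional[Dict] = None):
    # Making Optional explicit satisfies the new default behavior.
    ...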
27 changes: 14 additions & 13 deletions tests/test_downloader.py
@@ -1,4 +1,5 @@
 from functools import partial
+from typing import Callable
 from unittest import mock
 
 import attr
@@ -26,13 +27,13 @@


 @pytest.fixture
-def scrapy_downloader():
+def scrapy_downloader() -> Callable:
     mock_downloader = AsyncMock()
     return create_scrapy_downloader(mock_downloader)


 @ensureDeferred
-async def test_incompatible_scrapy_request(scrapy_downloader):
+async def test_incompatible_scrapy_request(scrapy_downloader) -> None:
     """The Request must be web_poet.HttpRequest and not anything else."""
 
     req = scrapy.Request("https://example.com")
@@ -42,7 +43,7 @@ async def test_incompatible_scrapy_request(scrapy_downloader):


 @pytest.fixture
-def fake_http_response():
+def fake_http_response() -> web_poet.HttpResponse:
     return web_poet.HttpResponse(
         "https://example.com",
         b"some content",
@@ -52,7 +53,7 @@ def fake_http_response():


 @ensureDeferred
-async def test_scrapy_poet_downloader(fake_http_response):
+async def test_scrapy_poet_downloader(fake_http_response) -> None:
     req = web_poet.HttpRequest("https://example.com")
 
     with mock.patch(
@@ -77,7 +78,7 @@ async def test_scrapy_poet_downloader(fake_http_response):


 @ensureDeferred
-async def test_scrapy_poet_downloader_ignored_request():
+async def test_scrapy_poet_downloader_ignored_request() -> None:
     """It should handle IgnoreRequest from Scrapy according to the web poet
     standard on additional request error handling."""
     req = web_poet.HttpRequest("https://example.com")
@@ -94,7 +95,7 @@ async def test_scrapy_poet_downloader_ignored_request():


 @ensureDeferred
-async def test_scrapy_poet_downloader_twisted_error():
+async def test_scrapy_poet_downloader_twisted_error() -> None:
     req = web_poet.HttpRequest("https://example.com")
 
     with mock.patch(
@@ -109,7 +110,7 @@ async def test_scrapy_poet_downloader_twisted_error():


 @ensureDeferred
-async def test_scrapy_poet_downloader_head_redirect(fake_http_response):
+async def test_scrapy_poet_downloader_head_redirect(fake_http_response) -> None:
     req = web_poet.HttpRequest("https://example.com", method="HEAD")
 
     with mock.patch(
@@ -127,7 +128,7 @@ async def test_scrapy_poet_downloader_head_redirect(fake_http_response):


 @inlineCallbacks
-def test_additional_requests_success():
+def test_additional_requests_success() -> None:
     items = []
 
     with MockServer(EchoResource) as server:
@@ -164,7 +165,7 @@ async def parse(self, response, page: ItemPage):


 @inlineCallbacks
-def test_additional_requests_bad_response():
+def test_additional_requests_bad_response() -> None:
     items = []
 
     with MockServer(StatusResource) as server:
@@ -203,7 +204,7 @@ async def parse(self, response, page: ItemPage):


 @inlineCallbacks
-def test_additional_requests_connection_issue():
+def test_additional_requests_connection_issue() -> None:
     items = []
 
     with mock.patch(
@@ -250,7 +251,7 @@ async def parse(self, response, page: ItemPage):


 @inlineCallbacks
-def test_additional_requests_ignored_request():
+def test_additional_requests_ignored_request() -> None:
     items = []
 
     with MockServer(EchoResource) as server:
@@ -307,7 +308,7 @@ async def parse(self, response, page: ItemPage):
     strict=True,
 )
 @inlineCallbacks
-def test_additional_requests_unhandled_downloader_middleware_exception():
+def test_additional_requests_unhandled_downloader_middleware_exception() -> None:
     items = []
 
     with MockServer(EchoResource) as server:
@@ -353,7 +354,7 @@ async def parse(self, response, page: ItemPage):


 @inlineCallbacks
-def test_additional_requests_dont_filter():
+def test_additional_requests_dont_filter() -> None:
     """Verify that while duplicate regular requests are filtered out,
     additional requests are not (neither relative to the main requests nor
     relative to each other).
8 changes: 4 additions & 4 deletions tests/test_scrapy_dependencies.py
@@ -30,7 +30,7 @@ class ProductHtml(HtmlResource):

 @inlineCallbacks
 @pytest.mark.parametrize("scrapy_class", SCRAPY_PROVIDED_CLASSES)
-def test_scrapy_dependencies_on_providers(scrapy_class, settings):
+def test_scrapy_dependencies_on_providers(scrapy_class, settings) -> None:
     """Scrapy dependencies should be injected into Providers."""
 
     @attr.s(auto_attribs=True)
@@ -41,7 +41,7 @@ class PageDataProvider(PageObjectInputProvider):

         provided_classes = {PageData}
 
-        def __call__(self, to_provide, obj: scrapy_class):
+        def __call__(self, to_provide, obj: scrapy_class):  # type: ignore[valid-type]
             return [PageData(scrapy_class=scrapy_class.__name__)]
 
     @attr.s(auto_attribs=True)
@@ -77,13 +77,13 @@ def parse(self, response, page: Page):

 @inlineCallbacks
 @pytest.mark.parametrize("scrapy_class", SCRAPY_PROVIDED_CLASSES)
-def test_scrapy_dependencies_on_page_objects(scrapy_class, settings):
+def test_scrapy_dependencies_on_page_objects(scrapy_class, settings) -> None:
     """Scrapy dependencies should not be injected into Page Objects."""
 
     @attr.s(auto_attribs=True)
     class Page(WebPage):
 
-        scrapy_obj: scrapy_class
+        scrapy_obj: scrapy_class  # type: ignore[valid-type]
 
         def to_item(self):
             return {
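
Both valid-type ignores above share one cause: scrapy_class is a parametrized function argument, and mypy only accepts real types in annotations, not variables. A minimal sketch of the same situation with hypothetical names:

import pytest


@pytest.mark.parametrize("cls", [int, str])
def test_param_as_annotation(cls) -> None:
    # mypy: Variable "cls" is not valid as a type [valid-type] -- fine at
    # runtime, hence the targeted "# type: ignore[valid-type]" comments.
    obj: cls = cls()
    assert isinstance(obj, cls)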
2 changes: 1 addition & 1 deletion tox.ini
@@ -38,7 +38,7 @@ deps =

 [testenv:mypy]
 deps =
-    mypy==0.982
+    mypy==0.991
 
 commands = mypy --ignore-missing-imports --no-warn-no-return scrapy_poet tests

