-
Notifications
You must be signed in to change notification settings - Fork 256
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
13 changed files
with
200 additions
and
101 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.9.0
+current_version = 2.0.0
 commit = True
 tag = True
```
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
```diff
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-

-__version__ = "0.9.0"
+__version__ = "2.0.0"
```
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
import json
from json import JSONDecodeError
from typing import Iterable

from aiohttp import ClientError

from dirhunt.sources.base import SourceBase


COMMONCRAWL_URL = "https://index.commoncrawl.org/collinfo.json"
TIMEOUT = 10  # seconds per CommonCrawl HTTP request


class CommonCrawl(SourceBase):
    """Discover known URLs for a domain using the CommonCrawl index API."""

    async def get_latest_craw_index(self):
        """Return the ``cdx-api`` endpoint URL of the most recent crawl index.

        Fetches the collection listing from ``COMMONCRAWL_URL`` (a JSON array
        ordered newest-first) and returns the ``cdx-api`` field of the first
        entry. Returns ``None`` when the request fails, the payload is not
        valid JSON, or the listing is empty; failures are reported through
        ``self.add_error`` instead of raising.
        """
        url = COMMONCRAWL_URL
        try:
            async with self.sources.crawler.session.get(
                url, timeout=TIMEOUT
            ) as response:
                response.raise_for_status()
                crawl_indexes = await response.json()
        except (ClientError, ValueError, JSONDecodeError) as e:
            self.add_error("Error on CommonCrawl source: {}".format(e))
            return
        if not crawl_indexes:
            return
        # Listing is newest-first, so the first entry is the latest crawl.
        latest_crawl_index = crawl_indexes[0]
        return latest_crawl_index["cdx-api"]

    async def search_by_domain(self, domain: str) -> Iterable[str]:
        """Return the set of URLs CommonCrawl indexes under *domain*.

        Queries the latest crawl index for ``*.domain`` in newline-delimited
        JSON output and collects the ``url`` field of every record. Network
        or parse errors are reported via ``self.add_error`` and whatever was
        collected up to that point is returned, mirroring the error handling
        of ``get_latest_craw_index``. Returns an empty set when no crawl
        index is available.
        """
        latest_crawl_index = await self.get_latest_craw_index()
        urls = set()
        if not latest_crawl_index:
            # Consistent empty result (was `[]` while the success path
            # returned a set); both are Iterable[str] for callers.
            return urls
        try:
            async with self.sources.crawler.session.get(
                latest_crawl_index,
                params={"url": "*.{}".format(domain), "output": "json"},
                timeout=TIMEOUT,
            ) as response:
                response.raise_for_status()
                # The index streams one JSON document per line; an empty
                # read means EOF.
                while True:
                    line = (await response.content.readline()).decode("utf-8")
                    if not line:
                        break
                    data = json.loads(line)
                    urls.add(data["url"])
        except (ClientError, ValueError, JSONDecodeError) as e:
            # Restores the error containment the old sync version had
            # (`except RequestException`): a mid-stream failure must not
            # abort the whole crawl.
            self.add_error("Error on CommonCrawl source: {}".format(e))
        return urls
Oops, something went wrong.