Skip to content

Commit

Permalink
fix: update mediafusion and tweak scrape func to be cleaner
Browse files Browse the repository at this point in the history
  • Loading branch information
dreulavelle committed Nov 8, 2024
1 parent afdb9f6 commit 73c0bcc
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 21 deletions.
25 changes: 7 additions & 18 deletions src/program/services/scrapers/mediafusion.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
""" Mediafusion scraper module """
import json
import re
from typing import Dict

from loguru import logger
Expand Down Expand Up @@ -89,7 +90,7 @@ def validate(self) -> bool:

try:
response = self.request_handler.execute(HttpMethod.POST, url, overriden_response_type=ResponseType.DICT, json=payload, headers=headers)
self.encrypted_string = json.loads(response.data)["encrypted_str"]
self.encrypted_string = response.data["encrypted_str"]
except Exception as e:
logger.error(f"Failed to encrypt user data: {e}")
return False
Expand Down Expand Up @@ -131,30 +132,18 @@ def scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]:
url += identifier

response = self.request_handler.execute(HttpMethod.GET, f"{url}.json", timeout=self.timeout)

if not response.is_ok or len(response.data.streams) <= 0:
logger.log("NOT_FOUND", f"No streams found for {item.log_string}")
return {}

torrents: Dict[str, str] = {}

for stream in response.data.streams:
description_split = stream.description.split("\n💾")
if len(description_split) < 2:
logger.warning(f"Invalid stream description: {stream.description}")
continue
raw_title = description_split[0].replace("📂 ", "")

url_split = stream.url.split("?info_hash=")
if len(url_split) < 2:
logger.warning(f"Invalid stream URL: {stream.url}")
continue
info_hash = url_split[1]

if not info_hash or not raw_title:
continue

torrents[info_hash] = raw_title
description_split = stream.description.replace("📂 ", "").replace("/", "")
raw_title = description_split.split("\n")[0]
info_hash = re.search(r"info_hash=([A-Za-z0-9]+)", stream.url).group(1)
if info_hash and info_hash not in torrents:
torrents[info_hash] = raw_title

if torrents:
logger.log("SCRAPER", f"Found {len(torrents)} streams for {item.log_string}")
Expand Down
9 changes: 8 additions & 1 deletion src/program/state_transition.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from loguru import logger
from program.media import MediaItem, States
from program.services.downloaders import Downloader
from program.services.indexers.trakt import TraktIndexer
Expand All @@ -19,6 +20,7 @@ def process_event(emitted_by: Service, existing_item: MediaItem | None = None, c
#TODO - Reindex non-released badly indexed items here
if content_item or (existing_item is not None and existing_item.last_state == States.Requested):
next_service = TraktIndexer
logger.debug(f"Submitting {content_item.log_string if content_item else existing_item.log_string} to trakt indexer")
return next_service, [content_item or existing_item]

elif existing_item is not None and existing_item.last_state in [States.PartiallyCompleted, States.Ongoing]:
Expand Down Expand Up @@ -71,6 +73,11 @@ def process_event(emitted_by: Service, existing_item: MediaItem | None = None, c
if not items_to_submit:
return no_further_processing
else:

return no_further_processing

return next_service, items_to_submit
# if items_to_submit and next_service:
# for item in items_to_submit:
# logger.debug(f"Submitting {item.log_string} ({item.id}) to {next_service if isinstance(next_service, str) else next_service.__name__}")

return next_service, items_to_submit
5 changes: 3 additions & 2 deletions src/routers/secure/default.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,8 +212,9 @@ async def get_logs() -> str:
@router.get("/events", operation_id="events")
async def get_events(
request: Request,
) -> dict[str, list[EventUpdate]]:
return request.app.program.em.get_event_updates()
) -> dict[str, list[str]]:
events = request.app.program.em.get_event_updates()
return events


@router.get("/mount", operation_id="mount")
Expand Down

0 comments on commit 73c0bcc

Please sign in to comment.