Added asynchronous requests for Jellyfin updates
Iceshadow1404 committed Aug 13, 2024
1 parent 7c576a0 commit 55408e2
Showing 5 changed files with 144 additions and 135 deletions.
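
This commit replaces the blocking workflow (time.sleep, threading.Thread, requests) with asyncio equivalents (asyncio.sleep, asyncio.create_task, aiohttp). A minimal sketch of that conversion pattern, using placeholder names rather than the repository's actual functions:

```python
import asyncio

async def poll_for_work(stop_event: asyncio.Event) -> None:
    """Background poller: checks for work, then yields to the event loop."""
    while not stop_event.is_set():
        # ... a quick, non-blocking check would go here ...
        await asyncio.sleep(5)  # replaces time.sleep(5)

async def main() -> None:
    stop_event = asyncio.Event()
    # replaces threading.Thread(target=...).start()
    poller = asyncio.create_task(poll_for_work(stop_event))
    try:
        await asyncio.sleep(30)  # stand-in for the periodic refresh loop
    finally:
        stop_event.set()
        await poller  # replaces checker_thread.join()

if __name__ == "__main__":
    asyncio.run(main())  # replaces calling the entry point directly
```

Because the checker runs as a task on the same event loop, every await inside it lets the periodic refresh keep going instead of tying up a separate thread.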
56 changes: 30 additions & 26 deletions main.py
@@ -10,6 +10,7 @@
from src.constants import *
import tempfile
import errno
import asyncio


try:
@@ -41,7 +42,6 @@ def clean_log_files():
os.remove(log_file)
Path(log_file).touch()


def acquire_lock(lock_file):
try:
if os.path.exists(lock_file):
@@ -73,7 +73,7 @@ def pid_exists(pid):
return True


def main():
async def main():
"""Main function for processing covers and updating Jellyfin."""
lock_file = os.path.join(tempfile.gettempdir(), 'jellyfin_cover_manager.lock')
lock = acquire_lock(lock_file)
@@ -85,17 +85,17 @@ def main():
try:
clean_log_files()
organize_covers()
start_get_and_save_series_and_movie()
await start_get_and_save_series_and_movie()

try:
clean_json_names(OUTPUT_FILENAME)
except json.JSONDecodeError as json_error:
log(f"JSON decoding error: {str(json_error)}. Creating new files...", success=False)
delete_corrupted_files()
await delete_corrupted_files()
return

missing_folders.clear()
assign_images_and_update_jellyfin(OUTPUT_FILENAME)
await assign_images_and_update_jellyfin(OUTPUT_FILENAME)

if missing_folders:
if os.path.exists(MISSING_FOLDER):
@@ -112,7 +112,7 @@ def main():
finally:
release_lock(lock)

def delete_corrupted_files():
async def delete_corrupted_files():
"""Delete existing files and recreate them with fresh data."""
files_to_recreate = [RAW_FILENAME, OUTPUT_FILENAME, ID_CACHE_FILENAME]

@@ -122,74 +122,78 @@ def delete_corrupted_files():
os.remove(file)
log(f"Deleted existing file: {file}", success=True)

start_get_and_save_series_and_movie()
await start_get_and_save_series_and_movie()
clean_json_names(OUTPUT_FILENAME)

log("Successfully recreated and populated new files", success=True)
except Exception as e:
log(f"Error recreating files: {str(e)}", success=False)

def check_raw_cover():
async def check_raw_cover():
"""Check Raw Cover directory every 10 seconds for new files."""
while not stop_thread.is_set():
try:
for file in Path(RAW_COVER_DIR).iterdir():
if file.suffix.lower() in ['.filepart']:
while file.exists():
print(f"Waiting for {file.name} to finish transferring...")
time.sleep(1)
await asyncio.sleep(1)
continue

if file.suffix.lower() in ['.zip', '.png', '.jpg', '.jpeg', '.webp']:
initial_size = file.stat().st_size
time.sleep(1)
await asyncio.sleep(1)
if file.stat().st_size == initial_size:
log(f"Found new file: {file.name}")
main()
await main()
break
if os.path.getsize(MEDIUX_FILE) != 0:
log("mediux.txt is not empty. Running mediux_downloader.")
log("waiting for additional links")
time.sleep(10)
mediux_downloader()
await asyncio.sleep(10)
await mediux_downloader()
except Exception as e:
error_message = f"Error checking raw cover: {str(e)}"
log(error_message, success=False)

time.sleep(5)
await asyncio.sleep(5)

print("Checker thread stopped.")

def run_program(run_main_immediately=False):
async def run_program(run_main_immediately=False):
"""Main program entry point."""
setup_directories()
if os.path.getsize(MEDIUX_FILE) != 0:
mediux_downloader()

try:
if os.path.getsize(MEDIUX_FILE) != 0:
await mediux_downloader()
except FileNotFoundError:
if not os.path.exists(MEDIUX_FILE):
with open(MEDIUX_FILE, 'w') as f:
pass

if run_main_immediately:
main()
await main()

checker_thread = threading.Thread(target=check_raw_cover)
checker_thread.start()
checker_task = asyncio.create_task(check_raw_cover())

try:
while not stop_thread.is_set():
start_get_and_save_series_and_movie()
time.sleep(30)
await start_get_and_save_series_and_movie()
await asyncio.sleep(30)
except KeyboardInterrupt:
print("Main program is closing...")
stop_thread.set()
checker_thread.join()
print("Checker thread has been terminated.")
await checker_task
print("Checker task has been terminated.")

if __name__ == '__main__':
try:
clean_log_files()
parser = argparse.ArgumentParser(description="Jellyfin Cover Manager")
parser.add_argument("--main", action="store_true", help="Run the main function immediately after start")
args = parser.parse_args()

run_program(run_main_immediately=args.main)
asyncio.run(run_program(run_main_immediately=args.main))
except Exception as e:
print(f"Unhandled exception in main script: {e}")
traceback.print_exc()
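
In check_raw_cover the size-stability test now waits with await asyncio.sleep, so polling a partially transferred file no longer blocks other tasks. A small, self-contained sketch of that check in isolation (the path in the usage comment is a placeholder, not a repository constant):

```python
import asyncio
from pathlib import Path

async def is_transfer_finished(file: Path, interval: float = 1.0) -> bool:
    """Return True if the file size is unchanged after `interval` seconds."""
    try:
        initial_size = file.stat().st_size
    except FileNotFoundError:
        return False
    await asyncio.sleep(interval)  # yield to other tasks while waiting
    try:
        return file.stat().st_size == initial_size
    except FileNotFoundError:
        return False

# Example usage (path is illustrative):
# finished = asyncio.run(is_transfer_finished(Path("RawCover/poster.zip")))
```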
77 changes: 40 additions & 37 deletions src/getIDs.py
@@ -7,20 +7,23 @@
import time
from typing import List, Dict, Set, Tuple, Optional
from requests.exceptions import RequestException
import asyncio
import aiohttp

from src.config import JELLYFIN_URL, API_KEY, TMDB_API_KEY, USE_TMDB
from src.utils import log, ensure_dir
from src.updateCover import clean_json_names, assign_images_and_update_jellyfin, missing_folders
from src.constants import RAW_FILENAME, OUTPUT_FILENAME, ID_CACHE_FILENAME, MISSING_FOLDER


def start_get_and_save_series_and_movie():
media_list = get_and_save_series_and_movies()
async def start_get_and_save_series_and_movie():
media_list = await get_and_save_series_and_movies()
if media_list:
new_ids, has_processing_tags, items_with_tags, items_with_unknown_years = process_media_list(media_list)
old_ids = load_cached_ids()

unknown_years = any(item.get('Year') == 'Unknown' and item['Type'] in ['Series', 'Movie'] for item in media_list)

if has_processing_tags or unknown_years:
log("IMDB or TVDB tags detected or unknown years found. Waiting 30 seconds before refreshing...")
if has_processing_tags:
@@ -31,17 +34,17 @@ def start_get_and_save_series_and_movie():
log("Items with unknown years:")
for item in items_with_unknown_years:
log(f" - {item}")
time.sleep(30)
await asyncio.sleep(30)
if os.path.exists(ID_CACHE_FILENAME):
os.remove(ID_CACHE_FILENAME)
return start_get_and_save_series_and_movie() # Restart the process
return await start_get_and_save_series_and_movie() # Restart the process

if new_ids != old_ids:
log("Changes in media items detected. Running main function...")
clean_json_names(RAW_FILENAME) # Clean the raw file first
clean_json_names(RAW_FILENAME)
new_sorted_data = sort_series_and_movies(RAW_FILENAME)
if new_sorted_data:
save_if_different(OUTPUT_FILENAME, new_sorted_data)
await save_if_different(OUTPUT_FILENAME, new_sorted_data)
save_cached_ids(new_ids)
else:
log("No changes detected in media items.")
@@ -50,7 +53,7 @@ def start_get_and_save_series_and_movie():
log("Failed to retrieve series and movies data.", success=False)


def get_and_save_series_and_movies(use_local_file: bool = False) -> Optional[List[Dict]]:
async def get_and_save_series_and_movies(use_local_file: bool = False) -> Optional[List[Dict]]:
# Useful for Debugging
use_local_file = False

@@ -79,35 +82,35 @@ def get_and_save_series_and_movies(use_local_file: bool = False) -> Optional[List[Dict]]:
attempt = 0
retry_delay = 5

while True:
attempt += 1
try:
response = requests.get(url, headers=headers, params=params)

if response.status_code == 401:
log("Invalid API Key. Please check your API key and try again.", success=False)
time.sleep(retry_delay)
continue

response.raise_for_status()

items = response.json().get('Items')
if not items:
log("No items found in the response", success=False)
async with aiohttp.ClientSession() as session:
while True:
attempt += 1
try:
async with session.get(url, headers=headers, params=params) as response:
if response.status == 401:
log("Invalid API Key. Please check your API key and try again.", success=False)
await asyncio.sleep(retry_delay)
continue

response.raise_for_status()
data = await response.json()
items = data.get('Items')
if not items:
log("No items found in the response", success=False)
time.sleep(retry_delay)
continue

media_list = [create_media_info(item) for item in items]

with open(RAW_FILENAME, 'w', encoding='utf-8') as f:
json.dump(media_list, f, ensure_ascii=False, indent=4)

return media_list

except RequestException as e:
log(f"Request failed (Attempt {attempt}): {e}", success=False)
log(f"Retrying in {retry_delay} seconds (Attempt {attempt})...")
time.sleep(retry_delay)
continue

media_list = [create_media_info(item) for item in items]

with open(RAW_FILENAME, 'w', encoding='utf-8') as f:
json.dump(media_list, f, ensure_ascii=False, indent=4)

return media_list

except RequestException as e:
log(f"Request failed (Attempt {attempt}): {e}", success=False)
log(f"Retrying in {retry_delay} seconds (Attempt {attempt})...")
time.sleep(retry_delay)

return None

@@ -316,7 +319,7 @@ def create_boxset_info(boxset: Dict) -> Dict:
return boxset_info


def save_if_different(filename: str, new_data: List[Dict]):
async def save_if_different(filename: str, new_data: List[Dict]):
try:
if os.path.exists(filename):
with open(filename, 'r', encoding='utf-8') as file:
@@ -349,7 +352,7 @@ def save_if_different(filename: str, new_data: List[Dict]):
os.remove(MISSING_FOLDER)

try:
assign_images_and_update_jellyfin(filename)
await assign_images_and_update_jellyfin(filename)
except OSError as exc:
if exc.errno == 36:
log(f"Filename too long {str(exc)}", success=False)
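
get_and_save_series_and_movies now talks to the Jellyfin API through a shared aiohttp.ClientSession instead of requests. A simplified sketch of that retry loop under the same assumptions (the URL, headers, and 'Items' key mirror the diff; the helper name and usage values are made up):

```python
import asyncio
import aiohttp

async def fetch_items(url: str, headers: dict, retry_delay: int = 5) -> list:
    """Keep retrying a GET request until a non-empty 'Items' list comes back."""
    async with aiohttp.ClientSession() as session:
        while True:
            try:
                async with session.get(url, headers=headers) as response:
                    if response.status == 401:
                        # Invalid API key: wait and try again, as the diff does.
                        await asyncio.sleep(retry_delay)
                        continue
                    response.raise_for_status()
                    data = await response.json()
                    items = data.get("Items")
                    if items:
                        return items
            except aiohttp.ClientError as exc:
                print(f"Request failed: {exc}")
            await asyncio.sleep(retry_delay)  # non-blocking backoff between attempts

# Example usage (URL and token are placeholders):
# items = asyncio.run(fetch_items("http://jellyfin:8096/Items", {"X-Emby-Token": "..."}))
```

Note that the committed hunk still calls time.sleep and catches requests' RequestException inside the async function; the sketch above swaps in await asyncio.sleep and aiohttp.ClientError so the retry path stays non-blocking.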
7 changes: 5 additions & 2 deletions src/mediux_downloader.py
@@ -24,7 +24,10 @@


def mediux_downloader():
asyncio.run(async_mediux_downloader())
if asyncio.get_event_loop().is_running():
return asyncio.create_task(async_mediux_downloader())
else:
asyncio.run(async_mediux_downloader())


async def async_mediux_downloader():
@@ -35,7 +38,7 @@ async def async_mediux_downloader():
for index, download_url in enumerate(download_urls):
if not download_url.startswith("https://mediux.pro/sets"):
log("Please select a set link instead of a collection link.")
print("Invialid Link:", download_url)
log(f"Invialid Link: {download_url}", success=False)
sys.exit(1)

log(f'Downloading set information for URL {index + 1}')
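
The new mediux_downloader() either schedules async_mediux_downloader() on the already-running event loop or starts a fresh one. A self-contained sketch of that dispatch pattern, written with asyncio.get_running_loop() (the modern equivalent of the get_event_loop().is_running() check in the diff); the worker coroutine here is a placeholder:

```python
import asyncio

async def _download() -> str:
    """Placeholder for the real async downloader."""
    await asyncio.sleep(0)
    return "done"

def download_entry():
    """Run the coroutine directly, or hand back a Task if a loop is already running."""
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        # No running loop (called from plain synchronous code): block until done.
        return asyncio.run(_download())
    # Called from inside async code: schedule it and let the caller await the Task.
    return loop.create_task(_download())

# From synchronous code:  result = download_entry()
# From a coroutine:       result = await download_entry()
```

Callers in main.py that do `await mediux_downloader()` take the Task branch, so the await resolves once the download actually finishes.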