Revert "Added asynchron Requests for Jellyfin Updates"
This reverts commit 55408e2
Iceshadow1404 committed Aug 13, 2024
1 parent 563fd10 commit 932983e
Showing 5 changed files with 149 additions and 156 deletions.
56 changes: 26 additions & 30 deletions main.py
@@ -6,7 +6,6 @@
from src.constants import *
import tempfile
import errno
import asyncio


try:
@@ -38,6 +37,7 @@ def clean_log_files():
os.remove(log_file)
Path(log_file).touch()


def acquire_lock(lock_file):
try:
if os.path.exists(lock_file):
@@ -69,7 +69,7 @@ def pid_exists(pid):
return True


async def main():
def main():
"""Main function for processing covers and updating Jellyfin."""
lock_file = os.path.join(tempfile.gettempdir(), 'jellyfin_cover_manager.lock')
lock = acquire_lock(lock_file)
@@ -81,17 +81,17 @@ async def main():
try:
clean_log_files()
organize_covers()
await start_get_and_save_series_and_movie()
start_get_and_save_series_and_movie()

try:
clean_json_names(OUTPUT_FILENAME)
except json.JSONDecodeError as json_error:
log(f"JSON decoding error: {str(json_error)}. Creating new files...", success=False)
await delete_corrupted_files()
delete_corrupted_files()
return

missing_folders.clear()
await assign_images_and_update_jellyfin(OUTPUT_FILENAME)
assign_images_and_update_jellyfin(OUTPUT_FILENAME)

if missing_folders:
if os.path.exists(MISSING_FOLDER):
@@ -108,7 +108,7 @@ async def main():
finally:
release_lock(lock)

async def delete_corrupted_files():
def delete_corrupted_files():
"""Delete existing files and recreate them with fresh data."""
files_to_recreate = [RAW_FILENAME, OUTPUT_FILENAME, ID_CACHE_FILENAME]

@@ -118,78 +118,74 @@ async def delete_corrupted_files():
os.remove(file)
log(f"Deleted existing file: {file}", success=True)

await start_get_and_save_series_and_movie()
start_get_and_save_series_and_movie()
clean_json_names(OUTPUT_FILENAME)

log("Successfully recreated and populated new files", success=True)
except Exception as e:
log(f"Error recreating files: {str(e)}", success=False)

async def check_raw_cover():
def check_raw_cover():
"""Check Raw Cover directory every 10 seconds for new files."""
while not stop_thread.is_set():
try:
for file in Path(RAW_COVER_DIR).iterdir():
if file.suffix.lower() in ['.filepart']:
while file.exists():
print(f"Waiting for {file.name} to finish transferring...")
await asyncio.sleep(1)
time.sleep(1)
continue

if file.suffix.lower() in ['.zip', '.png', '.jpg', '.jpeg', '.webp']:
initial_size = file.stat().st_size
await asyncio.sleep(1)
time.sleep(1)
if file.stat().st_size == initial_size:
log(f"Found new file: {file.name}")
await main()
main()
break
if os.path.getsize(MEDIUX_FILE) != 0:
log("mediux.txt is not empty. Running mediux_downloader.")
log("waiting for additional links")
await asyncio.sleep(10)
await mediux_downloader()
time.sleep(10)
mediux_downloader()
except Exception as e:
error_message = f"Error checking raw cover: {str(e)}"
log(error_message, success=False)

await asyncio.sleep(5)
time.sleep(5)

print("Checker thread stopped.")

async def run_program(run_main_immediately=False):
def run_program(run_main_immediately=False):
"""Main program entry point."""
setup_directories()
try:
if os.path.getsize(MEDIUX_FILE) != 0:
await mediux_downloader()
except FileNotFoundError:
if not os.path.exists(MEDIUX_FILE):
with open(MEDIUX_FILE, 'w') as f:
pass
if os.path.getsize(MEDIUX_FILE) != 0:
mediux_downloader()


if run_main_immediately:
await main()
main()

checker_task = asyncio.create_task(check_raw_cover())
checker_thread = threading.Thread(target=check_raw_cover)
checker_thread.start()

try:
while not stop_thread.is_set():
await start_get_and_save_series_and_movie()
await asyncio.sleep(30)
start_get_and_save_series_and_movie()
time.sleep(30)
except KeyboardInterrupt:
print("Main program is closing...")
stop_thread.set()
await checker_task
print("Checker task has been terminated.")
checker_thread.join()
print("Checker thread has been terminated.")

if __name__ == '__main__':
try:
clean_log_files()
parser = argparse.ArgumentParser(description="Jellyfin Cover Manager")
parser.add_argument("--main", action="store_true", help="Run the main function immediately after start")
args = parser.parse_args()

asyncio.run(run_program(run_main_immediately=args.main))
run_program(run_main_immediately=args.main)
except Exception as e:
print(f"Unhandled exception in main script: {e}")
traceback.print_exc()
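
Note: the main.py changes above swap the asyncio-based entry point back to plain threading — the cover checker becomes a threading.Thread, waits are time.sleep calls, and shutdown is signalled through the stop_thread event. Below is a minimal sketch of that restored pattern, assuming a stop_thread event and a watch directory; the helper name, directory, and interval values are illustrative, not the repository's exact code.

import threading
import time
from pathlib import Path

stop_thread = threading.Event()

def check_raw_cover_sketch(directory="RawCover", interval=5):
    # Poll the directory until the stop event is set.
    while not stop_thread.is_set():
        if Path(directory).exists():
            for file in Path(directory).iterdir():
                size_before = file.stat().st_size
                time.sleep(1)  # crude check that the file has finished transferring
                if file.stat().st_size == size_before:
                    print(f"Found new file: {file.name}")
        time.sleep(interval)

checker = threading.Thread(target=check_raw_cover_sketch)
checker.start()
try:
    while not stop_thread.is_set():
        time.sleep(30)  # the main loop does its own periodic work here
except KeyboardInterrupt:
    stop_thread.set()  # ask the checker loop to exit ...
    checker.join()     # ... and wait until it has
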
77 changes: 37 additions & 40 deletions src/getIDs.py
@@ -5,23 +5,20 @@
import time
from typing import List, Dict, Set, Tuple, Optional
from requests.exceptions import RequestException
import asyncio
import aiohttp

from src.config import JELLYFIN_URL, API_KEY, TMDB_API_KEY, USE_TMDB
from src.utils import log, ensure_dir
from src.updateCover import clean_json_names, assign_images_and_update_jellyfin, missing_folders
from src.constants import RAW_FILENAME, OUTPUT_FILENAME, ID_CACHE_FILENAME, MISSING_FOLDER


async def start_get_and_save_series_and_movie():
media_list = await get_and_save_series_and_movies()
def start_get_and_save_series_and_movie():
media_list = get_and_save_series_and_movies()
if media_list:
new_ids, has_processing_tags, items_with_tags, items_with_unknown_years = process_media_list(media_list)
old_ids = load_cached_ids()

unknown_years = any(item.get('Year') == 'Unknown' and item['Type'] in ['Series', 'Movie'] for item in media_list)

if has_processing_tags or unknown_years:
log("IMDB or TVDB tags detected or unknown years found. Waiting 30 seconds before refreshing...")
if has_processing_tags:
@@ -32,17 +29,17 @@ async def start_get_and_save_series_and_movie():
log("Items with unknown years:")
for item in items_with_unknown_years:
log(f" - {item}")
await asyncio.sleep(30)
time.sleep(30)
if os.path.exists(ID_CACHE_FILENAME):
os.remove(ID_CACHE_FILENAME)
return await start_get_and_save_series_and_movie() # Restart the process
return start_get_and_save_series_and_movie() # Restart the process

if new_ids != old_ids:
log("Changes in media items detected. Running main function...")
clean_json_names(RAW_FILENAME)
clean_json_names(RAW_FILENAME) # Clean the raw file first
new_sorted_data = sort_series_and_movies(RAW_FILENAME)
if new_sorted_data:
await save_if_different(OUTPUT_FILENAME, new_sorted_data)
save_if_different(OUTPUT_FILENAME, new_sorted_data)
save_cached_ids(new_ids)
else:
log("No changes detected in media items.")
@@ -51,7 +48,7 @@ async def start_get_and_save_series_and_movie():
log("Failed to retrieve series and movies data.", success=False)


async def get_and_save_series_and_movies(use_local_file: bool = False) -> Optional[List[Dict]]:
def get_and_save_series_and_movies(use_local_file: bool = False) -> Optional[List[Dict]]:
# Useful for Debugging
use_local_file = False

@@ -80,35 +77,35 @@ async def get_and_save_series_and_movies(use_local_file: bool = False) -> Optional[List[Dict]]:
attempt = 0
retry_delay = 5

async with aiohttp.ClientSession() as session:
while True:
attempt += 1
try:
async with session.get(url, headers=headers, params=params) as response:
if response.status == 401:
log("Invalid API Key. Please check your API key and try again.", success=False)
await asyncio.sleep(retry_delay)
continue

response.raise_for_status()
data = await response.json()
items = data.get('Items')
if not items:
log("No items found in the response", success=False)
time.sleep(retry_delay)
continue

media_list = [create_media_info(item) for item in items]

with open(RAW_FILENAME, 'w', encoding='utf-8') as f:
json.dump(media_list, f, ensure_ascii=False, indent=4)

return media_list

except RequestException as e:
log(f"Request failed (Attempt {attempt}): {e}", success=False)
log(f"Retrying in {retry_delay} seconds (Attempt {attempt})...")
while True:
attempt += 1
try:
response = requests.get(url, headers=headers, params=params)

if response.status_code == 401:
log("Invalid API Key. Please check your API key and try again.", success=False)
time.sleep(retry_delay)
continue

response.raise_for_status()

items = response.json().get('Items')
if not items:
log("No items found in the response", success=False)
time.sleep(retry_delay)
continue

media_list = [create_media_info(item) for item in items]

with open(RAW_FILENAME, 'w', encoding='utf-8') as f:
json.dump(media_list, f, ensure_ascii=False, indent=4)

return media_list

except RequestException as e:
log(f"Request failed (Attempt {attempt}): {e}", success=False)
log(f"Retrying in {retry_delay} seconds (Attempt {attempt})...")
time.sleep(retry_delay)

return None

Expand Down Expand Up @@ -317,7 +314,7 @@ def create_boxset_info(boxset: Dict) -> Dict:
return boxset_info


async def save_if_different(filename: str, new_data: List[Dict]):
def save_if_different(filename: str, new_data: List[Dict]):
try:
if os.path.exists(filename):
with open(filename, 'r', encoding='utf-8') as file:
@@ -350,7 +347,7 @@ async def save_if_different(filename: str, new_data: List[Dict]):
os.remove(MISSING_FOLDER)

try:
await assign_images_and_update_jellyfin(filename)
assign_images_and_update_jellyfin(filename)
except OSError as exc:
if exc.errno == 36:
log(f"Filename too long {str(exc)}", success=False)
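
Note: in src/getIDs.py the aiohttp ClientSession is replaced by a blocking requests.get loop with a fixed retry delay. A condensed sketch of that retry pattern follows, assuming a Jellyfin /Items endpoint and the X-Emby-Token auth header (the header name and URL are assumptions; the real headers and params are defined outside the hunks shown above).

import time
import requests
from requests.exceptions import RequestException

def fetch_items(url, api_key, retry_delay=5, max_attempts=5):
    # Blocking fetch with fixed-delay retries. The reverted code retries forever;
    # a cap is used here so the sketch always terminates.
    headers = {"X-Emby-Token": api_key}  # assumed header name
    for attempt in range(1, max_attempts + 1):
        try:
            response = requests.get(url, headers=headers, timeout=30)
            if response.status_code == 401:
                print("Invalid API Key. Please check your API key and try again.")
            else:
                response.raise_for_status()
                items = response.json().get("Items")
                if items:
                    return items
                print("No items found in the response")
        except RequestException as exc:
            print(f"Request failed (Attempt {attempt}): {exc}")
        time.sleep(retry_delay)
    return None

# e.g. fetch_items("http://localhost:8096/Items", "your-api-key")
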
7 changes: 2 additions & 5 deletions src/mediux_downloader.py
@@ -24,10 +24,7 @@


def mediux_downloader():
if asyncio.get_event_loop().is_running():
return asyncio.create_task(async_mediux_downloader())
else:
asyncio.run(async_mediux_downloader())
asyncio.run(async_mediux_downloader())


async def async_mediux_downloader():
@@ -38,7 +35,7 @@ async def async_mediux_downloader():
for index, download_url in enumerate(download_urls):
if not download_url.startswith("https://mediux.pro/sets"):
log("Please select a set link instead of a collection link.")
log(f"Invialid Link: {download_url}", success=False)
print("Invialid Link:", download_url)
sys.exit(1)

log(f'Downloading set information for URL {index + 1}')
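
Note: the src/mediux_downloader.py change drops the "is an event loop already running?" branch — since main.py no longer runs under asyncio, the synchronous wrapper can always call asyncio.run() directly. A small sketch of that sync-to-async bridge, where the coroutine body is a stand-in rather than the real downloader:

import asyncio

async def async_download(url):
    # Stand-in for async_mediux_downloader(); simulates asynchronous work.
    await asyncio.sleep(0.1)
    return f"downloaded {url}"

def mediux_downloader_sketch(url):
    # asyncio.run() creates a fresh event loop, runs the coroutine to completion,
    # and closes the loop. It raises RuntimeError if called while an event loop
    # is already running in the same thread -- the case the removed branch handled.
    return asyncio.run(async_download(url))

print(mediux_downloader_sketch("https://mediux.pro/sets/example"))
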
(Diffs for the remaining two changed files did not load.)
