diff --git a/backend/app/http_routes.py b/backend/app/http_routes.py index 64ddd13e..ecb609f7 100644 --- a/backend/app/http_routes.py +++ b/backend/app/http_routes.py @@ -24,8 +24,6 @@ from routes.purchase_end_stripe_webhook import PurchaseEndStripeWebHook from routes.check_expired_purchases import ExpiredPurchasesChecker from routes.device import Device -from routes.daily_notifications import DailyNotifications -from routes.daily_emails import DailyEmails from routes.event import Event from routes.text_type import TextType from routes.text_edit_action import TextEditAction @@ -90,8 +88,6 @@ def __init__(self, api): api.add_resource(PurchaseEndStripeWebHook, "/subscription_end") api.add_resource(ExpiredPurchasesChecker, "/check_expired_purchases") api.add_resource(Device, "/device") - api.add_resource(DailyNotifications, "/send_daily_notifications") - api.add_resource(DailyEmails, "/send_daily_emails") api.add_resource(Event, "/event") api.add_resource(TextType, "/text_type") api.add_resource(TextEditAction, "/text_edit_action") diff --git a/backend/app/routes/calendar_suggestion_notification.py b/backend/app/routes/calendar_suggestion_notification.py index 98b9aec8..58f3c5ff 100644 --- a/backend/app/routes/calendar_suggestion_notification.py +++ b/backend/app/routes/calendar_suggestion_notification.py @@ -5,6 +5,7 @@ from flask import request from flask_restful import Resource from app import db +from mojodex_core.entities.user import User from mojodex_core.logging_handler import log_error from mojodex_core.entities.db_base_entities import * from datetime import datetime @@ -42,8 +43,8 @@ def post(self): try: # Get all calendar_suggestion_pks with notification_time_utc is not none and between since_date and until_date - results = db.session.query(MdCalendarSuggestion, MdUser, MdTask) \ - .join(MdUser, MdUser.user_id == MdCalendarSuggestion.user_id) \ + results = db.session.query(MdCalendarSuggestion, User, MdTask) \ + .join(User, User.user_id == MdCalendarSuggestion.user_id) \ .join(MdTask, MdTask.task_pk == MdCalendarSuggestion.proposed_task_fk) \ .filter(MdCalendarSuggestion.reminder_date.isnot(None)) \ .filter(MdCalendarSuggestion.reminder == True) \ @@ -57,7 +58,7 @@ def post(self): "suggestion_text": calendar_suggestion.suggestion_text, "task_pk": calendar_suggestion.proposed_task_fk, "task_name": task.name_for_system, - "user_timezone_offset": user.timezone_offset, + "datetime_context": user.datetime_context, "user_name": user.name, "user_company_description": user.company_description, "user_goal": user.goal diff --git a/backend/app/routes/daily_emails.py b/backend/app/routes/daily_emails.py deleted file mode 100644 index ce8ef823..00000000 --- a/backend/app/routes/daily_emails.py +++ /dev/null @@ -1,267 +0,0 @@ -import os -import requests -from flask import request -from flask_restful import Resource -from app import db -from mojodex_core.logging_handler import log_error -from mojodex_core.entities.db_base_entities import * -from sqlalchemy import func, text, extract, case -from sqlalchemy.sql.functions import coalesce -from mojodex_core.email_sender.email_service import EmailService -from datetime import datetime, timedelta - -class DailyEmails(Resource): - reminder_email_type="reminder_email" - summary_email_type="summary_email_type" - - - def post(self): - if not request.is_json: - log_error(f"Error sending daily emails : Request must be JSON") - return {"error": "Request must be JSON"}, 400 - - try: - secret = request.headers['Authorization'] - if secret != 
os.environ["MOJODEX_SCHEDULER_SECRET"]: - log_error(f"Error sending daily emails : Authentication error : Wrong secret", notify_admin=True) - return {"error": "Authentication error : Wrong secret"}, 403 - except KeyError: - log_error(f"Error sending daily emails : Missing Authorization secret in headers", notify_admin=True) - return {"error": f"Missing Authorization secret in headers"}, 403 - - try: - timestamp = request.json['datetime'] - n_emails = min(50, int(request.json["n_emails"])) if "n_emails" in request.json else 50 - offset = int(request.json["offset"]) if "offset" in request.json else 0 - - except KeyError: - log_error(f"Error sending daily emails : Missing datetime in body", notify_admin=True) - return {"error": f"Missing datetime in body"}, 400 - - try: - # select first n_emails users - # today_morning = today at 00:00:00 - today_morning = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) - n_meeting_minutes_subquery = (db.session.query( - MdUserTask.user_id.label('user_id'), - MdUserTaskExecution.user_task_fk, - func.count(MdUserTaskExecution.user_task_execution_pk).label('today_meeting_minutes_counts')) - .join(MdUserTask, MdUserTaskExecution.user_task_fk == MdUserTask.user_task_pk) - .join(MdTask, MdUserTask.task_fk == MdTask.task_pk) - .join(MdUser, MdUserTask.user_id == MdUser.user_id) - .filter(MdTask.name_for_system == 'prepare_meeting_minutes', - func.timezone( - text(f'md_user.timezone_offset * interval \'1 minute\''), - MdUserTaskExecution.start_date) >= today_morning, - MdUserTaskExecution.start_date.isnot(None)) - .group_by(MdUserTaskExecution.user_task_fk, MdUserTask.user_id) - ).subquery() - - # subquery to get the latest meeting minutes date for each user - # (we will filter those which last meeting minutes date is <= 3 days) - last_meeting_minutes_date_subquery = (db.session.query( - MdUserTask.user_id.label('user_id'), - func.max(MdUserTaskExecution.start_date).label('latest_meeting_minutes_date')) - .join(MdUserTask, - MdUserTaskExecution.user_task_fk == MdUserTask.user_task_pk) - .join(MdTask, MdUserTask.task_fk == MdTask.task_pk) - .filter(MdTask.name_for_system == 'prepare_meeting_minutes', - MdUserTaskExecution.start_date.isnot(None)) - .group_by(MdUserTask.user_id) - ).subquery() - three_days_ago = today_morning - timedelta(days=3) - - # Create a subquery to get the user ids who received a reminder_email the day before yesterday - day_before_yesterday = today_morning - timedelta(days=2) - - reminder_email_day_before_yesterday_subquery = (db.session.query(MdUser.user_id.label('user_id')) - .join(MdEvent, MdUser.user_id == MdEvent.user_id) - .filter(MdEvent.event_type == DailyEmails.reminder_email_type, - MdEvent.creation_date >= day_before_yesterday, - MdEvent.creation_date < day_before_yesterday + timedelta( - days=1)) - .group_by(MdUser.user_id) - ).subquery() - - # Create a subquery to get the user ids who received a reminder_email yesterday - yesterday = today_morning - timedelta(days=1) - reminder_email_yesterday_subquery = (db.session.query(MdUser.user_id.label('user_id')) - .join(MdEvent, MdUser.user_id == MdEvent.user_id) - .filter(MdEvent.event_type == DailyEmails.reminder_email_type, - MdEvent.creation_date >= yesterday, - MdEvent.creation_date < yesterday + timedelta(days=1)) - .group_by(MdUser.user_id) - ).subquery() - - # subquery to count the md_process created since today morning for each user, then we join this subquery in the main query. 
- process_count_subquery = (db.session.query( - MdUser.user_id.label('user_id'), - func.count(MdProcess.process_pk).label('process_count_since_today')) - .join(MdUserTask, MdUser.user_id == MdUserTask.user_id) - .join(MdUserTaskExecution, MdUserTaskExecution.user_task_fk == MdUserTask.user_task_pk) - .join(MdFollowUp, - MdFollowUp.user_task_execution_fk == MdUserTaskExecution.user_task_execution_pk) - .join(MdProcess, MdProcess.follow_up_fk == MdFollowUp.follow_up_pk) - .filter(MdProcess.creation_date >= today_morning) - .group_by(MdUser.user_id) - ).subquery() - - # Create a subquery to get the 3 last md_followup.description with proactive=True created - # Window Function to rank rows within a user's follow ups partitioned by user_id, ordered by creation_date descending - ranked_proactive_followup_subquery = ( - db.session.query( - MdUserTask.user_id.label('user_id'), - MdFollowUp.description.label("proactive_followup_description"), - func.row_number() - .over(partition_by=MdUserTask.user_id, order_by=MdFollowUp.creation_date.desc()) - .label("followup_rank"), - ) - .join(MdUserTaskExecution, - MdFollowUp.user_task_execution_fk == MdUserTaskExecution.user_task_execution_pk) - .join(MdUserTask, MdUserTaskExecution.user_task_fk == MdUserTask.user_task_pk) - .filter(MdFollowUp.proactive == True) - .subquery() - ) - - # Filter the subquery to only include the top 3 ranked follow up descriptions for each user - filtered_proactive_followup_subquery = ( - db.session.query( - ranked_proactive_followup_subquery.c.user_id.label('user_id'), - ranked_proactive_followup_subquery.c.proactive_followup_description, - ) - .filter(ranked_proactive_followup_subquery.c.followup_rank <= 3) - .subquery() - ) - - # Aggregate the descriptions into an array - grouped_proactive_followups_subquery = ( - db.session.query( - filtered_proactive_followup_subquery.c.user_id.label('user_id'), - func.array_agg( - filtered_proactive_followup_subquery.c.proactive_followup_description - ).label("last_three_proactive_followups"), - ) - .group_by(filtered_proactive_followup_subquery.c.user_id) - ).subquery() - - # Subquery for user_language_code - user_lang_subquery = ( - db.session.query( - MdTaskDisplayedData.task_fk.label("task_fk"), - MdTaskDisplayedData.name_for_user.label("user_lang_name_for_user"), - ) - .join(MdTask, MdTask.task_pk == MdTaskDisplayedData.task_fk) - .join(MdUserTask, MdUserTask.task_fk == MdTask.task_pk) - .join(MdUser, MdUser.user_id == MdUserTask.user_id) - .filter(MdTaskDisplayedData.language_code == MdUser.language_code) - .subquery() - ) - - # Subquery for 'en' - en_subquery = ( - db.session.query( - MdTaskDisplayedData.task_fk.label("task_fk"), - MdTaskDisplayedData.name_for_user.label("en_name_for_user"), - ) - .filter(MdTaskDisplayedData.language_code == "en") - .subquery() - ) - - # subquery to find the first represented task in user_task, but no execution - enabled_not_executed_task_subquery = ( - db.session.query( - MdUserTask.user_id.label('user_id'), - coalesce(user_lang_subquery.c.user_lang_name_for_user, en_subquery.c.en_name_for_user).label( - "first_enabled_not_executed_task_name") - ) - .join(MdTask, MdUserTask.task_fk == MdTask.task_pk) - .outerjoin(MdUserTaskExecution, MdUserTask.user_task_pk == MdUserTaskExecution.user_task_fk) - .outerjoin(user_lang_subquery, MdTask.task_pk == user_lang_subquery.c.task_fk) - .outerjoin(en_subquery, MdTask.task_pk == en_subquery.c.task_fk) - .filter(MdUserTask.enabled.is_(True)) - .group_by( - MdUserTask.user_id, - 
user_lang_subquery.c.user_lang_name_for_user, - en_subquery.c.en_name_for_user - ) - .having(func.count(MdUserTaskExecution.user_task_execution_pk) == 0) - ).subquery() - - # select first n_notifications users and their associated number of meeting recaps today - results = db.session.query(MdUser.user_id, MdUser.email, MdUser.name, MdUser.company_description, - MdUser.timezone_offset, MdUser.goal, MdUser.language_code, - func.coalesce( - n_meeting_minutes_subquery.c.today_meeting_minutes_counts, 0 - ).label("n_meeting_minutes_today"), - case((reminder_email_yesterday_subquery.c.user_id.is_(None), False), - else_=True).label( - "received_reminder_email_yesterday"), - case((reminder_email_day_before_yesterday_subquery.c.user_id.is_(None), False), - else_=True).label( - "reminder_email_day_before_yesterday"), - func.coalesce(process_count_subquery.c.process_count_since_today, 0).label("n_processes_created_today"), - grouped_proactive_followups_subquery.c.last_three_proactive_followups.label("last_three_proactive_followups"), - enabled_not_executed_task_subquery.c.first_enabled_not_executed_task_name.label("first_enabled_not_executed_task_name") - ) \ - .distinct(MdUser.user_id) \ - .outerjoin(n_meeting_minutes_subquery, MdUser.user_id == n_meeting_minutes_subquery.c.user_id) \ - .outerjoin(reminder_email_yesterday_subquery, - MdUser.user_id == reminder_email_yesterday_subquery.c.user_id) \ - .outerjoin(reminder_email_day_before_yesterday_subquery, - MdUser.user_id == reminder_email_day_before_yesterday_subquery.c.user_id) \ - .outerjoin(grouped_proactive_followups_subquery, - MdUser.user_id == grouped_proactive_followups_subquery.c.user_id, ) \ - .outerjoin(process_count_subquery, MdUser.user_id == process_count_subquery.c.user_id) \ - .outerjoin(enabled_not_executed_task_subquery, - MdUser.user_id == enabled_not_executed_task_subquery.c.user_id) \ - .join(last_meeting_minutes_date_subquery, - MdUser.user_id == last_meeting_minutes_date_subquery.c.user_id) \ - .filter(last_meeting_minutes_date_subquery.c.latest_meeting_minutes_date >= three_days_ago) \ - .filter(MdUser.timezone_offset != None)\ - .filter(extract("hour", text("NOW() - md_user.timezone_offset * interval \'1 minute\'")) >= int(os.environ.get("DAILY_EMAIL_TIME", 17))) \ - .filter(extract("hour", text("NOW() - md_user.timezone_offset * interval \'1 minute\'")) < int(os.environ.get("DAILY_EMAIL_TIME", 17))+1) \ - .order_by(MdUser.user_id).offset(offset).limit(n_emails).all() - - results = [result._asdict() for result in results] - - users = [{"user_id": row["user_id"], - "email": row["email"], - "user_name": row["name"], - "user_company_description": row["company_description"], - "user_timezone_offset": row["timezone_offset"], - "user_goal": row["goal"], - "language": row["language_code"] if row["language_code"] else "en", - "n_meeting_minutes_today": row["n_meeting_minutes_today"], - "received_reminder_email_yesterday": row["received_reminder_email_yesterday"], - "received_reminder_email_the_day_before_yesterday": row["reminder_email_day_before_yesterday"], - "n_processes_created_today": row["n_processes_created_today"], - "last_three_proactive_followups": row["last_three_proactive_followups"], - "first_enabled_not_executed_task_name": row["first_enabled_not_executed_task_name"], - } for row in results] - # no meeting minutes today, already received a reminder yesterday and the day before yesterday => send admin email - users_needing_admin_email = [user for user in users if user["n_meeting_minutes_today"] == 0 and user[ - 
"received_reminder_email_yesterday"] and user["received_reminder_email_the_day_before_yesterday"]] - # others = users - users_needing_admin_email - user_needing_email = [user for user in users if user not in users_needing_admin_email] - - for user in users_needing_admin_email: - user_id, email = user["user_id"], user["email"] - try: - EmailService().send_admin_email(f"User engagement", - f"User {user_id} with email {email} has done no meeting minutes for the last 2 days.") - except Exception as e: - log_error(f"Error sending daily notification to user {user_id}: {e}", notify_admin=True) - - # send backend for preparing email text - uri = f"{os.environ['BACKGROUND_BACKEND_URI']}/events_generation" - pload = {"datetime": datetime.now().isoformat(), "event_type": "daily_emails", "data": user_needing_email} - internal_request = requests.post(uri, json=pload) - if internal_request.status_code != 200: - log_error(f"Error while calling background events_generation : {internal_request.json()}") - - # return list of user_ids - return {"user_ids": [user["user_id"] for user in users]}, 200 - except Exception as e: - db.session.rollback() - log_error(f"Error sending daily emails : {e}", notify_admin=True) - return {"error": f"Error sending daily emails : {e}"}, 500 diff --git a/backend/app/routes/daily_notifications.py b/backend/app/routes/daily_notifications.py deleted file mode 100644 index 2f37b4ff..00000000 --- a/backend/app/routes/daily_notifications.py +++ /dev/null @@ -1,90 +0,0 @@ -import os - -import requests -from flask import request -from flask_restful import Resource -from app import db -from mojodex_core.logging_handler import log_error -from mojodex_core.entities.db_base_entities import * -from sqlalchemy import func, text, extract -from datetime import datetime - -class DailyNotifications(Resource): - - def post(self): - if not request.is_json: - log_error(f"Error sending daily notifications : Request must be JSON") - return {"error": "Request must be JSON"}, 400 - - try: - secret = request.headers['Authorization'] - if secret != os.environ["MOJODEX_SCHEDULER_SECRET"]: - log_error(f"Error sending daily notifications : Authentication error : Wrong secret", notify_admin=True) - return {"error": "Authentication error : Wrong secret"}, 403 - except KeyError: - log_error(f"Error sending daily notifications : Missing Authorization secret in headers", notify_admin=True) - return {"error": f"Missing Authorization secret in headers"}, 403 - - try: - timestamp = request.json['datetime'] - n_notifications = min(50, int(request.json["n_notifications"])) if "n_notifications" in request.json else 50 - offset = int(request.json["offset"]) if "offset" in request.json else 0 - except Exception as e: - log_error(f"Error sending daily notifications : {e}", notify_admin=True) - return {"error": f"Missing datetime in body"}, 400 - - try: - # today_morning = today at 00:00:00 - today_morning = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) - - todos_subquery = (db.session.query( - MdUserTask.user_id.label('user_id'), - MdUserTaskExecution.user_task_fk, - func.array_agg(MdTodo.description).label('today_new_todos_desc')) - .join(MdUserTask, MdUserTaskExecution.user_task_fk == MdUserTask.user_task_pk) - .join(MdTodo, MdTodo.user_task_execution_fk == MdUserTaskExecution.user_task_execution_pk) - .join(MdUser, MdUser.user_id == MdUserTask.user_id) - .filter(func.timezone( - text(f'md_user.timezone_offset * interval \'1 minute\''), MdTodo.creation_date) >= today_morning) - 
.group_by(MdUserTaskExecution.user_task_fk, MdUserTask.user_id) - ).subquery() - - results = db.session.query(MdUser.user_id, MdUser.name, MdUser.company_description, MdUser.timezone_offset, - MdUser.goal, - MdUser.language_code, - todos_subquery.c.today_new_todos_desc.label("today_new_todos_desc") - ).outerjoin(todos_subquery, MdUser.user_id == todos_subquery.c.user_id).filter( - MdUser.timezone_offset != None) \ - .filter(extract("hour", text('NOW() - md_user.timezone_offset * interval \'1 minute\'')) >= int( - os.environ.get('DAILY_NOTIF_TIME', 14))) \ - .filter(extract("hour", text('NOW() - md_user.timezone_offset * interval \'1 minute\'')) < int( - os.environ.get('DAILY_NOTIF_TIME', 14)) + 1) \ - .filter(todos_subquery.c.today_new_todos_desc.isnot(None)) \ - .order_by( - MdUser.user_id).offset(offset).limit(n_notifications).all() - - results = [result._asdict() for result in results] - - users = [{'user_id': row["user_id"], - 'user_name': row["name"], - 'user_company_description': row["company_description"], - 'user_timezone_offset': row["timezone_offset"], - 'user_goal': row["goal"], - 'language': row["language_code"] if row["language_code"] else "en", - 'new_todos_today': row["today_new_todos_desc"] - } for row in results] - - - # send backend for preparing email text - uri = f"{os.environ['BACKGROUND_BACKEND_URI']}/events_generation" - pload = {'datetime': datetime.now().isoformat(), 'event_type': 'daily_notifications', 'data': users} - internal_request = requests.post(uri, json=pload) - if internal_request.status_code != 200: - log_error(f"Error while calling background events_generation : {internal_request.json()}") - - # return list of user_ids - return {"user_ids": [user["user_id"] for user in users]}, 200 - except Exception as e: - db.session.rollback() - log_error(f"Error sending daily notifications : {e}", notify_admin=True) - return {"error": f"Error sending daily notifications : {e}"}, 500 diff --git a/backend/app/routes/todo_daily_emails.py b/backend/app/routes/todo_daily_emails.py index 087552d3..f06ee2ab 100644 --- a/backend/app/routes/todo_daily_emails.py +++ b/backend/app/routes/todo_daily_emails.py @@ -6,6 +6,7 @@ from flask import request from flask_restful import Resource from app import db +from mojodex_core.entities.user import User from mojodex_core.logging_handler import log_error from mojodex_core.entities.db_base_entities import * from sqlalchemy import func, text, extract, and_ @@ -159,14 +160,14 @@ def post(self): deleted_todos_today = [deleted_todo_today._asdict() for deleted_todo_today in deleted_todos_today] # Users whose timezone is not null and whose hour is between 8 and 9 am and who are not on weekends - base_user_query = db.session.query(MdUser) \ - .filter(MdUser.timezone_offset != None) \ + base_user_query = db.session.query(User) \ + .filter(User.timezone_offset != None) \ .filter(extract("hour", text('NOW() - md_user.timezone_offset * interval \'1 minute\'')) >= int(os.environ.get('DAILY_TODO_EMAIL_TIME', 8))) \ .filter(extract("hour", text('NOW() - md_user.timezone_offset * interval \'1 minute\'')) < int(os.environ.get('DAILY_TODO_EMAIL_TIME', 8))+1) \ .filter(extract("dow", text('NOW() - md_user.timezone_offset * interval \'1 minute\'')) != int(0)) \ .filter(extract("dow", text('NOW() - md_user.timezone_offset * interval \'1 minute\'')) != int(6)) \ - .filter(MdUser.todo_email_reception == True) \ - .order_by(MdUser.user_id) \ + .filter(User.todo_email_reception == True) \ + .order_by(User.user_id) \ .offset(offset).limit(n_emails) \ .all() @@ 
-189,7 +190,7 @@ def post(self): "user_id": user.user_id, "email": user.email, "username": user.name, - "user_timezone_offset": user.timezone_offset, + "datetime_context": user.datetime_context, "company_description": user.company_description, "goal": user.goal, "language": user.language_code, diff --git a/background/app/app.py b/background/app/app.py index f22e9781..ee2a1a56 100644 --- a/background/app/app.py +++ b/background/app/app.py @@ -1,6 +1,4 @@ from gevent import monkey - - monkey.patch_all() @@ -32,20 +30,9 @@ db = SQLAlchemy(app) api = Api(app) -from background_logger import BackgroundLogger - -main_logger = BackgroundLogger("main_logger") - from models.documents.document_manager import DocumentManager document_manager = DocumentManager() -from language_retriever import LanguageRetriever -language_retriever = LanguageRetriever() - -from conversation_retriever import ConversationRetriever -conversation_retriever = ConversationRetriever() - - from http_routes import HttpRouteManager HttpRouteManager(api) diff --git a/background/app/background_logger.py b/background/app/background_logger.py deleted file mode 100644 index 41994d94..00000000 --- a/background/app/background_logger.py +++ /dev/null @@ -1,41 +0,0 @@ -import logging -import os - - -class EmojiFormatter(logging.Formatter): - def __init__(self): - super().__init__() - - def format(self, record): - emoji = '' - if record.levelno == logging.DEBUG: - emoji = 'šŸŸ£' - if record.levelno == logging.INFO: - emoji = 'šŸŸ¢' - elif record.levelno == logging.WARNING: - emoji = 'šŸŸ ' - elif record.levelno == logging.ERROR: - emoji = 'šŸ”“' - record.msg = f'{emoji} {record.name} :: {record.msg}' - return super().format(record) - -class BackgroundLogger(logging.Logger): - def __init__(self, name): - super().__init__(name, level=logging.NOTSET) - formatter = EmojiFormatter() - stream_handler = logging.StreamHandler() - stream_handler.setFormatter(formatter) - self.addHandler(stream_handler) - # switch on if os.environ.get("LOG_LEVEL") to set level - log_level = os.environ.get("LOG_LEVEL") - if log_level == "debug": - self.setLevel(logging.DEBUG) - elif log_level == "info": - self.setLevel(logging.INFO) - elif log_level == "warning": - self.setLevel(logging.WARNING) - elif log_level == "error": - self.setLevel(logging.ERROR) - else: # default - self.setLevel(logging.DEBUG) - diff --git a/background/app/conversation_retriever.py b/background/app/conversation_retriever.py deleted file mode 100644 index e23ac100..00000000 --- a/background/app/conversation_retriever.py +++ /dev/null @@ -1,70 +0,0 @@ - -from background_logger import BackgroundLogger -from mojodex_core.entities.db_base_entities import MdMessage - - -class ConversationRetriever: - logger_prefix = "ConversationRetriever::" - - def __init__(self): - self.logger = BackgroundLogger(ConversationRetriever.logger_prefix) - - def _get_all_session_messages(self, session_id): - from app import db - return db.session.query(MdMessage) \ - .filter(MdMessage.session_id == session_id) \ - .order_by(MdMessage.message_date) \ - .all() - - def get_conversation_as_string(self, session_id, agent_key="Agent", user_key="User", with_tags=True): - try: - messages = self._get_all_session_messages(session_id) - conversation = "" - for message in messages: - if message.sender == "user": # Session.user_message_key: - if "text" in message.message: - conversation += f"{user_key}: {message.message['text']}\n" - elif message.sender == "mojo": # Session.agent_message_key: - if "text" in message.message: - if 
"text_with_tags" in message.message and with_tags: - conversation += f"{agent_key}: {message.message['text_with_tags']}\n" - else: - conversation += f"{agent_key}: {message.message['text']}\n" - else: - raise Exception("Unknown message sender") - return conversation - except Exception as e: - raise Exception("_get_conversation_as_string: " + str(e)) - - def _get_user_messages_as_conversation(self, session_id, user_key="User"): - try: - messages = self._get_all_session_messages(session_id) - conversation = "" - for message in messages: - if message.sender == "user": # Session.user_message_key: - if "text" in message.message: - conversation += f"{user_key}: {message.message['text']}\n" - return conversation - except Exception as e: - raise Exception("_get_conversation_as_string: " + str(e)) - - - def get_conversation_as_list(self, session_id, agent_key="assistant", user_key="user", with_tags=True): - try: - messages = self._get_all_session_messages(session_id) - conversation = [] - for message in messages: - if message.sender == "user": # Session.user_message_key: - if "text" in message.message: - conversation.append({"role": user_key, "content": message.message['text']}) - elif message.sender == "mojo": # Session.agent_message_key: - if "text" in message.message: - if "text_with_tags" in message.message and with_tags: - conversation.append({"role": agent_key, "content": message.message['text_with_tags']}) - else: - conversation.append({"role": agent_key, "content": message.message['text']}) - else: - raise Exception("Unknown message sender") - return conversation - except Exception as e: - raise Exception("get_conversation_as_list: " + str(e)) \ No newline at end of file diff --git a/background/app/instructions/daily_emails_text_prompt.mpt b/background/app/instructions/daily_emails_text_prompt.mpt deleted file mode 100644 index f874b666..00000000 --- a/background/app/instructions/daily_emails_text_prompt.mpt +++ /dev/null @@ -1,38 +0,0 @@ -#! gpt-4o -#! gpt4-turbo/2023-03-15-preview -#! gpt-4-turbo-preview -#! mistral-large-latest - -YOUR CONTEXT -{{mojo_knowledge}} - -GLOBAL CONTEXT -{{user_datetime_context}} - -USER NAME -{{username}} - -{%if user_company_knowledge%}USER'S COMPANY KNOWLEDGE -{{user_company_knowledge}}{%endif%} - -{%if user_business_goal%}USER'S BUSINESS GOAL -{{user_business_goal}}{%endif%} - -One of your tasks is to help the user keep track of all the important conversations he might have during the day. -{%if received_reminder_email_yesterday%} -Yesterday and today, the user didn't ask your help on that task. Maybe they had before. - -Prepare an *ultimate* friendly reminder email to reengage the user.{%else%}Today, the user didn't ask your help on that task. - -Prepare a friendly and engaging reminder email to encourage the user to tell you if they had a conversation and need help for that. -{%endif%} - -Make it personal. Do not imagine things. Stick to the information you have. -Don't be too chatty, quite straight to the point, 5 sentences max. -Write the email in the following json format: -{ - "subject": "", - "body": "" -} -Use language {{language}}. You can use emojis. -No talk, just email. \ No newline at end of file diff --git a/background/app/instructions/daily_notification_text.mpt b/background/app/instructions/daily_notification_text.mpt deleted file mode 100644 index e6a1c6b5..00000000 --- a/background/app/instructions/daily_notification_text.mpt +++ /dev/null @@ -1,40 +0,0 @@ -#! gpt-4o -#! gpt4-turbo/2023-03-15-preview -#! gpt-4-turbo-preview -#! 
mistral-large-latest - -YOUR CONTEXT -{{mojo_knowledge}} - -GLOBAL CONTEXT -{{user_datetime_context}} - -USER NAME -{{username}} - -{%if user_company_knowledge%}USER'S COMPANY KNOWLEDGE -{{user_company_knowledge}}{%endif%} - -{%if user_business_goal%}USER'S BUSINESS GOAL -{{user_business_goal}}{%endif%} - -One of your tasks is to extract the user's to-do list items from the tasks you help them with. -Today, you extracted {{new_todos_today|length}} new to-do list items from the tasks you help the user with this morning: -TO-DO LIST ITEMS YOU EXTRACTED TODAY -{%for new_todo in new_todos_today%} - {{new_todo}}{%endfor%} - -You make a great team with the user, and you love doing your work for them! - -Prepare a friendly and engaging reminder app notification message to encourage the user to check for the todos you prepare for them. -Refer to those to-dos as "to-dos" not "tasks", even when translation is needed. - -Notification is composed of a title and a message. -Title should catch the user's attention. -Message should be short and to the point. -Write notification in the following json format: -{ -"title": "", -"message": "<message less than 20 words>" -} -Use language {{language}}. You can use 1 emoji. -No talk, just message. \ No newline at end of file diff --git a/background/app/instructions/extract_todos.mpt b/background/app/instructions/extract_todos.mpt index 4beec2b2..46240e4b 100644 --- a/background/app/instructions/extract_todos.mpt +++ b/background/app/instructions/extract_todos.mpt @@ -50,7 +50,7 @@ Return those in the following json format: "todo_definition": "<Definition as it will be displayed in the user's todo list. The definition should help the user remember what was the original task. Mention any name, company,... that can help them get the context.>", - "mentioned_as_todo": <Did the user explicitly mentioned this as a todo? yes/no>, + "mentioned_as_todo": <Did the user explicitly mentioned this as a todo? yes/no (in english)>, "due_date": "<Date at which the todo will be displayed in user's todo list. Format yyyy-mm-dd>" }] } diff --git a/background/app/instructions/extract_user_knowledge.mpt b/background/app/instructions/extract_user_knowledge.mpt deleted file mode 100644 index cfc12939..00000000 --- a/background/app/instructions/extract_user_knowledge.mpt +++ /dev/null @@ -1,21 +0,0 @@ -#! gpt-4o -#! gpt4-turbo/2023-03-15-preview -#! gpt-4-turbo-preview -#! mistral-large-latest - -You are an AI assistant of a user. Update the summary of the user based on your conversation with them. If you are writing the summary for the first time, return a single sentence. - -If there is no new information about the user or the information is not worth noting (not an important or relevant fact to remember long-term), return the existing summary unchanged. - -Existing summary of the user: -{{existing_summary}} - -Full conversation: -{{conversation}} - -Make no mistake about how the user is connected to people, places, and concepts. -Include only facts about the user themselves. -Include only important facts that could be useful to remember long-term. -Write the summary in English. - -No talk, just the summary in english. 
\ No newline at end of file diff --git a/background/app/language_retriever.py b/background/app/language_retriever.py deleted file mode 100644 index e37ee043..00000000 --- a/background/app/language_retriever.py +++ /dev/null @@ -1,19 +0,0 @@ -from app import db -from background_logger import BackgroundLogger -from mojodex_core.entities.db_base_entities import MdSession - - -class LanguageRetriever: - logger_prefix = "LanguageRetriever::" - - def __init__(self): - self.logger = BackgroundLogger(LanguageRetriever.logger_prefix) - - def get_language_from_session_or_user(self, session_id, user): - try: - self.logger.debug(f"_get_session_language") - session_language = \ - db.session.query(MdSession.language).filter(MdSession.session_id == session_id).first()[0] - return session_language if session_language else user.language_code - except Exception as e: - raise Exception(f"{self.logger_prefix} _get_session_language:: {e}") \ No newline at end of file diff --git a/background/app/models/cortex/extract_todos_cortex.py b/background/app/models/cortex/extract_todos_cortex.py deleted file mode 100644 index fb77aa4e..00000000 --- a/background/app/models/cortex/extract_todos_cortex.py +++ /dev/null @@ -1,138 +0,0 @@ -import os - - -import requests -from app import db, language_retriever, conversation_retriever -from mojodex_core.entities.db_base_entities import * - -from models.user_task_execution import UserTaskExecution -from models.knowledge.knowledge_collector import KnowledgeCollector - -from background_logger import BackgroundLogger - -from models.todos.todos_creator import TodosCreator -from sqlalchemy import func -from datetime import datetime - -class ExtractTodosCortex: - logger_prefix = "ExtractTodosCortex" - - def __init__(self, user_task_execution): - try: - self.logger = BackgroundLogger( - f"{ExtractTodosCortex.logger_prefix} - user_task_execution_pk {user_task_execution.user_task_execution_pk}") - self.logger.debug(f"__init__") - self.session_id = user_task_execution.session_id # used to identify any call to openai - - self.user_task_execution_pk = user_task_execution.user_task_execution_pk - - self.user_task_pk, self.task_name_for_system, self.task_definition_for_system, self.task_input_values = self.__get_task_info( - user_task_execution) - - self.task_result = self.__get_task_result(user_task_execution) - self.user = db.session.query(MdUser).join(MdUserTask, MdUserTask.user_id == MdUser.user_id).filter( - MdUserTask.user_task_pk == self.user_task_pk).first() - self.user_task_execution = UserTaskExecution(self.session_id, self.user_task_execution_pk, self.task_name_for_system, - self.task_definition_for_system, self.task_input_values, - self.task_result, self.user.user_id) - - self.company = db.session.query(MdCompany).join(MdUser, MdUser.company_fk == MdCompany.company_pk).filter( - MdUser.user_id == self.user.user_id).first() - self.language = language_retriever.get_language_from_session_or_user(self.session_id, self.user) - self.knowledge_collector = KnowledgeCollector(self.user.name, self.user.timezone_offset, self.user.summary, self.user.company_description, self.user.goal) - - self.conversation = conversation_retriever.get_conversation_as_string(self.session_id, agent_key='YOU', user_key='USER', with_tags=False) - - self.linked_user_task_executions_todos = self.__get_linked_tasks_todos(user_task_execution) - - - except Exception as e: - self.logger.error(f"__init__ :: {e}") - - def extract_todos(self): - try: - - self.logger.debug(f"extract_todos") - todos_creator = 
TodosCreator( - self.user_task_execution, - self.knowledge_collector, - self.language, - self.conversation, - self.linked_user_task_executions_todos) - todos_creator.extract_and_save() - self.__mark_todo_extracted() - except Exception as e: - self.logger.error(f"extract_todos: {e}") - - def __get_task_info(self, user_task_execution): - try: - user_task_pk, task_name_for_system, task_definition_for_system, task_input_values = db.session.query( - MdUserTask.user_task_pk, MdTask.name_for_system, MdTask.definition_for_system, - MdUserTaskExecution.json_input_values) \ - .join(MdUserTask, MdUserTask.task_fk == MdTask.task_pk) \ - .join(MdUserTaskExecution, MdUserTask.user_task_pk == MdUserTaskExecution.user_task_fk) \ - .filter(MdUserTaskExecution.user_task_execution_pk == user_task_execution.user_task_execution_pk) \ - .first() - - return user_task_pk, task_name_for_system, task_definition_for_system, task_input_values - except Exception as e: - raise Exception(f"_get_task_info: {e}") - - def __get_task_result(self, user_task_execution): - try: - task_result = db.session.query(MdProducedTextVersion.production) \ - .join(MdProducedText, MdProducedTextVersion.produced_text_fk == MdProducedText.produced_text_pk) \ - .join(MdUserTaskExecution, - MdProducedText.user_task_execution_fk == MdUserTaskExecution.user_task_execution_pk) \ - .filter(MdUserTaskExecution.user_task_execution_pk == user_task_execution.user_task_execution_pk) \ - .order_by(MdProducedTextVersion.creation_date.desc()) \ - .first()[0] - - return task_result - except Exception as e: - raise Exception(f"__get_task_result: {e}") - - - def __get_linked_tasks_todos(self, user_task_execution): - try: - linked_user_task_executions_todos = [] - - # Subquery to get the latest todo_scheduling for each todo - latest_todo_scheduling = db.session.query( - MdTodoScheduling.todo_fk, - func.max(MdTodoScheduling.scheduled_date).label('latest_scheduled_date')) \ - .group_by(MdTodoScheduling.todo_fk) \ - .subquery() - - while user_task_execution and user_task_execution.predefined_action_from_user_task_execution_fk: - user_task_execution = db.session.query(MdUserTaskExecution) \ - .filter(MdUserTaskExecution.user_task_execution_pk == user_task_execution.predefined_action_from_user_task_execution_fk) \ - .first() - if user_task_execution: - todos = db.session.query(MdTodo.description, latest_todo_scheduling.c.latest_scheduled_date) \ - .join(latest_todo_scheduling, MdTodo.todo_pk == latest_todo_scheduling.c.todo_fk) \ - .filter(MdTodo.user_task_execution_fk == user_task_execution.user_task_execution_pk) \ - .all() - linked_user_task_executions_todos += [{"scheduled_date": str(scheduled_date), "description": description} for description, scheduled_date in todos] - - return linked_user_task_executions_todos - - except Exception as e: - raise Exception(f"__get_linked_tasks_todos: {e}") - - - def __mark_todo_extracted(self): - try: - uri = f"{os.environ['MOJODEX_BACKEND_URI']}/extract_todos" - # Save follow-ups in db => send to mojodex-backend - pload = {'datetime': datetime.now().isoformat(), - 'user_task_execution_fk': self.user_task_execution.user_task_execution_pk} - headers = {'Authorization': os.environ['MOJODEX_BACKGROUND_SECRET'], 'Content-Type': 'application/json'} - internal_request = requests.put(uri, json=pload, headers=headers) - if internal_request.status_code != 200: - raise Exception(str(internal_request.json())) - return internal_request.json()["user_task_execution_pk"] - except Exception as e: - raise Exception(f"__save_to_db: {e}") - - 
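# ============================================================================
# Editor's note (illustrative sketch, not part of the diff): the hunks above
# replace direct MdUser queries and the raw `user_timezone_offset` payload
# field with the User entity from mojodex_core.entities.user and its
# `datetime_context` property. That property's implementation is not shown
# anywhere in this patch, so the standalone sketch below only illustrates,
# under stated assumptions, how such a value could be derived from
# `timezone_offset` (local time = UTC now minus timezone_offset minutes,
# matching the SQL expression `NOW() - md_user.timezone_offset * interval
# '1 minute'` used in the routes above). The class and method names here are
# hypothetical and do not exist in the codebase.
# ============================================================================
from datetime import datetime, timedelta, timezone


class UserDatetimeContextSketch:
    """Hypothetical stand-in for the datetime-related part of the User entity."""

    def __init__(self, timezone_offset_minutes: int):
        # Same convention as the SQL above: subtracting the offset (in minutes)
        # from UTC yields the user's local time.
        self.timezone_offset = timezone_offset_minutes

    @property
    def datetime_context(self) -> str:
        # Render a short, human-readable description of the user's local time,
        # suitable for injecting into a prompt in place of the raw offset.
        local_time = datetime.now(timezone.utc) - timedelta(minutes=self.timezone_offset)
        return f"User's current local datetime: {local_time.strftime('%A %d %B %Y, %H:%M')}"


if __name__ == "__main__":
    # Example: an offset of -120 minutes corresponds to UTC+2.
    print(UserDatetimeContextSketch(timezone_offset_minutes=-120).datetime_context)
# ============================================================================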
diff --git a/background/app/models/cortex/reschedule_todo_cortex.py b/background/app/models/cortex/reschedule_todo_cortex.py deleted file mode 100644 index f9e544b2..00000000 --- a/background/app/models/cortex/reschedule_todo_cortex.py +++ /dev/null @@ -1,145 +0,0 @@ - - -from app import db, language_retriever, conversation_retriever -from mojodex_core.entities.db_base_entities import * - -from models.user_task_execution import UserTaskExecution -from models.knowledge.knowledge_collector import KnowledgeCollector - -from background_logger import BackgroundLogger -from sqlalchemy import func, text - -from models.todos.todos_rescheduler import TodosRescheduler - -from mojodex_core.email_sender.email_service import EmailService -from datetime import datetime, timezone - -class RescheduleTodoCortex: - logger_prefix = "RescheduleTodoCortex" - - def __init__(self, todo): - try: - self.logger = BackgroundLogger( - f"{RescheduleTodoCortex.logger_prefix} - todo_pk {todo.todo_pk}") - self.logger.debug(f"__init__") - self.todo_pk = todo.todo_pk - self.todo_description = todo.description - self.n_scheduled = self.__get_n_scheduled(self.todo_pk) - self.first_scheduled_date = self.__get_first_scheduled_date(self.todo_pk) - - self.session_id, self.user_task_execution_pk, self.user_task_pk, self.task_name_for_system, self.task_definition_for_system, self.task_input_values = self.__get_task_info( - self.todo_pk) - - self.task_result = self.__get_task_result(self.user_task_execution_pk) - self.user = db.session.query(MdUser).join(MdUserTask, MdUserTask.user_id == MdUser.user_id).filter( - MdUserTask.user_task_pk == self.user_task_pk).first() - self.company = db.session.query(MdCompany).join(MdUser, MdUser.company_fk == MdCompany.company_pk).filter( - MdUser.user_id == self.user.user_id).first() - self.language = language_retriever.get_language_from_session_or_user(self.session_id, self.user) - self.knowledge_collector = KnowledgeCollector(self.user.name, self.user.timezone_offset, self.user.summary, - self.user.company_description, self.user.goal) - self.user_task_execution = UserTaskExecution(self.session_id, self.user_task_execution_pk, - self.task_name_for_system, - self.task_definition_for_system, self.task_input_values, - self.task_result, self.user.user_id) - - self.todo_list = self.__get_todolist(self.user.user_id) - - - except Exception as e: - self.logger.error(f"__init__ :: {e}") - EmailService().send_technical_error_email(f"Error in {self.logger_prefix} - __init__ (todo_pk: {todo.todo_pk}): {e}") - - - def reschedule_todo(self): - try: - todo_rescheduler = TodosRescheduler( - self.todo_pk, - self.user_task_execution, - self.knowledge_collector, - self.todo_description, - self.n_scheduled, - self.first_scheduled_date, - self.todo_list) - todo_rescheduler.reschedule_and_save() - except Exception as e: - self.logger.error(f"reschedule_todo: {e}") - EmailService().send_technical_error_email(f"Error in {self.logger_prefix} - todo_pk {self.todo_pk} - reschedule_todo: {e}") - - - - def __get_task_info(self, todo_pk): - try: - session_id, user_task_execution_pk, user_task_pk, task_name_for_system, task_definition_for_system, task_input_values = db.session.query( - MdUserTaskExecution.session_id, MdUserTaskExecution.user_task_execution_pk, MdUserTask.user_task_pk, MdTask.name_for_system, MdTask.definition_for_system, - MdUserTaskExecution.json_input_values) \ - .join(MdUserTask, MdUserTask.task_fk == MdTask.task_pk) \ - .join(MdUserTaskExecution, MdUserTask.user_task_pk == 
MdUserTaskExecution.user_task_fk) \ - .join(MdTodo, MdUserTaskExecution.user_task_execution_pk == MdTodo.user_task_execution_fk) \ - .filter(MdTodo.todo_pk == todo_pk) \ - .first() - - return session_id, user_task_execution_pk, user_task_pk, task_name_for_system, task_definition_for_system, task_input_values - except Exception as e: - raise Exception(f"_get_task_info: {e}") - - def __get_task_result(self, user_task_execution_pk): - try: - task_result = db.session.query(MdProducedTextVersion.production) \ - .join(MdProducedText, MdProducedTextVersion.produced_text_fk == MdProducedText.produced_text_pk) \ - .join(MdUserTaskExecution, - MdProducedText.user_task_execution_fk == MdUserTaskExecution.user_task_execution_pk) \ - .filter(MdUserTaskExecution.user_task_execution_pk == user_task_execution_pk) \ - .order_by(MdProducedTextVersion.creation_date.desc()) \ - .first()[0] - - return task_result - except Exception as e: - raise Exception(f"_get_task_result: {e}") - - def __get_n_scheduled(self, todo_pk): - try: - n_scheduled = db.session.query(MdTodoScheduling).filter(MdTodoScheduling.todo_fk == todo_pk).count() - return n_scheduled - except Exception as e: - raise Exception(f"_get_n_scheduled: {e}") - - def __get_first_scheduled_date(self, todo_pk): - try: - first_scheduled_date = db.session.query(MdTodoScheduling.scheduled_date) \ - .filter(MdTodoScheduling.todo_fk == todo_pk) \ - .order_by(MdTodoScheduling.scheduled_date) \ - .first()[0] - return first_scheduled_date - except Exception as e: - raise Exception(f"_get_first_scheduled_date: {e}") - - - def __get_todolist(self, user_id): - try: - # Subquery to get the latest todo_scheduling for each todo - latest_todo_scheduling = db.session.query( - MdTodoScheduling.todo_fk, - func.max(MdTodoScheduling.scheduled_date).label('latest_scheduled_date')) \ - .group_by(MdTodoScheduling.todo_fk) \ - .subquery() - # Get the current date in UTC - now_utc = datetime.now(timezone.utc).date() - results = db.session.query(MdTodo.description, latest_todo_scheduling.c.latest_scheduled_date) \ - .join(MdUserTaskExecution, MdTodo.user_task_execution_fk == MdUserTaskExecution.user_task_execution_pk) \ - .join(latest_todo_scheduling, MdTodo.todo_pk == latest_todo_scheduling.c.todo_fk) \ - .join(MdUserTask, MdUserTaskExecution.user_task_fk == MdUserTask.user_task_pk) \ - .join(MdUser, MdUserTask.user_id == MdUser.user_id) \ - .filter(MdUser.user_id == user_id) \ - .filter(MdTodo.deleted_by_user.is_(None)) \ - .filter(MdTodo.completed.is_(None)) \ - .filter((latest_todo_scheduling.c.latest_scheduled_date - text( - 'md_user.timezone_offset * interval \'1 minute\'')) >= now_utc) \ - .order_by(latest_todo_scheduling.c.latest_scheduled_date.desc()) \ - .offset(0) \ - .limit(20) \ - .all() - - return [{'description': description, 'scheduled_date': scheduled_date} for description, scheduled_date in results] - except Exception as e: - raise Exception(f"_get_todolist: {e}") diff --git a/background/app/models/documents/document_chunk_manager.py b/background/app/models/documents/document_chunk_manager.py index 909b832f..99e88cdf 100644 --- a/background/app/models/documents/document_chunk_manager.py +++ b/background/app/models/documents/document_chunk_manager.py @@ -1,15 +1,9 @@ import os from datetime import datetime - import requests import tiktoken -from background_logger import BackgroundLogger - - - class DocumentChunkManager: - logger_prefix = "DocumentChunkManager" # Constants CHUNK_SIZE = 200 # The target size of each text chunk in tokens @@ -19,10 +13,9 @@ class 
DocumentChunkManager: def __init__(self): try: - self.logger = BackgroundLogger(f"{DocumentChunkManager.logger_prefix}") self.tokenizer = tiktoken.get_encoding("cl100k_base") # The encoding scheme to use for tokenization except Exception as e: - raise Exception(f"{DocumentChunkManager.logger_prefix} : __init__ : {e}") + raise Exception(f"{self.__class__.__name__} : __init__ : {e}") def _get_text_chunks(self, text, chunk_token_size=None): """ @@ -136,7 +129,7 @@ def create_document_chunks(self, document_pk, text, embeddeding_function, user_i return valid_chunks_pk except Exception as e: - raise Exception(f"{DocumentChunkManager.logger_prefix} : create_document_chunks : {e}") + raise Exception(f"{self.__class__.__name__} : create_document_chunks : {e}") def __add_document_chunk_to_db(self, document_fk, chunk_index, chunk_text, embedding): try: @@ -180,9 +173,7 @@ def update_document_chunks(self, document_pk, text, embeddeding_function, user_i document_chunk_pk = self.__add_document_chunk_to_db(document_pk, i, new_valid_chunks[i], new_embeddeding) except Exception as e: - raise Exception(f"{DocumentChunkManager.logger_prefix} : update_document_chunks : {e}") - - + raise Exception(f"{self.__class__.__name__} : update_document_chunks : {e}") def __update_document_chunk_in_db(self, chunk_pk, chunk_text, embedding): try: diff --git a/background/app/models/documents/document_manager.py b/background/app/models/documents/document_manager.py index cec9825c..b78370d0 100644 --- a/background/app/models/documents/document_manager.py +++ b/background/app/models/documents/document_manager.py @@ -1,20 +1,12 @@ import os import requests from datetime import datetime - from models.documents.document_chunk_manager import DocumentChunkManager - from mojodex_core.llm_engine.providers.model_loader import ModelLoader -from background_logger import BackgroundLogger - - class DocumentManager: - logger_prefix = "DocumentManager" def __init__(self): - self.logger = BackgroundLogger(f"{DocumentManager.logger_prefix}") - self.document_chunk_manager = DocumentChunkManager() def _embedded(self, text, user_id, user_task_execution_pk=None, task_name_for_system=None): @@ -64,14 +56,10 @@ def update_document(self, user_id, document_pk, document_chunks_pks, text, chunk old_chunks_pks=document_chunks_pks) self.__save_document_to_db(document_pk) except Exception as e: - self.logger.error( - f"{DocumentManager.logger_prefix} :: update_document : {e}") - raise Exception( - f"{DocumentManager.logger_prefix} :: update_document : {e}") + raise Exception(f"{self.__class__.__name__} :: update_document : {e}") def __save_document_to_db(self, document_pk): try: - self.logger.debug(f"__save_document_to_db : {document_pk}") uri = f"{os.environ['MOJODEX_BACKEND_URI']}/document" pload = {'datetime': datetime.now().isoformat(), 'document_pk': document_pk, 'update_date': datetime.now().isoformat()} @@ -105,5 +93,4 @@ def retrieve(self, query, user_task_execution_pk, task_name_for_system, top_k=1) return document_chunks except Exception as e: - self.logger.error(f"retrieve : {e}") - return None + raise Exception(f"{self.__class__.__name__} :: retrieve : {e}") diff --git a/background/app/models/documents/website_parser.py b/background/app/models/documents/website_parser.py index fcae55c8..95965782 100644 --- a/background/app/models/documents/website_parser.py +++ b/background/app/models/documents/website_parser.py @@ -1,12 +1,7 @@ -import os - import requests from bs4 import BeautifulSoup from urllib.parse import urljoin from app import 
document_manager - -from background_logger import BackgroundLogger - from mojodex_core.llm_engine.mpt import MPT @@ -17,12 +12,8 @@ class WebsiteParser: website_chunk_validation_mpt_filename = "instructions/is_website_chunk_relevant.mpt" - def __init__(self): - self.logger = BackgroundLogger(f"{WebsiteParser.logger_prefix}") - def __get_all_page_urls(self, base_url): try: - self.logger.debug(f"__get_all_page_urls: {base_url}") # Send a GET request to the base URL response = requests.get(base_url) @@ -47,17 +38,14 @@ def __get_all_page_urls(self, base_url): def __get_webpage_text(self, url, retry=2): try: - self.logger.debug(f"__get_webpage_text: {url}") response = requests.get(url) html_content = response.text soup = BeautifulSoup(html_content, 'html.parser') webpage_text = soup.text.strip() if webpage_text.strip() == "": if retry > 0: - self.logger.warning("šŸŸ  Empty webpage text, retrying...") return self.__get_webpage_text(url, retry - 1) else: - self.logger.warning("šŸ”“ Empty webpage text, giving up...") return None # remove multiple \n by only one webpage_text = "\n".join( @@ -65,26 +53,21 @@ def __get_webpage_text(self, url, retry=2): return webpage_text except Exception as e: - self.logger.error(f"__get_webpage_text: {e}") return None def website_to_doc(self, base_url, all_urls=False): try: - self.logger.debug(f"website_to_doc: {base_url}") # Remove ending "/" from base_url if base_url[-1] == "/": base_url = base_url[:-1] all_page_urls = self.__get_all_page_urls( base_url) if all_urls else [base_url] - self.logger.debug( - f"website_to_doc: found {len(all_page_urls)} pages") text_list = [] for link in all_page_urls[:WebsiteParser.MAX_WEBSITE_PAGES]: text = self.__get_webpage_text(link) if text is not None: text_list.append({"link": link, "text": text}) - self.logger.debug(f"website_to_doc: Done") return text_list except Exception as e: @@ -105,7 +88,6 @@ def update_website_document(self, user_id, base_url, document_pk, document_chunk try: # useful for validation self.company_name = company_name - self.logger.debug(f"update_website: {base_url}") responses = self.website_to_doc(base_url, all_urls=False) document_manager.update_document(user_id, document_pk, document_chunks_pks, responses[0]["text"], chunk_validation_callback=self.__validate_website_chunk) @@ -116,7 +98,6 @@ def update_website_document(self, user_id, base_url, document_pk, document_chunk def create_website_document(self, user_id, base_url, company_name): try: self.company_name = company_name # useful for validation - self.logger.debug(f"update_website: {base_url}") responses = self.website_to_doc(base_url, all_urls=True) for response in responses: document_manager.new_document(user_id, response["text"], response["link"], document_type='webpage', diff --git a/background/app/models/events/calendar_suggestion_notifications_generator.py b/background/app/models/events/calendar_suggestion_notifications_generator.py index cee5308a..7d2ca2ad 100644 --- a/background/app/models/events/calendar_suggestion_notifications_generator.py +++ b/background/app/models/events/calendar_suggestion_notifications_generator.py @@ -1,34 +1,21 @@ -from background_logger import BackgroundLogger from mojodex_core.json_loader import json_decode_retry from mojodex_core.email_sender.email_service import EmailService +from mojodex_core.knowledge_manager import KnowledgeManager from mojodex_core.logging_handler import on_json_error - from models.events.events_generator import EventsGenerator - -from models.knowledge.knowledge_collector import 
KnowledgeCollector - from mojodex_core.llm_engine.mpt import MPT class CalendarSuggestionNotificationsGenerator(EventsGenerator): - logger_prefix = "CalendarSuggestionNotificationsGenerator::" calendar_suggestion_notification_text_mpt_filename = "instructions/calendar_suggestion_reminder_notification.mpt" - def __init__(self): - self.logger = BackgroundLogger( - f"{CalendarSuggestionNotificationsGenerator.logger_prefix}") - self.logger.info("__init__") - - def generate_events(self, mojo_knowledge, calendar_suggestions): + def generate_events(self, calendar_suggestions): try: - self.logger.info(" generate_events") for calendar_suggestion in calendar_suggestions: user_id = calendar_suggestion["user_id"] try: - user_datetime_context = KnowledgeCollector.get_global_context_knowledge( - calendar_suggestion["user_timezone_offset"]) - notification_message = self.__generate_notif_text(mojo_knowledge, user_datetime_context, + notification_message = self._generate_notif_text(KnowledgeManager().mojodex_knowledge, calendar_suggestion["datetime_context"], user_id, calendar_suggestion["user_name"], calendar_suggestion[ @@ -46,16 +33,15 @@ def generate_events(self, mojo_knowledge, calendar_suggestions): event_type="calendar_suggestion_notification", data=data) except Exception as e: EmailService().send_technical_error_email( - f"{self.logger.name} : Error preparing notification for user {user_id}: {e}") + f"{self.__class__.__name__} : generate_events: Error preparing notification for user {user_id}: {e}") except Exception as e: EmailService().send_technical_error_email( - f"{self.logger.name} : Error preparing notifications: {e}") + f"{self.__class__.__name__} : generate_events: Error preparing notifications: {e}") @json_decode_retry(retries=3, required_keys=["title", "message"], on_json_error=on_json_error) - def __generate_notif_text(self, mojo_knowledge, user_datetime_context, user_id, username, user_company_knowledge, + def _generate_notif_text(self, mojo_knowledge, user_datetime_context, user_id, username, user_company_knowledge, user_business_goal, calendar_suggestion, task_name): try: - self.logger.info(f"generate_notif_text for user {user_id}") calendar_suggestion_notification = MPT(CalendarSuggestionNotificationsGenerator.calendar_suggestion_notification_text_mpt_filename, mojo_knowledge=mojo_knowledge, user_datetime_context=user_datetime_context, @@ -75,4 +61,4 @@ def __generate_notif_text(self, mojo_knowledge, user_datetime_context, user_id, # try to load as json to extract title and body return notification_message except Exception as e: - raise Exception(f"generate_notif_text: " + str(e)) + raise Exception(f"_generate_notif_text: {e}") diff --git a/background/app/models/events/daily_emails_generator.py b/background/app/models/events/daily_emails_generator.py deleted file mode 100644 index 0edac079..00000000 --- a/background/app/models/events/daily_emails_generator.py +++ /dev/null @@ -1,107 +0,0 @@ - -import os -from jinja2 import Template - -from background_logger import BackgroundLogger -from mojodex_core.json_loader import json_decode_retry - - -from mojodex_core.email_sender.email_service import EmailService -from mojodex_core.logging_handler import on_json_error - -from models.events.events_generator import EventsGenerator - -from models.knowledge.knowledge_collector import KnowledgeCollector - -from mojodex_core.llm_engine.mpt import MPT - - -class DailyEmailsGenerator(EventsGenerator): - logger_prefix = "DailyEmailsGenerator::" - message_from_mojodex_email = 
"mojodex_core/mails_templates/message_from_mojodex.html" - daily_email_text_mpt_filename = "instructions/daily_emails_text_prompt.mpt" - - reminder_email_type = "reminder_email" - summary_email_type = "summary_email" - - def __init__(self): - self.logger = BackgroundLogger(f"{DailyEmailsGenerator.logger_prefix}") - self.logger.info("__init__") - - def generate_events(self, mojo_knowledge, users): - try: - self.logger.info("generate_events") - for user in users: - user_id, email = user["user_id"], user["email"] - # Temporarily removed because it felt too much to send users an email every day - # if user['n_meeting_minutes_today'] > 0: - # email_body_template = Template("""Hey! - # It was a busy day: - # {{n_meeting_minutes}} meetings - # {%if n_processes_created_today > 0%}{{n_processes_created_today}} followup items{%endif%} - # - # {%if last_three_proactive_followups%}I prepared for you to review and use: - # {%for followup in last_three_proactive_followups%} - # {{followup}}{%endfor%}{%endif%} -# - # {%if additional_feature %}Iā€™m sure I can do more, like {{additional_feature}}.{%else%}I'm always here to help.{%endif%} - # Just ask me.""") - # try: - # email_body = email_body_template.render(n_meeting_minutes=user['n_meeting_minutes_today'], - # n_processes_created_today=user[ - # 'n_processes_created_today'], - # last_three_proactive_followups=user[ - # 'last_three_proactive_followups'], - # additional_feature=user[ - # 'first_enabled_not_executed_task_name']) - - # self.send_event(user_id, message={"subject": "subject", "body": email_body, 'email': email}, - # event_type=DailyEmailsGenerator.summary_email_type) - # except Exception as e: - # send_admin_error_email(f"Error sending daily recap email to {email} : {e}") - if user['n_meeting_minutes_today'] == 0: - user_datetime_context = KnowledgeCollector.get_global_context_knowledge( - user["user_timezone_offset"]) - email_message = self.__generate_emails_text(mojo_knowledge, user_datetime_context, user_id, - user["user_name"], - user["user_company_description"], user["user_goal"], - user["received_reminder_email_yesterday"], - user["language"]) - subject, body = email_message["subject"], email_message["body"] - try: - with open(DailyEmailsGenerator.message_from_mojodex_email, "r") as f: - template = Template(f.read()) - body = template.render( - mojodex_message=body, mojodex_webapp_url=os.environ["MOJODEX_WEBAPP_URI"], button_text="Login") - self.send_event(user_id, message={"subject": subject, "body": body, 'email': email}, - event_type=DailyEmailsGenerator.reminder_email_type) - except Exception as e: - EmailService().send_technical_error_email( - f"Error sending daily remind_email to {email} : {e}") - - except Exception as e: - EmailService().send_technical_error_email( - f"{self.logger.name} : Error preparing emails: {e}") - - @json_decode_retry(retries=3, required_keys=["subject", "body"], on_json_error=on_json_error) - def __generate_emails_text(self, mojo_knowledge, user_datetime_context, user_id, username, user_company_knowledge, - user_business_goal, - received_reminder_email_yesterday, language, retry=2): - try: - self.logger.info(f"generate_emails_text for user {user_id}") - daily_email_text = MPT(DailyEmailsGenerator.daily_email_text_mpt_filename, - mojo_knowledge=mojo_knowledge, - user_datetime_context=user_datetime_context, - username=username, - user_company_knowledge=user_company_knowledge, - user_business_goal=user_business_goal, - received_reminder_email_yesterday=received_reminder_email_yesterday, - language=language 
- ) - - email_message = daily_email_text.run(user_id=user_id, - temperature=1, max_tokens=500, - json_format=True)[0] - return email_message - except Exception as e: - raise Exception(f"generate_notif_text: " + str(e)) diff --git a/background/app/models/events/daily_notifications_generator.py b/background/app/models/events/daily_notifications_generator.py deleted file mode 100644 index 077507b0..00000000 --- a/background/app/models/events/daily_notifications_generator.py +++ /dev/null @@ -1,71 +0,0 @@ -from background_logger import BackgroundLogger -from mojodex_core.json_loader import json_decode_retry - -from mojodex_core.email_sender.email_service import EmailService -from mojodex_core.logging_handler import on_json_error -from models.events.events_generator import EventsGenerator -from models.knowledge.knowledge_collector import KnowledgeCollector - -from mojodex_core.llm_engine.mpt import MPT - - -class DailyNotificationsGenerator(EventsGenerator): - logger_prefix = "DailyNotificationsGenerator::" - daily_notification_text_mpt_filename = "instructions/daily_notification_text.mpt" - - def __init__(self): - self.logger = BackgroundLogger( - f"{DailyNotificationsGenerator.logger_prefix}") - self.logger.info("__init__") - - def generate_events(self, mojo_knowledge, users): - try: - self.logger.info(" generate_events") - for user in users: - user_id = user["user_id"] - try: - user_datetime_context = KnowledgeCollector.get_global_context_knowledge( - user["user_timezone_offset"]) - notification_json = self.__generate_notif_text(mojo_knowledge, user_datetime_context, - user_id, user["user_name"], - user["user_company_description"], - user["user_goal"], - user["new_todos_today"], - user["language"]) - notification_title, notification_body = notification_json[ - "title"], notification_json["message"] - data = {"user_id": user_id, "type": "todos"} - - self.send_event(user_id, message={"title": notification_title, "body": notification_body}, - event_type="daily_notification", data=data) - except Exception as e: - EmailService().send_technical_error_email( - f"{self.logger.name} : Error preparing notification for user {user_id}: {e}") - except Exception as e: - EmailService().send_technical_error_email( - f"{self.logger.name} : Error preparing notifications: {e}") - - @json_decode_retry(retries=3, required_keys=["title", "message"], on_json_error=on_json_error) - def __generate_notif_text(self, mojo_knowledge, user_datetime_context, user_id, username, user_company_knowledge, - user_business_goal, - new_todos_today, language, retry=2): - try: - self.logger.info(f"generate_notif_text for user {user_id}") - daily_notification_text = MPT(DailyNotificationsGenerator.daily_notification_text_mpt_filename, - mojo_knowledge=mojo_knowledge, - user_datetime_context=user_datetime_context, - username=username, - user_company_knowledge=user_company_knowledge, - user_business_goal=user_business_goal, - new_todos_today=new_todos_today, - language=language - ) - - notification_message = daily_notification_text.run(user_id=user_id, - temperature=0, - json_format=True, - max_tokens=50)[0] - - return notification_message - except Exception as e: - raise Exception(f"generate_notif_text: " + str(e)) diff --git a/background/app/models/events/todo_daily_emails_generator.py b/background/app/models/events/todo_daily_emails_generator.py index c5ebd33f..ae1a0688 100644 --- a/background/app/models/events/todo_daily_emails_generator.py +++ b/background/app/models/events/todo_daily_emails_generator.py @@ -1,40 +1,26 @@ import 
os - from jinja2 import Template -from background_logger import BackgroundLogger - - from mojodex_core.email_sender.email_service import EmailService +from mojodex_core.knowledge_manager import KnowledgeManager from models.events.events_generator import EventsGenerator from mojodex_core.json_loader import json_decode_retry -from mojodex_core.logging_handler import on_json_error - -from models.knowledge.knowledge_collector import KnowledgeCollector - +from mojodex_core.logging_handler import log_error, on_json_error from mojodex_core.llm_engine.mpt import MPT class TodoDailyEmailsGenerator(EventsGenerator): - logger_prefix = "TodoDailyEmailsGenerator::" + message_from_mojodex_email = "mojodex_core/mails_templates/message_from_mojodex.html" todo_daily_email_text_mpt_filename = "instructions/todo_daily_emails_text_prompt.mpt" todo_daily_email_type = "todo_daily_email" - def __init__(self): - self.logger = BackgroundLogger( - f"{TodoDailyEmailsGenerator.logger_prefix}") - self.logger.info("__init__") - - def generate_events(self, mojo_knowledge, users): + def generate_events(self, users): try: - self.logger.info("generate_events") for user in users: user_id, email = user["user_id"], user["email"] - user_datetime_context = KnowledgeCollector.get_global_context_knowledge( - user["user_timezone_offset"]) - email_json = self.__generate_emails_text(mojo_knowledge, user_datetime_context, user_id, + email_json = self._generate_emails_text(KnowledgeManager().mojodex_knowledge, user["datetime_context"], user_id, user["username"], user["company_description"], user["goal"], user["language"], @@ -52,17 +38,16 @@ def generate_events(self, mojo_knowledge, users): event_type=TodoDailyEmailsGenerator.todo_daily_email_type) except Exception as e: EmailService().send_technical_error_email( - f"Error sending todo daily remind_email to {email} : {e}") + f"{self.__class__.__name__} : generate_events : Error sending todo daily remind_email to {email} : {e}") except Exception as e: EmailService().send_technical_error_email( - f"{self.logger.name} : Error preparing emails: {e}") + f"{self.__class__.__name__} : generate_events : Error preparing emails: {e}") @json_decode_retry(retries=3, required_keys=["subject", "body"], on_json_error=on_json_error) - def __generate_emails_text(self, mojo_knowledge, user_datetime_context, user_id, username, user_company_knowledge, + def _generate_emails_text(self, mojo_knowledge, user_datetime_context, user_id, username, user_company_knowledge, user_business_goal, language, today_todo_list, rescheduled_todos, deleted_todos): try: - self.logger.info(f"generate_emails_text for user {user_id}") todo_daily_email_text = MPT(TodoDailyEmailsGenerator.todo_daily_email_text_mpt_filename, mojo_knowledge=mojo_knowledge, user_datetime_context=user_datetime_context, @@ -81,4 +66,4 @@ def __generate_emails_text(self, mojo_knowledge, user_datetime_context, user_id, json_format=True)[0] return email_message_json except Exception as e: - raise Exception(f"generate_email_text: " + str(e)) + raise Exception(f"_generate_emails_text: {e}") diff --git a/background/app/models/knowledge/knowledge_collector.py b/background/app/models/knowledge/knowledge_collector.py deleted file mode 100644 index 9c16acc9..00000000 --- a/background/app/models/knowledge/knowledge_collector.py +++ /dev/null @@ -1,55 +0,0 @@ -from datetime import datetime, timedelta, timezone - -from mojodex_core.logging_handler import MojodexCoreLogger - -from mojodex_core.llm_engine.mpt import MPT - -class KnowledgeCollector: - - def 
__init__(self, user_name, user_timezone_offset, user_summary, user_company_knowledge, user_business_goal): - self.mojo_knowledge = self.get_mojo_knowledge() - self.user_datetime_context = self.get_global_context_knowledge(user_timezone_offset if user_timezone_offset else 0) - self.user_timezone_offset = user_timezone_offset - self.user_name = user_name - self.user_summary = user_summary - self.user_company_knowledge = user_company_knowledge - self.user_business_goal= user_business_goal - - self.logger = MojodexCoreLogger("KnowledgeCollector") - - @staticmethod - def get_mojo_knowledge(): - with open("mojodex_core/prompts/knowledge/mojo_knowledge.txt", 'r') as f: - return f.read() - - @staticmethod - def get_global_context_knowledge(timezone_offset=0): - """Returns the global context knowledge for the current time - :param timezone_offset: offset in minutes to remove from UTC time to get user time""" - timestamp = datetime.now(timezone.utc) - timestamp -= timedelta(minutes=timezone_offset) - user_datetime_context = MPT("mojodex_core/instructions/user_datetime_context.mpt", - weekday=timestamp.strftime("%A"), - datetime=timestamp.strftime("%d %B %Y"), - time=timestamp.strftime("%H:%M")) - return user_datetime_context.prompt - - @property - def mojodex_knowledge(self): - return MPT("mojodex_core/instructions/mojodex_knowledge.mpt") - - @property - def localized_context(self): - try: - timestamp = datetime.now(timezone.utc) - if self.user_timezone_offset: - timestamp -= timedelta(minutes=self.user_timezone_offset) - return MPT("mojodex_core/instructions/user_datetime_context.mpt", - weekday=timestamp.strftime("%A"), - datetime=timestamp.strftime("%d %B %Y"), - time=timestamp.strftime("%H:%M")) - except Exception as e: - self.logger.error(f"Error getting localized context: {e}") - return KnowledgeCollector.get_global_context_knowledge(self.user_timezone_offset) - - diff --git a/background/app/models/knowledge/user_knowledge_extractor.py b/background/app/models/knowledge/user_knowledge_extractor.py deleted file mode 100644 index f45f5f77..00000000 --- a/background/app/models/knowledge/user_knowledge_extractor.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -import requests -from datetime import datetime - -from background_logger import BackgroundLogger - -from mojodex_core.llm_engine.mpt import MPT - - -class UserKnowledgeExtractor: - logger_prefix = "UserKnowledgeExtractor::" - - extract_user_knowledge_mpt_filename = "instructions/extract_user_knowledge.mpt" - - def __init__(self, session_id, user_id, conversation): - self.logger = BackgroundLogger( - f"{UserKnowledgeExtractor.logger_prefix} - session_id {session_id}") - self.session_id = session_id - self.user_id = user_id - self.conversation = conversation - - def update_user_summary(self, current_summary): - try: - - extract_user_knowledge = MPT(UserKnowledgeExtractor.extract_user_knowledge_mpt_filename, - conversation=self.conversation, existing_summary=current_summary) - - response = extract_user_knowledge.run(user_id=self.user_id, - temperature=0, max_tokens=200) - new_summary = response[0].strip() - - self._save_user_summary(new_summary) - return new_summary - except Exception as e: - raise Exception(f"update_user_summary :: {e}") - - def _save_user_summary(self, summary): - try: - uri = f"{os.environ['MOJODEX_BACKEND_URI']}/user_summary" - pload = {'datetime': datetime.now().isoformat( - ), 'session_id': self.session_id, 'summary': summary} - headers = { - 'Authorization': os.environ['MOJODEX_BACKGROUND_SECRET'], 'Content-Type': 
'application/json'}
-            internal_request = requests.post(uri, json=pload, headers=headers)
-            if internal_request.status_code != 200:
-                self.logger.error(
-                    f"_save_user_summary :: {internal_request.text}")
-                return {"error": f"Error while calling background _save_user_summary : {internal_request.text}"}, 400
-        except Exception as e:
-            raise Exception(f"_save_user_summary :: {e}")
diff --git a/background/app/models/todos/todos_creator.py b/background/app/models/todos/todos_creator.py
index 59111808..8208e39c 100644
--- a/background/app/models/todos/todos_creator.py
+++ b/background/app/models/todos/todos_creator.py
@@ -1,98 +1,125 @@
 import os
 from datetime import datetime
-
 import requests
+from sqlalchemy import func
+from mojodex_core.db import with_db_session
+from mojodex_core.entities.db_base_entities import MdTodo, MdTodoScheduling
+from mojodex_core.entities.user import User
+from mojodex_core.entities.user_task import UserTask
+from mojodex_core.entities.user_task_execution import UserTaskExecution
 from mojodex_core.json_loader import json_decode_retry
-from background_logger import BackgroundLogger
 from mojodex_core.logging_handler import on_json_error
-
-
+from mojodex_core.knowledge_manager import KnowledgeManager
 from mojodex_core.email_sender.email_service import EmailService
 from mojodex_core.llm_engine.mpt import MPT


 class TodosCreator:
-    logger_prefix = "TodosCreator"
     todos_url = "/todos"
     todos_extractor_mpt_filename = "instructions/extract_todos.mpt"

-    def __init__(self, user_task_execution, knowledge_collector, language, conversation,
-                 linked_user_task_executions_todos):
-        self.user_task_execution = user_task_execution
-        self.knowledge_collector = knowledge_collector
-        self.language = language
-        self.conversation = conversation
-        self.linked_user_task_executions_todos = linked_user_task_executions_todos
-        self.logger = BackgroundLogger(
-            f"{TodosCreator.logger_prefix} - user_task_execution_pk {user_task_execution.user_task_execution_pk}")
+    def __init__(self, user_task_execution_pk):
+        self.user_task_execution_pk = user_task_execution_pk
+
     def extract_and_save(self):
+        """
+        Extracts todos from the user_task_execution and sends them to mojodex-backend to save them in the db
+        """
         try:
-            self.logger.debug("extract_and_save")
-            json_todos = self.__extract()
-            self.logger.debug(f"Extracted - {len(json_todos['todos'])} todos")
+            collected_data = self._collect_data()
+            json_todos = self._extract(*collected_data)
             for todo in json_todos['todos']:
                 if todo['mentioned_as_todo'].strip().lower() != "yes":
-                    self.logger.debug(
-                        f"extract_and_save: {todo['todo_definition']} not mentioned as todo - Not saving to db")
                     if todo['mentioned_as_todo'].strip().lower() != 'no':
                         EmailService().send_technical_error_email(
-                            f"(Warning) Extracting todos for user_task_execution_pk {self.user_task_execution.user_task_execution_pk} : {todo['mentioned_as_todo']} is not a valid value for mentioned_as_todo")
+                            f"{self.__class__.__name__} - (Warning) Extracting todos for user_task_execution_pk {self.user_task_execution_pk} : {todo['mentioned_as_todo']} is not a valid value for mentioned_as_todo")
                     continue
-                # check reminder_date is a DateTime in format yyyy-mm-dd
                 try:
+                    # check reminder_date is a DateTime in format yyyy-mm-dd
                     datetime.strptime(todo['due_date'], "%Y-%m-%d")
-                    save_to_db = True
                 except ValueError:
-                    self.logger.error(
-                        f"Error extracting todos : Invalid reminder_date {todo['due_date']} - Not saving to db")
-                    save_to_db = False
-                if save_to_db:
-                    self.__save_to_db(
-                        todo['todo_definition'], todo['due_date'])
-                    self.logger.debug(f"Saved to db")
+                    # Invalid due date, not saving to db, just skip it
+                    continue
+                self._save_to_db(todo['todo_definition'], todo['due_date'])
+            self._mark_todo_extracted()
         except Exception as e:
-            raise Exception(f"{self.logger_prefix} : extract_and_save: {e}")
+            EmailService().send_technical_error_email(f"{self.__class__.__name__} : extract_and_save: {e}")
+
+    @with_db_session
+    def _collect_data(self, db_session):
+        """
+        Collects the data needed from the db to run the todo-extraction prompt
+        """
+        try:
+            user_task_execution: UserTaskExecution
+            user: User
+            user_task_execution, user = db_session.query(UserTaskExecution, User) \
+                .join(UserTask, UserTask.user_task_pk == UserTaskExecution.user_task_fk) \
+                .join(User, User.user_id == UserTask.user_id) \
+                .filter(UserTaskExecution.user_task_execution_pk == self.user_task_execution_pk) \
+                .first()
+            # Subquery to get the latest todo_scheduling for each todo
+            latest_todo_scheduling = db_session.query(
+                MdTodoScheduling.todo_fk,
+                func.max(MdTodoScheduling.scheduled_date).label('latest_scheduled_date')) \
+                .group_by(MdTodoScheduling.todo_fk) \
+                .subquery()
+
+            todos = db_session.query(MdTodo.description, latest_todo_scheduling.c.latest_scheduled_date) \
+                .join(latest_todo_scheduling, MdTodo.todo_pk == latest_todo_scheduling.c.todo_fk) \
+                .filter(MdTodo.user_task_execution_fk.in_([previous_related_user_task_execution.user_task_execution_pk for previous_related_user_task_execution in user_task_execution.previous_related_user_task_executions])) \
+                .all()
+
+            related_previous_user_task_executions_todos = [{"scheduled_date": str(scheduled_date), "description": description} for description, scheduled_date in todos]
+
+            task_result = f"{user_task_execution.last_produced_text_version.title}\n{user_task_execution.last_produced_text_version.production}"
+
+            return user.user_id, user.datetime_context, user.name, user.goal, user.company_description, user_task_execution.task.name_for_system, user_task_execution.task.definition_for_system, user_task_execution.session.get_conversation_as_string(with_tags=False), task_result, related_previous_user_task_executions_todos, user.language_code
+        except Exception as e:
+            raise Exception(f"_collect_data: {e}")

     @json_decode_retry(retries=3, required_keys=['todos'], on_json_error=on_json_error)
-    def __extract(self):
-        self.logger.debug(f"_extract")
+    def _extract(self, user_id, user_datetime_context, user_name, user_business_goal, user_company_description, task_name_for_system,
+                 task_definition_for_system, task_execution_conversation, task_execution_result, related_previous_user_task_executions_todos, language):
         try:
             todos_extractor = MPT(TodosCreator.todos_extractor_mpt_filename,
-                                  mojo_knowledge=self.knowledge_collector.mojodex_knowledge,
-                                  user_datetime_context=self.knowledge_collector.localized_context,
-                                  username=self.knowledge_collector.user_name,
-                                  user_business_goal=self.knowledge_collector.user_business_goal,
-                                  user_company_knowledge=self.knowledge_collector.user_company_knowledge,
-                                  task_name=self.user_task_execution.task_name,
-                                  task_definition=self.user_task_execution.task_definition,
-                                  task_conversation=self.conversation,
-                                  task_result=self.user_task_execution.task_result,
-                                  linked_user_task_executions_todos=self.linked_user_task_executions_todos,
-                                  language=self.language
+                                  mojo_knowledge=KnowledgeManager().mojodex_knowledge,
+                                  user_datetime_context=user_datetime_context,
+                                  username=user_name,
+                                  user_business_goal=user_business_goal,
+                                  user_company_knowledge=user_company_description,
+                                  task_name=task_name_for_system,
+                                  task_definition=task_definition_for_system,
+                                  task_conversation=task_execution_conversation,
+                                  task_result=task_execution_result,
+                                  linked_user_task_executions_todos=related_previous_user_task_executions_todos,
+                                  language=language
                                   )
-            results = todos_extractor.run(user_id=self.user_task_execution.user_id,
+            results = todos_extractor.run(user_id=user_id,
                                           temperature=0, max_tokens=500, json_format=True,
-                                          user_task_execution_pk=self.user_task_execution.user_task_execution_pk,
-                                          task_name_for_system=self.user_task_execution.task_name)
+                                          user_task_execution_pk=self.user_task_execution_pk,
+                                          task_name_for_system=task_name_for_system)
             result = results[0]
             return result
-
         except Exception as e:
             raise Exception(f"_extract :: {e}")

-    def __save_to_db(self, description, due_date):
+
+    def _save_to_db(self, description, due_date):
+        """
+        Save todos in db by sending them to mojodex-backend through the appropriate route because the backend is solely responsible for writing to the db
+        """
         try:
-            self.logger.debug(f"_save_to_db: {description}")
             uri = f"{os.environ['MOJODEX_BACKEND_URI']}/{TodosCreator.todos_url}"
-            # Save follow-ups in db => send to mojodex-backend
+
             pload = {'datetime': datetime.now().isoformat(), 'description': description, 'due_date': due_date,
-                     'user_task_execution_fk': self.user_task_execution.user_task_execution_pk}
+                     'user_task_execution_fk': self.user_task_execution_pk}
             headers = {
                 'Authorization': os.environ['MOJODEX_BACKGROUND_SECRET'], 'Content-Type': 'application/json'}
             internal_request = requests.put(uri, json=pload, headers=headers)
@@ -100,4 +127,21 @@ def __save_to_db(self, description, due_date):
                 raise Exception(str(internal_request.json()))
             return internal_request.json()["todo_pk"]
         except Exception as e:
-            raise Exception(f"__save_to_db: {e}")
+            raise Exception(f"_save_to_db: {e}")
+
+    def _mark_todo_extracted(self):
+        """
+        Mark the user_task_execution's todos as extracted by sending it to mojodex-backend through the appropriate route
+        """
+        try:
+            uri = f"{os.environ['MOJODEX_BACKEND_URI']}/extract_todos"
+            # Mark todos as extracted => send to mojodex-backend
+            pload = {'datetime': datetime.now().isoformat(),
+                     'user_task_execution_fk': self.user_task_execution_pk}
+            headers = {'Authorization': os.environ['MOJODEX_BACKGROUND_SECRET'], 'Content-Type': 'application/json'}
+            internal_request = requests.put(uri, json=pload, headers=headers)
+            if internal_request.status_code != 200:
+                raise Exception(str(internal_request.json()))
+            return internal_request.json()["user_task_execution_pk"]
+        except Exception as e:
+            raise Exception(f"_mark_todo_extracted: {e}")
\ No newline at end of file
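Note on the pattern introduced here: TodosCreator is now built from a primary key only and gathers its own context inside _collect_data (decorated with with_db_session, which supplies the db_session argument). The background route, shown later in this diff (routes/extract_todos.py), simply hands the work to the executor. A minimal usage sketch, restating the route-side code from that hunk:

    # sketch of the route-side usage (see the extract_todos.py hunk below)
    todos_creator = TodosCreator(user_task_execution_pk)
    executor.submit(todos_creator.extract_and_save)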
diff --git a/background/app/models/todos/todos_rescheduler.py b/background/app/models/todos/todos_rescheduler.py
index 4d52a6af..74af9f27 100644
--- a/background/app/models/todos/todos_rescheduler.py
+++ b/background/app/models/todos/todos_rescheduler.py
@@ -1,88 +1,100 @@
 import os
 from datetime import datetime
-
 import requests
+from mojodex_core.db import with_db_session
+from mojodex_core.entities.todo import Todo
+from mojodex_core.entities.user import User
+from mojodex_core.entities.user_task import UserTask
+from mojodex_core.entities.user_task_execution import UserTaskExecution
 from mojodex_core.json_loader import json_decode_retry
-from background_logger import BackgroundLogger
+
 from mojodex_core.logging_handler import on_json_error
 from mojodex_core.email_sender.email_service import EmailService
+from mojodex_core.knowledge_manager import KnowledgeManager
 from mojodex_core.llm_engine.mpt import MPT


 class TodosRescheduler:
-    logger_prefix = "TodosRescheduler"
     todos_scheduling_url = "/todos_scheduling"
     todos_rescheduler_mpt_filename = "instructions/reschedule_todo.mpt"

-    def __init__(self, todo_pk, user_task_execution, knowledge_collector, todo_description, n_scheduled,
-                 first_scheduled_date, todo_list):
+    def __init__(self, todo_pk):
         self.todo_pk = todo_pk
-        self.user_task_execution = user_task_execution
-        self.knowledge_collector = knowledge_collector
-        self.todo_description = todo_description
-        self.n_scheduled = n_scheduled
-        self.first_scheduled_date = first_scheduled_date
-        self.todo_list = todo_list
-        self.logger = BackgroundLogger(
-            f"{TodosRescheduler.logger_prefix} - todo_pk {todo_pk}")

     def reschedule_and_save(self):
         try:
-            json_result = self.__reschedule()
-            # check reminder_date is a DateTime in format yyyy-mm-dd
+            collected_data = self._collect_data()
+            json_result = self._reschedule(*collected_data)
             try:
+                # check reminder_date is a DateTime in format yyyy-mm-dd
                 datetime.strptime(json_result['reschedule_date'], "%Y-%m-%d")
-                save_to_db = True
             except ValueError:
-                self.logger.error(
-                    f"Error extracting todos : Invalid reschedule_date {json_result['reschedule_date']} - Not saving to db")
-                EmailService().send_technical_error_email(f"Error in {self.logger_prefix} - reschedule_and_save: Invalid reschedule_date {json_result['reschedule_date']} - Not saving to db."
+                EmailService().send_technical_error_email(f"Error in {self.__class__.__name__} - reschedule_and_save: Invalid reschedule_date {json_result['reschedule_date']} - Not saving to db."
                                                           f"This is not blocking, only this todo {self.todo_pk} will not be rescheduled.")
-                save_to_db = False
-            if save_to_db:
-                self.__save_to_db(
-                    json_result['argument'], json_result['reschedule_date'])
+                return
+
+            self._save_to_db(json_result['argument'], json_result['reschedule_date'])
+        except Exception as e:
+            EmailService().send_technical_error_email(f"{self.__class__.__name__} : reschedule_and_save: {e}")
+
+    @with_db_session
+    def _collect_data(self, db_session):
+        try:
+            todo: Todo
+            user_task_execution: UserTaskExecution
+            user: User
+            todo, user_task_execution, user = db_session.query(Todo, UserTaskExecution, User) \
+                .join(UserTaskExecution, UserTaskExecution.user_task_execution_pk == Todo.user_task_execution_fk) \
+                .join(UserTask, UserTask.user_task_pk == UserTaskExecution.user_task_fk) \
+                .join(User, User.user_id == UserTask.user_id) \
+                .filter(Todo.todo_pk == self.todo_pk) \
+                .first()
+            task_result = f"{user_task_execution.last_produced_text_version.title}\n{user_task_execution.last_produced_text_version.production}"
+
+            return user.user_id, user_task_execution.user_task_execution_pk, user_task_execution.task.name_for_system, user.datetime_context, user.name, user.goal, user.company_description, user_task_execution.task.definition_for_system, task_result, todo.description, user.todo_list, todo.n_times_scheduled
+
         except Exception as e:
-            raise Exception(f"{self.logger_prefix} : reschedule_and_save: {e}")
+            raise Exception(f"_collect_data :: {e}")

     @json_decode_retry(retries=3, required_keys=['reschedule_date', 'argument'], on_json_error=on_json_error)
-    def __reschedule(self):
-        self.logger.debug(f"_reschedule")
+    def _reschedule(self, user_id, user_task_execution_pk, task_name_for_system, user_datetime_context, username, user_business_goal,
+                    user_company_knowledge, task_definition_for_system, task_result, todo_definition, todo_list, n_scheduled):
         try:
             todos_rescheduler = MPT(TodosRescheduler.todos_rescheduler_mpt_filename,
-                                    mojo_knowledge=self.knowledge_collector.mojodex_knowledge,
-                                    user_datetime_context=self.knowledge_collector.localized_context,
-                                    username=self.knowledge_collector.user_name,
-                                    user_business_goal=self.knowledge_collector.user_business_goal,
-                                    user_company_knowledge=self.knowledge_collector.user_company_knowledge,
-                                    task_name=self.user_task_execution.task_name,
-                                    task_definition=self.user_task_execution.task_definition,
-                                    task_result=self.user_task_execution.task_result,
-                                    todo_definition=self.todo_description,
-                                    todo_list=self.todo_list,
-                                    n_scheduled=self.n_scheduled
+                                    mojo_knowledge=KnowledgeManager().mojodex_knowledge,
+                                    user_datetime_context=user_datetime_context,
+                                    username=username,
+                                    user_business_goal=user_business_goal,
+                                    user_company_knowledge=user_company_knowledge,
+                                    task_name=task_name_for_system,
+                                    task_definition=task_definition_for_system,
+                                    task_result=task_result,
+                                    todo_definition=todo_definition,
+                                    todo_list=todo_list,
+                                    n_scheduled=n_scheduled
                                     )
-            results = todos_rescheduler.run(user_id=self.user_task_execution.user_id,
-                                            temperature=0, max_tokens=500, json_format=True,
-                                            user_task_execution_pk=self.user_task_execution.user_task_execution_pk,
-                                            task_name_for_system=self.user_task_execution.task_name)
+            results = todos_rescheduler.run(user_id=user_id,
+                                            temperature=0, max_tokens=500, json_format=True,
+                                            user_task_execution_pk=user_task_execution_pk,
+                                            task_name_for_system=task_name_for_system)
+
             result = results[0]
             return result
-
         except Exception as e:
-            raise Exception(f"_extract :: {e}")
+            raise Exception(f"_reschedule :: {e}")

-    def __save_to_db(self, argument, reschedule_date):
+    def _save_to_db(self, argument, reschedule_date):
+        """
+        Save the new todo due-date in db by sending it to mojodex-backend through the appropriate route because the backend is solely responsible for writing to the db
+        """
         try:
-            self.logger.debug(f"_save_to_db: {reschedule_date}")
             uri = f"{os.environ['MOJODEX_BACKEND_URI']}/{TodosRescheduler.todos_scheduling_url}"
-            # Save follow-ups in db => send to mojodex-backend
             pload = {'datetime': datetime.now().isoformat(), 'argument': argument, 'reschedule_date': reschedule_date,
                      'todo_fk': self.todo_pk}
             headers = {
@@ -92,4 +104,4 @@ def __save_to_db(self, argument, reschedule_date):
                 raise Exception(str(internal_request.json()))
             return
         except Exception as e:
-            raise Exception(f"__save_to_db: {e}")
+            raise Exception(f"_save_to_db: {e}")
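TodosRescheduler follows the same shape as TodosCreator: the route (reschedule_todo.py, later in this diff) passes only todo_pk, and the constructor arguments the class used to take (todo_description, n_scheduled, first_scheduled_date, todo_list) are now resolved inside _collect_data from the new entity properties Todo.n_times_scheduled and User.todo_list added further down. Route-side usage, restated from that hunk:

    todos_rescheduler = TodosRescheduler(todo_pk)
    executor.submit(todos_rescheduler.reschedule_and_save)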
diff --git a/background/app/models/user_task_execution.py b/background/app/models/user_task_execution.py
deleted file mode 100644
index a233ffa1..00000000
--- a/background/app/models/user_task_execution.py
+++ /dev/null
@@ -1,14 +0,0 @@
-class UserTaskExecution:
-
-    logger_prefix = "UserTaskExecution::"
-    def __init__(self, session_id, user_task_execution_pk, task_name, task_definition, json_input_values, task_result, user_id):
-        try:
-            self.session_id = session_id
-            self.user_task_execution_pk = user_task_execution_pk
-            self.task_name = task_name
-            self.task_definition = task_definition
-            self.json_input_values = json_input_values
-            self.task_result = task_result
-            self.user_id=user_id
-        except Exception as e:
-            raise Exception(f"{UserTaskExecution.logger_prefix} __init__ :: {e}")
\ No newline at end of file
diff --git a/background/app/routes/event_generation.py b/background/app/routes/event_generation.py
index 97eb973e..b77dca1f 100644
--- a/background/app/routes/event_generation.py
+++ 
b/background/app/routes/event_generation.py @@ -1,14 +1,7 @@ from flask import request from flask_restful import Resource -from app import db, executor - -from models.events.daily_notifications_generator import DailyNotificationsGenerator -from models.events.daily_emails_generator import DailyEmailsGenerator - -from models.knowledge.knowledge_collector import KnowledgeCollector - +from app import executor from models.events.todo_daily_emails_generator import TodoDailyEmailsGenerator - from models.events.calendar_suggestion_notifications_generator import CalendarSuggestionNotificationsGenerator @@ -22,26 +15,14 @@ def post(self): return {"error": "invalid inputs"}, 400 try: - if event_type == 'daily_notifications': - events_generator = DailyNotificationsGenerator() - elif event_type == 'daily_emails': - events_generator = DailyEmailsGenerator() - elif event_type == 'todo_daily_emails': + if event_type == 'todo_daily_emails': events_generator = TodoDailyEmailsGenerator() elif event_type == 'calendar_suggestion_notifications': events_generator = CalendarSuggestionNotificationsGenerator() else: raise Exception(f"Unknown event type {event_type}") - mojo_knowledge = KnowledgeCollector.get_mojo_knowledge() - - def run_generate_events(events_generator, mojo_knowledge, data): - try: - events_generator.generate_events(mojo_knowledge, data) - except Exception as err: - print("šŸ”“" + str(err)) - - executor.submit(run_generate_events, events_generator, mojo_knowledge, data) + executor.submit(events_generator.generate_events, data) return {"success": "Process started"}, 200 except Exception as e: return {"error": f"Error generating events : {e}"}, 404 diff --git a/background/app/routes/extract_todos.py b/background/app/routes/extract_todos.py index 98e97edb..50d649fb 100644 --- a/background/app/routes/extract_todos.py +++ b/background/app/routes/extract_todos.py @@ -1,22 +1,13 @@ +from models.todos.todos_creator import TodosCreator from flask import request from flask_restful import Resource from app import db, executor -from mojodex_core.entities.db_base_entities import * - -from background_logger import BackgroundLogger - -from models.cortex.extract_todos_cortex import ExtractTodosCortex - +from mojodex_core.entities.db_base_entities import MdUserTaskExecution class ExtractTodos(Resource): - logger_prefix = "ExtractTodos::" - - def __init__(self): - self.logger = BackgroundLogger(f"{ExtractTodos.logger_prefix}") def post(self): try: - self.logger.debug(f"POST /extract_todos") timestamp = request.json['datetime'] user_task_execution_pk = request.json['user_task_execution_pk'] except Exception: @@ -24,21 +15,13 @@ def post(self): try: # check user_task_execution exists - self.logger.debug(f"šŸŸ¢ POST /extract_todos - user_task_execution_pk {user_task_execution_pk}") user_task_execution = db.session.query(MdUserTaskExecution).filter(MdUserTaskExecution.user_task_execution_pk == user_task_execution_pk).first() - self.logger.debug(f"šŸŸ¢ POST /extract_todos - user_task_execution retrieved from db") if user_task_execution is None: return {"error": "user_task_execution not found"}, 404 - extract_todos_cortex = ExtractTodosCortex(user_task_execution) - self.logger.debug(f"šŸŸ¢ POST /extract_todos - extract_todos_cortex created") - def run_extract_todos_cortex(cortex): - try: - cortex.extract_todos() - except Exception as err: - print("šŸ”“" + str(err)) + todos_creator = TodosCreator(user_task_execution_pk) - executor.submit(run_extract_todos_cortex, extract_todos_cortex) + 
executor.submit(todos_creator.extract_and_save) return {"success": "Process started"}, 200 except Exception as e: return {"error": f"Error in extract todos route : {e}"}, 404 diff --git a/background/app/routes/reschedule_todo.py b/background/app/routes/reschedule_todo.py index aaa58528..00502a35 100644 --- a/background/app/routes/reschedule_todo.py +++ b/background/app/routes/reschedule_todo.py @@ -1,23 +1,13 @@ +from models.todos.todos_rescheduler import TodosRescheduler from flask import request from flask_restful import Resource from app import db, executor -from mojodex_core.entities.db_base_entities import * - -from background_logger import BackgroundLogger - - -from models.cortex.reschedule_todo_cortex import RescheduleTodoCortex +from mojodex_core.entities.db_base_entities import MdTodo class RescheduleTodo(Resource): - logger_prefix = "RescheduleTodo::" - - def __init__(self): - self.logger = BackgroundLogger(f"{RescheduleTodo.logger_prefix}") - def post(self): try: - self.logger.debug(f"POST /reschedule_todo") timestamp = request.json['datetime'] todo_pk = request.json['todo_pk'] except Exception: @@ -25,21 +15,13 @@ def post(self): try: # check user_task_execution exists - self.logger.debug(f"šŸŸ¢ POST /reschedule_todo - todo_pk {todo_pk}") todo = db.session.query(MdTodo).filter(MdTodo.todo_pk == todo_pk).first() - self.logger.debug(f"šŸŸ¢ POST /reschedule_todo - todo retrieved from db") if todo is None: return {"error": "todo not found"}, 404 - reschedule_todos_cortex = RescheduleTodoCortex(todo) - self.logger.debug(f"šŸŸ¢ POST /reschedule_todo - reschedule_todos_cortex created") - def run_reschedule_todos_cortex(cortex): - try: - cortex.reschedule_todo() - except Exception as err: - print("šŸ”“" + str(err)) + todos_rescheduler = TodosRescheduler(todo_pk) - executor.submit(run_reschedule_todos_cortex, reschedule_todos_cortex) + executor.submit(todos_rescheduler.reschedule_and_save) return {"success": "Process started"}, 200 except Exception as e: return {"error": f"Error in reschedule todos route : {e}"}, 404 diff --git a/background/app/routes/update_document.py b/background/app/routes/update_document.py index 9c3b4008..99d2385c 100644 --- a/background/app/routes/update_document.py +++ b/background/app/routes/update_document.py @@ -5,6 +5,7 @@ from models.documents.website_parser import WebsiteParser from app import executor +from mojodex_core.logging_handler import log_error class UpdateDocument(Resource): @@ -42,10 +43,13 @@ def post(self): document_chunks_pks = [document_chunk_pk[0] for document_chunk_pk in document_chunks_pks] def launch_document_update(document_name, document_type, document_manager_app, user_id, edition, document_chunks_pks, company_name, website_parser): - if document_type == "learned_by_mojo": - document_manager_app.update_document(user_id, document_pk, document_chunks_pks, edition) - elif document_type == "webpage": - website_parser.update_website_document( user_id, document_name, document_pk, document_chunks_pks, company_name) + try: + if document_type == "learned_by_mojo": + document_manager_app.update_document(user_id, document_pk, document_chunks_pks, edition) + elif document_type == "webpage": + website_parser.update_website_document( user_id, document_name, document_pk, document_chunks_pks, company_name) + except Exception as e: + log_error(f"launch_document_update : {e}", notify_admin=True) executor.submit(launch_document_update, document.name, document.document_type, document_manager, user_id, edition, document_chunks_pks, company_name, 
self.website_parser)
diff --git a/mojodex_core/entities/todo.py b/mojodex_core/entities/todo.py
new file mode 100644
index 00000000..b925753c
--- /dev/null
+++ b/mojodex_core/entities/todo.py
@@ -0,0 +1,15 @@
+from mojodex_core.entities.db_base_entities import MdTodo, MdTodoScheduling
+from sqlalchemy.orm import object_session
+
+class Todo(MdTodo):
+
+    @property
+    def n_times_scheduled(self):
+        """
+        Returns the number of times the todo has been scheduled
+        """
+        try:
+            session = object_session(self)
+            return session.query(MdTodoScheduling).filter(MdTodoScheduling.todo_fk == self.todo_pk).count()
+        except Exception as e:
+            raise Exception(f"{self.__class__.__name__} :: n_times_scheduled :: {e}")
\ No newline at end of file
diff --git a/mojodex_core/entities/user.py b/mojodex_core/entities/user.py
index e51e9931..5b79692f 100644
--- a/mojodex_core/entities/user.py
+++ b/mojodex_core/entities/user.py
@@ -1,6 +1,7 @@
-from mojodex_core.entities.db_base_entities import MdUser, MdTask, MdUserTask
+from sqlalchemy import func, text
+from mojodex_core.entities.db_base_entities import MdTodo, MdTodoScheduling, MdUser, MdTask, MdUserTask, MdUserTaskExecution
 from sqlalchemy.orm import object_session
 from mojodex_core.entities.instruct_task import InstructTask
@@ -38,4 +39,38 @@ def datetime_context(self):
                        datetime=self.local_datetime.strftime("%d %B %Y"),
                        time=self.local_datetime.strftime("%H:%M")).prompt
         except Exception as e:
-            raise Exception(f"{self.__class__.__name__} :: datetime_context :: {e}")
\ No newline at end of file
+            raise Exception(f"{self.__class__.__name__} :: datetime_context :: {e}")
+
+    @property
+    def todo_list(self):
+        """
+        Returns the first 20 todos for the user along with their due dates
+        """
+        try:
+            session = object_session(self)
+            # Subquery to get the latest todo_scheduling for each todo
+            latest_todo_scheduling = session.query(
+                MdTodoScheduling.todo_fk,
+                func.max(MdTodoScheduling.scheduled_date).label('latest_scheduled_date')) \
+                .group_by(MdTodoScheduling.todo_fk) \
+                .subquery()
+            # Get the current date in UTC
+            now_utc = datetime.now(timezone.utc).date()
+            results = session.query(MdTodo.description, latest_todo_scheduling.c.latest_scheduled_date) \
+                .join(MdUserTaskExecution, MdTodo.user_task_execution_fk == MdUserTaskExecution.user_task_execution_pk) \
+                .join(latest_todo_scheduling, MdTodo.todo_pk == latest_todo_scheduling.c.todo_fk) \
+                .join(MdUserTask, MdUserTaskExecution.user_task_fk == MdUserTask.user_task_pk) \
+                .join(MdUser, MdUserTask.user_id == MdUser.user_id) \
+                .filter(MdUser.user_id == self.user_id) \
+                .filter(MdTodo.deleted_by_user.is_(None)) \
+                .filter(MdTodo.completed.is_(None)) \
+                .filter((latest_todo_scheduling.c.latest_scheduled_date - text(
+                    'md_user.timezone_offset * interval \'1 minute\'')) >= now_utc) \
+                .order_by(latest_todo_scheduling.c.latest_scheduled_date.desc()) \
+                .offset(0) \
+                .limit(20) \
+                .all()
+
+            return [{'description': description, 'scheduled_date': scheduled_date} for description, scheduled_date in results]
+        except Exception as e:
+            raise Exception(f"{self.__class__.__name__} :: todo_list :: {e}")
\ No newline at end of file
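For orientation, a quick sketch of how these ORM mixins are meant to be consumed. query_user_todo_state is a hypothetical helper, not part of this diff, and it assumes with_db_session injects the db_session argument for plain functions the same way it does for the background model methods above:

    from mojodex_core.db import with_db_session
    from mojodex_core.entities.todo import Todo
    from mojodex_core.entities.user import User

    @with_db_session
    def query_user_todo_state(user_id, todo_pk, db_session):
        # each property below issues its own query through object_session(self)
        user = db_session.query(User).filter(User.user_id == user_id).first()
        todo = db_session.query(Todo).filter(Todo.todo_pk == todo_pk).first()
        return {
            "pending_todos": user.todo_list,           # first 20 upcoming todos with due dates
            "times_scheduled": todo.n_times_scheduled  # count of MdTodoScheduling rows for this todo
        }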
MdProducedText +from mojodex_core.entities.db_base_entities import MdProducedTextVersion, MdUserTaskExecution, MdProducedText from sqlalchemy.orm import object_session from mojodex_core.entities.session import Session @@ -79,3 +79,43 @@ def session(self): except Exception as e: raise Exception(f"{self.__class__} :: session :: {e}") + @property + def last_produced_text_version(self): + try: + session = object_session(self) + return session.query(MdProducedTextVersion).\ + join(MdProducedText, MdProducedTextVersion.produced_text_fk == MdProducedText.produced_text_pk).\ + filter(MdProducedText.user_task_execution_fk == self.user_task_execution_pk).\ + order_by(MdProducedTextVersion.creation_date.desc()).first() + except Exception as e: + raise Exception(f"{self.__class__.__name__} :: produced_text :: {e}") + + @property + def derives_from_a_previous_user_task_execution(self) -> bool: + """On mobile app, some tasks can display "predefined_actions" at the end of the execution. + Those are other task the user can launch to chain task executions on a same subject. + + Returns True if the current UserTaskExecution is the result of a predefined_action launched from a previous UserTaskExecution. + """ + return self.predefined_action_from_user_task_execution_fk is not None + + @property + def previous_related_user_task_executions(self) -> list: + """On mobile app, some tasks can display "predefined_actions" at the end of the execution. + Those are other task the user can launch to chain task executions on a same subject. + + This methods retrieves the previous related user task execution of this chain, if any. + """ + try: + session = object_session(self) + previous_related_user_task_execution = [] + user_task_execution = self + while user_task_execution and user_task_execution.derives_from_a_previous_user_task_execution: + user_task_execution = session.query(UserTaskExecution) \ + .filter(UserTaskExecution.user_task_execution_pk == user_task_execution.predefined_action_from_user_task_execution_fk) \ + .first() + if user_task_execution: + previous_related_user_task_execution.append(user_task_execution) + return previous_related_user_task_execution + except Exception as e: + raise Exception(f"{self.__class__.__name__} :: previous_related_user_task_execution :: {e}") \ No newline at end of file diff --git a/scheduler/app/main.py b/scheduler/app/main.py index 35e709f1..fa581fa6 100644 --- a/scheduler/app/main.py +++ b/scheduler/app/main.py @@ -4,8 +4,6 @@ import schedule import time from scheduled_tasks.check_disengaged_free_users import CheckDisengagedFreeTrialUsers -from scheduled_tasks.send_daily_emails import SendDailyEmails -from scheduled_tasks.send_daily_notifications import SendDailyNotifications from scheduled_tasks.extract_todos import ExtractTodos from scheduled_tasks.reschedule_todos import RescheduleTodos from scheduled_tasks.send_todo_daily_emails import SendTodoDailyEmails @@ -67,9 +65,7 @@ def _check_push_notifications_are_configured(): RescheduleTodos(3600) # reschedule todos every 1 hour if _check_push_notifications_are_configured(): CalendarSuggestionNotificationSender(600) # send calendar suggestion notifications every 10 minutes - SendDailyNotifications(3600) # send daily notifications every 1 hour (filtered by timezone) if _check_emails_are_configured(): - #SendDailyEmails(3600) # send daily emails every 1 hour (filtered by timezone) SendTodoDailyEmails(3600) # send todo daily emails every 1 hour (filtered by timezone) CheckDisengagedFreeTrialUsers(86400) # check disengaged free 
trial users every day FirstHomeChatOfWeek(3600) diff --git a/scheduler/app/scheduled_tasks/send_daily_emails.py b/scheduler/app/scheduled_tasks/send_daily_emails.py deleted file mode 100644 index 5da63784..00000000 --- a/scheduler/app/scheduled_tasks/send_daily_emails.py +++ /dev/null @@ -1,24 +0,0 @@ -import os -from datetime import datetime -import requests -from scheduled_tasks.scheduled_task import ScheduledTask - - -class SendDailyEmails(ScheduledTask): - logger_prefix = "SendDailyEmails" - - def job(self, offset=0, batch_size=50): - try: - uri = f"{os.environ['MOJODEX_BACKEND_URI']}/send_daily_emails" - pload = {'datetime': datetime.now().isoformat(), 'n_emails': batch_size, 'offset': offset} - headers = {'Authorization': os.environ['MOJODEX_SCHEDULER_SECRET'], 'Content-Type': 'application/json'} - internal_request = requests.post(uri, json=pload, headers=headers) - if internal_request.status_code != 200: - self.logger.error(f"Error sending daily emails : {internal_request.text}") - else: - user_ids = internal_request.json()['user_ids'] - self.logger.info(f"Daily emails successfully launched. {len(user_ids)} users are concerned.") - if len(user_ids) == batch_size: - self.job(offset=offset+batch_size) - except Exception as e: - self.logger.error(f"Error sending daily emails : {e}") \ No newline at end of file diff --git a/scheduler/app/scheduled_tasks/send_daily_notifications.py b/scheduler/app/scheduled_tasks/send_daily_notifications.py deleted file mode 100644 index 257ea836..00000000 --- a/scheduler/app/scheduled_tasks/send_daily_notifications.py +++ /dev/null @@ -1,24 +0,0 @@ -import os -from datetime import datetime -import requests -from scheduled_tasks.scheduled_task import ScheduledTask - - -class SendDailyNotifications(ScheduledTask): - logger_prefix = "SendDailyNotifications" - - def job(self, offset=0, batch_size=50): - try: - uri = f"{os.environ['MOJODEX_BACKEND_URI']}/send_daily_notifications" - pload = {'datetime': datetime.now().isoformat(), 'n_notifications': batch_size, 'offset': offset} - headers = {'Authorization': os.environ['MOJODEX_SCHEDULER_SECRET'], 'Content-Type': 'application/json'} - internal_request = requests.post(uri, json=pload, headers=headers) - if internal_request.status_code != 200: - self.logger.error(f"Error sending daily notifications : {internal_request.text}") - else: - user_ids = internal_request.json()['user_ids'] - self.logger.info(f"Daily notifications successfully launched. : {len(user_ids)} users are concerned.") - if len(user_ids) == batch_size: - self.job(offset=offset+batch_size) - except Exception as e: - self.logger.error(f"Error sending daily notifications : {e}") \ No newline at end of file
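A recurring convention in the background refactor above: background services never write to the database directly; they call back into mojodex-backend with the MOJODEX_BACKGROUND_SECRET header, as TodosCreator._save_to_db, TodosCreator._mark_todo_extracted and TodosRescheduler._save_to_db all do. A minimal sketch of that convention; post_to_backend is an illustrative helper, not part of this diff, and the route/payload shown are the ones TodosCreator uses:

    import os
    from datetime import datetime
    import requests

    def post_to_backend(route, payload):
        # background -> backend write, authenticated with the shared background secret
        uri = f"{os.environ['MOJODEX_BACKEND_URI']}/{route}"
        payload = {'datetime': datetime.now().isoformat(), **payload}
        headers = {'Authorization': os.environ['MOJODEX_BACKGROUND_SECRET'], 'Content-Type': 'application/json'}
        response = requests.put(uri, json=payload, headers=headers)
        if response.status_code != 200:
            raise Exception(str(response.json()))
        return response.json()

    # e.g. the call TodosCreator._save_to_db makes (values are placeholders):
    # post_to_backend("todos", {"description": "Send the quote", "due_date": "2024-05-02", "user_task_execution_fk": 42})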