From f58130c2d22c3e4333412dab73062ab48c5667ee Mon Sep 17 00:00:00 2001 From: Rodolfo Olivieri Date: Mon, 13 Jan 2025 15:04:02 -0300 Subject: [PATCH 1/2] Migrate JSON history cache to database solution This patch introduces a migration from the old JSON format to a database format. This allow us to switch between sqlite, postgresql and mysql, giving the user more autonomy, security and control over their database. --- .pre-commit-config.yaml | 1 + command_line_assistant/config/schemas.py | 78 ++++- .../daemon/database/__init__.py | 1 + .../daemon/database/manager.py | 170 +++++++++++ .../daemon/database/models/__init__.py | 1 + .../daemon/database/models/base.py | 6 + .../daemon/database/models/history.py | 43 +++ command_line_assistant/dbus/interfaces.py | 77 ++--- command_line_assistant/history/base.py | 48 +--- command_line_assistant/history/manager.py | 24 +- .../history/plugins/local.py | 162 +++++++---- command_line_assistant/history/schemas.py | 202 ------------- .../config/command-line-assistant/config.toml | 5 +- data/release/xdg/config.toml | 5 +- docs/source/daemon/database/index.rst | 8 + .../database/manager.rst} | 4 +- docs/source/daemon/database/models/base.rst | 8 + .../source/daemon/database/models/history.rst | 8 + docs/source/daemon/database/models/index.rst | 8 + docs/source/daemon/index.rst | 1 + docs/source/history/index.rst | 1 - packaging/command-line-assistant.spec | 6 + pdm.lock | 125 +++++++- pyproject.toml | 1 + tests/commands/test_history.py | 58 ++-- tests/config/test_config.py | 2 - tests/config/test_schema.py | 30 ++ tests/conftest.py | 43 +-- tests/daemon/database/__init__.py | 0 tests/daemon/database/test_manager.py | 177 ++++++++++++ tests/dbus/test_interfaces.py | 138 +++------ tests/history/plugins/test_local.py | 266 +++++++++++------- tests/history/test_manager.py | 71 ++--- tests/history/test_schemas.py | 218 -------------- 34 files changed, 1091 insertions(+), 905 deletions(-) create mode 100644 
command_line_assistant/daemon/database/__init__.py create mode 100644 command_line_assistant/daemon/database/manager.py create mode 100644 command_line_assistant/daemon/database/models/__init__.py create mode 100644 command_line_assistant/daemon/database/models/base.py create mode 100644 command_line_assistant/daemon/database/models/history.py delete mode 100644 command_line_assistant/history/schemas.py create mode 100644 docs/source/daemon/database/index.rst rename docs/source/{history/schemas.rst => daemon/database/manager.rst} (51%) create mode 100644 docs/source/daemon/database/models/base.rst create mode 100644 docs/source/daemon/database/models/history.rst create mode 100644 docs/source/daemon/database/models/index.rst create mode 100644 tests/daemon/database/__init__.py create mode 100644 tests/daemon/database/test_manager.py delete mode 100644 tests/history/test_schemas.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dd84ba8..7ddd2a7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -52,6 +52,7 @@ repos: - responses - tomli; python_version<"3.11" - setuptools + - sqlalchemy - repo: https://github.com/gitleaks/gitleaks rev: v8.22.1 diff --git a/command_line_assistant/config/schemas.py b/command_line_assistant/config/schemas.py index bf5af08..b95ad5c 100644 --- a/command_line_assistant/config/schemas.py +++ b/command_line_assistant/config/schemas.py @@ -2,7 +2,67 @@ import dataclasses from pathlib import Path -from typing import Union +from typing import Optional, Union + + +@dataclasses.dataclass +class DatabaseSchema: + """This class represents the [history.database] section of our config.toml file. + + Attributes: + connection (str): The connection string. + """ + + type: str = "sqlite" # 'sqlite', 'mysql', 'postgresql', etc. 
+ host: Optional[str] = None + database: Optional[str] = None + port: Optional[int] = None # Optional for SQLite as it doesn't require host or port + user: Optional[str] = None # Optional for SQLite + password: Optional[str] = None # Optional for SQLite + connection_string: Optional[Union[str, Path]] = ( + None # Some databases like SQLite can use a file path + ) + + def __post_init__(self): + """Post initialization method to normalize values""" + # If the database type is not a supported one, we can just skip it. + allowed_databases = ("mysql", "sqlite", "postgresql") + if self.type not in allowed_databases: + raise ValueError( + f"The database type must be one of {','.join(allowed_databases)}, not {self.type}" + ) + + if self.connection_string: + self.connection_string = Path(self.connection_string).expanduser() + + # Post-initialization to set default values for specific db types + if self.type == "sqlite" and not self.connection_string: + self.connection_string = f"sqlite://{self.database}" + elif self.type == "mysql" and not self.port: + self.port = 3306 # Default MySQL port + elif self.type == "postgresql" and not self.port: + self.port = 5432 # Default PostgreSQL port + + def get_connection_url(self) -> str: + """ + Constructs and returns the connection URL or string for the respective database. 
+ + Raises: + ValueError: In case the type is not recognized + + Returns: + str: The URL formatted connection + """ + connection_urls = { + "sqlite": f"sqlite:///{self.connection_string}", + "mysql": f"mysql://{self.user}:{self.password}@{self.host}:{self.port}/{self.database}", + "postgresql": f"postgresql://{self.user}:{self.password}@{self.host}:{self.port}/{self.database}", + } + + if self.type not in connection_urls: + raise ValueError(f"Unsupported database type: {self.type}") + + return connection_urls[self.type] @dataclasses.dataclass @@ -83,13 +143,15 @@ class HistorySchema: """ enabled: bool = True - file: Union[str, Path] = Path( # type: ignore - "/var/lib/command-line-assistant/history.json" - ) + database: DatabaseSchema = dataclasses.field(default_factory=DatabaseSchema) def __post_init__(self): """Post initialization method to normalize values""" - self.file: Path = Path(self.file).expanduser() + + # # Database may be present in the config.toml. If it is not, we odn't do + # # anything and go with defaults. 
+ if isinstance(self.database, dict): + self.database = DatabaseSchema(**self.database) @dataclasses.dataclass @@ -108,8 +170,8 @@ class AuthSchema: def __post_init__(self) -> None: """Post initialization method to normalize values""" - self.cert_file = Path(self.cert_file) - self.key_file = Path(self.key_file) + self.cert_file = Path(self.cert_file).expanduser() + self.key_file = Path(self.key_file).expanduser() @dataclasses.dataclass @@ -122,7 +184,7 @@ class BackendSchema: """ endpoint: str = "http://0.0.0.0:8080" - auth: Union[dict, AuthSchema] = dataclasses.field(default_factory=AuthSchema) + auth: AuthSchema = dataclasses.field(default_factory=AuthSchema) def __post_init__(self): """Post initialization method to normalize values""" diff --git a/command_line_assistant/daemon/database/__init__.py b/command_line_assistant/daemon/database/__init__.py new file mode 100644 index 0000000..1e96313 --- /dev/null +++ b/command_line_assistant/daemon/database/__init__.py @@ -0,0 +1 @@ +"""Main module to hold anything related to databases.""" diff --git a/command_line_assistant/daemon/database/manager.py b/command_line_assistant/daemon/database/manager.py new file mode 100644 index 0000000..22c1de9 --- /dev/null +++ b/command_line_assistant/daemon/database/manager.py @@ -0,0 +1,170 @@ +"""Database module to handle SQLAlchemy connections and interactions.""" + +import logging +import uuid +from contextlib import contextmanager +from typing import Generator, Optional, TypeVar + +from sqlalchemy import create_engine +from sqlalchemy.engine import Engine +from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.pool import StaticPool + +from command_line_assistant.config import Config +from command_line_assistant.daemon.database.models.base import BaseModel + +logger = logging.getLogger(__name__) + + +# Type variable for ORM models +T = TypeVar("T") + + +class DatabaseError(Exception): + """Base exception for database errors.""" + + +class 
ConnectionError(DatabaseError): + """Exception raised when connection fails.""" + + +class QueryError(DatabaseError): + """Exception raised when query fails.""" + + +class DatabaseManager: + """Class to handle database operations using SQLAlchemy.""" + + def __init__(self, config: Config, echo: bool = False) -> None: + """Initialize database connection. + + Args: + database (Path): Path to the SQLite database file + echo (bool): Enable SQL query logging if True + """ + self._config = config + self._engine: Engine = self._create_engine(echo) + self._session_factory = sessionmaker(bind=self._engine) + + def _create_engine(self, echo: bool) -> Engine: + """Create SQLAlchemy engine with proper settings. + + Args: + echo (bool): Enable SQL query logging if True + + Returns: + Engine: Configured SQLAlchemy engine + + Raises: + ConnectionError: When invalid database settings are provided + """ + try: + connection_url = self._config.history.database.get_connection_url() + + # SQLite-specific settings + connect_args = {} + if self._config.history.database.type == "sqlite": + connect_args["check_same_thread"] = False + return create_engine( + connection_url, + echo=echo, + poolclass=StaticPool, + connect_args=connect_args, + ) + + # For other databases, use standard pooling + return create_engine( + connection_url, + echo=echo, + pool_pre_ping=True, + pool_size=5, + max_overflow=10, + ) + except Exception as e: + logger.error("Failed to create database engine: %s", e) + raise ConnectionError(f"Could not create database engine: {e}") from e + + def connect(self) -> None: + """Create database tables if they don't exist.""" + try: + BaseModel.metadata.create_all(self._engine) + except Exception as e: + logger.error("Failed to create database tables: %s", e) + raise ConnectionError(f"Could not create tables: {e}") from e + + @contextmanager + def session(self) -> Generator[Session, None, None]: + """Create a contextual database session. 
+ + Yields: + Session: SQLAlchemy session object + + Raises: + QueryError: If session operations fail + """ + session = self._session_factory() + try: + yield session + session.commit() + except Exception as e: + session.rollback() + logger.error("Database session error: %s", e) + raise QueryError(f"Session error: {e}") from e + finally: + session.close() + + def add(self, instance: T) -> None: + """Add an instance to the database. + + Args: + instance (T): SQLAlchemy model instance to add + + Raises: + QueryError: If adding fails + """ + try: + with self.session() as session: + session.add(instance) + session.flush() + except Exception as e: + logger.error("Failed to add instance: %s", e) + raise QueryError(f"Failed to add instance: {e}") from e + + def query(self, model: type[T]) -> list[T]: + """Query all instances of a model. + + Args: + model (type[T]): SQLAlchemy model class to query + + Returns: + list[T]: List of model instances + + Raises: + QueryError: If query fails + """ + try: + with self.session() as session: + return session.query(model).all() + except Exception as e: + logger.error("Failed to query instances: %s", e) + raise QueryError(f"Failed to query instances: {e}") from e + + def get(self, model: type[T], id: uuid.UUID) -> Optional[T]: + """Get a single instance by ID. 
+ + Args: + model (type[T]): SQLAlchemy model class + id (uuid.UUID): Instance ID to get + + Returns: + Optional[T]: Model instance if found, None otherwise + + Raises: + QueryError: If query fails + """ + try: + with self.session() as session: + return session.query(model).get(id) + except Exception as e: + logger.error("Failed to get instance: %s", e) + raise QueryError(f"Failed to get instance: {e}") from e diff --git a/command_line_assistant/daemon/database/models/__init__.py b/command_line_assistant/daemon/database/models/__init__.py new file mode 100644 index 0000000..b63dd02 --- /dev/null +++ b/command_line_assistant/daemon/database/models/__init__.py @@ -0,0 +1 @@ +"""Main module to hold the database models""" diff --git a/command_line_assistant/daemon/database/models/base.py b/command_line_assistant/daemon/database/models/base.py new file mode 100644 index 0000000..944c3bc --- /dev/null +++ b/command_line_assistant/daemon/database/models/base.py @@ -0,0 +1,6 @@ +"""Base module to hold the declarative base for sqlalchemy models""" + +from sqlalchemy.ext.declarative import declarative_base + +#: The declarative base model for SQLAlchemy models +BaseModel = declarative_base() diff --git a/command_line_assistant/daemon/database/models/history.py b/command_line_assistant/daemon/database/models/history.py new file mode 100644 index 0000000..a933eac --- /dev/null +++ b/command_line_assistant/daemon/database/models/history.py @@ -0,0 +1,43 @@ +"""Module containing SQLAlchemy models for the daemon.""" + +import uuid +from datetime import datetime + +from sqlalchemy import Column, DateTime, ForeignKey, Integer, String +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import relationship + +from command_line_assistant.daemon.database.models.base import BaseModel + + +class HistoryModel(BaseModel): + """SQLAlchemy model for history table that maps to HistoryEntry dataclass.""" + + __tablename__ = "history" + + id = Column(UUID(as_uuid=True), 
primary_key=True, default=uuid.uuid4) + timestamp = Column(DateTime, default=datetime.utcnow()) + deleted_at = Column(DateTime, nullable=True) + + # Relationships + interaction_id = Column( + UUID(as_uuid=True), ForeignKey("interaction.id"), nullable=False + ) + interaction = relationship("InteractionModel", backref="history") + + +class InteractionModel(BaseModel): + """SQLAlchemy model for interaction table.""" + + __tablename__ = "interaction" + + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + query_text = Column(String) + query_role = Column(String, default="user") + response_text = Column(String) + response_role = Column(String, default="assistant") + response_tokens = Column(Integer, default=0) + session_id = Column(UUID(as_uuid=True), nullable=False, default=uuid.uuid4) + os_distribution = Column(String, default="RHEL") + os_version = Column(String, nullable=False) + os_arch = Column(String, nullable=False) diff --git a/command_line_assistant/dbus/interfaces.py b/command_line_assistant/dbus/interfaces.py index fe576a4..80b4257 100644 --- a/command_line_assistant/dbus/interfaces.py +++ b/command_line_assistant/dbus/interfaces.py @@ -11,6 +11,7 @@ from command_line_assistant.dbus.constants import HISTORY_IDENTIFIER, QUERY_IDENTIFIER from command_line_assistant.dbus.structures import ( HistoryEntry, + HistoryItem, Message, ) from command_line_assistant.history.manager import HistoryManager @@ -42,8 +43,7 @@ def RetrieveAnswer(self) -> Structure: # Deal with history management manager = HistoryManager(self.implementation.config, LocalHistory) - current_history = manager.read() - manager.write(current_history, query, llm_response) + manager.write(query, llm_response) audit_logger.info( "Query executed successfully.", @@ -53,7 +53,7 @@ def RetrieveAnswer(self) -> Structure: "response": llm_response, }, ) - # Return the data - okay + # Return the data return Message.to_structure(message) @emits_properties_changed @@ -70,6 +70,25 @@ def 
ProcessQuery(self, query: Structure) -> None: class HistoryInterface(InterfaceTemplate): """The DBus interface of a history""" + def _parse_history_entries(self, entries: list[dict[str, str]]) -> HistoryEntry: + """Parse the history entries in a common format for all methods + + Args: + entries (list[dict[str, str]]): List of entries in a dictionary format with only the necessary information. + + Returns: + HistoryEntry: An instance of HistoryEntry with all necessary information. + """ + history_entry = HistoryEntry() + for entry in entries: + history_item = HistoryItem() + history_item.query = entry["query"] + history_item.response = entry["response"] + history_item.timestamp = entry["timestamp"] + history_entry.entries.append(history_item) + + return history_entry + def GetHistory(self) -> Structure: """Get all conversations from history. @@ -77,16 +96,11 @@ def GetHistory(self) -> Structure: Structure: The history entries in a dbus structure format. """ manager = HistoryManager(self.implementation.config, LocalHistory) - history = manager.read() - + history_entries = manager.read() history_entry = HistoryEntry() - if history.history: - _ = [ - history_entry.set_from_dict(entry.to_dict()) - for entry in history.history - ] - else: - history_entry.entries = [] + + if history_entries: + history_entry = self._parse_history_entries(history_entries) return HistoryEntry.to_structure(history_entry) @@ -98,11 +112,11 @@ def GetFirstConversation(self) -> Structure: Structure: A single history entry in a dbus structure format. 
""" manager = HistoryManager(self.implementation.config, LocalHistory) - history = manager.read() + history_entries = manager.read() history_entry = HistoryEntry() - if history.history: - last_entry = history.history[0] - history_entry.set_from_dict(last_entry.to_dict()) + + if history_entries: + history_entry = self._parse_history_entries(history_entries[:1]) return HistoryEntry.to_structure(history_entry) @@ -113,12 +127,11 @@ def GetLastConversation(self) -> Structure: Structure: A single history entyr in a dbus structure format. """ manager = HistoryManager(self.implementation.config, LocalHistory) - history = manager.read() + history_entries = manager.read() history_entry = HistoryEntry() - if history.history: - last_entry = history.history[-1] - history_entry.set_from_dict(last_entry.to_dict()) + if history_entries: + history_entry = self._parse_history_entries(history_entries[-1:]) return HistoryEntry.to_structure(history_entry) @@ -132,28 +145,20 @@ def GetFilteredConversation(self, filter: Str) -> Structure: Structure: A single history entyr in a dbus structure format. """ manager = HistoryManager(self.implementation.config, LocalHistory) - history = manager.read() + history_entries = manager.read() history_entry = HistoryEntry() - found_entries = [] - if history.history: + if history_entries: logger.info("Filtering the user history with keyword '%s'", filter) - # We ignore the type in the condition as pyright thinks that "Str" is not "str". - # Pyright is correct about this, but "Str" is a special type for dbus. It will be "str" in the end. 
- found_entries = [ + # Filter entries where the query or response contains the filter string + filtered_entries = [ entry - for entry in history.history - if ( - filter in entry.interaction.query.text # type: ignore - or filter in entry.interaction.response.text # type: ignore - ) + for entry in history_entries + if (filter in entry["query"] or filter in entry["response"]) ] - logger.info("Found %s entries in the history", len(found_entries)) - # Normalize the entries to send over dbus - _ = [ - history_entry.set_from_dict(entry.to_dict()) for entry in set(found_entries) - ] + history_entry = self._parse_history_entries(filtered_entries) + return HistoryEntry.to_structure(history_entry) def ClearHistory(self) -> None: diff --git a/command_line_assistant/history/base.py b/command_line_assistant/history/base.py index f91f55f..38df194 100644 --- a/command_line_assistant/history/base.py +++ b/command_line_assistant/history/base.py @@ -2,21 +2,13 @@ import logging from abc import ABC, abstractmethod -from datetime import datetime from command_line_assistant.config import Config -from command_line_assistant.history.schemas import ( - History, - HistoryEntry, - InteractionData, - QueryData, - ResponseData, -) logger = logging.getLogger(__name__) -class BaseHistory(ABC): +class BaseHistoryPlugin(ABC): """Abstract base class for history""" def __init__(self, config: Config) -> None: @@ -28,7 +20,7 @@ def __init__(self, config: Config) -> None: self._config = config @abstractmethod - def read(self) -> History: + def read(self) -> list[dict[str, str]]: """Abstract method to represent a read operation Returns: @@ -36,11 +28,10 @@ def read(self) -> History: """ @abstractmethod - def write(self, current_history: History, query: str, response: str) -> None: + def write(self, query: str, response: str) -> None: """Abstract method to represent a write operation Args: - current_history (History): An instance of the current history to append new data query (str): The user question 
response (str): The LLM response """ @@ -49,39 +40,6 @@ def write(self, current_history: History, query: str, response: str) -> None: def clear(self) -> None: """Abstract method to represent a clear operation""" - def _add_new_entry( - self, current_history: History, query: str, response: str - ) -> History: - """Internal method to add a new entry to the current user history - - Args: - current_history (History): An instance of the current history to append new data - query (str): The user question - response (str): The LLM response - - Returns: - History: The modified history with the new entry - """ - new_entry = HistoryEntry( - interaction=InteractionData( - query=QueryData(text=query), - response=ResponseData( - text=response, - tokens=len( - response.split() # TODO(r0x0d): Simple token count, replace with actual - ), - ), - ) - ) - - current_history.history.append(new_entry) - current_history.metadata.entry_count = len(current_history.history) - # NOTE(r0x0d): This way of getting the timestamp is deprecated in newer - # Python versions, however, the correct method is not available in Python 3.9. - # This would be the replacement datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S.%fZ") - current_history.metadata.last_updated = datetime.utcnow().isoformat() + "Z" - return current_history - def _check_if_history_is_enabled(self) -> bool: """Check if the history is enabled in the configuration file. 
diff --git a/command_line_assistant/history/manager.py b/command_line_assistant/history/manager.py index 96a873b..9c900c0 100644 --- a/command_line_assistant/history/manager.py +++ b/command_line_assistant/history/manager.py @@ -3,8 +3,7 @@ from typing import Optional, Type from command_line_assistant.config import Config -from command_line_assistant.history.base import BaseHistory -from command_line_assistant.history.schemas import History +from command_line_assistant.history.base import BaseHistoryPlugin class HistoryManager: @@ -18,7 +17,7 @@ class HistoryManager: """ def __init__( - self, config: Config, plugin: Optional[Type[BaseHistory]] = None + self, config: Config, plugin: Optional[Type[BaseHistoryPlugin]] = None ) -> None: """Initialize the history manager. @@ -27,15 +26,15 @@ def __init__( plugin (Optional[Type[BaseHistory]], optional): Optional history implementation class """ self._config = config - self._plugin: Optional[Type[BaseHistory]] = None - self._instance: Optional[BaseHistory] = None + self._plugin: Optional[Type[BaseHistoryPlugin]] = None + self._instance: Optional[BaseHistoryPlugin] = None # Set initial plugin if provided if plugin: self.plugin = plugin @property - def plugin(self) -> Optional[Type[BaseHistory]]: + def plugin(self) -> Optional[Type[BaseHistoryPlugin]]: """Property for the internal plugin attribute Returns: @@ -44,7 +43,7 @@ def plugin(self) -> Optional[Type[BaseHistory]]: return self._plugin @plugin.setter - def plugin(self, plugin_cls: Type[BaseHistory]) -> None: + def plugin(self, plugin_cls: Type[BaseHistoryPlugin]) -> None: """Set and initialize a new plugin. 
Args: @@ -53,7 +52,7 @@ def plugin(self, plugin_cls: Type[BaseHistory]) -> None: Raises: TypeError: If plugin_cls is not a subclass of BaseHistory """ - if not issubclass(plugin_cls, BaseHistory): + if not issubclass(plugin_cls, BaseHistoryPlugin): raise TypeError( f"Plugin must be a subclass of BaseHistory, got {plugin_cls.__name__}" ) @@ -61,25 +60,24 @@ def plugin(self, plugin_cls: Type[BaseHistory]) -> None: self._plugin = plugin_cls self._instance = plugin_cls(self._config) - def read(self) -> History: + def read(self) -> list[dict[str, str]]: """Read history entries using the current plugin. Raises: RuntimeError: If no plugin is set Returns: - History object containing entries and metadata + HistoryModel: Result from the database. """ if not self._instance: raise RuntimeError("No history plugin set. Set plugin before operations.") return self._instance.read() - def write(self, current_history: History, query: str, response: str) -> None: + def write(self, query: str, response: str) -> None: """Write a new history entry using the current plugin. Args: - current_history (History): The current user history query (str): The user's query response (str): The LLM's response @@ -89,7 +87,7 @@ def write(self, current_history: History, query: str, response: str) -> None: if not self._instance: raise RuntimeError("No history plugin set. Set plugin before operations.") - self._instance.write(current_history, query, response) + self._instance.write(query, response) def clear(self) -> None: """Clear all history entries. 
diff --git a/command_line_assistant/history/plugins/local.py b/command_line_assistant/history/plugins/local.py index dd8ecc2..2874a34 100644 --- a/command_line_assistant/history/plugins/local.py +++ b/command_line_assistant/history/plugins/local.py @@ -1,98 +1,142 @@ """Plugin for handling local history managemnet.""" -import json import logging +import platform +import uuid +from datetime import datetime +from sqlalchemy import desc + +from command_line_assistant.config import Config +from command_line_assistant.daemon.database.manager import DatabaseManager +from command_line_assistant.daemon.database.models.history import ( + HistoryModel, + InteractionModel, +) from command_line_assistant.dbus.exceptions import ( CorruptedHistoryError, MissingHistoryFileError, ) -from command_line_assistant.history.base import BaseHistory -from command_line_assistant.history.schemas import History +from command_line_assistant.history.base import BaseHistoryPlugin logger = logging.getLogger(__name__) -class LocalHistory(BaseHistory): +class LocalHistory(BaseHistoryPlugin): """Class meant to manage the conversation history locally.""" - def read(self) -> History: - """Reads the history from a file and returns it as a list of dictionaries. + def __init__(self, config: Config) -> None: + """Default constructor for class + + Args: + config (Config): Configuration class + """ + super().__init__(config) + self._db: DatabaseManager = self._initialize_database() + + def _initialize_database(self) -> DatabaseManager: + """Initialize the database connection and create tables if needed. + + Returns: + Database: A new instance of the database. Raises: - CorruptedHistoryError: Raised when the file is corrupted or the json can't be serialized. - MissingHistoryFileError: Raised when the history file could not be found. + MissingHistoryFileError: If the database cannot be initialized properly. 
+ """ + try: + db = DatabaseManager(self._config) + db.connect() + return db + except Exception as e: + logger.error("Failed to initialize database: %s", e) + raise MissingHistoryFileError(f"Could not initialize database: {e}") from e + + def read(self) -> list[dict[str, str]]: + """Reads the history from the database. Returns: - History: An instance of a `py:History` class that holds the history data. + History: An instance of a History class that holds the history data. + + Raises: + CorruptedHistoryError: Raised when there's an error reading from the database. + MissingHistoryFileError: Raised when the database file is missing. """ - history = History() if not self._check_if_history_is_enabled(): - return history - - filepath = self._config.history.file + return [] - logger.info("Reading history at %s", filepath) try: - data = filepath.read_text() - return History.from_json(data) - except json.JSONDecodeError as e: - logger.error("Failed to read history file %s: %s", filepath, e) - raise CorruptedHistoryError( - f"The history file {filepath} seems to be corrupted. Can't load the file." - ) from e - except FileNotFoundError as e: - logger.error("History file does not exist %s: %s", filepath, e) - raise MissingHistoryFileError( - f"The history file {filepath} is missing." 
- ) from e - - def write(self, current_history: History, query: str, response: str) -> None: - """Write history to a file + with self._db.session() as session: + # Query history entries with relationships + entries = ( + session.query(HistoryModel) + .join(InteractionModel) + .filter(HistoryModel.deleted_at.is_(None)) + .order_by(desc(HistoryModel.timestamp)) + .all() + ) + + return [ + { + "query": entry.interaction.query_text, + "response": entry.interaction.response_text, + "timestamp": str(entry.timestamp), + } + for entry in entries + ] + except Exception as e: + logger.error("Failed to read from database: %s", e) + raise CorruptedHistoryError(f"Failed to read from database: {e}") from e + + def write(self, query: str, response: str) -> None: + """Write history to the database. Args: - current_history (History): An instance of the current history to append new data query (str): The user question response (str): The LLM response Raises: - CorruptedHistoryError: Raised when the file is corrupted or the json can't be serialized. - MissingHistoryFileError: Raised when the history file could not be found. + CorruptedHistoryError: Raised when there's an error writing to the database. + MissingHistoryFileError: Raised when the database file is missing. """ if not self._check_if_history_is_enabled(): return - filepath = self._config.history.file - final_history = self._add_new_entry(current_history, query, response) - logger.info("Writting user history at %s", filepath) try: - filepath.write_text(final_history.to_json()) - except json.JSONDecodeError as e: - logger.error("Failed to write history file %s: %s", filepath, e) - raise CorruptedHistoryError( - f"Can't write data to the history file {filepath}." - ) from e - except FileNotFoundError as e: - logger.error("History file does not exist %s: %s", filepath, e) - raise MissingHistoryFileError( - f"The history file {filepath} is missing." 
- ) from e + with self._db.session() as session: + # Create Interaction record + interaction = InteractionModel( + query_text=query, + response_text=response, + response_tokens=len(response), + session_id=uuid.uuid4(), + os_distribution="RHEL", # Default to RHEL for now + os_version=platform.release(), + os_arch=platform.machine(), + ) + session.add(interaction) + + # Create History record + history = HistoryModel( + interaction=interaction, + ) + session.add(history) + except Exception as e: + logger.error("Failed to write to database: %s", e) + raise CorruptedHistoryError(f"Failed to write to database: {e}") from e def clear(self) -> None: - """Clear the local history by adding a blank version of history. + """Clear the database by dropping and recreating tables. Raises: - MissingHistoryFileError: Raised when the history file could not be found. + MissingHistoryFileError: Raised when the database file is missing. """ - # Write empty history - current_history = History() - filepath = self._config.history.file - logger.info("Clearing history at %s", filepath) try: - filepath.write_text(current_history.to_json()) - logger.info("History cleared successfully") - except FileNotFoundError as e: - logger.error("History file does not exist %s: %s", filepath, e) - raise MissingHistoryFileError( - f"The history file {filepath} is missing." 
- ) from e + with self._db.session() as session: + # Soft delete by setting deleted_at + session.query(HistoryModel).update( + {"deleted_at": datetime.utcnow()}, synchronize_session=False + ) + logger.info("Database cleared successfully") + except Exception as e: + logger.error("Failed to clear database: %s", e) + raise MissingHistoryFileError(f"Failed to clear database: {e}") from e diff --git a/command_line_assistant/history/schemas.py b/command_line_assistant/history/schemas.py deleted file mode 100644 index f3dffc7..0000000 --- a/command_line_assistant/history/schemas.py +++ /dev/null @@ -1,202 +0,0 @@ -"""Module to hold the history schema and it's sub schemas.""" - -import json -import platform -import uuid -from dataclasses import dataclass, field -from datetime import datetime -from typing import Optional - -from command_line_assistant.constants import VERSION - - -@dataclass(frozen=True) -class QueryData: - """Schema to represent a query emited by the user. - - Attributes: - text (Optional[str], optional): The user text - role (str): The role of the user. Defaults to "user". - """ - - text: Optional[str] = None - role: str = "user" - - -@dataclass(frozen=True) -class ResponseData: - """Schema to represent the LLM response. - - Attributes: - text (Optional[str], optional): The LLM response - tokens (Optional[int], optional): Amount of tokens consumed in the message. - role (str): The role of the response. Defaults to "assistant". - """ - - text: Optional[str] = None - tokens: Optional[int] = 0 - role: str = "assistant" - - -@dataclass(frozen=True) -class InteractionData: - """Schema to represent the interaction data between user and LLM. 
- - Attributes: - query (QueryData): The query data representation - response (ResponseData): The response data representation - """ - - query: QueryData = field(default_factory=QueryData) - response: ResponseData = field(default_factory=ResponseData) - - -@dataclass(frozen=True) -class OSInfo: - """Schema to represent the system information - - Attributes: - distribution (str): The system distribution name. Defaults to "RHEL". - version (str): The version of the system. Defaults to `py:platform.version()` - arch (str): The architecture of the system. Defaults to `py:platform.architecture()` - """ - - distribution: str = "RHEL" - version: str = platform.version() - arch: str = platform.architecture()[0] - - -@dataclass(frozen=True) -class EntryMetadata: - """Schema to represent the entry metadata information - - Attributes: - session_id (str): An unique identifier to the session. Defaults to `py:uuid.uuid4()` - os_info (OSInfo): The system information - """ - - session_id: str = field(default_factory=lambda: str(uuid.uuid4())) - os_info: OSInfo = field(default_factory=OSInfo) - - -@dataclass(frozen=True) -class HistoryEntry: - """Schema to represent an entry of the history - - Attributes: - id (str): An unique identifier for this entry. Defaults to `py:uuid.uuid4()` - timestamp (str): The datetime (UTC) in iso format for the entry - interaction (InteractionData): Instance of an interaction for the entry - metadata (EntryMetadata): Instance of entry metadata - """ - - id: str = field(default_factory=lambda: str(uuid.uuid4())) - # NOTE(r0x0d): This way of getting the timestamp is deprecated in newer - # Python versions, however, the correct method is not available in Python 3.9. 
- # This would be the replacement datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S.%fZ") - timestamp: str = field(default_factory=lambda: datetime.utcnow().isoformat() + "Z") - interaction: InteractionData = field(default_factory=InteractionData) - metadata: EntryMetadata = field(default_factory=EntryMetadata) - - def to_dict(self) -> dict: - """Helper method to transform the currenty entry in dictionary. - - Returns: - dict: Dictionary containing the information of the schema - """ - return { - "id": self.id, - "timestamp": self.timestamp, - "interaction": { - "query": vars(self.interaction.query), - "response": vars(self.interaction.response), - }, - "metadata": { - "session_id": self.metadata.session_id, - "os_info": vars(self.metadata.os_info), - }, - } - - -@dataclass -class HistoryMetadata: - """Schema to represent the history metadata - - Attributes: - last_updated (str): The datetime (UTC) in iso format for the last update - version (str): The current program version. Defaults to `py:VERSION` - entry_count (int): Quantity of entries added to the history - size_bytes (int): The size of all entries in bytes - """ - - # NOTE(r0x0d): This way of getting the timestamp is deprecated in newer - # Python versions, however, the correct method is not available in Python 3.9. 
- # This would be the replacement datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S.%fZ") - last_updated: str = field( - default_factory=lambda: datetime.utcnow().isoformat() + "Z" - ) - version: str = VERSION - entry_count: int = 0 - size_bytes: int = 0 - - -@dataclass -class History: - """Schema to represent the current user history - - Attributes: - history (list[HistoryEntry]): A list of each entry in the history - metadata (HistoryMetadata): The metadata for the current history - """ - - history: list[HistoryEntry] = field(default_factory=list) - metadata: HistoryMetadata = field(default_factory=HistoryMetadata) - - def to_json(self) -> str: - """Helper method to transform current instance to json - - Returns: - str: A valid json from the current class - """ - return json.dumps( - { - "history": [entry.to_dict() for entry in self.history], - "metadata": vars(self.metadata), - }, - indent=2, - ) - - @classmethod - def from_json(cls, json_str: str) -> "History": - """Helper method to convert a json string to a History instance - - Args: - json_str (str): The json string to be converted - - Returns: - History: The instance of this schema converted from json - """ - data = json.loads(json_str) - history = [] - - for entry_data in data["history"]: - query = QueryData(**entry_data["interaction"]["query"]) - response = ResponseData(**entry_data["interaction"]["response"]) - interaction = InteractionData(query=query, response=response) - - os_info = OSInfo(**entry_data["metadata"]["os_info"]) - metadata = EntryMetadata( - session_id=entry_data["metadata"]["session_id"], - os_info=os_info, - ) - - entry = HistoryEntry( - id=entry_data["id"], - timestamp=entry_data["timestamp"], - interaction=interaction, - metadata=metadata, - ) - history.append(entry) - - metadata = HistoryMetadata(**data["metadata"]) - return cls(history=history, metadata=metadata) diff --git a/data/development/config/command-line-assistant/config.toml 
b/data/development/config/command-line-assistant/config.toml index 69286fa..e566b97 100644 --- a/data/development/config/command-line-assistant/config.toml +++ b/data/development/config/command-line-assistant/config.toml @@ -8,7 +8,10 @@ prompt_separator = "$" [history] enabled = true -file = "~/.local/share/command-line-assistant/command-line-assistant_history.json" + +[history.database] +type = "sqlite" +connection_string = "~/.local/share/command-line-assistant/history.db" [backend] endpoint = "http://localhost:8080" diff --git a/data/release/xdg/config.toml b/data/release/xdg/config.toml index 333849c..abeb85c 100644 --- a/data/release/xdg/config.toml +++ b/data/release/xdg/config.toml @@ -8,7 +8,10 @@ prompt_separator = "$" [history] enabled = true -file = "/var/lib/command-line-assistant/history.json" + +[history.database] +type = "sqlite" +connection_string = "/var/lib/command-line-assistant/history.db" [backend] endpoint = "http://localhost:8080" diff --git a/docs/source/daemon/database/index.rst b/docs/source/daemon/database/index.rst new file mode 100644 index 0000000..ce6d935 --- /dev/null +++ b/docs/source/daemon/database/index.rst @@ -0,0 +1,8 @@ +Database +======== + +.. toctree:: + :maxdepth: 2 + + manager + models/index diff --git a/docs/source/history/schemas.rst b/docs/source/daemon/database/manager.rst similarity index 51% rename from docs/source/history/schemas.rst rename to docs/source/daemon/database/manager.rst index 67c939c..02640dd 100644 --- a/docs/source/history/schemas.rst +++ b/docs/source/daemon/database/manager.rst @@ -1,7 +1,7 @@ -Schemas +Manager ======= -.. automodule:: command_line_assistant.history.schemas +.. 
automodule:: command_line_assistant.daemon.database.manager :members: :undoc-members: :private-members: diff --git a/docs/source/daemon/database/models/base.rst b/docs/source/daemon/database/models/base.rst new file mode 100644 index 0000000..fd0f1c1 --- /dev/null +++ b/docs/source/daemon/database/models/base.rst @@ -0,0 +1,8 @@ +Base +==== + +.. automodule:: command_line_assistant.daemon.database.models.base + :members: + :undoc-members: + :private-members: + :no-index: diff --git a/docs/source/daemon/database/models/history.rst b/docs/source/daemon/database/models/history.rst new file mode 100644 index 0000000..2d550d8 --- /dev/null +++ b/docs/source/daemon/database/models/history.rst @@ -0,0 +1,8 @@ +History +======= + +.. automodule:: command_line_assistant.daemon.database.models.history + :members: + :undoc-members: + :private-members: + :no-index: diff --git a/docs/source/daemon/database/models/index.rst b/docs/source/daemon/database/models/index.rst new file mode 100644 index 0000000..ccea0ae --- /dev/null +++ b/docs/source/daemon/database/models/index.rst @@ -0,0 +1,8 @@ +Models +====== + +.. 
toctree:: + :maxdepth: 2 + + base + history diff --git a/docs/source/daemon/index.rst b/docs/source/daemon/index.rst index a8ff9b2..6a507e3 100644 --- a/docs/source/daemon/index.rst +++ b/docs/source/daemon/index.rst @@ -6,4 +6,5 @@ Daemon clad session + database/index http/index diff --git a/docs/source/history/index.rst b/docs/source/history/index.rst index 23a3343..6d7455a 100644 --- a/docs/source/history/index.rst +++ b/docs/source/history/index.rst @@ -6,5 +6,4 @@ History base manager - schemas plugins/index diff --git a/packaging/command-line-assistant.spec b/packaging/command-line-assistant.spec index 15d6faa..7e803ad 100644 --- a/packaging/command-line-assistant.spec +++ b/packaging/command-line-assistant.spec @@ -21,6 +21,12 @@ BuildRequires: systemd-units Requires: python3-dasbus Requires: python3-requests +# In RHEL 9 we only have version 1.4+ +%if 0%{?rhel} && 0%{?rhel} < 10 +Requires: python3-sqlalchemy >= 1.4.45 +%else +Requires: python3-sqlalchemy +%endif Requires: systemd # Not needed after RHEL 10 as it is native in Python 3.11+ diff --git a/pdm.lock b/pdm.lock index 2d72997..4d3498a 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev"] strategy = ["inherit_metadata"] lock_version = "4.5.0" -content_hash = "sha256:a6e6c281ff5554109c71f24bb23cbd2000b41d87de93ca95c765dda9efdbb669" +content_hash = "sha256:0766be8f03c0e5a45a024af107efc70f7b301f28f33351465bac1b7a33416a28" [[metadata.targets]] requires_python = ">=3.9" @@ -357,6 +357,70 @@ files = [ {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] +[[package]] +name = "greenlet" +version = "3.1.1" +requires_python = ">=3.7" +summary = "Lightweight in-process concurrent programming" +groups = ["default"] +marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" 
or platform_machine == \"aarch64\") and python_version < \"3.14\"" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = 
"greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = 
"greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] + [[package]] name = "idna" version = "3.10" @@ -965,6 +1029,62 @@ files = [ {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] +[[package]] +name = "sqlalchemy" +version = "2.0.37" +requires_python = ">=3.7" +summary = "Database Abstraction Library" +groups = ["default"] +dependencies = [ + "greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.14\"", + "importlib-metadata; python_version < \"3.8\"", + "typing-extensions>=4.6.0", +] +files = [ + {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e6f5d254a22394847245f411a2956976401e84da4288aa70cbcd5190744062c1"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41296bbcaa55ef5fdd32389a35c710133b097f7b2609d8218c0eabded43a1d84"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bedee60385c1c0411378cbd4dc486362f5ee88deceea50002772912d798bb00f"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6c67415258f9f3c69867ec02fea1bf6508153709ecbd731a982442a590f2b7e4"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-win32.whl", hash = "sha256:650dcb70739957a492ad8acff65d099a9586b9b8920e3507ca61ec3ce650bb72"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-win_amd64.whl", hash = "sha256:93d1543cd8359040c02b6614421c8e10cd7a788c40047dbc507ed46c29ae5636"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78361be6dc9073ed17ab380985d1e45e48a642313ab68ab6afa2457354ff692c"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b661b49d0cb0ab311a189b31e25576b7ac3e20783beb1e1817d72d9d02508bf5"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d57bafbab289e147d064ffbd5cca2d7b1394b63417c0636cea1f2e93d16eb9e8"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa2c0913f02341d25fb858e4fb2031e6b0813494cca1ba07d417674128ce11b"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9df21b8d9e5c136ea6cde1c50d2b1c29a2b5ff2b1d610165c23ff250e0704087"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db18ff6b8c0f1917f8b20f8eca35c28bbccb9f83afa94743e03d40203ed83de9"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-win32.whl", hash = "sha256:46954173612617a99a64aee103bcd3f078901b9a8dcfc6ae80cbf34ba23df989"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-win_amd64.whl", hash = 
"sha256:7b7e772dc4bc507fdec4ee20182f15bd60d2a84f1e087a8accf5b5b7a0dcf2ba"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2952748ecd67ed3b56773c185e85fc084f6bdcdec10e5032a7c25a6bc7d682ef"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3151822aa1db0eb5afd65ccfafebe0ef5cda3a7701a279c8d0bf17781a793bb4"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaa8039b6d20137a4e02603aba37d12cd2dde7887500b8855356682fc33933f4"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cdba1f73b64530c47b27118b7053b8447e6d6f3c8104e3ac59f3d40c33aa9fd"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1b2690456528a87234a75d1a1644cdb330a6926f455403c8e4f6cad6921f9098"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf5ae8a9dcf657fd72144a7fd01f243236ea39e7344e579a121c4205aedf07bb"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-win32.whl", hash = "sha256:ea308cec940905ba008291d93619d92edaf83232ec85fbd514dcb329f3192761"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-win_amd64.whl", hash = "sha256:635d8a21577341dfe4f7fa59ec394b346da12420b86624a69e466d446de16aff"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c4096727193762e72ce9437e2a86a110cf081241919ce3fab8e89c02f6b6658"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e4fb5ac86d8fe8151966814f6720996430462e633d225497566b3996966b9bdb"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e56a139bfe136a22c438478a86f8204c1eb5eed36f4e15c4224e4b9db01cb3e4"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f95fc8e3f34b5f6b3effb49d10ac97c569ec8e32f985612d9b25dd12d0d2e94"}, + {file = 
"SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c505edd429abdfe3643fa3b2e83efb3445a34a9dc49d5f692dd087be966020e0"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:12b0f1ec623cccf058cf21cb544f0e74656618165b083d78145cafde156ea7b6"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-win32.whl", hash = "sha256:293f9ade06b2e68dd03cfb14d49202fac47b7bb94bffcff174568c951fbc7af2"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-win_amd64.whl", hash = "sha256:d70f53a0646cc418ca4853da57cf3ddddbccb8c98406791f24426f2dd77fd0e2"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:648ec5acf95ad59255452ef759054f2176849662af4521db6cb245263ae4aa33"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:35bd2df269de082065d4b23ae08502a47255832cc3f17619a5cea92ce478b02b"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f581d365af9373a738c49e0c51e8b18e08d8a6b1b15cc556773bcd8a192fa8b"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82df02816c14f8dc9f4d74aea4cb84a92f4b0620235daa76dde002409a3fbb5a"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94b564e38b344d3e67d2e224f0aec6ba09a77e4582ced41e7bfd0f757d926ec9"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:955a2a765aa1bd81aafa69ffda179d4fe3e2a3ad462a736ae5b6f387f78bfeb8"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-win32.whl", hash = "sha256:03f0528c53ca0b67094c4764523c1451ea15959bbf0a8a8a3096900014db0278"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-win_amd64.whl", hash = "sha256:4b12885dc85a2ab2b7d00995bac6d967bffa8594123b02ed21e8eb2205a7584b"}, + {file = "SQLAlchemy-2.0.37-py3-none-any.whl", hash = "sha256:a8998bf9f8658bd3839cbc44ddbe982955641863da0c1efe5b00c1ab4f5c16b1"}, + {file = "sqlalchemy-2.0.37.tar.gz", hash = 
"sha256:12b28d99a9c14eaf4055810df1001557176716de0167b91026e648e65229bffb"}, +] + [[package]] name = "termcolor" version = "2.5.0" @@ -1062,8 +1182,7 @@ name = "typing-extensions" version = "4.12.2" requires_python = ">=3.8" summary = "Backported and Experimental Type Hints for Python 3.8+" -groups = ["dev"] -marker = "python_version < \"3.11\"" +groups = ["default", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, diff --git a/pyproject.toml b/pyproject.toml index 7b8c2d3..fe69956 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ dependencies = [ "requests", "dasbus", "PyGObject", + "sqlalchemy>=2.0.37", ] requires-python = ">=3.9" # RHEL 9 and 10 readme = "README.md" diff --git a/tests/commands/test_history.py b/tests/commands/test_history.py index ca01c30..32ad80a 100644 --- a/tests/commands/test_history.py +++ b/tests/commands/test_history.py @@ -45,7 +45,7 @@ def test_retrieve_all_conversations_success(mock_proxy, sample_history_entry, ca sample_history_entry ) - HistoryCommand(clear=False, first=False, last=False)._retrieve_all_conversations() + HistoryCommand(clear=False, first=False, last=False).run() captured = capsys.readouterr() assert "Getting all conversations from history" in captured.out @@ -57,21 +57,7 @@ def test_retrieve_all_conversations_empty(mock_proxy, capsys): empty_history = HistoryEntry() mock_proxy.GetHistory.return_value = empty_history.to_structure(empty_history) - HistoryCommand(clear=False, first=False, last=False)._retrieve_all_conversations() - captured = capsys.readouterr() - assert "No history found.\n" in captured.out - - -def test_retrieve_conversation_filtered_empty(mock_proxy, capsys): - """Test retrieving first conversation when history is empty.""" - empty_history = HistoryEntry() - 
mock_proxy.GetFilteredConversation.return_value = empty_history.to_structure( - empty_history - ) - - HistoryCommand( - clear=False, first=True, last=False, filter="missing" - )._retrieve_conversation_filtered(filter="missing") + HistoryCommand(clear=False, first=False, last=False).run() captured = capsys.readouterr() assert "No history found.\n" in captured.out @@ -84,9 +70,7 @@ def test_retrieve_conversation_filtered_success( sample_history_entry ) - HistoryCommand( - clear=False, first=False, last=True, filter="test" - )._retrieve_conversation_filtered(filter="missing") + HistoryCommand(clear=False, first=False, last=False, filter="missing").run() captured = capsys.readouterr() mock_proxy.GetFilteredConversation.assert_called_once() assert ( @@ -101,7 +85,7 @@ def test_retrieve_first_conversation_success(mock_proxy, sample_history_entry, c sample_history_entry ) - HistoryCommand(clear=False, first=True, last=False)._retrieve_first_conversation() + HistoryCommand(clear=False, first=True, last=False).run() captured = capsys.readouterr() mock_proxy.GetFirstConversation.assert_called_once() assert ( @@ -117,7 +101,7 @@ def test_retrieve_first_conversation_empty(mock_proxy, capsys): empty_history ) - HistoryCommand(clear=False, first=True, last=False)._retrieve_first_conversation() + HistoryCommand(clear=False, first=True, last=False).run() captured = capsys.readouterr() assert "No history found.\n" in captured.out @@ -128,7 +112,7 @@ def test_retrieve_last_conversation_success(mock_proxy, sample_history_entry, ca sample_history_entry ) - HistoryCommand(clear=False, first=False, last=True)._retrieve_last_conversation() + HistoryCommand(clear=False, first=False, last=True).run() captured = capsys.readouterr() mock_proxy.GetLastConversation.assert_called_once() assert ( @@ -144,14 +128,40 @@ def test_retrieve_last_conversation_empty(mock_proxy, capsys): empty_history ) - HistoryCommand(clear=False, first=False, last=True)._retrieve_last_conversation() + 
HistoryCommand(clear=False, first=False, last=True).run() captured = capsys.readouterr() assert "No history found.\n" in captured.out def test_clear_history_success(mock_proxy, capsys): """Test clearing history successfully.""" - HistoryCommand(clear=True, first=False, last=False)._clear_history() + HistoryCommand(clear=True, first=False, last=False).run() captured = capsys.readouterr() assert "Cleaning the history" in captured.out mock_proxy.ClearHistory.assert_called_once() + + +@pytest.mark.parametrize( + ("query", "response", "expected"), + ( + ( + "test", + "test", + "\x1b[92mQuery: test\x1b[0m\n\x1b[94mAnswer: test\x1b[0m\nTime:\n", + ), + ), +) +def test_show_history(query, response, expected, capsys): + item = HistoryItem() + item.query = query + item.response = response + HistoryCommand(clear=False, first=False, last=False)._show_history([item]) + + captured = capsys.readouterr() + assert expected in captured.out + + +def test_show_history_no_entries(capsys): + HistoryCommand(clear=False, first=False, last=False)._show_history([]) + captured = capsys.readouterr() + assert "No history found." 
in captured.out diff --git a/tests/config/test_config.py b/tests/config/test_config.py index ab2fecb..75875f0 100644 --- a/tests/config/test_config.py +++ b/tests/config/test_config.py @@ -13,7 +13,6 @@ @pytest.fixture def get_config_template(tmp_path) -> str: output_file = tmp_path / "output.tmp" - history_file = tmp_path / "history.json" return f"""\ [output] @@ -26,7 +25,6 @@ def get_config_template(tmp_path) -> str: [history] enabled = true -file = "{history_file}" # max number of queries in history (including responses) [backend] diff --git a/tests/config/test_schema.py b/tests/config/test_schema.py index 269e12c..d396945 100644 --- a/tests/config/test_schema.py +++ b/tests/config/test_schema.py @@ -1,3 +1,5 @@ +from pathlib import Path + import pytest from command_line_assistant.config import schemas @@ -11,6 +13,7 @@ (schemas.BackendSchema,), (schemas.HistorySchema,), (schemas.AuthSchema,), + (schemas.DatabaseSchema,), ), ) def test_initialize_schemas(schema): @@ -25,3 +28,30 @@ def test_logging_schema_invalid_level(): ValueError, match="The requested level 'NOT_FOUND' is not allowed." 
): schemas.LoggingSchema(level=level) + + +def test_database_schema_invalid_type(): + type = "NOT_FOUND_DB" + with pytest.raises( + ValueError, match="The database type must be one of .*, not NOT_FOUND_DB" + ): + schemas.DatabaseSchema(type=type) + + +@pytest.mark.parametrize( + ("type", "port", "connection_string"), + ( + ("sqlite", None, "sqlite:/test"), + ("mysql", 3306, None), + ("postgresql", 5432, None), + ), +) +def test_database_schema_default_initialization(type, port, connection_string): + result = schemas.DatabaseSchema( + type=type, port=port, connection_string=connection_string, database="test" + ) + + assert result.port == port + if connection_string: + assert result.connection_string == Path(connection_string) + assert result.type == type diff --git a/tests/conftest.py b/tests/conftest.py index 09e3774..460b6ec 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,7 +13,7 @@ LoggingSchema, OutputSchema, ) -from command_line_assistant.config.schemas import AuthSchema +from command_line_assistant.config.schemas import AuthSchema, DatabaseSchema from command_line_assistant.logger import LOGGING_CONFIG_DICTIONARY from tests.helpers import MockStream @@ -42,7 +42,7 @@ def mock_config(tmp_path): """Fixture to create a mock configuration""" cert_file = tmp_path / "cert.pem" key_file = tmp_path / "key.pem" - history_file = tmp_path / "command-line-assistant" / "history.json" + history_db = tmp_path / "history.db" cert_file.write_text("cert") key_file.write_text("key") @@ -56,7 +56,9 @@ def mock_config(tmp_path): endpoint="http://test.endpoint/v1/query", auth=AuthSchema(cert_file=cert_file, key_file=key_file, verify_ssl=False), ), - history=HistorySchema(enabled=True, file=history_file), + history=HistorySchema( + enabled=True, database=DatabaseSchema(connection_string=history_db) + ), logging=LoggingSchema(level="debug"), ) @@ -77,38 +79,3 @@ def mock_proxy(): @pytest.fixture def mock_stream(): return MockStream() - - -@pytest.fixture -def 
"""Tests for the daemon's DatabaseManager wrapper around SQLAlchemy."""

import uuid
from unittest.mock import Mock, patch

import pytest
from sqlalchemy.exc import SQLAlchemyError

from command_line_assistant.daemon.database.manager import (
    ConnectionError,
    DatabaseManager,
    QueryError,
)
from command_line_assistant.daemon.database.models.history import (
    HistoryModel,
    InteractionModel,
)


def _make_interaction(**overrides):
    """Build an InteractionModel populated with canonical test values."""
    values = {
        "query_text": "test query",
        "response_text": "test response",
        "os_distribution": "RHEL",
        "os_version": "8",
        "os_arch": "x86_64",
    }
    values.update(overrides)
    return InteractionModel(**values)


@pytest.fixture
def database_manager(mock_config):
    """Provide a DatabaseManager whose tables have already been created."""
    manager = DatabaseManager(mock_config)
    manager.connect()  # creates the tables
    return manager


def test_database_manager_initialization(mock_config):
    """A freshly constructed manager stores the config and builds its engine."""
    manager = DatabaseManager(mock_config)

    assert manager._config == mock_config
    assert manager._engine is not None
    assert manager._session_factory is not None


def test_database_manager_initialization_failure(mock_config):
    """Engine-creation errors surface as ConnectionError."""

    def boom():
        raise Exception("Connection failed")

    mock_config.history.database.get_connection_url = boom

    with pytest.raises(ConnectionError, match="Could not create database engine"):
        DatabaseManager(mock_config)


def test_connect_success(database_manager):
    """connect() raises nothing on a healthy database (it is idempotent here)."""
    database_manager.connect()


def test_connect_failure(mock_config):
    """Table-creation errors are wrapped in ConnectionError."""
    manager = DatabaseManager(mock_config)

    # Force SQLAlchemy's create_all to blow up during table creation.
    with patch("sqlalchemy.MetaData.create_all") as create_all:
        create_all.side_effect = SQLAlchemyError("Failed to create tables")

        with pytest.raises(ConnectionError, match="Could not create tables"):
            manager.connect()


def test_session_context_manager(database_manager):
    """The session context manager hands out a live, active session."""
    with database_manager.session() as session:
        assert session is not None
        assert session.is_active


def test_session_context_manager_with_error(database_manager):
    """SQLAlchemy errors raised inside the context are wrapped in QueryError."""
    with pytest.raises(QueryError), database_manager.session():
        raise SQLAlchemyError("Database error")


def test_add_success(database_manager):
    """add() persists a model so it can be fetched back by primary key."""
    uid = uuid.uuid4()
    record = HistoryModel(interaction=_make_interaction(id=uid))

    database_manager.add(record)

    assert database_manager.get(InteractionModel, uid)


def test_add_failure(database_manager):
    """Non-model objects make add() raise QueryError."""
    # A plain Mock is not a mapped model, so SQLAlchemy rejects it.
    with pytest.raises(QueryError, match="Failed to add instance"):
        database_manager.add(Mock())


def test_query_success(database_manager):
    """query() returns previously persisted rows of the requested model."""
    record = HistoryModel(id=uuid.uuid4(), interaction=_make_interaction())
    database_manager.add(record)

    rows = database_manager.query(HistoryModel)

    assert len(rows) > 0
    assert isinstance(rows[0], HistoryModel)


def test_query_failure(database_manager):
    """Session-level query errors are wrapped in QueryError."""
    with patch("sqlalchemy.orm.Session.query") as session_query:
        session_query.side_effect = SQLAlchemyError("Query failed")

        with pytest.raises(QueryError, match="Failed to query instances"):
            database_manager.query(HistoryModel)


def test_get_success(database_manager):
    """get() retrieves a persisted row by its primary key."""
    uid = uuid.uuid4()
    database_manager.add(HistoryModel(id=uid, interaction=_make_interaction()))

    assert database_manager.get(HistoryModel, uid)


def test_get_failure(database_manager):
    """Session-level lookup errors are wrapped in QueryError."""
    with patch("sqlalchemy.orm.Session.query") as session_query:
        session_query.side_effect = SQLAlchemyError("Get failed")

        with pytest.raises(QueryError, match="Failed to get instance"):
            database_manager.get(HistoryModel, "123")
-1,4 +1,3 @@ -import json from unittest.mock import Mock, patch import pytest @@ -9,7 +8,14 @@ QueryInterface, ) from command_line_assistant.dbus.structures import HistoryEntry, Message -from command_line_assistant.history.schemas import History +from command_line_assistant.history.manager import HistoryManager +from command_line_assistant.history.plugins.local import LocalHistory + + +@pytest.fixture +def mock_history_entry(mock_config): + manager = HistoryManager(mock_config, LocalHistory) + return manager @pytest.fixture @@ -42,8 +48,6 @@ def history_interface(mock_implementation): def test_query_interface_retrieve_answer(query_interface, mock_implementation): """Test retrieving answer from query interface.""" expected_response = "test response" - mock_implementation.config.history.file.parent.mkdir() - mock_implementation.config.history.file.write_text(History().to_json()) with patch( "command_line_assistant.dbus.interfaces.submit", return_value=expected_response ) as mock_submit: @@ -70,14 +74,12 @@ def test_query_interface_process_query(query_interface, mock_implementation): assert processed_query.message == test_query.message -def test_history_interface_get_history( - history_interface, mock_implementation, sample_history_data -): +def test_history_interface_get_history(history_interface, mock_history_entry): """Test getting all history through history interface.""" - mock_history = History.from_json(json.dumps(sample_history_data)) - - with patch("command_line_assistant.dbus.interfaces.HistoryManager") as mock_manager: - mock_manager.return_value.read.return_value = mock_history + with patch( + "command_line_assistant.history.manager.HistoryManager", mock_history_entry + ) as manager: + manager.write("test query", "test response") response = history_interface.GetHistory() reconstructed = HistoryEntry.from_structure(response) @@ -87,13 +89,14 @@ def test_history_interface_get_history( def test_history_interface_get_first_conversation( - history_interface, 
mock_implementation, sample_history_data + history_interface, mock_history_entry ): """Test getting first conversation through history interface.""" - mock_history = History.from_json(json.dumps(sample_history_data)) - with patch("command_line_assistant.dbus.interfaces.HistoryManager") as mock_manager: - mock_manager.return_value.read.return_value = mock_history + with patch( + "command_line_assistant.history.manager.HistoryManager", mock_history_entry + ) as manager: + manager.write("test query", "test response") response = history_interface.GetFirstConversation() reconstructed = HistoryEntry.from_structure(response) @@ -102,14 +105,12 @@ def test_history_interface_get_first_conversation( assert reconstructed.entries[0].response == "test response" -def test_history_interface_get_last_conversation( - history_interface, mock_implementation, sample_history_data -): +def test_history_interface_get_last_conversation(history_interface, mock_history_entry): """Test getting first conversation through history interface.""" - mock_history = History.from_json(json.dumps(sample_history_data)) - - with patch("command_line_assistant.dbus.interfaces.HistoryManager") as mock_manager: - mock_manager.return_value.read.return_value = mock_history + with patch( + "command_line_assistant.history.manager.HistoryManager", mock_history_entry + ) as manager: + manager.write("test query", "test response") response = history_interface.GetLastConversation() reconstructed = HistoryEntry.from_structure(response) @@ -119,52 +120,13 @@ def test_history_interface_get_last_conversation( def test_history_interface_get_filtered_conversation( - history_interface, mock_implementation, sample_history_data + history_interface, mock_history_entry ): """Test getting filtered conversation through history interface.""" - mock_history = History.from_json(json.dumps(sample_history_data)) - - with patch("command_line_assistant.dbus.interfaces.HistoryManager") as mock_manager: - 
mock_manager.return_value.read.return_value = mock_history - response = history_interface.GetFilteredConversation(filter="test") - - reconstructed = HistoryEntry.from_structure(response) - assert len(reconstructed.entries) == 1 - assert reconstructed.entries[0].query == "test query" - assert reconstructed.entries[0].response == "test response" - - -def test_history_interface_get_filtered_conversation_duplicate_entries( - history_interface, mock_implementation, sample_history_data -): - """Test getting filtered conversation through duplicate history interface.""" - # Add a new entry manually - sample_history_data["history"].append( - { - "id": "test-id", - "timestamp": "2024-01-01T00:00:00Z", - "interaction": { - "query": {"text": "test query", "role": "user"}, - "response": { - "text": "test response", - "tokens": 2, - "role": "assistant", - }, - }, - "metadata": { - "session_id": "test-session", - "os_info": { - "distribution": "RHEL", - "version": "test", - "arch": "x86_64", - }, - }, - } - ) - mock_history = History.from_json(json.dumps(sample_history_data)) - - with patch("command_line_assistant.dbus.interfaces.HistoryManager") as mock_manager: - mock_manager.return_value.read.return_value = mock_history + with patch( + "command_line_assistant.history.manager.HistoryManager", mock_history_entry + ) as manager: + manager.write("test query", "test response") response = history_interface.GetFilteredConversation(filter="test") reconstructed = HistoryEntry.from_structure(response) @@ -174,39 +136,17 @@ def test_history_interface_get_filtered_conversation_duplicate_entries( def test_history_interface_get_filtered_conversation_duplicate_entries_not_matching( - history_interface, mock_implementation, sample_history_data + history_interface, mock_history_entry ): """Test getting filtered conversation through duplicated history interface. This test will have a duplicated entry, but not matching the "id". 
This should be enough to be considered a new entry """ - # Add a new entry manually - sample_history_data["history"].append( - { - "id": "test-other-id", - "timestamp": "2024-01-01T00:00:00Z", - "interaction": { - "query": {"text": "test query", "role": "user"}, - "response": { - "text": "test response", - "tokens": 2, - "role": "assistant", - }, - }, - "metadata": { - "session_id": "test-session", - "os_info": { - "distribution": "RHEL", - "version": "test", - "arch": "x86_64", - }, - }, - } - ) - mock_history = History.from_json(json.dumps(sample_history_data)) - - with patch("command_line_assistant.dbus.interfaces.HistoryManager") as mock_manager: - mock_manager.return_value.read.return_value = mock_history + with patch( + "command_line_assistant.history.manager.HistoryManager", mock_history_entry + ) as manager: + manager.write("test query", "test response") + manager.write("test query", "test response") response = history_interface.GetFilteredConversation(filter="test") reconstructed = HistoryEntry.from_structure(response) @@ -222,13 +162,11 @@ def test_history_interface_clear_history(history_interface): mock_manager.return_value.clear.assert_called_once() -def test_history_interface_empty_history(history_interface, mock_implementation): +def test_history_interface_empty_history(history_interface): """Test handling empty history in all methods.""" - empty_history = History() - - with patch("command_line_assistant.dbus.interfaces.HistoryManager") as mock_manager: - mock_manager.return_value.read.return_value = empty_history - + with patch( + "command_line_assistant.history.manager.HistoryManager", mock_history_entry + ): # Test all methods with empty history for method in [ history_interface.GetHistory, diff --git a/tests/history/plugins/test_local.py b/tests/history/plugins/test_local.py index 8a276bb..24d915f 100644 --- a/tests/history/plugins/test_local.py +++ b/tests/history/plugins/test_local.py @@ -1,147 +1,205 @@ -import json -import logging -from 
"""Tests for the local (database-backed) history plugin."""

import uuid
from datetime import datetime, timezone
from unittest.mock import Mock, create_autospec, patch

import pytest
from sqlalchemy.orm import Session

from command_line_assistant.daemon.database.manager import DatabaseManager
from command_line_assistant.daemon.database.models.history import (
    HistoryModel,
    InteractionModel,
)
from command_line_assistant.dbus.exceptions import (
    CorruptedHistoryError,
    MissingHistoryFileError,
)
from command_line_assistant.history.plugins.local import LocalHistory


@pytest.fixture
def mock_db_session() -> Mock:
    """Fixture for a database session autospecced from SQLAlchemy's Session."""
    return create_autospec(Session, instance=True)


@pytest.fixture
def mock_db_manager(mock_db_session: Mock) -> Mock:
    """Fixture for DatabaseManager with a mocked session.

    ``session()`` is used as a context manager by the plugin, so route
    ``__enter__`` to the mocked session and make ``__exit__`` a no-op.
    """
    db_manager = create_autospec(DatabaseManager, instance=True)
    db_manager.session.return_value.__enter__.return_value = mock_db_session
    db_manager.session.return_value.__exit__.return_value = None
    return db_manager


@pytest.fixture
def local_history(mock_config: Mock, mock_db_manager: Mock) -> LocalHistory:
    """Fixture for a LocalHistory instance with mocked dependencies."""
    with patch(
        "command_line_assistant.history.plugins.local.DatabaseManager",
        return_value=mock_db_manager,
    ):
        history = LocalHistory(mock_config)
    return history


class TestLocalHistoryInitialization:
    """Test cases for LocalHistory initialization."""

    def test_initialization_success(self, mock_config: Mock):
        """Should initialize successfully."""
        with patch(
            "command_line_assistant.history.plugins.local.DatabaseManager"
        ) as mock_db:
            mock_db.return_value = create_autospec(DatabaseManager, instance=True)
            history = LocalHistory(mock_config)
            assert isinstance(history._db, DatabaseManager)

    def test_initialization_failure(self, mock_config: Mock):
        """Should raise MissingHistoryFileError on initialization failure."""
        with patch(
            "command_line_assistant.history.plugins.local.DatabaseManager"
        ) as mock_db:
            mock_db.side_effect = Exception("DB Init Error")
            with pytest.raises(
                MissingHistoryFileError, match="Could not initialize database"
            ):
                LocalHistory(mock_config)


class TestLocalHistoryRead:
    """Test cases for reading history."""

    def test_read_disabled_history(
        self, local_history: LocalHistory, mock_config: Mock
    ):
        """Should return empty list when history is disabled."""
        mock_config.history.enabled = False
        assert local_history.read() == []

    def test_read_success(self, local_history: LocalHistory, mock_db_session: Mock):
        """Should successfully read and format history entries."""
        # Create mock history entries
        mock_interaction = Mock(spec=InteractionModel)
        mock_interaction.query_text = "test query"
        mock_interaction.response_text = "test response"

        mock_history = Mock(spec=HistoryModel)
        mock_history.interaction = mock_interaction
        # datetime.utcnow() is deprecated (3.12+); use an aware UTC timestamp.
        mock_history.timestamp = datetime.now(timezone.utc)

        # Mirror the plugin's query().join().filter().order_by().all() chain.
        mock_db_session.query.return_value.join.return_value.filter.return_value.order_by.return_value.all.return_value = [
            mock_history
        ]

        result = local_history.read()

        assert len(result) == 1
        assert result[0]["query"] == "test query"
        assert result[0]["response"] == "test response"
        assert "timestamp" in result[0]

    def test_read_failure(self, local_history: LocalHistory, mock_db_session: Mock):
        """Should raise CorruptedHistoryError on read failure."""
        mock_db_session.query.side_effect = Exception("DB Read Error")

        with pytest.raises(CorruptedHistoryError, match="Failed to read from database"):
            local_history.read()


class TestLocalHistoryWrite:
    """Test cases for writing history."""

    def test_write_disabled_history(
        self, local_history: LocalHistory, mock_config: Mock
    ):
        """Should not write when history is disabled."""
        mock_config.history.enabled = False
        local_history.write("query", "response")
        assert local_history._db.session.call_count == 0  # type: ignore

    @pytest.mark.parametrize(
        "query,response",
        [
            ("test query", "test response"),
            ("", "empty query test"),
            ("empty response test", ""),
        ],
    )
    def test_write_success(
        self,
        local_history: LocalHistory,
        mock_db_session: Mock,
        query: str,
        response: str,
    ):
        """Should successfully write history entries."""
        with patch(
            "uuid.uuid4", return_value=uuid.UUID("12345678-1234-5678-1234-567812345678")
        ):
            local_history.write(query, response)

        # Verify interaction was created with correct attributes
        mock_db_session.add.assert_called()
        calls = mock_db_session.add.call_args_list

        # First call should be InteractionModel
        interaction = calls[0][0][0]
        assert isinstance(interaction, InteractionModel)
        assert interaction.query_text == query  # type: ignore
        assert interaction.response_text == response  # type: ignore
        assert interaction.session_id is not None

        # Second call should be HistoryModel
        history = calls[1][0][0]
        assert isinstance(history, HistoryModel)
        assert history.interaction == interaction

    def test_write_failure(self, local_history: LocalHistory, mock_db_session: Mock):
        """Should raise CorruptedHistoryError on write failure."""
        mock_db_session.add.side_effect = Exception("DB Write Error")

        with pytest.raises(CorruptedHistoryError, match="Failed to write to database"):
            local_history.write("query", "response")


class TestLocalHistoryClear:
    """Test cases for clearing history."""

    def test_clear_success(self, local_history: LocalHistory, mock_db_session: Mock):
        """Should successfully clear history."""
        local_history.clear()

        # Verify soft delete was performed
        mock_db_session.query.return_value.update.assert_called_once()
        update_args = mock_db_session.query.return_value.update.call_args[0][0]
        assert "deleted_at" in update_args
        assert isinstance(update_args["deleted_at"], datetime)

    def test_clear_failure(self, local_history: LocalHistory, mock_db_session: Mock):
        """Should raise MissingHistoryFileError on clear failure."""
        mock_db_session.query.return_value.update.side_effect = Exception(
            "DB Clear Error"
        )

        with pytest.raises(MissingHistoryFileError, match="Failed to clear database"):
            local_history.clear()


def test_integration_workflow(local_history: LocalHistory, mock_db_session: Mock):
    """Integration test for full local history workflow."""
    # Setup mock responses
    mock_db_session.query.return_value.join.return_value.filter.return_value.order_by.return_value.all.return_value = []

    # Test read (empty)
    assert local_history.read() == []

    # Test write
    local_history.write("test query", "test response")
    assert (
        mock_db_session.add.call_count == 2
    )  # One for InteractionModel, one for HistoryModel

    # Test clear
    local_history.clear()
    mock_db_session.query.return_value.update.assert_called_once()
class MockHistoryPlugin(BaseHistoryPlugin):
    """Spy plugin that records which BaseHistoryPlugin hooks were invoked."""

    def __init__(self, config):
        super().__init__(config)
        self.read_called = False
        self.write_called = False
        self.clear_called = False

    def read(self):
        self.read_called = True
        return []

    def write(self, query: str, response: str) -> None:
        self.write_called = True

    def clear(self) -> None:
        self.clear_called = True


@pytest.fixture
def history_manager(mock_config):
    """Provide a HistoryManager wired to the real local (database) plugin."""
    return HistoryManager(mock_config, plugin=LocalHistory)


def test_history_manager_read(history_manager):
    """Test reading history"""
    entries = history_manager.read()

    assert isinstance(entries, list)
    assert not entries
    assert isinstance(history_manager._instance, LocalHistory)


def test_history_manager_write(history_manager):
    """Test writing history"""
    question = "How do I check disk space?"
    answer = "Use the df command"

    history_manager.write(question, answer)

    # The pair just written must round-trip through read() verbatim.
    entries = history_manager.read()
    assert len(entries) == 1
    assert entries[0]["query"] == question
    assert entries[0]["response"] == answer


def test_history_manager_clear(history_manager):
    """Test clearing history"""
    # First write something
    history_manager.write("test query", "test response")
    assert len(history_manager.read()) == 1

    # Then clear it
    history_manager.clear()
    assert not history_manager.read()
@@ -import json -from datetime import datetime - -import pytest - -from command_line_assistant.history.schemas import ( - EntryMetadata, - History, - HistoryEntry, - HistoryMetadata, - InteractionData, - OSInfo, - QueryData, - ResponseData, -) - - -def test_query_data_initialization(): - """Test QueryData initialization and defaults""" - query = QueryData() - assert query.text is None - assert query.role == "user" - - # Test with values - query = QueryData(text="test query", role="custom") - assert query.text == "test query" - assert query.role == "custom" - - -def test_response_data_initialization(): - """Test ResponseData initialization and defaults""" - response = ResponseData() - assert response.text is None - assert response.tokens == 0 - assert response.role == "assistant" - - # Test with values - response = ResponseData(text="test response", tokens=42, role="custom") - assert response.text == "test response" - assert response.tokens == 42 - assert response.role == "custom" - - -def test_interaction_data_initialization(): - """Test InteractionData initialization and defaults""" - interaction = InteractionData() - assert isinstance(interaction.query, QueryData) - assert isinstance(interaction.response, ResponseData) - - # Test with custom query and response - query = QueryData(text="test query") - response = ResponseData(text="test response") - interaction = InteractionData(query=query, response=response) - assert interaction.query.text == "test query" - assert interaction.response.text == "test response" - - -def test_os_info_initialization(): - """Test OSInfo initialization and defaults""" - os_info = OSInfo() - assert os_info.distribution == "RHEL" - assert isinstance(os_info.version, str) - assert isinstance(os_info.arch, str) - - # Test with custom values - os_info = OSInfo(distribution="Ubuntu", version="22.04", arch="x86_64") - assert os_info.distribution == "Ubuntu" - assert os_info.version == "22.04" - assert os_info.arch == "x86_64" - - -def 
test_entry_metadata_initialization(): - """Test EntryMetadata initialization""" - metadata = EntryMetadata() - assert isinstance(metadata.session_id, str) - assert isinstance(metadata.os_info, OSInfo) - - # Verify UUID format - import uuid - - uuid.UUID(metadata.session_id) # Should not raise exception - - -def test_history_entry_initialization(): - """Test HistoryEntry initialization and to_dict method""" - entry = HistoryEntry() - assert isinstance(entry.id, str) - assert isinstance(entry.timestamp, str) - assert isinstance(entry.interaction, InteractionData) - assert isinstance(entry.metadata, EntryMetadata) - - -def test_history_entry_to_dict(): - """Test HistoryEntry to_dict conversion""" - entry = HistoryEntry( - interaction=InteractionData( - QueryData("test query"), ResponseData("test response") - ) - ) - - entry_dict = entry.to_dict() - assert isinstance(entry_dict, dict) - assert entry_dict["interaction"]["query"]["text"] == "test query" - assert entry_dict["interaction"]["response"]["text"] == "test response" - assert "id" in entry_dict - assert "timestamp" in entry_dict - assert "metadata" in entry_dict - - -def test_history_metadata_initialization(): - """Test HistoryMetadata initialization""" - metadata = HistoryMetadata() - assert isinstance(metadata.last_updated, str) - assert isinstance(metadata.version, str) - assert metadata.entry_count == 0 - assert metadata.size_bytes == 0 - - -def test_history_initialization(): - """Test History initialization""" - history = History() - assert isinstance(history.history, list) - assert len(history.history) == 0 - assert isinstance(history.metadata, HistoryMetadata) - - -def test_history_json_serialization(): - """Test History to_json and from_json methods""" - # Create a history with some test data - history = History() - entry = HistoryEntry( - interaction=InteractionData( - QueryData("test query"), ResponseData("test response") - ) - ) - history.history.append(entry) - - # Convert to JSON - json_str = 
history.to_json() - assert isinstance(json_str, str) - - # Parse JSON string to verify structure - parsed = json.loads(json_str) - assert "history" in parsed - assert "metadata" in parsed - assert len(parsed["history"]) == 1 - - # Convert back from JSON - new_history = History.from_json(json_str) - assert isinstance(new_history, History) - assert len(new_history.history) == 1 - assert new_history.history[0].interaction.query.text == "test query" - assert new_history.history[0].interaction.response.text == "test response" - - -def test_history_with_multiple_entries(): - """Test History with multiple entries""" - history = History() - - # Add multiple entries - entries = [ - ("query1", "response1"), - ("query2", "response2"), - ("query3", "response3"), - ] - - for query_text, response_text in entries: - entry = HistoryEntry( - interaction=InteractionData( - QueryData(query_text), ResponseData(response_text) - ) - ) - history.history.append(entry) - - # Verify entries - assert len(history.history) == len(entries) - for i, (query_text, response_text) in enumerate(entries): - assert history.history[i].interaction.query.text == query_text - assert history.history[i].interaction.response.text == response_text - - -def test_history_json_roundtrip_with_special_characters(): - """Test History JSON serialization with special characters""" - history = History() - entry = HistoryEntry( - interaction=InteractionData( - QueryData("test\nquery with 'special' \"characters\" & symbols"), - ResponseData("response\twith\nspecial\rcharacters"), - ) - ) - history.history.append(entry) - - # Roundtrip through JSON - json_str = history.to_json() - new_history = History.from_json(json_str) - - assert new_history.history[0].interaction.query.text == entry.interaction.query.text - assert ( - new_history.history[0].interaction.response.text - == entry.interaction.response.text - ) - - -@pytest.mark.parametrize("invalid_json", ["", "{}", '{"invalid": "data"}']) -def 
test_history_from_json_with_invalid_data(invalid_json): - """Test History.from_json with invalid JSON data""" - with pytest.raises((KeyError, json.JSONDecodeError)): - History.from_json(invalid_json) - - -def test_history_entry_timestamp_format(): - """Test that HistoryEntry timestamps are in the correct format""" - entry = HistoryEntry() - # Verify the timestamp is in ISO format - try: - datetime.fromisoformat(entry.timestamp.rstrip("Z")) - except ValueError: - pytest.fail("Timestamp is not in valid ISO format") From a786c1d2a9ae3b8362913ce650b3c4cf866cabe9 Mon Sep 17 00:00:00 2001 From: Rodolfo Olivieri Date: Tue, 14 Jan 2025 08:31:28 -0300 Subject: [PATCH 2/2] Fix build --- .packit.yaml | 6 +++--- packaging/command-line-assistant.spec | 8 +++----- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/.packit.yaml b/.packit.yaml index 3bfab6a..a782896 100644 --- a/.packit.yaml +++ b/.packit.yaml @@ -13,8 +13,8 @@ jobs: project: command-line-assistant targets: - epel-9-x86_64 - - epel-10-x86_64 - epel-9-aarch64 + - epel-10-x86_64 - epel-10-aarch64 actions: # do not get the version from a tag (git describe) but from the spec file @@ -29,8 +29,8 @@ jobs: project: command-line-assistant targets: - epel-9-x86_64 - - epel-10-x86_64 - epel-9-aarch64 + - epel-10-x86_64 - epel-10-aarch64 actions: # bump spec so we get release starting with 2 and hence all the default branch builds will @@ -49,8 +49,8 @@ jobs: project: command-line-assistant targets: - epel-9-x86_64 - - epel-10-x86_64 - epel-9-aarch64 + - epel-10-x86_64 - epel-10-aarch64 actions: # bump spec so we get release starting with 2 and hence all the default branch builds will diff --git a/packaging/command-line-assistant.spec b/packaging/command-line-assistant.spec index 7e803ad..bcae8d5 100644 --- a/packaging/command-line-assistant.spec +++ b/packaging/command-line-assistant.spec @@ -21,12 +21,7 @@ BuildRequires: systemd-units Requires: python3-dasbus Requires: python3-requests -# In RHEL 9 we only have 
version 1.4+ -%if 0%{?rhel} && 0%{?rhel} < 10 -Requires: python3-sqlalchemy >= 1.4.45 -%else Requires: python3-sqlalchemy -%endif Requires: systemd # Not needed after RHEL 10 as it is native in Python 3.11+ @@ -35,6 +30,9 @@ BuildRequires: python3-tomli Requires: python3-tomli %endif +# Ref: https://docs.fedoraproject.org/en-US/packaging-guidelines/Python_201x/#_automatically_generated_dependencies +%{?python_disable_dependency_generator} + %description A simple wrapper to interact with RAG