feat(lcel): migrated to lcel and pydantic (#2185)
# Description

Please include a summary of the changes and the related issue. Please
also include relevant motivation and context.

## Checklist before requesting a review

Please delete options that are not relevant.

- [ ] My code follows the style guidelines of this project
- [ ] I have performed a self-review of my code
- [ ] I have commented hard-to-understand areas
- [ ] I have ideally added tests that prove my fix is effective or that
my feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] Any dependent changes have been merged

## Screenshots (if appropriate):

---------

Co-authored-by: Antoine Dewez <44063631+Zewed@users.noreply.github.com>
StanGirard and Zewed authored Feb 14, 2024
1 parent 2ba3bc1 commit 08e015a
Showing 33 changed files with 969 additions and 713 deletions.
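The hunks rendered below cover mostly the Pydantic v2 and pydantic-settings side of the migration; the LCEL rework itself sits in files that are not shown in this excerpt. As a rough illustration of what "migrated to LCEL" refers to (a sketch only, not code from this commit; the prompt, model name, and variable names are made up), an LCEL chain composes runnables with the `|` operator:

```python
# Illustrative LCEL sketch only; not taken from this commit.
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "Answer the question using this context:\n{context}"),
        ("human", "{question}"),
    ]
)

# prompt | model | parser: each step is a Runnable, chained with "|".
chain = prompt | ChatOpenAI(model="gpt-3.5-turbo") | StrOutputParser()

answer = chain.invoke(
    {"context": "Quivr stores documents in brains.", "question": "What is a brain?"}
)
```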
29 changes: 16 additions & 13 deletions Pipfile
@@ -6,46 +6,49 @@ name = "pypi"
 [packages]
 langchain = "*"
 litellm = "*"
-openai = "==1.1.1"
+openai = "*"
 gitpython = "==3.1.36"
 pdf2image = "==1.16.3"
 nest-asyncio = "==1.5.6"
 pypdf = "==3.9.0"
-supabase = "==1.1.0"
-tiktoken = "==0.4.0"
-fastapi = "==0.95.2"
+supabase = "*"
+tiktoken = "*"
+fastapi = "*"
 python-multipart = "==0.0.6"
 uvicorn = "==0.22.0"
 pypandoc = "==1.11"
 docx2txt = "==0.8"
 python-jose = "==3.3.0"
 asyncpg = "==0.27.0"
-flake8 = "==6.0.0"
-flake8-black = "==0.3.6"
+flake8 = "*"
+flake8-black = "*"
 sentry-sdk = {extras = ["fastapi"] }
 pyright = "==1.1.316"
-resend = "==0.5.1"
+resend = "*"
 html5lib = "==1.1"
 beautifulsoup4 = "*"
 newspaper3k = "*"
-xlrd = "==1.0.0"
-redis = "==4.5.4"
+xlrd = "*"
+redis = "*"
 flower = "*"
-boto3 = "==1.33.7"
-botocore = "==1.33.7"
+boto3 = "*"
+botocore = "*"
 celery = {extras = ["sqs"] }
 python-dotenv = "*"
 pytest-mock = "*"
 pytest-celery = "*"
-pytesseract = "==0.3.10"
+pytesseract = "*"
 async-generator = "*"
-posthog = "==3.1.0"
+posthog = "*"
 jq = "==1.6.0"
 pytest = "*"
 ddtrace = "*"
 watchdog = "*"
+langchain-community = "*"
+langchain-openai = "*"
+pydantic-settings = "*"
 unstructured = {extras = ["all-docs"], version = "*"}
 langfuse = "*"

 [dev-packages]
 black = "*"
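Beyond the loosened version pins, the notable additions are langchain-community, langchain-openai, and pydantic-settings, which follow the upstream package splits: LangChain integrations and Pydantic's BaseSettings now ship as separate distributions. A minimal sketch of where the imports move (illustrative; the exact classes Quivr uses may differ):

```python
# New-style import locations implied by the added Pipfile entries (sketch).
from langchain_community.vectorstores import SupabaseVectorStore  # was langchain.vectorstores
from langchain_openai import ChatOpenAI, OpenAIEmbeddings  # was langchain.chat_models / langchain.embeddings
from pydantic_settings import BaseSettings  # was pydantic.BaseSettings in Pydantic v1
```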
803 changes: 442 additions & 361 deletions Pipfile.lock

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions backend/Dockerfile.dev
@@ -40,9 +40,11 @@ RUN pip install --no-cache-dir -r requirements.txt --timeout 200
 
 RUN if [ "$DEV_MODE" = "true" ]; then pip install --no-cache debugpy --timeout 200; fi
 
+WORKDIR /code
+# Copy the rest of the application
 COPY . .
 
 
 EXPOSE 5050
 
 CMD ["uvicorn", "main:app","--reload", "--host", "0.0.0.0", "--port", "5050", "--workers", "6"]
16 changes: 11 additions & 5 deletions backend/llm/utils/format_chat_history.py
@@ -1,12 +1,18 @@
-from typing import List, Tuple
+from typing import Dict, List, Tuple
 
 from langchain.schema import AIMessage, BaseMessage, HumanMessage, SystemMessage
+from modules.chat.dto.outputs import GetChatHistoryOutput
 
 
-def format_chat_history(history) -> List[Tuple[str, str]]:
-    """Format the chat history into a list of tuples (human, ai)"""
-
-    return [(chat.user_message, chat.assistant) for chat in history]
+def format_chat_history(
+    history: List[GetChatHistoryOutput],
+) -> List[Dict[str, str]]:
+    """Format the chat history into a list of HumanMessage and AIMessage"""
+    formatted_history = []
+    for chat in history:
+        formatted_history.append(HumanMessage(chat.user_message))
+        formatted_history.append(AIMessage(chat.assistant))
+    return formatted_history
 
 
 def format_history_to_openai_mesages(
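Returning HumanMessage and AIMessage objects instead of (human, ai) tuples lets the history drop straight into a chat prompt. A minimal usage sketch, assuming a MessagesPlaceholder named "chat_history" (illustrative, not code from this PR):

```python
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant."),
        MessagesPlaceholder(variable_name="chat_history"),
        ("human", "{question}"),
    ]
)

# The list produced by format_chat_history plugs directly into the placeholder.
history = [HumanMessage("Hi"), AIMessage("Hello! How can I help?")]
messages = prompt.format_messages(chat_history=history, question="What is Quivr?")
```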
5 changes: 1 addition & 4 deletions backend/llm/utils/get_prompt_to_use.py
@@ -2,15 +2,12 @@
 from uuid import UUID
 
 from llm.utils.get_prompt_to_use_id import get_prompt_to_use_id
-from modules.prompt.entity.prompt import Prompt
 from modules.prompt.service import PromptService
 
 promptService = PromptService()
 
 
-def get_prompt_to_use(
-    brain_id: Optional[UUID], prompt_id: Optional[UUID]
-) -> Optional[Prompt]:
+def get_prompt_to_use(brain_id: Optional[UUID], prompt_id: Optional[UUID]) -> str:
     prompt_to_use_id = get_prompt_to_use_id(brain_id, prompt_id)
     if prompt_to_use_id is None:
         return None
6 changes: 2 additions & 4 deletions backend/models/brains_subscription_invitations.py
@@ -1,7 +1,7 @@
 from uuid import UUID
 
 from logger import get_logger
-from pydantic import BaseModel
+from pydantic import ConfigDict, BaseModel
 
 logger = get_logger(__name__)
 
@@ -10,6 +10,4 @@ class BrainSubscription(BaseModel):
     brain_id: UUID
     email: str
     rights: str = "Viewer"
-
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)
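This is the standard Pydantic v1 to v2 configuration migration: the inner `class Config` is replaced by a `model_config` attribute built with `ConfigDict`. A minimal sketch of the pattern (the class names here are made up):

```python
from typing import Optional

from pydantic import BaseModel, ConfigDict


class LegacyClient:
    """A plain, non-Pydantic class; validating it requires arbitrary_types_allowed."""


class ExampleModel(BaseModel):
    # Pydantic v2: configuration moves from an inner `class Config` to model_config.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    client: Optional[LegacyClient] = None
```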
2 changes: 1 addition & 1 deletion backend/models/files.py
@@ -18,7 +18,7 @@
 
 class File(BaseModel):
     id: Optional[UUID] = None
-    file: Optional[UploadFile]
+    file: Optional[UploadFile] = None
     file_name: Optional[str] = ""
     file_size: Optional[int] = None
     file_sha1: Optional[str] = ""
14 changes: 10 additions & 4 deletions backend/models/settings.py
@@ -5,19 +5,21 @@
 from logger import get_logger
 from models.databases.supabase.supabase import SupabaseDB
 from posthog import Posthog
-from pydantic import BaseSettings
+from pydantic_settings import BaseSettings, SettingsConfigDict
 from supabase.client import Client, create_client
 from vectorstore.supabase import SupabaseVectorStore
 
 logger = get_logger(__name__)
 
 
 class BrainRateLimiting(BaseSettings):
+    model_config = SettingsConfigDict(validate_default=False)
     max_brain_per_user: int = 5
 
 
 # The `PostHogSettings` class is used to initialize and interact with the PostHog analytics service.
 class PostHogSettings(BaseSettings):
+    model_config = SettingsConfigDict(validate_default=False)
     posthog_api_key: str = None
     posthog_api_url: str = None
     posthog: Posthog = None
@@ -102,15 +104,19 @@ def set_once_user_properties(self, user_id: UUID, event_name, properties: dict):
 
 
 class BrainSettings(BaseSettings):
-    openai_api_key: str
-    supabase_url: str
-    supabase_service_key: str
+    model_config = SettingsConfigDict(validate_default=False)
+    openai_api_key: str = ""
+    supabase_url: str = ""
+    supabase_service_key: str = ""
     resend_api_key: str = "null"
     resend_email_address: str = "brain@mail.quivr.app"
     ollama_api_base_url: str = None
     langfuse_public_key: str = None
    langfuse_secret_key: str = None
 
 
 class ResendSettings(BaseSettings):
+    model_config = SettingsConfigDict(validate_default=False)
     resend_api_key: str = "null"
 
 
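BaseSettings moved out of Pydantic core into the separate pydantic-settings package in v2, and per-class options such as validate_default are now declared through SettingsConfigDict. A small sketch of how such a settings class resolves values from the environment (the class and variable names are placeholders, not Quivr's):

```python
import os

from pydantic_settings import BaseSettings, SettingsConfigDict


class ExampleSettings(BaseSettings):
    # validate_default=False skips validation of the fallback values below.
    model_config = SettingsConfigDict(validate_default=False)

    api_key: str = ""  # filled from the API_KEY environment variable when present
    base_url: str = "http://localhost:8000"  # filled from BASE_URL when present


os.environ["API_KEY"] = "secret"  # hypothetical value, just for the demo
print(ExampleSettings().api_key)  # prints "secret"
```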
4 changes: 2 additions & 2 deletions backend/modules/api_key/service/api_key_service.py
@@ -6,7 +6,6 @@
 from modules.api_key.repository.api_keys import ApiKeys
 from modules.user.entity.user_identity import UserIdentity
 from modules.user.service.user_service import UserService
-from pydantic import DateError
 
 logger = get_logger(__name__)
 
@@ -37,7 +36,8 @@ async def verify_api_key(
             if api_key_creation_date.year == current_date.year:
                 return True
             return False
-        except DateError:
+        except Exception as e:
+            logger.error(f"Error verifying API key: {e}")
             return False
 
     async def get_user_from_api_key(
20 changes: 10 additions & 10 deletions backend/modules/brain/dto/inputs.py
@@ -43,12 +43,12 @@ class CreateBrainProperties(BaseModel, extra=Extra.forbid):
     name: Optional[str] = "Default brain"
     description: str = "This is a description"
     status: Optional[str] = "private"
-    model: Optional[str]
+    model: Optional[str] = None
     temperature: Optional[float] = 0.0
     max_tokens: Optional[int] = 2000
     prompt_id: Optional[UUID] = None
     brain_type: Optional[BrainType] = BrainType.DOC
-    brain_definition: Optional[CreateApiBrainDefinition]
+    brain_definition: Optional[CreateApiBrainDefinition] = None
     brain_secrets_values: Optional[dict] = {}
     connected_brains_ids: Optional[list[UUID]] = []
     integration: Optional[BrainIntegrationSettings] = None
@@ -61,14 +61,14 @@ def dict(self, *args, **kwargs):
 
 
 class BrainUpdatableProperties(BaseModel):
-    name: Optional[str]
-    description: Optional[str]
-    temperature: Optional[float]
-    model: Optional[str]
-    max_tokens: Optional[int]
-    status: Optional[str]
-    prompt_id: Optional[UUID]
-    brain_definition: Optional[ApiBrainDefinitionEntity]
+    name: Optional[str] = None
+    description: Optional[str] = None
+    temperature: Optional[float] = None
+    model: Optional[str] = None
+    max_tokens: Optional[int] = None
+    status: Optional[str] = None
+    prompt_id: Optional[UUID] = None
+    brain_definition: Optional[ApiBrainDefinitionEntity] = None
     connected_brains_ids: Optional[list[UUID]] = []
     integration: Optional[BrainIntegrationUpdateSettings] = None
 
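The recurring `= None` additions in these DTOs and entities come from a Pydantic v2 behaviour change: `Optional[X]` no longer implies a default, so a field annotated `Optional[str]` without a default is required. A short sketch of the difference (hypothetical model, not from the diff):

```python
from typing import Optional

from pydantic import BaseModel, ValidationError


class UpdateRequest(BaseModel):
    name: Optional[str] = None  # optional: explicit default supplied
    status: Optional[str]  # required in v2: may be None, but must be passed


UpdateRequest(status=None)  # ok: status given explicitly, name falls back to None
try:
    UpdateRequest()  # raises: status is a required field in Pydantic v2
except ValidationError as exc:
    print(exc)
```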
4 changes: 2 additions & 2 deletions backend/modules/brain/entity/api_brain_definition_entity.py
@@ -8,7 +8,7 @@
 class ApiBrainDefinitionSchemaProperty(BaseModel, extra=Extra.forbid):
     type: str
     description: str
-    enum: Optional[list]
+    enum: Optional[list] = None
     name: str
 
     def dict(self, **kwargs):
@@ -26,7 +26,7 @@ class ApiBrainDefinitionSchema(BaseModel, extra=Extra.forbid):
 class ApiBrainDefinitionSecret(BaseModel, extra=Extra.forbid):
     name: str
     type: str
-    description: Optional[str]
+    description: Optional[str] = None
 
 
 class ApiBrainAllowedMethods(str, Enum):
24 changes: 12 additions & 12 deletions backend/modules/brain/entity/brain_entity.py
@@ -16,18 +16,18 @@ class BrainType(str, Enum):
 class BrainEntity(BaseModel):
     brain_id: UUID
     name: str
-    description: Optional[str]
-    temperature: Optional[float]
-    model: Optional[str]
-    max_tokens: Optional[int]
-    status: Optional[str]
-    prompt_id: Optional[UUID]
+    description: Optional[str] = None
+    temperature: Optional[float] = None
+    model: Optional[str] = None
+    max_tokens: Optional[int] = None
+    status: Optional[str] = None
+    prompt_id: Optional[UUID] = None
     last_update: str
     brain_type: BrainType
-    brain_definition: Optional[ApiBrainDefinitionEntity]
-    connected_brains_ids: Optional[List[UUID]]
-    raw: Optional[bool]
-    jq_instructions: Optional[str]
+    brain_definition: Optional[ApiBrainDefinitionEntity] = None
+    connected_brains_ids: Optional[List[UUID]] = None
+    raw: Optional[bool] = None
+    jq_instructions: Optional[str] = None
 
     @property
     def id(self) -> UUID:
@@ -44,11 +44,11 @@ def dict(self, **kwargs):
 class PublicBrain(BaseModel):
     id: UUID
     name: str
-    description: Optional[str]
+    description: Optional[str] = None
     number_of_subscribers: int = 0
     last_update: str
     brain_type: BrainType
-    brain_definition: Optional[ApiBrainDefinitionEntity]
+    brain_definition: Optional[ApiBrainDefinitionEntity] = None
 
 
 class RoleEnum(str, Enum):
8 changes: 4 additions & 4 deletions backend/modules/brain/entity/integration_brain.py
@@ -7,14 +7,14 @@
 class IntegrationDescriptionEntity(BaseModel):
     id: UUID
     integration_name: str
-    integration_logo_url: Optional[str]
-    connection_settings: Optional[dict]
+    integration_logo_url: Optional[str] = None
+    connection_settings: Optional[dict] = None
 
 
 class IntegrationEntity(BaseModel):
     id: str
     user_id: str
     brain_id: str
     integration_id: str
-    settings: Optional[dict]
-    credentials: Optional[dict]
+    settings: Optional[dict] = None
+    credentials: Optional[dict] = None
@@ -33,7 +33,7 @@ class NotionSearchResponse(BaseModel):
     """Represents the response from the Notion Search API"""
 
     results: list[dict[str, Any]]
-    next_cursor: Optional[str]
+    next_cursor: Optional[str] = None
     has_more: bool = False
 
 