Fix follow ups in thread + fix user name (#3686)
* Fix follow ups in thread + fix user name

* Add back single history str

* Remove newline
Weves authored Jan 16, 2025
1 parent c9a420e commit 8a4d762
Showing 23 changed files with 153 additions and 117 deletions.
@@ -0,0 +1,36 @@
"""Add chat_message__standard_answer table
Revision ID: c5eae4a75a1b
Revises: 0f7ff6d75b57
Create Date: 2025-01-15 14:08:49.688998
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "c5eae4a75a1b"
down_revision = "0f7ff6d75b57"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table(
        "chat_message__standard_answer",
        sa.Column("chat_message_id", sa.Integer(), nullable=False),
        sa.Column("standard_answer_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["chat_message_id"],
            ["chat_message.id"],
        ),
        sa.ForeignKeyConstraint(
            ["standard_answer_id"],
            ["standard_answer.id"],
        ),
        sa.PrimaryKeyConstraint("chat_message_id", "standard_answer_id"),
    )


def downgrade() -> None:
    op.drop_table("chat_message__standard_answer")
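
For orientation: an association table like this usually backs a many-to-many ORM relationship, which is what lets the handler code later in this commit assign `chat_message.standard_answers` directly. A hypothetical sketch of that mapping (the model classes are illustrative stand-ins, not code from this commit):

# Illustrative sketch -- not part of the diff.
from sqlalchemy import Column, ForeignKey, Integer, Table
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

# Association table mirroring the migration above.
chat_message__standard_answer = Table(
    "chat_message__standard_answer",
    Base.metadata,
    Column("chat_message_id", ForeignKey("chat_message.id"), primary_key=True),
    Column("standard_answer_id", ForeignKey("standard_answer.id"), primary_key=True),
)


class StandardAnswer(Base):
    __tablename__ = "standard_answer"
    id = Column(Integer, primary_key=True)


class ChatMessage(Base):
    __tablename__ = "chat_message"
    id = Column(Integer, primary_key=True)
    # Enables `chat_message.standard_answers = [...]`, as used in the handler below.
    standard_answers = relationship(
        StandardAnswer, secondary=chat_message__standard_answer
    )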
15 changes: 10 additions & 5 deletions backend/ee/onyx/onyxbot/slack/handlers/handle_standard_answers.py
@@ -150,9 +150,9 @@ def _handle_standard_answers(
db_session=db_session,
description="",
user_id=None,
-            persona_id=slack_channel_config.persona.id
-            if slack_channel_config.persona
-            else 0,
+            persona_id=(
+                slack_channel_config.persona.id if slack_channel_config.persona else 0
+            ),
onyxbot_flow=True,
slack_thread_id=slack_thread_id,
)
@@ -182,7 +182,7 @@ def _handle_standard_answers(
formatted_answers.append(formatted_answer)
answer_message = "\n\n".join(formatted_answers)

-        _ = create_new_chat_message(
+        chat_message = create_new_chat_message(
chat_session_id=chat_session.id,
parent_message=new_user_message,
prompt_id=prompt.id if prompt else None,
@@ -191,8 +191,13 @@
message_type=MessageType.ASSISTANT,
error=None,
db_session=db_session,
-            commit=True,
+            commit=False,
)
+        # attach the standard answers to the chat message
+        chat_message.standard_answers = [
+            standard_answer for standard_answer, _ in matching_standard_answers
+        ]
+        db_session.commit()

update_emote_react(
emoji=DANSWER_REACT_EMOJI,
25 changes: 7 additions & 18 deletions backend/onyx/chat/answer.py
@@ -12,10 +12,10 @@
from onyx.chat.models import CitationInfo
from onyx.chat.models import OnyxAnswerPiece
from onyx.chat.models import PromptConfig
-from onyx.chat.prompt_builder.build import AnswerPromptBuilder
-from onyx.chat.prompt_builder.build import default_build_system_message
-from onyx.chat.prompt_builder.build import default_build_user_message
-from onyx.chat.prompt_builder.build import LLMCall
+from onyx.chat.prompt_builder.answer_prompt_builder import AnswerPromptBuilder
+from onyx.chat.prompt_builder.answer_prompt_builder import default_build_system_message
+from onyx.chat.prompt_builder.answer_prompt_builder import default_build_user_message
+from onyx.chat.prompt_builder.answer_prompt_builder import LLMCall
from onyx.chat.stream_processing.answer_response_handler import (
CitationResponseHandler,
)
@@ -212,19 +212,6 @@ def _get_response(self, llm_calls: list[LLMCall]) -> AnswerStream:
current_llm_call
) or ([], [])

-            # Quotes are no longer supported
-            # answer_handler: AnswerResponseHandler
-            # if self.answer_style_config.citation_config:
-            #     answer_handler = CitationResponseHandler(
-            #         context_docs=search_result,
-            #         doc_id_to_rank_map=map_document_id_order(search_result),
-            #     )
-            # elif self.answer_style_config.quotes_config:
-            #     answer_handler = QuotesResponseHandler(
-            #         context_docs=search_result,
-            #     )
-            # else:
-            #     raise ValueError("No answer style config provided")
answer_handler = CitationResponseHandler(
context_docs=final_search_results,
final_doc_id_to_rank_map=map_document_id_order(final_search_results),
@@ -265,11 +252,13 @@ def processed_streamed_output(self) -> AnswerStream:
user_query=self.question,
prompt_config=self.prompt_config,
files=self.latest_query_files,
+                single_message_history=self.single_message_history,
),
message_history=self.message_history,
llm_config=self.llm.config,
+            raw_user_query=self.question,
+            raw_user_uploaded_files=self.latest_query_files or [],
-            single_message_history=self.single_message_history,
-            raw_user_text=self.question,
)
prompt_builder.update_system_prompt(
default_build_system_message(self.prompt_config)
2 changes: 1 addition & 1 deletion backend/onyx/chat/llm_response_handler.py
@@ -7,7 +7,7 @@
from onyx.chat.models import ResponsePart
from onyx.chat.models import StreamStopInfo
from onyx.chat.models import StreamStopReason
-from onyx.chat.prompt_builder.build import LLMCall
+from onyx.chat.prompt_builder.answer_prompt_builder import LLMCall
from onyx.chat.stream_processing.answer_response_handler import AnswerResponseHandler
from onyx.chat.tool_handling.tool_response_handler import ToolResponseHandler

22 changes: 1 addition & 21 deletions backend/onyx/chat/models.py
@@ -8,7 +8,6 @@
from pydantic import BaseModel
from pydantic import ConfigDict
from pydantic import Field
-from pydantic import model_validator

from onyx.configs.constants import DocumentSource
from onyx.configs.constants import MessageType
@@ -261,13 +260,8 @@ class CitationConfig(BaseModel):
all_docs_useful: bool = False


-class QuotesConfig(BaseModel):
-    pass


class AnswerStyleConfig(BaseModel):
-    citation_config: CitationConfig | None = None
-    quotes_config: QuotesConfig | None = None
+    citation_config: CitationConfig
document_pruning_config: DocumentPruningConfig = Field(
default_factory=DocumentPruningConfig
)
@@ -276,20 +270,6 @@ class AnswerStyleConfig(BaseModel):
# right now, only used by the simple chat API
structured_response_format: dict | None = None

-    @model_validator(mode="after")
-    def check_quotes_and_citation(self) -> "AnswerStyleConfig":
-        if self.citation_config is None and self.quotes_config is None:
-            raise ValueError(
-                "One of `citation_config` or `quotes_config` must be provided"
-            )
-
-        if self.citation_config is not None and self.quotes_config is not None:
-            raise ValueError(
-                "Only one of `citation_config` or `quotes_config` must be provided"
-            )
-
-        return self


class PromptConfig(BaseModel):
"""Final representation of the Prompt configuration passed
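With `QuotesConfig` and the validator gone, the either/or check moves into the type system: `citation_config` is simply required. Constructing the config now reduces to something like this sketch (pruning config left at its default):

# Sketch: citation-based answers are now the only supported style.
answer_style_config = AnswerStyleConfig(
    citation_config=CitationConfig(all_docs_useful=False),
)
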
6 changes: 6 additions & 0 deletions backend/onyx/chat/process_message.py
@@ -302,6 +302,11 @@ def stream_chat_message_objects(
enforce_chat_session_id_for_search_docs: bool = True,
bypass_acl: bool = False,
include_contexts: bool = False,
+    # a string which represents the history of a conversation. Used in cases like
+    # Slack threads where the conversation cannot be represented by a chain of User/Assistant
+    # messages.
+    # NOTE: is not stored in the database at all.
+    single_message_history: str | None = None,
) -> ChatPacketStream:
"""Streams in order:
1. [conditional] Retrieved documents if a search needs to be run
@@ -707,6 +712,7 @@ def stream_chat_message_objects(
],
tools=tools,
force_use_tool=_get_force_search_settings(new_msg_req, tools),
+        single_message_history=single_message_history,
)

reference_db_search_docs = None
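The comment above describes the intended use: a Slack thread that cannot be replayed as alternating User/Assistant messages gets collapsed into one string before it reaches this function. A hedged sketch of what such flattening might look like (the helper and message shape are illustrative, not from this commit):

# Illustrative only: collapse a Slack-style thread into a single history string.
from dataclasses import dataclass


@dataclass
class ThreadMessage:
    sender: str
    message: str


def build_single_message_history(thread: list[ThreadMessage]) -> str | None:
    # Everything before the latest message becomes one history blob;
    # the latest message is the actual query.
    if len(thread) <= 1:
        return None
    return "\n\n".join(f"{m.sender}: {m.message}" for m in thread[:-1])


history = build_single_message_history(
    [
        ThreadMessage("alice", "How do I rotate my API key?"),
        ThreadMessage("bot", "Settings > API Keys > Rotate."),
        ThreadMessage("alice", "And for service accounts?"),
    ]
)
# `history` would then be passed as
# stream_chat_message_objects(..., single_message_history=history)
# and, per the NOTE above, never persisted.
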
backend/onyx/chat/prompt_builder/answer_prompt_builder.py (renamed from backend/onyx/chat/prompt_builder/build.py)
@@ -17,6 +17,7 @@
from onyx.llm.utils import message_to_prompt_and_imgs
from onyx.natural_language_processing.utils import get_tokenizer
from onyx.prompts.chat_prompts import CHAT_USER_CONTEXT_FREE_PROMPT
+from onyx.prompts.direct_qa_prompts import HISTORY_BLOCK
from onyx.prompts.prompt_utils import add_date_time_to_prompt
from onyx.prompts.prompt_utils import drop_messages_history_overflow
from onyx.tools.force import ForceUseTool
@@ -42,11 +43,22 @@ def default_build_user_message(


def default_build_user_message(
-    user_query: str, prompt_config: PromptConfig, files: list[InMemoryChatFile] = []
+    user_query: str,
+    prompt_config: PromptConfig,
+    files: list[InMemoryChatFile] = [],
+    single_message_history: str | None = None,
) -> HumanMessage:
+    history_block = (
+        HISTORY_BLOCK.format(history_str=single_message_history)
+        if single_message_history
+        else ""
+    )

user_prompt = (
CHAT_USER_CONTEXT_FREE_PROMPT.format(
-            task_prompt=prompt_config.task_prompt, user_query=user_query
+            history_block=history_block,
+            task_prompt=prompt_config.task_prompt,
+            user_query=user_query,
)
if prompt_config.task_prompt
else user_query
@@ -64,7 +76,8 @@ def __init__(
user_message: HumanMessage,
message_history: list[PreviousMessage],
llm_config: LLMConfig,
-        raw_user_text: str,
+        raw_user_query: str,
+        raw_user_uploaded_files: list[InMemoryChatFile],
single_message_history: str | None = None,
) -> None:
self.max_tokens = compute_max_llm_input_tokens(llm_config)
@@ -83,10 +96,6 @@ def __init__(
self.history_token_cnts,
) = translate_history_to_basemessages(message_history)

-        # for cases where like the QA flow where we want to condense the chat history
-        # into a single message rather than a sequence of User / Assistant messages
-        self.single_message_history = single_message_history

self.system_message_and_token_cnt: tuple[SystemMessage, int] | None = None
self.user_message_and_token_cnt = (
user_message,
@@ -95,7 +104,10 @@

self.new_messages_and_token_cnts: list[tuple[BaseMessage, int]] = []

-        self.raw_user_message = raw_user_text
+        # used for building a new prompt after a tool-call
+        self.raw_user_query = raw_user_query
+        self.raw_user_uploaded_files = raw_user_uploaded_files
+        self.single_message_history = single_message_history

def update_system_prompt(self, system_message: SystemMessage | None) -> None:
if not system_message:
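Read together with the answer.py hunk earlier, a call site for the reworked constructor now looks roughly like this (a sketch; the variables are stand-ins for the caller's state):

# Sketch mirroring the updated construction in answer.py.
prompt_builder = AnswerPromptBuilder(
    user_message=default_build_user_message(
        user_query=question,
        prompt_config=prompt_config,
        files=query_files,
        single_message_history=single_message_history,  # now folded into the user message too
    ),
    message_history=message_history,
    llm_config=llm_config,
    raw_user_query=question,                    # renamed from raw_user_text
    raw_user_uploaded_files=query_files or [],  # new: kept for prompt rebuilds after tool calls
    single_message_history=single_message_history,
)
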
4 changes: 1 addition & 3 deletions backend/onyx/chat/prompt_builder/citations_prompt.py
@@ -144,9 +144,7 @@ def build_citations_user_message(
)

history_block = (
-        HISTORY_BLOCK.format(history_str=history_message) + "\n"
-        if history_message
-        else ""
+        HISTORY_BLOCK.format(history_str=history_message) if history_message else ""
)
query, img_urls = message_to_prompt_and_imgs(message)

8 changes: 4 additions & 4 deletions backend/onyx/chat/tool_handling/tool_response_handler.py
@@ -5,7 +5,7 @@
from langchain_core.messages import ToolCall

from onyx.chat.models import ResponsePart
-from onyx.chat.prompt_builder.build import LLMCall
+from onyx.chat.prompt_builder.answer_prompt_builder import LLMCall
from onyx.llm.interfaces import LLM
from onyx.tools.force import ForceUseTool
from onyx.tools.message import build_tool_message
@@ -62,7 +62,7 @@ def get_tool_call_for_non_tool_calling_llm(
llm_call.force_use_tool.args
if llm_call.force_use_tool.args is not None
else tool.get_args_for_non_tool_calling_llm(
-                query=llm_call.prompt_builder.raw_user_message,
+                query=llm_call.prompt_builder.raw_user_query,
history=llm_call.prompt_builder.raw_message_history,
llm=llm,
force_run=True,
@@ -76,7 +76,7 @@
else:
tool_options = check_which_tools_should_run_for_non_tool_calling_llm(
tools=llm_call.tools,
-            query=llm_call.prompt_builder.raw_user_message,
+            query=llm_call.prompt_builder.raw_user_query,
history=llm_call.prompt_builder.raw_message_history,
llm=llm,
)
@@ -95,7 +95,7 @@
select_single_tool_for_non_tool_calling_llm(
tools_and_args=available_tools_and_args,
history=llm_call.prompt_builder.raw_message_history,
-                query=llm_call.prompt_builder.raw_user_message,
+                query=llm_call.prompt_builder.raw_user_query,
llm=llm,
)
if available_tools_and_args
2 changes: 1 addition & 1 deletion backend/onyx/onyxbot/slack/handlers/handle_buttons.py
@@ -127,7 +127,7 @@ def handle_generate_answer_button(
channel_to_respond=channel_id,
msg_to_respond=cast(str, message_ts or thread_ts),
thread_to_respond=cast(str, thread_ts or message_ts),
-        sender=user_id or None,
+        sender_id=user_id or None,
email=email or None,
bypass_filters=True,
is_bot_msg=False,
8 changes: 4 additions & 4 deletions backend/onyx/onyxbot/slack/handlers/handle_message.py
@@ -28,12 +28,12 @@


def send_msg_ack_to_user(details: SlackMessageInfo, client: WebClient) -> None:
-    if details.is_bot_msg and details.sender:
+    if details.is_bot_msg and details.sender_id:
respond_in_thread(
client=client,
channel=details.channel_to_respond,
thread_ts=details.msg_to_respond,
-            receiver_ids=[details.sender],
+            receiver_ids=[details.sender_id],
text="Hi, we're evaluating your query :face_with_monocle:",
)
return
@@ -70,7 +70,7 @@ def schedule_feedback_reminder(

try:
response = client.chat_scheduleMessage(
-            channel=details.sender,  # type:ignore
+            channel=details.sender_id,  # type:ignore
post_at=int(future.timestamp()),
blocks=[
get_feedback_reminder_blocks(
@@ -123,7 +123,7 @@ def handle_message(
logger = setup_logger(extra={SLACK_CHANNEL_ID: channel})

messages = message_info.thread_messages
-    sender_id = message_info.sender
+    sender_id = message_info.sender_id
bypass_filters = message_info.bypass_filters
is_bot_msg = message_info.is_bot_msg
is_bot_dm = message_info.is_bot_dm
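The `sender` to `sender_id` rename is the "fix user name" half of the commit title: the field carries a Slack user ID, not a display name, and the old name blurred that distinction. From the call sites in this diff, `SlackMessageInfo` presumably now looks roughly like the following (a reconstruction; only the `sender_id` rename is confirmed here):

# Hypothetical reconstruction from the call sites above -- not code from this commit.
from pydantic import BaseModel


class SlackMessageInfo(BaseModel):
    thread_messages: list  # thread message objects in the real codebase
    channel_to_respond: str
    msg_to_respond: str | None
    thread_to_respond: str | None
    sender_id: str | None  # Slack user ID (e.g. "U0123456789"), renamed from `sender`
    email: str | None
    bypass_filters: bool
    is_bot_msg: bool
    is_bot_dm: bool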
