Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor chatbase to use the new chat completion method in ChatCraftChat class. #832

Open
wants to merge 8 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
157 changes: 8 additions & 149 deletions src/Chat/ChatBase.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -28,14 +28,9 @@ import useChatOpenAI from "../hooks/use-chat-openai";
import { useModels } from "../hooks/use-models";
import { useSettings } from "../hooks/use-settings";
import { useUser } from "../hooks/use-user";
import useChatCompletion from "../hooks/use-chat-completion";
import { ChatCraftChat } from "../lib/ChatCraftChat";
import { ChatCraftCommand } from "../lib/ChatCraftCommand";
import { ChatCraftFunction } from "../lib/ChatCraftFunction";
import { ChatCraftFunctionCallMessage, ChatCraftHumanMessage } from "../lib/ChatCraftMessage";
import { WebHandler } from "../lib/WebHandler";
import { ChatCraftCommandRegistry } from "../lib/commands";
import ChatHeader from "./ChatHeader";
import { ChatCompletionError } from "../lib/ai";

type ChatBaseProps = {
chat: ChatCraftChat;
Expand All @@ -45,6 +40,7 @@ function ChatBase({ chat }: ChatBaseProps) {
const { error: apiError } = useModels();
// When chatting with OpenAI, a streaming message is returned during loading
const { streamingMessage, callChatApi, cancel, paused, resume, togglePause } = useChatOpenAI();
const { chatCompletion } = useChatCompletion();
const { settings, setSettings } = useSettings();
const { isOpen: isSidebarVisible, onToggle: toggleSidebarVisible } = useDisclosure({
defaultIsOpen: settings.sidebarVisible,
Expand Down Expand Up @@ -206,154 +202,17 @@ function ChatBase({ chat }: ChatBaseProps) {
async (prompt?: string, imageUrls?: string[]) => {
setLoading(true);

// Special-case for "help", to invoke /help command
if (prompt?.toLowerCase() === "help") {
prompt = "/help";
}

// If we have a web handler registered for this url
const handler = WebHandler.getMatchingHandler(prompt ?? "");

if (prompt && handler) {
try {
const result = await handler.executeHandler(prompt);

chat.addMessage(new ChatCraftHumanMessage({ user, text: result }));
forceScroll();
} catch (err: any) {
error({
title: "Error running Web Handler",
message: err.message,
});
}

setLoading(false);
return;
}

// If this is a slash command, execute that instead of prompting LLM
if (prompt && ChatCraftCommandRegistry.isCommand(prompt)) {
const commandFunction = ChatCraftCommandRegistry.getCommand(prompt);
if (commandFunction) {
setShouldAutoScroll(true);
try {
await commandFunction(chat, user);
forceScroll();
} catch (err: any) {
error({
title: `Error Running Command`,
message: `There was an error running the command: ${err.message}.`,
});
}
} else {
// The input was a command, but not a recognized one.
// Handle this case as appropriate for your application.

// We are sure that this won't return null
// since prompt is definitely a command
const { command } = ChatCraftCommand.parseCommand(prompt)!;
const commandFunction = ChatCraftCommandRegistry.getCommand(`/commands ${command}`)!;
setShouldAutoScroll(true);
try {
await commandFunction(chat, user);
forceScroll();
} catch (err: any) {
error({
title: `Error Running Command`,
message: `There was an error running the command: ${err.message}.`,
});
}
}

setLoading(false);
return;
}

// Not a slash command, so pass this prompt to LLM
let promptMessage: ChatCraftHumanMessage | undefined;
try {
// If the prompt text exist, package it up as a human message and add to the chat
if (prompt) {
// Add this prompt message to the chat
promptMessage = new ChatCraftHumanMessage({ text: prompt, imageUrls, user });
await chat.addMessage(promptMessage);
} else if (imageUrls?.length) {
// Add only image to the chat
promptMessage = new ChatCraftHumanMessage({ text: "", imageUrls, user });
await chat.addMessage(promptMessage);
}

// If there's any problem loading referenced functions, show an error
const onError = (err: Error) => {
error({
title: `Error Loading Function`,
message: err.message,
});
};

// If there are any functions mentioned in the chat (via @fn or @fn-url),
// pass those through to the LLM to use if necessary.
const functions = await chat.functions(onError);

// If the user has specified a single function in this prompt, ask LLM to call it.
let functionToCall: ChatCraftFunction | undefined;
if (promptMessage && functions) {
const messageFunctions = await promptMessage.functions(onError);
if (messageFunctions?.length === 1) {
functionToCall = messageFunctions[0];
}
}

// NOTE: we strip out the ChatCraft App messages before sending to OpenAI.
const messages = chat.messages({ includeAppMessages: false });

// Clear any previous audio clips
clearAudioQueue();

const response = await callChatApi(messages, {
functions,
functionToCall,
});

// Add this response message to the chat
await chat.addMessage(response);

// If it's a function call message, invoke the function
if (response instanceof ChatCraftFunctionCallMessage) {
const func = await ChatCraftFunction.find(response.func.id);
if (!func) {
error({
title: `Function Error`,
message: `No such function: ${response.func.name} (${response.func.id}`,
});
return;
}

const result = await func.invoke(response.func.params);
// Add this result message to the chat
await chat.addMessage(result);

// If the user has opted to always send function results back to LLM, do it now
if (settings.alwaysSendFunctionResult) {
await onPrompt();
}

forceScroll();
}
} catch (err: any) {
if (err instanceof ChatCompletionError && err.incompleteResponse) {
// Add this partial response to the chat
await chat.addMessage(err.incompleteResponse);
}

await chatCompletion(prompt ?? "", chat, imageUrls);
} catch (err) {
error({
title: `Response Error`,
message: err.message,
title: `Completion Error`,
message: `Error with chat completion: ${err}`,
});
console.error(err);
} finally {
setLoading(false);
setShouldAutoScroll(false);
// Clear any previous audio clips
clearAudioQueue();
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
Expand Down
152 changes: 152 additions & 0 deletions src/hooks/use-chat-completion.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
import { ChatCraftFunctionCallMessage, ChatCraftHumanMessage } from "../lib/ChatCraftMessage";
import { ChatCraftFunction } from "../lib/ChatCraftFunction";
import { ChatCraftCommand } from "../lib/ChatCraftCommand";
import { WebHandler } from "../lib/WebHandler";
import { ChatCraftCommandRegistry } from "../lib/commands";
import { ChatCraftChat } from "../lib/ChatCraftChat";
import { useAlert } from "../hooks/use-alert";
import useChatOpenAI from "./use-chat-openai";
import { ChatCompletionError } from "../lib/ai";
import { useUser } from "../hooks/use-user";
import { useSettings } from "./use-settings";

/**
 * Hook exposing the full chat-completion flow for a ChatCraftChat:
 * special-cases the "help" prompt, dispatches registered web handlers and
 * slash commands, and otherwise sends the conversation to the LLM,
 * invoking any function the model asks to call.
 */
function useChatCompletion() {
  const { error } = useAlert();
  const { callChatApi } = useChatOpenAI();
  const { user } = useUser();
  const { settings } = useSettings();

  /**
   * Process a user prompt (and optional images) against the given chat.
   *
   * @param prompt raw user input; may be a slash command, a web-handler URL,
   *   or free text for the LLM. An empty string sends the existing chat
   *   history without adding a new human message.
   * @param chat the chat to append messages to and send to the LLM.
   * @param imageUrls optional image attachments for the prompt.
   */
  const chatCompletion = async (prompt: string, chat: ChatCraftChat, imageUrls?: string[]) => {
    // Special-case for "help", to invoke /help command
    if (prompt?.toLowerCase() === "help") {
      prompt = "/help";
    }

    // If we have a web handler registered for this url, run it instead of the LLM
    const handler = WebHandler.getMatchingHandler(prompt ?? "");

    if (prompt && handler) {
      try {
        const result = await handler.executeHandler(prompt);

        // Await so the message is persisted before we return to the caller
        await chat.addMessage(new ChatCraftHumanMessage({ user, text: result }));
      } catch (err: any) {
        error({
          title: "Error running Web Handler",
          message: err.message,
        });
      }
      return;
    }

    // If this is a slash command, execute that instead of prompting LLM
    if (prompt && ChatCraftCommandRegistry.isCommand(prompt)) {
      const commandFunction = ChatCraftCommandRegistry.getCommand(prompt);

      if (commandFunction) {
        try {
          await commandFunction(chat, user);
        } catch (err: any) {
          error({
            title: `Error Running Command`,
            message: `There was an error running the command: ${err.message}.`,
          });
        }
      } else {
        // The input was a command, but not a recognized one. Fall back to
        // "/commands <name>" so the user gets help for the unknown command.
        // parseCommand() won't return null here since prompt is definitely
        // a command (isCommand() was true above).
        const { command } = ChatCraftCommand.parseCommand(prompt)!;
        const fallbackCommand = ChatCraftCommandRegistry.getCommand(`/commands ${command}`)!;
        try {
          await fallbackCommand(chat, user);
        } catch (err: any) {
          error({
            title: `Error Running Command`,
            message: `There was an error running the command: ${err.message}.`,
          });
        }
      }
      return;
    }

    // Not a handler or command, so pass this prompt to the LLM
    try {
      let promptMessage: ChatCraftHumanMessage | undefined;
      if (prompt) {
        // Add this prompt message to the chat
        promptMessage = new ChatCraftHumanMessage({ text: prompt, imageUrls, user });
        await chat.addMessage(promptMessage);
      } else if (imageUrls?.length) {
        // Add only image to the chat
        promptMessage = new ChatCraftHumanMessage({ text: "", imageUrls, user });
        await chat.addMessage(promptMessage);
      }

      // If there's any problem loading referenced functions, show an error
      const onError = (err: Error) => {
        error({
          title: `Error Loading Function`,
          message: err.message,
        });
      };

      // If there are any functions mentioned in the chat (via @fn or @fn-url),
      // pass those through to the LLM to use if necessary.
      const functions = await chat.functions(onError);

      // If the user has specified a single function in this prompt, ask LLM to call it.
      let functionToCall: ChatCraftFunction | undefined;
      if (promptMessage && functions) {
        const messageFunctions = await promptMessage.functions(onError);
        if (messageFunctions?.length === 1) {
          functionToCall = messageFunctions[0];
        }
      }

      // NOTE: we strip out the ChatCraft App messages before sending to OpenAI.
      const messages = chat.messages({ includeAppMessages: false });

      const response = await callChatApi(messages, {
        functions,
        functionToCall,
      });

      // Add this response message to the chat
      await chat.addMessage(response);

      // If it's a function call message, invoke the function
      if (response instanceof ChatCraftFunctionCallMessage) {
        const func = await ChatCraftFunction.find(response.func.id);
        if (!func) {
          error({
            title: `Function Error`,
            message: `No such function: ${response.func.name} (${response.func.id})`,
          });
          return;
        }

        const result = await func.invoke(response.func.params);
        // Add this result message to the chat
        await chat.addMessage(result);

        // If the user has opted to always send function results back to LLM,
        // do it now. Recurse with an empty prompt so we only send the
        // accumulated history: re-passing the original prompt/images would
        // re-add the human message on every pass and could loop forever.
        if (settings.alwaysSendFunctionResult) {
          await chatCompletion("", chat);
        }
      }
    } catch (err: any) {
      if (err instanceof ChatCompletionError && err.incompleteResponse) {
        // Keep whatever partial response we received before the failure
        await chat.addMessage(err.incompleteResponse);
      }

      error({
        title: `Response Error`,
        message: err.message,
      });
    }
  };

  return { chatCompletion };
}

export default useChatCompletion;
Loading