Skip to content

Commit

Permalink
fix: adding openai key in request header for chatbot in playground (#243)
Browse files Browse the repository at this point in the history

* fix: prompt engineer agent for map decomposition

* feat: host application

* chore: update azure

* chore: update azure

* chore: update azure

* fix: adding default ops in init

* chore: refactoring fastapi models

* feat: allowing frontend to use a separate server for backend

* feat: allowing frontend to use a separate server for backend

* feat: allowing frontend to use a separate server for backend

* feat: allowing frontend to use a separate server for backend

* feat: allowing frontend to use a separate server for backend

* feat: allowing frontend to use a separate server for backend

* fix: add openai key when using AI assistant
  • Loading branch information
shreyashankar authored Dec 23, 2024
1 parent f1a12d2 commit 6807262
Show file tree
Hide file tree
Showing 3 changed files with 130 additions and 15 deletions.
109 changes: 100 additions & 9 deletions website/src/app/api/chat/route.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,63 @@
import { createOpenAI } from "@ai-sdk/openai";
import { streamText } from "ai";

// Allow streaming responses up to 60 seconds
interface ChatMessage {
  role: "user" | "assistant" | "system";
  content: string;
}

// Hard cap on the combined character count of all messages sent to the model.
const MAX_TOTAL_CHARS = 500000;

// Marker spliced into a message where its middle section was removed.
const TRUNCATION_MARKER = " ... ";

/**
 * Shrinks a conversation so its total character count fits under
 * MAX_TOTAL_CHARS by repeatedly cutting the middle half out of the
 * longest message and splicing in a " ... " marker (keeping the first
 * and last quarter of its content).
 *
 * Returns the ORIGINAL array (same reference) when no truncation is
 * needed — callers rely on reference equality to detect truncation.
 * Otherwise returns a new array of shallow-copied messages; the input
 * array and its messages are never mutated.
 */
function truncateMessages(messages: ChatMessage[]): ChatMessage[] {
  let totalLength = messages.reduce((sum, msg) => sum + msg.content.length, 0);

  // Under the limit — hand back the original reference so callers can
  // use `result !== messages` as a "was truncated" signal.
  if (totalLength <= MAX_TOTAL_CHARS) {
    return messages;
  }

  // Shallow-copy each message so the caller's data stays untouched
  // (role/content are plain strings, so a spread copy is sufficient and
  // keeps the ChatMessage[] type, unlike JSON round-tripping).
  const truncatedMessages: ChatMessage[] = messages.map((msg) => ({ ...msg }));

  while (totalLength > MAX_TOTAL_CHARS) {
    console.log(`Messages are too long (${totalLength} chars), truncating...`);

    // Locate the longest message — truncating it yields the biggest savings.
    let longestMsgIndex = 0;
    let maxLength = 0;
    truncatedMessages.forEach((msg, index) => {
      if (msg.content.length > maxLength) {
        maxLength = msg.content.length;
        longestMsgIndex = index;
      }
    });

    const message = truncatedMessages[longestMsgIndex];
    const contentLength = message.content.length;

    // Keep the first and last quarter; drop the middle half.
    const quarterLength = Math.floor(contentLength / 4);
    const startPos = quarterLength;
    const endPos = contentLength - quarterLength;

    const shortened =
      message.content.substring(0, startPos) +
      TRUNCATION_MARKER +
      message.content.substring(endPos);

    // Guard against non-termination: for very short messages the marker
    // makes the content LONGER than before. If even the longest message
    // can no longer shrink, no further iteration can reduce the total —
    // bail out with a best-effort result.
    if (shortened.length >= contentLength) {
      break;
    }

    totalLength -= contentLength - shortened.length;
    message.content = shortened;
  }

  return truncatedMessages;
}

export const maxDuration = 60;

export async function POST(req: Request) {
Expand All @@ -10,31 +66,66 @@ export async function POST(req: Request) {
const apiKey =
req.headers.get("x-openai-key") || process.env.OPENAI_API_KEY;

console.log("Chat API: OpenAI key present:", !!apiKey);

if (!apiKey) {
return new Response(
JSON.stringify({ error: "OpenAI API key is required" }),
{ status: 400 }
JSON.stringify({
error:
"OpenAI API key is required. Please add your API key in Edit > Edit API Keys",
}),
{
status: 400,
headers: {
"Content-Type": "application/json",
},
}
);
}

// Truncate messages if needed
const truncatedMessages = truncateMessages(messages);
const wasMessagesTruncated = truncatedMessages !== messages;
if (wasMessagesTruncated) {
console.log("Messages were truncated to fit within size limit");
}

const openai = createOpenAI({
apiKey,
baseURL: process.env.OPENAI_API_BASE,
baseURL: process.env.OPENAI_API_BASE || "https://api.openai.com/v1",
compatibility: "strict",
});

const modelName = process.env.MODEL_NAME || "gpt-4o-mini";

const result = await streamText({
model: openai(process.env.MODEL_NAME),
system: "You are a helpful assistant.",
messages,
model: openai(modelName),
system:
truncatedMessages.find((m: ChatMessage) => m.role === "system")
?.content || "You are a helpful assistant.",
messages: truncatedMessages.filter(
(m: ChatMessage) => m.role !== "system"
),
});

return result.toDataStreamResponse();
} catch (error) {
console.error("Chat API error:", error);

return new Response(
error instanceof Error ? error.message : "An error occurred",
{ status: 500 }
JSON.stringify({
error:
error instanceof Error
? error.message
: "An unexpected error occurred",
details: error instanceof Error ? error.stack : undefined,
}),
{
status: 500,
headers: {
"Content-Type": "application/json",
},
}
);
}
}
30 changes: 24 additions & 6 deletions website/src/components/AIChatPanel.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,12 @@ interface AIChatPanelProps {
onClose: () => void;
}

// Shape of a single chat message held by the panel.
interface Message {
  // Who produced the message.
  role: "user" | "assistant" | "system";
  // Raw message text.
  content: string;
  // Identifier for this message — presumably unique per message; confirm against the producer.
  id: string;
}

const DEFAULT_SUGGESTIONS = [
"Go over current outputs",
"Help me refine my current operation prompt",
Expand All @@ -51,6 +57,20 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({ onClose }) => {
const scrollAreaRef = useRef<HTMLDivElement>(null);
const [error, setError] = useState<string | null>(null);

const openAiKey = useMemo(() => {
const key = apiKeys.find((key) => key.name === "OPENAI_API_KEY")?.value;
console.log("Chat Panel: OpenAI key present:", !!key);
return key;
}, [apiKeys]);

const chatHeaders = useMemo(() => {
const headers: Record<string, string> = {};
if (openAiKey) {
headers["x-openai-key"] = openAiKey;
}
return headers;
}, [openAiKey]);

const {
messages,
setMessages,
Expand All @@ -63,13 +83,9 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({ onClose }) => {
api: "/api/chat",
initialMessages: [],
id: "persistent-chat",
headers: useMemo(() => {
const openAiKey = apiKeys.find(
(key) => key.name === "OPENAI_API_KEY"
)?.value;
return openAiKey ? { "x-openai-key": openAiKey } : {};
}, [apiKeys]),
headers: chatHeaders,
onError: (error) => {
console.error("Chat error:", error);
setError(error.message);
toast({
title: "Error",
Expand Down Expand Up @@ -134,6 +150,8 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({ onClose }) => {
e.preventDefault();
if (!input.trim() || isLoading) return;

console.log("📝 Submitting message with API key present:", !!openAiKey);

setError(null);

if (!hasOpenAIKey && !isLocalMode) {
Expand Down
6 changes: 6 additions & 0 deletions website/src/components/PromptImprovementDialog.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -469,6 +469,12 @@ export function PromptImprovementDialog({
const { messages, isLoading, append, setMessages } = useChat({
api: "/api/chat",
id: `prompt-improvement-${chatKey}`,
headers: useMemo(() => {
const openAiKey = apiKeys.find(
(key) => key.name === "OPENAI_API_KEY"
)?.value;
return openAiKey ? { "x-openai-key": openAiKey } : {};
}, [apiKeys]),
onFinish: () => {
// Optional: handle completion
},
Expand Down

0 comments on commit 6807262

Please sign in to comment.