Skip to content

Commit

Permalink
fix(openai): Support o1 streaming (langchain-ai#7229)
Browse files Browse the repository at this point in the history
  • Loading branch information
bracesproul authored and FilipZmijewski committed Nov 27, 2024
1 parent 70206f0 commit 6ab8bcc
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 14 deletions.
14 changes: 0 additions & 14 deletions libs/langchain-openai/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ import {
ToolMessageChunk,
OpenAIToolCall,
isAIMessage,
convertToChunk,
UsageMetadata,
} from "@langchain/core/messages";
import {
Expand Down Expand Up @@ -1360,19 +1359,6 @@ export class ChatOpenAI<
options: this["ParsedCallOptions"],
runManager?: CallbackManagerForLLMRun
): AsyncGenerator<ChatGenerationChunk> {
if (this.model.includes("o1-")) {
console.warn(
"[WARNING]: OpenAI o1 models do not yet support token-level streaming. Streaming will yield single chunk."
);
const result = await this._generate(messages, options, runManager);
const messageChunk = convertToChunk(result.generations[0].message);
yield new ChatGenerationChunk({
message: messageChunk,
text:
typeof messageChunk.content === "string" ? messageChunk.content : "",
});
return;
}
const messagesMapped: OpenAICompletionParam[] =
_convertMessagesToOpenAIParams(messages);
const params = {
Expand Down
28 changes: 28 additions & 0 deletions libs/langchain-openai/src/tests/chat_models.int.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1166,3 +1166,31 @@ describe("Audio output", () => {
).toBeGreaterThan(1);
});
});

// Integration test for the companion change in chat_models.ts: the removed
// o1 special-case used to collapse streaming into a single chunk, so this
// verifies o1 models now stream token-by-token. Requires live OpenAI access.
test("Can stream o1 requests", async () => {
  const model = new ChatOpenAI({
    model: "o1-mini",
  });
  const stream = await model.stream(
    "Write me a very simple hello world program in Python. Ensure it is wrapped in a function called 'hello_world' and has descriptive comments."
  );
  // Accumulate every streamed chunk into one message while counting chunks.
  let finalMsg: AIMessageChunk | undefined;
  let numChunks = 0;
  for await (const chunk of stream) {
    finalMsg = finalMsg ? concat(finalMsg, chunk) : chunk;
    numChunks += 1;
  }

  expect(finalMsg).toBeTruthy();
  if (!finalMsg) {
    throw new Error("No final message found");
  }
  // Message content may be a plain string or an array of content blocks;
  // assert a non-trivial response either way.
  if (typeof finalMsg.content === "string") {
    expect(finalMsg.content.length).toBeGreaterThan(10);
  } else {
    expect(finalMsg.content.length).toBeGreaterThanOrEqual(1);
  }

  // A single chunk would indicate the old non-streaming fallback; requiring
  // more than 3 chunks shows genuine token-level streaming occurred.
  expect(numChunks).toBeGreaterThan(3);
});

0 comments on commit 6ab8bcc

Please sign in to comment.