Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

feat(anthropic,openai): Add payload formatting utils to Anthropic and OpenAI #6876

Merged
merged 4 commits into from
Sep 24, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion libs/langchain-anthropic/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
export * from "./chat_models.js";
export { _convertMessagesToAnthropicPayload } from "./utils/message_inputs.js";
export { convertPromptToAnthropic } from "./utils/prompts.js";
2 changes: 1 addition & 1 deletion libs/langchain-anthropic/src/tests/chat_models.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { AIMessage, HumanMessage, ToolMessage } from "@langchain/core/messages";
import { z } from "zod";
import { OutputParserException } from "@langchain/core/output_parsers";
import { ChatAnthropic } from "../chat_models.js";
import { _convertMessagesToAnthropicPayload } from "../index.js";
import { _convertMessagesToAnthropicPayload } from "../utils/message_inputs.js";

test("withStructuredOutput with output validation", async () => {
const model = new ChatAnthropic({
Expand Down
25 changes: 25 additions & 0 deletions libs/langchain-anthropic/src/tests/prompts.int.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import Anthropic from "@anthropic-ai/sdk";
import { pull } from "langchain/hub";

import { convertPromptToAnthropic } from "../utils/prompts.js";

test("basic traceable implementation", async () => {
jacoblee93 marked this conversation as resolved.
Show resolved Hide resolved
const prompt = await pull("jacob/joke-generator");
const formattedPrompt = await prompt.invoke({
topic: "cats",
});

const { system, messages } = convertPromptToAnthropic(formattedPrompt);

const anthropicClient = new Anthropic();

const anthropicResponse = await anthropicClient.messages.create({
model: "claude-3-haiku-20240307",
system,
messages: messages,

Check failure on line 19 in libs/langchain-anthropic/src/tests/prompts.int.test.ts

View workflow job for this annotation

GitHub Actions / Check linting

Expected property shorthand
max_tokens: 1024,
stream: false,
});

expect(anthropicResponse.content).toBeDefined();
});
51 changes: 51 additions & 0 deletions libs/langchain-anthropic/src/utils/prompts.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import type { BasePromptValue } from "@langchain/core/prompt_values";
import Anthropic from "@anthropic-ai/sdk";

import { _convertMessagesToAnthropicPayload } from "./message_inputs.js";

/**
 * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
 * a format expected by Anthropic's JS SDK.
 *
 * Requires the "@langchain/anthropic" package to be installed in addition
 * to the Anthropic SDK.
 *
 * @example
 * ```ts
 * import { convertPromptToAnthropic } from "@langchain/anthropic";
 * import { pull } from "langchain/hub";
 *
 * import Anthropic from '@anthropic-ai/sdk';
 *
 * const prompt = await pull("jacob/joke-generator");
 * const formattedPrompt = await prompt.invoke({
 *   topic: "cats",
 * });
 *
 * const { system, messages } = convertPromptToAnthropic(formattedPrompt);
 *
 * const anthropicClient = new Anthropic({
 *   apiKey: 'your_api_key',
 * });
 *
 * const anthropicResponse = await anthropicClient.messages.create({
 *   model: "claude-3-5-sonnet-20240620",
 *   max_tokens: 1024,
 *   stream: false,
 *   system,
 *   messages,
 * });
 * ```
 * @param formattedPrompt - A formatted LangChain prompt value (e.g. the result
 *   of `prompt.invoke(...)`); its chat messages are extracted and converted.
 * @returns A partial Anthropic payload containing `system` and `messages`;
 *   callers still supply `model`, `max_tokens`, etc.
 */
export function convertPromptToAnthropic(
  formattedPrompt: BasePromptValue
): Anthropic.Messages.MessageCreateParams {
  const messages = formattedPrompt.toChatMessages();
  const anthropicBody = _convertMessagesToAnthropicPayload(messages);
  // The Anthropic SDK requires `messages` to be present; default to an empty
  // list when the conversion produced none.
  if (anthropicBody.messages === undefined) {
    anthropicBody.messages = [];
  }
  return anthropicBody;
}
1 change: 1 addition & 0 deletions libs/langchain-openai/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,4 @@ export * from "./types.js";
export * from "./utils/openai.js";
export * from "./utils/azure.js";
export * from "./tools/index.js";
export { convertPromptToOpenAI } from "./utils/prompts.js";
22 changes: 22 additions & 0 deletions libs/langchain-openai/src/tests/prompts.int.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import OpenAI from "openai";
import { pull } from "langchain/hub";

import { convertPromptToOpenAI } from "../utils/prompts.js";

test("basic traceable implementation", async () => {
jacoblee93 marked this conversation as resolved.
Show resolved Hide resolved
const prompt = await pull("jacob/joke-generator");
const formattedPrompt = await prompt.invoke({
topic: "cats",
});

const { messages } = convertPromptToOpenAI(formattedPrompt);

const openAIClient = new OpenAI();

const openAIResponse = await openAIClient.chat.completions.create({
model: "gpt-4o-mini",
messages,
});

expect(openAIResponse.choices.length).toBeGreaterThan(0);
});
47 changes: 47 additions & 0 deletions libs/langchain-openai/src/utils/prompts.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/* eslint-disable import/no-extraneous-dependencies */
import type { BasePromptValue } from "@langchain/core/prompt_values";
import type { OpenAI } from "openai";

import { _convertMessagesToOpenAIParams } from "../chat_models.js";

/**
 * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
 * a format expected by OpenAI's JS SDK.
 *
 * Requires the "@langchain/openai" package to be installed in addition
 * to the OpenAI SDK.
 *
 * @example
 * ```ts
 * import { convertPromptToOpenAI } from "@langchain/openai";
 * import { pull } from "langchain/hub";
 *
 * import OpenAI from 'openai';
 *
 * const prompt = await pull("jacob/joke-generator");
 * const formattedPrompt = await prompt.invoke({
 *   topic: "cats",
 * });
 *
 * const { messages } = convertPromptToOpenAI(formattedPrompt);
 *
 * const openAIClient = new OpenAI();
 *
 * const openaiResponse = await openAIClient.chat.completions.create({
 *   model: "gpt-4o",
 *   messages,
 * });
 * ```
 * @param formattedPrompt - A formatted LangChain prompt value (e.g. the result
 *   of `prompt.invoke(...)`); its chat messages are extracted and converted.
 * @returns A partial OpenAI payload containing only `messages`; callers still
 *   supply `model` and any other completion parameters.
 */
export function convertPromptToOpenAI(formattedPrompt: BasePromptValue): {
  messages: OpenAI.Chat.ChatCompletionMessageParam[];
} {
  const messages = formattedPrompt.toChatMessages();
  return {
    // NOTE(review): cast assumes _convertMessagesToOpenAIParams returns values
    // assignable to ChatCompletionMessageParam — confirm against chat_models.js.
    messages: _convertMessagesToOpenAIParams(
      messages
    ) as OpenAI.Chat.ChatCompletionMessageParam[],
  };
}
Loading