From 989ef06060aa12f5237230a82509c7fbbbbe2037 Mon Sep 17 00:00:00 2001
From: Jason Liu
Date: Tue, 2 Jan 2024 18:42:16 -0500
Subject: [PATCH 1/3] test modes

---
 tests/mode.test.ts | 52 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+)
 create mode 100644 tests/mode.test.ts

diff --git a/tests/mode.test.ts b/tests/mode.test.ts
new file mode 100644
index 00000000..69463d6b
--- /dev/null
+++ b/tests/mode.test.ts
@@ -0,0 +1,52 @@
+import Instructor from "@/instructor";
+import { describe, expect, test } from "bun:test";
+import OpenAI from "openai";
+import { z } from "zod";
+
+const models = ["gpt-3.5-turbo", "gpt-4"];
+const modes = ["FUNCTIONS", "JSON", "TOOLS"];
+
+const createTestCases = () => {
+  return models.flatMap(model => modes.map(mode => ({ model, mode })));
+};
+
+const UserSchema = z.object({
+  age: z.number(),
+  name: z.string().refine(name => name.includes(" "), {
+    message: "Name must contain a space"
+  })
+});
+
+type User = z.infer<typeof UserSchema>;
+
+async function extractUser(model: string, mode: string) {
+  const oai = new OpenAI({
+    apiKey: process.env.OPENAI_API_KEY ?? undefined,
+    organization: process.env.OPENAI_ORG_ID ?? undefined
+  });
+
+  const client = Instructor({
+    client: oai,
+    mode: mode as "FUNCTIONS" | "JSON" | "TOOLS"
+  });
+
+  const user: User = await client.chat.completions.create({
+    messages: [{ role: "user", content: "Jason Liu is 30 years old" }],
+    model: model,
+    response_model: UserSchema,
+    max_retries: 3
+  });
+
+  return user;
+}
+
+describe("FunctionCall", () => {
+  const testCases = createTestCases();
+
+  test.each(testCases)("Should return extracted name and age for model %s and mode %s", async ({ model, mode }) => {
+    const user = await extractUser(model, mode);
+
+    expect(user.name).toEqual("Jason Liu");
+    expect(user.age).toEqual(30);
+  });
+});

From 4e7fa7d6d7d2002dc89acc04f3baeec7027379b4 Mon Sep 17 00:00:00 2001
From: Jason Liu
Date: Tue, 2 Jan 2024 18:44:56 -0500
Subject: [PATCH 2/3] add json mode

---
 tests/mode.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/mode.test.ts b/tests/mode.test.ts
index 69463d6b..eda4507f 100644
--- a/tests/mode.test.ts
+++ b/tests/mode.test.ts
@@ -3,7 +3,7 @@ import { describe, expect, test } from "bun:test";
 import OpenAI from "openai";
 import { z } from "zod";
 
-const models = ["gpt-3.5-turbo", "gpt-4"];
+const models = ["gpt-3.5-turbo-1106", "gpt-4-1106-preview"];
 const modes = ["FUNCTIONS", "JSON", "TOOLS"];
 
 const createTestCases = () => {

From 98851baf44f52a790eb9c514e2757f04ad4dce96 Mon Sep 17 00:00:00 2001
From: Dimitri Kennedy
Date: Tue, 2 Jan 2024 20:13:06 -0500
Subject: [PATCH 3/3] few small fixes and updates to test suite since it seems that function calling is fully removed from the 1106 models

---
 src/constants/modes.ts |  2 +-
 src/oai/params.ts      | 20 +++++++++++---
 src/oai/parser.ts      |  3 +--
 tests/mode.test.ts     | 60 ++++++++++++++++++++++++------------------
 4 files changed, 53 insertions(+), 32 deletions(-)

diff --git a/src/constants/modes.ts b/src/constants/modes.ts
index b3970b32..e0cf5bfd 100644
--- a/src/constants/modes.ts
+++ b/src/constants/modes.ts
@@ -4,6 +4,6 @@ export const MODE = {
   JSON: "JSON",
   MD_JSON: "MD_JSON",
   JSON_SCHEMA: "JSON_SCHEMA"
-}
+} as const
 
 export type MODE = keyof typeof MODE
diff --git a/src/oai/params.ts b/src/oai/params.ts
index ae1795ca..df1a8618 100644
--- a/src/oai/params.ts
+++ b/src/oai/params.ts
@@ -11,13 +11,24 @@ export function OAIBuildFunctionParams(definition, params) {
 }
 
 export function OAIBuildToolFunctionParams(definition, params) {
+  const { name, ...definitionParams } = definition
+
   return {
     ...params,
     tool_choice: {
       type: "function",
-      function: { name: definition.name }
+      function: { name }
     },
-    tools: [...(params?.tools ?? []), definition]
+    tools: [
+      {
+        type: "function",
+        function: {
+          name,
+          parameters: definitionParams
+        }
+      },
+      ...(params?.tools ?? [])
+    ]
   }
 }
 
@@ -27,7 +38,8 @@ export function OAIBuildMessageBasedParams(definition, params, mode) {
       response_format: { type: "json_object" }
     },
     [MODE.JSON_SCHEMA]: {
-      response_format: { type: "json_object", schema: definition }
+      //TODO: not sure what is different about this mode - the OAI sdk doesnt accept a schema here
+      response_format: { type: "json_object" }
     }
   }
 
@@ -39,7 +51,7 @@ export function OAIBuildMessageBasedParams(definition, params, mode) {
     messages: [
      ...(params?.messages ?? []),
      {
-        role: "SYSTEM",
+        role: "system",
        content: `
 Given a user prompt, you will return fully valid JSON based on the following description and schema.
 You will return no other prose. You will take into account the descriptions for each paramater within the schema
diff --git a/src/oai/parser.ts b/src/oai/parser.ts
index e5a4276d..5d294903 100644
--- a/src/oai/parser.ts
+++ b/src/oai/parser.ts
@@ -35,7 +35,6 @@ export function OAIResponseFnArgsParser(
     | OpenAI.Chat.Completions.ChatCompletion
 ) {
   const parsedData = typeof data === "string" ? JSON.parse(data) : data
-
   const text = parsedData.choices?.[0]?.message?.function_call?.arguments ?? "{}"
 
   return JSON.parse(text)
@@ -56,7 +55,7 @@ export function OAIResponseToolArgsParser(
 ) {
   const parsedData = typeof data === "string" ? JSON.parse(data) : data
 
-  const text = parsedData.choices?.[0]?.message?.tool_call?.function?.arguments ?? "{}"
+  const text = parsedData.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments ?? "{}"
 
   return JSON.parse(text)
 }
diff --git a/tests/mode.test.ts b/tests/mode.test.ts
index eda4507f..c9f8124b 100644
--- a/tests/mode.test.ts
+++ b/tests/mode.test.ts
@@ -1,52 +1,62 @@
-import Instructor from "@/instructor";
-import { describe, expect, test } from "bun:test";
-import OpenAI from "openai";
-import { z } from "zod";
+import Instructor from "@/instructor"
+import { describe, expect, test } from "bun:test"
+import OpenAI from "openai"
+import { z } from "zod"
 
-const models = ["gpt-3.5-turbo-1106", "gpt-4-1106-preview"];
-const modes = ["FUNCTIONS", "JSON", "TOOLS"];
+import { MODE } from "@/constants/modes"
+
+const models_latest = ["gpt-3.5-turbo-1106", "gpt-4-1106-preview"]
+const models_old = ["gpt-3.5-turbo", "gpt-4"]
 
-const createTestCases = () => {
-  return models.flatMap(model => modes.map(mode => ({ model, mode })));
-};
+const createTestCases = (): { model: string; mode: MODE }[] => {
+  const { FUNCTIONS, ...rest } = MODE
+  const modes = Object.values(rest)
+
+  return [
+    ...models_latest.flatMap(model => modes.map(mode => ({ model, mode }))),
+    ...models_old.flatMap(model => ({ model, mode: FUNCTIONS }))
+  ]
+}
 
 const UserSchema = z.object({
   age: z.number(),
   name: z.string().refine(name => name.includes(" "), {
     message: "Name must contain a space"
   })
-});
+})
 
-type User = z.infer<typeof UserSchema>;
+type User = z.infer<typeof UserSchema>
 
-async function extractUser(model: string, mode: string) {
+async function extractUser(model: string, mode: MODE) {
   const oai = new OpenAI({
     apiKey: process.env.OPENAI_API_KEY ?? undefined,
     organization: process.env.OPENAI_ORG_ID ?? undefined
-  });
+  })
 
   const client = Instructor({
     client: oai,
-    mode: mode as "FUNCTIONS" | "JSON" | "TOOLS"
-  });
+    mode: mode
+  })
 
   const user: User = await client.chat.completions.create({
     messages: [{ role: "user", content: "Jason Liu is 30 years old" }],
     model: model,
     response_model: UserSchema,
     max_retries: 3
-  });
+  })
 
-  return user;
+  return user
 }
 
-describe("FunctionCall", () => {
-  const testCases = createTestCases();
+describe("Modes", async () => {
+  const testCases = createTestCases()
 
-  test.each(testCases)("Should return extracted name and age for model %s and mode %s", async ({ model, mode }) => {
-    const user = await extractUser(model, mode);
+  for await (const { model, mode } of testCases) {
+    test(`Should return extracted name and age for model ${model} and mode ${mode}`, async () => {
+      const user = await extractUser(model, mode)
 
-    expect(user.name).toEqual("Jason Liu");
-    expect(user.age).toEqual(30);
-  });
-});
+      expect(user.name).toEqual("Jason Liu")
+      expect(user.age).toEqual(30)
+    })
+  }
+})
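
For readers following the series without the repo checked out, below is a minimal TypeScript sketch of the request shape the reworked tool-call builder in PATCH 3/3 produces. buildToolParams mirrors OAIBuildToolFunctionParams from src/oai/params.ts above; the userDefinition schema, model name, and message are illustrative stand-ins rather than code taken from the repository.

// Sketch only: mirrors the patched OAIBuildToolFunctionParams; in the library the
// definition is derived from the Zod response_model, here it is written by hand.
type ToolDefinition = { name: string } & Record<string, unknown>

function buildToolParams<T extends { tools?: unknown[] }>(definition: ToolDefinition, params: T) {
  // `name` becomes the forced tool choice, everything else becomes the tool's parameters.
  const { name, ...definitionParams } = definition

  return {
    ...params,
    tool_choice: { type: "function", function: { name } },
    tools: [{ type: "function", function: { name, parameters: definitionParams } }, ...(params?.tools ?? [])]
  }
}

// Hypothetical JSON-schema-style definition roughly matching UserSchema in tests/mode.test.ts.
const userDefinition = {
  name: "User",
  type: "object",
  properties: {
    name: { type: "string", description: "Name must contain a space" },
    age: { type: "number" }
  },
  required: ["name", "age"]
}

const request = buildToolParams(userDefinition, {
  model: "gpt-4-1106-preview",
  messages: [{ role: "user", content: "Jason Liu is 30 years old" }]
})
// `request` now carries the tools / tool_choice shape that the TOOLS mode sends,
// which is what the updated test matrix exercises against the 1106 models.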