From d52249aff830b382812f6f4cb803e51b309f9c60 Mon Sep 17 00:00:00 2001
From: timothycarambat
Date: Tue, 31 Dec 2024 09:52:21 -0800
Subject: [PATCH] support attachments via thread API chat/stream-chat
 endpoints

---
 server/endpoints/api/workspaceThread/index.js | 34 ++++++++++++++++---
 server/swagger/openapi.json                   | 18 ++++++++--
 2 files changed, 46 insertions(+), 6 deletions(-)

diff --git a/server/endpoints/api/workspaceThread/index.js b/server/endpoints/api/workspaceThread/index.js
index 0d6eb59c67..d85cf739b1 100644
--- a/server/endpoints/api/workspaceThread/index.js
+++ b/server/endpoints/api/workspaceThread/index.js
@@ -344,7 +344,14 @@ function apiWorkspaceThreadEndpoints(app) {
             example: {
               message: "What is AnythingLLM?",
               mode: "query | chat",
-              userId: 1
+              userId: 1,
+              attachments: [
+                {
+                  name: "image.png",
+                  mime: "image/png",
+                  contentString: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAA..."
+                }
+              ]
             }
           }
         }
@@ -374,7 +381,12 @@
       */
      try {
        const { slug, threadSlug } = request.params;
-       const { message, mode = "query", userId } = reqBody(request);
+       const {
+         message,
+         mode = "query",
+         userId,
+         attachments = [],
+       } = reqBody(request);
        const workspace = await Workspace.get({ slug });
        const thread = await WorkspaceThread.get({
          slug: threadSlug,
@@ -414,6 +426,7 @@
          mode,
          user,
          thread,
+         attachments,
        });
        await Telemetry.sendTelemetry("sent_chat", {
          LLMSelection: process.env.LLM_PROVIDER || "openai",
@@ -469,7 +482,14 @@
             example: {
               message: "What is AnythingLLM?",
               mode: "query | chat",
-              userId: 1
+              userId: 1,
+              attachments: [
+                {
+                  name: "image.png",
+                  mime: "image/png",
+                  contentString: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAA..."
+                }
+              ]
             }
           }
         }
@@ -520,7 +540,12 @@
       */
      try {
        const { slug, threadSlug } = request.params;
-       const { message, mode = "query", userId } = reqBody(request);
+       const {
+         message,
+         mode = "query",
+         userId,
+         attachments = [],
+       } = reqBody(request);
        const workspace = await Workspace.get({ slug });
        const thread = await WorkspaceThread.get({
          slug: threadSlug,
@@ -568,6 +593,7 @@
          mode,
          user,
          thread,
+         attachments,
        });
        await Telemetry.sendTelemetry("sent_chat", {
          LLMSelection: process.env.LLM_PROVIDER || "openai",
diff --git a/server/swagger/openapi.json b/server/swagger/openapi.json
index 19d14766ce..46a63809a5 100644
--- a/server/swagger/openapi.json
+++ b/server/swagger/openapi.json
@@ -2902,7 +2902,14 @@
             "example": {
               "message": "What is AnythingLLM?",
               "mode": "query | chat",
-              "userId": 1
+              "userId": 1,
+              "attachments": [
+                {
+                  "name": "image.png",
+                  "mime": "image/png",
+                  "contentString": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAA..."
+                }
+              ]
             }
           }
         }
@@ -3007,7 +3014,14 @@
             "example": {
               "message": "What is AnythingLLM?",
               "mode": "query | chat",
-              "userId": 1
+              "userId": 1,
+              "attachments": [
+                {
+                  "name": "image.png",
+                  "mime": "image/png",
+                  "contentString": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAA..."
+                }
+              ]
             }
           }
         }
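
For reference, a minimal sketch of how a client could exercise the new attachments field on the
thread chat endpoint once this patch is applied. The request body shape (message, mode, userId,
and attachments entries with name, mime, and contentString) comes from the example payloads in the
patch above; the base URL, port, route shape, and bearer-token header are assumptions about the
surrounding developer API and are not defined in this diff.

// attachment-chat-example.js -- hypothetical client call (requires Node 18+ for global fetch).
// The endpoint path and auth scheme below are assumed, not taken from this patch.
const fs = require("fs");

const BASE_URL = "http://localhost:3001/api"; // assumed default AnythingLLM server address
const API_KEY = "YOUR_DEVELOPER_API_KEY";     // assumed developer API key (bearer auth)

async function chatWithAttachment(workspaceSlug, threadSlug) {
  // Encode a local image as a data URI, matching the contentString format shown in the example body.
  const imageBase64 = fs.readFileSync("image.png").toString("base64");

  const res = await fetch(
    `${BASE_URL}/v1/workspace/${workspaceSlug}/thread/${threadSlug}/chat`, // assumed route shape
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${API_KEY}`,
      },
      body: JSON.stringify({
        message: "What is shown in this image?",
        mode: "chat",
        userId: 1,
        attachments: [
          {
            name: "image.png",
            mime: "image/png",
            contentString: `data:image/png;base64,${imageBase64}`,
          },
        ],
      }),
    }
  );
  return res.json();
}

chatWithAttachment("my-workspace", "my-thread").then(console.log);

Per the second set of hunks, the stream-chat route accepts the same attachments array in its
request body; only the response handling differs.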