Skip to content

Commit

Permalink
Orchestrator logs its own requests (#14255)
Browse files Browse the repository at this point in the history
* Orchestrator logs its own requests

- add data field to request
- add option to turn off default logging
- extracted getJsonOfText
- add documentation for getTextOfResponse

fixed #14252

Signed-off-by: Jonas Helming <jhelming@eclipsesource.com>
  • Loading branch information
JonasHelming authored Oct 10, 2024
1 parent 43c4fe7 commit 04a8b3e
Show file tree
Hide file tree
Showing 4 changed files with 104 additions and 36 deletions.
70 changes: 40 additions & 30 deletions packages/ai-chat/src/common/chat-agents.ts
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,8 @@ export abstract class AbstractChatAgent {
protected defaultLanguageModelPurpose: string,
public iconClass: string = 'codicon codicon-copilot',
public locations: ChatAgentLocation[] = ChatAgentLocation.ALL,
public tags: String[] = ['Chat']) {
public tags: String[] = ['Chat'],
public defaultLogging: boolean = true) {
}

@postConstruct()
Expand All @@ -152,14 +153,16 @@ export abstract class AbstractChatAgent {
throw new Error('Couldn\'t find a matching language model. Please check your setup!');
}
const messages = await this.getMessages(request.session);
this.recordingService.recordRequest({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: request.id,
request: request.request.text,
messages
});
if (this.defaultLogging) {
this.recordingService.recordRequest({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: request.id,
request: request.request.text,
messages
});
}

const systemMessageDescription = await this.getSystemMessageDescription();
const tools: Map<string, ToolRequest> = new Map();
Expand Down Expand Up @@ -192,13 +195,15 @@ export abstract class AbstractChatAgent {
);
await this.addContentsToResponse(languageModelResponse, request);
request.response.complete();
this.recordingService.recordResponse({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: request.response.requestId,
response: request.response.response.asString()
});
if (this.defaultLogging) {
this.recordingService.recordResponse({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: request.response.requestId,
response: request.response.response.asString()
});
}
} catch (e) {
this.handleError(request, e);
}
Expand Down Expand Up @@ -307,25 +312,30 @@ export abstract class AbstractStreamParsingChatAgent extends AbstractChatAgent {
const contents = this.parseContents(languageModelResponse.text);
request.response.response.addContents(contents);
request.response.complete();
this.recordingService.recordResponse({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: request.response.requestId,
response: request.response.response.asString()
});
if (this.defaultLogging) {
this.recordingService.recordResponse({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: request.response.requestId,
response: request.response.response.asString()

});
}
return;
}
if (isLanguageModelStreamResponse(languageModelResponse)) {
await this.addStreamResponse(languageModelResponse, request);
request.response.complete();
this.recordingService.recordResponse({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: request.response.requestId,
response: request.response.response.asString()
});
if (this.defaultLogging) {
this.recordingService.recordResponse({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: request.response.requestId,
response: request.response.response.asString()
});
}
return;
}
this.logger.error(
Expand Down
18 changes: 17 additions & 1 deletion packages/ai-chat/src/common/chat-model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ export interface ChatRequestModel {
readonly response: ChatResponseModel;
readonly message: ParsedChatRequest;
readonly agentId?: string;
readonly data?: { [key: string]: unknown };
}

export interface ChatProgressMessage {
Expand Down Expand Up @@ -342,14 +343,29 @@ export class ChatRequestModelImpl implements ChatRequestModel {
protected _request: ChatRequest;
protected _response: ChatResponseModelImpl;
protected _agentId?: string;
protected _data: { [key: string]: unknown };

/**
 * Creates a new chat request model bound to the given session.
 *
 * @param session the chat model (session) this request belongs to
 * @param message the parsed user request backing this model
 * @param agentId optional id of the agent handling the request
 * @param data optional request-scoped key/value data; defaults to an empty object
 */
constructor(session: ChatModel, public readonly message: ParsedChatRequest, agentId?: string,
    data: { [key: string]: unknown } = {}) {
    // TODO accept serialized data as a parameter to restore a previously saved ChatRequestModel
    this._id = generateUuid();
    this._session = session;
    this._request = message.request;
    this._agentId = agentId;
    this._data = data;
    // The response model is keyed by this request's generated id.
    this._response = new ChatResponseModelImpl(this._id, agentId);
}

/**
 * Request-scoped key/value data attached to this request.
 * The backing object always exists (defaulted in the constructor), so this
 * getter never actually returns `undefined`; the `| undefined` in the return
 * type mirrors the optional `data` property declared on `ChatRequestModel`.
 */
get data(): { [key: string]: unknown } | undefined {
return this._data;
}

/**
 * Stores a value under the given key in this request's data map,
 * overwriting any previously stored value for that key.
 */
addData(key: string, value: unknown): void {
    Object.assign(this._data, { [key]: value });
}

/**
 * Returns the value stored under the given key, or `undefined`
 * if no value has been stored for that key.
 */
getDataByKey(key: string): unknown {
    const { [key]: stored } = this._data;
    return stored;
}

get id(): string {
Expand Down
33 changes: 29 additions & 4 deletions packages/ai-chat/src/common/orchestrator-chat-agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,15 @@
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************

import { AgentSpecificVariables, getJsonOfResponse, LanguageModelResponse } from '@theia/ai-core';
import { AgentSpecificVariables, getJsonOfText, getTextOfResponse, LanguageModelResponse } from '@theia/ai-core';
import {
PromptTemplate
} from '@theia/ai-core/lib/common';
import { inject, injectable } from '@theia/core/shared/inversify';
import { ChatAgentService } from './chat-agent-service';
import { AbstractStreamParsingChatAgent, ChatAgent, SystemMessageDescription } from './chat-agents';
import { ChatRequestModelImpl, InformationalChatResponseContentImpl } from './chat-model';
import { generateUuid } from '@theia/core';

export const orchestratorTemplate: PromptTemplate = {
id: 'orchestrator-system',
Expand Down Expand Up @@ -59,6 +60,7 @@ You must only use the \`id\` attribute of the agent, never the name.
`};

export const OrchestratorChatAgentId = 'Orchestrator';
const OrchestatorRequestIdKey = 'orchestatorRequestIdKey';

@injectable()
export class OrchestratorChatAgent extends AbstractStreamParsingChatAgent implements ChatAgent {
Expand All @@ -74,7 +76,7 @@ export class OrchestratorChatAgent extends AbstractStreamParsingChatAgent implem
super(OrchestratorChatAgentId, [{
purpose: 'agent-selection',
identifier: 'openai/gpt-4o',
}], 'agent-selection', 'codicon codicon-symbol-boolean');
}], 'agent-selection', 'codicon codicon-symbol-boolean', undefined, undefined, false);
this.name = OrchestratorChatAgentId;
this.description = 'This agent analyzes the user request against the description of all available chat agents and selects the best fitting agent to answer the request \
(by using AI).The user\'s request will be directly delegated to the selected agent without further confirmation.';
Expand All @@ -88,8 +90,19 @@ export class OrchestratorChatAgent extends AbstractStreamParsingChatAgent implem
@inject(ChatAgentService)
protected chatAgentService: ChatAgentService;

override invoke(request: ChatRequestModelImpl): Promise<void> {
override async invoke(request: ChatRequestModelImpl): Promise<void> {
request.response.addProgressMessage({ content: 'Determining the most appropriate agent', status: 'inProgress' });
// We generate a dedicated ID for recording the orchestrator request/response, as we will forward the original request to another agent
const orchestartorRequestId = generateUuid();
request.addData(OrchestatorRequestIdKey, orchestartorRequestId);
const userPrompt = request.request.text;
this.recordingService.recordRequest({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: orchestartorRequestId,
request: userPrompt,
});
return super.invoke(request);
}

Expand All @@ -100,8 +113,20 @@ export class OrchestratorChatAgent extends AbstractStreamParsingChatAgent implem

protected override async addContentsToResponse(response: LanguageModelResponse, request: ChatRequestModelImpl): Promise<void> {
let agentIds: string[] = [];
const responseText = await getTextOfResponse(response);
// We use the previously generated, dedicated ID to log the orchestrator response before we forward the original request
const orchestratorRequestId = request.getDataByKey(OrchestatorRequestIdKey);
if (typeof orchestratorRequestId === 'string') {
this.recordingService.recordResponse({
agentId: this.id,
sessionId: request.session.id,
timestamp: Date.now(),
requestId: orchestratorRequestId,
response: responseText,
});
}
try {
const jsonResponse = await getJsonOfResponse(response);
const jsonResponse = await getJsonOfText(responseText);
if (Array.isArray(jsonResponse)) {
agentIds = jsonResponse.filter((id: string) => id !== this.id);
}
Expand Down
19 changes: 18 additions & 1 deletion packages/ai-core/src/common/language-model-util.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,18 @@
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************

import { isLanguageModelStreamResponse, isLanguageModelTextResponse, LanguageModelResponse, ToolRequest } from './language-model';
import { isLanguageModelParsedResponse, isLanguageModelStreamResponse, isLanguageModelTextResponse, LanguageModelResponse, ToolRequest } from './language-model';

/**
* Retrieves the text content from a `LanguageModelResponse` object.
*
* **Important:** For stream responses, the stream can only be consumed once. Calling this function multiple times on the same stream response will return an empty string (`''`)
* on subsequent calls, as the stream will have already been consumed.
*
* @param {LanguageModelResponse} response - The response object, which may contain a text, stream, or parsed response.
* @returns {Promise<string>} - A promise that resolves to the text content of the response.
* @throws {Error} - Throws an error if the response type is not supported or does not contain valid text content.
*/
export const getTextOfResponse = async (response: LanguageModelResponse): Promise<string> => {
if (isLanguageModelTextResponse(response)) {
return response.text;
Expand All @@ -25,12 +35,18 @@ export const getTextOfResponse = async (response: LanguageModelResponse): Promis
result += chunk.content ?? '';
}
return result;
} else if (isLanguageModelParsedResponse(response)) {
return response.content;
}
throw new Error(`Invalid response type ${response}`);
};

/**
 * Extracts the text of a `LanguageModelResponse` and parses it as JSON.
 *
 * Note: for stream responses the underlying stream is consumed by this
 * call — see `getTextOfResponse` for details.
 *
 * @param response the language model response to read
 * @returns a promise resolving to the parsed JSON value
 * @throws if the response type is unsupported or contains no valid JSON
 */
export const getJsonOfResponse = async (response: LanguageModelResponse): Promise<unknown> =>
    getJsonOfText(await getTextOfResponse(response));

export const getJsonOfText = (text: string): unknown => {
if (text.startsWith('```json')) {
const regex = /```json\s*([\s\S]*?)\s*```/g;
let match;
Expand All @@ -47,6 +63,7 @@ export const getJsonOfResponse = async (response: LanguageModelResponse): Promis
}
throw new Error('Invalid response format');
};

export const toolRequestToPromptText = (toolRequest: ToolRequest): string => {
const parameters = toolRequest.parameters;
let paramsText = '';
Expand Down

0 comments on commit 04a8b3e

Please sign in to comment.