From f62800cd727ecd2b4a41fe6bbef411f8bc6f0a2e Mon Sep 17 00:00:00 2001
From: Michael Kret <88898367+michael-radency@users.noreply.github.com>
Date: Tue, 9 Apr 2024 15:06:12 +0300
Subject: [PATCH] fix: Continue on fail / error output support for chains and
 agents (#9078)

---
 .../agents/ConversationalAgent/execute.ts     |  67 ++++---
 .../agents/OpenAiFunctionsAgent/execute.ts    |  65 ++++---
 .../agents/PlanAndExecuteAgent/execute.ts     |  65 ++++---
 .../agents/Agent/agents/ReActAgent/execute.ts |  67 ++++---
 .../agents/Agent/agents/SqlAgent/execute.ts   | 175 +++++++++---------
 .../OpenAiAssistant/OpenAiAssistant.node.ts   | 111 ++++++-----
 .../nodes/chains/ChainLLM/ChainLlm.node.ts    |  99 +++++-----
 .../ChainRetrievalQA/ChainRetrievalQa.node.ts |  41 ++--
 .../V2/ChainSummarizationV2.node.ts           | 147 ++++++++-------
 9 files changed, 459 insertions(+), 378 deletions(-)

diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts
index 67ebec8469bb6..fd14107627de4 100644
--- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts
+++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts
@@ -79,36 +79,45 @@ export async function conversationalAgentExecute(
   const items = this.getInputData();

   for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
-    let input;
-
-    if (this.getNode().typeVersion <= 1.2) {
-      input = this.getNodeParameter('text', itemIndex) as string;
-    } else {
-      input = getPromptInputByType({
-        ctx: this,
-        i: itemIndex,
-        inputKey: 'text',
-        promptTypeKey: 'promptType',
-      });
-    }
-
-    if (input === undefined) {
-      throw new NodeOperationError(this.getNode(), 'The ‘text parameter is empty.');
-    }
-
-    if (prompt) {
-      input = (await prompt.invoke({ input })).value;
-    }
-
-    let response = await agentExecutor
-      .withConfig(getTracingConfig(this))
-      .invoke({ input, outputParsers });
-
-    if (outputParser) {
-      response = { output: await outputParser.parse(response.output as string) };
+    try {
+      let input;
+
+      if (this.getNode().typeVersion <= 1.2) {
+        input = this.getNodeParameter('text', itemIndex) as string;
+      } else {
+        input = getPromptInputByType({
+          ctx: this,
+          i: itemIndex,
+          inputKey: 'text',
+          promptTypeKey: 'promptType',
+        });
+      }
+
+      if (input === undefined) {
+        throw new NodeOperationError(this.getNode(), 'The ‘text parameter is empty.');
+      }
+
+      if (prompt) {
+        input = (await prompt.invoke({ input })).value;
+      }
+
+      let response = await agentExecutor
+        .withConfig(getTracingConfig(this))
+        .invoke({ input, outputParsers });
+
+      if (outputParser) {
+        response = { output: await outputParser.parse(response.output as string) };
+      }
+
+      returnData.push({ json: response });
+    } catch (error) {
+      if (this.continueOnFail()) {
+        returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+        continue;
+      }
+
+      throw error;
     }
-
-    returnData.push({ json: response });
   }

   return await this.prepareOutputData(returnData);
diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts
index 7f9ea2040a782..d6173c2847f00 100644
--- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts
+++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts
@@ -85,35 +85,44 @@ export async function openAiFunctionsAgentExecute(
   const items = this.getInputData();

   for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
-    let input;
-    if (this.getNode().typeVersion <= 1.2) {
-      input = this.getNodeParameter('text', itemIndex) as string;
-    } else {
-      input = getPromptInputByType({
-        ctx: this,
-        i: itemIndex,
-        inputKey: 'text',
-        promptTypeKey: 'promptType',
-      });
+    try {
+      let input;
+      if (this.getNode().typeVersion <= 1.2) {
+        input = this.getNodeParameter('text', itemIndex) as string;
+      } else {
+        input = getPromptInputByType({
+          ctx: this,
+          i: itemIndex,
+          inputKey: 'text',
+          promptTypeKey: 'promptType',
+        });
+      }
+
+      if (input === undefined) {
+        throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
+      }
+
+      if (prompt) {
+        input = (await prompt.invoke({ input })).value;
+      }
+
+      let response = await agentExecutor
+        .withConfig(getTracingConfig(this))
+        .invoke({ input, outputParsers });
+
+      if (outputParser) {
+        response = { output: await outputParser.parse(response.output as string) };
+      }
+
+      returnData.push({ json: response });
+    } catch (error) {
+      if (this.continueOnFail()) {
+        returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+        continue;
+      }
+
+      throw error;
     }
-
-    if (input === undefined) {
-      throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
-    }
-
-    if (prompt) {
-      input = (await prompt.invoke({ input })).value;
-    }
-
-    let response = await agentExecutor
-      .withConfig(getTracingConfig(this))
-      .invoke({ input, outputParsers });
-
-    if (outputParser) {
-      response = { output: await outputParser.parse(response.output as string) };
-    }
-
-    returnData.push({ json: response });
   }

   return await this.prepareOutputData(returnData);
diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts
index 8c4a9667e077f..3957f867cd2ce 100644
--- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts
+++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts
@@ -60,35 +60,44 @@ export async function planAndExecuteAgentExecute(
   const items = this.getInputData();

   for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
-    let input;
-    if (this.getNode().typeVersion <= 1.2) {
-      input = this.getNodeParameter('text', itemIndex) as string;
-    } else {
-      input = getPromptInputByType({
-        ctx: this,
-        i: itemIndex,
-        inputKey: 'text',
-        promptTypeKey: 'promptType',
-      });
+    try {
+      let input;
+      if (this.getNode().typeVersion <= 1.2) {
+        input = this.getNodeParameter('text', itemIndex) as string;
+      } else {
+        input = getPromptInputByType({
+          ctx: this,
+          i: itemIndex,
+          inputKey: 'text',
+          promptTypeKey: 'promptType',
+        });
+      }
+
+      if (input === undefined) {
+        throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
+      }
+
+      if (prompt) {
+        input = (await prompt.invoke({ input })).value;
+      }
+
+      let response = await agentExecutor
+        .withConfig(getTracingConfig(this))
+        .invoke({ input, outputParsers });
+
+      if (outputParser) {
+        response = { output: await outputParser.parse(response.output as string) };
+      }
+
+      returnData.push({ json: response });
+    } catch (error) {
+      if (this.continueOnFail()) {
+        returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+        continue;
+      }
+
+      throw error;
     }
-
-    if (input === undefined) {
-      throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
-    }
-
-    if (prompt) {
-      input = (await prompt.invoke({ input })).value;
-    }
-
-    let response = await agentExecutor
-      .withConfig(getTracingConfig(this))
-      .invoke({ input, outputParsers });
-
-    if (outputParser) {
-      response = { output: await outputParser.parse(response.output as string) };
-    }
-
-    returnData.push({ json: response });
   }

   return await this.prepareOutputData(returnData);
diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts
index 94359aa47f0b2..a2a6392a5ff13 100644
--- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts
+++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts
@@ -80,36 +80,45 @@ export async function reActAgentAgentExecute(
   const items = this.getInputData();

   for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
-    let input;
-
-    if (this.getNode().typeVersion <= 1.2) {
-      input = this.getNodeParameter('text', itemIndex) as string;
-    } else {
-      input = getPromptInputByType({
-        ctx: this,
-        i: itemIndex,
-        inputKey: 'text',
-        promptTypeKey: 'promptType',
-      });
+    try {
+      let input;
+
+      if (this.getNode().typeVersion <= 1.2) {
+        input = this.getNodeParameter('text', itemIndex) as string;
+      } else {
+        input = getPromptInputByType({
+          ctx: this,
+          i: itemIndex,
+          inputKey: 'text',
+          promptTypeKey: 'promptType',
+        });
+      }
+
+      if (input === undefined) {
+        throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
+      }
+
+      if (prompt) {
+        input = (await prompt.invoke({ input })).value;
+      }
+
+      let response = await agentExecutor
+        .withConfig(getTracingConfig(this))
+        .invoke({ input, outputParsers });
+
+      if (outputParser) {
+        response = { output: await outputParser.parse(response.output as string) };
+      }
+
+      returnData.push({ json: response });
+    } catch (error) {
+      if (this.continueOnFail()) {
+        returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+        continue;
+      }
+
+      throw error;
     }
-
-    if (input === undefined) {
-      throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
-    }
-
-    if (prompt) {
-      input = (await prompt.invoke({ input })).value;
-    }
-
-    let response = await agentExecutor
-      .withConfig(getTracingConfig(this))
-      .invoke({ input, outputParsers });
-
-    if (outputParser) {
-      response = { output: await outputParser.parse(response.output as string) };
-    }
-
-    returnData.push({ json: response });
   }

   return await this.prepareOutputData(returnData);
diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts
index e8b989d865bed..36bc126868198 100644
--- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts
+++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts
@@ -41,106 +41,115 @@ export async function sqlAgentAgentExecute(
   const returnData: INodeExecutionData[] = [];

   for (let i = 0; i < items.length; i++) {
-    const item = items[i];
-    let input;
-    if (this.getNode().typeVersion <= 1.2) {
-      input = this.getNodeParameter('input', i) as string;
-    } else {
-      input = getPromptInputByType({
-        ctx: this,
-        i,
-        inputKey: 'text',
-        promptTypeKey: 'promptType',
-      });
-    }
+    try {
+      const item = items[i];
+      let input;
+      if (this.getNode().typeVersion <= 1.2) {
+        input = this.getNodeParameter('input', i) as string;
+      } else {
+        input = getPromptInputByType({
+          ctx: this,
+          i,
+          inputKey: 'text',
+          promptTypeKey: 'promptType',
+        });
+      }

-    if (input === undefined) {
-      throw new NodeOperationError(this.getNode(), 'The ‘prompt’ parameter is empty.');
-    }
+      if (input === undefined) {
+        throw new NodeOperationError(this.getNode(), 'The ‘prompt’ parameter is empty.');
+      }

-    const options = this.getNodeParameter('options', i, {});
-    const selectedDataSource = this.getNodeParameter('dataSource', i, 'sqlite') as
-      | 'mysql'
-      | 'postgres'
-      | 'sqlite';
+      const options = this.getNodeParameter('options', i, {});
+      const selectedDataSource = this.getNodeParameter('dataSource', i, 'sqlite') as
+        | 'mysql'
+        | 'postgres'
+        | 'sqlite';
+
+      const includedSampleRows = options.includedSampleRows as number;
+      const includedTablesArray = parseTablesString((options.includedTables as string) ?? '');
+      const ignoredTablesArray = parseTablesString((options.ignoredTables as string) ?? '');
+
+      let dataSource: DataSource | null = null;
+      if (selectedDataSource === 'sqlite') {
+        if (!item.binary) {
+          throw new NodeOperationError(
+            this.getNode(),
+            'No binary data found, please connect a binary to the input if you want to use SQLite as data source',
+          );
+        }
+
+        const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i, 'data');
+        dataSource = await getSqliteDataSource.call(this, item.binary, binaryPropertyName);
+      }
+
+      if (selectedDataSource === 'postgres') {
+        dataSource = await getPostgresDataSource.call(this);
+      }

-    const includedSampleRows = options.includedSampleRows as number;
-    const includedTablesArray = parseTablesString((options.includedTables as string) ?? '');
-    const ignoredTablesArray = parseTablesString((options.ignoredTables as string) ?? '');
+      if (selectedDataSource === 'mysql') {
+        dataSource = await getMysqlDataSource.call(this);
+      }

-    let dataSource: DataSource | null = null;
-    if (selectedDataSource === 'sqlite') {
-      if (!item.binary) {
+      if (!dataSource) {
         throw new NodeOperationError(
           this.getNode(),
-          'No binary data found, please connect a binary to the input if you want to use SQLite as data source',
+          'No data source found, please configure data source',
         );
       }

-      const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i, 'data');
-      dataSource = await getSqliteDataSource.call(this, item.binary, binaryPropertyName);
-    }
+      const agentOptions: SqlCreatePromptArgs = {
+        topK: (options.topK as number) ?? 10,
+        prefix: (options.prefixPrompt as string) ?? SQL_PREFIX,
+        suffix: (options.suffixPrompt as string) ?? SQL_SUFFIX,
+        inputVariables: ['chatHistory', 'input', 'agent_scratchpad'],
+      };
+
+      const dbInstance = await SqlDatabase.fromDataSourceParams({
+        appDataSource: dataSource,
+        includesTables: includedTablesArray.length > 0 ? includedTablesArray : undefined,
+        ignoreTables: ignoredTablesArray.length > 0 ? ignoredTablesArray : undefined,
+        sampleRowsInTableInfo: includedSampleRows ?? 3,
+      });

-    if (selectedDataSource === 'postgres') {
-      dataSource = await getPostgresDataSource.call(this);
-    }
+      const toolkit = new SqlToolkit(dbInstance, model);
+      const agentExecutor = createSqlAgent(model, toolkit, agentOptions);

-    if (selectedDataSource === 'mysql') {
-      dataSource = await getMysqlDataSource.call(this);
-    }
+      const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
+        | BaseChatMemory
+        | undefined;

-    if (!dataSource) {
-      throw new NodeOperationError(
-        this.getNode(),
-        'No data source found, please configure data source',
-      );
-    }
+      agentExecutor.memory = memory;

-    const agentOptions: SqlCreatePromptArgs = {
-      topK: (options.topK as number) ?? 10,
-      prefix: (options.prefixPrompt as string) ?? SQL_PREFIX,
-      suffix: (options.suffixPrompt as string) ?? SQL_SUFFIX,
-      inputVariables: ['chatHistory', 'input', 'agent_scratchpad'],
-    };
-
-    const dbInstance = await SqlDatabase.fromDataSourceParams({
-      appDataSource: dataSource,
-      includesTables: includedTablesArray.length > 0 ? includedTablesArray : undefined,
-      ignoreTables: ignoredTablesArray.length > 0 ? ignoredTablesArray : undefined,
-      sampleRowsInTableInfo: includedSampleRows ?? 3,
-    });
-
-    const toolkit = new SqlToolkit(dbInstance, model);
-    const agentExecutor = createSqlAgent(model, toolkit, agentOptions);
-
-    const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
-      | BaseChatMemory
-      | undefined;
-
-    agentExecutor.memory = memory;
-
-    let chatHistory = '';
-    if (memory) {
-      const messages = await memory.chatHistory.getMessages();
-      chatHistory = serializeChatHistory(messages);
-    }
+      let chatHistory = '';
+      if (memory) {
+        const messages = await memory.chatHistory.getMessages();
+        chatHistory = serializeChatHistory(messages);
+      }

-    let response: IDataObject;
-    try {
-      response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
-        input,
-        signal: this.getExecutionCancelSignal(),
-        chatHistory,
-      });
+      let response: IDataObject;
+      try {
+        response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
+          input,
+          signal: this.getExecutionCancelSignal(),
+          chatHistory,
+        });
+      } catch (error) {
+        if ((error.message as IDataObject)?.output) {
+          response = error.message as IDataObject;
+        } else {
+          throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex: i });
+        }
+      }
+
+      returnData.push({ json: response });
     } catch (error) {
-      if ((error.message as IDataObject)?.output) {
-        response = error.message as IDataObject;
-      } else {
-        throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex: i });
+      if (this.continueOnFail()) {
+        returnData.push({ json: { error: error.message }, pairedItem: { item: i } });
+        continue;
       }
-    }
-
-    returnData.push({ json: response });
+
+      throw error;
+    }
   }

   return await this.prepareOutputData(returnData);
diff --git a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts
index 5dafa6d187296..77fa3911e02f5 100644
--- a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts
@@ -320,67 +320,76 @@ export class OpenAiAssistant implements INodeType {
     const returnData: INodeExecutionData[] = [];

     for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
-      const input = this.getNodeParameter('text', itemIndex) as string;
-      const assistantId = this.getNodeParameter('assistantId', itemIndex, '') as string;
-      const nativeTools = this.getNodeParameter('nativeTools', itemIndex, []) as Array<
-        'code_interpreter' | 'retrieval'
-      >;
+      try {
+        const input = this.getNodeParameter('text', itemIndex) as string;
+        const assistantId = this.getNodeParameter('assistantId', itemIndex, '') as string;
+        const nativeTools = this.getNodeParameter('nativeTools', itemIndex, []) as Array<
+          'code_interpreter' | 'retrieval'
+        >;

-      const options = this.getNodeParameter('options', itemIndex, {}) as {
-        baseURL?: string;
-        maxRetries: number;
-        timeout: number;
-      };
+        const options = this.getNodeParameter('options', itemIndex, {}) as {
+          baseURL?: string;
+          maxRetries: number;
+          timeout: number;
+        };

-      if (input === undefined) {
-        throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
-      }
+        if (input === undefined) {
+          throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
+        }
+
+        const client = new OpenAIClient({
+          apiKey: credentials.apiKey as string,
+          maxRetries: options.maxRetries ?? 2,
+          timeout: options.timeout ?? 10000,
+          baseURL: options.baseURL,
+        });
+        let agent;
+        const nativeToolsParsed: OpenAIToolType = nativeTools.map((tool) => ({ type: tool }));
+        const transformedConnectedTools = tools?.map(formatToOpenAIAssistantTool) ?? [];
+        const newTools = [...transformedConnectedTools, ...nativeToolsParsed];

-      const client = new OpenAIClient({
-        apiKey: credentials.apiKey as string,
-        maxRetries: options.maxRetries ?? 2,
-        timeout: options.timeout ?? 10000,
-        baseURL: options.baseURL,
-      });
-      let agent;
-      const nativeToolsParsed: OpenAIToolType = nativeTools.map((tool) => ({ type: tool }));
-      const transformedConnectedTools = tools?.map(formatToOpenAIAssistantTool) ?? [];
-      const newTools = [...transformedConnectedTools, ...nativeToolsParsed];
+        // Existing agent, update tools with currently assigned
+        if (assistantId) {
+          agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true });

-      // Existing agent, update tools with currently assigned
-      if (assistantId) {
-        agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true });
+          await client.beta.assistants.update(assistantId, {
+            tools: newTools,
+          });
+        } else {
+          const name = this.getNodeParameter('name', itemIndex, '') as string;
+          const instructions = this.getNodeParameter('instructions', itemIndex, '') as string;
+          const model = this.getNodeParameter('model', itemIndex, 'gpt-3.5-turbo-1106') as string;

-        await client.beta.assistants.update(assistantId, {
-          tools: newTools,
-        });
-      } else {
-        const name = this.getNodeParameter('name', itemIndex, '') as string;
-        const instructions = this.getNodeParameter('instructions', itemIndex, '') as string;
-        const model = this.getNodeParameter('model', itemIndex, 'gpt-3.5-turbo-1106') as string;
+          agent = await OpenAIAssistantRunnable.createAssistant({
+            model,
+            client,
+            instructions,
+            name,
+            tools: newTools,
+            asAgent: true,
+          });
+        }

-        agent = await OpenAIAssistantRunnable.createAssistant({
-          model,
-          client,
-          instructions,
-          name,
-          tools: newTools,
-          asAgent: true,
+        const agentExecutor = AgentExecutor.fromAgentAndTools({
+          agent,
+          tools,
         });
-      }

-      const agentExecutor = AgentExecutor.fromAgentAndTools({
-        agent,
-        tools,
-      });
+        const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
+          content: input,
+          signal: this.getExecutionCancelSignal(),
+          timeout: options.timeout ?? 10000,
+        });

-      const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
-        content: input,
-        signal: this.getExecutionCancelSignal(),
-        timeout: options.timeout ?? 10000,
-      });
+        returnData.push({ json: response });
+      } catch (error) {
+        if (this.continueOnFail()) {
+          returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+          continue;
+        }

-      returnData.push({ json: response });
+        throw error;
+      }
     }

     return await this.prepareOutputData(returnData);
diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts
index 030eef34a26bc..5f8dcde626ad1 100644
--- a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts
@@ -519,55 +519,64 @@ export class ChainLlm implements INodeType {
     const outputParsers = await getOptionalOutputParsers(this);

     for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
-      let prompt: string;
-      if (this.getNode().typeVersion <= 1.3) {
-        prompt = this.getNodeParameter('prompt', itemIndex) as string;
-      } else {
-        prompt = getPromptInputByType({
-          ctx: this,
-          i: itemIndex,
-          inputKey: 'text',
-          promptTypeKey: 'promptType',
-        });
-      }
-      const messages = this.getNodeParameter(
-        'messages.messageValues',
-        itemIndex,
-        [],
-      ) as MessagesTemplate[];
-
-      if (prompt === undefined) {
-        throw new NodeOperationError(this.getNode(), 'The ‘prompt’ parameter is empty.');
-      }
-
-      const responses = await getChain(this, itemIndex, prompt, llm, outputParsers, messages);
-
-      responses.forEach((response) => {
-        let data: IDataObject;
-        if (typeof response === 'string') {
-          data = {
-            response: {
-              text: response.trim(),
-            },
-          };
-        } else if (Array.isArray(response)) {
-          data = {
-            data: response,
-          };
-        } else if (response instanceof Object) {
-          data = response as IDataObject;
+      try {
+        let prompt: string;
+        if (this.getNode().typeVersion <= 1.3) {
+          prompt = this.getNodeParameter('prompt', itemIndex) as string;
         } else {
-          data = {
-            response: {
-              text: response,
-            },
-          };
+          prompt = getPromptInputByType({
+            ctx: this,
+            i: itemIndex,
+            inputKey: 'text',
+            promptTypeKey: 'promptType',
+          });
+        }
+        const messages = this.getNodeParameter(
+          'messages.messageValues',
+          itemIndex,
+          [],
+        ) as MessagesTemplate[];
+
+        if (prompt === undefined) {
+          throw new NodeOperationError(this.getNode(), "The 'prompt' parameter is empty.");
         }

-        returnData.push({
-          json: data,
+        const responses = await getChain(this, itemIndex, prompt, llm, outputParsers, messages);
+
+        responses.forEach((response) => {
+          let data: IDataObject;
+          if (typeof response === 'string') {
+            data = {
+              response: {
+                text: response.trim(),
+              },
+            };
+          } else if (Array.isArray(response)) {
+            data = {
+              data: response,
+            };
+          } else if (response instanceof Object) {
+            data = response as IDataObject;
+          } else {
+            data = {
+              response: {
+                text: response,
+              },
+            };
+          }
+
+          returnData.push({
+            json: data,
+          });
         });
-      });
+      } catch (error) {
+        if (this.continueOnFail()) {
+          returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+          continue;
+        }
+
+        throw error;
+      }
     }

     return [returnData];
diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts
index 0652f7cf47174..cc5cba5b7ca9c 100644
--- a/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts
@@ -160,25 +160,34 @@ export class ChainRetrievalQa implements INodeType {
     // Run for each item
     for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
-      let query;
+      try {
+        let query;

-      if (this.getNode().typeVersion <= 1.2) {
-        query = this.getNodeParameter('query', itemIndex) as string;
-      } else {
-        query = getPromptInputByType({
-          ctx: this,
-          i: itemIndex,
-          inputKey: 'text',
-          promptTypeKey: 'promptType',
-        });
-      }
+        if (this.getNode().typeVersion <= 1.2) {
+          query = this.getNodeParameter('query', itemIndex) as string;
+        } else {
+          query = getPromptInputByType({
+            ctx: this,
+            i: itemIndex,
+            inputKey: 'text',
+            promptTypeKey: 'promptType',
+          });
+        }

-      if (query === undefined) {
-        throw new NodeOperationError(this.getNode(), 'The ‘query‘ parameter is empty.');
-      }
+        if (query === undefined) {
+          throw new NodeOperationError(this.getNode(), 'The ‘query‘ parameter is empty.');
+        }

-      const response = await chain.withConfig(getTracingConfig(this)).invoke({ query });
-      returnData.push({ json: { response } });
+        const response = await chain.withConfig(getTracingConfig(this)).invoke({ query });
+        returnData.push({ json: { response } });
+      } catch (error) {
+        if (this.continueOnFail()) {
+          returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+          continue;
+        }
+
+        throw error;
+      }
     }
     return await this.prepareOutputData(returnData);
   }
diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts
index 30cab761c1638..9d05b0a7c5115 100644
--- a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts
@@ -329,90 +329,99 @@ export class ChainSummarizationV2 implements INodeType {
     const returnData: INodeExecutionData[] = [];

     for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
-      const summarizationMethodAndPrompts = this.getNodeParameter(
-        'options.summarizationMethodAndPrompts.values',
-        itemIndex,
-        {},
-      ) as {
-        prompt?: string;
-        refineQuestionPrompt?: string;
-        refinePrompt?: string;
-        summarizationMethod: 'map_reduce' | 'stuff' | 'refine';
-        combineMapPrompt?: string;
-      };
+      try {
+        const summarizationMethodAndPrompts = this.getNodeParameter(
+          'options.summarizationMethodAndPrompts.values',
+          itemIndex,
+          {},
+        ) as {
+          prompt?: string;
+          refineQuestionPrompt?: string;
+          refinePrompt?: string;
+          summarizationMethod: 'map_reduce' | 'stuff' | 'refine';
+          combineMapPrompt?: string;
+        };

-      const chainArgs = getChainPromptsArgs(
-        summarizationMethodAndPrompts.summarizationMethod ?? 'map_reduce',
-        summarizationMethodAndPrompts,
-      );
+        const chainArgs = getChainPromptsArgs(
+          summarizationMethodAndPrompts.summarizationMethod ?? 'map_reduce',
+          summarizationMethodAndPrompts,
+        );

-      const chain = loadSummarizationChain(model, chainArgs);
-      const item = items[itemIndex];
+        const chain = loadSummarizationChain(model, chainArgs);
+        const item = items[itemIndex];

-      let processedDocuments: Document[];
+        let processedDocuments: Document[];

-      // Use dedicated document loader input to load documents
-      if (operationMode === 'documentLoader') {
-        const documentInput = (await this.getInputConnectionData(
-          NodeConnectionType.AiDocument,
-          0,
-        )) as N8nJsonLoader | Array<Document<Record<string, unknown>>>;
+        // Use dedicated document loader input to load documents
+        if (operationMode === 'documentLoader') {
+          const documentInput = (await this.getInputConnectionData(
+            NodeConnectionType.AiDocument,
+            0,
+          )) as N8nJsonLoader | Array<Document<Record<string, unknown>>>;

-        const isN8nLoader =
-          documentInput instanceof N8nJsonLoader || documentInput instanceof N8nBinaryLoader;
+          const isN8nLoader =
+            documentInput instanceof N8nJsonLoader || documentInput instanceof N8nBinaryLoader;

-        processedDocuments = isN8nLoader
-          ? await documentInput.processItem(item, itemIndex)
-          : documentInput;
+          processedDocuments = isN8nLoader
+            ? await documentInput.processItem(item, itemIndex)
+            : documentInput;

-        const response = await chain.withConfig(getTracingConfig(this)).invoke({
-          input_documents: processedDocuments,
-        });
+          const response = await chain.withConfig(getTracingConfig(this)).invoke({
+            input_documents: processedDocuments,
+          });

-        returnData.push({ json: { response } });
-      }
+          returnData.push({ json: { response } });
+        }

-      // Take the input and use binary or json loader
-      if (['nodeInputJson', 'nodeInputBinary'].includes(operationMode)) {
-        let textSplitter: TextSplitter | undefined;
+
+        // Take the input and use binary or json loader
+        if (['nodeInputJson', 'nodeInputBinary'].includes(operationMode)) {
+          let textSplitter: TextSplitter | undefined;

-        switch (chunkingMode) {
-          // In simple mode we use recursive character splitter with default settings
-          case 'simple':
-            const chunkSize = this.getNodeParameter('chunkSize', itemIndex, 1000) as number;
-            const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex, 200) as number;
+          switch (chunkingMode) {
+            // In simple mode we use recursive character splitter with default settings
+            case 'simple':
+              const chunkSize = this.getNodeParameter('chunkSize', itemIndex, 1000) as number;
+              const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex, 200) as number;

-            textSplitter = new RecursiveCharacterTextSplitter({ chunkOverlap, chunkSize });
-            break;
+              textSplitter = new RecursiveCharacterTextSplitter({ chunkOverlap, chunkSize });
+              break;

-          // In advanced mode user can connect text splitter node so we just retrieve it
-          case 'advanced':
-            textSplitter = (await this.getInputConnectionData(
-              NodeConnectionType.AiTextSplitter,
-              0,
-            )) as TextSplitter | undefined;
-            break;
-          default:
-            break;
-        }
+            // In advanced mode user can connect text splitter node so we just retrieve it
+            case 'advanced':
+              textSplitter = (await this.getInputConnectionData(
+                NodeConnectionType.AiTextSplitter,
+                0,
+              )) as TextSplitter | undefined;
+              break;
+            default:
+              break;
+          }

-        let processor: N8nJsonLoader | N8nBinaryLoader;
-        if (operationMode === 'nodeInputBinary') {
-          const binaryDataKey = this.getNodeParameter(
-            'options.binaryDataKey',
-            itemIndex,
-            'data',
-          ) as string;
-          processor = new N8nBinaryLoader(this, 'options.', binaryDataKey, textSplitter);
-        } else {
-          processor = new N8nJsonLoader(this, 'options.', textSplitter);
+          let processor: N8nJsonLoader | N8nBinaryLoader;
+          if (operationMode === 'nodeInputBinary') {
+            const binaryDataKey = this.getNodeParameter(
+              'options.binaryDataKey',
+              itemIndex,
+              'data',
+            ) as string;
+            processor = new N8nBinaryLoader(this, 'options.', binaryDataKey, textSplitter);
+          } else {
+            processor = new N8nJsonLoader(this, 'options.', textSplitter);
+          }
+
+          const processedItem = await processor.processItem(item, itemIndex);
+          const response = await chain.call({
+            input_documents: processedItem,
+          });
+          returnData.push({ json: { response } });
+        }
+      } catch (error) {
+        if (this.continueOnFail()) {
+          returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+          continue;
         }

-        const processedItem = await processor.processItem(item, itemIndex);
-        const response = await chain.call({
-          input_documents: processedItem,
-        });
-        returnData.push({ json: { response } });
+        throw error;
       }
     }
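
Every hunk in this patch applies the same per-item error-handling shape: the existing item logic is wrapped in try/catch, and on failure the node either emits an error item (when "Continue On Fail" is enabled) or rethrows. A minimal, self-contained TypeScript sketch of that shape follows; the ExecutionContext interface and the runItem helper are simplified stand-ins for illustration only, not n8n's real IExecuteFunctions or the node-specific chain/agent calls.

// Sketch of the continue-on-fail pattern this patch adds to each node's item loop.
// `ExecutionContext` stands in for n8n's execution context; `runItem` is a
// hypothetical per-item worker (build the prompt, invoke the chain/agent, parse output).
interface ResultItem {
  json: Record<string, unknown>;
  pairedItem?: { item: number };
}

interface ExecutionContext {
  continueOnFail(): boolean;
}

async function executeAllItems(
  ctx: ExecutionContext,
  itemCount: number,
  runItem: (itemIndex: number) => Promise<ResultItem>,
): Promise<ResultItem[]> {
  const returnData: ResultItem[] = [];

  for (let itemIndex = 0; itemIndex < itemCount; itemIndex++) {
    try {
      returnData.push(await runItem(itemIndex));
    } catch (error) {
      if (ctx.continueOnFail()) {
        // Emit an error item tied to the failing input instead of aborting the whole run.
        returnData.push({
          json: { error: (error as Error).message },
          pairedItem: { item: itemIndex },
        });
        continue;
      }
      throw error;
    }
  }

  return returnData;
}

With continue-on-fail enabled, a failing item thus becomes an error object paired to its input index while the remaining items still run; with it disabled, the original throwing behaviour is preserved.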