fix(core): Improve model sub-nodes error handling (n8n-io#11418)
burivuhster authored Nov 8, 2024
1 parent b496bf3 commit 57467d0
Showing 24 changed files with 309 additions and 55 deletions.
[LmChatAnthropic]
@@ -14,6 +14,7 @@ import {

import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

const modelField: INodeProperties = {
displayName: 'Model',
@@ -214,6 +215,7 @@ export class LmChatAnthropic implements INodeType {
topK: options.topK,
topP: options.topP,
callbacks: [new N8nLlmTracing(this, { tokensUsageParser })],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
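Note: the change above is the template for every model sub-node in this commit: the node wires onFailedAttempt: makeN8nLlmFailedAttemptHandler(this) (optionally with a vendor-specific handler as a second argument) into the LangChain model constructor. The factory's own source file is not among the hunks shown on this page. Judging from its call sites, it plausibly has roughly the shape below; the FailedAttemptHandler alias and the NodeApiError wrapping are assumptions, the latter mirroring the inline handler this commit deletes from the OpenAI node further down.

// Sketch only, inferred from usage on this page; not the commit's actual source.
import { NodeApiError, type ISupplyDataFunctions, type JsonObject } from 'n8n-workflow';

import { n8nDefaultFailedAttemptHandler } from './n8nDefaultFailedAttemptHandler';

type FailedAttemptHandler = (error: any) => void;

export const makeN8nLlmFailedAttemptHandler =
	(ctx: ISupplyDataFunctions, handler?: FailedAttemptHandler): FailedAttemptHandler =>
	(error: any) => {
		// Give the vendor-specific handler (if any) the first look at the error...
		handler?.(error);
		try {
			// ...then apply the shared default, which rethrows non-retryable errors.
			n8nDefaultFailedAttemptHandler(error);
		} catch (e) {
			// Attribute the failure to this model sub-node instead of the root node.
			throw new NodeApiError(ctx.getNode(), e as unknown as JsonObject);
		}
	};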
[LmChatOllama]
@@ -12,6 +12,7 @@ import { ChatOllama } from '@langchain/ollama';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { ollamaModel, ollamaOptions, ollamaDescription } from '../LMOllama/description';
import { N8nLlmTracing } from '../N8nLlmTracing';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatOllama implements INodeType {
description: INodeTypeDescription = {
@@ -64,6 +65,7 @@ export class LmChatOllama implements INodeType {
model: modelName,
format: options.format === 'default' ? undefined : options.format,
callbacks: [new N8nLlmTracing(this)],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[LmChatOpenAi]
@@ -1,19 +1,18 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */

+import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
import {
NodeConnectionType,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
-type JsonObject,
-NodeApiError,
} from 'n8n-workflow';

-import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
+import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
import { N8nLlmTracing } from '../N8nLlmTracing';
-import { RateLimitError } from 'openai';
-import { getCustomErrorMessage } from '../../vendors/OpenAi/helpers/error-handling';

export class LmChatOpenAi implements INodeType {
description: INodeTypeDescription = {
@@ -276,25 +275,7 @@ export class LmChatOpenAi implements INodeType {
response_format: { type: options.responseFormat },
}
: undefined,
-onFailedAttempt: (error: any) => {
-// If the error is a rate limit error, we want to handle it differently
-// because OpenAI has multiple different rate limit errors
-if (error instanceof RateLimitError) {
-const errorCode = error?.code;
-if (errorCode) {
-const customErrorMessage = getCustomErrorMessage(errorCode);
-
-const apiError = new NodeApiError(this.getNode(), error as unknown as JsonObject);
-if (customErrorMessage) {
-apiError.message = customErrorMessage;
-}
-
-throw apiError;
-}
-}
-
-throw error;
-},
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this, openAiFailedAttemptHandler),
});

return {
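The rate-limit logic deleted above is not dropped; the node now hands openAiFailedAttemptHandler, imported from the vendor's error-handling helpers, to the shared factory. That helpers file is not shown in this view, but reconstructed from the deleted inline code it plausibly reduces to the following sketch.

// Hedged reconstruction; error-handling.ts is not among the hunks on this page.
import { RateLimitError } from 'openai';

// getCustomErrorMessage maps OpenAI rate-limit error codes to friendlier
// user-facing messages; it already exists in this same helpers module.
declare function getCustomErrorMessage(errorCode: string): string | undefined;

export const openAiFailedAttemptHandler = (error: any) => {
	// OpenAI has multiple different rate-limit errors, so resolve the specific
	// error code to a custom message where one exists.
	if (error instanceof RateLimitError && error.code) {
		const customErrorMessage = getCustomErrorMessage(error.code);
		if (customErrorMessage) {
			error.message = customErrorMessage;
		}
	}
	// Everything else falls through to the default handler supplied by
	// makeN8nLlmFailedAttemptHandler(this, openAiFailedAttemptHandler).
};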
[LmCohere]
@@ -10,6 +10,7 @@ import {
import { Cohere } from '@langchain/cohere';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmCohere implements INodeType {
description: INodeTypeDescription = {
@@ -99,6 +100,7 @@ export class LmCohere implements INodeType {
apiKey: credentials.apiKey as string,
...options,
callbacks: [new N8nLlmTracing(this)],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[LmOllama]
@@ -11,6 +11,7 @@ import { Ollama } from '@langchain/community/llms/ollama';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { ollamaDescription, ollamaModel, ollamaOptions } from './description';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmOllama implements INodeType {
description: INodeTypeDescription = {
@@ -62,6 +63,7 @@ export class LmOllama implements INodeType {
model: modelName,
...options,
callbacks: [new N8nLlmTracing(this)],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[LmOpenAi]
@@ -10,6 +10,7 @@ import type {

import { OpenAI, type ClientOptions } from '@langchain/openai';
import { N8nLlmTracing } from '../N8nLlmTracing';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

type LmOpenAiOptions = {
baseURL?: string;
@@ -260,6 +261,7 @@ export class LmOpenAi implements INodeType {
timeout: options.timeout ?? 60000,
maxRetries: options.maxRetries ?? 2,
callbacks: [new N8nLlmTracing(this)],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[LmOpenHuggingFaceInference]
@@ -10,6 +10,7 @@ import {
import { HuggingFaceInference } from '@langchain/community/llms/hf';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmOpenHuggingFaceInference implements INodeType {
description: INodeTypeDescription = {
@@ -143,6 +144,7 @@ export class LmOpenHuggingFaceInference implements INodeType {
apiKey: credentials.apiKey as string,
...options,
callbacks: [new N8nLlmTracing(this)],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[LmChatAwsBedrock]
@@ -10,6 +10,7 @@ import {

import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatAwsBedrock implements INodeType {
description: INodeTypeDescription = {
@@ -151,6 +152,7 @@ export class LmChatAwsBedrock implements INodeType {
sessionToken: credentials.sessionToken as string,
},
callbacks: [new N8nLlmTracing(this)],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[LmChatAzureOpenAi]
@@ -10,6 +10,7 @@ import {
import { ChatOpenAI } from '@langchain/openai';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatAzureOpenAi implements INodeType {
description: INodeTypeDescription = {
@@ -195,6 +196,7 @@ export class LmChatAzureOpenAi implements INodeType {
response_format: { type: options.responseFormat },
}
: undefined,
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[LmChatGoogleGemini]
@@ -11,6 +11,7 @@ import type { SafetySetting } from '@google/generative-ai';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { additionalOptions } from '../gemini-common/additional-options';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatGoogleGemini implements INodeType {
description: INodeTypeDescription = {
@@ -144,6 +145,7 @@ export class LmChatGoogleGemini implements INodeType {
maxOutputTokens: options.maxOutputTokens,
safetySettings,
callbacks: [new N8nLlmTracing(this)],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[LmChatGoogleVertex]
@@ -17,6 +17,7 @@ import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { additionalOptions } from '../gemini-common/additional-options';
import { makeErrorFromStatus } from './error-handling';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatGoogleVertex implements INodeType {
description: INodeTypeDescription = {
@@ -170,7 +171,8 @@ export class LmChatGoogleVertex implements INodeType {
safetySettings,
callbacks: [new N8nLlmTracing(this)],
// Handle ChatVertexAI invocation errors to provide better error messages
-onFailedAttempt: (error: any) => {
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this, (error: any) => {
+// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
const customError = makeErrorFromStatus(Number(error?.response?.status), {
modelName,
});
@@ -180,7 +182,7 @@
}

throw error;
-},
+}),
});

return {
[LmChatGroq]
@@ -10,6 +10,7 @@ import {
import { ChatGroq } from '@langchain/groq';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatGroq implements INodeType {
description: INodeTypeDescription = {
@@ -144,6 +145,7 @@ export class LmChatGroq implements INodeType {
maxTokens: options.maxTokensToSample,
temperature: options.temperature,
callbacks: [new N8nLlmTracing(this)],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[LmChatMistralCloud]
@@ -11,6 +11,7 @@ import type { ChatMistralAIInput } from '@langchain/mistralai';
import { ChatMistralAI } from '@langchain/mistralai';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
+import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatMistralCloud implements INodeType {
description: INodeTypeDescription = {
@@ -190,6 +191,7 @@ export class LmChatMistralCloud implements INodeType {
modelName,
...options,
callbacks: [new N8nLlmTracing(this)],
+onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {
[packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts] 26 additions, 5 deletions
@@ -1,17 +1,18 @@
import { BaseCallbackHandler } from '@langchain/core/callbacks/base';
+import type { SerializedFields } from '@langchain/core/dist/load/map_keys';
import { getModelNameForTiktoken } from '@langchain/core/language_models/base';
+import { encodingForModel } from '@langchain/core/utils/tiktoken';
import type {
Serialized,
SerializedNotImplemented,
SerializedSecret,
} from '@langchain/core/load/serializable';
+import type { BaseMessage } from '@langchain/core/messages';
import type { LLMResult } from '@langchain/core/outputs';
-import type { IDataObject, ISupplyDataFunctions } from 'n8n-workflow';
-import { NodeConnectionType } from 'n8n-workflow';
-import { encodingForModel } from '@langchain/core/utils/tiktoken';
+import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow';
import { pick } from 'lodash';
-import type { BaseMessage } from '@langchain/core/messages';
-import type { SerializedFields } from '@langchain/core/dist/load/map_keys';
+import { NodeConnectionType, NodeError, NodeOperationError } from 'n8n-workflow';

import { logAiEvent } from '../../utils/helpers';

type TokensUsageParser = (llmOutput: LLMResult['llmOutput']) => {
@@ -30,6 +31,10 @@ const TIKTOKEN_ESTIMATE_MODEL = 'gpt-4o';
export class N8nLlmTracing extends BaseCallbackHandler {
name = 'N8nLlmTracing';

+// This flag makes sure that LangChain will wait for the handlers to finish before continuing
+// This is crucial for the handleLLMError handler to work correctly (it should be called before the error is propagated to the root node)
+awaitHandlers = true;

connectionType = NodeConnectionType.AiLanguageModel;

promptTokensEstimate = 0;
@@ -135,6 +140,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
this.executionFunctions.addOutputData(this.connectionType, runDetails.index, [
[{ json: { ...response } }],
]);

logAiEvent(this.executionFunctions, 'ai-llm-generated-output', {
messages: parsedMessages,
options: runDetails.options,
@@ -172,6 +178,8 @@
runId: string,
parentRunId?: string | undefined,
) {
+const runDetails = this.runsMap[runId] ?? { index: Object.keys(this.runsMap).length };

// Filter out non-x- headers to avoid leaking sensitive information in logs
if (typeof error === 'object' && error?.hasOwnProperty('headers')) {
const errorWithHeaders = error as { headers: Record<string, unknown> };
@@ -183,6 +191,19 @@
});
}

+if (error instanceof NodeError) {
+this.executionFunctions.addOutputData(this.connectionType, runDetails.index, error);
+} else {
+// If the error is not a NodeError, we wrap it in a NodeOperationError
+this.executionFunctions.addOutputData(
+this.connectionType,
+runDetails.index,
+new NodeOperationError(this.executionFunctions.getNode(), error as JsonObject, {
+functionality: 'configuration-node',
+}),
+);
+}

logAiEvent(this.executionFunctions, 'ai-llm-errored', {
error: Object.keys(error).length === 0 ? error.toString() : error,
runId,
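Two details in this file carry the fix. First, awaitHandlers = true makes LangChain await each callback handler, so handleLLMError finishes writing the error to the sub-node's output (addOutputData) before the error propagates to the root node. Second, handleLLMError now surfaces the failure on the sub-node itself: NodeError instances pass through unchanged, and anything else is wrapped in a NodeOperationError tagged functionality: 'configuration-node'. A minimal handler showing the ordering guarantee (illustrative only, not from the commit):

import { BaseCallbackHandler } from '@langchain/core/callbacks/base';

class ExampleTracer extends BaseCallbackHandler {
	name = 'ExampleTracer';

	// Without this flag, handlers may run fire-and-forget, and the LLM error
	// could reach the caller before the handler has recorded it.
	awaitHandlers = true;

	async handleLLMError(error: Error) {
		// Guaranteed to complete before the error is rethrown to the caller.
		console.error('model sub-node failed:', error.message);
	}
}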
[n8nDefaultFailedAttemptHandler tests] (new file)
@@ -0,0 +1,66 @@
import { n8nDefaultFailedAttemptHandler } from './n8nDefaultFailedAttemptHandler';

class MockHttpError extends Error {
response: { status: number };

constructor(message: string, code: number) {
super(message);
this.response = { status: code };
}
}

describe('n8nDefaultFailedAttemptHandler', () => {
it('should throw error if message starts with "Cancel"', () => {
const error = new Error('Cancel operation');
expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should throw error if message starts with "AbortError"', () => {
const error = new Error('AbortError occurred');
expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should throw error if name is "AbortError"', () => {
class MockAbortError extends Error {
constructor() {
super('Some error');
this.name = 'AbortError';
}
}

const error = new MockAbortError();

expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should throw error if code is "ECONNABORTED"', () => {
class MockAbortError extends Error {
code: string;

constructor() {
super('Some error');
this.code = 'ECONNABORTED';
}
}

const error = new MockAbortError();
expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should throw error if status is in STATUS_NO_RETRY', () => {
const error = new MockHttpError('Some error', 400);
expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should not throw error if status is not in STATUS_NO_RETRY', () => {
const error = new MockHttpError('Some error', 500);
error.response = { status: 500 };

expect(() => n8nDefaultFailedAttemptHandler(error)).not.toThrow();
});

it('should not throw error if no conditions are met', () => {
const error = new Error('Some random error');
expect(() => n8nDefaultFailedAttemptHandler(error)).not.toThrow();
});
});
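The handler these tests exercise is added elsewhere in this commit and is not shown on this page. Read back from the expectations above, its behavior is roughly the sketch below; the exact STATUS_NO_RETRY membership (beyond 400 being in it and 500 not) is an assumption.

// Sketch derived from the tests above; not the commit's actual source.
const STATUS_NO_RETRY = [400, 401, 402, 403, 404, 405, 406, 407, 409];

export const n8nDefaultFailedAttemptHandler = (error: any) => {
	// Cancellations and aborts must never be retried: rethrow immediately.
	if (
		error?.message?.startsWith('Cancel') ||
		error?.message?.startsWith('AbortError') ||
		error?.name === 'AbortError' ||
		error?.code === 'ECONNABORTED'
	) {
		throw error;
	}

	// Non-retryable HTTP statuses (e.g. 400) are rethrown so retries stop;
	// retryable ones (e.g. 500) return normally and LangChain tries again.
	const status = error?.response?.status ?? error?.status;
	if (status && STATUS_NO_RETRY.includes(Number(status))) {
		throw error;
	}
};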
(remaining changed files not shown)
