Skip to content

Commit a5eb441

Browse files
author
Ruby Lo
committed
added N8nLlmTracing utility
1 parent 51c15d2 commit a5eb441

6 files changed

Lines changed: 128 additions & 5 deletions

File tree

docker/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ USER root
44

55
RUN mkdir -p /opt/n8n-custom-nodes && \
66
cd /opt/n8n-custom-nodes && \
7-
npm install n8n-nodes-langfuse-llm-dev@0.1.4 && \
7+
npm install n8n-nodes-openai-langfuse@0.1.9 && \
88
chown -R node:node /opt/n8n-custom-nodes
99

1010
ENV N8N_CUSTOM_EXTENSIONS="/opt/n8n-custom-nodes"

nodes/LmChatOpenAiLangfuse/LmChatOpenAiLangfuse.node.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
22
import {
3-
NodeConnectionType as NodeConnectionTypeT,
43
jsonParse,
54
type INodeType,
65
type INodeTypeDescription,
@@ -10,6 +9,7 @@ import {
109

1110
import { CallbackHandler } from 'langfuse-langchain';
1211
import { searchModels } from './methods/loadModels';
12+
import { N8nLlmTracing } from './utils/N8nLlmTracing'
1313

1414

1515
export class LmChatOpenAiLangfuse implements INodeType {
@@ -46,7 +46,7 @@ export class LmChatOpenAiLangfuse implements INodeType {
4646
},
4747

4848
inputs: [],
49-
outputs: [NodeConnectionTypeT.AiLanguageModel],
49+
outputs: ['ai_languageModel'],
5050
outputNames: ['Model'],
5151
credentials: [
5252
{ name: 'openAiApiWithLangfuseApi', required: true },
@@ -435,7 +435,7 @@ export class LmChatOpenAiLangfuse implements INodeType {
435435
modelKwargs.reasoning_effort = options.reasoningEffort;
436436

437437
const model = new ChatOpenAI({
438-
callbacks: [lfHandler],
438+
callbacks: [lfHandler, new N8nLlmTracing(this)],
439439
metadata: customMetadata,
440440
apiKey: credentials.apiKey as string,
441441
configuration: { baseURL: configuration.baseURL },
nodes/LmChatOpenAiLangfuse/utils/N8nLlmTracing.ts

Lines changed: 96 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,96 @@
1+
import { BaseCallbackHandler } from "@langchain/core/callbacks/base";
2+
import type { Serialized } from "@langchain/core/load/serializable";
3+
import type { BaseMessage } from "@langchain/core/messages";
4+
import type { LLMResult } from "@langchain/core/outputs";
5+
import {
6+
type ISupplyDataFunctions,
7+
type IExecuteFunctions,
8+
type IDataObject,
9+
NodeOperationError,
10+
type JsonObject,
11+
} from "n8n-workflow";
12+
import { logAiEvent } from "./helpers";
13+
14+
type RunDetail = {
15+
index: number;
16+
messages: BaseMessage[] | string[] | string;
17+
options: any;
18+
};
19+
20+
export class N8nLlmTracing extends BaseCallbackHandler {
21+
name = "N8nLlmTracing";
22+
awaitHandlers = true;
23+
24+
connectionType = 'ai_languageModel';
25+
#parentRunIndex?: number;
26+
runsMap: Record<string, RunDetail> = {};
27+
28+
constructor(private executionFunctions: ISupplyDataFunctions | IExecuteFunctions) {
29+
super();
30+
}
31+
32+
async handleLLMStart(llm: Serialized, prompts: string[], runId: string) {
33+
const sourceNodeRunIndex =
34+
this.#parentRunIndex !== undefined
35+
? this.#parentRunIndex + (this.executionFunctions as any).getNextRunIndex?.()
36+
: undefined;
37+
38+
const options = llm.type === "constructor" ? llm.kwargs : llm;
39+
const { index } = (this.executionFunctions as any).addInputData(
40+
this.connectionType,
41+
[[{ json: { messages: prompts, options } }]],
42+
sourceNodeRunIndex,
43+
);
44+
45+
this.runsMap[runId] = {
46+
index,
47+
options,
48+
messages: prompts,
49+
};
50+
}
51+
52+
async handleLLMEnd(output: LLMResult, runId: string) {
53+
const runDetails = this.runsMap[runId] ?? { index: 0, options: {}, messages: [] };
54+
55+
const response = { response: { generations: output.generations } };
56+
57+
const sourceNodeRunIndex =
58+
this.#parentRunIndex !== undefined ? this.#parentRunIndex + runDetails.index : undefined;
59+
60+
(this.executionFunctions as any).addOutputData(
61+
this.connectionType,
62+
runDetails.index,
63+
[[{ json: response }]],
64+
undefined,
65+
sourceNodeRunIndex,
66+
);
67+
68+
logAiEvent(this.executionFunctions, "ai-llm-generated-output", {
69+
messages: runDetails.messages,
70+
options: runDetails.options,
71+
response,
72+
});
73+
}
74+
75+
async handleLLMError(error: IDataObject | Error, runId: string, parentRunId?: string) {
76+
const runDetails = this.runsMap[runId] ?? { index: 0, options: {}, messages: [] };
77+
78+
(this.executionFunctions as any).addOutputData(
79+
this.connectionType,
80+
runDetails.index,
81+
new NodeOperationError((this.executionFunctions as any).getNode(), error as JsonObject, {
82+
functionality: "configuration-node",
83+
}),
84+
);
85+
86+
logAiEvent(this.executionFunctions, "ai-llm-errored", {
87+
error: (error as any)?.message ?? String(error),
88+
runId,
89+
parentRunId,
90+
});
91+
}
92+
93+
setParentRunIndex(runIndex: number) {
94+
this.#parentRunIndex = runIndex;
95+
}
96+
}
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
import { DynamicStructuredTool } from "langchain/tools";
2+
3+
export class N8nTool extends DynamicStructuredTool<any> {
4+
async _call(input: string, runManager?: any): Promise<any> {
5+
try {
6+
const parser = this.schema as any;
7+
const parsed = await parser.parseAsync(input);
8+
return this.func(parsed, runManager);
9+
} catch (err: any) {
10+
throw new Error(`Tool input parse failed: ${err?.message ?? String(err)}`);
11+
}
12+
}
13+
}
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
import type { IDataObject, ISupplyDataFunctions, IExecuteFunctions } from "n8n-workflow";
2+
3+
export function logAiEvent(
4+
executeFunctions: ISupplyDataFunctions | IExecuteFunctions,
5+
event: "ai-llm-generated-output" | "ai-llm-errored",
6+
data?: IDataObject,
7+
) {
8+
try {
9+
(executeFunctions as any).logAiEvent(event, data ? JSON.stringify(data) : undefined);
10+
} catch (error) {
11+
(executeFunctions as any).logger?.debug?.(`Error logging AI event: ${event}`);
12+
}
13+
}

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "n8n-nodes-openai-langfuse",
3-
"version": "0.1.3",
3+
"version": "0.1.9",
44
"description": "n8n community node: Langfuse + OpenAI-compatible LLM provider",
55
"keywords": [
66
"n8n-community-node-package",
@@ -46,6 +46,7 @@
4646
"@langchain/core": "^0.3.72",
4747
"@langchain/openai": "^0.6.9",
4848
"langfuse-langchain": "^3.0.0",
49+
"openai": "^5.19.1",
4950
"proxy-from-env": "^1.1.0",
5051
"undici": "^7.15.0"
5152
},

0 commit comments

Comments
 (0)