@langinsight/langchain
v0.0.12
Published
LangInsight callback handler for LangChain.
Readme
@langinsight/langchain
LangInsight callback handler for LangChain.
Usage
Basic Usage
import { ChatOllama } from "@langchain/ollama";
import { LangInsight } from "@langinsight/langchain";
const model = new ChatOllama({ model: "gpt-oss:20b" });
const { LANGINSIGHT_ENDPOINT, LANGINSIGHT_API_KEY } = process.env;
// Specify callback handler when invoking LLM
await model.invoke("hi!", {
callbacks: [
new LangInsight.CallbackHandler({
metadata: { userId: "admin", sessionId: "admin" },
endpoint: LANGINSIGHT_ENDPOINT,
apiKey: LANGINSIGHT_API_KEY,
}),
],
});

Usage with LangGraph
import { Annotation, END, START, StateGraph } from "@langchain/langgraph";
import { ChatOllama } from "@langchain/ollama";
import { LangInsight } from "@langinsight/langchain";
const model = new ChatOllama({ model: "gpt-oss:20b" });
// Configure LangInsight callback handler
const callbacks = [
new LangInsight.CallbackHandler({
metadata: { userId: "admin", sessionId: "langgraph-example" },
endpoint: process.env.LANGINSIGHT_ENDPOINT,
apiKey: process.env.LANGINSIGHT_API_KEY,
}),
];
// Define graph state
const GraphState = Annotation.Root({
input: Annotation<string>,
response: Annotation<string>,
});
// Define node
async function processInput(state: typeof GraphState.State) {
const response = await model.invoke(state.input, { callbacks });
return {
response: response.content as string,
};
}
// Build graph
const workflow = new StateGraph(GraphState)
.addNode("process", processInput)
.addEdge(START, "process")
.addEdge("process", END);
const app = workflow.compile();
// Execute graph
const result = await app.invoke(
{ input: "What is LangGraph?" },
{ callbacks }
);

Options
- apiKey: LangInsight API key (required)
- endpoint: LangInsight API endpoint (required)
- metadata: Metadata to attach to trace information (required)
  - userId: User ID
  - sessionId: Session ID
  - Any other key-value pairs
