@mantr/sdk
v1.0.1
Published
Official TypeScript/JavaScript SDK for Mantr - Deterministic Semantic Memory
Downloads
32
Maintainers
Readme
Mantr TypeScript/JavaScript SDK
Deterministic Semantic Memory for AI Agents
What is Mantr?
Mantr provides 100% reproducible knowledge retrieval through graph-based semantic walks powered by Sanskrit phonetic ontology. Perfect for building AI agents, RAG systems, and deterministic search.
Quick Start
Installation
npm install @mantr/sdk
# or
yarn add @mantr/sdk
TypeScript Example
import { MantrClient } from '@mantr/sdk';
const client = new MantrClient({ apiKey: 'vak_live_...' });
const result = await client.walk({
phonemes: ['dharma', 'karma', 'yoga'],
depth: 3,
limit: 100
});
console.log(`Found ${result.paths.length} paths`);
JavaScript Example
const { MantrClient } = require('@mantr/sdk');
const client = new MantrClient({ apiKey: 'vak_live_...' });
client.walk({ phonemes: ['knowledge'] })
.then(result => console.log(result.paths))
.catch(err => console.error(err));
AI & Agentic Patterns
1. RAG with Next.js API Routes
// app/api/chat/route.ts
import { MantrClient } from '@mantr/sdk';
import OpenAI from 'openai';
const mantr = new MantrClient({ apiKey: process.env.MANTR_API_KEY! });
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY! });
export async function POST(req: Request) {
const { question } = await req.json();
// Get deterministic context
const context = await mantr.walk({
phonemes: extractConcepts(question),
depth: 4,
limit: 20
});
// Format for LLM
const contextStr = context.paths
.map(p => p.nodes.join(' → '))
.join('\n');
// Generate response
const response = await openai.chat.completions.create({
model: 'gpt-4',
messages: [
{ role: 'system', content: `Context:\n${contextStr}` },
{ role: 'user', content: question }
]
});
return Response.json({ answer: response.choices[0].message.content });
}
2. Multi-Agent System (LangChain)
import { MantrClient } from '@mantr/sdk';
import { BaseTool } from 'langchain/tools';
class MantrMemoryTool extends BaseTool {
name = 'mantr_memory';
description = 'Get deterministic context from Mantr';
private client: MantrClient;
constructor(apiKey: string) {
super();
this.client = new MantrClient({ apiKey });
}
async _call(query: string): Promise<string> {
const result = await this.client.walk({
phonemes: extractConcepts(query),
depth: 3
});
return JSON.stringify(result.paths);
}
}
// Use in agent
import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import { OpenAI } from 'langchain/llms/openai';
const tools = [new MantrMemoryTool('vak_live_...')];
const llm = new OpenAI({ temperature: 0 });
const agent = await initializeAgentExecutorWithOptions(tools, llm, {
agentType: 'zero-shot-react-description',
});
const result = await agent.call({
input: 'What do you know about authentication?'
});
3. Streaming Chat with Context
import { MantrClient } from '@mantr/sdk';
class ContextualChat {
private client: MantrClient;
private history: string[] = [];
constructor(apiKey: string) {
this.client = new MantrClient({ apiKey });
}
async sendMessage(message: string): Promise<AsyncIterable<string>> {
// Get context from all history
const allConcepts = [...this.history, message];
const context = await this.client.walk({
phonemes: extractConcepts(allConcepts),
depth: 4
});
this.history.push(message);
// Stream response with context
return this.streamLLMResponse(message, context);
}
private async *streamLLMResponse(message: string, context: any) {
// Implementation using OpenAI streaming
const stream = await openai.chat.completions.create({
model: 'gpt-4',
messages: [
{ role: 'system', content: formatContext(context) },
{ role: 'user', content: message }
],
stream: true
});
for await (const chunk of stream) {
yield chunk.choices[0]?.delta?.content || '';
}
}
}
4. Express.js Middleware
import { MantrClient } from '@mantr/sdk';
import express from 'express';
const mantr = new MantrClient({ apiKey: 'vak_live_...' });
// Middleware to enrich requests with context
export const contextMiddleware = async (req, res, next) => {
const query = req.query.q as string;
if (query) {
try {
const context = await mantr.walk({
phonemes: extractConcepts(query),
depth: 3
});
req.mantrContext = context;
} catch (error) {
console.error('Mantr error:', error);
}
}
next();
};
// Usage
app.use(contextMiddleware);
app.get('/api/search', (req, res) => {
res.json({
query: req.query.q,
context: req.mantrContext?.paths || []
});
});
5. React Hook
import { MantrClient } from '@mantr/sdk';
import { useState, useEffect } from 'react';
export function useMantrContext(concepts: string[]) {
const [context, setContext] = useState(null);
const [loading, setLoading] = useState(false);
useEffect(() => {
if (!concepts.length) return;
const client = new MantrClient({ apiKey: process.env.NEXT_PUBLIC_MANTR_API_KEY! });
setLoading(true);
client.walk({ phonemes: concepts, depth: 3 })
.then(setContext)
.finally(() => setLoading(false));
}, [concepts.join(',')]);
return { context, loading };
}
// Usage in component
function SearchPage() {
const { context, loading } = useMantrContext(['authentication', 'security']);
if (loading) return <div>Loading context...</div>;
return (
<div>
{context?.paths.map(path => (
<div key={path.nodes.join('-')}>
{path.nodes.join(' → ')}
</div>
))}
</div>
);
}
6. Vercel AI SDK Integration
import { MantrClient } from '@mantr/sdk';
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';
const mantr = new MantrClient({ apiKey: process.env.MANTR_API_KEY! });
export async function POST(req: Request) {
const { messages } = await req.json();
// Get context from conversation
const lastMessage = messages[messages.length - 1];
const context = await mantr.walk({
phonemes: extractConcepts(lastMessage.content),
depth: 4
});
// Stream response with context
const result = await streamText({
model: openai('gpt-4'),
messages: [
{
role: 'system',
content: `Context: ${JSON.stringify(context.paths)}`
},
...messages
]
});
return result.toAIStreamResponse();
}
API Reference
MantrClient
const client = new MantrClient({
apiKey: string,
baseUrl?: string, // Default: 'https://api.mantr.net'
timeout?: number // Default: 30000 (30s)
});
walk()
const result = await client.walk({
phonemes: string[], // Required: Concepts to explore
pod?: string, // Optional: Isolated context
depth?: number, // Optional: 1-10, default 3
limit?: number // Optional: 1-1000, default 100
});
Examples
- nextjs-rag/ - Next.js app with RAG
- express-api/ - Express server with context
- langchain-agent/ - LangChain integration
- react-hooks/ - React hooks for Mantr
License
MIT - see LICENSE
