@rocketnew/llm-sdk
v1.1.0
Published
Universal LLM SDK for JavaScript/TypeScript - OpenAI, Anthropic, Gemini, Perplexity and more
Maintainers
Readme
RocketLLM TypeScript SDK
Universal LLM SDK for JavaScript/TypeScript.
Install
npm install @rocketnew/llm-sdk
Chat Completion
OpenAI
Basic
import { completion } from '@rocketnew/llm-sdk';
const response = await completion({
model: 'gpt-5.4',
messages: [{ role: 'user', content: 'Hello!' }]
});
Streaming
const stream = await completion({
model: 'gpt-5.4',
messages: [{ role: 'user', content: 'Count to 5' }],
stream: true
});
for await (const chunk of stream) {
process.stdout.write(chunk.choices[0]?.delta?.content || '');
}
Tool calling
const response = await completion({
model: 'gpt-5.4',
messages: [{ role: 'user', content: 'What is the weather in Paris?' }],
tools: [{
type: 'function',
function: {
name: 'get_weather',
description: 'Get weather for a location',
parameters: {
type: 'object',
properties: {
location: { type: 'string' }
},
required: ['location']
}
}
}]
});
JSON mode
const response = await completion({
model: 'gpt-5.4',
messages: [{ role: 'user', content: 'List 3 colors in JSON' }],
response_format: { type: 'json_object' }
});
Vision
const response = await completion({
model: 'gpt-5.4',
messages: [{
role: 'user',
content: [
{ type: 'text', text: 'Describe this image' },
{ type: 'image_url', image_url: 'https://example.com/image.jpg' }
]
}]
});
Image generation
import { imageGeneration } from '@rocketnew/llm-sdk';
const response = await imageGeneration({
model: 'gpt-image-1.5',
prompt: 'A photorealistic sunset over a mountain lake',
size: '1024x1024',
quality: 'high',
n: 1
});
const b64 = response.data[0].b64_json;
Image editing
import { imageEdit } from '@rocketnew/llm-sdk';
import * as fs from 'fs';
const response = await imageEdit({
model: 'gpt-image-1.5',
image: fs.readFileSync('source.png'),
prompt: 'Add a rainbow over the mountains',
size: '1024x1024'
});
Text-to-speech
import { speech } from '@rocketnew/llm-sdk';
const audio = await speech({
model: 'gpt-4o-mini-tts',
input: 'Hello from RocketLLM.',
voice: 'alloy',
response_format: 'mp3'
});
// Write the audio bytes to disk
await audio.writeToFile('hello.mp3');
Speech-to-text
import { transcription } from '@rocketnew/llm-sdk';
import * as fs from 'fs';
const response = await transcription({
model: 'gpt-4o-transcribe',
file: fs.readFileSync('audio.mp3')
});
console.log(response.text);
Anthropic
Basic
const response = await completion({
model: 'claude-sonnet-4-6',
messages: [{ role: 'user', content: 'Hello!' }]
});
Streaming
const stream = await completion({
model: 'claude-sonnet-4-6',
messages: [{ role: 'user', content: 'Write a poem' }],
stream: true
});
for await (const chunk of stream) {
process.stdout.write(chunk.choices[0]?.delta?.content || '');
}
Tool calling
const response = await completion({
model: 'claude-sonnet-4-6',
messages: [{ role: 'user', content: 'Calculate 25 * 4' }],
tools: [{
type: 'function',
function: {
name: 'calculate',
parameters: {
type: 'object',
properties: {
operation: { type: 'string' },
a: { type: 'number' },
b: { type: 'number' }
}
}
}
}]
});
Extended thinking
const response = await completion({
model: 'claude-sonnet-4-6',
messages: [{ role: 'user', content: 'Solve this complex problem...' }],
thinking: { type: 'enabled', budget_tokens: 2000 }
});
console.log(response.choices[0].message.thinking_blocks);
Reasoning effort
const response = await completion({
model: 'claude-sonnet-4-6',
messages: [{ role: 'user', content: 'Explain quantum computing' }],
reasoning_effort: 'high'
});
Web search
const response = await completion({
model: 'claude-sonnet-4-6',
messages: [{ role: 'user', content: 'What are the latest AI news?' }],
web_search_options: {
search_context_size: 'medium'
}
});
JSON mode
const response = await completion({
model: 'claude-sonnet-4-6',
messages: [{ role: 'user', content: 'List 3 colors' }],
response_format: { type: 'json_object' }
});
Gemini
Chat completion
Basic
const response = await completion({
model: 'gemini/gemini-2.5-flash',
messages: [{ role: 'user', content: 'Hello!' }]
});
Streaming
const stream = await completion({
model: 'gemini/gemini-2.5-flash',
messages: [{ role: 'user', content: 'Explain AI' }],
stream: true
});
for await (const chunk of stream) {
process.stdout.write(chunk.choices[0]?.delta?.content || '');
}
Tool calling
const response = await completion({
model: 'gemini/gemini-2.5-flash',
messages: [{ role: 'user', content: 'Get weather for Tokyo' }],
tools: [{
type: 'function',
function: {
name: 'get_weather',
parameters: {
type: 'object',
properties: {
location: { type: 'string' }
}
}
}
}]
});
Vision
const response = await completion({
model: 'gemini/gemini-2.5-flash',
messages: [{
role: 'user',
content: [
{ type: 'text', text: 'What is in this image?' },
{ type: 'image_url', image_url: 'https://example.com/photo.jpg' }
]
}]
});
JSON mode
const response = await completion({
model: 'gemini/gemini-2.5-flash',
messages: [{ role: 'user', content: 'List 3 countries' }],
response_format: { type: 'json_object' }
});
Image generation
import { imageGeneration } from '@rocketnew/llm-sdk';
const response = await imageGeneration({
model: 'gemini/gemini-2.5-flash-image',
prompt: 'A peaceful zen garden with cherry blossoms',
size: '1024x1024',
n: 1
});
Image editing
import { imageEdit } from '@rocketnew/llm-sdk';
import * as fs from 'fs';
const response = await imageEdit({
model: 'gemini/gemini-2.5-flash-image',
image: fs.readFileSync('source.png'),
prompt: 'Add falling cherry blossom petals',
size: '1024x1024'
});
Text-to-speech
import { speech } from '@rocketnew/llm-sdk';
const audio = await speech({
model: 'gemini/gemini-2.5-flash-preview-tts',
input: 'Hello from Gemini.',
voice: 'Kore'
});
// Gemini returns audio/wav
await audio.writeToFile('hello.wav');
Perplexity
Basic
const response = await completion({
model: 'perplexity/sonar-pro',
messages: [{ role: 'user', content: 'What is TypeScript?' }]
});
Streaming
const stream = await completion({
model: 'perplexity/sonar-pro',
messages: [{ role: 'user', content: 'Explain React' }],
stream: true
});
for await (const chunk of stream) {
process.stdout.write(chunk.choices[0]?.delta?.content || '');
}
With citations
const response = await completion({
model: 'perplexity/sonar-pro',
messages: [{ role: 'user', content: 'Latest AI developments?' }]
});
// Response content includes inline citations
console.log(response.choices[0].message.content);
Made with ❤️ by Rocket Team
