babyapi
v0.1.2
Published
BabyAPI client (OpenAI-compatible /v1/completions and /v1/chat/completions).
Downloads
373
Maintainers
Readme

BabyAPI (JS SDK)
A tiny Node.js client for BabyAPI — an OpenAI-compatible API for hosted open-weight models.
- OpenAI-compatible endpoints:
POST /v1/completions
POST /v1/chat/completions
- BabyAPI convenience endpoint:
POST /infer (simple text-in, text-out)
Minimal surface area. Calm defaults. You bring an API key — we handle the GPUs.
Install
npm install babyapi

Quick start
const { BabyAPI } = require('babyapi');
const client = new BabyAPI({
apiKey: process.env.BABYAPI_API_KEY,
// baseURL: 'https://api.babyapi.org', // optional
});
async function run() {
const res = await client.chat.completions.create({
model: 'mistral',
messages: [{ role: 'user', content: 'Say hi in 5 words.' }],
});
console.log(res.choices?.[0]?.message?.content);
}
run().catch(console.error);

Configuration
const client = new BabyAPI({
apiKey: process.env.BABYAPI_API_KEY, // required
baseURL: process.env.BABYAPI_BASE_URL, // optional (default: https://api.babyapi.org)
timeoutMs: 60_000, // JSON requests only
maxRetries: 2, // retry transient failures
retryBaseDelayMs: 250, // exponential backoff base
defaultModel: 'mistral', // used by client.baby.infer when model is omitted
defaultHeaders: { 'x-app': 'my-sideproject' }, // extra headers for every request
});

Environment variables supported:
BABYAPI_API_KEY (or BABY_API_KEY), BABYAPI_BASE_URL, BABYAPI_DEFAULT_MODEL
OpenAI-compatible: Chat Completions
const res = await client.chat.completions.create({
model: 'mixtral',
messages: [
{ role: 'system', content: 'You are concise.' },
{ role: 'user', content: 'Give me 3 tagline ideas for a tiny LLM API.' },
],
temperature: 0.7,
});
console.log(res.choices?.[0]?.message?.content);

OpenAI-compatible: Completions
const res = await client.completions.create({
model: 'mistral',
prompt: 'Write a friendly release note opener for BabyAPI.',
max_tokens: 120,
temperature: 0.7,
});
console.log(res.choices?.[0]?.text);

Streaming (SSE) for chat
.stream() returns an async iterator that yields SSE events.
for await (const evt of client.chat.completions.stream({
model: 'mistral',
messages: [{ role: 'user', content: 'Write a short poem about servers.' }],
})) {
if (evt.done) break;
// evt.data: parsed JSON when possible (otherwise null)
// evt.raw: raw "data:" payload string
//
// For OpenAI-style streams, you usually want:
const delta = evt.data?.choices?.[0]?.delta?.content;
if (delta) process.stdout.write(delta);
}
process.stdout.write('\n');

Streaming for completions
for await (const evt of client.completions.stream({
model: 'mistral',
prompt: 'Write 5 bullet points about calm APIs.',
})) {
if (evt.done) break;
const text = evt.data?.choices?.[0]?.text;
if (text) process.stdout.write(text);
}
process.stdout.write('\n');

Abort / cancellation (AbortController)
const ac = new AbortController();
setTimeout(() => ac.abort(), 1500);
try {
const res = await client.chat.completions.create(
{
model: 'mistral',
messages: [{ role: 'user', content: 'Write a long story...' }],
},
{ signal: ac.signal }
);
console.log(res.choices?.[0]?.message?.content);
} catch (err) {
if (err.name === 'BabyAPIError') {
console.error('BabyAPIError:', err.code, err.status, err.message);
} else {
console.error(err);
}
}

BabyAPI convenience: client.infer(...) (routes chat vs completions)
If you prefer “just do the right thing”, use client.infer(...):
// Routes to /v1/chat/completions if messages[] exists
const chatRes = await client.infer({
model: 'mistral',
messages: [{ role: 'user', content: 'One-line slogan for BabyAPI?' }],
});
console.log(chatRes.choices?.[0]?.message?.content);
// Routes to /v1/completions if prompt exists
const compRes = await client.infer({
model: 'mistral',
prompt: 'Give 3 product names for a tiny LLM SDK.',
max_tokens: 60,
});
console.log(compRes.choices?.[0]?.text);

BabyAPI convenience: client.baby.infer(...) (simple text-out)
This hits BabyAPI’s /infer endpoint and returns a normalized response:
const out = await client.baby.infer({
model: 'mistral',
prompt: 'Write a 1-line release note title.',
maxTokens: 40,
temperature: 0.5,
});
console.log(out.output);
console.log(out.usage); // prompt_tokens / completion_tokens / total_tokens
console.log(out.finish_reason);

You can also pass a raw string (uses defaultModel if configured):
const client = new BabyAPI({ apiKey: process.env.BABYAPI_API_KEY, defaultModel: 'mistral' });
const out = await client.baby.infer('Explain BabyAPI in one sentence.');
console.log(out.output);

Supported options (aliases are accepted):
max_tokens/maxTokens, temperature, top_p/topP, top_k/topK, stop, presence_penalty/presencePenalty, frequency_penalty/frequencyPenalty
Vision / image input (OpenAI-style)
If your selected model supports vision, you can send an image using OpenAI-style message content:
const res = await client.chat.completions.create({
model: 'pixtral', // or another vision-capable model you expose
messages: [
{
role: 'user',
content: [
{ type: 'text', text: 'Describe the image in 2 sentences. Then list 3 distinct objects you can see.' },
{
type: 'image_url',
image_url: {
url: 'https://api.babyapi.org/images/banner.png',
},
},
],
},
],
});
console.log(res.choices?.[0]?.message?.content);

Vision streaming
for await (const evt of client.chat.completions.stream({
model: 'pixtral',
messages: [
{
role: 'user',
content: [
{ type: 'text', text: 'What is this image trying to communicate?' },
{ type: 'image_url', image_url: { url: 'https://api.babyapi.org/images/banner.png' } },
],
},
],
})) {
if (evt.done) break;
const delta = evt.data?.choices?.[0]?.delta?.content;
if (delta) process.stdout.write(delta);
}
process.stdout.write('\n');

Note: image support depends on the model you choose. If the model is text-only, the API may reject image inputs.
Errors
All SDK errors are thrown as BabyAPIError when possible:
try {
await client.chat.completions.create({ model: 'mistral', messages: [] });
} catch (err) {
if (err.name === 'BabyAPIError') {
console.error({
message: err.message,
status: err.status,
code: err.code,
type: err.type,
requestId: err.requestId,
});
} else {
console.error(err);
}
}

Request options (per-call)
Every .create(...) / .stream(...) call can override:
const res = await client.chat.completions.create(
{
model: 'mistral',
messages: [{ role: 'user', content: 'Hello.' }],
},
{
apiKey: process.env.BABYAPI_API_KEY, // override key
timeoutMs: 30_000, // JSON only
maxRetries: 0, // disable retries
headers: { 'x-trace': 'abc123' }, // extra per-request headers
// signal: new AbortController().signal, // cancellation
}
);

TypeScript
This package ships types via index.d.ts.
import { BabyAPI } from 'babyapi';
const client = new BabyAPI({ apiKey: process.env.BABYAPI_API_KEY! });
const res = await client.chat.completions.create({
model: 'mistral',
messages: [{ role: 'user', content: 'TypeScript works.' }],
});

License
MIT.
