ai-stream-client
v1.0.6
Published
Lightweight library for handling AI text streaming from any backend
Maintainers
Readme
ai-stream-client
🚀 Lightweight library for handling AI text streaming from any backend.
Installation
npm install ai-stream-client
Quick Start
React Hook (Recommended)
import { useAIStream } from "ai-stream-client/react";
function ChatApp() {
const { sendMessage, result, loading, error, abort, clear } = useAIStream({
url: "YOUR_API_ENDPOINT_HERE",
onStream: (data) => {
// Handle each data chunk as it arrives
console.log("Received:", data);
},
onFinish: () => {
console.log("Stream complete!");
},
});
return (
<div>
<button onClick={() => sendMessage({ prompt: "Hello AI!" })} disabled={loading}>
{loading ? "Sending..." : "Send Message"}
</button>
<button onClick={abort} disabled={!loading}>
Stop
</button>
<button onClick={clear}>
Reset
</button>
{Boolean(error) && (
<p>Error: {error instanceof Error ? error.message : String(error)}</p>
)}
<pre>{typeof result === 'string' ? result : JSON.stringify(result, null, 2)}</pre>
</div>
);
}
Vanilla JavaScript
import { createAIStream } from "ai-stream-client";
createAIStream({
url: "YOUR_API_ENDPOINT_HERE",
body: { prompt: "Hello AI!" },
onChunk: (data) => console.log("Received:", data),
onFinish: () => console.log("Done!"),
});
React Hook API
Options
{
url: string; // Your API endpoint
method?: "POST" | "GET"; // HTTP method (default: POST)
headers?: Record<string, string>; // Request headers
onStream?: (data: unknown) => void; // Called for each data chunk
onFinish?: () => void; // Called when complete
onError?: (error: unknown) => void; // Called on error
}
Returns
{
sendMessage: (body: unknown) => void; // Send message and start streaming
abort: () => void; // Cancel the stream
clear: () => void; // Clear all state
result: unknown; // Accumulated result (string, object, etc.)
chunks: unknown[]; // Array of individual chunks
loading: boolean; // Stream active state
isActive: boolean; // Alias for loading
isIdle: boolean; // Opposite of loading
isError: boolean; // True if error exists
error: unknown; // Error if any
}
Examples
With FormData
const formData = new FormData();
formData.append('text', 'Hello');
formData.append('context', 'World');
const { sendMessage } = useAIStream({
url: '/api/chat',
onStream: (data) => console.log(data),
});
sendMessage(formData);
With Authorization
const { sendMessage } = useAIStream({
url: '/api/chat',
headers: {
'Authorization': `Bearer ${token}`,
},
onStream: (data) => console.log(data),
});
Using Chunks Array
const { chunks, sendMessage, loading } = useAIStream({
url: '/api/chat',
});
// Process chunks
useEffect(() => {
chunks.forEach(chunk => {
// Do something with each chunk
console.log(chunk);
});
}, [chunks]);
Handling Different Result Types
const { result, sendMessage } = useAIStream({
url: '/api/chat',
});
// Display based on result type
{result && (
typeof result === 'string' ? (
<p>{result}</p>
) : Array.isArray(result) ? (
<ul>
{result.map((item, i) => <li key={i}>{item}</li>)}
</ul>
) : (
<pre>{JSON.stringify(result, null, 2)}</pre>
)
)}
Vue 3 Composition API
<script setup>
import { useAIStream } from "ai-stream-client/vue";
const { sendMessage, result, loading, error } = useAIStream({
url: "YOUR_API_ENDPOINT_HERE",
onChunk: (data) => {
// Handle each data chunk as it arrives
console.log("Received:", data);
},
onFinish: () => {
console.log("Stream complete!");
},
});
</script>
<template>
<div>
<button @click="sendMessage({ prompt: 'Hello AI!' })" :disabled="loading">
{{ loading ? 'Sending...' : 'Send Message' }}
</button>
<p v-if="error">Error: {{ error.message }}</p>
<p v-if="result && typeof result === 'string'">{{ result }}</p>
<pre v-else-if="result">{{ JSON.stringify(result, null, 2) }}</pre>
</div>
</template>
Backend Requirements
Your backend should:
- Set the streaming header: Content-Type: text/event-stream
- Send data in chunks using res.write()
- Call res.end() when done
Example Node.js/Express:
app.post("/api/stream", (req, res) => {
res.setHeader("Content-Type", "text/event-stream");
res.setHeader("Cache-Control", "no-cache");
res.setHeader("Connection", "keep-alive");
res.flushHeaders();
// Send data in chunks
const data = "Hello world from AI".split(" ");
let i = 0;
const interval = setInterval(() => {
if (i < data.length) {
res.write(data[i] + " ");
i++;
} else {
clearInterval(interval);
res.end();
}
}, 100);
});
TypeScript Support
Full TypeScript support out of the box:
import type {
StreamOptions,
UseAIStreamOptions,
UseAIStreamReturn,
} from "ai-stream-client";
License
MIT
Made with ❤️ for the AI developer community
