@llamaflowjs/autotool
Auto-transpile your JS functions into LLM Agent compatible tools.
Usage
First, install the package using npm, pnpm, or yarn:
npm install @llamaflowjs/autotool
pnpm add @llamaflowjs/autotool
yarn add @llamaflowjs/autotool
Second, add the plugin/loader to your configuration:
Next.js
import { withNext } from "@llamaflowjs/autotool/next";
/** @type {import('next').NextConfig} */
const nextConfig = {};
export default withNext(nextConfig);
Node.js
node --import @llamaflowjs/autotool/node ./path/to/your/script.js
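If you run the script regularly, the loader flag can live in a package.json script instead of being typed out each time. The script name below is only an example, and the path is the same placeholder as above:
{
  "scripts": {
    "agent": "node --import @llamaflowjs/autotool/node ./path/to/your/script.js"
  }
}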
"use tool";
export function getWeather(city: string) {
// ...
}
// ...
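As a slightly fuller sketch, a tool file is an ordinary module whose exported functions become agent tools. The weather endpoint below is a made-up placeholder, and the idea that the transpiler derives the tool schema from the TypeScript signature and JSDoc comment is an assumption to verify against the package docs:
"use tool";

/**
 * Get the current weather for a city.
 * @param city The city name, e.g. "Berlin".
 */
export async function getWeather(city: string): Promise<string> {
  // Assumption: the JSDoc above feeds the generated tool description.
  // Hypothetical endpoint; swap in a real weather API.
  const res = await fetch(
    `https://api.example.com/weather?city=${encodeURIComponent(city)}`,
  );
  const data = await res.json();
  return `The weather in ${city} is ${data.summary}.`;
}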
"use server";
// imports ...
export async function chatWithAI(message: string): Promise<JSX.Element> {
const agent = new OpenAIAgent({
tools: convertTools("llamaflowjs"),
});
const uiStream = createStreamableUI();
agent
.chat({
stream: true,
message,
})
.then(async (responseStream) => {
return responseStream.pipeTo(
new WritableStream({
start: () => {
uiStream.append("\n");
},
write: async (message) => {
uiStream.append(message.response.delta);
},
close: () => {
uiStream.done();
},
}),
);
});
return uiStream.value;
}License
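On the client, the server action can then be called like any other async function and the returned element rendered directly. The component below is only a usage sketch, assuming a recent Next.js App Router setup where form actions are available; the import path for chatWithAI is a placeholder:
"use client";
import { useState } from "react";
import { chatWithAI } from "./actions"; // placeholder path to the file exporting chatWithAI

export default function Chat() {
  const [input, setInput] = useState("");
  const [reply, setReply] = useState<JSX.Element | null>(null);

  return (
    <form
      action={async () => {
        // The returned element keeps updating as the agent streams its response.
        setReply(await chatWithAI(input));
      }}
    >
      <input value={input} onChange={(e) => setInput(e.target.value)} />
      <button type="submit">Send</button>
      <div>{reply}</div>
    </form>
  );
}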
License
MIT
