Next.js
API route handlers, React Server Components, and client hooks for Next.js 13+ App Router.
Installation
Terminal
npm install @tryhamster/gerbil

API Route Handler
Create a simple API route that handles chat requests:
app/api/chat/route.ts
// app/api/chat/route.ts
import { gerbil } from "@tryhamster/gerbil/next";

export const POST = gerbil.handler({
  model: "qwen3-0.6b",
});

// This creates an endpoint that accepts:
// POST /api/chat
// Body: { prompt: string, stream?: boolean, options?: {...} }
// Returns: { text: string, ... } or streams text
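You can call this endpoint from any client code with a plain fetch. A minimal sketch of a non-streaming call, assuming the request and response shapes shown in the comments above:

// Calling the /api/chat endpoint (illustrative)
const res = await fetch("/api/chat", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ prompt: "Explain the App Router in one sentence." }),
});

const { text } = await res.json(); // non-streaming responses include a text field
console.log(text);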
Handler Options
app/api/ai/route.ts
// app/api/ai/route.ts
import { gerbil } from "@tryhamster/gerbil/next";

export const POST = gerbil.handler({
  // Model configuration
  model: "qwen3-0.6b",
  device: "auto", // "auto" | "gpu" | "cpu"
  dtype: "q4", // "q4" | "q8" | "fp16" | "fp32"

  // Default generation options
  maxTokens: 500,
  temperature: 0.7,
  thinking: false,

  // System prompt (can be overridden per request)
  system: "You are a helpful assistant.",

  // Callbacks
  onStart: async (req) => {
    console.log("Generation started");
  },
  onFinish: async (result) => {
    console.log("Generated:", result.text);
  },
  onError: async (error) => {
    console.error("Error:", error);
  },
});
Multiple Endpoints
Create multiple AI endpoints with a catch-all route:
app/api/ai/[...path]/route.ts
// app/api/ai/[...path]/route.ts
import { gerbil } from "@tryhamster/gerbil/next";

const handlers = gerbil.createHandlers({
  model: "qwen3-0.6b",
  maxTokens: 500,
});

export async function POST(
  req: Request,
  { params }: { params: { path: string[] } }
) {
  const path = params.path.join("/");

  switch (path) {
    case "generate":
      return handlers.generate(req);
    case "stream":
      return handlers.stream(req);
    case "json":
      return handlers.json(req);
    case "embed":
      return handlers.embed(req);
    case "chat":
      return handlers.chat(req);
    default:
      return new Response("Not found", { status: 404 });
  }
}

export async function GET(
  req: Request,
  { params }: { params: { path: string[] } }
) {
  const path = params.path.join("/");

  switch (path) {
    case "info":
      return handlers.info(req);
    case "models":
      return handlers.models(req);
    default:
      return new Response("Not found", { status: 404 });
  }
}

// Endpoints created:
// POST /api/ai/generate - Generate text
// POST /api/ai/stream   - Stream text
// POST /api/ai/json     - Structured JSON output
// POST /api/ai/embed    - Generate embeddings
// POST /api/ai/chat     - Multi-turn chat
// GET  /api/ai/info     - Model info
// GET  /api/ai/models   - List available models
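The GET endpoints can be exercised without a request body. A minimal sketch (the exact response fields depend on the gerbil handlers):

// Querying the GET endpoints created by the catch-all route (illustrative)
const info = await fetch("/api/ai/info").then((r) => r.json());
const models = await fetch("/api/ai/models").then((r) => r.json());
console.log(info, models);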
Streaming Responses
Stream responses for real-time chat UIs:
app/api/stream/route.ts
// app/api/stream/route.ts
import { gerbil } from "@tryhamster/gerbil/next";

export async function POST(req: Request) {
  const { prompt, system } = await req.json();

  const stream = await gerbil.stream(prompt, {
    model: "qwen3-0.6b",
    system,
  });

  return new Response(stream, {
    headers: {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache",
      "Connection": "keep-alive",
    },
  });
}
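On the client, the streamed response can be read incrementally with the standard fetch reader API. A minimal sketch that assumes the route returns plain text chunks (adjust the parsing if the stream is SSE-framed):

// Reading the stream from POST /api/stream (illustrative)
async function readStream(prompt: string): Promise<string> {
  const res = await fetch("/api/stream", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ prompt }),
  });

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let output = "";

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    output += decoder.decode(value, { stream: true }); // append each chunk as it arrives
  }
  return output;
}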
Structured JSON Output
app/api/extract/route.ts
// app/api/extract/route.ts
import { gerbil } from "@tryhamster/gerbil/next";
import { z } from "zod";

const PersonSchema = z.object({
  name: z.string(),
  age: z.number(),
  email: z.string().email().optional(),
});

export async function POST(req: Request) {
  const { text } = await req.json();

  const data = await gerbil.json(text, {
    model: "qwen3-0.6b",
    schema: PersonSchema,
    retries: 3,
  });

  return Response.json(data);
}
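Because the route validates against PersonSchema, the client can reuse the schema's inferred type. A minimal sketch, assuming PersonSchema is exported from a shared module (in the route above it is local to the file):

// Typed client-side call to /api/extract (illustrative)
import { z } from "zod";
import { PersonSchema } from "@/lib/schemas"; // hypothetical shared module

type Person = z.infer<typeof PersonSchema>;

const person: Person = await fetch("/api/extract", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ text: "Ada Lovelace, 36, ada@example.com" }),
}).then((r) => r.json());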
React Server Components
Use Gerbil directly in Server Components:
app/summary/page.tsx
// app/summary/page.tsx
import gerbil from "@tryhamster/gerbil";

// This runs on the server
export default async function SummaryPage() {
  await gerbil.loadModel("qwen3-0.6b");

  const summary = await gerbil.generate(
    "Summarize: The quick brown fox jumps over the lazy dog.",
    { maxTokens: 100 }
  );

  return (
    <div>
      <h1>Summary</h1>
      <p>{summary.text}</p>
    </div>
  );
}
Client Components
Use the React hooks in client components:
app/chat/page.tsx
// app/chat/page.tsx
"use client";

import { useGerbil } from "@tryhamster/gerbil/react";
import { useState } from "react";

export default function ChatPage() {
  const [input, setInput] = useState("");
  const [response, setResponse] = useState("");
  const { generate, stream, isLoading } = useGerbil({
    endpoint: "/api/ai",
  });

  const handleGenerate = async () => {
    const result = await generate(input);
    setResponse(result.text);
  };

  const handleStream = async () => {
    setResponse("");
    for await (const chunk of stream(input)) {
      setResponse((prev) => prev + chunk);
    }
  };

  return (
    <div className="p-4">
      <textarea
        value={input}
        onChange={(e) => setInput(e.target.value)}
        placeholder="Enter your prompt..."
        className="w-full p-2 border rounded"
      />
      <div className="flex gap-2 mt-2">
        <button onClick={handleGenerate} disabled={isLoading}>
          Generate
        </button>
        <button onClick={handleStream} disabled={isLoading}>
          Stream
        </button>
      </div>
      {response && (
        <div className="mt-4 p-4 bg-gray-100 rounded">
          {response}
        </div>
      )}
    </div>
  );
}
Middleware
Add authentication or rate limiting:
app/api/ai/route.ts
// app/api/ai/route.ts
import { gerbil } from "@tryhamster/gerbil/next";
import { getServerSession } from "next-auth";
import { Ratelimit } from "@upstash/ratelimit";
import { Redis } from "@upstash/redis";

// Rate limiter (example with Upstash): 10 requests per 10 seconds per IP
const ratelimit = new Ratelimit({
  redis: Redis.fromEnv(),
  limiter: Ratelimit.slidingWindow(10, "10 s"),
});

export async function POST(req: Request) {
  // Check authentication
  const session = await getServerSession();
  if (!session) {
    return new Response("Unauthorized", { status: 401 });
  }

  // Rate limiting
  const ip = req.headers.get("x-forwarded-for") || "anonymous";
  const { success } = await ratelimit.limit(ip);
  if (!success) {
    return new Response("Too many requests", { status: 429 });
  }

  // Process request
  const { prompt } = await req.json();
  const result = await gerbil.generate(prompt, {
    model: "qwen3-0.6b",
  });

  return Response.json(result);
}
Edge Runtime
Note: Gerbil requires the Node.js runtime for model loading. For edge deployments, use a separate API server or serverless function.
app/api/ai/route.ts
// app/api/ai/route.ts
import { gerbil } from "@tryhamster/gerbil/next";

// This route uses the Node.js runtime (default)
export const runtime = "nodejs";

export const POST = gerbil.handler({
  model: "qwen3-0.6b",
});
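If part of your app must run on the edge, one option is a thin edge route that forwards requests to a separate Node.js server hosting Gerbil. A minimal sketch, where GERBIL_API_URL is a hypothetical environment variable pointing at that server:

// app/api/ai-proxy/route.ts (illustrative)
export const runtime = "edge";

export async function POST(req: Request) {
  // Forward the request body to a Node.js API server running Gerbil
  const upstream = await fetch(`${process.env.GERBIL_API_URL}/api/chat`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: await req.text(),
  });

  // Pass the upstream response (including streamed bodies) straight through
  return new Response(upstream.body, {
    status: upstream.status,
    headers: {
      "Content-Type": upstream.headers.get("Content-Type") ?? "application/json",
    },
  });
}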
Environment Variables
.env.local
# .env.local
# Model cache directory (optional)
GERBIL_CACHE_DIR=./models

# Default model (optional)
GERBIL_DEFAULT_MODEL=qwen3-0.6b

# Device preference (optional)
GERBIL_DEVICE=auto
Full Chat Application
A complete chat app with streaming, history, and thinking mode:
app/api/chat/route.ts
// app/api/chat/route.ts
import { gerbil } from "@tryhamster/gerbil/next";

export async function POST(req: Request) {
  const { messages, thinking } = await req.json();

  const stream = await gerbil.chat(messages, {
    model: "qwen3-0.6b",
    thinking,
    stream: true,
  });

  return new Response(stream, {
    headers: { "Content-Type": "text/event-stream" },
  });
}

app/chat/page.tsx
// app/chat/page.tsx
"use client";

import { useChat } from "@tryhamster/gerbil/react";

export default function ChatApp() {
  const {
    messages,
    input,
    setInput,
    handleSubmit,
    isLoading,
    thinking,
    setThinking,
  } = useChat({
    endpoint: "/api/chat",
  });

  return (
    <div className="flex flex-col h-screen">
      {/* Messages */}
      <div className="flex-1 overflow-auto p-4 space-y-4">
        {messages.map((m, i) => (
          <div
            key={i}
            className={`p-3 rounded ${
              m.role === "user" ? "bg-blue-100 ml-auto" : "bg-gray-100"
            } max-w-[80%]`}
          >
            {m.thinking && (
              <div className="text-sm text-gray-500 italic mb-2">
                {m.thinking}
              </div>
            )}
            {m.content}
          </div>
        ))}
      </div>

      {/* Input */}
      <form onSubmit={handleSubmit} className="p-4 border-t">
        <div className="flex gap-2">
          <input
            value={input}
            onChange={(e) => setInput(e.target.value)}
            placeholder="Type a message..."
            className="flex-1 p-2 border rounded"
            disabled={isLoading}
          />
          <label className="flex items-center gap-1">
            <input
              type="checkbox"
              checked={thinking}
              onChange={(e) => setThinking(e.target.checked)}
            />
            Think
          </label>
          <button
            type="submit"
            disabled={isLoading}
            className="px-4 py-2 bg-blue-500 text-white rounded"
          >
            {isLoading ? "..." : "Send"}
          </button>
        </div>
      </form>
    </div>
  );
}