From 86f92ed4f5c2ae718e1e30af393ccb916573e961 Mon Sep 17 00:00:00 2001
From: Marc Muller <153335452+datainvest23@users.noreply.github.com>
Date: Wed, 24 Sep 2025 02:23:28 +0200
Subject: [PATCH] Add Next.js OpenRouter chat example with tools
---
examples/next-chat/.env.local.example | 5 +
examples/next-chat/README.md | 49 ++
examples/next-chat/app/api/chat/route.ts | 65 +++
.../next-chat/app/components/chat-message.tsx | 38 ++
.../app/components/tool-invocation-card.tsx | 49 ++
examples/next-chat/app/globals.css | 278 +++++++++
examples/next-chat/app/layout.tsx | 17 +
examples/next-chat/app/page.tsx | 551 ++++++++++++++++++
examples/next-chat/lib/conversation-types.ts | 26 +
examples/next-chat/lib/format.ts | 15 +
examples/next-chat/lib/models.ts | 41 ++
examples/next-chat/lib/tools.ts | 82 +++
examples/next-chat/next-env.d.ts | 5 +
examples/next-chat/next.config.mjs | 6 +
examples/next-chat/package.json | 26 +
examples/next-chat/tsconfig.json | 19 +
pnpm-workspace.yaml | 1 +
17 files changed, 1273 insertions(+)
create mode 100644 examples/next-chat/.env.local.example
create mode 100644 examples/next-chat/README.md
create mode 100644 examples/next-chat/app/api/chat/route.ts
create mode 100644 examples/next-chat/app/components/chat-message.tsx
create mode 100644 examples/next-chat/app/components/tool-invocation-card.tsx
create mode 100644 examples/next-chat/app/globals.css
create mode 100644 examples/next-chat/app/layout.tsx
create mode 100644 examples/next-chat/app/page.tsx
create mode 100644 examples/next-chat/lib/conversation-types.ts
create mode 100644 examples/next-chat/lib/format.ts
create mode 100644 examples/next-chat/lib/models.ts
create mode 100644 examples/next-chat/lib/tools.ts
create mode 100644 examples/next-chat/next-env.d.ts
create mode 100644 examples/next-chat/next.config.mjs
create mode 100644 examples/next-chat/package.json
create mode 100644 examples/next-chat/tsconfig.json
diff --git a/examples/next-chat/.env.local.example b/examples/next-chat/.env.local.example
new file mode 100644
index 0000000..bcdf210
--- /dev/null
+++ b/examples/next-chat/.env.local.example
@@ -0,0 +1,5 @@
+# Required: obtain an API key from https://openrouter.ai/keys
+OPENROUTER_API_KEY=sk-or-...
+
+# Optional: override the base URL if you are pointing at a proxy.
+# OPENROUTER_BASE_URL=https://openrouter.ai/api/v1
diff --git a/examples/next-chat/README.md b/examples/next-chat/README.md
new file mode 100644
index 0000000..76a99cc
--- /dev/null
+++ b/examples/next-chat/README.md
@@ -0,0 +1,49 @@
+# OpenRouter Next.js Chat Example
+
+This example demonstrates how to build a streaming chat experience in Next.js using the
+[`@openrouter/ai-sdk-provider`](https://www.npmjs.com/package/@openrouter/ai-sdk-provider)
+and the Vercel AI SDK. The UI lets you:
+
+- pick an OpenRouter model
+- toggle tool usage on or off
+- watch streaming assistant replies
+- inspect tool invocations and their inputs/outputs in real time
+
+## Getting Started
+
+1. Install dependencies:
+
+   ```bash
+   pnpm install
+   ```
+
+   > **Note:** The example is part of the monorepo. You can also `cd examples/next-chat`
+   > and run `pnpm install` followed by `pnpm dev`.
+
+2. Copy the example environment file and add your OpenRouter key:
+
+ ```bash
+ cp examples/next-chat/.env.local.example examples/next-chat/.env.local
+ ```
+
+ At minimum you need `OPENROUTER_API_KEY`. Set `OPENROUTER_BASE_URL` if you proxy requests.
+
+3. Start the development server:
+
+ ```bash
+ pnpm --filter @openrouter/examples-next-chat dev
+ ```
+
+ Visit `http://localhost:3000` to try the chat experience.
+
+## How It Works
+
+- `app/api/chat/route.ts` configures the OpenRouter provider, streams responses with tools, and
+ returns AI SDK UI message streams.
+- `app/page.tsx` implements a small client-side state machine that consumes the stream, renders
+ messages, and keeps track of tool invocations.
+- `lib/tools.ts` defines two sample tools (`getCurrentWeather` and `getCurrentTime`). You can add
+  your own tools or wire in real data sources, as sketched below.
+
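+As a rough sketch, a custom tool added to `lib/tools.ts` might look like the following, reusing
+the `tool` helper and `z` already imported there (the `searchDocs` name and its stubbed lookup
+are hypothetical placeholders):
+
+```ts
+export const searchDocs = tool({
+  description: 'Search the project documentation for a topic.',
+  inputSchema: z.object({
+    query: z.string({ description: 'Topic or phrase to look up.' }).min(2),
+  }),
+  execute: async ({ query }) => {
+    // Replace this stub with a real data source (database, HTTP API, etc.).
+    return { query, results: [`No documentation indexed yet for "${query}".`] };
+  },
+});
+
+// Register it alongside the existing tools:
+export const BASIC_TOOLS = {
+  getCurrentWeather,
+  getCurrentTime,
+  searchDocs,
+};
+```
+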
+This example is intentionally lightweight so you can adapt it for your own projects.
diff --git a/examples/next-chat/app/api/chat/route.ts b/examples/next-chat/app/api/chat/route.ts
new file mode 100644
index 0000000..cf55968
--- /dev/null
+++ b/examples/next-chat/app/api/chat/route.ts
@@ -0,0 +1,65 @@
+import { createOpenRouter } from '@openrouter/ai-sdk-provider';
+import type { ModelMessage } from 'ai';
+import { streamText } from 'ai';
+
+import { BASIC_TOOLS } from '../../../lib/tools';
+import { DEFAULT_SYSTEM_PROMPT } from '../../../lib/models';
+
+interface ChatRequestBody {
+ modelId: string;
+ toolMode?: 'auto' | 'disabled';
+ messages: ModelMessage[];
+}
+
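+// The OpenRouter provider falls back to the OPENROUTER_API_KEY environment variable when no
+// apiKey is passed, so the explicit check in POST exists only to fail fast with a clear error.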
+const openrouter = createOpenRouter({
+ compatibility: 'strict',
+ baseURL: process.env.OPENROUTER_BASE_URL ?? process.env.OPENROUTER_API_BASE,
+});
+
+function normalizeToolMode(toolMode: ChatRequestBody['toolMode']) {
+ return toolMode === 'disabled' ? 'disabled' : 'auto';
+}
+
+export async function POST(request: Request) {
+ const apiKey = process.env.OPENROUTER_API_KEY;
+ if (!apiKey) {
+ return Response.json(
+ { error: 'Missing OPENROUTER_API_KEY environment variable.' },
+ { status: 500 },
+ );
+ }
+
+ let body: ChatRequestBody;
+ try {
+ body = (await request.json()) as ChatRequestBody;
+ } catch (_error) {
+ return Response.json({ error: 'Invalid JSON payload.' }, { status: 400 });
+ }
+
+ if (!body || typeof body.modelId !== 'string') {
+ return Response.json({ error: 'Request must include a modelId string.' }, { status: 400 });
+ }
+
+  if (
+    !Array.isArray(body.messages) ||
+    body.messages.some((message) => message === null || typeof message !== 'object')
+  ) {
+ return Response.json({ error: 'Messages must be an array of chat messages.' }, { status: 400 });
+ }
+
+ const toolMode = normalizeToolMode(body.toolMode);
+ const shouldExposeTools = toolMode !== 'disabled';
+
+ try {
+ const result = streamText({
+ model: openrouter(body.modelId),
+ system: DEFAULT_SYSTEM_PROMPT,
+ messages: body.messages,
+ tools: shouldExposeTools ? BASIC_TOOLS : undefined,
+ toolChoice: shouldExposeTools ? 'auto' : 'none',
+ });
+
+ return result.toUIMessageStreamResponse();
+ } catch (error) {
+ const errorMessage =
+ error instanceof Error ? error.message : 'Unknown error while contacting OpenRouter.';
+ return Response.json({ error: errorMessage }, { status: 500 });
+ }
+}
diff --git a/examples/next-chat/app/components/chat-message.tsx b/examples/next-chat/app/components/chat-message.tsx
new file mode 100644
index 0000000..2b63363
--- /dev/null
+++ b/examples/next-chat/app/components/chat-message.tsx
@@ -0,0 +1,38 @@
+import type { ConversationEntry } from '../../lib/conversation-types';
+import { formatStructuredData } from '../../lib/format';
+import { ToolInvocationCard } from './tool-invocation-card';
+
+function getAvatarLabel(role: ConversationEntry['role']) {
+ return role === 'user' ? 'You' : 'AI';
+}
+
+export function ChatMessage({ entry }: { entry: ConversationEntry }) {
+  return (
+    <div className={`message ${entry.role}`}>
+      <span className="avatar">{getAvatarLabel(entry.role)}</span>
+      <div className="bubble">
+        {entry.text ? <p>{entry.text}</p> : null}
+        {entry.reasoning.length > 0 ? (
+          <details>
+            <summary>Reasoning</summary>
+            <pre>{entry.reasoning.join('\n')}</pre>
+          </details>
+        ) : null}
+        {entry.metadata ? (
+          <details>
+            <summary>Metadata</summary>
+            <pre>{formatStructuredData(entry.metadata)}</pre>
+          </details>
+        ) : null}
+        {entry.toolInvocations.map((tool) => (
+          <ToolInvocationCard key={tool.id} invocation={tool} />
+        ))}
+        {entry.pending ? <p>Streaming response…</p> : null}
+      </div>
+    </div>
+  );
+}
diff --git a/examples/next-chat/app/components/tool-invocation-card.tsx b/examples/next-chat/app/components/tool-invocation-card.tsx
new file mode 100644
index 0000000..2a5423d
--- /dev/null
+++ b/examples/next-chat/app/components/tool-invocation-card.tsx
@@ -0,0 +1,49 @@
+import type { ToolInvocationDisplay, ToolInvocationState } from '../../lib/conversation-types';
+import { formatStructuredData } from '../../lib/format';
+
+const STATUS_LABELS: Record<ToolInvocationState, string> = {
+ 'collecting-input': 'Collecting input',
+ 'awaiting-execution': 'Running tool',
+ completed: 'Completed',
+ error: 'Error',
+};
+
+export function ToolInvocationCard({ invocation }: { invocation: ToolInvocationDisplay }) {
+ const statusClass = invocation.state === 'error' ? 'status error' : 'status';
+ const inputToShow =
+ invocation.input !== undefined
+ ? formatStructuredData(invocation.input)
+ : invocation.inputPreview;
+ const outputToShow =
+ invocation.output !== undefined ? formatStructuredData(invocation.output) : undefined;
+
+  return (
+    <div className="tool-card">
+      <header>
+        <h4>{invocation.name}</h4>
+        <span className={statusClass}>{STATUS_LABELS[invocation.state]}</span>
+      </header>
+      {invocation.providerExecuted ? (
+        <p className="status">Executed by provider</p>
+      ) : null}
+      {inputToShow ? (
+        <div>
+          <h5>Input</h5>
+          <pre>{inputToShow}</pre>
+        </div>
+      ) : null}
+      {outputToShow ? (
+        <div>
+          <h5>Result</h5>
+          <pre>{outputToShow}</pre>
+        </div>
+      ) : null}
+      {invocation.error ? (
+        <div>
+          <h5>Error</h5>
+          <pre>{formatStructuredData(invocation.error)}</pre>
+        </div>
+      ) : null}
+    </div>
+  );
+}
diff --git a/examples/next-chat/app/globals.css b/examples/next-chat/app/globals.css
new file mode 100644
index 0000000..e3bef4f
--- /dev/null
+++ b/examples/next-chat/app/globals.css
@@ -0,0 +1,278 @@
+:root {
+ color-scheme: light dark;
+ font-family: 'Inter', system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif;
+ background-color: #0f172a;
+ color: #0f172a;
+}
+
+body {
+ margin: 0;
+ min-height: 100vh;
+ background: radial-gradient(circle at top, #1d4ed8, #0f172a 55%);
+ color: #0f172a;
+}
+
+main {
+ display: flex;
+ flex-direction: column;
+ align-items: center;
+ padding: 2.5rem 1.5rem 3rem;
+ gap: 1.5rem;
+}
+
+.app-shell {
+ width: 100%;
+ max-width: 960px;
+ background-color: rgba(255, 255, 255, 0.95);
+ border-radius: 1.5rem;
+ padding: 2rem;
+ box-shadow: 0 24px 60px rgba(15, 23, 42, 0.35);
+ backdrop-filter: blur(16px);
+ display: flex;
+ flex-direction: column;
+ gap: 2rem;
+}
+
+h1 {
+ font-size: clamp(1.75rem, 3vw, 2.5rem);
+ font-weight: 700;
+ color: #0f172a;
+ margin: 0;
+}
+
+.subtitle {
+ color: #475569;
+ margin: 0;
+ font-size: 1rem;
+}
+
+.controls {
+ display: flex;
+ flex-wrap: wrap;
+ gap: 1rem;
+ align-items: flex-end;
+}
+
+.control {
+ display: flex;
+ flex-direction: column;
+ gap: 0.5rem;
+ min-width: 220px;
+}
+
+.control label,
+.control .control-title {
+ font-weight: 600;
+ color: #1e293b;
+}
+
+.control select,
+.control textarea,
+.control input,
+.control button {
+ font: inherit;
+ border-radius: 0.75rem;
+ border: 1px solid #cbd5f5;
+ padding: 0.75rem 1rem;
+ transition: border-color 0.2s ease, box-shadow 0.2s ease;
+}
+
+.control select:focus,
+.control textarea:focus,
+.control input:focus {
+ outline: none;
+ border-color: #2563eb;
+ box-shadow: 0 0 0 3px rgba(37, 99, 235, 0.18);
+}
+
+.chat-panel {
+ display: flex;
+ flex-direction: column;
+ gap: 1.25rem;
+ min-height: 360px;
+}
+
+.chat-messages {
+ display: flex;
+ flex-direction: column;
+ gap: 1rem;
+ padding: 1rem;
+ border-radius: 1rem;
+ background-color: rgba(241, 245, 255, 0.6);
+ border: 1px solid rgba(99, 102, 241, 0.15);
+ max-height: 420px;
+ overflow-y: auto;
+}
+
+.message {
+ display: flex;
+ gap: 0.75rem;
+ align-items: flex-start;
+}
+
+.message .bubble {
+ flex: 1;
+ border-radius: 1rem;
+ padding: 1rem 1.25rem;
+ box-shadow: 0 8px 24px rgba(15, 23, 42, 0.08);
+ display: flex;
+ flex-direction: column;
+ gap: 0.75rem;
+}
+
+.message.user .bubble {
+ background: linear-gradient(135deg, #2563eb, #0f172a);
+ color: #f8fafc;
+}
+
+.message.assistant .bubble {
+ background: #f8fafc;
+ border: 1px solid rgba(37, 99, 235, 0.12);
+ color: #0f172a;
+}
+
+.message .avatar {
+ font-weight: 600;
+ color: #1e293b;
+ width: 44px;
+ display: inline-flex;
+ align-items: center;
+ justify-content: center;
+ background: rgba(37, 99, 235, 0.1);
+ border-radius: 999px;
+ padding: 0.5rem;
+}
+
+.message.user .avatar {
+ background: rgba(14, 165, 233, 0.12);
+ color: #0f172a;
+}
+
+.message .bubble pre {
+ margin: 0;
+ font-family: 'JetBrains Mono', 'Fira Code', ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace;
+ font-size: 0.85rem;
+ line-height: 1.45;
+ background-color: rgba(15, 23, 42, 0.05);
+ border-radius: 0.75rem;
+ padding: 0.75rem;
+ overflow-x: auto;
+}
+
+.tool-card {
+ border-radius: 0.9rem;
+ padding: 0.85rem;
+ background-color: rgba(37, 99, 235, 0.08);
+ border: 1px solid rgba(37, 99, 235, 0.2);
+ display: flex;
+ flex-direction: column;
+ gap: 0.5rem;
+}
+
+.tool-card h4 {
+ margin: 0;
+ font-size: 0.95rem;
+ color: #1e293b;
+}
+
+.tool-card .status {
+ font-size: 0.8rem;
+ font-weight: 600;
+ color: #1d4ed8;
+}
+
+.tool-card .status.error {
+ color: #b91c1c;
+}
+
+.chat-form {
+ display: flex;
+ flex-direction: column;
+ gap: 1rem;
+}
+
+.chat-form textarea {
+ min-height: 120px;
+ resize: vertical;
+}
+
+.chat-form .actions {
+ display: flex;
+ gap: 0.75rem;
+ justify-content: flex-end;
+ flex-wrap: wrap;
+}
+
+button.primary {
+ background: linear-gradient(135deg, #2563eb, #0f172a);
+ color: #f8fafc;
+ border: none;
+ cursor: pointer;
+ padding: 0.75rem 1.5rem;
+ border-radius: 0.75rem;
+ font-weight: 600;
+ box-shadow: 0 12px 24px rgba(37, 99, 235, 0.35);
+}
+
+button.secondary {
+ background: transparent;
+ color: #1e293b;
+ border: 1px solid rgba(15, 23, 42, 0.15);
+ cursor: pointer;
+}
+
+button:disabled {
+ opacity: 0.6;
+ cursor: not-allowed;
+ box-shadow: none;
+}
+
+.status-pill {
+ display: inline-flex;
+ align-items: center;
+ gap: 0.35rem;
+ background-color: rgba(37, 99, 235, 0.12);
+ color: #1d4ed8;
+ padding: 0.35rem 0.65rem;
+ border-radius: 999px;
+ font-size: 0.75rem;
+ font-weight: 600;
+}
+
+.status-pill.error {
+ background-color: rgba(239, 68, 68, 0.18);
+ color: #b91c1c;
+}
+
+.status-pill.idle {
+ background-color: rgba(34, 197, 94, 0.18);
+ color: #15803d;
+}
+
+.error-banner {
+ padding: 0.75rem 1rem;
+ border-radius: 0.75rem;
+ border: 1px solid rgba(239, 68, 68, 0.25);
+ background-color: rgba(254, 226, 226, 0.7);
+ color: #7f1d1d;
+}
+
+@media (max-width: 768px) {
+ .app-shell {
+ padding: 1.5rem;
+ border-radius: 1rem;
+ }
+
+ .controls {
+ flex-direction: column;
+ align-items: stretch;
+ }
+
+ .control {
+ min-width: 100%;
+ }
+
+ .chat-messages {
+ max-height: none;
+ }
+}
diff --git a/examples/next-chat/app/layout.tsx b/examples/next-chat/app/layout.tsx
new file mode 100644
index 0000000..3c4e069
--- /dev/null
+++ b/examples/next-chat/app/layout.tsx
@@ -0,0 +1,17 @@
+import type { Metadata } from 'next';
+import type { ReactNode } from 'react';
+import './globals.css';
+
+export const metadata: Metadata = {
+ title: 'OpenRouter Chat Playground',
+ description:
+ 'A minimal Next.js chat app that demonstrates streaming OpenRouter responses, model selection, and tool use.',
+};
+
+export default function RootLayout({ children }: { children: ReactNode }) {
+  return (
+    <html lang="en">
+      <body>{children}</body>
+    </html>
+  );
+}
diff --git a/examples/next-chat/app/page.tsx b/examples/next-chat/app/page.tsx
new file mode 100644
index 0000000..2dfea02
--- /dev/null
+++ b/examples/next-chat/app/page.tsx
@@ -0,0 +1,551 @@
+'use client';
+
+import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
+import type { FormEvent } from 'react';
+import type { ModelMessage } from 'ai';
+
+import { ChatMessage } from './components/chat-message';
+import type {
+ ConversationEntry,
+ ToolInvocationDisplay,
+ ToolInvocationState,
+} from '../lib/conversation-types';
+import type { ToolMode } from '../lib/models';
+import { DEFAULT_MODEL_ID, DEFAULT_TOOL_MODE, MODEL_OPTIONS } from '../lib/models';
+
+const TOOL_MODE_OPTIONS: Array<{ value: ToolMode; label: string }> = [
+ { value: 'auto', label: 'Automatic tool calling' },
+ { value: 'disabled', label: 'Disable tools' },
+];
+
+function createMessageId(counterRef: { current: number }, prefix: string) {
+ counterRef.current += 1;
+ return `${prefix}-${Date.now()}-${counterRef.current}`;
+}
+
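+// Only each turn's plain text is replayed to the model; tool invocations, reasoning, and
+// metadata stay client-side for display and are not sent back in the request history.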
+function mapConversationToModelMessages(history: ConversationEntry[]): ModelMessage[] {
+ return history.map((entry) => {
+ if (entry.role === 'user') {
+ return { role: 'user', content: entry.text } as ModelMessage;
+ }
+
+ return { role: 'assistant', content: entry.text } as ModelMessage;
+ });
+}
+
+export default function ChatPage() {
+  const [conversation, setConversation] = useState<ConversationEntry[]>([]);
+  const [input, setInput] = useState('');
+  const [modelId, setModelId] = useState(DEFAULT_MODEL_ID);
+  const [toolMode, setToolMode] = useState<ToolMode>(DEFAULT_TOOL_MODE);
+  const [isStreaming, setIsStreaming] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+
+  const abortControllerRef = useRef<AbortController | null>(null);
+  const messageCounterRef = useRef(0);
+  const currentAssistantIdRef = useRef<string | null>(null);
+
+ const selectedModel = useMemo(
+ () => MODEL_OPTIONS.find((option) => option.id === modelId) ?? MODEL_OPTIONS[0],
+ [modelId],
+ );
+ const toolsSupported = selectedModel?.supportsTools ?? false;
+
+ useEffect(() => {
+ if (!toolsSupported && toolMode !== 'disabled') {
+ setToolMode('disabled');
+ }
+ }, [toolsSupported, toolMode]);
+
+ const appendEntry = useCallback((entry: ConversationEntry) => {
+ setConversation((prev) => [...prev, entry]);
+ }, []);
+
+ const updateEntryById = useCallback(
+ (id: string, updater: (entry: ConversationEntry) => ConversationEntry) => {
+ setConversation((prev) => {
+ const index = prev.findIndex((item) => item.id === id);
+ if (index === -1) {
+ return prev;
+ }
+
+ const updated = updater(prev[index]);
+ if (updated === prev[index]) {
+ return prev;
+ }
+
+ const next = [...prev];
+ next[index] = updated;
+ return next;
+ });
+ },
+ [],
+ );
+
+ const handleStop = useCallback(() => {
+ abortControllerRef.current?.abort();
+ }, []);
+
+ const handleClear = useCallback(() => {
+ abortControllerRef.current?.abort();
+ abortControllerRef.current = null;
+ currentAssistantIdRef.current = null;
+ setConversation([]);
+ setError(null);
+ setIsStreaming(false);
+ }, []);
+
+ const streamResponse = useCallback(
+ async (history: ConversationEntry[]) => {
+ setIsStreaming(true);
+ const controller = new AbortController();
+ abortControllerRef.current = controller;
+ const requestMessages = mapConversationToModelMessages(history);
+ const payload = JSON.stringify({
+ messages: requestMessages,
+ modelId,
+ toolMode,
+ });
+
+      const toolBuffers = new Map<string, string>();
+      const reasoningBuffers = new Map<string, string>();
+
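+      // Create the assistant entry lazily, on the first stream part, so a request that fails
+      // before producing any output never leaves an empty message bubble behind.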
+ const ensureAssistantMessage = (metadata?: unknown, messageId?: unknown) => {
+ if (currentAssistantIdRef.current) {
+ if (metadata !== undefined) {
+ updateEntryById(currentAssistantIdRef.current, (entry) => ({
+ ...entry,
+ metadata: metadata ?? entry.metadata,
+ }));
+ }
+ return currentAssistantIdRef.current;
+ }
+
+ const newId =
+ typeof messageId === 'string'
+ ? messageId
+ : createMessageId(messageCounterRef, 'assistant');
+ currentAssistantIdRef.current = newId;
+ appendEntry({
+ id: newId,
+ role: 'assistant',
+ text: '',
+ reasoning: [],
+ toolInvocations: [],
+ metadata,
+ pending: true,
+ });
+ return newId;
+ };
+
+ const updateToolInvocation = (
+ assistantId: string,
+ callId: string,
+ updater: (invocation: ToolInvocationDisplay) => ToolInvocationDisplay,
+ ) => {
+ updateEntryById(assistantId, (entry) => ({
+ ...entry,
+ toolInvocations: entry.toolInvocations.map((invocation) =>
+ invocation.id === callId ? updater(invocation) : invocation,
+ ),
+ }));
+ };
+
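+      // The UI message stream is delivered as server-sent events: each event is a block of
+      // `data:` lines, and each payload is one JSON-encoded stream part.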
+ const parseEvent = (eventText: string) => {
+ const dataLines = eventText
+ .split('\n')
+ .filter((line) => line.startsWith('data:'))
+ .map((line) => line.slice(5).trim());
+
+ if (dataLines.length === 0) {
+ return null;
+ }
+
+ const payloadText = dataLines.join('');
+ if (!payloadText) {
+ return null;
+ }
+
+ try {
+          return JSON.parse(payloadText) as Record<string, unknown>;
+ } catch (_error) {
+ return null;
+ }
+ };
+
+ const finalizeAssistant = (options?: { fallback?: string }) => {
+ const assistantId = currentAssistantIdRef.current;
+ if (!assistantId) {
+ return;
+ }
+
+ updateEntryById(assistantId, (entry) => {
+ if (!entry.pending) {
+ return entry;
+ }
+
+          const nextText = entry.text || options?.fallback || '';
+ return {
+ ...entry,
+ text: nextText,
+ pending: false,
+ };
+ });
+ };
+
+      const processChunk = (chunk: Record<string, unknown>) => {
+ const type = typeof chunk.type === 'string' ? chunk.type : null;
+ if (!type) {
+ return;
+ }
+
+ if (type === 'start') {
+ ensureAssistantMessage(chunk.messageMetadata, chunk.messageId);
+ return;
+ }
+
+ const assistantId = ensureAssistantMessage();
+
+ switch (type) {
+ case 'text-delta': {
+ if (typeof chunk.delta === 'string') {
+ updateEntryById(assistantId, (entry) => ({
+ ...entry,
+ text: entry.text + chunk.delta,
+ }));
+ }
+ break;
+ }
+ case 'message-metadata': {
+ updateEntryById(assistantId, (entry) => ({
+ ...entry,
+ metadata: chunk.messageMetadata ?? entry.metadata,
+ }));
+ break;
+ }
+ case 'reasoning': {
+ if (typeof chunk.text === 'string') {
+ updateEntryById(assistantId, (entry) => ({
+ ...entry,
+ reasoning: [...entry.reasoning, chunk.text as string],
+ }));
+ }
+ break;
+ }
+ case 'reasoning-start': {
+ if (typeof chunk.id === 'string') {
+ reasoningBuffers.set(chunk.id, '');
+ }
+ break;
+ }
+ case 'reasoning-delta': {
+ if (typeof chunk.id === 'string' && typeof chunk.delta === 'string') {
+ const existing = reasoningBuffers.get(chunk.id) ?? '';
+ reasoningBuffers.set(chunk.id, existing + chunk.delta);
+ }
+ break;
+ }
+ case 'reasoning-end': {
+ if (typeof chunk.id === 'string') {
+ const content = reasoningBuffers.get(chunk.id);
+ reasoningBuffers.delete(chunk.id);
+ if (content && content.trim().length > 0) {
+ updateEntryById(assistantId, (entry) => ({
+ ...entry,
+ reasoning: [...entry.reasoning, content],
+ }));
+ }
+ }
+ break;
+ }
+ case 'tool-input-start': {
+ if (typeof chunk.toolCallId === 'string' && typeof chunk.toolName === 'string') {
+ toolBuffers.set(chunk.toolCallId, '');
+ const initialState: ToolInvocationState =
+ chunk.providerExecuted === true ? 'awaiting-execution' : 'collecting-input';
+ updateEntryById(assistantId, (entry) => ({
+ ...entry,
+ toolInvocations: [
+ ...entry.toolInvocations,
+ {
+ id: chunk.toolCallId as string,
+ name: chunk.toolName as string,
+ inputPreview: '',
+ state: initialState,
+ providerExecuted: chunk.providerExecuted === true,
+ },
+ ],
+ }));
+ }
+ break;
+ }
+ case 'tool-input-delta': {
+ if (typeof chunk.toolCallId === 'string' && typeof chunk.inputTextDelta === 'string') {
+ const nextValue = (toolBuffers.get(chunk.toolCallId) ?? '') + chunk.inputTextDelta;
+ toolBuffers.set(chunk.toolCallId, nextValue);
+ updateToolInvocation(assistantId, chunk.toolCallId, (invocation) => ({
+ ...invocation,
+ inputPreview: nextValue,
+ }));
+ }
+ break;
+ }
+ case 'tool-input-available': {
+ if (typeof chunk.toolCallId === 'string') {
+ toolBuffers.delete(chunk.toolCallId);
+ updateToolInvocation(assistantId, chunk.toolCallId, (invocation) => ({
+ ...invocation,
+ input: chunk.input ?? invocation.input,
+ inputPreview: undefined,
+ providerExecuted:
+ invocation.providerExecuted || chunk.providerExecuted === true,
+ state: 'awaiting-execution',
+ }));
+ }
+ break;
+ }
+ case 'tool-output-available': {
+ if (typeof chunk.toolCallId === 'string') {
+ updateToolInvocation(assistantId, chunk.toolCallId, (invocation) => ({
+ ...invocation,
+ output: chunk.output ?? invocation.output,
+ state: 'completed',
+ }));
+ }
+ break;
+ }
+ case 'tool-output-error': {
+ if (typeof chunk.toolCallId === 'string') {
+ updateToolInvocation(assistantId, chunk.toolCallId, (invocation) => ({
+ ...invocation,
+ error: typeof chunk.errorText === 'string' ? chunk.errorText : 'Tool error',
+ state: 'error',
+ }));
+ }
+ break;
+ }
+ case 'finish': {
+ updateEntryById(assistantId, (entry) => ({
+ ...entry,
+ metadata: chunk.messageMetadata ?? entry.metadata,
+ pending: false,
+ }));
+ break;
+ }
+ case 'abort': {
+ finalizeAssistant({ fallback: 'Response aborted.' });
+ break;
+ }
+ case 'error': {
+ if (typeof chunk.errorText === 'string') {
+ setError(chunk.errorText);
+ }
+ finalizeAssistant({ fallback: 'The model returned an error.' });
+ break;
+ }
+ default:
+ break;
+ }
+ };
+
+ try {
+ const response = await fetch('/api/chat', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: payload,
+ signal: controller.signal,
+ });
+
+ if (!response.ok || !response.body) {
+ const message = await response.text();
+ throw new Error(message || 'Unable to reach the chat endpoint.');
+ }
+
+ const reader = response.body.getReader();
+ const decoder = new TextDecoder();
+ let buffer = '';
+
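+        // Events are separated by blank lines (`\n\n`); parse each complete event as soon as
+        // it is buffered and keep any trailing partial event for the next read.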
+ while (true) {
+ const { value, done } = await reader.read();
+ buffer += decoder.decode(value ?? new Uint8Array(), { stream: !done });
+
+ let boundary = buffer.indexOf('\n\n');
+ while (boundary !== -1) {
+ const eventText = buffer.slice(0, boundary);
+ buffer = buffer.slice(boundary + 2);
+ const chunk = parseEvent(eventText);
+ if (chunk) {
+ processChunk(chunk);
+ }
+ boundary = buffer.indexOf('\n\n');
+ }
+
+ if (done) {
+ break;
+ }
+ }
+
+ finalizeAssistant();
+ } catch (error) {
+ if (controller.signal.aborted) {
+ finalizeAssistant({ fallback: 'Generation cancelled.' });
+ return;
+ }
+
+ const message =
+ error instanceof Error ? error.message : 'Unexpected error while streaming response.';
+ setError(message);
+ finalizeAssistant({ fallback: 'The response ended unexpectedly.' });
+ } finally {
+ setIsStreaming(false);
+ abortControllerRef.current = null;
+ currentAssistantIdRef.current = null;
+ }
+ },
+ [appendEntry, modelId, toolMode, updateEntryById],
+ );
+
+ const handleSubmit = useCallback(
+    (event: FormEvent<HTMLFormElement>) => {
+ event.preventDefault();
+ if (isStreaming) {
+ return;
+ }
+
+ const trimmed = input.trim();
+ if (!trimmed) {
+ return;
+ }
+
+ const userEntry: ConversationEntry = {
+ id: createMessageId(messageCounterRef, 'user'),
+ role: 'user',
+ text: trimmed,
+ reasoning: [],
+ toolInvocations: [],
+ metadata: undefined,
+ pending: false,
+ };
+
+ const nextConversation = [...conversation, userEntry];
+ setConversation(nextConversation);
+ setInput('');
+ setError(null);
+ void streamResponse(nextConversation);
+ },
+ [conversation, input, isStreaming, streamResponse],
+ );
+
+  return (
+    <main>
+      <div className="app-shell">
+        <header>
+          <h1>OpenRouter Chat Playground</h1>
+          <p className="subtitle">
+            Stream OpenRouter responses, switch models, and inspect tool calls as they happen.
+          </p>
+        </header>
+
+        <section className="controls">
+          <div className="control">
+            <label htmlFor="model-select">Model</label>
+            <select
+              id="model-select"
+              value={modelId}
+              onChange={(event) => setModelId(event.target.value)}
+              disabled={isStreaming}
+            >
+              {MODEL_OPTIONS.map((option) => (
+                <option key={option.id} value={option.id}>
+                  {option.label}
+                </option>
+              ))}
+            </select>
+            {selectedModel ? <p className="subtitle">{selectedModel.description}</p> : null}
+          </div>
+
+          <div className="control">
+            <label htmlFor="tool-mode-select">Tool mode</label>
+            <select
+              id="tool-mode-select"
+              value={toolMode}
+              onChange={(event) => setToolMode(event.target.value as ToolMode)}
+              disabled={isStreaming || !toolsSupported}
+            >
+              {TOOL_MODE_OPTIONS.map((option) => (
+                <option key={option.value} value={option.value}>
+                  {option.label}
+                </option>
+              ))}
+            </select>
+            {!toolsSupported ? (
+              <p className="subtitle">Tools are disabled for this model.</p>
+            ) : null}
+          </div>
+
+          <div className="control">
+            <span className="control-title">Status</span>
+            <span className={isStreaming ? 'status-pill' : 'status-pill idle'}>
+              {isStreaming ? 'Streaming response…' : 'Ready'}
+            </span>
+          </div>
+        </section>
+
+        <section className="chat-panel">
+          <h2>Conversation</h2>
+
+          {error ? <div className="error-banner">{error}</div> : null}
+
+          <div className="chat-messages">
+            {conversation.length === 0 ? (
+              <p className="subtitle">
+                Start by asking a question. The assistant streams its reply and displays every tool call.
+              </p>
+            ) : (
+              conversation.map((entry) => <ChatMessage key={entry.id} entry={entry} />)
+            )}
+          </div>
+
+          <form className="chat-form" onSubmit={handleSubmit}>
+            <textarea
+              value={input}
+              onChange={(event) => setInput(event.target.value)}
+              placeholder="Send a message…"
+              disabled={isStreaming}
+            />
+            <div className="actions">
+              <button type="button" className="secondary" onClick={handleClear}>
+                Clear
+              </button>
+              <button type="button" className="secondary" onClick={handleStop} disabled={!isStreaming}>
+                Stop
+              </button>
+              <button type="submit" className="primary" disabled={isStreaming || input.trim().length === 0}>
+                Send
+              </button>
+            </div>
+          </form>
+        </section>
+      </div>
+    </main>
+  );
+}
diff --git a/examples/next-chat/lib/conversation-types.ts b/examples/next-chat/lib/conversation-types.ts
new file mode 100644
index 0000000..24fc9d4
--- /dev/null
+++ b/examples/next-chat/lib/conversation-types.ts
@@ -0,0 +1,26 @@
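+// Lifecycle of a tool call as reported by the stream: input is collected (tool-input-start/-delta),
+// the call executes (tool-input-available), then an output or an error arrives.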
+export type ToolInvocationState =
+ | 'collecting-input'
+ | 'awaiting-execution'
+ | 'completed'
+ | 'error';
+
+export interface ToolInvocationDisplay {
+ id: string;
+ name: string;
+ inputPreview?: string;
+ input?: unknown;
+ output?: unknown;
+ error?: string;
+ providerExecuted?: boolean;
+ state: ToolInvocationState;
+}
+
+export interface ConversationEntry {
+ id: string;
+ role: 'user' | 'assistant';
+ text: string;
+ reasoning: string[];
+ toolInvocations: ToolInvocationDisplay[];
+ metadata?: unknown;
+ pending: boolean;
+}
diff --git a/examples/next-chat/lib/format.ts b/examples/next-chat/lib/format.ts
new file mode 100644
index 0000000..3454b53
--- /dev/null
+++ b/examples/next-chat/lib/format.ts
@@ -0,0 +1,15 @@
+export function formatStructuredData(value: unknown): string {
+ if (value === undefined || value === null) {
+ return '—';
+ }
+
+ if (typeof value === 'string') {
+ return value;
+ }
+
+ try {
+ return JSON.stringify(value, null, 2);
+ } catch (_error) {
+ return String(value);
+ }
+}
diff --git a/examples/next-chat/lib/models.ts b/examples/next-chat/lib/models.ts
new file mode 100644
index 0000000..4494930
--- /dev/null
+++ b/examples/next-chat/lib/models.ts
@@ -0,0 +1,41 @@
+export interface ModelOption {
+ id: string;
+ label: string;
+ description: string;
+ supportsTools: boolean;
+}
+
+export type ToolMode = 'auto' | 'disabled';
+
+export const MODEL_OPTIONS: ModelOption[] = [
+ {
+ id: 'openai/gpt-4.1-mini',
+ label: 'OpenAI GPT-4.1 Mini',
+ description:
+ 'Fast and capable general-purpose model with strong support for streaming tool calls.',
+ supportsTools: true,
+ },
+ {
+ id: 'anthropic/claude-3.7-sonnet',
+ label: 'Anthropic Claude 3.7 Sonnet',
+ description:
+ 'Reasoning-focused assistant that can plan multi-step solutions and execute structured tools.',
+ supportsTools: true,
+ },
+ {
+ id: 'meta-llama/llama-3.1-70b-instruct',
+ label: 'Llama 3.1 70B Instruct',
+ description:
+ 'Great open-weight model for narrative tasks. Tools are disabled by default for this model.',
+ supportsTools: false,
+ },
+];
+
+export const DEFAULT_MODEL_ID = MODEL_OPTIONS[0]?.id ?? 'openai/gpt-4.1-mini';
+
+export const DEFAULT_TOOL_MODE: ToolMode = 'auto';
+
+export const DEFAULT_SYSTEM_PROMPT =
+ 'You are an expert assistant running on OpenRouter. Provide concise, actionable answers, '
+ + 'call the available tools when they make the response more helpful, and always explain how '
+ + 'you used any tool results.';
diff --git a/examples/next-chat/lib/tools.ts b/examples/next-chat/lib/tools.ts
new file mode 100644
index 0000000..ab3d737
--- /dev/null
+++ b/examples/next-chat/lib/tools.ts
@@ -0,0 +1,82 @@
+import { tool } from 'ai';
+import { z } from 'zod';
+
+function roundTo(value: number, decimals: number) {
+ const factor = 10 ** decimals;
+ return Math.round(value * factor) / factor;
+}
+
+export const getCurrentWeather = tool({
+ description:
+ 'Look up an approximate weather report for a location. Useful for travel planning or casual questions.',
+ inputSchema: z.object({
+ location: z
+ .string({ description: 'City, region, or coordinates describing the location to inspect.' })
+ .min(2),
+ unit: z
+ .enum(['celsius', 'fahrenheit'], {
+ description: 'Unit to use when reporting the temperature.',
+ })
+ .default('celsius'),
+ }),
+ execute: async ({ location, unit }) => {
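+    // The report below is synthetic (randomized temperatures); swap in a real weather API
+    // when wiring up live data.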
+ const fakeTemperatureCelsius = 18 + Math.random() * 10;
+ const temperatureCelsius = roundTo(fakeTemperatureCelsius, 1);
+ const temperatureFahrenheit = roundTo((temperatureCelsius * 9) / 5 + 32, 1);
+
+ return {
+ location,
+ unit,
+ report: `Skies are mostly clear over ${location}. A gentle breeze keeps the humidity comfortable.`,
+ temperature: unit === 'celsius' ? temperatureCelsius : temperatureFahrenheit,
+ feelsLike: unit === 'celsius'
+ ? roundTo(temperatureCelsius - 1.1, 1)
+ : roundTo(temperatureFahrenheit - 1.8, 1),
+ humidity: roundTo(52 + Math.random() * 8, 1),
+ windKph: roundTo(8 + Math.random() * 6, 1),
+ source: 'open-meteorology.example',
+ };
+ },
+});
+
+export const getCurrentTime = tool({
+ description:
+ 'Return the current local time for a requested IANA timezone or city description. '
+ + 'Helpful for scheduling and calendar coordination tasks.',
+ inputSchema: z.object({
+ timezone: z
+ .string({ description: 'An IANA timezone such as "Europe/Paris" or "America/New_York".' })
+ .default('UTC'),
+ locale: z
+ .string({ description: 'BCP47 locale string used when formatting the timestamp.' })
+ .default('en-US'),
+ }),
+ execute: async ({ timezone, locale }) => {
+ const now = new Date();
+ let formatted: string;
+ try {
+ formatted = now.toLocaleString(locale, { timeZone: timezone, hour12: false });
+ } catch (_error) {
+ formatted = now.toLocaleString('en-US', { timeZone: 'UTC', hour12: false });
+ return {
+ timezone,
+ locale,
+ iso: now.toISOString(),
+ formatted,
+ note: `Unable to format for timezone "${timezone}". Falling back to UTC.`,
+ };
+ }
+
+ return {
+ timezone,
+ locale,
+ iso: now.toISOString(),
+ formatted,
+ };
+ },
+});
+
+export const BASIC_TOOLS = {
+ getCurrentWeather,
+ getCurrentTime,
+};
diff --git a/examples/next-chat/next-env.d.ts b/examples/next-chat/next-env.d.ts
new file mode 100644
index 0000000..4f11a03
--- /dev/null
+++ b/examples/next-chat/next-env.d.ts
@@ -0,0 +1,5 @@
+/// <reference types="next" />
+/// <reference types="next/image-types/global" />
+
+// NOTE: This file should not be edited
+// see https://nextjs.org/docs/basic-features/typescript for more information.
diff --git a/examples/next-chat/next.config.mjs b/examples/next-chat/next.config.mjs
new file mode 100644
index 0000000..d5456a1
--- /dev/null
+++ b/examples/next-chat/next.config.mjs
@@ -0,0 +1,6 @@
+/** @type {import('next').NextConfig} */
+const nextConfig = {
+ reactStrictMode: true,
+};
+
+export default nextConfig;
diff --git a/examples/next-chat/package.json b/examples/next-chat/package.json
new file mode 100644
index 0000000..9af84ae
--- /dev/null
+++ b/examples/next-chat/package.json
@@ -0,0 +1,26 @@
+{
+ "name": "@openrouter/examples-next-chat",
+ "private": true,
+ "version": "0.1.0",
+ "type": "module",
+ "scripts": {
+ "dev": "next dev",
+ "build": "next build",
+ "start": "next start",
+ "lint": "next lint"
+ },
+ "dependencies": {
+ "@openrouter/ai-sdk-provider": "workspace:*",
+ "ai": "5.0.5",
+ "next": "14.2.15",
+ "react": "18.3.1",
+ "react-dom": "18.3.1",
+ "zod": "3.25.76"
+ },
+ "devDependencies": {
+ "@types/node": "20.17.24",
+ "@types/react": "18.3.5",
+ "@types/react-dom": "18.3.0",
+ "typescript": "5.9.2"
+ }
+}
diff --git a/examples/next-chat/tsconfig.json b/examples/next-chat/tsconfig.json
new file mode 100644
index 0000000..81bf283
--- /dev/null
+++ b/examples/next-chat/tsconfig.json
@@ -0,0 +1,19 @@
+{
+ "compilerOptions": {
+ "target": "ES2020",
+ "lib": ["dom", "dom.iterable", "esnext"],
+ "allowJs": false,
+ "skipLibCheck": true,
+ "strict": true,
+ "noEmit": true,
+ "esModuleInterop": true,
+ "module": "esnext",
+ "moduleResolution": "bundler",
+ "resolveJsonModule": true,
+ "isolatedModules": true,
+ "jsx": "preserve",
+ "types": ["node"]
+ },
+ "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
+ "exclude": ["node_modules"]
+}
diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml
index bd8ded7..3759f2c 100644
--- a/pnpm-workspace.yaml
+++ b/pnpm-workspace.yaml
@@ -1,5 +1,6 @@
packages:
- '.'
+ - 'examples/*'
onlyBuiltDependencies:
- '@biomejs/biome'