Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions examples/next-chat/.env.local.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Required: obtain an API key from https://openrouter.ai/keys
OPENROUTER_API_KEY=sk-or-...

# Optional: override the base URL if you are pointing at a proxy.
# OPENROUTER_BASE_URL=https://openrouter.ai/api/v1
49 changes: 49 additions & 0 deletions examples/next-chat/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# OpenRouter Next.js Chat Example

This example demonstrates how to build a streaming chat experience in Next.js using the
[`@openrouter/ai-sdk-provider`](https://www.npmjs.com/package/@openrouter/ai-sdk-provider)
and the Vercel AI SDK. The UI lets you:

- pick an OpenRouter model
- toggle tool usage on or off
- watch streaming assistant replies
- inspect tool invocations and their inputs/outputs in real time

## Getting Started

1. Install dependencies:

```bash
pnpm install
```

> **Note:** the example is part of the monorepo. You can also `cd examples/next-chat`
> and run `pnpm install` followed by `pnpm dev`.

2. Copy the example environment file and add your OpenRouter key:

```bash
cp examples/next-chat/.env.local.example examples/next-chat/.env.local
```

At minimum you need `OPENROUTER_API_KEY`. Set `OPENROUTER_BASE_URL` if you proxy requests.

3. Start the development server:

```bash
pnpm --filter @openrouter/examples-next-chat dev
```

Visit `http://localhost:3000` to try the chat experience.

## How It Works

- `app/api/chat/route.ts` configures the OpenRouter provider, streams responses with tools, and
returns AI SDK UI message streams.
- `app/page.tsx` implements a small client-side state machine that consumes the stream, renders
messages, and keeps track of tool invocations.
- `lib/tools.ts` defines two sample tools (`getCurrentWeather` and `getCurrentTime`). You can add
your own tools or wire in real data sources.

This example is intentionally lightweight so you can adapt it for your own projects.
65 changes: 65 additions & 0 deletions examples/next-chat/app/api/chat/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import type { ModelMessage } from 'ai';
import { streamText } from 'ai';

import { BASIC_TOOLS } from '../../../lib/tools';
import { DEFAULT_SYSTEM_PROMPT } from '../../../lib/models';

// Shape of the JSON payload the chat UI posts to this route.
interface ChatRequestBody {
// Identifier of the OpenRouter model to run the completion against.
modelId: string;
// 'disabled' hides tools from the model; any other value is treated as 'auto'.
toolMode?: 'auto' | 'disabled';
// Conversation history in the AI SDK's ModelMessage format.
messages: ModelMessage[];
}

// Provider client created once at module load and shared across requests.
// No apiKey is passed here — presumably the provider falls back to the
// OPENROUTER_API_KEY environment variable (the POST handler validates its
// presence); confirm against the @openrouter/ai-sdk-provider docs.
// OPENROUTER_API_BASE is kept as a secondary fallback name for the base URL.
const openrouter = createOpenRouter({
compatibility: 'strict',
baseURL: process.env.OPENROUTER_BASE_URL ?? process.env.OPENROUTER_API_BASE,
});

/**
 * Clamp an untrusted tool-mode value to the two supported settings.
 * Only the explicit opt-out 'disabled' is honored; everything else
 * (including undefined) falls back to 'auto'.
 */
function normalizeToolMode(toolMode: ChatRequestBody['toolMode']) {
  if (toolMode === 'disabled') {
    return 'disabled';
  }
  return 'auto';
}

/**
 * Chat endpoint: validates the request body, then streams a model response
 * (optionally with tools exposed) back as an AI SDK UI message stream.
 *
 * Returns 400 for malformed payloads, 500 for missing credentials or
 * synchronous provider failures.
 */
export async function POST(request: Request) {
  // Fail fast when credentials are missing. The provider itself is expected
  // to pick up OPENROUTER_API_KEY from the environment, so presence is all
  // we check here.
  const apiKey = process.env.OPENROUTER_API_KEY;
  if (!apiKey) {
    return Response.json(
      { error: 'Missing OPENROUTER_API_KEY environment variable.' },
      { status: 500 },
    );
  }

  let body: ChatRequestBody;
  try {
    body = (await request.json()) as ChatRequestBody;
  } catch {
    return Response.json({ error: 'Invalid JSON payload.' }, { status: 400 });
  }

  // Reject a missing or empty model id up front instead of letting the
  // provider fail later with a less actionable error.
  if (!body || typeof body.modelId !== 'string' || body.modelId.length === 0) {
    return Response.json({ error: 'Request must include a modelId string.' }, { status: 400 });
  }

  // Every element must be a real object. `typeof null === 'object'`, so null
  // entries have to be excluded explicitly — the original check let them
  // through to streamText.
  if (
    !Array.isArray(body.messages) ||
    body.messages.some((message) => message === null || typeof message !== 'object')
  ) {
    return Response.json({ error: 'Messages must be an array of chat messages.' }, { status: 400 });
  }

  const toolMode = normalizeToolMode(body.toolMode);
  const shouldExposeTools = toolMode !== 'disabled';

  try {
    // When tools are disabled we omit them entirely and force toolChoice
    // 'none' so the model cannot request one.
    const result = streamText({
      model: openrouter(body.modelId),
      system: DEFAULT_SYSTEM_PROMPT,
      messages: body.messages,
      tools: shouldExposeTools ? BASIC_TOOLS : undefined,
      toolChoice: shouldExposeTools ? 'auto' : 'none',
    });

    return result.toUIMessageStreamResponse();
  } catch (error) {
    // NOTE(review): streamText generally reports failures through the stream
    // rather than throwing synchronously, so this mostly covers setup errors.
    const errorMessage =
      error instanceof Error ? error.message : 'Unknown error while contacting OpenRouter.';
    return Response.json({ error: errorMessage }, { status: 500 });
  }
}
38 changes: 38 additions & 0 deletions examples/next-chat/app/components/chat-message.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
import type { ConversationEntry } from '../../lib/conversation-types';
import { formatStructuredData } from '../../lib/format';
import { ToolInvocationCard } from './tool-invocation-card';

// Short badge text rendered inside the avatar circle for a message role.
function getAvatarLabel(role: ConversationEntry['role']) {
  if (role === 'user') {
    return 'You';
  }
  return 'AI';
}

/**
 * Renders one conversation entry: role avatar, message text, optional
 * collapsible "Reasoning" and "Metadata" sections, a card per tool
 * invocation, and a streaming indicator while the entry is still pending.
 */
export function ChatMessage({ entry }: { entry: ConversationEntry }) {
return (
<div className={`message ${entry.role}`}>
<div className="avatar" aria-hidden>
{getAvatarLabel(entry.role)}
</div>
<div className="bubble">
{/* Plain assistant/user text, shown only when non-empty. */}
{entry.text ? <p>{entry.text}</p> : null}
{entry.reasoning.length > 0 ? (
<details>
<summary>Reasoning</summary>
<pre>{entry.reasoning.join('\n')}</pre>
</details>
) : null}
{entry.metadata ? (
<details>
<summary>Metadata</summary>
<pre>{formatStructuredData(entry.metadata)}</pre>
</details>
) : null}
{/* One card per tool invocation attached to this entry. */}
{entry.toolInvocations.map((tool) => (
<ToolInvocationCard key={tool.id} invocation={tool} />
))}
{entry.pending ? (
<div className="status-pill">Streaming response…</div>
) : null}
</div>
</div>
);
}
49 changes: 49 additions & 0 deletions examples/next-chat/app/components/tool-invocation-card.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
import type { ToolInvocationDisplay } from '../../lib/conversation-types';
import { formatStructuredData } from '../../lib/format';

// Human-readable label for each tool invocation lifecycle state.
const STATUS_LABELS: Record<ToolInvocationDisplay['state'], string> = {
'collecting-input': 'Collecting input',
'awaiting-execution': 'Running tool',
completed: 'Completed',
error: 'Error',
};

/**
 * Card summarizing one tool invocation: name, lifecycle status, the input it
 * was called with, its result, and any error payload. Sections are omitted
 * when the corresponding data is absent.
 */
export function ToolInvocationCard({ invocation }: { invocation: ToolInvocationDisplay }) {
const statusClass = invocation.state === 'error' ? 'status error' : 'status';
// Prefer the fully parsed input; fall back to the raw streaming preview
// while arguments are still being collected.
const inputToShow =
invocation.input !== undefined
? formatStructuredData(invocation.input)
: invocation.inputPreview;
const outputToShow =
invocation.output !== undefined ? formatStructuredData(invocation.output) : undefined;

return (
<div className="tool-card">
<div>
<h4>{invocation.name}</h4>
<div className={statusClass}>{STATUS_LABELS[invocation.state]}</div>
</div>
{/* Flag invocations that OpenRouter executed server-side. */}
{invocation.providerExecuted ? (
<div className="status-pill">Executed by provider</div>
) : null}
{inputToShow ? (
<div>
<strong>Input</strong>
<pre>{inputToShow}</pre>
</div>
) : null}
{outputToShow ? (
<div>
<strong>Result</strong>
<pre>{outputToShow}</pre>
</div>
) : null}
{invocation.error ? (
<div>
<strong>Error</strong>
<pre>{formatStructuredData(invocation.error)}</pre>
</div>
) : null}
</div>
);
}
Loading