From 687c9856cf4ab24f2ddd14a3a06bbed5468b4746 Mon Sep 17 00:00:00 2001
From: Loule | Louis <35641311+Loule95450@users.noreply.github.com>
Date: Sun, 16 Nov 2025 22:31:54 +0100
Subject: [PATCH 1/4] Add OpenRouter embedding model implementation and tests

Introduces the OpenRouterEmbeddingModel class for embedding generation,
including schema validation and API integration. Adds comprehensive tests
for model instantiation, embedding functionality, custom settings, and
response handling.
---
 src/embedding/index.test.ts | 253 ++++++++++++++++++++++++++++++++++++
 src/embedding/index.ts      | 106 +++++++++++++++
 src/embedding/schemas.ts    |  25 ++++
 3 files changed, 384 insertions(+)
 create mode 100644 src/embedding/index.test.ts
 create mode 100644 src/embedding/index.ts
 create mode 100644 src/embedding/schemas.ts

diff --git a/src/embedding/index.test.ts b/src/embedding/index.test.ts
new file mode 100644
index 00000000..6cf3dcef
--- /dev/null
+++ b/src/embedding/index.test.ts
@@ -0,0 +1,253 @@
+import { describe, expect, it } from 'vitest';
+import { createOpenRouter } from '../provider';
+import { OpenRouterEmbeddingModel } from './index';
+
+describe('OpenRouterEmbeddingModel', () => {
+  const mockFetch = async (
+    _url: URL | RequestInfo,
+    _init?: RequestInit,
+  ): Promise<Response> => {
+    return new Response(
+      JSON.stringify({
+        id: 'test-id',
+        object: 'list',
+        data: [
+          {
+            object: 'embedding',
+            embedding: new Array(1536).fill(0.1),
+            index: 0,
+          },
+        ],
+        model: 'openai/text-embedding-3-small',
+        usage: {
+          prompt_tokens: 5,
+          total_tokens: 5,
+          cost: 0.00001,
+        },
+      }),
+      {
+        status: 200,
+        headers: {
+          'content-type': 'application/json',
+        },
+      },
+    );
+  };
+
+  describe('provider methods', () => {
+    it('should expose textEmbeddingModel method', () => {
+      const provider = createOpenRouter({ apiKey: 'test-key' });
+      expect(provider.textEmbeddingModel).toBeDefined();
+      expect(typeof provider.textEmbeddingModel).toBe('function');
+    });
+
+    it('should expose embedding method (deprecated)', () => {
+      const provider = createOpenRouter({ apiKey: 'test-key' });
+      expect(provider.embedding).toBeDefined();
+      expect(typeof provider.embedding).toBe('function');
+    });
+
+    it('should create an embedding model instance', () => {
+      const provider = createOpenRouter({ apiKey: 'test-key' });
+      const model = provider.textEmbeddingModel(
+        'openai/text-embedding-3-small',
+      );
+      expect(model).toBeInstanceOf(OpenRouterEmbeddingModel);
+      expect(model.modelId).toBe('openai/text-embedding-3-small');
+      expect(model.provider).toBe('openrouter');
+      expect(model.specificationVersion).toBe('v2');
+    });
+  });
+
+  describe('doEmbed', () => {
+    it('should embed a single value', async () => {
+      const provider = createOpenRouter({
+        apiKey: 'test-key',
+        fetch: mockFetch,
+      });
+      const model = provider.textEmbeddingModel(
+        'openai/text-embedding-3-small',
+      );
+
+      const result = await model.doEmbed({
+        values: ['sunny day at the beach'],
+      });
+
+      expect(result.embeddings).toHaveLength(1);
+      expect(result.embeddings[0]).toHaveLength(1536);
+      expect(result.usage).toEqual({ tokens: 5 });
+      expect(
+        (result.providerMetadata?.openrouter as { usage?: { cost?: number } })
+          ?.usage?.cost,
+      ).toBe(0.00001);
+    });
+
+    it('should embed multiple values', async () => {
+      const mockFetchMultiple = async (
+        _url: URL | RequestInfo,
+        _init?: RequestInit,
+      ): Promise<Response> => {
+        return new Response(
+          JSON.stringify({
+            object: 'list',
+            data: [
+              {
+                object: 'embedding',
+                embedding: new Array(1536).fill(0.1),
+                index: 0,
+              },
+              {
+                object: 'embedding',
+                embedding: new Array(1536).fill(0.2),
+                index: 1,
+              },
+              {
+                object: 'embedding',
+                embedding: new Array(1536).fill(0.3),
+                index: 2,
+              },
+            ],
+            model: 'openai/text-embedding-3-small',
+            usage: {
+              prompt_tokens: 15,
+              total_tokens: 15,
+            },
+          }),
+          {
+            status: 200,
+            headers: {
+              'content-type': 'application/json',
+            },
+          },
+        );
+      };
+
+      const provider = createOpenRouter({
+        apiKey: 'test-key',
+        fetch: mockFetchMultiple,
+      });
+      const model = provider.textEmbeddingModel(
+        'openai/text-embedding-3-small',
+      );
+
+      const result = await model.doEmbed({
+        values: [
+          'sunny day at the beach',
+          'rainy day in the city',
+          'snowy mountain peak',
+        ],
+      });
+
+      expect(result.embeddings).toHaveLength(3);
+      expect(result.embeddings[0]).toHaveLength(1536);
+      expect(result.embeddings[1]).toHaveLength(1536);
+      expect(result.embeddings[2]).toHaveLength(1536);
+      expect(result.usage).toEqual({ tokens: 15 });
+    });
+
+    it('should pass custom settings to API', async () => {
+      let capturedRequest: Record<string, unknown> | undefined;
+
+      const mockFetchWithCapture = async (
+        _url: URL | RequestInfo,
+        init?: RequestInit,
+      ): Promise<Response> => {
+        capturedRequest = JSON.parse(init?.body as string);
+        return new Response(
+          JSON.stringify({
+            object: 'list',
+            data: [
+              {
+                object: 'embedding',
+                embedding: new Array(1536).fill(0.1),
+                index: 0,
+              },
+            ],
+            model: 'openai/text-embedding-3-small',
+            usage: {
+              prompt_tokens: 5,
+              total_tokens: 5,
+            },
+          }),
+          {
+            status: 200,
+            headers: {
+              'content-type': 'application/json',
+            },
+          },
+        );
+      };
+
+      const provider = createOpenRouter({
+        apiKey: 'test-key',
+        fetch: mockFetchWithCapture,
+      });
+
+      const model = provider.textEmbeddingModel(
+        'openai/text-embedding-3-small',
+        {
+          user: 'test-user-123',
+          provider: {
+            order: ['openai'],
+            allow_fallbacks: false,
+          },
+        },
+      );
+
+      await model.doEmbed({
+        values: ['test input'],
+      });
+
+      expect(capturedRequest?.user).toBe('test-user-123');
+      expect(capturedRequest?.provider).toEqual({
+        order: ['openai'],
+        allow_fallbacks: false,
+      });
+      expect(capturedRequest?.model).toBe('openai/text-embedding-3-small');
+      expect(capturedRequest?.input).toEqual(['test input']);
+    });
+
+    it('should handle response without usage information', async () => {
+      const mockFetchNoUsage = async (
+        _url: URL | RequestInfo,
+        _init?: RequestInit,
+      ): Promise<Response> => {
+        return new Response(
+          JSON.stringify({
+            object: 'list',
+            data: [
+              {
+                object: 'embedding',
+                embedding: new Array(1536).fill(0.1),
+                index: 0,
+              },
+            ],
+            model: 'openai/text-embedding-3-small',
+          }),
+          {
+            status: 200,
+            headers: {
+              'content-type': 'application/json',
+            },
+          },
+        );
+      };
+
+      const provider = createOpenRouter({
+        apiKey: 'test-key',
+        fetch: mockFetchNoUsage,
+      });
+      const model = provider.textEmbeddingModel(
+        'openai/text-embedding-3-small',
+      );
+
+      const result = await model.doEmbed({
+        values: ['test'],
+      });
+
+      expect(result.embeddings).toHaveLength(1);
+      expect(result.usage).toBeUndefined();
+      expect(result.providerMetadata).toBeUndefined();
+    });
+  });
+});
diff --git a/src/embedding/index.ts b/src/embedding/index.ts
new file mode 100644
index 00000000..c30a451f
--- /dev/null
+++ b/src/embedding/index.ts
@@ -0,0 +1,106 @@
+import type {
+  EmbeddingModelV2,
+  SharedV2Headers,
+  SharedV2ProviderMetadata,
+} from '@ai-sdk/provider';
+import type {
+  OpenRouterEmbeddingModelId,
+  OpenRouterEmbeddingSettings,
+} from '../types/openrouter-embedding-settings';
+
+import {
+  combineHeaders,
+  createJsonResponseHandler,
+  postJsonToApi,
+} from 
'@ai-sdk/provider-utils';
+import { openrouterFailedResponseHandler } from '../schemas/error-response';
+import { OpenRouterEmbeddingResponseSchema } from './schemas';
+
+type OpenRouterEmbeddingConfig = {
+  provider: string;
+  headers: () => Record<string, string | undefined>;
+  url: (options: { modelId: string; path: string }) => string;
+  fetch?: typeof fetch;
+  extraBody?: Record<string, unknown>;
+};
+
+export class OpenRouterEmbeddingModel implements EmbeddingModelV2<string> {
+  readonly specificationVersion = 'v2' as const;
+  readonly provider = 'openrouter';
+  readonly modelId: OpenRouterEmbeddingModelId;
+  readonly settings: OpenRouterEmbeddingSettings;
+  readonly maxEmbeddingsPerCall = undefined;
+  readonly supportsParallelCalls = true;
+
+  private readonly config: OpenRouterEmbeddingConfig;
+
+  constructor(
+    modelId: OpenRouterEmbeddingModelId,
+    settings: OpenRouterEmbeddingSettings,
+    config: OpenRouterEmbeddingConfig,
+  ) {
+    this.modelId = modelId;
+    this.settings = settings;
+    this.config = config;
+  }
+
+  async doEmbed(options: {
+    values: Array<string>;
+    abortSignal?: AbortSignal;
+    headers?: Record<string, string | undefined>;
+  }): Promise<{
+    embeddings: Array<Array<number>>;
+    usage?: { tokens: number };
+    providerMetadata?: SharedV2ProviderMetadata;
+    response?: {
+      headers?: SharedV2Headers;
+      body?: unknown;
+    };
+  }> {
+    const { values, abortSignal, headers } = options;
+
+    const args = {
+      model: this.modelId,
+      input: values,
+      user: this.settings.user,
+      provider: this.settings.provider,
+      ...this.config.extraBody,
+      ...this.settings.extraBody,
+    };
+
+    const { value: responseValue, responseHeaders } = await postJsonToApi({
+      url: this.config.url({
+        path: '/embeddings',
+        modelId: this.modelId,
+      }),
+      headers: combineHeaders(this.config.headers(), headers),
+      body: args,
+      failedResponseHandler: openrouterFailedResponseHandler,
+      successfulResponseHandler: createJsonResponseHandler(
+        OpenRouterEmbeddingResponseSchema,
+      ),
+      abortSignal,
+      fetch: this.config.fetch,
+    });
+
+    return {
+      embeddings: responseValue.data.map((item) => item.embedding),
+      usage: responseValue.usage
+        ? { tokens: responseValue.usage.prompt_tokens }
+        : undefined,
+      providerMetadata: responseValue.usage?.cost
+        ? 
{ + openrouter: { + usage: { + cost: responseValue.usage.cost, + }, + }, + } + : undefined, + response: { + headers: responseHeaders, + body: responseValue, + }, + }; + } +} diff --git a/src/embedding/schemas.ts b/src/embedding/schemas.ts new file mode 100644 index 00000000..55dcfa33 --- /dev/null +++ b/src/embedding/schemas.ts @@ -0,0 +1,25 @@ +import { z } from 'zod/v4'; + +const openrouterEmbeddingUsageSchema = z.object({ + prompt_tokens: z.number(), + total_tokens: z.number(), + cost: z.number().optional(), +}); + +const openrouterEmbeddingDataSchema = z.object({ + object: z.literal('embedding'), + embedding: z.array(z.number()), + index: z.number().optional(), +}); + +export const OpenRouterEmbeddingResponseSchema = z.object({ + id: z.string().optional(), + object: z.literal('list'), + data: z.array(openrouterEmbeddingDataSchema), + model: z.string(), + usage: openrouterEmbeddingUsageSchema.optional(), +}); + +export type OpenRouterEmbeddingResponse = z.infer< + typeof OpenRouterEmbeddingResponseSchema +>; From d2c63495727aa9e43f5374cc82e6036261c93178 Mon Sep 17 00:00:00 2001 From: Loule | Louis <35641311+Loule95450@users.noreply.github.com> Date: Sun, 16 Nov 2025 22:32:02 +0100 Subject: [PATCH 2/4] Add OpenRouter embedding model support Introduces text embedding model support to the OpenRouter provider, including a new textEmbeddingModel method and a deprecated embedding alias for backward compatibility. Updates both the facade and provider to expose and implement these methods. --- src/facade.ts | 26 ++++++++++++++++++++++++++ src/provider.ts | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 62 insertions(+) diff --git a/src/facade.ts b/src/facade.ts index 14400588..934ae35b 100644 --- a/src/facade.ts +++ b/src/facade.ts @@ -7,10 +7,15 @@ import type { OpenRouterCompletionModelId, OpenRouterCompletionSettings, } from './types/openrouter-completion-settings'; +import type { + OpenRouterEmbeddingModelId, + OpenRouterEmbeddingSettings, +} from './types/openrouter-embedding-settings'; import { loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils'; import { OpenRouterChatLanguageModel } from './chat'; import { OpenRouterCompletionLanguageModel } from './completion'; +import { OpenRouterEmbeddingModel } from './embedding'; /** @deprecated Use `createOpenRouter` instead. @@ -78,4 +83,25 @@ Custom headers to include in the requests. 
url: ({ path }) => `${this.baseURL}${path}`, }); } + + textEmbeddingModel( + modelId: OpenRouterEmbeddingModelId, + settings: OpenRouterEmbeddingSettings = {}, + ) { + return new OpenRouterEmbeddingModel(modelId, settings, { + provider: 'openrouter.embedding', + ...this.baseConfig, + url: ({ path }) => `${this.baseURL}${path}`, + }); + } + + /** + * @deprecated Use textEmbeddingModel instead + */ + embedding( + modelId: OpenRouterEmbeddingModelId, + settings: OpenRouterEmbeddingSettings = {}, + ) { + return this.textEmbeddingModel(modelId, settings); + } } diff --git a/src/provider.ts b/src/provider.ts index f15e0c6e..7b4253b2 100644 --- a/src/provider.ts +++ b/src/provider.ts @@ -7,10 +7,15 @@ import type { OpenRouterCompletionModelId, OpenRouterCompletionSettings, } from './types/openrouter-completion-settings'; +import type { + OpenRouterEmbeddingModelId, + OpenRouterEmbeddingSettings, +} from './types/openrouter-embedding-settings'; import { loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils'; import { OpenRouterChatLanguageModel } from './chat'; import { OpenRouterCompletionLanguageModel } from './completion'; +import { OpenRouterEmbeddingModel } from './embedding'; import { withUserAgentSuffix } from './utils/with-user-agent-suffix'; import { VERSION } from './version'; @@ -50,6 +55,23 @@ Creates an OpenRouter completion model for text generation. modelId: OpenRouterCompletionModelId, settings?: OpenRouterCompletionSettings, ): OpenRouterCompletionLanguageModel; + + /** +Creates an OpenRouter text embedding model. (AI SDK v5) + */ + textEmbeddingModel( + modelId: OpenRouterEmbeddingModelId, + settings?: OpenRouterEmbeddingSettings, + ): OpenRouterEmbeddingModel; + + /** +Creates an OpenRouter text embedding model. (AI SDK v4 - deprecated, use textEmbeddingModel instead) +@deprecated Use textEmbeddingModel instead + */ + embedding( + modelId: OpenRouterEmbeddingModelId, + settings?: OpenRouterEmbeddingSettings, + ): OpenRouterEmbeddingModel; } export interface OpenRouterProviderSettings { @@ -144,6 +166,18 @@ export function createOpenRouter( extraBody: options.extraBody, }); + const createEmbeddingModel = ( + modelId: OpenRouterEmbeddingModelId, + settings: OpenRouterEmbeddingSettings = {}, + ) => + new OpenRouterEmbeddingModel(modelId, settings, { + provider: 'openrouter.embedding', + url: ({ path }) => `${baseURL}${path}`, + headers: getHeaders, + fetch: options.fetch, + extraBody: options.extraBody, + }); + const createLanguageModel = ( modelId: OpenRouterChatModelId | OpenRouterCompletionModelId, settings?: OpenRouterChatSettings | OpenRouterCompletionSettings, @@ -172,6 +206,8 @@ export function createOpenRouter( provider.languageModel = createLanguageModel; provider.chat = createChatModel; provider.completion = createCompletionModel; + provider.textEmbeddingModel = createEmbeddingModel; + provider.embedding = createEmbeddingModel; // deprecated alias for v4 compatibility return provider as OpenRouterProvider; } From b42d4eb4bfffd23d510b33468a6a8c22bed4c6fd Mon Sep 17 00:00:00 2001 From: Loule | Louis <35641311+Loule95450@users.noreply.github.com> Date: Sun, 16 Nov 2025 22:32:07 +0100 Subject: [PATCH 3/4] Add OpenRouter embedding settings types Introduced new type definitions for OpenRouter embedding settings in src/types/openrouter-embedding-settings.ts and re-exported them from index.ts. These types provide configuration options for embedding model requests, including provider routing preferences and user identification. 
--- src/types/index.ts | 2 + src/types/openrouter-embedding-settings.ts | 56 ++++++++++++++++++++++ 2 files changed, 58 insertions(+) create mode 100644 src/types/openrouter-embedding-settings.ts diff --git a/src/types/index.ts b/src/types/index.ts index 19e7152c..4ddb6cff 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -2,6 +2,8 @@ import type { LanguageModelV2, LanguageModelV2Prompt } from '@ai-sdk/provider'; export type { LanguageModelV2, LanguageModelV2Prompt }; +export * from './openrouter-embedding-settings'; + export type OpenRouterProviderOptions = { models?: string[]; diff --git a/src/types/openrouter-embedding-settings.ts b/src/types/openrouter-embedding-settings.ts new file mode 100644 index 00000000..a20a60ba --- /dev/null +++ b/src/types/openrouter-embedding-settings.ts @@ -0,0 +1,56 @@ +import type { OpenRouterSharedSettings } from '..'; + +// https://openrouter.ai/api/v1/models +export type OpenRouterEmbeddingModelId = string; + +export type OpenRouterEmbeddingSettings = { + /** + * A unique identifier representing your end-user, which can help OpenRouter to + * monitor and detect abuse. + */ + user?: string; + + /** + * Provider routing preferences to control request routing behavior + */ + provider?: { + /** + * List of provider slugs to try in order (e.g. ["openai", "voyageai"]) + */ + order?: string[]; + /** + * Whether to allow backup providers when primary is unavailable (default: true) + */ + allow_fallbacks?: boolean; + /** + * Only use providers that support all parameters in your request (default: false) + */ + require_parameters?: boolean; + /** + * Control whether to use providers that may store data + */ + data_collection?: 'allow' | 'deny'; + /** + * List of provider slugs to allow for this request + */ + only?: string[]; + /** + * List of provider slugs to skip for this request + */ + ignore?: string[]; + /** + * Sort providers by price, throughput, or latency + */ + sort?: 'price' | 'throughput' | 'latency'; + /** + * Maximum pricing you want to pay for this request + */ + max_price?: { + prompt?: number | string; + completion?: number | string; + image?: number | string; + audio?: number | string; + request?: number | string; + }; + }; +} & OpenRouterSharedSettings; From 5ce4287594d90340cd4177e70816ea6c9f287ca7 Mon Sep 17 00:00:00 2001 From: Loule | Louis <35641311+Loule95450@users.noreply.github.com> Date: Sun, 16 Nov 2025 22:32:11 +0100 Subject: [PATCH 4/4] Add documentation for embedding model support Expanded the README with details on using embedding models with OpenRouter, including usage examples for AI SDK v5 and v4, batch embeddings, and a list of supported embedding models. --- README.md | 58 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/README.md b/README.md index 63dfcba6..10d08a67 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,64 @@ This list is not a definitive list of models supported by OpenRouter, as it cons You can find the latest list of tool-supported models supported by OpenRouter [here](https://openrouter.ai/models?order=newest&supported_parameters=tools). (Note: This list may contain models that are not compatible with the AI SDK.) +## Embeddings + +OpenRouter supports embedding models for semantic search, RAG pipelines, and vector-native features. The provider exposes embeddings compatible with both AI SDK v5 and v4. 
+ +### AI SDK v5 (Recommended) + +```ts +import { embed } from 'ai'; +import { openrouter } from '@openrouter/ai-sdk-provider'; + +const { embedding } = await embed({ + model: openrouter.textEmbeddingModel('openai/text-embedding-3-small'), + value: 'sunny day at the beach', +}); + +console.log(embedding); // Array of numbers representing the embedding +``` + +### Batch Embeddings + +```ts +import { embedMany } from 'ai'; +import { openrouter } from '@openrouter/ai-sdk-provider'; + +const { embeddings } = await embedMany({ + model: openrouter.textEmbeddingModel('openai/text-embedding-3-small'), + values: [ + 'sunny day at the beach', + 'rainy day in the city', + 'snowy mountain peak', + ], +}); + +console.log(embeddings); // Array of embedding arrays +``` + +### AI SDK v4 (Deprecated) + +For backwards compatibility, the `embedding` method is also available: + +```ts +import { embed } from 'ai'; +import { openrouter } from '@openrouter/ai-sdk-provider'; + +const { embedding } = await embed({ + model: openrouter.embedding('openai/text-embedding-3-small'), + value: 'sunny day at the beach', +}); +``` + +### Supported Embedding Models + +OpenRouter supports various embedding models including: +- `openai/text-embedding-3-small` +- `openai/text-embedding-3-large` +- `openai/text-embedding-ada-002` +- And more available on [OpenRouter](https://openrouter.ai/models?output_modalities=embeddings) + ## Passing Extra Body to OpenRouter There are 3 ways to pass extra body to OpenRouter: