Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
545 changes: 179 additions & 366 deletions bun.lock

Large diffs are not rendered by default.

26 changes: 26 additions & 0 deletions docs/models.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,28 @@ GPT-5 family of models:

- `openai:gpt-5`
- `openai:gpt-5-pro`

- `openai:gpt-5-codex`

**Note:** Anthropic models are better supported than GPT-5 class models due to an outstanding issue in the Vercel AI SDK.

#### Google (Cloud)

Access Gemini models directly via Google's generative AI API:

- `google:gemini-3-pro-preview`
- `google:gemini-2.5-pro`
- `google:gemini-2.5-flash`

**Setup:**

1. Get your API key from [Google AI Studio](https://aistudio.google.com/)
2. Add to `~/.mux/providers.jsonc`:

```jsonc
{
  "google": {
    "apiKey": "AIza...",
  },
}
```
Expand Down Expand Up @@ -141,6 +163,10 @@ All providers are configured in `~/.mux/providers.jsonc`. Example configurations
"openai": {
"apiKey": "sk-...",
},
// Required for Google models
"google": {
"apiKey": "AIza...",
},
// Required for OpenRouter models
"openrouter": {
"apiKey": "sk-or-v1-...",
Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
},
"dependencies": {
"@ai-sdk/anthropic": "^2.0.44",
"@ai-sdk/google": "^2.0.28",
"@ai-sdk/openai": "^2.0.66",
"@openrouter/ai-sdk-provider": "^1.2.2",
"ghostty-web": "^0.1.1",
Expand Down
6 changes: 3 additions & 3 deletions src/browser/hooks/useModelLRU.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@ import { usePersistedState, readPersistedState, updatePersistedState } from "./u
import { MODEL_ABBREVIATIONS } from "@/browser/utils/slashCommands/registry";
import { defaultModel } from "@/common/utils/ai/models";

// Maximum number of models retained in the recently-used list.
const MAX_LRU_SIZE = 12;
const LRU_KEY = "model-lru";

// Default models from abbreviations (for initial LRU population).
// Ensure defaultModel is first, then fill with the remaining distinct
// abbreviation targets — several abbreviations may map to the same model,
// so the values are deduplicated via a Set before filtering.
const DEFAULT_MODELS = [
  defaultModel,
  ...Array.from(new Set(Object.values(MODEL_ABBREVIATIONS))).filter((m) => m !== defaultModel),
].slice(0, MAX_LRU_SIZE);
function persistModels(models: string[]): void {
updatePersistedState(LRU_KEY, models.slice(0, MAX_LRU_SIZE));
Expand Down
6 changes: 6 additions & 0 deletions src/browser/utils/thinking/policy.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,12 @@ describe("getThinkingPolicyForModel", () => {
"medium",
"high",
]);
expect(getThinkingPolicyForModel("google:gemini-3-pro-preview-11-2025")).toEqual([
"off",
"low",
"medium",
"high",
]);
});
});

Expand Down
10 changes: 9 additions & 1 deletion src/common/constants/knownModels.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
* Centralized model metadata. Update model versions here and everywhere else will follow.
*/

type ModelProvider = "anthropic" | "openai";
type ModelProvider = "anthropic" | "openai" | "google";

interface KnownModelDefinition {
/** Provider identifier used by SDK factories */
Expand Down Expand Up @@ -51,6 +51,7 @@ const MODEL_DEFINITIONS = {
providerModelId: "gpt-5.1",
aliases: ["gpt-5.1"],
warm: true,
tokenizerOverride: "openai/gpt-5",
},
GPT_PRO: {
provider: "openai",
Expand All @@ -62,12 +63,19 @@ const MODEL_DEFINITIONS = {
providerModelId: "gpt-5.1-codex",
aliases: ["codex"],
warm: true,
tokenizerOverride: "openai/gpt-5",
},
GPT_MINI: {
provider: "openai",
providerModelId: "gpt-5.1-codex-mini",
aliases: ["codex-mini"],
},
GEMINI_3_PRO: {
provider: "google",
providerModelId: "gemini-3-pro-preview",
aliases: ["gemini-3", "gemini-3-pro"],
tokenizerOverride: "google/gemini-2.5-pro",
},
} as const satisfies Record<string, KnownModelDefinition>;

export type KnownModelKey = keyof typeof MODEL_DEFINITIONS;
Expand Down
8 changes: 8 additions & 0 deletions src/common/constants/providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,13 @@ export async function importOllama() {
return await import("ollama-ai-provider-v2");
}

/**
 * Dynamically import the Google provider package.
 *
 * Lazy-loaded so the `@ai-sdk/google` dependency is only paid for when a
 * Google model is actually used (mirrors the other import* helpers in this
 * file, e.g. importOllama above).
 *
 * @returns The `@ai-sdk/google` module namespace.
 */
export async function importGoogle() {
  return await import("@ai-sdk/google");
}

/**
* Dynamically import the OpenRouter provider package
*/
Expand All @@ -50,6 +57,7 @@ export async function importOpenRouter() {
// Registry of lazy provider importers, keyed by provider id.
// NOTE(review): keys are assumed to match the provider prefixes used in model
// strings (e.g. "google:gemini-3-pro-preview") and in providers.jsonc —
// confirm against the callers when adding a new provider.
export const PROVIDER_REGISTRY = {
  anthropic: importAnthropic,
  openai: importOpenAI,
  google: importGoogle,
  ollama: importOllama,
  openrouter: importOpenRouter,
} as const;
Expand Down
7 changes: 7 additions & 0 deletions src/common/types/providerOptions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,12 @@ export interface OpenAIProviderOptions {
simulateToolPolicyNoop?: boolean;
}

/**
 * Google-specific options.
 *
 * Intentionally empty for now: no Google-specific provider options are
 * consumed yet (buildProviderOptions returns `{}` for the "google" provider).
 * Kept as a named interface so future options can be added without changing
 * the shape of MuxProviderOptions.
 */
// eslint-disable-next-line @typescript-eslint/no-empty-object-type
export interface GoogleProviderOptions {}

/**
* Ollama-specific options
* Currently empty - Ollama is a local service and doesn't require special options.
Expand All @@ -52,6 +58,7 @@ export interface MuxProviderOptions {
/** Provider-specific options */
anthropic?: AnthropicProviderOptions;
openai?: OpenAIProviderOptions;
google?: GoogleProviderOptions;
ollama?: OllamaProviderOptions;
openrouter?: OpenRouterProviderOptions;
}
8 changes: 8 additions & 0 deletions src/common/utils/ai/providerOptions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,14 @@ export function buildProviderOptions(
return options;
}

// Build Google-specific options
if (provider === "google") {
// Google Gemini models don't currently support the same thinking/reasoning
// configuration as Anthropic/OpenAI, so return empty options for now
log.debug("buildProviderOptions: Google config - no specific options yet");
return {};
}

// Build OpenRouter-specific options
if (provider === "openrouter") {
const reasoningEffort = OPENROUTER_REASONING_EFFORT[effectiveThinking];
Expand Down
14 changes: 13 additions & 1 deletion src/common/utils/providers/ensureProvidersConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,18 @@ const buildProvidersFromEnv = (env: NodeJS.ProcessEnv): ProvidersConfig => {
}
}

const googleKey = trim(env.GOOGLE_API_KEY);
if (googleKey.length > 0) {
const entry: ProviderConfig = { apiKey: googleKey };

const baseUrl = trim(env.GOOGLE_BASE_URL);
if (baseUrl.length > 0) {
entry.baseUrl = baseUrl;
}

providers.google = entry;
}

return providers;
};

Expand All @@ -102,7 +114,7 @@ export const ensureProvidersConfig = (
const providersFromEnv = buildProvidersFromEnv(env);
if (!hasAnyConfiguredProvider(providersFromEnv)) {
throw new Error(
"No provider credentials found. Configure providers.jsonc or set ANTHROPIC_API_KEY / OPENAI_API_KEY / OPENROUTER_API_KEY."
"No provider credentials found. Configure providers.jsonc or set ANTHROPIC_API_KEY / OPENAI_API_KEY / OPENROUTER_API_KEY / GOOGLE_API_KEY."
);
}

Expand Down
Loading