diff --git a/README.md b/README.md
index 7c2b5bc7..4b3e670d 100644
--- a/README.md
+++ b/README.md
@@ -64,7 +64,7 @@ Default endpoints:
- OpenAI base URL: `https://api.openai.com/v1`
- OpenAI websocket mode endpoint: `wss://api.openai.com/v1/responses`
-- OpenRouter base URL: `https://openrouter.ai/api/v1`
+- OpenRouter Anthropic-compatible base URL: `https://openrouter.ai/api`
- OpenRouter Responses API: `https://openrouter.ai/api/v1/responses`
## Quick Start
@@ -91,7 +91,7 @@ OpenRouter:
```bash
BETTER_CLAWD_API_PROVIDER=openrouter
OPENROUTER_API_KEY=your_key_here
-OPENROUTER_BASE_URL=https://openrouter.ai/api/v1
+OPENROUTER_BASE_URL=https://openrouter.ai/api
```
## What You Get
diff --git a/package.json b/package.json
index 75bd7e70..250c97f6 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "better-clawd",
- "version": "0.1.4",
+ "version": "0.1.5",
"description": "Claude Code, but better.",
"type": "module",
"bin": {
diff --git a/src/components/OpenRouterLoginFlow.tsx b/src/components/OpenRouterLoginFlow.tsx
index 92684e86..38a4ed71 100644
--- a/src/components/OpenRouterLoginFlow.tsx
+++ b/src/components/OpenRouterLoginFlow.tsx
@@ -45,8 +45,8 @@ export function OpenRouterLoginFlow({
Configuring OpenRouter login for Better-Clawd...
- OpenRouter support uses your OpenRouter API key with the Responses API
- endpoint.
+ OpenRouter support uses your OpenRouter API key with the
+ Anthropic-compatible Messages API endpoint.
)
@@ -59,8 +59,8 @@ export function OpenRouterLoginFlow({
'Better-Clawd can use OpenRouter with your OpenRouter API key.'}
- Paste your OpenRouter key to use `https://openrouter.ai/api/v1` and the
- Responses API compatibility layer.
+ Paste your OpenRouter key to use the Anthropic-compatible OpenRouter base
+ URL at `https://openrouter.ai/api`.
Paste your OpenRouter API key:
diff --git a/src/utils/logoV2Utils.ts b/src/utils/logoV2Utils.ts
index f7f9720b..c06158b7 100644
--- a/src/utils/logoV2Utils.ts
+++ b/src/utils/logoV2Utils.ts
@@ -9,6 +9,7 @@ import {
truncateToWidth,
truncateToWidthNoEllipsis,
} from './format.js'
+import { getAPIProvider } from './model/providers.js'
import { getStoredChangelogFromMemory, parseChangelog } from './releaseNotes.js'
import { gt } from './semver.js'
import { loadMessageLogs } from './sessionStorage.js'
@@ -253,9 +254,20 @@ export function getLogoDisplayData(): {
const cwd = serverUrl
? `${displayPath} in ${serverUrl.replace(/^https?:\/\//, '')}`
: displayPath
+ const apiProvider = getAPIProvider()
const billingType = isClaudeAISubscriber()
? getSubscriptionName()
- : 'API Usage Billing'
+ : apiProvider === 'openrouter'
+ ? 'OpenRouter'
+ : apiProvider === 'openai'
+ ? 'OpenAI'
+ : apiProvider === 'bedrock'
+ ? 'AWS Bedrock'
+ : apiProvider === 'vertex'
+ ? 'Google Vertex AI'
+ : apiProvider === 'foundry'
+ ? 'Microsoft Foundry'
+ : 'API Usage Billing'
const agentName = getInitialSettings().agent
return {
diff --git a/src/utils/model/modelStrings.ts b/src/utils/model/modelStrings.ts
index 7fe0dc9b..1db7e348 100644
--- a/src/utils/model/modelStrings.ts
+++ b/src/utils/model/modelStrings.ts
@@ -25,19 +25,20 @@ const MODEL_KEYS = Object.keys(ALL_MODEL_CONFIGS) as ModelKey[]
function getBuiltinModelStrings(provider: APIProvider): ModelStrings {
if (provider === 'openai') {
const out = getBuiltinModelStrings('firstParty') as Record<string, string>
- out.haiku35 = process.env.OPENAI_HAIKU_MODEL || 'gpt-5.4-mini'
out.haiku45 = process.env.OPENAI_HAIKU_MODEL || 'gpt-5.4-mini'
- out.sonnet37 = process.env.OPENAI_SONNET_MODEL || 'gpt-5.4'
- out.sonnet40 = process.env.OPENAI_SONNET_MODEL || 'gpt-5.4'
- out.sonnet45 = process.env.OPENAI_SONNET_MODEL || 'gpt-5.4'
out.sonnet46 = process.env.OPENAI_SONNET_MODEL || 'gpt-5.4'
- out.opus40 = process.env.OPENAI_OPUS_MODEL || 'gpt-5.4'
- out.opus41 = process.env.OPENAI_OPUS_MODEL || 'gpt-5.4'
- out.opus45 = process.env.OPENAI_OPUS_MODEL || 'gpt-5.4'
out.opus46 = process.env.OPENAI_OPUS_MODEL || 'gpt-5.4'
return out as ModelStrings
}
+ if (provider === 'openrouter') {
+ const out = getBuiltinModelStrings('firstParty') as Record<string, string>
+ out.sonnet46 =
+ process.env.OPENROUTER_SONNET_MODEL || 'anthropic/claude-sonnet-4.6'
+ out.opus46 =
+ process.env.OPENROUTER_OPUS_MODEL || 'anthropic/claude-opus-4.6'
+ return out as ModelStrings
+ }
+
const out = {} as ModelStrings
for (const key of MODEL_KEYS) {
out[key] = ALL_MODEL_CONFIGS[key][provider]
diff --git a/src/utils/model/providers.ts b/src/utils/model/providers.ts
index 66a6047a..c48dc6f7 100644
--- a/src/utils/model/providers.ts
+++ b/src/utils/model/providers.ts
@@ -9,6 +9,40 @@ export type APIProvider =
| 'vertex'
| 'foundry'
+function getStoredProviderPreference(): APIProvider | null {
+ try {
+ // Read the global config file directly so provider selection works even
+ // before the guarded config loader is enabled during startup.
+ // eslint-disable-next-line @typescript-eslint/no-require-imports
+ const { readFileSync } = require('fs') as typeof import('fs')
+ // eslint-disable-next-line @typescript-eslint/no-require-imports
+ const { getGlobalClaudeFile } =
+ require('../env.js') as typeof import('../env.js')
+ const raw = readFileSync(getGlobalClaudeFile(), 'utf8')
+ const config = JSON.parse(raw) as {
+ authProvider?: 'anthropic' | 'openrouter' | 'openai'
+ openRouterApiKey?: string
+ openAiApiKey?: string
+ openAiAccessToken?: string
+ }
+
+ switch (config.authProvider) {
+ case 'openrouter':
+ return config.openRouterApiKey ? 'openrouter' : null
+ case 'openai':
+ return config.openAiApiKey || config.openAiAccessToken
+ ? 'openai'
+ : null
+ case 'anthropic':
+ return 'firstParty'
+ default:
+ return null
+ }
+ } catch {
+ return null
+ }
+}
+
function getExplicitProviderOverride(): APIProvider | null {
const rawProvider =
process.env.BETTER_CLAWD_API_PROVIDER ??
@@ -65,7 +99,30 @@ export function isOpenAIConfigured(): boolean {
}
export function getOpenRouterBaseUrl(): string {
- return process.env.OPENROUTER_BASE_URL ?? 'https://openrouter.ai/api/v1'
+ const configuredBaseUrl = process.env.OPENROUTER_BASE_URL
+ const fallbackBaseUrl = 'https://openrouter.ai/api'
+ if (!configuredBaseUrl) {
+ return fallbackBaseUrl
+ }
+
+ try {
+ const url = new URL(configuredBaseUrl)
+
+ if (url.host === 'openrouter.ai') {
+ const normalizedPath = url.pathname.replace(/\/+$/, '')
+ if (normalizedPath === '' || normalizedPath === '/') {
+ url.pathname = '/api'
+ } else if (normalizedPath === '/api/v1') {
+ // Anthropic SDK appends /v1/messages itself, so OpenRouter's SDK base
+ // must stop at /api rather than /api/v1.
+ url.pathname = '/api'
+ }
+ }
+
+ return url.toString().replace(/\/$/, '')
+ } catch {
+ return configuredBaseUrl
+ }
}
export function getOpenAIBaseUrl(): string {
@@ -88,7 +145,7 @@ export function getAPIProvider(): APIProvider {
? 'openai'
: isOpenRouterConfigured()
? 'openrouter'
- : 'firstParty'
+ : getStoredProviderPreference() ?? 'firstParty'
}
export function getAPIProviderForStatsig(): AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS {
diff --git a/src/utils/status.tsx b/src/utils/status.tsx
index 146dfcf4..ef4b946e 100644
--- a/src/utils/status.tsx
+++ b/src/utils/status.tsx
@@ -11,7 +11,11 @@ import { getDisplayPath } from './file.js';
import { formatNumber } from './format.js';
import { getIdeClientName, type IDEExtensionInstallationStatus, isJetBrainsIde, toIDEDisplayName } from './ide.js';
import { getClaudeAiUserDefaultModelDescription, modelDisplayString } from './model/model.js';
-import { getAPIProvider } from './model/providers.js';
+import {
+ getAPIProvider,
+ getOpenAIBaseUrl,
+ getOpenRouterBaseUrl,
+} from './model/providers.js';
import { getMTLSConfig } from './mtls.js';
import { checkInstall } from './nativeInstaller/index.js';
import { getProxyUrl } from './proxy.js';
@@ -264,15 +268,12 @@ export function buildAPIProviderProperties(): Property[] {
} else if (apiProvider === 'openrouter') {
properties.push({
label: 'OpenRouter base URL',
- value:
- process.env.OPENROUTER_BASE_URL ||
- process.env.ANTHROPIC_BASE_URL ||
- 'https://openrouter.ai/api/v1'
+ value: getOpenRouterBaseUrl()
});
} else if (apiProvider === 'openai') {
properties.push({
label: 'OpenAI base URL',
- value: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1'
+ value: getOpenAIBaseUrl()
});
} else if (apiProvider === 'bedrock') {
const bedrockBaseUrl = process.env.BEDROCK_BASE_URL;