feat: support dual protocols (OpenAI/Anthropic) for custom providers (#389)

This commit is contained in:
paisley
2026-03-10 17:35:51 +08:00
committed by GitHub
Unverified
parent 80e89ddc5c
commit 99681777a0
12 changed files with 187 additions and 44 deletions

View File

@@ -162,12 +162,13 @@ export async function handleProviderRoutes(
if (url.pathname === '/api/providers/validate' && req.method === 'POST') {
logLegacyProviderRoute('POST /api/providers/validate');
try {
const body = await parseJsonBody<{ providerId: string; apiKey: string; options?: { baseUrl?: string } }>(req);
const body = await parseJsonBody<{ providerId: string; apiKey: string; options?: { baseUrl?: string; apiProtocol?: string } }>(req);
const provider = await providerService.getLegacyProvider(body.providerId);
const providerType = provider?.type || body.providerId;
const registryBaseUrl = getProviderConfig(providerType)?.baseUrl;
const resolvedBaseUrl = body.options?.baseUrl || provider?.baseUrl || registryBaseUrl;
sendJson(res, 200, await validateApiKeyWithProvider(providerType, body.apiKey, { baseUrl: resolvedBaseUrl }));
const resolvedProtocol = body.options?.apiProtocol || provider?.apiProtocol;
sendJson(res, 200, await validateApiKeyWithProvider(providerType, body.apiKey, { baseUrl: resolvedBaseUrl, apiProtocol: resolvedProtocol as any }));
} catch (error) {
sendJson(res, 500, { valid: false, error: String(error) });
}

View File

@@ -216,7 +216,7 @@ async function syncProviderSecretToRuntime(
async function resolveRuntimeSyncContext(config: ProviderConfig): Promise<RuntimeProviderSyncContext | null> {
const runtimeProviderKey = await resolveRuntimeProviderKey(config);
const meta = getProviderConfig(config.type);
const api = config.type === 'custom' || config.type === 'ollama' ? 'openai-completions' : meta?.api;
const api = config.apiProtocol || (config.type === 'custom' ? 'openai-completions' : meta?.api);
if (!api) {
return null;
}
@@ -245,7 +245,7 @@ async function syncCustomProviderAgentModel(
runtimeProviderKey: string,
apiKey: string | undefined,
): Promise<void> {
if (config.type !== 'custom' && config.type !== 'ollama') {
if (config.type !== 'custom') {
return;
}
@@ -257,7 +257,7 @@ async function syncCustomProviderAgentModel(
const modelId = config.model;
await updateAgentModelProvider(runtimeProviderKey, {
baseUrl: config.baseUrl,
api: 'openai-completions',
api: config.apiProtocol || 'openai-completions',
models: modelId ? [{ id: modelId, name: modelId }] : [],
apiKey: resolvedKey,
});
@@ -310,12 +310,12 @@ export async function syncUpdatedProviderToRuntime(
const defaultProviderId = await getDefaultProvider();
if (defaultProviderId === config.id) {
const modelOverride = config.model ? `${ock}/${config.model}` : undefined;
if (config.type !== 'custom' && config.type !== 'ollama') {
if (config.type !== 'custom') {
await setOpenClawDefaultModel(ock, modelOverride, fallbackModels);
} else {
await setOpenClawDefaultModelWithOverride(ock, modelOverride, {
baseUrl: config.baseUrl,
api: 'openai-completions',
api: config.apiProtocol || 'openai-completions',
}, fallbackModels);
}
}
@@ -379,10 +379,10 @@ export async function syncDefaultProviderToRuntime(
? (provider.model.startsWith(`${ock}/`) ? provider.model : `${ock}/${provider.model}`)
: undefined;
if (provider.type === 'custom' || provider.type === 'ollama') {
if (provider.type === 'custom') {
await setOpenClawDefaultModelWithOverride(ock, modelOverride, {
baseUrl: provider.baseUrl,
api: 'openai-completions',
api: provider.apiProtocol || 'openai-completions',
}, fallbackModels);
} else {
await setOpenClawDefaultModel(ock, modelOverride, fallbackModels);
@@ -460,14 +460,14 @@ export async function syncDefaultProviderToRuntime(
}
if (
(provider.type === 'custom' || provider.type === 'ollama') &&
provider.type === 'custom' &&
providerKey &&
provider.baseUrl
) {
const modelId = provider.model;
await updateAgentModelProvider(ock, {
baseUrl: provider.baseUrl,
api: 'openai-completions',
api: provider.apiProtocol || 'openai-completions',
models: modelId ? [{ id: modelId, name: modelId }] : [],
apiKey: providerKey,
});

View File

@@ -27,9 +27,9 @@ export function providerConfigToAccount(
label: config.name,
authMode: inferAuthMode(config.type),
baseUrl: config.baseUrl,
apiProtocol: config.type === 'custom' || config.type === 'ollama'
apiProtocol: config.apiProtocol || (config.type === 'custom' || config.type === 'ollama'
? 'openai-completions'
: getProviderDefinition(config.type)?.providerConfig?.api,
: getProviderDefinition(config.type)?.providerConfig?.api),
model: config.model,
fallbackModels: config.fallbackModels,
fallbackAccountIds: config.fallbackProviderIds,
@@ -46,6 +46,7 @@ export function providerAccountToConfig(account: ProviderAccount): ProviderConfi
name: account.label,
type: account.vendorId,
baseUrl: account.baseUrl,
apiProtocol: account.apiProtocol,
model: account.model,
fallbackModels: account.fallbackModels,
fallbackProviderIds: account.fallbackAccountIds,

View File

@@ -170,6 +170,44 @@ async function performChatCompletionsProbe(
}
}
async function performAnthropicMessagesProbe(
  providerLabel: string,
  url: string,
  headers: Record<string, string>,
): Promise<{ valid: boolean; error?: string }> {
  // Minimal POST /messages probe for Anthropic-compatible endpoints that do
  // not implement /models: the response status alone tells us whether the
  // supplied credentials were accepted.
  const probeBody = JSON.stringify({
    model: 'validation-probe',
    messages: [{ role: 'user', content: 'hi' }],
    max_tokens: 1,
  });
  try {
    logValidationRequest(providerLabel, 'POST', url, headers);
    const res = await proxyAwareFetch(url, {
      method: 'POST',
      headers: { ...headers, 'Content-Type': 'application/json' },
      body: probeBody,
    });
    logValidationStatus(providerLabel, res.status);
    // Body may be empty or non-JSON on some gateways; tolerate that.
    const payload = await res.json().catch(() => ({}));
    const { status } = res;
    if (status === 401 || status === 403) {
      return { valid: false, error: 'Invalid API key' };
    }
    // 2xx means the probe succeeded outright. 400 (our deliberately bogus
    // probe model was rejected) and 429 (rate limited) both imply the key
    // itself authenticated, so treat them as valid too.
    const keyAccepted =
      (status >= 200 && status < 300) || status === 400 || status === 429;
    if (keyAccepted) {
      return { valid: true };
    }
    return classifyAuthResponse(status, payload);
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    return { valid: false, error: `Connection error: ${reason}` };
  }
}
async function validateGoogleQueryKey(
providerType: string,
apiKey: string,
@@ -185,13 +223,26 @@ async function validateAnthropicHeaderKey(
apiKey: string,
baseUrl?: string,
): Promise<{ valid: boolean; error?: string }> {
const base = normalizeBaseUrl(baseUrl || 'https://api.anthropic.com/v1');
const rawBase = normalizeBaseUrl(baseUrl || 'https://api.anthropic.com/v1');
const base = rawBase.endsWith('/v1') ? rawBase : `${rawBase}/v1`;
const url = `${base}/models?limit=1`;
const headers = {
'x-api-key': apiKey,
'anthropic-version': '2023-06-01',
};
return await performProviderValidationRequest(providerType, url, headers);
const modelsResult = await performProviderValidationRequest(providerType, url, headers);
// If the endpoint doesn't implement /models (like Minimax Anthropic compatibility), fall back to a /messages probe.
if (modelsResult.error?.includes('API error: 404') || modelsResult.error?.includes('API error: 400')) {
console.log(
`[clawx-validate] ${providerType} /models returned error, falling back to /messages probe`,
);
const messagesUrl = `${base}/messages`;
return await performAnthropicMessagesProbe(providerType, messagesUrl, headers);
}
return modelsResult;
}
async function validateOpenRouterKey(
@@ -206,9 +257,18 @@ async function validateOpenRouterKey(
export async function validateApiKeyWithProvider(
providerType: string,
apiKey: string,
options?: { baseUrl?: string },
options?: { baseUrl?: string; apiProtocol?: string },
): Promise<{ valid: boolean; error?: string }> {
const profile = getValidationProfile(providerType);
let profile = getValidationProfile(providerType);
if (providerType === 'custom' && options?.apiProtocol) {
if (options.apiProtocol === 'anthropic-messages') {
profile = 'anthropic-header';
} else {
profile = 'openai-compatible';
}
}
if (profile === 'none') {
return { valid: true };
}

View File

@@ -54,6 +54,7 @@ export interface ProviderConfig {
name: string;
type: ProviderType;
baseUrl?: string;
apiProtocol?: ProviderProtocol;
model?: string;
fallbackModels?: string[];
fallbackProviderIds?: string[];
@@ -131,25 +132,25 @@ export interface ProviderAccount {
export type ProviderSecret =
| {
type: 'api_key';
accountId: string;
apiKey: string;
}
type: 'api_key';
accountId: string;
apiKey: string;
}
| {
type: 'oauth';
accountId: string;
accessToken: string;
refreshToken: string;
expiresAt: number;
scopes?: string[];
email?: string;
subject?: string;
}
type: 'oauth';
accountId: string;
accessToken: string;
refreshToken: string;
expiresAt: number;
scopes?: string[];
email?: string;
subject?: string;
}
| {
type: 'local';
accountId: string;
apiKey?: string;
};
type: 'local';
accountId: string;
apiKey?: string;
};
export interface ModelSummary {
id: string;

View File

@@ -33,6 +33,7 @@ export interface ProviderConfig {
name: string;
type: ProviderType;
baseUrl?: string;
apiProtocol?: 'openai-completions' | 'openai-responses' | 'anthropic-messages';
model?: string;
fallbackModels?: string[];
fallbackProviderIds?: string[];