rebrand better-clawd and ship initial npm-ready release

This commit is contained in:
x1xhlol
2026-04-01 16:51:18 +02:00
Unverified
parent 420d4155ec
commit 407fa14d6f
109 changed files with 4155 additions and 1690 deletions

View File

@@ -6,7 +6,10 @@ import {
getAnthropicApiKey,
getApiKeyFromApiKeyHelper,
getClaudeAIOAuthTokens,
getOpenAIApiKey,
getOpenRouterApiKey,
isClaudeAISubscriber,
refreshOpenAIAuthTokenIfNeeded,
refreshAndGetAwsCredentials,
refreshGcpCredentialsIfNeeded,
} from 'src/utils/auth.js'
@@ -14,9 +17,12 @@ import { getUserAgent } from 'src/utils/http.js'
import { getSmallFastModel } from 'src/utils/model/model.js'
import {
getAPIProvider,
getOpenAIBaseUrl,
getOpenRouterBaseUrl,
isFirstPartyAnthropicBaseUrl,
} from 'src/utils/model/providers.js'
import { getProxyFetchOptions } from 'src/utils/proxy.js'
import { OpenAIResponsesCompatClient } from './openaiCompat.js'
import {
getIsNonInteractiveSession,
getSessionId,
@@ -98,6 +104,7 @@ export async function getAnthropicClient({
fetchOverride?: ClientOptions['fetch']
source?: string
}): Promise<Anthropic> {
const provider = getAPIProvider()
const containerId = process.env.CLAUDE_CODE_CONTAINER_ID
const remoteSessionId = process.env.CLAUDE_CODE_REMOTE_SESSION_ID
const clientApp = process.env.CLAUDE_AGENT_SDK_CLIENT_APP
@@ -150,7 +157,7 @@ export async function getAnthropicClient({
fetch: resolvedFetch,
}),
}
if (isEnvTruthy(process.env.CLAUDE_CODE_USE_BEDROCK)) {
if (provider === 'bedrock') {
const { AnthropicBedrock } = await import('@anthropic-ai/bedrock-sdk')
// Use region override for small fast model if specified
const awsRegion =
@@ -188,7 +195,7 @@ export async function getAnthropicClient({
// we have always been lying about the return type - this doesn't support batching or models
return new AnthropicBedrock(bedrockArgs) as unknown as Anthropic
}
if (isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY)) {
if (provider === 'foundry') {
const { AnthropicFoundry } = await import('@anthropic-ai/foundry-sdk')
// Determine Azure AD token provider based on configuration
// SDK reads ANTHROPIC_FOUNDRY_API_KEY by default
@@ -218,7 +225,7 @@ export async function getAnthropicClient({
// we have always been lying about the return type - this doesn't support batching or models
return new AnthropicFoundry(foundryArgs) as unknown as Anthropic
}
if (isEnvTruthy(process.env.CLAUDE_CODE_USE_VERTEX)) {
if (provider === 'vertex') {
// Refresh GCP credentials if gcpAuthRefresh is configured and credentials are expired
// This is similar to how we handle AWS credential refresh for Bedrock
if (!isEnvTruthy(process.env.CLAUDE_CODE_SKIP_VERTEX_AUTH)) {
@@ -297,7 +304,37 @@ export async function getAnthropicClient({
return new AnthropicVertex(vertexArgs) as unknown as Anthropic
}
// Determine authentication method based on available tokens
if (provider === 'openrouter') {
const clientConfig: ConstructorParameters<typeof Anthropic>[0] = {
apiKey: null,
authToken: apiKey || getOpenRouterApiKey(),
baseURL: getOpenRouterBaseUrl(),
...ARGS,
...(isDebugToStdErr() && { logger: createStderrLogger() }),
}
return new Anthropic(clientConfig)
}
if (provider === 'openai') {
await refreshOpenAIAuthTokenIfNeeded()
const openAIKey = apiKey || getOpenAIApiKey()
if (!openAIKey) {
throw new Error(
'OpenAI provider selected but no OpenAI API key or access token is configured.',
)
}
return new OpenAIResponsesCompatClient({
apiKey: openAIKey,
baseURL: getOpenAIBaseUrl(),
defaultHeaders,
fetchImpl: resolvedFetch,
timeoutMs: ARGS.timeout,
}) as unknown as Anthropic
}
// Determine authentication method based on available Anthropic tokens
const clientConfig: ConstructorParameters<typeof Anthropic>[0] = {
apiKey: isClaudeAISubscriber() ? null : apiKey || getAnthropicApiKey(),
authToken: isClaudeAISubscriber()

View File

@@ -343,13 +343,13 @@ export async function checkGroveForNonInteractive(): Promise<void> {
if (config === null || config.notice_is_grace_period) {
// Grace period is still active - show informational message and continue
writeToStderr(
'\nAn update to our Consumer Terms and Privacy Policy will take effect on October 8, 2025. Run `claude` to review the updated terms.\n\n',
'\nAn update to our Consumer Terms and Privacy Policy will take effect on October 8, 2025. Run `better-clawd` to review the updated terms.\n\n',
)
await markGroveNoticeViewed()
} else {
// Grace period has ended - show error message and exit
writeToStderr(
'\n[ACTION REQUIRED] An update to our Consumer Terms and Privacy Policy has taken effect on October 8, 2025. You must run `claude` to review the updated terms.\n\n',
'\n[ACTION REQUIRED] An update to our Consumer Terms and Privacy Policy has taken effect on October 8, 2025. You must run `better-clawd` to review the updated terms.\n\n',
)
await gracefulShutdown(1)
}

View File

@@ -0,0 +1,495 @@
// Minimal structural types for the Anthropic Messages API surface that this
// compat layer consumes. They intentionally mirror only the fields the
// translation code in this file reads, not the full SDK types.

// Tool definition as supplied by Anthropic-style callers.
type AnthropicTool = {
  name: string
  description?: string
  input_schema?: Record<string, unknown>
}

// One content block of an Anthropic message. The trailing catch-all variant
// keeps unrecognized block types representable instead of failing to compile.
type AnthropicContentBlock =
  | { type: 'text'; text: string }
  | { type: 'tool_use'; id?: string; name: string; input?: unknown }
  | { type: 'tool_result'; tool_use_id?: string; content?: unknown }
  | { type: string; [key: string]: unknown }

// A single chat turn; content may be a bare string or a block array.
type AnthropicMessage = {
  role: 'user' | 'assistant'
  content: string | AnthropicContentBlock[]
}

// Subset of Anthropic messages.create() params this shim translates.
type AnthropicMessagesCreateParams = {
  model: string
  messages: AnthropicMessage[]
  system?: string | Array<{ type?: string; text?: string }>
  tools?: AnthropicTool[]
  tool_choice?: { type?: string; name?: string } | null
  max_tokens?: number
  temperature?: number
  stream?: boolean
}

// One item of an OpenAI Responses API `output` array. The third variant is a
// catch-all for item types (e.g. reasoning) not explicitly handled above.
type OpenAIResponseOutputItem =
  | {
      type: 'message'
      role?: 'assistant'
      content?: Array<{ type: string; text?: string }>
    }
  | {
      type: 'function_call'
      id?: string
      call_id?: string
      name: string
      arguments?: string
    }
  | {
      type: string
      id?: string
      call_id?: string
      name?: string
      arguments?: string
      content?: Array<{ type: string; text?: string }>
      summary?: Array<{ type: string; text?: string }>
    }

// Parsed JSON body of a (non-streamed) OpenAI Responses API response.
type OpenAIResponse = {
  id: string
  model: string
  output?: OpenAIResponseOutputItem[]
  usage?: {
    input_tokens?: number
    output_tokens?: number
    total_tokens?: number
  }
}

// Constructor options for OpenAIResponsesCompatClient.
type OpenAICompatOptions = {
  apiKey: string
  baseURL: string
  defaultHeaders?: Record<string, string>
  fetchImpl?: typeof fetch
  timeoutMs: number
}

// Shape of the streaming handle returned for `stream: true` requests,
// mirroring the Anthropic SDK's withResponse() surface.
type StreamWithResponse = {
  withResponse(): Promise<{
    request_id: string
    response: Response
    data: OpenAICompatStream
  }>
}
/**
 * Replays a pre-computed list of Anthropic-style stream events through the
 * async-iterator protocol, mimicking the SDK's message stream surface.
 * Calling `controller.abort()` stops iteration before the next event is
 * yielded; events already consumed are unaffected.
 */
class OpenAICompatStream implements AsyncIterable<Record<string, unknown>> {
  private stopped = false

  // Matches the Anthropic SDK stream's `controller.abort()` surface.
  controller = {
    abort: () => {
      this.stopped = true
    },
  }

  constructor(private readonly queue: Record<string, unknown>[]) {}

  async *[Symbol.asyncIterator](): AsyncIterator<Record<string, unknown>> {
    for (const item of this.queue) {
      if (this.stopped) {
        return
      }
      yield item
    }
  }
}
/**
 * Maps an incoming (possibly Anthropic) model id to an OpenAI model id.
 *
 * Recognized OpenAI families (`gpt-*`, o-series reasoning models `o1`/`o3`/
 * `o4-…`, and `codex*`) pass through unchanged; any other id falls back to
 * OPENAI_DEFAULT_MODEL (read at call time) or the hard-coded default.
 */
function normalizeOpenAIModel(model: string): string {
  // BUG FIX: the original used startsWith('o'), which matched ANY model id
  // beginning with "o" (e.g. "open-mistral-7b") and passed it through as if
  // it were an OpenAI model. Require a digit after the "o" so only the
  // o-series (o1, o3-mini, o4, ...) is recognized.
  const isOpenAIModel =
    model.startsWith('gpt-') || /^o\d/.test(model) || model.startsWith('codex')
  if (isOpenAIModel) {
    return model
  }
  return process.env.OPENAI_DEFAULT_MODEL || 'gpt-5.4'
}
/**
 * Flattens an Anthropic `system` prompt (string or array of text blocks)
 * into the single `instructions` string the OpenAI Responses API expects.
 * Returns undefined when no system prompt was provided; empty text blocks
 * are dropped, and remaining blocks are joined with blank lines.
 */
function systemToInstructions(
  system?: AnthropicMessagesCreateParams['system'],
): string | undefined {
  if (!system) {
    return undefined
  }
  if (typeof system === 'string') {
    return system
  }
  const parts: string[] = []
  for (const block of system) {
    if ('text' in block && typeof block.text === 'string' && block.text) {
      parts.push(block.text)
    }
  }
  return parts.join('\n\n')
}
/**
 * Converts a tool_result `content` payload (string, block array, or any
 * other value) into the plain string the Responses API expects as a
 * function_call_output. Array entries contribute their `text` field when
 * present, otherwise their JSON serialization; entries join with newlines.
 */
function stringifyToolOutput(content: unknown): string {
  if (typeof content === 'string') {
    return content
  }
  if (!Array.isArray(content)) {
    // null/undefined become the JSON of '' so the output is never "null".
    return JSON.stringify(content ?? '')
  }
  const pieces = content.map(entry => {
    if (typeof entry === 'string') {
      return entry
    }
    const hasText =
      entry &&
      typeof entry === 'object' &&
      'text' in entry &&
      typeof (entry as { text?: unknown }).text === 'string'
    return hasText ? (entry as { text: string }).text : JSON.stringify(entry)
  })
  return pieces.join('\n')
}
/**
 * Translates Anthropic chat messages into the Responses API `input` array.
 *
 * Consecutive text blocks coalesce into one role message (newline-joined);
 * assistant tool_use blocks become function_call items; user tool_result
 * blocks become function_call_output items. Any other block type is
 * preserved as a tagged plain-text message so no content is dropped.
 */
function anthropicMessagesToOpenAIInput(
  messages: AnthropicMessage[],
): Array<Record<string, unknown>> {
  const result: Array<Record<string, unknown>> = []
  for (const msg of messages) {
    if (typeof msg.content === 'string') {
      result.push({ role: msg.role, content: msg.content })
      continue
    }
    let pendingText: string[] = []
    const emitPendingText = () => {
      if (pendingText.length > 0) {
        result.push({ role: msg.role, content: pendingText.join('\n') })
        pendingText = []
      }
    }
    for (const block of msg.content) {
      if (block.type === 'text' && typeof block.text === 'string') {
        pendingText.push(block.text)
        continue
      }
      // A non-text block interrupts the text run: flush what we buffered.
      emitPendingText()
      if (msg.role === 'assistant' && block.type === 'tool_use') {
        result.push({
          type: 'function_call',
          call_id: block.id ?? `call_${block.name}`,
          name: block.name,
          arguments: JSON.stringify(block.input ?? {}),
        })
      } else if (msg.role === 'user' && block.type === 'tool_result') {
        result.push({
          type: 'function_call_output',
          call_id: block.tool_use_id ?? 'tool_call',
          output: stringifyToolOutput(block.content),
        })
      } else {
        // Unknown block type (or tool block on the wrong role): serialize it.
        result.push({
          role: msg.role,
          content: `[${block.type}] ${stringifyToolOutput(block)}`,
        })
      }
    }
    emitPendingText()
  }
  return result
}
/**
 * Maps Anthropic tool definitions to Responses API function tools.
 * A tool without an input_schema receives a permissive empty object schema.
 * Returns undefined when there are no tools, so the request field is
 * omitted entirely by JSON serialization.
 */
function anthropicToolsToOpenAI(
  tools?: AnthropicTool[],
): Array<Record<string, unknown>> | undefined {
  if (!tools?.length) {
    return undefined
  }
  return tools.map(({ name, description, input_schema }) => ({
    type: 'function',
    name,
    description,
    parameters:
      input_schema ?? {
        type: 'object',
        properties: {},
        additionalProperties: true,
      },
    strict: false,
  }))
}
/**
 * Maps an Anthropic tool_choice to the Responses API equivalent.
 * 'auto'/'none' pass through as strings; a named tool selection becomes a
 * function reference object; anything else (null, missing type, 'tool'
 * without a name) yields undefined so the field is omitted.
 */
function anthropicToolChoiceToOpenAI(
  toolChoice: AnthropicMessagesCreateParams['tool_choice'],
): string | Record<string, unknown> | undefined {
  const kind = toolChoice?.type
  switch (kind) {
    case 'auto':
    case 'none':
      return kind
    case 'tool':
      return toolChoice?.name
        ? { type: 'function', name: toolChoice.name }
        : undefined
    default:
      return undefined
  }
}
/**
 * Pulls the concatenated text out of a Responses output item, preferring
 * `content` parts (message items) and falling back to `summary` parts
 * (reasoning-style items). Returns '' when neither array is present.
 */
function extractAssistantText(item: OpenAIResponseOutputItem): string {
  const joinText = (parts: Array<{ type: string; text?: string }>): string =>
    parts.map(part => (typeof part.text === 'string' ? part.text : '')).join('')
  if ('content' in item && Array.isArray(item.content)) {
    return joinText(item.content)
  }
  if ('summary' in item && Array.isArray(item.summary)) {
    return joinText(item.summary)
  }
  return ''
}
/**
 * Converts Responses API output items into Anthropic content blocks.
 *
 * function_call items become tool_use blocks — arguments are parsed as JSON
 * when possible, otherwise kept as the raw string. message items and any
 * unrecognized item types contribute a text block when their extracted text
 * is non-empty.
 */
function openAIOutputToAnthropicBlocks(
  output: OpenAIResponseOutputItem[] = [],
): Array<Record<string, unknown>> {
  const blocks: Array<Record<string, unknown>> = []
  const pushTextFrom = (item: OpenAIResponseOutputItem) => {
    const text = extractAssistantText(item)
    if (text) {
      blocks.push({ type: 'text', text })
    }
  }
  for (const item of output) {
    if (item.type === 'function_call') {
      let input: unknown
      try {
        input = item.arguments ? JSON.parse(item.arguments) : {}
      } catch {
        // Arguments were not valid JSON; surface the raw string untouched.
        input = item.arguments ?? {}
      }
      blocks.push({
        type: 'tool_use',
        id: item.call_id ?? item.id ?? `call_${item.name}`,
        name: item.name,
        input,
      })
    } else {
      // 'message' items and unknown item types both reduce to their text.
      pushTextFrom(item)
    }
  }
  return blocks
}
/**
 * Wraps the converted content blocks in an Anthropic Messages API response
 * envelope. stop_reason is 'tool_use' when any tool call is present,
 * otherwise 'end_turn'; missing usage counts default to 0.
 */
function openAIResponseToAnthropicMessage(
  response: OpenAIResponse,
  model: string,
): Record<string, unknown> {
  const content = openAIOutputToAnthropicBlocks(response.output)
  const hasToolUse = content.some(block => block.type === 'tool_use')
  return {
    id: response.id,
    type: 'message',
    role: 'assistant',
    model,
    content,
    stop_reason: hasToolUse ? 'tool_use' : 'end_turn',
    stop_sequence: null,
    usage: {
      input_tokens: response.usage?.input_tokens ?? 0,
      output_tokens: response.usage?.output_tokens ?? 0,
    },
  }
}
/**
 * Expands a complete (non-streamed) OpenAI response into the sequence of
 * Anthropic streaming events that OpenAICompatStream replays: message_start,
 * then content_block_start/delta/stop per block, then a final message_delta
 * (stop_reason + output token usage) and message_stop.
 */
function openAIResponseToAnthropicEvents(
  response: OpenAIResponse,
  model: string,
): Record<string, unknown>[] {
  const message = openAIResponseToAnthropicMessage(response, model)
  const blocks = (message.content as Array<Record<string, unknown>>) ?? []
  const events: Record<string, unknown>[] = [
    {
      // NOTE(review): real Anthropic streams emit message_start with empty
      // content and fill it via deltas; here the fully populated message is
      // attached up front — confirm downstream consumers tolerate this.
      type: 'message_start',
      message,
    },
  ]
  blocks.forEach((block, index) => {
    if (block.type === 'text') {
      events.push({
        type: 'content_block_start',
        index,
        content_block: { type: 'text', text: '' },
      })
      // The whole text arrives as one delta rather than incremental chunks.
      events.push({
        type: 'content_block_delta',
        index,
        delta: {
          type: 'text_delta',
          text: block.text,
        },
      })
      events.push({ type: 'content_block_stop', index })
      return
    }
    if (block.type === 'tool_use') {
      // Tool input is replayed as a single input_json_delta carrying the
      // full serialized arguments string.
      const rawInput =
        typeof block.input === 'string'
          ? block.input
          : JSON.stringify(block.input ?? {})
      events.push({
        type: 'content_block_start',
        index,
        content_block: {
          type: 'tool_use',
          id: block.id,
          name: block.name,
          // NOTE(review): Anthropic's content_block_start uses `input: {}`;
          // the empty string here mirrors a partial-JSON accumulator —
          // verify the consuming stream parser accepts it.
          input: '',
        },
      })
      events.push({
        type: 'content_block_delta',
        index,
        delta: {
          type: 'input_json_delta',
          partial_json: rawInput,
        },
      })
      events.push({ type: 'content_block_stop', index })
    }
    // Blocks of any other type are silently skipped here (none are produced
    // by openAIOutputToAnthropicBlocks at present).
  })
  // Trailing metadata: final stop_reason plus the output token count.
  events.push({
    type: 'message_delta',
    delta: {
      stop_reason: message.stop_reason,
      stop_sequence: null,
    },
    usage: {
      output_tokens: response.usage?.output_tokens ?? 0,
    },
  })
  events.push({ type: 'message_stop' })
  return events
}
/**
 * Assembles the JSON body for a POST /responses call from Anthropic-style
 * request params. Fields whose translation yields undefined are dropped by
 * JSON.stringify, so the wire payload stays minimal.
 */
function buildOpenAIRequestBody(
  params: AnthropicMessagesCreateParams,
): Record<string, unknown> {
  const { model, messages, system, tools, tool_choice, max_tokens, temperature } =
    params
  return {
    model: normalizeOpenAIModel(model),
    input: anthropicMessagesToOpenAIInput(messages),
    instructions: systemToInstructions(system),
    tools: anthropicToolsToOpenAI(tools),
    tool_choice: anthropicToolChoiceToOpenAI(tool_choice),
    max_output_tokens: max_tokens,
    temperature,
  }
}
/**
 * Minimal Anthropic-SDK-shaped client backed by the OpenAI Responses API.
 *
 * Only the `beta.messages.create` surface used by this codebase is
 * implemented. Streaming requests are emulated: the full response is fetched
 * once, then replayed as a synthetic Anthropic event stream.
 */
export class OpenAIResponsesCompatClient {
  private readonly options: OpenAICompatOptions

  beta = {
    messages: {
      /**
       * Anthropic-compatible create(). Returns a plain message promise for
       * non-streaming calls, or an object exposing withResponse() that
       * yields a replayed event stream for `stream: true` calls.
       */
      create: (
        params: AnthropicMessagesCreateParams,
        requestOptions?: { signal?: AbortSignal },
      ): Promise<Record<string, unknown>> | StreamWithResponse => {
        if (params.stream) {
          return {
            withResponse: async () => {
              const response = await this.createResponse(params, requestOptions)
              const stream = new OpenAICompatStream(
                openAIResponseToAnthropicEvents(
                  response,
                  normalizeOpenAIModel(params.model),
                ),
              )
              return {
                request_id: response.id,
                // Synthesize an HTTP Response so callers that inspect
                // status/headers on the raw response keep working.
                response: new Response(JSON.stringify(response), {
                  status: 200,
                  headers: { 'content-type': 'application/json' },
                }),
                data: stream,
              }
            },
          }
        }
        return this.createResponse(params, requestOptions).then(response =>
          openAIResponseToAnthropicMessage(
            response,
            normalizeOpenAIModel(params.model),
          ),
        )
      },
    },
  }

  constructor(options: OpenAICompatOptions) {
    this.options = options
  }

  /**
   * POSTs the translated request to `${baseURL}/responses` and returns the
   * parsed JSON. Honors the caller's AbortSignal (including one that is
   * already aborted) and the configured timeout; throws with the status code
   * and body text on non-2xx responses.
   */
  private async createResponse(
    params: AnthropicMessagesCreateParams,
    requestOptions?: { signal?: AbortSignal },
  ): Promise<OpenAIResponse> {
    const controller = new AbortController()
    const { timeoutMs } = this.options
    // Guard the timer: Node's setTimeout coerces undefined/NaN delays to
    // ~1ms, which would abort every request almost immediately if a caller
    // ever passed a missing timeout through the typed interface.
    const timer =
      Number.isFinite(timeoutMs) && timeoutMs > 0
        ? setTimeout(() => controller.abort(), timeoutMs)
        : undefined
    const callerSignal = requestOptions?.signal
    const onCallerAbort = () => controller.abort()
    if (callerSignal) {
      if (callerSignal.aborted) {
        // BUG FIX: an already-aborted signal never fires 'abort' again, so
        // the original listener-only wiring would still issue the request.
        controller.abort()
      } else {
        callerSignal.addEventListener('abort', onCallerAbort, { once: true })
      }
    }
    try {
      const response = await (this.options.fetchImpl ?? globalThis.fetch)(
        `${this.options.baseURL.replace(/\/$/, '')}/responses`,
        {
          method: 'POST',
          signal: controller.signal,
          headers: {
            'content-type': 'application/json',
            Authorization: `Bearer ${this.options.apiKey}`,
            ...this.options.defaultHeaders,
          },
          body: JSON.stringify(buildOpenAIRequestBody(params)),
        },
      )
      if (!response.ok) {
        throw new Error(
          `OpenAI Responses API error ${response.status}: ${await response.text()}`,
        )
      }
      return (await response.json()) as OpenAIResponse
    } finally {
      if (timer !== undefined) {
        clearTimeout(timer)
      }
      // BUG FIX: remove the listener so long-lived caller signals do not
      // accumulate one closure per request.
      callerSignal?.removeEventListener('abort', onCallerAbort)
    }
  }
}