Backup before continuing from Codex 5.2 session - User storage, compaction suggestions, streaming improvements

Author: Gemini AI
Date:   2025-12-24 21:27:05 +04:00
parent f9748391a9
commit e8c38b0add
93 changed files with 10615 additions and 2037 deletions
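Of the 93 files, the excerpt below covers the Z.AI client: it drops the Anthropic-compatible /v1/messages proxy (Claude model names, x-api-key header) in favour of Z.AI's OpenAI-compatible /api/paas/v4 endpoint with /chat/completions, Bearer auth, and GLM models. A minimal sketch of the request body described by the new schema, with illustrative values (field names come from ZAIChatRequestSchema in the diff; the message text is made up):

// Sketch of the request shape for the new OpenAI-compatible endpoint.
// "system" is no longer a top-level field; it travels as a message role.
const request = {
  model: "glm-4.7",
  messages: [
    { role: "system", content: "You are a coding assistant." },
    { role: "user", content: "Explain this diff." }
  ],
  max_tokens: 8192,
  stream: true,
  thinking: { type: "enabled" }   // optional GLM thinking toggle
}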


@@ -1,113 +1,111 @@
-/**
- * Z.AI API Integration
- * Provides access to Z.AI's GLM Coding Plan API (Anthropic-compatible)
- * Based on https://docs.z.ai/devpack/tool/claude#step-2-config-glm-coding-plan
- */
 import { z } from "zod"
-// Configuration schema for Z.AI
 export const ZAIConfigSchema = z.object({
   apiKey: z.string().optional(),
-  endpoint: z.string().default("https://api.z.ai/api/anthropic"),
+  endpoint: z.string().default("https://api.z.ai/api/paas/v4"),
   enabled: z.boolean().default(false),
-  timeout: z.number().default(3000000) // 50 minutes as per docs
+  timeout: z.number().default(300000)
 })
 export type ZAIConfig = z.infer<typeof ZAIConfigSchema>
-// Message schema (Anthropic-compatible)
 export const ZAIMessageSchema = z.object({
-  role: z.enum(["user", "assistant"]),
+  role: z.enum(["user", "assistant", "system"]),
   content: z.string()
 })
 export type ZAIMessage = z.infer<typeof ZAIMessageSchema>
-// Chat request schema
 export const ZAIChatRequestSchema = z.object({
-  model: z.string().default("claude-sonnet-4-20250514"),
+  model: z.string().default("glm-4.7"),
   messages: z.array(ZAIMessageSchema),
   max_tokens: z.number().default(8192),
   stream: z.boolean().default(true),
-  system: z.string().optional()
+  temperature: z.number().optional(),
+  thinking: z.object({
+    type: z.enum(["enabled", "disabled"]).optional()
+  }).optional()
 })
 export type ZAIChatRequest = z.infer<typeof ZAIChatRequestSchema>
-// Chat response schema
 export const ZAIChatResponseSchema = z.object({
   id: z.string(),
-  type: z.string(),
-  role: z.string(),
-  content: z.array(z.object({
-    type: z.string(),
-    text: z.string().optional()
-  })),
+  object: z.string(),
+  created: z.number(),
+  model: z.string(),
-  stop_reason: z.string().nullable().optional(),
-  stop_sequence: z.string().nullable().optional(),
+  choices: z.array(z.object({
+    index: z.number(),
+    message: z.object({
+      role: z.string(),
+      content: z.string().optional(),
+      reasoning_content: z.string().optional()
+    }),
+    finish_reason: z.string()
+  })),
   usage: z.object({
-    input_tokens: z.number(),
-    output_tokens: z.number()
-  }).optional()
+    prompt_tokens: z.number(),
+    completion_tokens: z.number(),
+    total_tokens: z.number()
+  })
 })
 export type ZAIChatResponse = z.infer<typeof ZAIChatResponseSchema>
-// Stream chunk schema
 export const ZAIStreamChunkSchema = z.object({
-  type: z.string(),
-  index: z.number().optional(),
-  delta: z.object({
-    type: z.string().optional(),
-    text: z.string().optional()
-  }).optional(),
-  message: z.object({
-    id: z.string(),
-    type: z.string(),
-    role: z.string(),
-    content: z.array(z.any()),
-    model: z.string()
-  }).optional(),
-  content_block: z.object({
-    type: z.string(),
-    text: z.string()
-  }).optional()
+  id: z.string(),
+  object: z.string(),
+  created: z.number(),
+  model: z.string(),
+  choices: z.array(z.object({
+    index: z.number(),
+    delta: z.object({
+      role: z.string().optional(),
+      content: z.string().optional(),
+      reasoning_content: z.string().optional()
+    }),
+    finish_reason: z.string().nullable().optional()
+  }))
 })
 export type ZAIStreamChunk = z.infer<typeof ZAIStreamChunkSchema>
+export const ZAI_MODELS = [
+  "glm-4.7",
+  "glm-4.6",
+  "glm-4.5",
+  "glm-4.5-air",
+  "glm-4.5-flash",
+  "glm-4.5-long"
+] as const
+export type ZAIModelName = typeof ZAI_MODELS[number]
 export class ZAIClient {
   private config: ZAIConfig
   private baseUrl: string
   constructor(config: ZAIConfig) {
     this.config = config
-    this.baseUrl = config.endpoint.replace(/\/$/, "") // Remove trailing slash
+    this.baseUrl = config.endpoint.replace(/\/$/, "")
   }
-  /**
-   * Test connection to Z.AI API
-   */
   async testConnection(): Promise<boolean> {
     if (!this.config.apiKey) {
       return false
     }
     try {
-      // Make a minimal request to test auth
-      const response = await fetch(`${this.baseUrl}/v1/messages`, {
+      const response = await fetch(`${this.baseUrl}/chat/completions`, {
         method: "POST",
         headers: this.getHeaders(),
         body: JSON.stringify({
-          model: "claude-sonnet-4-20250514",
+          model: "glm-4.7",
           max_tokens: 1,
           messages: [{ role: "user", content: "test" }]
         })
       })
-      // Any response other than auth error means connection works
      return response.status !== 401 && response.status !== 403
     } catch (error) {
       console.error("Z.AI connection test failed:", error)
@@ -115,28 +113,16 @@ export class ZAIClient {
     }
   }
-  /**
-   * List available models
-   */
   async listModels(): Promise<string[]> {
-    // Z.AI provides access to Claude models through their proxy
-    return [
-      "claude-sonnet-4-20250514",
-      "claude-3-5-sonnet-20241022",
-      "claude-3-opus-20240229",
-      "claude-3-haiku-20240307"
-    ]
+    return [...ZAI_MODELS]
   }
-  /**
-   * Chat completion (streaming)
-   */
   async *chatStream(request: ZAIChatRequest): AsyncGenerator<ZAIStreamChunk> {
     if (!this.config.apiKey) {
       throw new Error("Z.AI API key is required")
     }
-    const response = await fetch(`${this.baseUrl}/v1/messages`, {
+    const response = await fetch(`${this.baseUrl}/chat/completions`, {
       method: "POST",
       headers: this.getHeaders(),
       body: JSON.stringify({
@@ -165,7 +151,7 @@ export class ZAIClient {
       buffer += decoder.decode(value, { stream: true })
       const lines = buffer.split("\n")
-      buffer = lines.pop() || "" // Keep incomplete line in buffer
+      buffer = lines.pop() || ""
       for (const line of lines) {
         if (line.startsWith("data: ")) {
@@ -176,7 +162,6 @@ export class ZAIClient {
             const parsed = JSON.parse(data)
             yield parsed as ZAIStreamChunk
           } catch (e) {
-            // Skip invalid JSON
           }
         }
       }
@@ -186,15 +171,12 @@ export class ZAIClient {
     }
   }
-  /**
-   * Chat completion (non-streaming)
-   */
   async chat(request: ZAIChatRequest): Promise<ZAIChatResponse> {
     if (!this.config.apiKey) {
       throw new Error("Z.AI API key is required")
     }
-    const response = await fetch(`${this.baseUrl}/v1/messages`, {
+    const response = await fetch(`${this.baseUrl}/chat/completions`, {
       method: "POST",
       headers: this.getHeaders(),
       body: JSON.stringify({
@@ -211,31 +193,14 @@ export class ZAIClient {
     return await response.json()
   }
-  /**
-   * Get request headers
-   */
   private getHeaders(): Record<string, string> {
     return {
       "Content-Type": "application/json",
-      "x-api-key": this.config.apiKey || "",
-      "anthropic-version": "2023-06-01"
+      "Authorization": `Bearer ${this.config.apiKey}`
     }
   }
-  /**
-   * Validate API key
-   */
   static validateApiKey(apiKey: string): boolean {
     return typeof apiKey === "string" && apiKey.length > 0
   }
-}
-// Default available models
-export const ZAI_MODELS = [
-  "claude-sonnet-4-20250514",
-  "claude-3-5-sonnet-20241022",
-  "claude-3-opus-20240229",
-  "claude-3-haiku-20240307"
-] as const
-export type ZAIModelName = typeof ZAI_MODELS[number]
+}
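
A usage sketch against the refactored client, based only on the schemas and methods in the diff above; the module path "./zai" and the ZAI_API_KEY environment variable are placeholders, not part of the commit:

// Sketch: stream a completion through the updated OpenAI-compatible client.
import { ZAIClient, ZAIConfigSchema } from "./zai"   // hypothetical module path

async function main() {
  const config = ZAIConfigSchema.parse({
    apiKey: process.env.ZAI_API_KEY,   // placeholder; endpoint and timeout fall back to defaults
    enabled: true
  })
  const client = new ZAIClient(config)

  for await (const chunk of client.chatStream({
    model: "glm-4.7",
    messages: [{ role: "user", content: "Summarize the last commit." }],
    max_tokens: 1024,
    stream: true
  })) {
    // OpenAI-style chunks: text arrives on choices[i].delta.content,
    // GLM reasoning traces on choices[i].delta.reasoning_content.
    const delta = chunk.choices[0]?.delta
    if (delta?.content) process.stdout.write(delta.content)
  }
}

main().catch(console.error)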