feat: integrate Z.AI, Ollama Cloud, and OpenCode Zen free models
Added comprehensive AI model integrations: Z.AI Integration: - Client with Anthropic-compatible API (GLM Coding Plan) - Routes for config, testing, and streaming chat - Settings UI component with API key management OpenCode Zen Integration: - Free models client using 'public' API key - Dynamic model fetching from models.dev - Supports GPT-5 Nano, Big Pickle, Grok Code Fast 1, MiniMax M2.1 - No API key required for free tier! UI Enhancements: - Added Free Models tab (first position) in Advanced Settings - Z.AI tab with GLM Coding Plan info - OpenCode Zen settings with model cards and status All integrations work standalone without opencode.exe dependency.
This commit is contained in:
312
packages/server/src/integrations/opencode-zen.ts
Normal file
312
packages/server/src/integrations/opencode-zen.ts
Normal file
@@ -0,0 +1,312 @@
|
||||
/**
|
||||
* OpenCode Zen API Integration
|
||||
* Provides direct access to OpenCode's free "Zen" models without requiring opencode.exe
|
||||
* Based on reverse-engineering the OpenCode source at https://github.com/sst/opencode
|
||||
*
|
||||
* Free models (cost.input === 0) can be accessed with apiKey: "public"
|
||||
*/
|
||||
|
||||
import { z } from "zod"
|
||||
|
||||
// Configuration schema for OpenCode Zen
|
||||
export const OpenCodeZenConfigSchema = z.object({
|
||||
enabled: z.boolean().default(true), // Free models enabled by default
|
||||
endpoint: z.string().default("https://api.opencode.ai/v1"),
|
||||
apiKey: z.string().default("public") // "public" key for free models
|
||||
})
|
||||
|
||||
export type OpenCodeZenConfig = z.infer<typeof OpenCodeZenConfigSchema>
|
||||
|
||||
// Model schema matching models.dev format
|
||||
export const ZenModelSchema = z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
family: z.string().optional(),
|
||||
reasoning: z.boolean().optional(),
|
||||
tool_call: z.boolean().optional(),
|
||||
attachment: z.boolean().optional(),
|
||||
temperature: z.boolean().optional(),
|
||||
cost: z.object({
|
||||
input: z.number(),
|
||||
output: z.number(),
|
||||
cache_read: z.number().optional(),
|
||||
cache_write: z.number().optional()
|
||||
}).optional(),
|
||||
limit: z.object({
|
||||
context: z.number(),
|
||||
output: z.number()
|
||||
}).optional()
|
||||
})
|
||||
|
||||
export type ZenModel = z.infer<typeof ZenModelSchema>
|
||||
|
||||
// Chat message schema (OpenAI-compatible)
|
||||
export const ChatMessageSchema = z.object({
|
||||
role: z.enum(["user", "assistant", "system"]),
|
||||
content: z.string()
|
||||
})
|
||||
|
||||
export type ChatMessage = z.infer<typeof ChatMessageSchema>
|
||||
|
||||
// Chat request schema
|
||||
export const ChatRequestSchema = z.object({
|
||||
model: z.string(),
|
||||
messages: z.array(ChatMessageSchema),
|
||||
stream: z.boolean().default(true),
|
||||
temperature: z.number().optional(),
|
||||
max_tokens: z.number().optional()
|
||||
})
|
||||
|
||||
export type ChatRequest = z.infer<typeof ChatRequestSchema>
|
||||
|
||||
// Chat response chunk schema
|
||||
export const ChatChunkSchema = z.object({
|
||||
id: z.string().optional(),
|
||||
object: z.string().optional(),
|
||||
created: z.number().optional(),
|
||||
model: z.string().optional(),
|
||||
choices: z.array(z.object({
|
||||
index: z.number(),
|
||||
delta: z.object({
|
||||
role: z.string().optional(),
|
||||
content: z.string().optional()
|
||||
}).optional(),
|
||||
message: z.object({
|
||||
role: z.string(),
|
||||
content: z.string()
|
||||
}).optional(),
|
||||
finish_reason: z.string().nullable().optional()
|
||||
}))
|
||||
})
|
||||
|
||||
export type ChatChunk = z.infer<typeof ChatChunkSchema>
|
||||
|
||||
// Known free OpenCode Zen models (cost.input === 0)
|
||||
// From models.dev API - these are the free tier models
|
||||
export const FREE_ZEN_MODELS: ZenModel[] = [
|
||||
{
|
||||
id: "gpt-5-nano",
|
||||
name: "GPT-5 Nano",
|
||||
family: "gpt-5-nano",
|
||||
reasoning: true,
|
||||
tool_call: true,
|
||||
attachment: true,
|
||||
temperature: false,
|
||||
cost: { input: 0, output: 0 },
|
||||
limit: { context: 400000, output: 128000 }
|
||||
},
|
||||
{
|
||||
id: "big-pickle",
|
||||
name: "Big Pickle",
|
||||
family: "pickle",
|
||||
reasoning: false,
|
||||
tool_call: true,
|
||||
attachment: false,
|
||||
temperature: true,
|
||||
cost: { input: 0, output: 0 },
|
||||
limit: { context: 128000, output: 16384 }
|
||||
},
|
||||
{
|
||||
id: "grok-code-fast-1",
|
||||
name: "Grok Code Fast 1",
|
||||
family: "grok",
|
||||
reasoning: true,
|
||||
tool_call: true,
|
||||
attachment: false,
|
||||
temperature: true,
|
||||
cost: { input: 0, output: 0 },
|
||||
limit: { context: 256000, output: 10000 }
|
||||
},
|
||||
{
|
||||
id: "minimax-m2.1",
|
||||
name: "MiniMax M2.1",
|
||||
family: "minimax",
|
||||
reasoning: true,
|
||||
tool_call: true,
|
||||
attachment: false,
|
||||
temperature: true,
|
||||
cost: { input: 0, output: 0 },
|
||||
limit: { context: 205000, output: 131072 }
|
||||
}
|
||||
]
|
||||
|
||||
export class OpenCodeZenClient {
|
||||
private config: OpenCodeZenConfig
|
||||
private baseUrl: string
|
||||
private modelsCache: ZenModel[] | null = null
|
||||
private modelsCacheTime: number = 0
|
||||
private readonly CACHE_TTL_MS = 5 * 60 * 1000 // 5 minutes
|
||||
|
||||
constructor(config?: Partial<OpenCodeZenConfig>) {
|
||||
this.config = OpenCodeZenConfigSchema.parse(config || {})
|
||||
this.baseUrl = this.config.endpoint.replace(/\/$/, "")
|
||||
}
|
||||
|
||||
/**
|
||||
* Get free Zen models from OpenCode
|
||||
*/
|
||||
async getModels(): Promise<ZenModel[]> {
|
||||
// Return cached models if still valid
|
||||
const now = Date.now()
|
||||
if (this.modelsCache && (now - this.modelsCacheTime) < this.CACHE_TTL_MS) {
|
||||
return this.modelsCache
|
||||
}
|
||||
|
||||
try {
|
||||
// Try to fetch fresh models from models.dev
|
||||
const response = await fetch("https://models.dev/api.json", {
|
||||
headers: {
|
||||
"User-Agent": "NomadArch/1.0"
|
||||
},
|
||||
signal: AbortSignal.timeout(10000)
|
||||
})
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
// Extract OpenCode provider and filter free models
|
||||
const opencodeProvider = data["opencode"]
|
||||
if (opencodeProvider && opencodeProvider.models) {
|
||||
const freeModels: ZenModel[] = []
|
||||
for (const [id, model] of Object.entries(opencodeProvider.models)) {
|
||||
const m = model as any
|
||||
if (m.cost && m.cost.input === 0) {
|
||||
freeModels.push({
|
||||
id,
|
||||
name: m.name,
|
||||
family: m.family,
|
||||
reasoning: m.reasoning,
|
||||
tool_call: m.tool_call,
|
||||
attachment: m.attachment,
|
||||
temperature: m.temperature,
|
||||
cost: m.cost,
|
||||
limit: m.limit
|
||||
})
|
||||
}
|
||||
}
|
||||
if (freeModels.length > 0) {
|
||||
this.modelsCache = freeModels
|
||||
this.modelsCacheTime = now
|
||||
return freeModels
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn("Failed to fetch models from models.dev, using fallback:", error)
|
||||
}
|
||||
|
||||
// Fallback to hardcoded free models
|
||||
this.modelsCache = FREE_ZEN_MODELS
|
||||
this.modelsCacheTime = now
|
||||
return FREE_ZEN_MODELS
|
||||
}
|
||||
|
||||
/**
|
||||
* Test connection to OpenCode Zen API
|
||||
*/
|
||||
async testConnection(): Promise<boolean> {
|
||||
try {
|
||||
const models = await this.getModels()
|
||||
return models.length > 0
|
||||
} catch (error) {
|
||||
console.error("OpenCode Zen connection test failed:", error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Chat completion (streaming)
|
||||
*/
|
||||
async *chatStream(request: ChatRequest): AsyncGenerator<ChatChunk> {
|
||||
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"Authorization": `Bearer ${this.config.apiKey}`,
|
||||
"User-Agent": "NomadArch/1.0"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
...request,
|
||||
stream: true
|
||||
})
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
throw new Error(`OpenCode Zen API error (${response.status}): ${errorText}`)
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
throw new Error("Response body is missing")
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ""
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
const lines = buffer.split("\n")
|
||||
buffer = lines.pop() || ""
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim()
|
||||
if (trimmed.startsWith("data: ")) {
|
||||
const data = trimmed.slice(6)
|
||||
if (data === "[DONE]") return
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(data)
|
||||
yield parsed as ChatChunk
|
||||
|
||||
// Check for finish
|
||||
if (parsed.choices?.[0]?.finish_reason) {
|
||||
return
|
||||
}
|
||||
} catch (e) {
|
||||
// Skip invalid JSON
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Chat completion (non-streaming)
|
||||
*/
|
||||
async chat(request: ChatRequest): Promise<ChatChunk> {
|
||||
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"Authorization": `Bearer ${this.config.apiKey}`,
|
||||
"User-Agent": "NomadArch/1.0"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
...request,
|
||||
stream: false
|
||||
})
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
throw new Error(`OpenCode Zen API error (${response.status}): ${errorText}`)
|
||||
}
|
||||
|
||||
return await response.json()
|
||||
}
|
||||
}
|
||||
|
||||
export function getDefaultZenConfig(): OpenCodeZenConfig {
|
||||
return {
|
||||
enabled: true,
|
||||
endpoint: "https://api.opencode.ai/v1",
|
||||
apiKey: "public"
|
||||
}
|
||||
}
|
||||
241
packages/server/src/integrations/zai-api.ts
Normal file
241
packages/server/src/integrations/zai-api.ts
Normal file
@@ -0,0 +1,241 @@
|
||||
/**
|
||||
* Z.AI API Integration
|
||||
* Provides access to Z.AI's GLM Coding Plan API (Anthropic-compatible)
|
||||
* Based on https://docs.z.ai/devpack/tool/claude#step-2-config-glm-coding-plan
|
||||
*/
|
||||
|
||||
import { z } from "zod"
|
||||
|
||||
// Configuration schema for Z.AI
|
||||
export const ZAIConfigSchema = z.object({
|
||||
apiKey: z.string().optional(),
|
||||
endpoint: z.string().default("https://api.z.ai/api/anthropic"),
|
||||
enabled: z.boolean().default(false),
|
||||
timeout: z.number().default(3000000) // 50 minutes as per docs
|
||||
})
|
||||
|
||||
export type ZAIConfig = z.infer<typeof ZAIConfigSchema>
|
||||
|
||||
// Message schema (Anthropic-compatible)
|
||||
export const ZAIMessageSchema = z.object({
|
||||
role: z.enum(["user", "assistant"]),
|
||||
content: z.string()
|
||||
})
|
||||
|
||||
export type ZAIMessage = z.infer<typeof ZAIMessageSchema>
|
||||
|
||||
// Chat request schema
|
||||
export const ZAIChatRequestSchema = z.object({
|
||||
model: z.string().default("claude-sonnet-4-20250514"),
|
||||
messages: z.array(ZAIMessageSchema),
|
||||
max_tokens: z.number().default(8192),
|
||||
stream: z.boolean().default(true),
|
||||
system: z.string().optional()
|
||||
})
|
||||
|
||||
export type ZAIChatRequest = z.infer<typeof ZAIChatRequestSchema>
|
||||
|
||||
// Chat response schema
|
||||
export const ZAIChatResponseSchema = z.object({
|
||||
id: z.string(),
|
||||
type: z.string(),
|
||||
role: z.string(),
|
||||
content: z.array(z.object({
|
||||
type: z.string(),
|
||||
text: z.string().optional()
|
||||
})),
|
||||
model: z.string(),
|
||||
stop_reason: z.string().nullable().optional(),
|
||||
stop_sequence: z.string().nullable().optional(),
|
||||
usage: z.object({
|
||||
input_tokens: z.number(),
|
||||
output_tokens: z.number()
|
||||
}).optional()
|
||||
})
|
||||
|
||||
export type ZAIChatResponse = z.infer<typeof ZAIChatResponseSchema>
|
||||
|
||||
// Stream chunk schema
|
||||
export const ZAIStreamChunkSchema = z.object({
|
||||
type: z.string(),
|
||||
index: z.number().optional(),
|
||||
delta: z.object({
|
||||
type: z.string().optional(),
|
||||
text: z.string().optional()
|
||||
}).optional(),
|
||||
message: z.object({
|
||||
id: z.string(),
|
||||
type: z.string(),
|
||||
role: z.string(),
|
||||
content: z.array(z.any()),
|
||||
model: z.string()
|
||||
}).optional(),
|
||||
content_block: z.object({
|
||||
type: z.string(),
|
||||
text: z.string()
|
||||
}).optional()
|
||||
})
|
||||
|
||||
export type ZAIStreamChunk = z.infer<typeof ZAIStreamChunkSchema>
|
||||
|
||||
export class ZAIClient {
|
||||
private config: ZAIConfig
|
||||
private baseUrl: string
|
||||
|
||||
constructor(config: ZAIConfig) {
|
||||
this.config = config
|
||||
this.baseUrl = config.endpoint.replace(/\/$/, "") // Remove trailing slash
|
||||
}
|
||||
|
||||
/**
|
||||
* Test connection to Z.AI API
|
||||
*/
|
||||
async testConnection(): Promise<boolean> {
|
||||
if (!this.config.apiKey) {
|
||||
return false
|
||||
}
|
||||
|
||||
try {
|
||||
// Make a minimal request to test auth
|
||||
const response = await fetch(`${this.baseUrl}/v1/messages`, {
|
||||
method: "POST",
|
||||
headers: this.getHeaders(),
|
||||
body: JSON.stringify({
|
||||
model: "claude-sonnet-4-20250514",
|
||||
max_tokens: 1,
|
||||
messages: [{ role: "user", content: "test" }]
|
||||
})
|
||||
})
|
||||
|
||||
// Any response other than auth error means connection works
|
||||
return response.status !== 401 && response.status !== 403
|
||||
} catch (error) {
|
||||
console.error("Z.AI connection test failed:", error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List available models
|
||||
*/
|
||||
async listModels(): Promise<string[]> {
|
||||
// Z.AI provides access to Claude models through their proxy
|
||||
return [
|
||||
"claude-sonnet-4-20250514",
|
||||
"claude-3-5-sonnet-20241022",
|
||||
"claude-3-opus-20240229",
|
||||
"claude-3-haiku-20240307"
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
* Chat completion (streaming)
|
||||
*/
|
||||
async *chatStream(request: ZAIChatRequest): AsyncGenerator<ZAIStreamChunk> {
|
||||
if (!this.config.apiKey) {
|
||||
throw new Error("Z.AI API key is required")
|
||||
}
|
||||
|
||||
const response = await fetch(`${this.baseUrl}/v1/messages`, {
|
||||
method: "POST",
|
||||
headers: this.getHeaders(),
|
||||
body: JSON.stringify({
|
||||
...request,
|
||||
stream: true
|
||||
})
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
throw new Error(`Z.AI API error (${response.status}): ${errorText}`)
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
throw new Error("Response body is missing")
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ""
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
const lines = buffer.split("\n")
|
||||
buffer = lines.pop() || "" // Keep incomplete line in buffer
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.startsWith("data: ")) {
|
||||
const data = line.slice(6).trim()
|
||||
if (data === "[DONE]") return
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(data)
|
||||
yield parsed as ZAIStreamChunk
|
||||
} catch (e) {
|
||||
// Skip invalid JSON
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Chat completion (non-streaming)
|
||||
*/
|
||||
async chat(request: ZAIChatRequest): Promise<ZAIChatResponse> {
|
||||
if (!this.config.apiKey) {
|
||||
throw new Error("Z.AI API key is required")
|
||||
}
|
||||
|
||||
const response = await fetch(`${this.baseUrl}/v1/messages`, {
|
||||
method: "POST",
|
||||
headers: this.getHeaders(),
|
||||
body: JSON.stringify({
|
||||
...request,
|
||||
stream: false
|
||||
})
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
throw new Error(`Z.AI API error (${response.status}): ${errorText}`)
|
||||
}
|
||||
|
||||
return await response.json()
|
||||
}
|
||||
|
||||
/**
|
||||
* Get request headers
|
||||
*/
|
||||
private getHeaders(): Record<string, string> {
|
||||
return {
|
||||
"Content-Type": "application/json",
|
||||
"x-api-key": this.config.apiKey || "",
|
||||
"anthropic-version": "2023-06-01"
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate API key
|
||||
*/
|
||||
static validateApiKey(apiKey: string): boolean {
|
||||
return typeof apiKey === "string" && apiKey.length > 0
|
||||
}
|
||||
}
|
||||
|
||||
// Default available models
|
||||
export const ZAI_MODELS = [
|
||||
"claude-sonnet-4-20250514",
|
||||
"claude-3-5-sonnet-20241022",
|
||||
"claude-3-opus-20240229",
|
||||
"claude-3-haiku-20240307"
|
||||
] as const
|
||||
|
||||
export type ZAIModelName = typeof ZAI_MODELS[number]
|
||||
@@ -20,6 +20,8 @@ import { registerEventRoutes } from "./routes/events"
|
||||
import { registerStorageRoutes } from "./routes/storage"
|
||||
import { registerOllamaRoutes } from "./routes/ollama"
|
||||
import { registerQwenRoutes } from "./routes/qwen"
|
||||
import { registerZAIRoutes } from "./routes/zai"
|
||||
import { registerOpenCodeZenRoutes } from "./routes/opencode-zen"
|
||||
import { ServerMeta } from "../api-types"
|
||||
import { InstanceStore } from "../storage/instance-store"
|
||||
|
||||
@@ -65,7 +67,7 @@ export function createHttpServer(deps: HttpServerDeps) {
|
||||
}
|
||||
|
||||
app.addHook("onRequest", (request, _reply, done) => {
|
||||
;(request as FastifyRequest & { __logMeta?: { start: bigint } }).__logMeta = {
|
||||
; (request as FastifyRequest & { __logMeta?: { start: bigint } }).__logMeta = {
|
||||
start: process.hrtime.bigint(),
|
||||
}
|
||||
done()
|
||||
@@ -114,6 +116,8 @@ export function createHttpServer(deps: HttpServerDeps) {
|
||||
})
|
||||
registerOllamaRoutes(app, { logger: deps.logger })
|
||||
registerQwenRoutes(app, { logger: deps.logger })
|
||||
registerZAIRoutes(app, { logger: deps.logger })
|
||||
registerOpenCodeZenRoutes(app, { logger: deps.logger })
|
||||
registerInstanceProxyRoutes(app, { workspaceManager: deps.workspaceManager, logger: proxyLogger })
|
||||
|
||||
|
||||
|
||||
93
packages/server/src/server/routes/opencode-zen.ts
Normal file
93
packages/server/src/server/routes/opencode-zen.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import { OpenCodeZenClient, type ChatRequest, getDefaultZenConfig } from "../../integrations/opencode-zen"
|
||||
import { Logger } from "../../logger"
|
||||
|
||||
interface OpenCodeZenRouteDeps {
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export async function registerOpenCodeZenRoutes(
|
||||
app: FastifyInstance,
|
||||
deps: OpenCodeZenRouteDeps
|
||||
) {
|
||||
const logger = deps.logger.child({ component: "opencode-zen-routes" })
|
||||
|
||||
// Create shared client
|
||||
const client = new OpenCodeZenClient(getDefaultZenConfig())
|
||||
|
||||
// List available free Zen models
|
||||
app.get('/api/opencode-zen/models', async (request, reply) => {
|
||||
try {
|
||||
const models = await client.getModels()
|
||||
|
||||
return {
|
||||
models: models.map(m => ({
|
||||
id: m.id,
|
||||
name: m.name,
|
||||
family: m.family,
|
||||
provider: "opencode-zen",
|
||||
free: true,
|
||||
reasoning: m.reasoning,
|
||||
tool_call: m.tool_call,
|
||||
limit: m.limit
|
||||
}))
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({ error }, "Failed to list OpenCode Zen models")
|
||||
return reply.status(500).send({ error: "Failed to list models" })
|
||||
}
|
||||
})
|
||||
|
||||
// Test connection
|
||||
app.get('/api/opencode-zen/test', async (request, reply) => {
|
||||
try {
|
||||
const connected = await client.testConnection()
|
||||
return { connected }
|
||||
} catch (error) {
|
||||
logger.error({ error }, "OpenCode Zen connection test failed")
|
||||
return reply.status(500).send({ error: "Connection test failed" })
|
||||
}
|
||||
})
|
||||
|
||||
// Chat completion endpoint
|
||||
app.post('/api/opencode-zen/chat', async (request, reply) => {
|
||||
try {
|
||||
const chatRequest = request.body as ChatRequest
|
||||
|
||||
// Handle streaming
|
||||
if (chatRequest.stream) {
|
||||
reply.raw.writeHead(200, {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
'Connection': 'keep-alive',
|
||||
})
|
||||
|
||||
try {
|
||||
for await (const chunk of client.chatStream(chatRequest)) {
|
||||
reply.raw.write(`data: ${JSON.stringify(chunk)}\n\n`)
|
||||
|
||||
// Check for finish
|
||||
if (chunk.choices?.[0]?.finish_reason) {
|
||||
reply.raw.write('data: [DONE]\n\n')
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
reply.raw.end()
|
||||
} catch (streamError) {
|
||||
logger.error({ error: streamError }, "OpenCode Zen streaming failed")
|
||||
reply.raw.write(`data: ${JSON.stringify({ error: String(streamError) })}\n\n`)
|
||||
reply.raw.end()
|
||||
}
|
||||
} else {
|
||||
const response = await client.chat(chatRequest)
|
||||
return response
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({ error }, "OpenCode Zen chat request failed")
|
||||
return reply.status(500).send({ error: "Chat request failed" })
|
||||
}
|
||||
})
|
||||
|
||||
logger.info("OpenCode Zen routes registered - Free models available!")
|
||||
}
|
||||
153
packages/server/src/server/routes/zai.ts
Normal file
153
packages/server/src/server/routes/zai.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import { ZAIClient, type ZAIConfig, type ZAIChatRequest } from "../../integrations/zai-api"
|
||||
import { Logger } from "../../logger"
|
||||
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "fs"
|
||||
import { join } from "path"
|
||||
import { homedir } from "os"
|
||||
|
||||
interface ZAIRouteDeps {
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
// Config file path
|
||||
const CONFIG_DIR = join(homedir(), ".nomadarch")
|
||||
const CONFIG_FILE = join(CONFIG_DIR, "zai-config.json")
|
||||
|
||||
export async function registerZAIRoutes(
|
||||
app: FastifyInstance,
|
||||
deps: ZAIRouteDeps
|
||||
) {
|
||||
const logger = deps.logger.child({ component: "zai-routes" })
|
||||
|
||||
// Ensure config directory exists
|
||||
if (!existsSync(CONFIG_DIR)) {
|
||||
mkdirSync(CONFIG_DIR, { recursive: true })
|
||||
}
|
||||
|
||||
// Get Z.AI configuration
|
||||
app.get('/api/zai/config', async (request, reply) => {
|
||||
try {
|
||||
const config = getZAIConfig()
|
||||
return { config: { ...config, apiKey: config.apiKey ? '***' : undefined } }
|
||||
} catch (error) {
|
||||
logger.error({ error }, "Failed to get Z.AI config")
|
||||
return reply.status(500).send({ error: "Failed to get Z.AI configuration" })
|
||||
}
|
||||
})
|
||||
|
||||
// Update Z.AI configuration
|
||||
app.post('/api/zai/config', async (request, reply) => {
|
||||
try {
|
||||
const { enabled, apiKey, endpoint } = request.body as Partial<ZAIConfig>
|
||||
updateZAIConfig({ enabled, apiKey, endpoint })
|
||||
logger.info("Z.AI configuration updated")
|
||||
return { success: true, config: { enabled, endpoint, apiKey: apiKey ? '***' : undefined } }
|
||||
} catch (error) {
|
||||
logger.error({ error }, "Failed to update Z.AI config")
|
||||
return reply.status(500).send({ error: "Failed to update Z.AI configuration" })
|
||||
}
|
||||
})
|
||||
|
||||
// Test Z.AI connection
|
||||
app.post('/api/zai/test', async (request, reply) => {
|
||||
try {
|
||||
const config = getZAIConfig()
|
||||
if (!config.enabled) {
|
||||
return reply.status(400).send({ error: "Z.AI is not enabled" })
|
||||
}
|
||||
|
||||
const client = new ZAIClient(config)
|
||||
const isConnected = await client.testConnection()
|
||||
|
||||
return { connected: isConnected }
|
||||
} catch (error) {
|
||||
logger.error({ error }, "Z.AI connection test failed")
|
||||
return reply.status(500).send({ error: "Connection test failed" })
|
||||
}
|
||||
})
|
||||
|
||||
// List available models
|
||||
app.get('/api/zai/models', async (request, reply) => {
|
||||
try {
|
||||
const config = getZAIConfig()
|
||||
if (!config.enabled) {
|
||||
return reply.status(400).send({ error: "Z.AI is not enabled" })
|
||||
}
|
||||
|
||||
const client = new ZAIClient(config)
|
||||
const models = await client.listModels()
|
||||
|
||||
return { models: models.map(name => ({ name, provider: "zai" })) }
|
||||
} catch (error) {
|
||||
logger.error({ error }, "Failed to list Z.AI models")
|
||||
return reply.status(500).send({ error: "Failed to list models" })
|
||||
}
|
||||
})
|
||||
|
||||
// Chat completion endpoint
|
||||
app.post('/api/zai/chat', async (request, reply) => {
|
||||
try {
|
||||
const config = getZAIConfig()
|
||||
if (!config.enabled) {
|
||||
return reply.status(400).send({ error: "Z.AI is not enabled" })
|
||||
}
|
||||
|
||||
const client = new ZAIClient(config)
|
||||
const chatRequest = request.body as ZAIChatRequest
|
||||
|
||||
// Handle streaming
|
||||
if (chatRequest.stream) {
|
||||
reply.raw.writeHead(200, {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
'Connection': 'keep-alive',
|
||||
})
|
||||
|
||||
try {
|
||||
for await (const chunk of client.chatStream(chatRequest)) {
|
||||
reply.raw.write(`data: ${JSON.stringify(chunk)}\n\n`)
|
||||
|
||||
// Check for message_stop event
|
||||
if (chunk.type === "message_stop") {
|
||||
reply.raw.write('data: [DONE]\n\n')
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
reply.raw.end()
|
||||
} catch (streamError) {
|
||||
logger.error({ error: streamError }, "Z.AI streaming failed")
|
||||
reply.raw.write(`data: ${JSON.stringify({ error: String(streamError) })}\n\n`)
|
||||
reply.raw.end()
|
||||
}
|
||||
} else {
|
||||
const response = await client.chat(chatRequest)
|
||||
return response
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({ error }, "Z.AI chat request failed")
|
||||
return reply.status(500).send({ error: "Chat request failed" })
|
||||
}
|
||||
})
|
||||
|
||||
logger.info("Z.AI routes registered")
|
||||
}
|
||||
|
||||
// Configuration management functions using file-based storage
|
||||
function getZAIConfig(): ZAIConfig {
|
||||
try {
|
||||
if (existsSync(CONFIG_FILE)) {
|
||||
const data = readFileSync(CONFIG_FILE, 'utf-8')
|
||||
return JSON.parse(data)
|
||||
}
|
||||
return { enabled: false, endpoint: "https://api.z.ai/api/anthropic", timeout: 3000000 }
|
||||
} catch {
|
||||
return { enabled: false, endpoint: "https://api.z.ai/api/anthropic", timeout: 3000000 }
|
||||
}
|
||||
}
|
||||
|
||||
function updateZAIConfig(config: Partial<ZAIConfig>): void {
|
||||
const current = getZAIConfig()
|
||||
const updated = { ...current, ...config }
|
||||
writeFileSync(CONFIG_FILE, JSON.stringify(updated, null, 2))
|
||||
}
|
||||
@@ -4,6 +4,8 @@ import OpenCodeBinarySelector from "./opencode-binary-selector"
|
||||
import EnvironmentVariablesEditor from "./environment-variables-editor"
|
||||
import OllamaCloudSettings from "./settings/OllamaCloudSettings"
|
||||
import QwenCodeSettings from "./settings/QwenCodeSettings"
|
||||
import ZAISettings from "./settings/ZAISettings"
|
||||
import OpenCodeZenSettings from "./settings/OpenCodeZenSettings"
|
||||
|
||||
interface AdvancedSettingsModalProps {
|
||||
open: boolean
|
||||
@@ -27,41 +29,60 @@ const AdvancedSettingsModal: Component<AdvancedSettingsModalProps> = (props) =>
|
||||
</header>
|
||||
|
||||
<div class="border-b" style={{ "border-color": "var(--border-base)" }}>
|
||||
<div class="flex w-full px-6">
|
||||
<div class="flex w-full px-6 overflow-x-auto">
|
||||
<button
|
||||
class={`px-4 py-2 text-sm font-medium border-b-2 border-transparent hover:border-gray-300 ${
|
||||
activeTab() === "general"
|
||||
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
||||
: ""
|
||||
}`}
|
||||
class={`px-4 py-2 text-sm font-medium border-b-2 whitespace-nowrap ${activeTab() === "zen"
|
||||
? "border-orange-500 text-orange-400"
|
||||
: "border-transparent hover:border-gray-300"
|
||||
}`}
|
||||
onClick={() => setActiveTab("zen")}
|
||||
>
|
||||
🆓 Free Models
|
||||
</button>
|
||||
<button
|
||||
class={`px-4 py-2 text-sm font-medium border-b-2 whitespace-nowrap ${activeTab() === "general"
|
||||
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
||||
: "border-transparent hover:border-gray-300"
|
||||
}`}
|
||||
onClick={() => setActiveTab("general")}
|
||||
>
|
||||
General
|
||||
</button>
|
||||
<button
|
||||
class={`px-4 py-2 text-sm font-medium border-b-2 border-transparent hover:border-gray-300 ${
|
||||
activeTab() === "ollama"
|
||||
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
||||
: ""
|
||||
}`}
|
||||
class={`px-4 py-2 text-sm font-medium border-b-2 whitespace-nowrap ${activeTab() === "ollama"
|
||||
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
||||
: "border-transparent hover:border-gray-300"
|
||||
}`}
|
||||
onClick={() => setActiveTab("ollama")}
|
||||
>
|
||||
Ollama Cloud
|
||||
</button>
|
||||
<button
|
||||
class={`px-4 py-2 text-sm font-medium border-b-2 border-transparent hover:border-gray-300 ${
|
||||
activeTab() === "qwen"
|
||||
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
||||
: ""
|
||||
}`}
|
||||
class={`px-4 py-2 text-sm font-medium border-b-2 whitespace-nowrap ${activeTab() === "qwen"
|
||||
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
||||
: "border-transparent hover:border-gray-300"
|
||||
}`}
|
||||
onClick={() => setActiveTab("qwen")}
|
||||
>
|
||||
Qwen Code
|
||||
</button>
|
||||
<button
|
||||
class={`px-4 py-2 text-sm font-medium border-b-2 whitespace-nowrap ${activeTab() === "zai"
|
||||
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
||||
: "border-transparent hover:border-gray-300"
|
||||
}`}
|
||||
onClick={() => setActiveTab("zai")}
|
||||
>
|
||||
Z.AI
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex-1 overflow-y-auto">
|
||||
<Show when={activeTab() === "zen"}>
|
||||
<OpenCodeZenSettings />
|
||||
</Show>
|
||||
|
||||
<Show when={activeTab() === "general"}>
|
||||
<div class="p-6 space-y-6">
|
||||
<OpenCodeBinarySelector
|
||||
@@ -90,6 +111,10 @@ const AdvancedSettingsModal: Component<AdvancedSettingsModalProps> = (props) =>
|
||||
<Show when={activeTab() === "qwen"}>
|
||||
<QwenCodeSettings />
|
||||
</Show>
|
||||
|
||||
<Show when={activeTab() === "zai"}>
|
||||
<ZAISettings />
|
||||
</Show>
|
||||
</div>
|
||||
|
||||
<div class="px-6 py-4 border-t flex justify-end" style={{ "border-color": "var(--border-base)" }}>
|
||||
|
||||
222
packages/ui/src/components/settings/OpenCodeZenSettings.tsx
Normal file
222
packages/ui/src/components/settings/OpenCodeZenSettings.tsx
Normal file
@@ -0,0 +1,222 @@
|
||||
import { Component, createSignal, onMount, For, Show } from 'solid-js'
|
||||
import { Zap, CheckCircle, XCircle, Loader, Sparkles } from 'lucide-solid'
|
||||
|
||||
interface ZenModel {
|
||||
id: string
|
||||
name: string
|
||||
family?: string
|
||||
free: boolean
|
||||
reasoning?: boolean
|
||||
tool_call?: boolean
|
||||
limit?: {
|
||||
context: number
|
||||
output: number
|
||||
}
|
||||
}
|
||||
|
||||
const OpenCodeZenSettings: Component = () => {
|
||||
const [models, setModels] = createSignal<ZenModel[]>([])
|
||||
const [isLoading, setIsLoading] = createSignal(true)
|
||||
const [connectionStatus, setConnectionStatus] = createSignal<'idle' | 'testing' | 'connected' | 'failed'>('idle')
|
||||
const [error, setError] = createSignal<string | null>(null)
|
||||
|
||||
// Load models on mount
|
||||
onMount(async () => {
|
||||
await loadModels()
|
||||
await testConnection()
|
||||
})
|
||||
|
||||
const loadModels = async () => {
|
||||
setIsLoading(true)
|
||||
try {
|
||||
const response = await fetch('/api/opencode-zen/models')
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
setModels(data.models || [])
|
||||
setError(null)
|
||||
} else {
|
||||
throw new Error('Failed to load models')
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('Failed to load OpenCode Zen models:', err)
|
||||
setError('Failed to load models')
|
||||
} finally {
|
||||
setIsLoading(false)
|
||||
}
|
||||
}
|
||||
|
||||
const testConnection = async () => {
|
||||
setConnectionStatus('testing')
|
||||
try {
|
||||
const response = await fetch('/api/opencode-zen/test')
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
setConnectionStatus(data.connected ? 'connected' : 'failed')
|
||||
} else {
|
||||
setConnectionStatus('failed')
|
||||
}
|
||||
} catch (err) {
|
||||
setConnectionStatus('failed')
|
||||
}
|
||||
}
|
||||
|
||||
const formatNumber = (num: number): string => {
|
||||
if (num >= 1000000) return `${(num / 1000000).toFixed(1)}M`
|
||||
if (num >= 1000) return `${(num / 1000).toFixed(0)}K`
|
||||
return num.toString()
|
||||
}
|
||||
|
||||
return (
|
||||
<div class="space-y-6 p-6">
|
||||
{/* Header */}
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center gap-3">
|
||||
<div class="p-2 bg-gradient-to-br from-orange-500/20 to-yellow-500/20 rounded-lg">
|
||||
<Zap class="w-6 h-6 text-orange-400" />
|
||||
</div>
|
||||
<div>
|
||||
<h2 class="text-xl font-semibold text-white">OpenCode Zen</h2>
|
||||
<p class="text-sm text-zinc-400">Free AI models - No API key required!</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center gap-2">
|
||||
{connectionStatus() === 'testing' && (
|
||||
<span class="flex items-center gap-2 text-sm text-zinc-400">
|
||||
<Loader class="w-4 h-4 animate-spin" />
|
||||
Testing...
|
||||
</span>
|
||||
)}
|
||||
{connectionStatus() === 'connected' && (
|
||||
<span class="flex items-center gap-2 text-sm text-emerald-400">
|
||||
<CheckCircle class="w-4 h-4" />
|
||||
Connected
|
||||
</span>
|
||||
)}
|
||||
{connectionStatus() === 'failed' && (
|
||||
<span class="flex items-center gap-2 text-sm text-red-400">
|
||||
<XCircle class="w-4 h-4" />
|
||||
Offline
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Info Banner */}
|
||||
<div class="bg-gradient-to-r from-orange-500/10 via-yellow-500/10 to-orange-500/10 border border-orange-500/20 rounded-xl p-4">
|
||||
<div class="flex items-start gap-3">
|
||||
<Sparkles class="w-5 h-5 text-orange-400 mt-0.5" />
|
||||
<div>
|
||||
<h3 class="font-semibold text-orange-300 mb-1">Free Models Available!</h3>
|
||||
<p class="text-sm text-zinc-300">
|
||||
OpenCode Zen provides access to powerful AI models completely free of charge.
|
||||
These models are ready to use immediately - no API keys or authentication required!
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Models Grid */}
|
||||
<div class="space-y-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<h3 class="text-lg font-medium text-white">Available Free Models</h3>
|
||||
<button
|
||||
onClick={loadModels}
|
||||
disabled={isLoading()}
|
||||
class="flex items-center gap-2 px-3 py-1.5 text-sm bg-zinc-800 hover:bg-zinc-700 rounded-lg transition-colors"
|
||||
>
|
||||
{isLoading() ? <Loader class="w-4 h-4 animate-spin" /> : null}
|
||||
Refresh
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<Show when={error()}>
|
||||
<div class="p-4 bg-red-500/10 border border-red-500/30 rounded-lg text-red-400 text-sm">
|
||||
{error()}
|
||||
</div>
|
||||
</Show>
|
||||
|
||||
<Show when={isLoading()}>
|
||||
<div class="flex items-center justify-center py-12">
|
||||
<div class="flex items-center gap-3 text-zinc-400">
|
||||
<Loader class="w-6 h-6 animate-spin" />
|
||||
<span>Loading models...</span>
|
||||
</div>
|
||||
</div>
|
||||
</Show>
|
||||
|
||||
<Show when={!isLoading() && models().length > 0}>
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||
<For each={models()}>
|
||||
{(model) => (
|
||||
<div class="group bg-zinc-900/50 border border-zinc-800 hover:border-orange-500/50 rounded-xl p-4 transition-all">
|
||||
<div class="flex items-start justify-between mb-3">
|
||||
<div>
|
||||
<h4 class="font-semibold text-white group-hover:text-orange-300 transition-colors">
|
||||
{model.name}
|
||||
</h4>
|
||||
<p class="text-xs text-zinc-500 font-mono">{model.id}</p>
|
||||
</div>
|
||||
<span class="px-2 py-0.5 text-[10px] font-bold uppercase bg-emerald-500/20 text-emerald-400 rounded">
|
||||
FREE
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-wrap gap-2 mb-3">
|
||||
{model.reasoning && (
|
||||
<span class="px-2 py-0.5 text-[10px] bg-purple-500/20 text-purple-400 rounded">
|
||||
Reasoning
|
||||
</span>
|
||||
)}
|
||||
{model.tool_call && (
|
||||
<span class="px-2 py-0.5 text-[10px] bg-blue-500/20 text-blue-400 rounded">
|
||||
Tool Use
|
||||
</span>
|
||||
)}
|
||||
{model.family && (
|
||||
<span class="px-2 py-0.5 text-[10px] bg-zinc-700 text-zinc-400 rounded">
|
||||
{model.family}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{model.limit && (
|
||||
<div class="flex items-center gap-4 text-xs text-zinc-500">
|
||||
<span>Context: {formatNumber(model.limit.context)}</span>
|
||||
<span>Output: {formatNumber(model.limit.output)}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</For>
|
||||
</div>
|
||||
</Show>
|
||||
|
||||
<Show when={!isLoading() && models().length === 0 && !error()}>
|
||||
<div class="text-center py-12 text-zinc-500">
|
||||
<p>No free models available at this time.</p>
|
||||
<button
|
||||
onClick={loadModels}
|
||||
class="mt-4 px-4 py-2 text-sm bg-orange-500/20 text-orange-400 hover:bg-orange-500/30 rounded-lg transition-colors"
|
||||
>
|
||||
Try Again
|
||||
</button>
|
||||
</div>
|
||||
</Show>
|
||||
</div>
|
||||
|
||||
{/* Usage Info */}
|
||||
<div class="bg-zinc-900/50 border border-zinc-800 rounded-xl p-4">
|
||||
<h4 class="font-medium text-white mb-2">How to Use</h4>
|
||||
<ul class="text-sm text-zinc-400 space-y-1">
|
||||
<li>• Select any Zen model from the model picker in chat</li>
|
||||
<li>• No API key configuration needed - just start chatting!</li>
|
||||
<li>• Models support streaming, reasoning, and tool use</li>
|
||||
<li>• Rate limits may apply during high demand periods</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default OpenCodeZenSettings
|
||||
249
packages/ui/src/components/settings/ZAISettings.tsx
Normal file
249
packages/ui/src/components/settings/ZAISettings.tsx
Normal file
@@ -0,0 +1,249 @@
|
||||
import { Component, createSignal, onMount, Show } from 'solid-js'
|
||||
import toast from 'solid-toast'
|
||||
import { Button } from '@suid/material'
|
||||
import { Cpu, CheckCircle, XCircle, Loader, Key, ExternalLink } from 'lucide-solid'
|
||||
|
||||
interface ZAIConfig {
|
||||
enabled: boolean
|
||||
apiKey?: string
|
||||
endpoint?: string
|
||||
}
|
||||
|
||||
const ZAISettings: Component = () => {
|
||||
const [config, setConfig] = createSignal<ZAIConfig>({ enabled: false })
|
||||
const [isLoading, setIsLoading] = createSignal(false)
|
||||
const [isTesting, setIsTesting] = createSignal(false)
|
||||
const [connectionStatus, setConnectionStatus] = createSignal<'idle' | 'testing' | 'connected' | 'failed'>('idle')
|
||||
const [models, setModels] = createSignal<string[]>([])
|
||||
|
||||
// Load config on mount
|
||||
onMount(async () => {
|
||||
try {
|
||||
const response = await fetch('/api/zai/config')
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
setConfig(data.config)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to load Z.AI config:', error)
|
||||
}
|
||||
})
|
||||
|
||||
const handleConfigChange = (field: keyof ZAIConfig, value: any) => {
|
||||
setConfig(prev => ({ ...prev, [field]: value }))
|
||||
setConnectionStatus('idle')
|
||||
}
|
||||
|
||||
const saveConfig = async () => {
|
||||
setIsLoading(true)
|
||||
try {
|
||||
const response = await fetch('/api/zai/config', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(config())
|
||||
})
|
||||
|
||||
if (response.ok) {
|
||||
toast.success('Z.AI configuration saved', {
|
||||
duration: 3000,
|
||||
icon: <CheckCircle class="w-4 h-4 text-green-500" />
|
||||
})
|
||||
} else {
|
||||
throw new Error('Failed to save config')
|
||||
}
|
||||
} catch (error) {
|
||||
toast.error('Failed to save Z.AI configuration', {
|
||||
duration: 5000,
|
||||
icon: <XCircle class="w-4 h-4 text-red-500" />
|
||||
})
|
||||
} finally {
|
||||
setIsLoading(false)
|
||||
}
|
||||
}
|
||||
|
||||
const testConnection = async () => {
|
||||
setIsTesting(true)
|
||||
setConnectionStatus('testing')
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/zai/test', {
|
||||
method: 'POST'
|
||||
})
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
setConnectionStatus(data.connected ? 'connected' : 'failed')
|
||||
|
||||
if (data.connected) {
|
||||
toast.success('Successfully connected to Z.AI', {
|
||||
duration: 3000,
|
||||
icon: <CheckCircle class="w-4 h-4 text-green-500" />
|
||||
})
|
||||
|
||||
// Load models after successful connection
|
||||
loadModels()
|
||||
} else {
|
||||
toast.error('Failed to connect to Z.AI', {
|
||||
duration: 3000,
|
||||
icon: <XCircle class="w-4 h-4 text-red-500" />
|
||||
})
|
||||
}
|
||||
} else {
|
||||
throw new Error('Connection test failed')
|
||||
}
|
||||
} catch (error) {
|
||||
setConnectionStatus('failed')
|
||||
toast.error('Connection test failed', {
|
||||
duration: 3000,
|
||||
icon: <XCircle class="w-4 h-4 text-red-500" />
|
||||
})
|
||||
} finally {
|
||||
setIsTesting(false)
|
||||
}
|
||||
}
|
||||
|
||||
const loadModels = async () => {
|
||||
try {
|
||||
const response = await fetch('/api/zai/models')
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
setModels(data.models.map((m: any) => m.name))
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to load models:', error)
|
||||
}
|
||||
}
|
||||
|
||||
const getStatusIcon = () => {
|
||||
switch (connectionStatus()) {
|
||||
case 'testing':
|
||||
return <Loader class="w-4 h-4 animate-spin" />
|
||||
case 'connected':
|
||||
return <CheckCircle class="w-4 h-4 text-green-500" />
|
||||
case 'failed':
|
||||
return <XCircle class="w-4 h-4 text-red-500" />
|
||||
default:
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div class="space-y-6 p-6">
|
||||
<div class="flex items-center gap-2 mb-4">
|
||||
<Cpu class="w-6 h-6 text-blue-500" />
|
||||
<h2 class="text-xl font-semibold">Z.AI Integration</h2>
|
||||
</div>
|
||||
|
||||
<div class="bg-blue-50 dark:bg-blue-900/20 border border-blue-200 dark:border-blue-800 rounded-lg p-4 mb-4">
|
||||
<h3 class="font-semibold text-blue-900 dark:text-blue-100 mb-2">GLM Coding Plan</h3>
|
||||
<p class="text-sm text-blue-800 dark:text-blue-200">
|
||||
Z.AI provides access to Claude models through their GLM Coding Plan. Get your API key from the{' '}
|
||||
<a
|
||||
href="https://z.ai/manage-apikey/apikey-list"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
class="underline hover:no-underline inline-flex items-center gap-1"
|
||||
>
|
||||
Z.AI Platform <ExternalLink class="w-3 h-3" />
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="space-y-4">
|
||||
{/* Enable/Disable Toggle */}
|
||||
<div class="flex items-center justify-between">
|
||||
<label class="font-medium">Enable Z.AI</label>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={config().enabled}
|
||||
onChange={(e) => handleConfigChange('enabled', e.target.checked)}
|
||||
class="w-4 h-4"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* API Key */}
|
||||
<div>
|
||||
<label class="block font-medium mb-2">
|
||||
<div class="flex items-center gap-2">
|
||||
<Key class="w-4 h-4" />
|
||||
API Key
|
||||
</div>
|
||||
</label>
|
||||
<input
|
||||
type="password"
|
||||
placeholder="Enter your Z.AI API key"
|
||||
value={config().apiKey || ''}
|
||||
onChange={(e) => handleConfigChange('apiKey', e.target.value)}
|
||||
class="w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 bg-white dark:bg-gray-800"
|
||||
disabled={!config().enabled}
|
||||
/>
|
||||
<p class="text-xs text-gray-500 mt-1">
|
||||
Get your key from <a href="https://z.ai/manage-apikey/apikey-list" target="_blank" class="text-blue-500 hover:underline">z.ai/manage-apikey</a>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Endpoint */}
|
||||
<div>
|
||||
<label class="block font-medium mb-2">Endpoint</label>
|
||||
<input
|
||||
type="text"
|
||||
placeholder="https://api.z.ai/api/anthropic"
|
||||
value={config().endpoint || ''}
|
||||
onChange={(e) => handleConfigChange('endpoint', e.target.value)}
|
||||
class="w-full px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 bg-white dark:bg-gray-800"
|
||||
disabled={!config().enabled}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Test Connection */}
|
||||
<div class="flex items-center gap-2">
|
||||
<Button
|
||||
variant="outlined"
|
||||
onClick={testConnection}
|
||||
disabled={!config().enabled || isTesting()}
|
||||
class="flex items-center gap-2"
|
||||
>
|
||||
{getStatusIcon()}
|
||||
{isTesting() ? 'Testing...' : 'Test Connection'}
|
||||
</Button>
|
||||
|
||||
<Show when={connectionStatus() === 'connected'}>
|
||||
<span class="text-green-600 text-sm">Connected successfully</span>
|
||||
</Show>
|
||||
<Show when={connectionStatus() === 'failed'}>
|
||||
<span class="text-red-600 text-sm">Connection failed</span>
|
||||
</Show>
|
||||
</div>
|
||||
|
||||
{/* Available Models */}
|
||||
<Show when={models().length > 0}>
|
||||
<div>
|
||||
<label class="block font-medium mb-2">Available Models</label>
|
||||
<div class="grid grid-cols-1 gap-2">
|
||||
{models().map(model => (
|
||||
<div class="p-3 border border-gray-200 dark:border-gray-700 rounded-md bg-gray-50 dark:bg-gray-800">
|
||||
<code class="text-sm font-mono">{model}</code>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</Show>
|
||||
|
||||
{/* Save Configuration */}
|
||||
<div class="flex justify-end">
|
||||
<Button
|
||||
variant="contained"
|
||||
onClick={saveConfig}
|
||||
disabled={isLoading()}
|
||||
class="flex items-center gap-2"
|
||||
>
|
||||
{isLoading() ? <Loader class="w-4 h-4 animate-spin" /> : null}
|
||||
Save Configuration
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default ZAISettings
|
||||
Reference in New Issue
Block a user