v0.5.0: Binary-Free Mode - No OpenCode binary required

Major Features:
- Native session management without OpenCode binary
- Provider routing: OpenCode Zen (free), Qwen OAuth, Z.AI
- Streaming chat with tool execution loop
- Mode detection API (/api/meta/mode); see the client sketch after this list
- MCP integration fix (resolved infinite loading)
- NomadArch Native option in UI with comparison info
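
A minimal sketch of how a UI client might call the mode detection endpoint; the ModeInfo shape and its fields are assumptions for illustration, not the actual response contract of /api/meta/mode.

// Hypothetical mode detection client; field names below are assumed.
interface ModeInfo {
  mode: "native" | "binary"   // assumed: which backend is serving requests
  binaryAvailable?: boolean   // assumed: whether an OpenCode binary was found
}

export async function detectMode(baseUrl = ""): Promise<ModeInfo> {
  const res = await fetch(`${baseUrl}/api/meta/mode`)
  if (!res.ok) throw new Error(`mode detection failed: HTTP ${res.status}`)
  return (await res.json()) as ModeInfo
}

A UI could call detectMode() once at startup to decide whether to offer the NomadArch Native workflow or the OpenCode binary workflow.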

🆓 Free Models (No API Key):
- GPT-5 Nano (400K context)
- Grok Code Fast 1 (256K context)
- GLM-4.7 (205K context)
- Doubao Seed Code (256K context)
- Big Pickle (200K context)

📦 New Files:
- session-store.ts: Native session persistence (illustrative sketch after this list)
- native-sessions.ts: REST API for sessions
- lite-mode.ts: UI mode detection client
- native-sessions.ts (UI): SolidJS store
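
The sketch below shows one hypothetical shape a native session store could take; the StoredSession fields, function names, and the sessions.json path are illustrative assumptions, not the contents of session-store.ts.

// Hypothetical in-memory session store with JSON-file persistence (illustrative only).
import { randomUUID } from "node:crypto"
import { promises as fs } from "node:fs"

interface StoredSession {
  id: string
  title: string
  createdAt: number
  messages: { role: "user" | "assistant"; content: string }[]
}

const sessions = new Map<string, StoredSession>()

export function createSession(title: string): StoredSession {
  const session: StoredSession = { id: randomUUID(), title, createdAt: Date.now(), messages: [] }
  sessions.set(session.id, session)
  return session
}

// Write every session to a single JSON file (assumed location).
export async function persistSessions(path = "./sessions.json"): Promise<void> {
  await fs.writeFile(path, JSON.stringify([...sessions.values()], null, 2), "utf8")
}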

🔧 Updated:
- All installers: Optional binary download
- All launchers: Mode detection display
- Binary selector: Added NomadArch Native option
- README: Binary-Free Mode documentation
Author: Gemini AI
Date: 2025-12-26 02:08:13 +04:00
Parent: 8dddf4d0cf
Commit: 4bd2893864
83 changed files with 10678 additions and 1290 deletions


@@ -170,13 +170,52 @@ function handleWorkspaceEvent(event: WorkspaceEventPayload) {
}
}
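// Buffer incoming workspace log entries per workspace id and flush them into the
// reactive log store at most once per 100ms window, so a burst of log lines
// produces a single batched store update instead of one update per entry.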
const logBuffer = new Map<string, LogEntry[]>()
let logFlushPending = false
function flushLogs() {
if (logBuffer.size === 0) {
logFlushPending = false
return
}
batch(() => {
setInstanceLogs((prev) => {
const next = new Map(prev)
for (const [id, newEntries] of logBuffer) {
const existing = next.get(id) ?? []
// Keep only last MAX_LOG_ENTRIES
const combined = [...existing, ...newEntries]
const updated = combined.slice(-MAX_LOG_ENTRIES)
next.set(id, updated)
}
return next
})
})
logBuffer.clear()
logFlushPending = false
}
function handleWorkspaceLog(entry: WorkspaceLogEntry) {
const logEntry: LogEntry = {
timestamp: new Date(entry.timestamp).getTime(),
level: (entry.level as LogEntry["level"]) ?? "info",
message: entry.message,
}
addLog(entry.workspaceId, logEntry)
// Only buffer if streaming is enabled for this instance, to save memory
if (!isInstanceLogStreaming(entry.workspaceId)) {
return
}
const currentBuffer = logBuffer.get(entry.workspaceId) ?? []
currentBuffer.push(logEntry)
logBuffer.set(entry.workspaceId, currentBuffer)
if (!logFlushPending) {
logFlushPending = true
setTimeout(flushLogs, 100) // Throttle updates to every 100ms
}
}
function ensureLogContainer(id: string) {