fix: Ollama click handler and stream completion

- Fixed sidebar Ollama Cloud section to always open the AI Settings modal on click
- Improved stream handling with a proper buffer flush when the response ends
- Added a 120s timeout for Ollama Cloud requests
- Added more detailed logging for debugging
- Fixed activeRequest cleanup on request and response errors
Author: Gemini AI
Date: 2025-12-20 13:29:30 +04:00
parent 94ae6fa8c5
commit 20aef0fd89
2 changed files with 40 additions and 13 deletions


@@ -62,10 +62,13 @@ export async function streamChat(messages, model = 'gpt-oss:120b', onChunk, onCo
activeRequest = req;
let fullResponse = '';
log(`Response status: ${res.statusCode}`);
if (res.statusCode !== 200) {
let errBody = '';
res.on('data', (c) => errBody += c.toString());
res.on('end', () => {
log(`API Error: ${errBody}`);
onError(new Error(`Ollama API Error ${res.statusCode}: ${errBody}`));
});
return;
@@ -77,7 +80,7 @@ export async function streamChat(messages, model = 'gpt-oss:120b', onChunk, onCo
res.on('data', (chunk) => {
buffer += chunk;
const lines = buffer.split('\n');
buffer = lines.pop();
buffer = lines.pop(); // Keep incomplete line in buffer
for (const line of lines) {
if (!line.trim()) continue;
@@ -88,22 +91,53 @@ export async function streamChat(messages, model = 'gpt-oss:120b', onChunk, onCo
fullResponse += content;
onChunk(content);
}
if (parsed.done) {
// Request is done according to Ollama API
// Check if this is the final message
if (parsed.done === true) {
log('Received done signal from Ollama');
}
} catch (e) {
// Ignore malformed JSON chunks
log(`Parse error (ignored): ${e.message}`);
}
}
});
res.on('end', () => {
// Process any remaining data in buffer
if (buffer.trim()) {
try {
const parsed = JSON.parse(buffer);
const content = parsed.message?.content || '';
if (content) {
fullResponse += content;
onChunk(content);
}
} catch (e) {
// Final chunk wasn't valid JSON, that's fine
}
}
log(`Stream complete. Total response length: ${fullResponse.length}`);
onComplete(fullResponse);
activeRequest = null;
});
res.on('error', (e) => {
log(`Response error: ${e.message}`);
onError(e);
activeRequest = null;
});
});
req.on('error', (e) => {
log(`Request error: ${e.message}`);
onError(e);
activeRequest = null;
});
req.setTimeout(120000, () => {
log('Request timeout after 120s');
req.destroy();
onError(new Error('Ollama Cloud request timeout'));
});
req.setNoDelay(true);
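
Taken together, the stream handling boils down to line-buffered NDJSON parsing with a final flush when the response ends. A minimal sketch of that pattern (the handleResponse name and the standalone shape are illustrative; only the buffering, the end-of-stream flush, and the Ollama message fields come from this commit):

```ts
import type { IncomingMessage } from 'http';

// Sketch of the line-buffered NDJSON handling used in streamChat.
// Ollama streams one JSON object per line, and a network chunk can end
// mid-line, so the incomplete tail is kept in `buffer` until more data arrives.
function handleResponse(
  res: IncomingMessage,
  onChunk: (text: string) => void,
  onComplete: (full: string) => void,
) {
  let buffer = '';
  let fullResponse = '';

  const consume = (line: string) => {
    if (!line.trim()) return;
    try {
      const parsed = JSON.parse(line);
      const content = parsed.message?.content || '';
      if (content) {
        fullResponse += content;
        onChunk(content);
      }
    } catch {
      // Ignore malformed JSON fragments
    }
  };

  res.on('data', (chunk) => {
    buffer += chunk.toString();
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? ''; // keep the incomplete trailing line
    lines.forEach(consume);
  });

  res.on('end', () => {
    consume(buffer); // flush any leftover line (the fix added in this commit)
    onComplete(fullResponse);
  });
}
```

The flush in the 'end' handler is what keeps the last token of a response from being dropped when the final JSON line arrives without a trailing newline.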


@@ -740,16 +740,9 @@ export const Sidebar = () => {
<div
className={`flex items-center gap-2 text-xs cursor-pointer transition-colors p-2 hover:bg-white/5 rounded-lg mx-2 group ${state.chatSettings.ollamaEnabled ? '' : 'opacity-60'
}`}
onClick={async () => {
// Check Ollama status and open settings if not configured
const electron = (window as any).electron;
if (electron?.ollama) {
const status = await electron.ollama.getKeyStatus();
if (!status.hasKey) {
// Open AI Settings modal - dispatch a custom event
onClick={() => {
// Always open AI Settings modal when clicked
window.dispatchEvent(new CustomEvent('open-ai-settings'));
}
}
}}
title={state.chatSettings.ollamaEnabled ? "Ollama Cloud connected" : "Click to configure Ollama Cloud"}
>
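
The dispatched 'open-ai-settings' event needs a listener elsewhere in the app; that side is not part of this diff. A sketch of what such a subscriber could look like, assuming a React modal component named AISettingsModal with local open state (both are assumptions, not shown in this commit):

```tsx
import { useEffect, useState } from 'react';

// Hypothetical subscriber for the 'open-ai-settings' event dispatched above.
// Only the event name comes from this commit; the component and state are assumed.
export const AISettingsModal = () => {
  const [open, setOpen] = useState(false);

  useEffect(() => {
    const handler = () => setOpen(true);
    window.addEventListener('open-ai-settings', handler);
    return () => window.removeEventListener('open-ai-settings', handler);
  }, []);

  if (!open) return null;
  return <div className="modal">{/* AI settings form */}</div>;
};
```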