fix: revert streaming to prevent webhook errors

- Removed SSE streaming from chatWithAI()
- Keep sendStreamingMessage() for chunked delivery
- Self-correction loops still active
- Messages will be delivered in chunks with a typing indicator
This commit is contained in:
admin
2026-05-05 13:24:59 +00:00
Unverified
parent 96da24cf86
commit d3bf90f985

View File

@@ -155,50 +155,31 @@ export async function initBot(config, api, tools, skills, agents) {
messages, messages,
temperature: opts.temperature ?? 0.7, temperature: opts.temperature ?? 0.7,
max_tokens: opts.maxTokens || 4096, max_tokens: opts.maxTokens || 4096,
stream: true, // Enable streaming
}; };
if (tools.length) body.tools = tools; if (tools.length) body.tools = tools;
const response = await api.client.post('/chat/completions', body, { const response = await api.client.post('/chat/completions', body);
responseType: 'stream', const choice = response.data.choices?.[0];
}); if (!choice) return '❌ No response from model.';
const reader = response.data; const msg = choice.message;
let fullText = ''; if (msg.tool_calls?.length) {
const parts = [];
for await (const chunk of reader) { for (const tc of msg.tool_calls) {
const lines = chunk.toString().split('\n').filter(line => line.trim()); const fn = tc.function;
try {
for (const line of lines) { const handler = toolHandlers[fn.name];
if (line === 'data: [DONE]') continue; if (!handler) { parts.push(`❌ Unknown tool: ${fn.name}`); continue; }
const args = JSON.parse(fn.arguments);
if (line.startsWith('data: ')) { const result = await handler(args);
const data = line.slice(6); parts.push(`${result}`);
try { } catch (e) {
const parsed = JSON.parse(data); parts.push(`❌ Tool ${fn.name} error: ${e.message}`);
const content = parsed.choices?.[0]?.delta?.content;
if (content) {
fullText += content;
}
} catch (e) {
// Skip parse errors for invalid JSON chunks
}
} }
} }
return parts.join('\n\n');
} }
return msg.content || '✅ Done.';
if (!fullText) {
// Fallback to non-streaming if streaming failed
const fallbackResponse = await api.client.post('/chat/completions', {
model,
messages,
temperature: opts.temperature ?? 0.7,
max_tokens: opts.maxTokens || 4096,
});
return fallbackResponse.data.choices[0].message;
}
return { content: fullText };
} catch (error) { } catch (error) {
logger.error('AI error:', error.response?.data || error.message); logger.error('AI error:', error.response?.data || error.message);
return `${error.response?.data?.error?.message || error.message}`; return `${error.response?.data?.error?.message || error.message}`;
@@ -457,7 +438,9 @@ export async function initBot(config, api, tools, skills, agents) {
{ role: 'system', content: buildSystemPrompt(svc) }, { role: 'system', content: buildSystemPrompt(svc) },
{ role: 'user', content: text }, { role: 'user', content: text },
]); ]);
await sendStreamingMessage(ctx, result.content || result);
// Send with streaming effect
await sendStreamingMessage(ctx, result);
}); });
}); });