fix: bulletproof command handler + auto-restart + README overhaul

- sendStreamingMessage: replaced broken simulated streaming with reliable
  HTML send + stripped plain text fallback (was silently failing)
- Added global unhandledRejection guard (catches async errors that
  sequentialize middleware would swallow)
- restart.sh: auto-restart loop on crash (3s delay) instead of bare node
- README: comprehensive update with self-learning memory, curiosity engine,
  memory architecture diagram, updated command table, updated comparison
This commit is contained in:
admin
2026-05-05 14:49:49 +00:00
Unverified
parent a84df42932
commit 050dc6ebe3
4 changed files with 171 additions and 33 deletions

View File

@@ -727,6 +727,11 @@ export async function initBot(config, api, tools, skills, agents) {
logger.error('Bot error:', err.message || err);
});
// ── Global unhandled rejection guard ──
// Catches async errors that would otherwise escape the bot's error
// boundary (per the commit note, the sequentialize middleware can
// swallow them). Log-only on purpose: a single failed promise must
// not take down the whole process.
process.on('unhandledRejection', (reason, promise) => {
// reason may be an Error or any thrown value — prefer .message when present
logger.error('Unhandled rejection:', reason?.message || reason);
});
// ── Express + WebSocket server (keep for webhook compatibility) ──
const app = express();
app.use(express.json());

View File

@@ -477,29 +477,30 @@ export class StreamConsumer {
/**
 * Send a complete response to the chat, preferring HTML formatting.
 * Used by command handlers (/start, /tools, etc.) when the full text is
 * already available (no SSE streaming involved).
 *
 * Strategy: convert the markdown text to HTML and reply with
 * parse_mode 'HTML' (most reliable). If Telegram rejects the HTML
 * (e.g. unsupported/unbalanced tags), fall back to a markdown-stripped
 * plain-text reply. All failures are logged; this function never throws.
 *
 * @param {object} ctx - grammy context
 * @param {string} text - Full markdown text to send
 * @param {object} [options] - Unused; kept for backward compatibility
 *   with older call sites that passed streaming options
 * @returns {Promise<void>}
 */
export async function sendStreamingMessage(ctx, text, options = {}) {
  if (!text) return;
  const html = markdownToHtml(text);
  // Try sending as formatted HTML first — most reliable
  try {
    await ctx.reply(html, { parse_mode: 'HTML' });
    return;
  } catch (e) {
    logger.warn(`sendStreamingMessage HTML failed (${e.message}), trying plain`);
  }
  // Fallback: stripped plain text (parse_mode explicitly disabled so
  // leftover markup characters can't trigger a second parse failure)
  try {
    await ctx.reply(stripMarkdown(text), { parse_mode: undefined });
  } catch (e2) {
    logger.error(`sendStreamingMessage plain also failed: ${e2.message}`);
  }
}