Fix UI freeze: Optimize reactive memos and remove trigger loops

Critical performance fixes for MULTIX chat mode:

1. isAgentThinking - Simplified to check only the last message
   - Previously iterated ALL messages with .some() on every store update
   - Each getMessage() call created a reactive subscription
   - Now checks only the last message (O(1) instead of O(n)); see the sketch below

2. lastAssistantIndex - Memoized with createMemo
   - Changed from a plain function to createMemo so the result is cached
   - Scans the last five messages first, falling back to a full scan only in the
     rare case no assistant message is found there

3. Auto-scroll effect - Removed isAgentThinking dependency
   - The thinking-based scroll was firing on every reactive update
   - Now triggers only when the message count changes
   - Streaming scroll is handled by the interval-based effect (sketched at the end of this diff)

These combined fixes prevent the cascading reactive loop that
was freezing the UI during message send.
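For context, here is a minimal SolidJS sketch (not from this repo; it uses a simplified message shape) of why the old O(n) check thrashed and the new O(1) check does not: reading a store field inside a tracking scope subscribes the computation to that field, so a .some() over every message re-runs on any message update, while reading only the last message subscribes to a single entry.

// Sketch only: simplified message shape, not the real message store types.
import { createRoot, createMemo, createEffect } from "solid-js";
import { createStore } from "solid-js/store";

type Msg = { role: "user" | "assistant"; status: "done" | "streaming" | "sending" };

createRoot(() => {
  const [messages, setMessages] = createStore<Msg[]>([
    { role: "user", status: "done" },
    { role: "assistant", status: "streaming" },
  ]);

  // Old pattern: reads every message, so ANY message update
  // (e.g. each streaming chunk) re-runs this memo.
  const anyStreaming = createMemo(() =>
    messages.some(
      (m) => m.role === "assistant" && (m.status === "streaming" || m.status === "sending"),
    ),
  );

  // New pattern: reads only the last message, O(1) subscriptions per update.
  const lastStreaming = createMemo(() => {
    const last = messages[messages.length - 1];
    return last?.role === "assistant" && (last.status === "streaming" || last.status === "sending");
  });

  createEffect(() => console.log(anyStreaming(), lastStreaming()));

  // Updating the last message re-runs both memos; updating any other
  // message would re-run only the O(n) version.
  setMessages(1, "status", "done");
});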
Gemini AI
2025-12-24 22:49:31 +04:00
parent 961c8743d3
commit 6c63bb7d7d


@@ -96,15 +96,26 @@ export default function MultiTaskChat(props: MultiTaskChatProps) {
   // Message store integration
   const messageStore = () => messageStoreBus.getOrCreate(props.instanceId);
-  const lastAssistantIndex = () => {
+  // Memoized to prevent recalculation on every render
+  const lastAssistantIndex = createMemo(() => {
     const ids = filteredMessageIds();
     if (ids.length === 0) return -1;
     const store = messageStore();
-    for (let i = ids.length - 1; i >= 0; i--) {
+    // Only check last few messages to find the last assistant (optimization)
+    const startIndex = Math.max(0, ids.length - 5);
+    for (let i = ids.length - 1; i >= startIndex; i--) {
       const msg = store.getMessage(ids[i]);
       if (msg?.role === "assistant") return i;
     }
+    // If not found in last 5, fall back to full scan (rare case)
+    for (let i = startIndex - 1; i >= 0; i--) {
+      const msg = store.getMessage(ids[i]);
+      if (msg?.role === "assistant") return i;
+    }
     return -1;
-  };
+  });
   // Filter messages based on selected task - use store's session messages for the task session
   const filteredMessageIds = createMemo(() => {
@@ -195,22 +206,12 @@ export default function MultiTaskChat(props: MultiTaskChatProps) {
     // Show thinking while we're actively sending
     if (isSending()) return true;
-    const store = messageStore();
-    // Check for streaming in the specific task session
-    const taskSessionId = activeTaskSessionId();
-    const sessionRecord = store.state.sessions[taskSessionId];
-    const sessionMessages = sessionRecord ? sessionRecord.messageIds : [];
-    const isAnyStreaming = sessionMessages.some((id: string) => {
-      const m = store.getMessage(id);
-      return m?.role === "assistant" && (m.status === "streaming" || m.status === "sending");
-    });
-    if (isAnyStreaming) return true;
-    // Also check the filtered message IDs (for tasks)
+    // Only check the last message instead of iterating all messages
+    // This prevents O(n) reactive subscriptions during streaming
     const ids = filteredMessageIds();
     if (ids.length === 0) return false;
+    const store = messageStore();
     const lastMsg = store.getMessage(ids[ids.length - 1]);
     return lastMsg?.role === "assistant" && (lastMsg.status === "streaming" || lastMsg.status === "sending");
   });
@@ -234,22 +235,16 @@ export default function MultiTaskChat(props: MultiTaskChatProps) {
   createEffect(() => {
     const ids = filteredMessageIds();
     const count = ids.length;
-    const thinking = isAgentThinking();
+    // Only scroll when message COUNT changes, not on every store update
+    // This prevents the effect from firing on every streaming chunk
+    // Note: Streaming scrolling is handled by the interval in the isAgentThinking effect above
     if (count !== lastScrolledCount && count > 0 && !userScrolling()) {
       lastScrolledCount = count;
       requestAnimationFrame(() => {
         setTimeout(scrollToBottom, 50);
       });
     }
-    // Also scroll when thinking first starts
-    if (thinking && count > 0 && !userScrolling()) {
-      requestAnimationFrame(() => {
-        setTimeout(scrollToBottom, 50);
-      });
-    }
   });
   // Scroll event listener to detect user scrolling
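The "interval in the isAgentThinking effect above" that the new comment mentions is outside this diff. A rough sketch of that pattern, assuming the component's existing isAgentThinking, userScrolling, and scrollToBottom helpers and a made-up 250 ms polling rate (the real effect may differ):

  // Hypothetical sketch, not part of this commit: while the agent is
  // thinking/streaming, poll and keep the view pinned to the bottom,
  // so the count-based effect never has to react to streaming chunks.
  createEffect(() => {
    if (!isAgentThinking()) return;
    const interval = setInterval(() => {
      if (!userScrolling()) scrollToBottom();
    }, 250); // assumed polling rate
    onCleanup(() => clearInterval(interval)); // onCleanup from "solid-js"
  });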