fix(chat): prevent duplicate renderer requests and thinking messages (#870)

Co-authored-by: Cursor Agent <cursoragent@cursor.com>
Co-authored-by: Haze <hazeone@users.noreply.github.com>
This commit is contained in:
Haze
2026-04-18 15:23:16 +08:00
committed by GitHub
Unverified
parent 6d67a77633
commit 24b43335f8
12 changed files with 789 additions and 119 deletions

View File

@@ -1,6 +1,6 @@
{
"name": "clawx",
"version": "0.3.10-beta.1",
"version": "0.3.10-beta.2",
"pnpm": {
"onlyBuiltDependencies": [
"@discordjs/opus",

View File

@@ -491,6 +491,7 @@ export function ChatInput({ onSend, onStop, disabled = false, sending = false, i
onPaste={handlePaste}
placeholder={disabled ? t('composer.gatewayDisconnectedPlaceholder') : ''}
disabled={disabled}
data-testid="chat-composer-input"
className="min-h-[40px] max-h-[200px] resize-none border-0 focus-visible:ring-0 focus-visible:ring-offset-0 shadow-none bg-transparent py-2.5 px-2 text-[15px] placeholder:text-muted-foreground/60 leading-relaxed"
rows={1}
/>
@@ -501,6 +502,7 @@ export function ChatInput({ onSend, onStop, disabled = false, sending = false, i
onClick={sending ? handleStop : handleSend}
disabled={sending ? !canStop : !canSend}
size="icon"
data-testid="chat-composer-send"
className={`shrink-0 h-10 w-10 rounded-full transition-colors ${
(sending || canSend)
? 'bg-black/5 dark:bg-white/10 text-foreground hover:bg-black/10 dark:hover:bg-white/20'

View File

@@ -25,6 +25,44 @@ function cleanUserText(text: string): string {
.trim();
}
/**
 * Canonicalize a streamed text fragment for comparison: convert CRLF line
 * endings to LF and strip surrounding whitespace. Non-string input yields ''.
 */
function normalizeProgressiveText(text: string | undefined): string {
  if (typeof text !== 'string') return '';
  return text.replace(/\r\n/g, '\n').trim();
}
/**
 * Collapse progressively streamed text chunks so cumulative re-sends do not
 * render as duplicates. A chunk that (after normalization) equals or is a
 * prefix of the previously kept chunk is dropped; a chunk that extends the
 * previous one replaces it; anything else is appended. Original (raw,
 * un-normalized) chunk text is what gets kept. Order-sensitive: only the
 * immediately preceding kept chunk is compared against.
 */
function compactProgressiveParts(parts: string[]): string[] {
const compacted: string[] = [];
for (const part of parts) {
const current = normalizeProgressiveText(part);
// Skip whitespace-only / non-content chunks entirely.
if (!current) continue;
const previous = compacted.at(-1);
if (!previous) {
compacted.push(part);
continue;
}
const normalizedPrevious = normalizeProgressiveText(previous);
// NOTE(review): this branch looks unreachable — whitespace-only parts are
// filtered above before being pushed, so `previous` always normalizes
// non-empty. Kept for safety; confirm before removing.
if (!normalizedPrevious) {
compacted[compacted.length - 1] = part;
continue;
}
// Current chunk repeats (or is a prefix of) what is already kept: drop it.
if (current === normalizedPrevious || normalizedPrevious.startsWith(current)) {
continue;
}
// Current chunk is a cumulative extension of the previous one: replace it.
if (current.startsWith(normalizedPrevious)) {
compacted[compacted.length - 1] = part;
continue;
}
// Genuinely new content: keep it alongside the previous chunk.
compacted.push(part);
}
return compacted;
}
/**
* Extract displayable text from a message's content field.
* Handles both string content and array-of-blocks content.
@@ -49,7 +87,7 @@ export function extractText(message: RawMessage | unknown): string {
}
}
}
const combined = parts.join('\n\n');
const combined = compactProgressiveParts(parts).join('\n\n');
result = combined.trim().length > 0 ? combined : '';
} else if (typeof msg.text === 'string') {
// Fallback: try .text field
@@ -85,7 +123,7 @@ export function extractThinking(message: RawMessage | unknown): string | null {
}
}
const combined = parts.join('\n\n').trim();
const combined = compactProgressiveParts(parts).join('\n\n').trim();
return combined.length > 0 ? combined : null;
}

View File

@@ -165,14 +165,178 @@ function saveImageCache(cache: Map<string, AttachedFileMeta>): void {
const _imageCache = loadImageCache();
/**
 * Canonicalize a content-block text value for comparison: CRLF becomes LF
 * and surrounding whitespace is stripped. Non-string input yields ''.
 */
function normalizeBlockText(text: string | undefined): string {
  if (typeof text !== 'string') return '';
  return text.replace(/\r\n/g, '\n').trim();
}
/**
 * Collapse progressively streamed text chunks so cumulative re-sends do not
 * render as duplicates: a chunk equal to / a prefix of the previously kept
 * chunk (after normalization) is dropped, an extension replaces the previous
 * chunk, anything else is appended. Raw chunk text is what gets kept.
 * NOTE(review): this logic is duplicated in at least two other files in this
 * commit — consider extracting a shared helper.
 */
function compactProgressiveTextParts(parts: string[]): string[] {
const compacted: string[] = [];
for (const part of parts) {
const current = normalizeBlockText(part);
// Drop whitespace-only chunks.
if (!current) continue;
const previous = compacted.at(-1);
if (!previous) {
compacted.push(part);
continue;
}
const normalizedPrevious = normalizeBlockText(previous);
// NOTE(review): likely unreachable — empty-normalizing parts are filtered
// above before being pushed. Confirm before removing.
if (!normalizedPrevious) {
compacted[compacted.length - 1] = part;
continue;
}
// Repeat or prefix of the kept chunk: skip.
if (current === normalizedPrevious || normalizedPrevious.startsWith(current)) {
continue;
}
// Cumulative extension: replace the kept chunk with the longer one.
if (current.startsWith(normalizedPrevious)) {
compacted[compacted.length - 1] = part;
continue;
}
compacted.push(part);
}
return compacted;
}
/**
 * De-duplicate a streaming message's content blocks. Consecutive runs of
 * 'text' blocks and of 'thinking' blocks are buffered separately and compacted
 * via compactProgressiveTextParts; any other block type flushes both buffers
 * (preserving order relative to tool calls etc.) and is passed through as-is.
 * Blocks with empty text/thinking payloads are absorbed into the buffering
 * pass and effectively dropped.
 */
function normalizeLiveContentBlocks(content: ContentBlock[]): ContentBlock[] {
const normalized: ContentBlock[] = [];
let textBuffer: string[] = [];
let thinkingBuffer: string[] = [];
// Emit compacted 'text' blocks accumulated so far, then reset the buffer.
const flushTextBuffer = () => {
for (const part of compactProgressiveTextParts(textBuffer)) {
normalized.push({ type: 'text', text: part });
}
textBuffer = [];
};
// Emit compacted 'thinking' blocks accumulated so far, then reset the buffer.
const flushThinkingBuffer = () => {
for (const part of compactProgressiveTextParts(thinkingBuffer)) {
normalized.push({ type: 'thinking', thinking: part });
}
thinkingBuffer = [];
};
for (const block of content) {
if (block.type === 'text' && block.text) {
textBuffer.push(block.text);
continue;
}
if (block.type === 'thinking' && block.thinking) {
thinkingBuffer.push(block.thinking);
continue;
}
// Non-text/non-thinking block: flush buffered runs first so ordering
// around tool-call blocks is preserved.
flushTextBuffer();
flushThinkingBuffer();
normalized.push(block);
}
// Flush any trailing buffered runs.
flushTextBuffer();
flushThinkingBuffer();
return normalized;
}
/**
 * Normalize a streaming message's array content via normalizeLiveContentBlocks.
 * Non-object messages and messages without array content pass through
 * unchanged. Returns the original object (same reference) when normalization
 * changed nothing, so downstream identity checks are not disturbed.
 */
function normalizeStreamingMessage(message: unknown): unknown {
if (!message || typeof message !== 'object') return message;
const rawMessage = message as RawMessage;
const rawContent = rawMessage.content;
if (!Array.isArray(rawContent)) return rawMessage;
const normalizedContent = normalizeLiveContentBlocks(rawContent as ContentBlock[]);
// Cheap change detection: element-wise reference comparison plus length.
const didChange = normalizedContent.some((block, index) => block !== rawContent[index])
|| normalizedContent.length !== rawContent.length;
return didChange
? { ...rawMessage, content: normalizedContent }
: rawMessage;
}
/**
 * Reduce message content to a whitespace-collapsed, trimmed string suitable
 * for comparing an optimistic user message against server history.
 */
function normalizeComparableUserText(content: unknown): string {
  const text = getMessageText(content);
  return text.trim().replace(/\s+/g, ' ');
}
/**
 * Build an order-independent signature of a message's attachments (file path
 * when present, otherwise name|mime|size) for duplicate-detection comparisons.
 * Returns '' when there are no attachments.
 */
function getComparableAttachmentSignature(message: Pick<RawMessage, '_attachedFiles'>): string {
  const attachments = message._attachedFiles || [];
  const keys: string[] = [];
  for (const file of attachments) {
    const key = file.filePath || `${file.fileName}|${file.mimeType}|${file.fileSize}`;
    if (key) keys.push(key);
  }
  keys.sort();
  return keys.join('::');
}
/**
 * Decide whether a server-history message is the same user turn as a locally
 * appended optimistic message, to avoid rendering the prompt twice.
 * Matches when (a) both text and attachment signatures agree, or (b) one
 * signal agrees, the other is absent on at least one side, and either the
 * timestamps are within 5s of each other or the candidate has no timestamp.
 * @param candidate message from the fetched history
 * @param optimistic locally appended user message
 * @param optimisticTimestampMs send time of the optimistic message, in ms
 */
function matchesOptimisticUserMessage(
candidate: RawMessage,
optimistic: RawMessage,
optimisticTimestampMs: number,
): boolean {
if (candidate.role !== 'user') return false;
const optimisticText = normalizeComparableUserText(optimistic.content);
const candidateText = normalizeComparableUserText(candidate.content);
// Empty optimistic text never counts as a text match (attachment-only sends).
const sameText = optimisticText.length > 0 && optimisticText === candidateText;
const optimisticAttachments = getComparableAttachmentSignature(optimistic);
const candidateAttachments = getComparableAttachmentSignature(candidate);
const sameAttachments = optimisticAttachments.length > 0 && optimisticAttachments === candidateAttachments;
const hasOptimisticTimestamp = Number.isFinite(optimisticTimestampMs) && optimisticTimestampMs > 0;
const hasCandidateTimestamp = candidate.timestamp != null;
// NOTE(review): `as number` assumes timestamp is numeric; if toMs accepts
// other timestamp shapes, the cast may hide a mismatch — confirm.
const timestampMatches = hasOptimisticTimestamp && hasCandidateTimestamp
? Math.abs(toMs(candidate.timestamp as number) - optimisticTimestampMs) < 5000
: false;
if (sameText && sameAttachments) return true;
// Text matches and at least one side lacks attachments: accept if timestamps
// line up, or if the candidate carries no timestamp at all.
if (sameText && (!optimisticAttachments || !candidateAttachments) && (timestampMatches || !hasCandidateTimestamp)) return true;
// Attachments match and at least one side lacks text: same timestamp rule.
if (sameAttachments && (!optimisticText || !candidateText) && (timestampMatches || !hasCandidateTimestamp)) return true;
return false;
}
/**
 * Capture the current streaming assistant message as a persistent history
 * entry (e.g. before a tool result or an error would overwrite it).
 * Returns [] when there is nothing to snapshot: no stream, the stream has a
 * non-assistant role, or a message with the derived id already exists.
 * @param currentStream the in-flight streaming message, if any
 * @param existingMessages current history, used for id derivation/dedup
 * @param runId fallback seed for a stable snapshot id when the stream has none
 */
function snapshotStreamingAssistantMessage(
currentStream: RawMessage | null,
existingMessages: RawMessage[],
runId: string,
): RawMessage[] {
if (!currentStream) return [];
const normalizedStream = normalizeStreamingMessage(currentStream) as RawMessage;
const streamRole = normalizedStream.role;
// Undefined role is treated as assistant (role may not be set mid-stream).
if (streamRole !== 'assistant' && streamRole !== undefined) return [];
// Prefer the message's own id; otherwise derive a stable one from runId and
// the current history length so repeated snapshots of the same turn dedupe.
const snapId = normalizedStream.id || `${runId || 'run'}-turn-${existingMessages.length}`;
if (existingMessages.some((message) => message.id === snapId)) return [];
return [{
...normalizedStream,
role: 'assistant',
id: snapId,
}];
}
/**
 * Find the most recent locally-appended user message: the last user entry
 * whose timestamp is within 5s of the given send time, or that has no
 * timestamp at all. Returns undefined when none qualifies.
 */
function getLatestOptimisticUserMessage(messages: RawMessage[], userTimestampMs: number): RawMessage | undefined {
return [...messages].reverse().find(
(message) => message.role === 'user' && (!message.timestamp || Math.abs(toMs(message.timestamp) - userTimestampMs) < 5000),
);
}
/** Extract plain text from message content (string or content blocks) */
function getMessageText(content: unknown): string {
if (typeof content === 'string') return content;
if (Array.isArray(content)) {
return (content as Array<{ type?: string; text?: string }>)
const parts = (content as Array<{ type?: string; text?: string }>)
.filter(b => b.type === 'text' && b.text)
.map(b => b.text!)
.join('\n');
.map(b => b.text!);
return compactProgressiveTextParts(parts).join('\n');
}
return '';
}
@@ -1416,17 +1580,12 @@ export const useChatStore = create<ChatState>((set, get) => ({
const userMsgAt = get().lastUserMessageAt;
if (get().sending && userMsgAt) {
const userMsMs = toMs(userMsgAt);
const hasRecentUser = enrichedMessages.some(
(m) => m.role === 'user' && m.timestamp && Math.abs(toMs(m.timestamp) - userMsMs) < 5000,
);
if (!hasRecentUser) {
const currentMsgs = get().messages;
const optimistic = [...currentMsgs].reverse().find(
(m) => m.role === 'user' && m.timestamp && Math.abs(toMs(m.timestamp) - userMsMs) < 5000,
);
if (optimistic) {
finalMessages = [...enrichedMessages, optimistic];
}
const optimistic = getLatestOptimisticUserMessage(get().messages, userMsMs);
const hasMatchingUser = optimistic
? enrichedMessages.some((message) => matchesOptimisticUserMessage(message, optimistic, userMsMs))
: false;
if (optimistic && !hasMatchingUser) {
finalMessages = [...enrichedMessages, optimistic];
}
}
@@ -1890,7 +2049,7 @@ export const useChatStore = create<ChatState>((set, get) => ({
const msgRole = (event.message as RawMessage).role;
if (isToolResultRole(msgRole)) return s.streamingMessage;
}
return event.message ?? s.streamingMessage;
return normalizeStreamingMessage(event.message ?? s.streamingMessage);
})(),
streamingTools: updates.length > 0 ? upsertToolStatuses(s.streamingTools, updates) : s.streamingTools,
}));
@@ -1902,17 +2061,18 @@ export const useChatStore = create<ChatState>((set, get) => ({
// Message complete - add to history and clear streaming
const finalMsg = event.message as RawMessage | undefined;
if (finalMsg) {
const updates = collectToolUpdates(finalMsg, resolvedState);
if (isToolResultRole(finalMsg.role)) {
const normalizedFinalMessage = normalizeStreamingMessage(finalMsg) as RawMessage;
const updates = collectToolUpdates(normalizedFinalMessage, resolvedState);
if (isToolResultRole(normalizedFinalMessage.role)) {
// Resolve file path from the streaming assistant message's matching tool call
const currentStreamForPath = get().streamingMessage as RawMessage | null;
const matchedPath = (currentStreamForPath && finalMsg.toolCallId)
? getToolCallFilePath(currentStreamForPath, finalMsg.toolCallId)
const matchedPath = (currentStreamForPath && normalizedFinalMessage.toolCallId)
? getToolCallFilePath(currentStreamForPath, normalizedFinalMessage.toolCallId)
: undefined;
// Mirror enrichWithToolResultFiles: collect images + file refs for next assistant msg
const toolFiles: AttachedFileMeta[] = [
...extractImagesAsAttachedFiles(finalMsg.content),
...extractImagesAsAttachedFiles(normalizedFinalMessage.content),
];
if (matchedPath) {
for (const f of toolFiles) {
@@ -1922,7 +2082,7 @@ export const useChatStore = create<ChatState>((set, get) => ({
}
}
}
const text = getMessageText(finalMsg.content);
const text = getMessageText(normalizedFinalMessage.content);
if (text) {
const mediaRefs = extractMediaRefs(text);
const mediaRefPaths = new Set(mediaRefs.map(r => r.filePath));
@@ -1938,22 +2098,7 @@ export const useChatStore = create<ChatState>((set, get) => ({
// tool result. Without snapshotting here, the intermediate thinking+tool steps
// would be overwritten by the next turn's deltas and never appear in the UI.
const currentStream = s.streamingMessage as RawMessage | null;
const snapshotMsgs: RawMessage[] = [];
if (currentStream) {
const streamRole = currentStream.role;
if (streamRole === 'assistant' || streamRole === undefined) {
// Use message's own id if available, otherwise derive a stable one from runId
const snapId = currentStream.id
|| `${runId || 'run'}-turn-${s.messages.length}`;
if (!s.messages.some(m => m.id === snapId)) {
snapshotMsgs.push({
...(currentStream as RawMessage),
role: 'assistant',
id: snapId,
});
}
}
}
const snapshotMsgs = snapshotStreamingAssistantMessage(currentStream, s.messages, runId);
return {
messages: snapshotMsgs.length > 0 ? [...s.messages, ...snapshotMsgs] : s.messages,
streamingText: '',
@@ -1967,9 +2112,9 @@ export const useChatStore = create<ChatState>((set, get) => ({
});
break;
}
const toolOnly = isToolOnlyMessage(finalMsg);
const hasOutput = hasNonToolAssistantContent(finalMsg);
const msgId = finalMsg.id || (toolOnly ? `run-${runId}-tool-${Date.now()}` : `run-${runId}`);
const toolOnly = isToolOnlyMessage(normalizedFinalMessage);
const hasOutput = hasNonToolAssistantContent(normalizedFinalMessage);
const msgId = normalizedFinalMessage.id || (toolOnly ? `run-${runId}-tool-${Date.now()}` : `run-${runId}`);
set((s) => {
const nextTools = updates.length > 0 ? upsertToolStatuses(s.streamingTools, updates) : s.streamingTools;
const streamingTools = hasOutput ? [] : nextTools;
@@ -1978,12 +2123,12 @@ export const useChatStore = create<ChatState>((set, get) => ({
const pendingImgs = s.pendingToolImages;
const msgWithImages: RawMessage = pendingImgs.length > 0
? {
...finalMsg,
role: (finalMsg.role || 'assistant') as RawMessage['role'],
...normalizedFinalMessage,
role: (normalizedFinalMessage.role || 'assistant') as RawMessage['role'],
id: msgId,
_attachedFiles: [...(finalMsg._attachedFiles || []), ...pendingImgs],
_attachedFiles: [...(normalizedFinalMessage._attachedFiles || []), ...pendingImgs],
}
: { ...finalMsg, role: (finalMsg.role || 'assistant') as RawMessage['role'], id: msgId };
: { ...normalizedFinalMessage, role: (normalizedFinalMessage.role || 'assistant') as RawMessage['role'], id: msgId };
const clearPendingImages = { pendingToolImages: [] as AttachedFileMeta[] };
// Check if message already exists (prevent duplicates)
@@ -2044,15 +2189,15 @@ export const useChatStore = create<ChatState>((set, get) => ({
// content ("Let me get that written down...") is preserved in the UI
// rather than being silently discarded.
const currentStream = get().streamingMessage as RawMessage | null;
if (currentStream && (currentStream.role === 'assistant' || currentStream.role === undefined)) {
const snapId = (currentStream as RawMessage).id
|| `error-snap-${Date.now()}`;
const alreadyExists = get().messages.some(m => m.id === snapId);
if (!alreadyExists) {
set((s) => ({
messages: [...s.messages, { ...currentStream, role: 'assistant' as const, id: snapId }],
}));
}
const errorSnapshot = snapshotStreamingAssistantMessage(
currentStream,
get().messages,
`error-${runId || Date.now()}`,
);
if (errorSnapshot.length > 0) {
set((s) => ({
messages: [...s.messages, ...errorSnapshot],
}));
}
set({

View File

@@ -70,6 +70,169 @@ function saveImageCache(cache: Map<string, AttachedFileMeta>): void {
const _imageCache = loadImageCache();
// Canonicalize block text for comparison: CRLF -> LF, trim; non-strings -> ''.
function normalizeBlockText(text: string | undefined): string {
return typeof text === 'string' ? text.replace(/\r\n/g, '\n').trim() : '';
}
/**
 * Collapse progressively streamed chunks: drop chunks that repeat / are a
 * prefix of the previously kept chunk, replace the previous chunk when the
 * new one extends it, otherwise append. Raw chunk text is kept.
 * NOTE(review): duplicated in sibling files in this commit — consider a
 * shared helper.
 */
function compactProgressiveTextParts(parts: string[]): string[] {
const compacted: string[] = [];
for (const part of parts) {
const current = normalizeBlockText(part);
// Drop whitespace-only chunks.
if (!current) continue;
const previous = compacted.at(-1);
if (!previous) {
compacted.push(part);
continue;
}
const normalizedPrevious = normalizeBlockText(previous);
// NOTE(review): likely unreachable — empty-normalizing parts never get
// pushed. Confirm before removing.
if (!normalizedPrevious) {
compacted[compacted.length - 1] = part;
continue;
}
// Repeat or prefix of the kept chunk: skip.
if (current === normalizedPrevious || normalizedPrevious.startsWith(current)) {
continue;
}
// Cumulative extension: keep the longer chunk instead.
if (current.startsWith(normalizedPrevious)) {
compacted[compacted.length - 1] = part;
continue;
}
compacted.push(part);
}
return compacted;
}
/**
 * De-duplicate a streaming message's content blocks: consecutive 'text' and
 * 'thinking' runs are buffered and compacted; any other block flushes both
 * buffers (preserving order around tool calls) and passes through unchanged.
 */
function normalizeLiveContentBlocks(content: ContentBlock[]): ContentBlock[] {
const normalized: ContentBlock[] = [];
let textBuffer: string[] = [];
let thinkingBuffer: string[] = [];
// Emit compacted buffered 'text' blocks, then reset.
const flushTextBuffer = () => {
for (const part of compactProgressiveTextParts(textBuffer)) {
normalized.push({ type: 'text', text: part });
}
textBuffer = [];
};
// Emit compacted buffered 'thinking' blocks, then reset.
const flushThinkingBuffer = () => {
for (const part of compactProgressiveTextParts(thinkingBuffer)) {
normalized.push({ type: 'thinking', thinking: part });
}
thinkingBuffer = [];
};
for (const block of content) {
if (block.type === 'text' && block.text) {
textBuffer.push(block.text);
continue;
}
if (block.type === 'thinking' && block.thinking) {
thinkingBuffer.push(block.thinking);
continue;
}
// Other block types interrupt the runs; flush to keep relative order.
flushTextBuffer();
flushThinkingBuffer();
normalized.push(block);
}
flushTextBuffer();
flushThinkingBuffer();
return normalized;
}
/**
 * Normalize array content of a streaming message; pass through non-objects
 * and non-array content. Returns the same reference when nothing changed so
 * identity-based checks downstream keep working.
 */
function normalizeStreamingMessage(message: unknown): unknown {
if (!message || typeof message !== 'object') return message;
const rawMessage = message as RawMessage;
const rawContent = rawMessage.content;
if (!Array.isArray(rawContent)) return rawMessage;
const normalizedContent = normalizeLiveContentBlocks(rawContent as ContentBlock[]);
// Element-wise reference + length comparison as cheap change detection.
const didChange = normalizedContent.some((block, index) => block !== rawContent[index])
|| normalizedContent.length !== rawContent.length;
return didChange
? { ...rawMessage, content: normalizedContent }
: rawMessage;
}
// Reduce content to a whitespace-collapsed, trimmed string for duplicate checks.
function normalizeComparableUserText(content: unknown): string {
return getMessageText(content)
.replace(/\s+/g, ' ')
.trim();
}
// Order-independent attachment signature (path, else name|mime|size); '' when none.
function getComparableAttachmentSignature(message: Pick<RawMessage, '_attachedFiles'>): string {
const files = (message._attachedFiles || [])
.map((file) => file.filePath || `${file.fileName}|${file.mimeType}|${file.fileSize}`)
.filter(Boolean)
.sort();
return files.join('::');
}
/**
 * True when a history message is the same user turn as a locally appended
 * optimistic message. Matches on text+attachments together, or on one signal
 * when the other is absent on a side and timestamps agree within 5s (or the
 * candidate has no timestamp).
 */
function matchesOptimisticUserMessage(
candidate: RawMessage,
optimistic: RawMessage,
optimisticTimestampMs: number,
): boolean {
if (candidate.role !== 'user') return false;
const optimisticText = normalizeComparableUserText(optimistic.content);
const candidateText = normalizeComparableUserText(candidate.content);
// Empty optimistic text never counts as a text match.
const sameText = optimisticText.length > 0 && optimisticText === candidateText;
const optimisticAttachments = getComparableAttachmentSignature(optimistic);
const candidateAttachments = getComparableAttachmentSignature(candidate);
const sameAttachments = optimisticAttachments.length > 0 && optimisticAttachments === candidateAttachments;
const hasOptimisticTimestamp = Number.isFinite(optimisticTimestampMs) && optimisticTimestampMs > 0;
const hasCandidateTimestamp = candidate.timestamp != null;
// NOTE(review): `as number` assumes numeric timestamps — confirm toMs input shape.
const timestampMatches = hasOptimisticTimestamp && hasCandidateTimestamp
? Math.abs(toMs(candidate.timestamp as number) - optimisticTimestampMs) < 5000
: false;
if (sameText && sameAttachments) return true;
if (sameText && (!optimisticAttachments || !candidateAttachments) && (timestampMatches || !hasCandidateTimestamp)) return true;
if (sameAttachments && (!optimisticText || !candidateText) && (timestampMatches || !hasCandidateTimestamp)) return true;
return false;
}
/**
 * Capture the in-flight streaming assistant message as a history entry.
 * Returns [] when there is no stream, the stream role is neither 'assistant'
 * nor undefined, or a message with the derived id already exists.
 */
function snapshotStreamingAssistantMessage(
currentStream: RawMessage | null,
existingMessages: RawMessage[],
runId: string,
): RawMessage[] {
if (!currentStream) return [];
const normalizedStream = normalizeStreamingMessage(currentStream) as RawMessage;
const streamRole = normalizedStream.role;
// Undefined role is treated as assistant (may be unset mid-stream).
if (streamRole !== 'assistant' && streamRole !== undefined) return [];
// Stable fallback id derived from runId + history length for deduping.
const snapId = normalizedStream.id || `${runId || 'run'}-turn-${existingMessages.length}`;
if (existingMessages.some((message) => message.id === snapId)) return [];
return [{
...normalizedStream,
role: 'assistant',
id: snapId,
}];
}
// Most recent user message within 5s of the send time (or lacking a timestamp).
function getLatestOptimisticUserMessage(messages: RawMessage[], userTimestampMs: number): RawMessage | undefined {
return [...messages].reverse().find(
(message) => message.role === 'user' && (!message.timestamp || Math.abs(toMs(message.timestamp) - userTimestampMs) < 5000),
);
}
function upsertImageCacheEntry(filePath: string, file: Omit<AttachedFileMeta, 'filePath'>): void {
_imageCache.set(filePath, { ...file, filePath });
saveImageCache(_imageCache);
@@ -86,10 +249,10 @@ function withAttachedFileSource(
function getMessageText(content: unknown): string {
if (typeof content === 'string') return content;
if (Array.isArray(content)) {
return (content as Array<{ type?: string; text?: string }>)
const parts = (content as Array<{ type?: string; text?: string }>)
.filter(b => b.type === 'text' && b.text)
.map(b => b.text!)
.join('\n');
.map(b => b.text!);
return compactProgressiveTextParts(parts).join('\n');
}
return '';
}
@@ -627,7 +790,7 @@ function extractTextFromContent(content: unknown): string {
parts.push(block.text);
}
}
return parts.join('\n');
return compactProgressiveTextParts(parts).join('\n');
}
function summarizeToolOutput(text: string): string | undefined {
@@ -855,6 +1018,10 @@ export {
upsertToolStatuses,
hasNonToolAssistantContent,
isToolOnlyMessage,
normalizeStreamingMessage,
matchesOptimisticUserMessage,
snapshotStreamingAssistantMessage,
getLatestOptimisticUserMessage,
setHistoryPollTimer,
hasErrorRecoveryTimer,
setErrorRecoveryTimer,

View File

@@ -5,11 +5,13 @@ import {
clearHistoryPoll,
enrichWithCachedImages,
enrichWithToolResultFiles,
getLatestOptimisticUserMessage,
getMessageText,
hasNonToolAssistantContent,
isInternalMessage,
isToolResultRole,
loadMissingPreviews,
matchesOptimisticUserMessage,
toMs,
} from './helpers';
import { buildCronSessionHistoryPath, isCronSessionKey } from './cron-session-utils';
@@ -101,17 +103,12 @@ export function createHistoryActions(
const userMsgAt = get().lastUserMessageAt;
if (get().sending && userMsgAt) {
const userMsMs = toMs(userMsgAt);
const hasRecentUser = enrichedMessages.some(
(m) => m.role === 'user' && m.timestamp && Math.abs(toMs(m.timestamp) - userMsMs) < 5000,
);
if (!hasRecentUser) {
const currentMsgs = get().messages;
const optimistic = [...currentMsgs].reverse().find(
(m) => m.role === 'user' && m.timestamp && Math.abs(toMs(m.timestamp) - userMsMs) < 5000,
);
if (optimistic) {
finalMessages = [...enrichedMessages, optimistic];
}
const optimistic = getLatestOptimisticUserMessage(get().messages, userMsMs);
const hasMatchingUser = optimistic
? enrichedMessages.some((message) => matchesOptimisticUserMessage(message, optimistic, userMsMs))
: false;
if (optimistic && !hasMatchingUser) {
finalMessages = [...enrichedMessages, optimistic];
}
}

View File

@@ -12,7 +12,9 @@ import {
isToolOnlyMessage,
isToolResultRole,
makeAttachedFile,
normalizeStreamingMessage,
setErrorRecoveryTimer,
snapshotStreamingAssistantMessage,
upsertToolStatuses,
} from './helpers';
import type { AttachedFileMeta, RawMessage } from './types';
@@ -65,7 +67,7 @@ export function handleRuntimeEventState(
return s.streamingMessage;
}
}
return event.message ?? s.streamingMessage;
return normalizeStreamingMessage(event.message ?? s.streamingMessage);
})(),
streamingTools: updates.length > 0 ? upsertToolStatuses(s.streamingTools, updates) : s.streamingTools,
}));
@@ -77,16 +79,17 @@ export function handleRuntimeEventState(
// Message complete - add to history and clear streaming
const finalMsg = event.message as RawMessage | undefined;
if (finalMsg) {
const updates = collectToolUpdates(finalMsg, resolvedState);
if (isToolResultRole(finalMsg.role)) {
const normalizedFinalMessage = normalizeStreamingMessage(finalMsg) as RawMessage;
const updates = collectToolUpdates(normalizedFinalMessage, resolvedState);
if (isToolResultRole(normalizedFinalMessage.role)) {
// Resolve file path from the streaming assistant message's matching tool call
const currentStreamForPath = get().streamingMessage as RawMessage | null;
const matchedPath = (currentStreamForPath && finalMsg.toolCallId)
? getToolCallFilePath(currentStreamForPath, finalMsg.toolCallId)
const matchedPath = (currentStreamForPath && normalizedFinalMessage.toolCallId)
? getToolCallFilePath(currentStreamForPath, normalizedFinalMessage.toolCallId)
: undefined;
// Mirror enrichWithToolResultFiles: collect images + file refs for next assistant msg
const toolFiles: AttachedFileMeta[] = extractImagesAsAttachedFiles(finalMsg.content)
const toolFiles: AttachedFileMeta[] = extractImagesAsAttachedFiles(normalizedFinalMessage.content)
.map((file) => (file.source ? file : { ...file, source: 'tool-result' }));
if (matchedPath) {
for (const f of toolFiles) {
@@ -96,7 +99,7 @@ export function handleRuntimeEventState(
}
}
}
const text = getMessageText(finalMsg.content);
const text = getMessageText(normalizedFinalMessage.content);
if (text) {
const mediaRefs = extractMediaRefs(text);
const mediaRefPaths = new Set(mediaRefs.map(r => r.filePath));
@@ -112,22 +115,7 @@ export function handleRuntimeEventState(
// tool result. Without snapshotting here, the intermediate thinking+tool steps
// would be overwritten by the next turn's deltas and never appear in the UI.
const currentStream = s.streamingMessage as RawMessage | null;
const snapshotMsgs: RawMessage[] = [];
if (currentStream) {
const streamRole = currentStream.role;
if (streamRole === 'assistant' || streamRole === undefined) {
// Use message's own id if available, otherwise derive a stable one from runId
const snapId = currentStream.id
|| `${runId || 'run'}-turn-${s.messages.length}`;
if (!s.messages.some(m => m.id === snapId)) {
snapshotMsgs.push({
...(currentStream as RawMessage),
role: 'assistant',
id: snapId,
});
}
}
}
const snapshotMsgs = snapshotStreamingAssistantMessage(currentStream, s.messages, runId);
return {
messages: snapshotMsgs.length > 0 ? [...s.messages, ...snapshotMsgs] : s.messages,
streamingText: '',
@@ -141,9 +129,9 @@ export function handleRuntimeEventState(
});
break;
}
const toolOnly = isToolOnlyMessage(finalMsg);
const hasOutput = hasNonToolAssistantContent(finalMsg);
const msgId = finalMsg.id || (toolOnly ? `run-${runId}-tool-${Date.now()}` : `run-${runId}`);
const toolOnly = isToolOnlyMessage(normalizedFinalMessage);
const hasOutput = hasNonToolAssistantContent(normalizedFinalMessage);
const msgId = normalizedFinalMessage.id || (toolOnly ? `run-${runId}-tool-${Date.now()}` : `run-${runId}`);
set((s) => {
const nextTools = updates.length > 0 ? upsertToolStatuses(s.streamingTools, updates) : s.streamingTools;
const streamingTools = hasOutput ? [] : nextTools;
@@ -152,12 +140,12 @@ export function handleRuntimeEventState(
const pendingImgs = s.pendingToolImages;
const msgWithImages: RawMessage = pendingImgs.length > 0
? {
...finalMsg,
role: (finalMsg.role || 'assistant') as RawMessage['role'],
...normalizedFinalMessage,
role: (normalizedFinalMessage.role || 'assistant') as RawMessage['role'],
id: msgId,
_attachedFiles: [...(finalMsg._attachedFiles || []), ...pendingImgs],
_attachedFiles: [...(normalizedFinalMessage._attachedFiles || []), ...pendingImgs],
}
: { ...finalMsg, role: (finalMsg.role || 'assistant') as RawMessage['role'], id: msgId };
: { ...normalizedFinalMessage, role: (normalizedFinalMessage.role || 'assistant') as RawMessage['role'], id: msgId };
const clearPendingImages = { pendingToolImages: [] as AttachedFileMeta[] };
// Check if message already exists (prevent duplicates)
@@ -218,15 +206,15 @@ export function handleRuntimeEventState(
// content ("Let me get that written down...") is preserved in the UI
// rather than being silently discarded.
const currentStream = get().streamingMessage as RawMessage | null;
if (currentStream && (currentStream.role === 'assistant' || currentStream.role === undefined)) {
const snapId = (currentStream as RawMessage).id
|| `error-snap-${Date.now()}`;
const alreadyExists = get().messages.some(m => m.id === snapId);
if (!alreadyExists) {
set((s) => ({
messages: [...s.messages, { ...currentStream, role: 'assistant' as const, id: snapId }],
}));
}
const errorSnapshot = snapshotStreamingAssistantMessage(
currentStream,
get().messages,
`error-${runId || Date.now()}`,
);
if (errorSnapshot.length > 0) {
set((s) => ({
messages: [...s.messages, ...errorSnapshot],
}));
}
set({
@@ -291,7 +279,7 @@ export function handleRuntimeEventState(
console.warn(`[handleChatEvent] Unknown event state "${resolvedState}", treating message as streaming delta. Event keys:`, Object.keys(event));
const updates = collectToolUpdates(event.message, 'delta');
set((s) => ({
streamingMessage: event.message ?? s.streamingMessage,
streamingMessage: normalizeStreamingMessage(event.message ?? s.streamingMessage),
streamingTools: updates.length > 0 ? upsertToolStatuses(s.streamingTools, updates) : s.streamingTools,
}));
}

View File

@@ -97,6 +97,9 @@ function maybeLoadSessions(
state: { loadSessions: () => Promise<void> },
force = false,
): void {
const { status } = useGatewayStore.getState();
if (status.gatewayReady === false) return;
const now = Date.now();
if (!force && now - lastLoadSessionsAt < LOAD_SESSIONS_MIN_INTERVAL_MS) return;
lastLoadSessionsAt = now;

View File

@@ -16,7 +16,7 @@ function stableStringify(value: unknown): string {
const seededHistory = [
{
role: 'user',
content: [{ type: 'text', text: '[Mon 2026-04-06 15:18 GMT+8] 分析 Velaria 当前未提交改动' }],
content: [{ type: 'text', text: '[Mon 2026-04-06 15:18 GMT+8] Analyze Velaria uncommitted changes' }],
timestamp: Date.now(),
},
{
@@ -57,7 +57,7 @@ const seededHistory = [
type: 'toolCall',
id: 'yield-call',
name: 'sessions_yield',
arguments: { message: '我让 coder 去拆 ~/Velaria 当前未提交改动的核心块了,等它回来我直接给你结论。' },
arguments: { message: 'I asked coder to break down the core blocks of ~/Velaria uncommitted changes; will give you the conclusion when it returns.' },
}],
timestamp: Date.now(),
},
@@ -69,12 +69,12 @@ const seededHistory = [
type: 'text',
text: JSON.stringify({
status: 'yielded',
message: '我让 coder 去拆 ~/Velaria 当前未提交改动的核心块了,等它回来我直接给你结论。',
message: 'I asked coder to break down the core blocks of ~/Velaria uncommitted changes; will give you the conclusion when it returns.',
}, null, 2),
}],
details: {
status: 'yielded',
message: '我让 coder 去拆 ~/Velaria 当前未提交改动的核心块了,等它回来我直接给你结论。',
message: 'I asked coder to break down the core blocks of ~/Velaria uncommitted changes; will give you the conclusion when it returns.',
},
isError: false,
timestamp: Date.now(),
@@ -94,7 +94,7 @@ status: completed successfully`,
},
{
role: 'assistant',
content: [{ type: 'text', text: '我让 coder 分析完了,下面是结论。' }],
content: [{ type: 'text', text: 'Coder has finished the analysis, here are the conclusions.' }],
_attachedFiles: [
{
fileName: 'CHECKLIST.md',
@@ -112,7 +112,7 @@ status: completed successfully`,
const childTranscriptMessages = [
{
role: 'user',
content: [{ type: 'text', text: '分析 ~/Velaria 当前未提交改动的核心内容' }],
content: [{ type: 'text', text: 'Analyze the core content of ~/Velaria uncommitted changes' }],
timestamp: Date.now(),
},
{
@@ -143,7 +143,7 @@ const childTranscriptMessages = [
},
{
role: 'assistant',
content: [{ type: 'text', text: '已完成分析,最关键的有 4 块。' }],
content: [{ type: 'text', text: 'Analysis complete, there are 4 key blocks.' }],
timestamp: Date.now(),
},
];
@@ -229,10 +229,145 @@ test.describe('ClawX chat execution graph', () => {
await expect(
page.locator('[data-testid="chat-execution-graph"] [data-testid="chat-execution-step"]').getByText('exec', { exact: true }),
).toBeVisible();
await expect(page.locator('[data-testid="chat-execution-graph"]').getByText('我让 coder 去拆 ~/Velaria 当前未提交改动的核心块了,等它回来我直接给你结论。')).toBeVisible();
await expect(page.locator('[data-testid="chat-execution-graph"]').getByText('I asked coder to break down the core blocks of ~/Velaria uncommitted changes; will give you the conclusion when it returns.')).toBeVisible();
await expect(page.getByText('CHECKLIST.md')).toHaveCount(0);
} finally {
await closeElectronApp(app);
}
});
// E2E regression test (#870): sending a prompt must issue exactly ONE
// `chat.send` RPC, and streaming deltas whose content blocks are cumulative
// (each block repeats all prior text) must render only the newest version
// instead of stacking duplicate thinking/text messages.
test('does not duplicate the in-flight user prompt or cumulative streaming content', async ({ launchElectronApp }) => {
  const app = await launchElectronApp({ skipSetup: true });
  try {
    // Static IPC fixtures: a running gateway, a single "main" session, and
    // an empty chat history so the typed prompt is the only user message.
    await installIpcMocks(app, {
      gatewayStatus: { state: 'running', port: 18789, pid: 12345 },
      gatewayRpc: {
        [stableStringify(['sessions.list', {}])]: {
          success: true,
          result: {
            sessions: [{ key: PROJECT_MANAGER_SESSION_KEY, displayName: 'main' }],
          },
        },
        [stableStringify(['chat.history', { sessionKey: PROJECT_MANAGER_SESSION_KEY, limit: 200 }])]: {
          success: true,
          result: {
            messages: [],
          },
        },
      },
      hostApi: {
        [stableStringify(['/api/gateway/status', 'GET'])]: {
          ok: true,
          data: {
            status: 200,
            ok: true,
            json: { state: 'running', port: 18789, pid: 12345 },
          },
        },
        [stableStringify(['/api/agents', 'GET'])]: {
          ok: true,
          data: {
            status: 200,
            ok: true,
            json: {
              success: true,
              agents: [{ id: 'main', name: 'main' }],
            },
          },
        },
      },
    });
    // Replace the main-process `gateway:rpc` handler so every `chat.send`
    // payload is recorded into a global the test can poll later.
    await app.evaluate(async ({ app: _app }) => {
      const { ipcMain } = process.mainModule!.require('electron') as typeof import('electron');
      const sendPayloads: Array<{ message?: string; sessionKey?: string }> = [];
      ipcMain.removeHandler('gateway:rpc');
      ipcMain.handle('gateway:rpc', async (_event: unknown, method: string, payload: unknown) => {
        if (method === 'sessions.list') {
          return {
            success: true,
            result: {
              sessions: [{ key: 'agent:main:main', displayName: 'main' }],
            },
          };
        }
        if (method === 'chat.history') {
          return {
            success: true,
            result: { messages: [] },
          };
        }
        if (method === 'chat.send') {
          if (payload && typeof payload === 'object') {
            const p = payload as { message?: string; sessionKey?: string };
            sendPayloads.push({ message: p.message, sessionKey: p.sessionKey });
          }
          return {
            success: true,
            result: { runId: 'mock-run' },
          };
        }
        return { success: true, result: {} };
      });
      // Stash the capture array on globalThis so later app.evaluate calls
      // (a separate V8 context invocation) can read what was sent.
      (globalThis as typeof globalThis & { __clawxSendPayloads?: Array<{ message?: string; sessionKey?: string }> }).__clawxSendPayloads = sendPayloads;
    });
    const page = await getStableWindow(app);
    // Reload so the renderer talks to the replaced handler; tolerate
    // ERR_FILE_NOT_FOUND, which this harness can raise on file:// reloads.
    try {
      await page.reload();
    } catch (error) {
      if (!String(error).includes('ERR_FILE_NOT_FOUND')) {
        throw error;
      }
    }
    await expect(page.getByTestId('main-layout')).toBeVisible();
    // Type and send the prompt through the composer.
    await page.getByTestId('chat-composer-input').fill('Open browser, search for tech news, and take a screenshot');
    await page.getByTestId('chat-composer-send').click();
    // The prompt must appear exactly once (no optimistic duplicate).
    await expect(page.getByText('Open browser, search for tech news, and take a screenshot')).toHaveCount(1);
    // Poll until exactly one chat.send RPC has been captured.
    await expect.poll(async () => {
      return await app.evaluate(() => {
        const sendPayloads = (globalThis as typeof globalThis & {
          __clawxSendPayloads?: Array<{ message?: string; sessionKey?: string }>;
        }).__clawxSendPayloads || [];
        return sendPayloads.length;
      });
    }).toBe(1);
    // Emit a streaming delta whose thinking/text blocks are cumulative:
    // each subsequent block repeats all earlier content plus the new chunk.
    await app.evaluate(async ({ BrowserWindow }) => {
      const win = BrowserWindow.getAllWindows()[0];
      win?.webContents.send('gateway:notification', {
        method: 'agent',
        params: {
          runId: 'mock-run',
          sessionKey: 'agent:main:main',
          state: 'delta',
          message: {
            role: 'assistant',
            content: [
              { type: 'thinking', thinking: 'thinking 1' },
              { type: 'thinking', thinking: 'thinking 1 2' },
              { type: 'thinking', thinking: 'thinking 1 2 3' },
              { type: 'text', text: '1' },
              { type: 'text', text: '1 2' },
              { type: 'text', text: '1 2 3' },
            ],
          },
        },
      });
    });
    // Only the newest cumulative version of each block may be visible;
    // the user prompt still appears exactly once.
    await expect(page.getByText('Open browser, search for tech news, and take a screenshot')).toHaveCount(1);
    await expect(page.getByText(/^thinking 1 2 3$/)).toHaveCount(1);
    await expect(page.getByText(/^thinking 1 2$/)).toHaveCount(0);
    await expect(page.getByText(/^thinking 1$/)).toHaveCount(0);
    await expect(page.getByText(/^1 2 3$/)).toHaveCount(1);
    await expect(page.getByText(/^1 2$/)).toHaveCount(0);
    await expect(page.getByText(/^1$/)).toHaveCount(0);
  } finally {
    await closeElectronApp(app);
  }
});
});

View File

@@ -41,11 +41,40 @@ vi.mock('@/stores/chat/helpers', () => ({
clearHistoryPoll: (...args: unknown[]) => clearHistoryPoll(...args),
enrichWithCachedImages: (...args: unknown[]) => enrichWithCachedImages(...args),
enrichWithToolResultFiles: (...args: unknown[]) => enrichWithToolResultFiles(...args),
// Test double: returns the newest user message whose timestamp is within
// 5 seconds of the optimistic send time, or that carries no timestamp at all.
getLatestOptimisticUserMessage: (messages: Array<{ role: string; timestamp?: number }>, userTimestampMs: number) =>
  [...messages].reverse().find(
    (message) => message.role === 'user'
      && (!message.timestamp || Math.abs(toMs(message.timestamp) - userTimestampMs) < 5000),
  ),
getMessageText: (...args: unknown[]) => getMessageText(...args),
hasNonToolAssistantContent: (...args: unknown[]) => hasNonToolAssistantContent(...args),
isInternalMessage: (...args: unknown[]) => isInternalMessage(...args),
isToolResultRole: (...args: unknown[]) => isToolResultRole(...args),
loadMissingPreviews: (...args: unknown[]) => loadMissingPreviews(...args),
// Test double mirroring the production matcher: decides whether a history
// message is the same logical send as the optimistic (locally appended)
// user message, so the optimistic copy can be dropped instead of duplicated.
matchesOptimisticUserMessage: (
  candidate: { role: string; timestamp?: number; content?: unknown; _attachedFiles?: Array<{ filePath?: string; fileName?: string; mimeType?: string; fileSize?: number }> },
  optimistic: { role: string; timestamp?: number; content?: unknown; _attachedFiles?: Array<{ filePath?: string; fileName?: string; mimeType?: string; fileSize?: number }> },
  optimisticTimestampMs: number,
) => {
  if (candidate.role !== 'user') return false;
  // Strip the "[Fri 2026-03-13 10:00 GMT+8] " style prefix and collapse
  // whitespace so locally typed text compares equal to gateway-echoed text.
  const normalizeText = (content: unknown) => (typeof content === 'string' ? content : '')
    .replace(/^\[(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun)\s+\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}\s+[^\]]+\]\s*/i, '')
    .replace(/\s+/g, ' ')
    .trim();
  const candidateText = normalizeText(candidate.content);
  const optimisticText = normalizeText(optimistic.content);
  // Attachments fingerprint: path when available, otherwise name|mime|size;
  // sorted so ordering differences don't matter.
  const candidateAttachments = (candidate._attachedFiles || []).map((file) => file.filePath || `${file.fileName}|${file.mimeType}|${file.fileSize}`).sort().join('::');
  const optimisticAttachments = (optimistic._attachedFiles || []).map((file) => file.filePath || `${file.fileName}|${file.mimeType}|${file.fileSize}`).sort().join('::');
  // Timestamps within 5s count as the same send; a missing candidate
  // timestamp falls back to content-only matching below.
  const hasCandidateTimestamp = candidate.timestamp != null;
  const timestampMatches = hasCandidateTimestamp
    ? Math.abs(toMs(candidate.timestamp as number) - optimisticTimestampMs) < 5000
    : false;
  // Match rules: identical text + identical attachments always match;
  // identical text (or identical non-empty attachments) also matches when
  // the candidate has no timestamp or its timestamp is close enough.
  if (candidateText && optimisticText && candidateText === optimisticText && candidateAttachments === optimisticAttachments) return true;
  if (candidateText && optimisticText && candidateText === optimisticText && (!hasCandidateTimestamp || timestampMatches)) return true;
  if (candidateAttachments && optimisticAttachments && candidateAttachments === optimisticAttachments && (!hasCandidateTimestamp || timestampMatches)) return true;
  return false;
},
toMs: (...args: unknown[]) => toMs(...args as Parameters<typeof toMs>),
}));
@@ -527,4 +556,45 @@ describe('chat history actions', () => {
]);
expect(h.read().messages[0]?._attachedFiles?.[0]?.preview).toBe('data:image/png;base64,abc');
});
// Regression: when the gateway history already echoes the user's prompt
// (returned without a timestamp), reloading history must drop the local
// optimistic copy rather than showing the prompt twice.
it('does not append an optimistic duplicate when history already includes the user message without timestamp', async () => {
  const { createHistoryActions } = await import('@/stores/chat/history-actions');
  // Store starts mid-send, holding the optimistic (timestamp-prefixed) prompt.
  const harness = makeHarness({
    currentSessionKey: 'agent:main:main',
    sending: true,
    lastUserMessageAt: 1_773_281_732_000,
    messages: [
      {
        role: 'user',
        content: '[Fri 2026-03-13 10:00 GMT+8] Open browser, search for tech news, and take a screenshot',
        timestamp: 1_773_281_732,
      },
    ],
  });
  const historyActions = createHistoryActions(harness.set as never, harness.get as never);
  // Gateway returns the same prompt (no timestamp) plus an assistant reply.
  invokeIpcMock.mockResolvedValueOnce({
    success: true,
    result: {
      messages: [
        {
          role: 'user',
          content: 'Open browser, search for tech news, and take a screenshot',
        },
        {
          role: 'assistant',
          content: 'Processing',
          timestamp: 1_773_281_733,
        },
      ],
    },
  });
  await historyActions.loadHistory(true);
  // Exactly one copy of the prompt survives, followed by the reply.
  const renderedContents = harness.read().messages.map((msg) => msg.content);
  expect(renderedContents).toEqual([
    'Open browser, search for tech news, and take a screenshot',
    'Processing',
  ]);
});
});

View File

@@ -20,7 +20,9 @@ const makeAttachedFile = vi.fn((ref: { filePath: string; mimeType: string }, sou
filePath: ref.filePath,
source,
}));
const normalizeStreamingMessage = vi.fn((message: unknown) => message);
const setErrorRecoveryTimer = vi.fn();
const snapshotStreamingAssistantMessage = vi.fn((currentStream: unknown) => currentStream ? [currentStream] : []);
const upsertToolStatuses = vi.fn((_current, updates) => updates);
vi.mock('@/stores/chat/helpers', () => ({
@@ -37,7 +39,9 @@ vi.mock('@/stores/chat/helpers', () => ({
isToolOnlyMessage: (...args: unknown[]) => isToolOnlyMessage(...args),
isToolResultRole: (...args: unknown[]) => isToolResultRole(...args),
makeAttachedFile: (...args: unknown[]) => makeAttachedFile(...args),
normalizeStreamingMessage: (...args: unknown[]) => normalizeStreamingMessage(...args),
setErrorRecoveryTimer: (...args: unknown[]) => setErrorRecoveryTimer(...args),
snapshotStreamingAssistantMessage: (...args: unknown[]) => snapshotStreamingAssistantMessage(...args),
upsertToolStatuses: (...args: unknown[]) => upsertToolStatuses(...args),
}));
@@ -84,6 +88,8 @@ describe('chat runtime event handlers', () => {
vi.resetAllMocks();
hasErrorRecoveryTimer.mockReturnValue(false);
collectToolUpdates.mockReturnValue([]);
normalizeStreamingMessage.mockImplementation((message: unknown) => message);
snapshotStreamingAssistantMessage.mockImplementation((currentStream: unknown) => currentStream ? [currentStream as Record<string, unknown>] : []);
upsertToolStatuses.mockImplementation((_current, updates) => updates);
});
@@ -228,6 +234,100 @@ describe('chat runtime event handlers', () => {
expect(h.read().streamingMessage).toEqual(incoming);
});
// A raw delta arrives with cumulative thinking/text blocks; the handler must
// store the collapsed form produced by normalizeStreamingMessage, not the raw
// duplicated blocks.
it('normalizes cumulative text and thinking blocks while streaming', async () => {
  const { handleRuntimeEventState } = await import('@/stores/chat/runtime-event-handlers');
  const harness = makeHarness({ streamingMessage: null });
  // Stub the normalizer so it yields only the newest version of each block.
  normalizeStreamingMessage.mockReturnValue({
    role: 'assistant',
    content: [
      { type: 'thinking', thinking: 'thinking 1 2 3' },
      { type: 'text', text: '1 2 3' },
    ],
  });
  // Feed a cumulative delta: every block repeats all earlier content.
  handleRuntimeEventState(harness.set as never, harness.get as never, {
    message: {
      role: 'assistant',
      content: [
        { type: 'thinking', thinking: 'thinking 1' },
        { type: 'thinking', thinking: 'thinking 1 2' },
        { type: 'thinking', thinking: 'thinking 1 2 3' },
        { type: 'text', text: '1' },
        { type: 'text', text: '1 2' },
        { type: 'text', text: '1 2 3' },
      ],
    },
  }, 'delta', 'run-stream');
  // Only the collapsed blocks end up on the streaming message.
  expect(harness.read().streamingMessage).toEqual({
    role: 'assistant',
    content: [
      { type: 'thinking', thinking: 'thinking 1 2 3' },
      { type: 'text', text: '1 2 3' },
    ],
  });
});
// When a tool result lands, the in-flight assistant message is snapshotted
// into the persisted message list using its normalized (collapsed) content,
// not the raw cumulative stream.
it('snapshots normalized streaming content when tool results arrive', async () => {
  const { handleRuntimeEventState } = await import('@/stores/chat/runtime-event-handlers');
  // Normalizer keeps one thinking block, the tool call, and one text block.
  normalizeStreamingMessage.mockImplementation((message: unknown) => {
    const streamed = message as { role: string; id: string; content: unknown[] };
    return {
      ...streamed,
      content: [
        { type: 'thinking', thinking: 'thinking 1 2 3' },
        { type: 'tool_use', id: 'call-1', name: 'read', input: { filePath: '/tmp/demo.md' } },
        { type: 'text', text: '1 2 3' },
      ],
    };
  });
  // Snapshot helper emits the same collapsed content as a one-element list.
  snapshotStreamingAssistantMessage.mockImplementation((currentStream: unknown) => {
    const snapshotted = currentStream as { role: string; id: string; content: unknown[] };
    return [{
      ...snapshotted,
      content: [
        { type: 'thinking', thinking: 'thinking 1 2 3' },
        { type: 'tool_use', id: 'call-1', name: 'read', input: { filePath: '/tmp/demo.md' } },
        { type: 'text', text: '1 2 3' },
      ],
    }];
  });
  // Current stream still carries cumulative duplicates of each block.
  const harness = makeHarness({
    streamingMessage: {
      role: 'assistant',
      id: 'streaming-assistant',
      content: [
        { type: 'thinking', thinking: 'thinking 1' },
        { type: 'thinking', thinking: 'thinking 1 2 3' },
        { type: 'tool_use', id: 'call-1', name: 'read', input: { filePath: '/tmp/demo.md' } },
        { type: 'text', text: '1' },
        { type: 'text', text: '1 2 3' },
      ],
    },
  });
  // Deliver the matching tool result as a final event.
  handleRuntimeEventState(harness.set as never, harness.get as never, {
    message: {
      role: 'toolResult',
      toolCallId: 'call-1',
      toolName: 'read',
      content: [{ type: 'text', text: 'done' }],
    },
  }, 'final', 'run-normalize');
  // The persisted message holds exactly the normalized snapshot.
  expect(harness.read().messages).toEqual([
    {
      role: 'assistant',
      id: 'streaming-assistant',
      content: [
        { type: 'thinking', thinking: 'thinking 1 2 3' },
        { type: 'tool_use', id: 'call-1', name: 'read', input: { filePath: '/tmp/demo.md' } },
        { type: 'text', text: '1 2 3' },
      ],
    },
  ]);
});
it('clears runtime state on aborted event', async () => {
const { handleRuntimeEventState } = await import('@/stores/chat/runtime-event-handlers');
const h = makeHarness({

View File

@@ -203,6 +203,31 @@ describe('deriveTaskSteps', () => {
]);
});
// Streaming thinking deltas are cumulative; the execution graph must surface
// a single thinking step whose detail is the latest (longest) version.
it('collapses cumulative streaming thinking details into the newest version', () => {
  const derived = deriveTaskSteps({
    messages: [],
    streamingMessage: {
      role: 'assistant',
      content: [
        { type: 'thinking', thinking: 'thinking 1' },
        { type: 'thinking', thinking: 'thinking 1 2' },
        { type: 'thinking', thinking: 'thinking 1 2 3' },
      ],
    },
    streamingTools: [],
    sending: true,
    pendingFinal: false,
    showThinking: true,
  });
  // One stream-thinking step, carrying only the newest cumulative detail.
  expect(derived).toEqual([
    expect.objectContaining({
      id: 'stream-thinking',
      detail: 'thinking 1 2 3',
    }),
  ]);
});
it('builds a branch for spawned subagents', () => {
const messages: RawMessage[] = [
{