diff --git a/components/AIAssist.tsx b/components/AIAssist.tsx
index d44d1ef..0866ddf 100644
--- a/components/AIAssist.tsx
+++ b/components/AIAssist.tsx
@@ -144,12 +144,25 @@ const LiveCanvas = memo(({ data, type, isStreaming }: { data: string, type: stri
 LiveCanvas.displayName = "LiveCanvas";
 
+const ThinkingIndicator = () => (
+  <div>
+    <span>Neural Link Thinking...</span>
+  </div>
+);
+
 // --- Helper Functions ---
 function parseStreamingContent(text: string) {
   let agent = "general";
   let preview: PreviewData | null = null;
   let chatDisplay = text.trim();
+  let status: string | null = null;
+
   const decodeHtml = (value: string) =>
     value
       .replace(/&lt;/g, "<")
       .replace(/&gt;/g, ">")
@@ -161,27 +174,6 @@ function parseStreamingContent(text: string) {
     return fenced ? fenced[1].trim() : value.trim();
   };
 
-  const jsonCandidate = text.trim();
-  if (jsonCandidate.startsWith("{") && jsonCandidate.endsWith("}")) {
-    try {
-      const parsed = JSON.parse(jsonCandidate);
-      if (parsed?.agent) agent = parsed.agent;
-      if (parsed?.preview?.data) {
-        preview = {
-          type: parsed.preview.type || "web",
-          language: parsed.preview.language || "text",
-          data: parsed.preview.data,
-          isStreaming: !text.includes("[/PREVIEW]")
-        };
-      }
-      if (typeof parsed?.content === "string") {
-        chatDisplay = parsed.content.trim();
-      }
-    } catch {
-      // Ignore malformed JSON during stream
-    }
-  }
-
   const agentMatch = text.match(/\[AGENT:([\w-]+)\]/);
   if (agentMatch) agent = agentMatch[1];
 
@@ -193,14 +185,17 @@ function parseStreamingContent(text: string) {
       data: previewMatch[3].trim(),
       isStreaming: !text.includes("[/PREVIEW]")
     };
+    if (preview.isStreaming) {
+      status = `Generating ${preview.type} artifact...`;
+    }
   }
 
-  if (/\[AGENT:|\[PREVIEW:/.test(text)) {
-    chatDisplay = text
-      .replace(/\[AGENT:[\w-]+\]/g, "")
-      .replace(/\[PREVIEW:[\w-]+:?[\w-]+?\][\s\S]*?(?:\[\/PREVIEW\]|$)/g, "")
-      .trim();
-  }
+  // Hide tags and partial tags from display
+  chatDisplay = text
+    .replace(/\[AGENT:[\w-]+\]/g, "")
+    .replace(/\[PREVIEW:[\w-]+:?[\w-]+?\][\s\S]*?(?:\[\/PREVIEW\]|$)/g, "")
+    .replace(/\[(AGENT|PREVIEW)?(?::[\w-]*)?$/g, "") // Hide partial tags at the end
+    .trim();
 
   if (!preview) {
     const fenced = text.match(/```(html|css|javascript|tsx|jsx|md|markdown)\s*([\s\S]*?)```/i);
@@ -238,11 +233,11 @@ function parseStreamingContent(text: string) {
     }
   }
 
-  if (!chatDisplay && preview) {
+  if (!chatDisplay && preview && preview.isStreaming) {
     chatDisplay = `Rendering live artifact...`;
   }
 
-  return { chatDisplay, preview, agent };
+  return { chatDisplay, preview, agent, status };
 }
 
 // --- Main Component ---
@@ -271,6 +266,8 @@ export default function AIAssist() {
   const [assistStep, setAssistStep] = useState<"idle" | "plan" | "generating" | "preview">("idle");
   const [aiPlan, setAiPlan] = useState(null);
+  const [status, setStatus] = useState<string | null>(null);
+
   const scrollRef = useRef(null);
 
   const isPreviewRenderable = (preview?: PreviewData | null) => {
     if (!preview) return false;
@@ -354,7 +351,9 @@ export default function AIAssist() {
       currentAgent,
       onChunk: (chunk) => {
         accumulated += chunk;
-        const { chatDisplay, preview, agent } = parseStreamingContent(accumulated);
+        const { chatDisplay, preview, agent, status: streamStatus } = parseStreamingContent(accumulated);
+
+        if (streamStatus) setStatus(streamStatus);
 
         // If we're in planning mode and see JSON, try to parse the plan
         if (assistStep === "plan" || assistStep === "idle") {
@@ -415,6 +414,7 @@ export default function AIAssist() {
     } finally {
       setIsProcessing(false);
       setAbortController(null);
+      setStatus(null);
     }
   };
 
@@ -627,6 +627,19 @@ export default function AIAssist() {
                    )}
+
+                  {msg.role === "assistant" && isProcessing && i === aiAssistHistory.length - 1 && status && (
+                    <div>
+                      <span>{status}</span>
+                    </div>
+                  )}
+
                    {msg.role === "assistant" ? `Agent ${msg.agent || 'core'}` : 'Explorer'}
@@ -634,6 +647,12 @@
                ))}
+
+                {isProcessing && aiAssistHistory[aiAssistHistory.length - 1]?.role === "user" && (
+                  <ThinkingIndicator />
+                )}
+
                {/* Input Area */}