// NOTE(review): this file appears damaged by an extraction/paste — the source is
// collapsed onto a few very long lines and the JSX element tags inside the
// return blocks have been stripped (e.g. `return (` is followed directly by
// `{/* Toolbar */}` with no element, and an orphaned `), role:` attribute
// fragment remains below where a <ChatMessage message={…} opener presumably was).
// The hook/selector logic below is intact; the render tree must be restored from
// version control — do NOT reconstruct it by hand from this copy.
/** * Chat Page * Native React implementation communicating with OpenClaw Gateway * via gateway:rpc IPC. Session selector, thinking toggle, and refresh * are in the toolbar; messages render with markdown + streaming. */ import { useEffect, useRef, useState } from 'react'; import { AlertCircle, Loader2, Sparkles } from 'lucide-react'; import { useChatStore, type RawMessage } from '@/stores/chat'; import { useGatewayStore } from '@/stores/gateway'; import { useAgentsStore } from '@/stores/agents'; import { LoadingSpinner } from '@/components/common/LoadingSpinner'; import { ChatMessage } from './ChatMessage'; import { ChatInput } from './ChatInput'; import { ChatToolbar } from './ChatToolbar'; import { extractImages, extractText, extractThinking, extractToolUse } from './message-utils'; import { useTranslation } from 'react-i18next'; import { cn } from '@/lib/utils'; export function Chat() { const { t } = useTranslation('chat'); const gatewayStatus = useGatewayStore((s) => s.status); const isGatewayRunning = gatewayStatus.state === 'running'; const messages = useChatStore((s) => s.messages); const loading = useChatStore((s) => s.loading); const sending = useChatStore((s) => s.sending); const error = useChatStore((s) => s.error); const showThinking = useChatStore((s) => s.showThinking); const streamingMessage = useChatStore((s) => s.streamingMessage); const streamingTools = useChatStore((s) => s.streamingTools); const pendingFinal = useChatStore((s) => s.pendingFinal); const sendMessage = useChatStore((s) => s.sendMessage); const abortRun = useChatStore((s) => s.abortRun); const clearError = useChatStore((s) => s.clearError); const fetchAgents = useAgentsStore((s) => s.fetchAgents); const cleanupEmptySession = useChatStore((s) => s.cleanupEmptySession); const messagesEndRef = useRef(null); const [streamingTimestamp, setStreamingTimestamp] = useState(0); // Load data when gateway is running. // When the store already holds messages for this session (i.e. 
// NOTE(review): the `//` comment above was wrapped mid-sentence by the paste —
// the next line starts with stray comment text (`the user // is navigating…`)
// that is no longer behind a comment marker. Restore original line breaks.
the user // is navigating *back* to Chat), use quiet mode so the existing messages // stay visible while fresh data loads in the background. This avoids // an unnecessary messages → spinner → messages flicker. useEffect(() => { return () => { // If the user navigates away without sending any messages, remove the // empty session so it doesn't linger as a ghost entry in the sidebar. cleanupEmptySession(); }; }, [cleanupEmptySession]); useEffect(() => { void fetchAgents(); }, [fetchAgents]); // Auto-scroll on new messages, streaming, or activity changes useEffect(() => { messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }); }, [messages, streamingMessage, sending, pendingFinal]); // Update timestamp when sending starts useEffect(() => { if (sending && streamingTimestamp === 0) { // eslint-disable-next-line react-hooks/set-state-in-effect setStreamingTimestamp(Date.now() / 1000); } else if (!sending && streamingTimestamp !== 0) { setStreamingTimestamp(0); } }, [sending, streamingTimestamp]); // Gateway not running block has been completely removed so the UI always renders. const streamMsg = streamingMessage && typeof streamingMessage === 'object' ? streamingMessage as unknown as { role?: string; content?: unknown; timestamp?: number } : null; const streamText = streamMsg ? extractText(streamMsg) : (typeof streamingMessage === 'string' ? streamingMessage : ''); const hasStreamText = streamText.trim().length > 0; const streamThinking = streamMsg ? extractThinking(streamMsg) : null; const hasStreamThinking = showThinking && !!streamThinking && streamThinking.trim().length > 0; const streamTools = streamMsg ? extractToolUse(streamMsg) : []; const hasStreamTools = streamTools.length > 0; const streamImages = streamMsg ? 
extractImages(streamMsg) : []; const hasStreamImages = streamImages.length > 0; const hasStreamToolStatus = streamingTools.length > 0; const shouldRenderStreaming = sending && (hasStreamText || hasStreamThinking || hasStreamTools || hasStreamImages || hasStreamToolStatus); const hasAnyStreamContent = hasStreamText || hasStreamThinking || hasStreamTools || hasStreamImages || hasStreamToolStatus; const isEmpty = messages.length === 0 && !loading && !sending; return (
{/* NOTE(review): root container element missing here — markup stripped */}
{/* Toolbar */}
{/* Messages Area */}
{loading && !sending ? (
) : isEmpty ? ( ) : ( <> {messages.map((msg, idx) => ( ))} {/* Streaming message */} {shouldRenderStreaming && ( ), role: (typeof streamMsg.role === 'string' ? streamMsg.role : 'assistant') as RawMessage['role'], content: streamMsg.content ?? streamText, timestamp: streamMsg.timestamp ?? streamingTimestamp, } : { role: 'assistant', content: streamText, timestamp: streamingTimestamp, }) as RawMessage} showThinking={showThinking} isStreaming streamingTools={streamingTools} /> )} {/* Activity indicator: waiting for next AI turn after tool execution */} {sending && pendingFinal && !shouldRenderStreaming && ( )} {/* Typing indicator when sending but no stream content yet */} {sending && !pendingFinal && !hasAnyStreamContent && ( )} )} {/* Scroll anchor */}
{/* Error bar */} {error && (

{error}

)} {/* Input Area */}
); } // ── Welcome Screen ────────────────────────────────────────────── function WelcomeScreen() { return (
{/* NOTE(review): WelcomeScreen markup stripped by the paste — only the text
    nodes and the suggestion-chip map expression below survive; the enclosing
    elements must be restored from version control */}

Welcome

Your AI assistant is ready. Start a conversation below.

{['Ask Questions', 'Creative Tasks', 'Brainstorming'].map((label, i) => ( ))}
); } // ── Typing Indicator ──────────────────────────────────────────── function TypingIndicator() { return (
{/* NOTE(review): TypingIndicator body was stripped entirely — nothing remains
    between `return (` here and the `);` on the next source line; recover the
    animated-dots markup from version control */}
); } // ── Activity Indicator (shown between tool cycles) ───────────── function ActivityIndicator({ phase }: { phase: 'tool_processing' }) { void phase; return (
{/* NOTE(review): wrapper element(s) stripped — only the status text below
    survives. `phase` is accepted but deliberately unused (`void phase`),
    presumably reserved for future phases — confirm before removing it */}
Processing tool results…
); } export default Chat;