From d6c2986df809e21fbc66d5b10544d7ccdb337a31 Mon Sep 17 00:00:00 2001
From: Gemini AI
Date: Sun, 28 Dec 2025 11:22:19 +0400
Subject: [PATCH] feat: complete overhaul of AI Assist with premium WOW level
 UI and stable preview engine

---
 app/api/ollama/chat/route.ts  |  16 +-
 app/api/qwen/chat/route.ts    |  22 +-
 components/AIAssist.tsx       | 905 ++++++++++++++++++----------
 components/ErrorBoundary.tsx  |  57 +++
 lib/services/model-adapter.ts |  29 ++
 lib/services/ollama-cloud.ts  |  91 ++++
 lib/services/qwen-oauth.ts    | 101 ++++
 lib/services/zai-plan.ts      |  82 +++
 types/index.ts                |   2 +-
 9 files changed, 856 insertions(+), 449 deletions(-)
 create mode 100644 components/ErrorBoundary.tsx

diff --git a/app/api/ollama/chat/route.ts b/app/api/ollama/chat/route.ts
index 6b0841c..e806071 100644
--- a/app/api/ollama/chat/route.ts
+++ b/app/api/ollama/chat/route.ts
@@ -38,15 +38,27 @@ export async function POST(request: NextRequest) {
       body: JSON.stringify(body),
     });
 
-    const payload = await response.text();
     if (!response.ok) {
+      const payload = await response.text();
       return NextResponse.json(
         { error: "Ollama chat request failed", details: payload },
         { status: response.status }
       );
     }
 
-    return NextResponse.json(payload ? JSON.parse(payload) : {});
+    // If stream is requested, pipe the response body
+    if (body.stream) {
+      return new Response(response.body, {
+        headers: {
+          "Content-Type": "application/x-ndjson",
+          "Cache-Control": "no-cache",
+          Connection: "keep-alive",
+        },
+      });
+    }
+
+    const payload = await response.json();
+    return NextResponse.json(payload);
   } catch (error) {
     console.error("Ollama chat proxy failed", error);
     return NextResponse.json(
diff --git a/app/api/qwen/chat/route.ts b/app/api/qwen/chat/route.ts
index c9adb96..d3c67c7 100644
--- a/app/api/qwen/chat/route.ts
+++ b/app/api/qwen/chat/route.ts
@@ -44,23 +44,27 @@ export async function POST(request: NextRequest) {
       }),
     });
 
-    const payload = await response.text();
     if (!response.ok) {
+      const payload = await response.text();
       return NextResponse.json(
         { error: payload || response.statusText || "Qwen chat failed" },
         { status: response.status }
       );
     }
 
-    try {
-      const data = JSON.parse(payload);
-      return NextResponse.json(data, { status: response.status });
-    } catch {
-      return NextResponse.json(
-        { error: payload || "Unexpected response format" },
-        { status: 502 }
-      );
+    // Handle streaming
+    if (stream) {
+      return new Response(response.body, {
+        headers: {
+          "Content-Type": "text/event-stream",
+          "Cache-Control": "no-cache",
+          Connection: "keep-alive",
+        },
+      });
     }
+
+    const data = await response.json();
+    return NextResponse.json(data);
   } catch (error) {
     return NextResponse.json(
       { error: "internal_server_error", message: error instanceof Error ? error.message : "Qwen chat failed" },
diff --git a/components/AIAssist.tsx b/components/AIAssist.tsx
index 10c8304..75f1d9e 100644
--- a/components/AIAssist.tsx
+++ b/components/AIAssist.tsx
@@ -1,509 +1,540 @@
 "use client";
 
-import { useState, useRef, useEffect } from "react";
+import React, { useState, useEffect, useRef, useCallback, memo } from "react";
+import {
+  MessageSquare, Send, Sparkles, Brain, Cpu, Code2, Palette, Search,
+  Terminal, Eye, Trash2, Loader2, Bot, User, X, RotateCcw,
+  CheckCircle2, Copy, Monitor, StopCircle, Maximize2, Minimize2,
+  ChevronRight, Layout, Zap, Ghost
+} from "lucide-react";
+import ReactMarkdown from "react-markdown";
+import remarkGfm from "remark-gfm";
+import rehypeHighlight from "rehype-highlight";
+import { cn } from "@/lib/utils";
+import { AIAssistMessage } from "@/types";
 import { Button } from "@/components/ui/button";
-import { Input } from "@/components/ui/input";
-import { Card, CardHeader, CardTitle, CardDescription, CardContent } from "@/components/ui/card";
+import { Card } from "@/components/ui/card";
 import { Badge } from "@/components/ui/badge";
-import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
-import { Select } from "@/components/ui/select";
+import { Input } from "@/components/ui/input";
 import useStore from "@/lib/store";
 import { translations } from "@/lib/i18n/translations";
 import modelAdapter from "@/lib/services/adapter-instance";
-import {
-  MessageSquare, Send, Sparkles, Brain, Cpu, Code2, Palette, FileText, Search,
-  BarChart, Rocket, Terminal, Eye, History, Trash2, Loader2, Bot, User,
-  Settings, Layers, AppWindow, Smartphone, Monitor, X, ArrowLeftRight, RotateCcw,
-  CheckCircle2
-} from "lucide-react";
-import { cn } from "@/lib/utils";
-import { AIAssistMessage } from "@/types";
-const AGENTS = [
-  { id: "general", label: "General Intel", icon: Bot, color: "slate" },
-  { id: "content", label: "Content Optimization", icon: FileText, color: "amber" },
-  { id: "seo", label: "SEO Analyst", icon: Search, color: "emerald" },
-  { id: "smm", label: "SMM Strategy", icon: BarChart, color: "pink" },
-  { id: "pm", label: "Project Manager", icon: Rocket, color: "indigo" },
-  { id: "code", label: "Code Architect", icon: Terminal, color: "violet" },
-  { id: "design", label: "UI/UX Designer", icon: Palette, color: "orange" },
-  { id: "web", label: "Web Dev Preview", icon: Monitor, color: "blue" },
-  { id: "app", label: "App Dev Preview", icon: Smartphone, color: "cyan" }
-];
+// --- Types ---
 
-const AIAssist = () => {
-  const { language, selectedProvider, selectedModels, setSelectedModel, apiKeys, aiAssistHistory, setAIAssistHistory } = useStore();
+interface PreviewData {
+  type: string;
+  data: string;
+  language?: string;
+  isStreaming?: boolean;
+}
+
+// --- Specialized Components ---
+
+/**
+ * A ultra-stable iframe wrapper that avoids hydration issues
+ * and provides a WOW visual experience.
+ */
+const LiveCanvas = memo(({ data, type, isStreaming }: { data: string, type: string, isStreaming: boolean }) => {
+  const iframeRef = useRef(null);
+
+  useEffect(() => {
+    if (!iframeRef.current) return;
+
+    const isHtml = data.includes("
+
+
+
+
+
+
+
+
+
+            ${data}
+
+      `;
+      iframeRef.current.srcdoc = doc;
+    }
+  }, [data, type]);
+
+  return (
+
+