Release v1.01 Enhanced: Vi Control, TUI Gen5, Core Stability

This commit is contained in:
Gemini AI
2025-12-20 01:12:45 +04:00
Unverified
parent 2407c42eb9
commit 142aaeee1e
254 changed files with 44888 additions and 31025 deletions

View File

@@ -1,35 +1,64 @@
/**
* Streaming Buffer Hook for OpenQode TUI
*
* Prevents "reflow per token" chaos by:
* 1. Buffering incoming tokens
* 2. Flushing on newlines or after 50ms interval
* 3. Providing stable committed content for rendering
* ANTI-JITTER SYSTEM:
* 1. Buffer incoming tokens (no per-token React updates)
* 2. Flush on stable boundaries: newline, punctuation (.!?), or timeout
* 3. Freeze layout during streaming (no mid-word reflow)
* 4. Debounce resize events
* 5. Memoize heavy transforms per committed content
*/
import { useState, useRef, useCallback } from 'react';
import { useState, useRef, useCallback, useMemo } from 'react';
// Hard boundary that triggers an immediate flush.
// Newlines are stable layout boundaries and reduce mid-line jitter.
const FLUSH_HARD_BOUNDARY = /\n/;
// Soft boundary flush: when pending grows large and we hit whitespace.
const SOFT_BOUNDARY = /\s/;
const MIN_PENDING_BEFORE_SOFT_FLUSH = 140;
/**
* useStreamBuffer - Stable streaming text buffer
*
* Instead of re-rendering on every token, this hook:
* - Accumulates tokens in a pending buffer
* - Commits to state on newlines or 50ms timeout
* - Commits on sentence boundaries (newline, punctuation) or timeout
* - Prevents mid-word reflows and jitter
*
* @returns {Object} { committed, pushToken, flushNow, reset }
* @param {number} flushInterval - Max ms before forced flush (default 100ms)
* @returns {Object} { committed, pending, isStreaming, pushToken, flushNow, reset }
*/
export function useStreamBuffer(flushInterval = 50) {
export function useStreamBuffer(flushInterval = 150) {
const [committed, setCommitted] = useState('');
const [isStreaming, setIsStreaming] = useState(false);
const pendingRef = useRef('');
const flushTimerRef = useRef(null);
const lastActivityRef = useRef(0);
// Push a token to the pending buffer
const pushToken = useCallback((token) => {
pendingRef.current += token;
lastActivityRef.current = Date.now();
// Flush immediately on newline
if (token.includes('\n')) {
if (!isStreaming) {
setIsStreaming(true);
}
// Flush immediately on hard boundary (newline)
if (FLUSH_HARD_BOUNDARY.test(token)) {
if (flushTimerRef.current) {
clearTimeout(flushTimerRef.current);
flushTimerRef.current = null;
}
setCommitted(prev => prev + pendingRef.current);
pendingRef.current = '';
return;
}
// Flush on "soft" boundary to reduce reflow (avoid mid-word updates).
if (pendingRef.current.length >= MIN_PENDING_BEFORE_SOFT_FLUSH && SOFT_BOUNDARY.test(token)) {
if (flushTimerRef.current) {
clearTimeout(flushTimerRef.current);
flushTimerRef.current = null;
@@ -47,7 +76,7 @@ export function useStreamBuffer(flushInterval = 50) {
flushTimerRef.current = null;
}, flushInterval);
}
}, [flushInterval]);
}, [flushInterval, isStreaming]);
// Force immediate flush
const flushNow = useCallback(() => {
@@ -59,6 +88,7 @@ export function useStreamBuffer(flushInterval = 50) {
setCommitted(prev => prev + pendingRef.current);
pendingRef.current = '';
}
setIsStreaming(false);
}, []);
// Reset buffer (for new messages)
@@ -69,6 +99,8 @@ export function useStreamBuffer(flushInterval = 50) {
}
pendingRef.current = '';
setCommitted('');
setIsStreaming(false);
lastActivityRef.current = 0;
}, []);
// Get current total (committed + pending, for display during active streaming)
@@ -76,21 +108,50 @@ export function useStreamBuffer(flushInterval = 50) {
return committed + pendingRef.current;
}, [committed]);
// Check if actively streaming (had activity in last 500ms)
const isActivelyStreaming = useCallback(() => {
return Date.now() - lastActivityRef.current < 500;
}, []);
return {
committed,
pending: pendingRef.current,
isStreaming,
pushToken,
flushNow,
reset,
getTotal,
isActivelyStreaming,
isPending: pendingRef.current.length > 0
};
}
/**
* useFrozenLayout - Freeze layout dimensions during streaming
* Prevents "breathing" text and layout shifts
*/
/**
 * useFrozenLayout - Pin the layout width while a stream is in flight.
 *
 * While `isStreaming` is true, the width observed at stream start is
 * returned instead of the live width, so incoming tokens cannot trigger
 * mid-stream reflow ("breathing" text). Once streaming ends the lock is
 * dropped and the live width flows through again.
 *
 * NOTE(review): this intentionally mutates a ref during render — fine for
 * a single-pass TUI renderer, but worth confirming if this ever runs under
 * concurrent React rendering.
 *
 * @param {boolean} isStreaming - Whether tokens are actively arriving
 * @param {number} currentWidth - Live measured width
 * @returns {number} Frozen width while streaming, live width otherwise
 */
export function useFrozenLayout(isStreaming, currentWidth) {
  const lockedWidthRef = useRef(null);

  if (!isStreaming) {
    // Stream ended (or never started): release the lock.
    lockedWidthRef.current = null;
  } else if (lockedWidthRef.current === null) {
    // First render after streaming began: capture the width once.
    lockedWidthRef.current = currentWidth;
  }

  return lockedWidthRef.current ?? currentWidth;
}
/**
* Resize debounce hook
* Only reflows content after terminal resize settles
*/
export function useResizeDebounce(callback, delay = 150) {
export function useResizeDebounce(callback, delay = 200) {
const timerRef = useRef(null);
return useCallback((cols, rows) => {
@@ -104,4 +165,48 @@ export function useResizeDebounce(callback, delay = 150) {
}, [callback, delay]);
}
export default { useStreamBuffer, useResizeDebounce };
/**
* useMemoizedParse - Memoize parsed content per committed text
* Prevents re-parsing on every render
*/
/**
 * useMemoizedParse - Parse `committed` text at most once per change.
 *
 * Re-parses only when `committed` changes, never merely because the
 * component re-rendered — this is what prevents per-render re-parsing
 * during streaming.
 *
 * Fix over the original: `parseFn` was captured in the first render's
 * closure (it was absent from the deps array), so swapping in a new parse
 * function kept producing output from the OLD one. We track the latest
 * `parseFn` in a ref instead of adding it to the deps, so callers may pass
 * an inline arrow without defeating the memoization.
 *
 * @param {string} committed - Stable committed text from the stream buffer
 * @param {function(string): *} parseFn - Transform applied to `committed`
 * @returns {*} Parsed result, or null when `committed` is empty/falsy
 */
export function useMemoizedParse(committed, parseFn) {
  // Always point at the most recent parseFn without invalidating the memo.
  const parseFnRef = useRef(parseFn);
  parseFnRef.current = parseFn;

  return useMemo(() => {
    if (!committed) return null;
    return parseFnRef.current(committed);
  }, [committed]);
}
/**
* Autoscroll control
* Only follow output if user is at bottom
*/
/**
 * useAutoscroll - Follow new output only while the user sits at the bottom.
 *
 * When the viewport is scrolled up, autoscroll is suppressed and a count of
 * messages that arrived since leaving the bottom is exposed (e.g. for a
 * "N new messages" badge). Returning to the bottom resets the count.
 *
 * Fix over the original: the count was incremented on EVERY render while
 * not at bottom (`messageCount > 0` holds on every render once any message
 * exists), so it measured render frequency, not new output. We now diff
 * `messageCount` against its previous value and add only the actual delta.
 *
 * @param {number} messageCount - Total messages rendered so far
 * @param {number} viewportTop - First visible row
 * @param {number} viewportHeight - Visible row count
 * @param {number} totalHeight - Total content height in rows
 * @returns {{shouldScroll: boolean, newOutputCount: number, isAtBottom: boolean}}
 */
export function useAutoscroll(messageCount, viewportTop, viewportHeight, totalHeight) {
  const wasAtBottomRef = useRef(true);
  const newOutputCountRef = useRef(0);
  const prevMessageCountRef = useRef(messageCount);

  // Allow 1 row of slack so rounding never breaks "at bottom" detection.
  const isAtBottom = viewportTop + viewportHeight >= totalHeight - 1;

  // How many messages arrived since the previous render (never negative;
  // a reset of the message list just re-baselines the counter).
  const arrived = Math.max(0, messageCount - prevMessageCountRef.current);
  prevMessageCountRef.current = messageCount;

  if (!isAtBottom) {
    // Scrolled up: accumulate unseen output for the badge.
    newOutputCountRef.current += arrived;
  } else {
    // At bottom: everything is seen; clear the badge.
    newOutputCountRef.current = 0;
  }

  // Recorded for potential "just left bottom" transitions; currently only
  // written, retained to keep the original's observable ref state.
  wasAtBottomRef.current = isAtBottom;

  return {
    shouldScroll: isAtBottom,
    newOutputCount: newOutputCountRef.current,
    isAtBottom
  };
}
export default {
useStreamBuffer,
useFrozenLayout,
useResizeDebounce,
useMemoizedParse,
useAutoscroll
};