/*
 * Changelog:
 * - Added Claude Code integration with full context compaction support
 * - Added OpenClaw integration with deterministic pipeline support
 * - Implemented parallel agent execution (4 projects x 3 roles pattern)
 * - Added workspace isolation with permissions and quotas
 * - Implemented Lobster-compatible YAML workflow parser
 * - Added persistent memory store for cross-session context
 * - Created comprehensive README with hero section
 *
 * This project was 100% autonomously built by Z.AI GLM-5.
 * (~310 lines, 6.7 KiB, TypeScript)
 */
/**
 * Agent System Utilities
 *
 * Helper functions and utilities for the agent system.
 */
import { randomUUID } from 'crypto';
|
|
|
|
/**
|
|
* Debounce a function
|
|
*/
|
|
export function debounce<T extends (...args: unknown[]) => unknown>(
|
|
fn: T,
|
|
delay: number
|
|
): (...args: Parameters<T>) => void {
|
|
let timeoutId: ReturnType<typeof setTimeout> | null = null;
|
|
|
|
return (...args: Parameters<T>) => {
|
|
if (timeoutId) clearTimeout(timeoutId);
|
|
timeoutId = setTimeout(() => fn(...args), delay);
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Throttle a function
|
|
*/
|
|
export function throttle<T extends (...args: unknown[]) => unknown>(
|
|
fn: T,
|
|
limit: number
|
|
): (...args: Parameters<T>) => void {
|
|
let inThrottle = false;
|
|
|
|
return (...args: Parameters<T>) => {
|
|
if (!inThrottle) {
|
|
fn(...args);
|
|
inThrottle = true;
|
|
setTimeout(() => { inThrottle = false; }, limit);
|
|
}
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Retry a function with exponential backoff
|
|
*/
|
|
export async function retry<T>(
|
|
fn: () => Promise<T>,
|
|
options: {
|
|
maxAttempts?: number;
|
|
initialDelay?: number;
|
|
maxDelay?: number;
|
|
backoffFactor?: number;
|
|
} = {}
|
|
): Promise<T> {
|
|
const {
|
|
maxAttempts = 3,
|
|
initialDelay = 1000,
|
|
maxDelay = 30000,
|
|
backoffFactor = 2
|
|
} = options;
|
|
|
|
let lastError: Error | null = null;
|
|
let delay = initialDelay;
|
|
|
|
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
|
try {
|
|
return await fn();
|
|
} catch (error) {
|
|
lastError = error instanceof Error ? error : new Error(String(error));
|
|
|
|
if (attempt < maxAttempts) {
|
|
await sleep(delay);
|
|
delay = Math.min(delay * backoffFactor, maxDelay);
|
|
}
|
|
}
|
|
}
|
|
|
|
throw lastError;
|
|
}
|
|
|
|
/**
|
|
* Sleep for a specified duration
|
|
*/
|
|
export function sleep(ms: number): Promise<void> {
|
|
return new Promise(resolve => setTimeout(resolve, ms));
|
|
}
|
|
|
|
/**
|
|
* Generate a unique ID
|
|
*/
|
|
export function generateId(prefix?: string): string {
|
|
const id = randomUUID();
|
|
return prefix ? `${prefix}-${id}` : id;
|
|
}
|
|
|
|
/**
|
|
* Deep clone an object
|
|
*/
|
|
export function deepClone<T>(obj: T): T {
|
|
return JSON.parse(JSON.stringify(obj));
|
|
}
|
|
|
|
/**
|
|
* Deep merge objects
|
|
*/
|
|
export function deepMerge<T extends Record<string, unknown>>(
|
|
target: T,
|
|
...sources: Partial<T>[]
|
|
): T {
|
|
if (!sources.length) return target;
|
|
|
|
const source = sources.shift();
|
|
|
|
if (isObject(target) && isObject(source)) {
|
|
for (const key in source) {
|
|
if (isObject(source[key])) {
|
|
if (!target[key]) {
|
|
Object.assign(target, { [key]: {} });
|
|
}
|
|
deepMerge(target[key] as Record<string, unknown>, source[key] as Record<string, unknown>);
|
|
} else {
|
|
Object.assign(target, { [key]: source[key] });
|
|
}
|
|
}
|
|
}
|
|
|
|
return deepMerge(target, ...sources);
|
|
}
|
|
|
|
/**
|
|
* Check if value is an object
|
|
*/
|
|
export function isObject(item: unknown): item is Record<string, unknown> {
|
|
return item !== null && typeof item === 'object' && !Array.isArray(item);
|
|
}
|
|
|
|
/**
|
|
* Truncate text to a maximum length
|
|
*/
|
|
export function truncate(text: string, maxLength: number, suffix = '...'): string {
|
|
if (text.length <= maxLength) return text;
|
|
return text.substring(0, maxLength - suffix.length) + suffix;
|
|
}
|
|
|
|
/**
|
|
* Format bytes to human readable string
|
|
*/
|
|
export function formatBytes(bytes: number, decimals = 2): string {
|
|
if (bytes === 0) return '0 Bytes';
|
|
|
|
const k = 1024;
|
|
const dm = decimals < 0 ? 0 : decimals;
|
|
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
|
|
|
|
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
|
|
|
return `${parseFloat((bytes / Math.pow(k, i)).toFixed(dm))} ${sizes[i]}`;
|
|
}
|
|
|
|
/**
|
|
* Format duration in milliseconds to human readable string
|
|
*/
|
|
export function formatDuration(ms: number): string {
|
|
if (ms < 1000) return `${ms}ms`;
|
|
if (ms < 60000) return `${(ms / 1000).toFixed(1)}s`;
|
|
if (ms < 3600000) return `${(ms / 60000).toFixed(1)}m`;
|
|
return `${(ms / 3600000).toFixed(1)}h`;
|
|
}
|
|
|
|
/**
|
|
* Create a rate limiter
|
|
*/
|
|
export function createRateLimiter(
|
|
maxRequests: number,
|
|
windowMs: number
|
|
): {
|
|
check: () => boolean;
|
|
reset: () => void;
|
|
getRemaining: () => number;
|
|
} {
|
|
let requests = 0;
|
|
let windowStart = Date.now();
|
|
|
|
const resetWindow = () => {
|
|
const now = Date.now();
|
|
if (now - windowStart >= windowMs) {
|
|
requests = 0;
|
|
windowStart = now;
|
|
}
|
|
};
|
|
|
|
return {
|
|
check: () => {
|
|
resetWindow();
|
|
if (requests < maxRequests) {
|
|
requests++;
|
|
return true;
|
|
}
|
|
return false;
|
|
},
|
|
reset: () => {
|
|
requests = 0;
|
|
windowStart = Date.now();
|
|
},
|
|
getRemaining: () => {
|
|
resetWindow();
|
|
return maxRequests - requests;
|
|
}
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Create a simple cache
|
|
*/
|
|
export function createCache<T>(
|
|
ttlMs: number = 60000
|
|
): {
|
|
get: (key: string) => T | undefined;
|
|
set: (key: string, value: T) => void;
|
|
delete: (key: string) => boolean;
|
|
clear: () => void;
|
|
has: (key: string) => boolean;
|
|
} {
|
|
const cache = new Map<string, { value: T; expiry: number }>();
|
|
|
|
// Cleanup expired entries periodically
|
|
const cleanup = () => {
|
|
const now = Date.now();
|
|
for (const [key, entry] of cache.entries()) {
|
|
if (now > entry.expiry) {
|
|
cache.delete(key);
|
|
}
|
|
}
|
|
};
|
|
|
|
setInterval(cleanup, ttlMs);
|
|
|
|
return {
|
|
get: (key: string) => {
|
|
const entry = cache.get(key);
|
|
if (!entry) return undefined;
|
|
if (Date.now() > entry.expiry) {
|
|
cache.delete(key);
|
|
return undefined;
|
|
}
|
|
return entry.value;
|
|
},
|
|
set: (key: string, value: T) => {
|
|
cache.set(key, {
|
|
value,
|
|
expiry: Date.now() + ttlMs
|
|
});
|
|
},
|
|
delete: (key: string) => cache.delete(key),
|
|
clear: () => cache.clear(),
|
|
has: (key: string) => {
|
|
const entry = cache.get(key);
|
|
if (!entry) return false;
|
|
if (Date.now() > entry.expiry) {
|
|
cache.delete(key);
|
|
return false;
|
|
}
|
|
return true;
|
|
}
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Compose multiple functions
|
|
*/
|
|
export function compose<T>(
|
|
...fns: Array<(arg: T) => T>
|
|
): (arg: T) => T {
|
|
return (arg: T) => fns.reduceRight((acc, fn) => fn(acc), arg);
|
|
}
|
|
|
|
/**
|
|
* Pipe value through multiple functions
|
|
*/
|
|
export function pipe<T>(
|
|
...fns: Array<(arg: T) => T>
|
|
): (arg: T) => T {
|
|
return (arg: T) => fns.reduce((acc, fn) => fn(acc), arg);
|
|
}
|
|
|
|
/**
|
|
* Chunk an array into smaller arrays
|
|
*/
|
|
export function chunk<T>(array: T[], size: number): T[][] {
|
|
const chunks: T[][] = [];
|
|
for (let i = 0; i < array.length; i += size) {
|
|
chunks.push(array.slice(i, i + size));
|
|
}
|
|
return chunks;
|
|
}
|
|
|
|
/**
|
|
* Group array items by a key
|
|
*/
|
|
export function groupBy<T, K extends string | number | symbol>(
|
|
array: T[],
|
|
keyFn: (item: T) => K
|
|
): Record<K, T[]> {
|
|
return array.reduce((acc, item) => {
|
|
const key = keyFn(item);
|
|
if (!acc[key]) acc[key] = [];
|
|
acc[key].push(item);
|
|
return acc;
|
|
}, {} as Record<K, T[]>);
|
|
}
|