feat: Add intelligent auto-router and enhanced integrations
- Add intelligent-router.sh hook for automatic agent routing
- Add AUTO-TRIGGER-SUMMARY.md documentation
- Add FINAL-INTEGRATION-SUMMARY.md documentation
- Complete Prometheus integration (6 commands + 4 tools)
- Complete Dexto integration (12 commands + 5 tools)
- Enhanced Ralph with access to all agents
- Fix /clawd command (removed disable-model-invocation)
- Update hooks.json to v5 with intelligent routing
- 291 total skills now available
- All 21 commands with automatic routing

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
72
dexto/packages/core/src/utils/api-key-resolver.ts
Normal file
72
dexto/packages/core/src/utils/api-key-resolver.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import type { LLMProvider } from '../llm/types.js';
|
||||
|
||||
/**
|
||||
* Utility for resolving API keys from environment variables.
|
||||
* This consolidates the API key resolution logic used across CLI and core components.
|
||||
*/
|
||||
|
||||
// Map the provider to its corresponding API key name (in order of preference)
|
||||
export const PROVIDER_API_KEY_MAP: Record<LLMProvider, string[]> = {
|
||||
openai: ['OPENAI_API_KEY', 'OPENAI_KEY'],
|
||||
'openai-compatible': ['OPENAI_API_KEY', 'OPENAI_KEY'], // Uses same keys as openai
|
||||
anthropic: ['ANTHROPIC_API_KEY', 'ANTHROPIC_KEY', 'CLAUDE_API_KEY'],
|
||||
google: ['GOOGLE_GENERATIVE_AI_API_KEY', 'GOOGLE_API_KEY', 'GEMINI_API_KEY'],
|
||||
groq: ['GROQ_API_KEY'],
|
||||
cohere: ['COHERE_API_KEY'],
|
||||
xai: ['XAI_API_KEY', 'X_AI_API_KEY'],
|
||||
openrouter: ['OPENROUTER_API_KEY'],
|
||||
litellm: ['LITELLM_API_KEY', 'LITELLM_KEY'],
|
||||
glama: ['GLAMA_API_KEY'],
|
||||
// Vertex uses ADC (Application Default Credentials), not API keys
|
||||
// GOOGLE_APPLICATION_CREDENTIALS points to service account JSON (optional)
|
||||
// Primary config is GOOGLE_VERTEX_PROJECT (required) + GOOGLE_VERTEX_LOCATION (optional)
|
||||
vertex: [],
|
||||
// Bedrock supports two auth methods:
|
||||
// 1. AWS_BEARER_TOKEN_BEDROCK - Bedrock API key (simplest)
|
||||
// 2. AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY + AWS_REGION (IAM credentials)
|
||||
// AWS_SESSION_TOKEN (optional, for temporary credentials)
|
||||
bedrock: ['AWS_BEARER_TOKEN_BEDROCK'],
|
||||
// Local providers don't require API keys
|
||||
local: [], // Native node-llama-cpp execution
|
||||
ollama: [], // Ollama server (no authentication required)
|
||||
// Dexto gateway - requires key from `dexto login`
|
||||
dexto: ['DEXTO_API_KEY'],
|
||||
// perplexity: ['PERPLEXITY_API_KEY'],
|
||||
// together: ['TOGETHER_API_KEY'],
|
||||
// fireworks: ['FIREWORKS_API_KEY'],
|
||||
// deepseek: ['DEEPSEEK_API_KEY'],
|
||||
};
|
||||
|
||||
/**
|
||||
* Resolves API key for a given provider from environment variables.
|
||||
*
|
||||
* @param provider The LLM provider
|
||||
* @returns Resolved API key or undefined if not found
|
||||
*/
|
||||
export function resolveApiKeyForProvider(provider: LLMProvider): string | undefined {
|
||||
const envVars = PROVIDER_API_KEY_MAP[provider];
|
||||
if (!envVars) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Try each environment variable in order of preference
|
||||
for (const envVar of envVars) {
|
||||
const value = process.env[envVar];
|
||||
if (value && value.trim()) {
|
||||
return value.trim();
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the primary environment variable name for a provider (for display/error messages).
|
||||
*
|
||||
* @param provider The LLM provider
|
||||
* @returns Primary environment variable name
|
||||
*/
|
||||
export function getPrimaryApiKeyEnvVar(provider: LLMProvider): string {
|
||||
const envVars = PROVIDER_API_KEY_MAP[provider];
|
||||
return envVars?.[0] || `${provider.toUpperCase()}_API_KEY`;
|
||||
}
|
||||
97
dexto/packages/core/src/utils/async-context.ts
Normal file
97
dexto/packages/core/src/utils/async-context.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
// TODO: Add fallback strategy for non-Node.js environments (browsers, edge workers)
|
||||
// For now, this will work in Node.js (CLI API server, standalone deployments).
|
||||
// Future: Consider session metadata fallback when AsyncLocalStorage is unavailable.
|
||||
|
||||
import { AsyncLocalStorage } from 'async_hooks';
|
||||
|
||||
/**
|
||||
* Context data stored in AsyncLocalStorage
|
||||
* Used for multi-tenant deployments to propagate tenant/user information
|
||||
*/
|
||||
export interface AsyncContext {
|
||||
/** Tenant ID for multi-tenant deployments */
|
||||
tenantId?: string;
|
||||
|
||||
/** User ID for tracking which user is making the request */
|
||||
userId?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* AsyncLocalStorage instance for storing request context
|
||||
* This automatically propagates across async boundaries in Node.js
|
||||
*/
|
||||
const asyncContext = new AsyncLocalStorage<AsyncContext>();
|
||||
|
||||
/**
|
||||
* Set the current async context
|
||||
* Should be called at the entry point of a request (e.g., Express middleware)
|
||||
*
|
||||
* @param ctx - Context to set
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // In Express middleware
|
||||
* app.use((req, res, next) => {
|
||||
* const { tenantId, userId } = extractAuthFromRequest(req);
|
||||
* setContext({ tenantId, userId });
|
||||
* next();
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export function setContext(ctx: AsyncContext): void {
|
||||
asyncContext.enterWith(ctx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current async context
|
||||
* Returns undefined if no context is set
|
||||
*
|
||||
* @returns Current context or undefined
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // In plugin or service
|
||||
* const ctx = getContext();
|
||||
* if (ctx?.tenantId) {
|
||||
* // Use tenant ID for scoped operations
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function getContext(): AsyncContext | undefined {
|
||||
return asyncContext.getStore();
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a function with a specific context
|
||||
* Useful for testing or when you need to override context temporarily
|
||||
*
|
||||
* @param ctx - Context to run with
|
||||
* @param fn - Function to execute
|
||||
* @returns Result of the function
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* await runWithContext({ tenantId: 'test-tenant' }, async () => {
|
||||
* // This code runs with the specified context
|
||||
* await someOperation();
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export async function runWithContext<T>(ctx: AsyncContext, fn: () => Promise<T>): Promise<T> {
|
||||
return asyncContext.run(ctx, fn);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if AsyncLocalStorage is available in the current environment
|
||||
* Returns false in non-Node.js environments (browsers, edge workers)
|
||||
*
|
||||
* @returns true if AsyncLocalStorage is available
|
||||
*/
|
||||
export function isAsyncContextAvailable(): boolean {
|
||||
try {
|
||||
// Check if async_hooks module exists
|
||||
return typeof AsyncLocalStorage !== 'undefined';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
13
dexto/packages/core/src/utils/debug.ts
Normal file
13
dexto/packages/core/src/utils/debug.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
export function shouldIncludeRawToolResult(): boolean {
|
||||
const flag = process.env.DEXTO_DEBUG_TOOL_RESULT_RAW;
|
||||
if (!flag) return false;
|
||||
switch (flag.trim().toLowerCase()) {
|
||||
case '1':
|
||||
case 'true':
|
||||
case 'yes':
|
||||
case 'on':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
207
dexto/packages/core/src/utils/defer.test.ts
Normal file
207
dexto/packages/core/src/utils/defer.test.ts
Normal file
@@ -0,0 +1,207 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { defer } from './defer.js';
|
||||
|
||||
describe('defer', () => {
|
||||
describe('sync dispose (using keyword)', () => {
|
||||
it('should call cleanup on normal scope exit', () => {
|
||||
const cleanup = vi.fn();
|
||||
|
||||
function testScope(): void {
|
||||
using _ = defer(cleanup);
|
||||
// Normal exit
|
||||
}
|
||||
|
||||
testScope();
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should call cleanup on early return', () => {
|
||||
const cleanup = vi.fn();
|
||||
|
||||
function testScope(): string {
|
||||
using _ = defer(cleanup);
|
||||
return 'early';
|
||||
}
|
||||
|
||||
const result = testScope();
|
||||
expect(result).toBe('early');
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should call cleanup on throw', () => {
|
||||
const cleanup = vi.fn();
|
||||
|
||||
function testScope(): void {
|
||||
using _ = defer(cleanup);
|
||||
throw new Error('test error');
|
||||
}
|
||||
|
||||
expect(() => testScope()).toThrow('test error');
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should execute multiple defers in LIFO order', () => {
|
||||
const order: number[] = [];
|
||||
|
||||
function testScope(): void {
|
||||
using _a = defer(() => {
|
||||
order.push(1);
|
||||
});
|
||||
using _b = defer(() => {
|
||||
order.push(2);
|
||||
});
|
||||
using _c = defer(() => {
|
||||
order.push(3);
|
||||
});
|
||||
}
|
||||
|
||||
testScope();
|
||||
expect(order).toEqual([3, 2, 1]);
|
||||
});
|
||||
|
||||
it('should handle async cleanup function in sync context', async () => {
|
||||
const cleanup = vi.fn().mockResolvedValue(undefined);
|
||||
const consoleError = vi.spyOn(console, 'error').mockImplementation(() => {});
|
||||
|
||||
function testScope(): void {
|
||||
using _ = defer(cleanup);
|
||||
}
|
||||
|
||||
testScope();
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Give time for any promise rejections to surface
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
consoleError.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('async dispose (await using keyword)', () => {
|
||||
it('should call async cleanup on normal scope exit', async () => {
|
||||
const cleanup = vi.fn().mockResolvedValue(undefined);
|
||||
|
||||
async function testScope(): Promise<void> {
|
||||
await using _ = defer(cleanup);
|
||||
// Normal exit
|
||||
}
|
||||
|
||||
await testScope();
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should call async cleanup on throw', async () => {
|
||||
const cleanup = vi.fn().mockResolvedValue(undefined);
|
||||
|
||||
async function testScope(): Promise<void> {
|
||||
await using _ = defer(cleanup);
|
||||
throw new Error('async error');
|
||||
}
|
||||
|
||||
await expect(testScope()).rejects.toThrow('async error');
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should await async cleanup function', async () => {
|
||||
let cleanupCompleted = false;
|
||||
const cleanup = vi.fn(async () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
cleanupCompleted = true;
|
||||
});
|
||||
|
||||
async function testScope(): Promise<void> {
|
||||
await using _ = defer(cleanup);
|
||||
}
|
||||
|
||||
await testScope();
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
expect(cleanupCompleted).toBe(true);
|
||||
});
|
||||
|
||||
it('should execute multiple async defers in LIFO order', async () => {
|
||||
const order: number[] = [];
|
||||
|
||||
async function testScope(): Promise<void> {
|
||||
await using _a = defer(async () => {
|
||||
order.push(1);
|
||||
});
|
||||
await using _b = defer(async () => {
|
||||
order.push(2);
|
||||
});
|
||||
await using _c = defer(async () => {
|
||||
order.push(3);
|
||||
});
|
||||
}
|
||||
|
||||
await testScope();
|
||||
expect(order).toEqual([3, 2, 1]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Symbol.dispose interface', () => {
|
||||
it('should implement Symbol.dispose', () => {
|
||||
const cleanup = vi.fn();
|
||||
const deferred = defer(cleanup);
|
||||
|
||||
expect(typeof deferred[Symbol.dispose]).toBe('function');
|
||||
|
||||
deferred[Symbol.dispose]();
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should implement Symbol.asyncDispose', async () => {
|
||||
const cleanup = vi.fn().mockResolvedValue(undefined);
|
||||
const deferred = defer(cleanup);
|
||||
|
||||
expect(typeof deferred[Symbol.asyncDispose]).toBe('function');
|
||||
|
||||
await deferred[Symbol.asyncDispose]();
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('error handling', () => {
|
||||
it('should propagate errors from sync cleanup in sync context', () => {
|
||||
const cleanup = vi.fn(() => {
|
||||
throw new Error('cleanup error');
|
||||
});
|
||||
|
||||
function testScope(): void {
|
||||
using _ = defer(cleanup);
|
||||
}
|
||||
|
||||
expect(() => testScope()).toThrow('cleanup error');
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should propagate errors from async cleanup in async context', async () => {
|
||||
const cleanup = vi.fn().mockRejectedValue(new Error('async cleanup error'));
|
||||
|
||||
async function testScope(): Promise<void> {
|
||||
await using _ = defer(cleanup);
|
||||
}
|
||||
|
||||
await expect(testScope()).rejects.toThrow('async cleanup error');
|
||||
expect(cleanup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should log error when async cleanup fails in sync context', async () => {
|
||||
const consoleError = vi.spyOn(console, 'error').mockImplementation(() => {});
|
||||
const cleanup = vi.fn().mockRejectedValue(new Error('async fail'));
|
||||
|
||||
function testScope(): void {
|
||||
using _ = defer(cleanup);
|
||||
}
|
||||
|
||||
testScope();
|
||||
|
||||
// Wait for the promise rejection to be caught
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
expect(consoleError).toHaveBeenCalledWith(
|
||||
'Deferred async cleanup failed (used sync dispose):',
|
||||
expect.any(Error)
|
||||
);
|
||||
consoleError.mockRestore();
|
||||
});
|
||||
});
|
||||
});
|
||||
81
dexto/packages/core/src/utils/defer.ts
Normal file
81
dexto/packages/core/src/utils/defer.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
/**
|
||||
* TC39 Explicit Resource Management pattern.
|
||||
* Similar to Go's `defer`, Python's `with`, C#'s `using`.
|
||||
*
|
||||
* Benefits:
|
||||
* - Can't forget cleanup (automatic on scope exit)
|
||||
* - Works with early returns, throws, aborts
|
||||
* - Multiple defers execute in LIFO order
|
||||
* - Cleaner than try/finally chains
|
||||
*
|
||||
* @see https://github.com/tc39/proposal-explicit-resource-management
|
||||
*/
|
||||
|
||||
/**
|
||||
* Type for cleanup functions - can be sync or async.
|
||||
*/
|
||||
export type CleanupFunction = () => void | Promise<void>;
|
||||
|
||||
/**
|
||||
* Return type for defer() - implements both Disposable and AsyncDisposable.
|
||||
*/
|
||||
export interface DeferredCleanup extends Disposable, AsyncDisposable {
|
||||
[Symbol.dispose]: () => void;
|
||||
[Symbol.asyncDispose]: () => Promise<void>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a deferred cleanup resource.
|
||||
*
|
||||
* When used with the `using` keyword, the cleanup function is automatically
|
||||
* called when the enclosing scope exits - whether normally, via return,
|
||||
* or via thrown exception.
|
||||
*
|
||||
* @param cleanupFn - The function to call on cleanup. Can be sync or async.
|
||||
* @returns A disposable resource for use with `using` keyword
|
||||
*
|
||||
* @example Synchronous cleanup
|
||||
* ```typescript
|
||||
* function processData(): void {
|
||||
* using _ = defer(() => console.log('cleanup'));
|
||||
* // ... work ...
|
||||
* // 'cleanup' is logged when scope exits
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* @example Async cleanup with await using
|
||||
* ```typescript
|
||||
* async function execute(): Promise<void> {
|
||||
* await using _ = defer(async () => {
|
||||
* await closeConnection();
|
||||
* });
|
||||
* // ... work ...
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* @example Multiple defers (LIFO order)
|
||||
* ```typescript
|
||||
* function example(): void {
|
||||
* using a = defer(() => console.log('first'));
|
||||
* using b = defer(() => console.log('second'));
|
||||
* // Logs: 'second' then 'first' (LIFO)
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function defer(cleanupFn: CleanupFunction): DeferredCleanup {
|
||||
return {
|
||||
[Symbol.dispose](): void {
|
||||
const result = cleanupFn();
|
||||
// If cleanup returns a promise in sync context, fire-and-forget with error logging
|
||||
if (result instanceof Promise) {
|
||||
result.catch((err) => {
|
||||
console.error('Deferred async cleanup failed (used sync dispose):', err);
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
[Symbol.asyncDispose](): Promise<void> {
|
||||
return Promise.resolve(cleanupFn());
|
||||
},
|
||||
};
|
||||
}
|
||||
81
dexto/packages/core/src/utils/error-conversion.ts
Normal file
81
dexto/packages/core/src/utils/error-conversion.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
/**
|
||||
* Utility functions for converting various error types to proper Error instances
|
||||
* with meaningful messages instead of "[object Object]"
|
||||
*/
|
||||
|
||||
import { safeStringify } from './safe-stringify.js';
|
||||
import type { IDextoLogger } from '../logger/v2/types.js';
|
||||
|
||||
/**
|
||||
* Converts any error value to an Error instance with a meaningful message
|
||||
*
|
||||
* @param error - The error value to convert (can be Error, object, string, etc.)
|
||||
* @returns Error instance with extracted or serialized message
|
||||
*/
|
||||
export function toError(error: unknown, logger: IDextoLogger): Error {
|
||||
if (error instanceof Error) {
|
||||
logger.info(`error is already an Error: ${error.message}`);
|
||||
return error;
|
||||
}
|
||||
|
||||
if (error && typeof error === 'object') {
|
||||
const errorObj = error as any;
|
||||
|
||||
// Handle Vercel AI SDK error format: parse error.error.responseBody JSON
|
||||
// TODO: this is a workaround because vercel's ai sdk errors returned are untyped garbage
|
||||
// Summary: onError callback returns this weird shape. If we try to catch the promise.all block
|
||||
// we get a useless error (added comments near the catch block).
|
||||
// Improve this once vercel ai sdk errors are typed properly.
|
||||
|
||||
// Handle Vercel AI SDK error format: error.error.data.error.message
|
||||
if (errorObj.error?.data?.error?.message) {
|
||||
logger.info(
|
||||
`Extracted error from error.error.data.error.message: ${errorObj.error.data.error.message}`
|
||||
);
|
||||
return new Error(errorObj.error.data.error.message, { cause: error });
|
||||
}
|
||||
|
||||
if (errorObj.error?.responseBody && typeof errorObj.error.responseBody === 'string') {
|
||||
try {
|
||||
const parsed = JSON.parse(errorObj.error.responseBody);
|
||||
if (parsed?.error?.message) {
|
||||
logger.info(
|
||||
`Extracted error from error.error.responseBody: ${parsed.error.message}`
|
||||
);
|
||||
return new Error(parsed.error.message, { cause: error });
|
||||
}
|
||||
} catch {
|
||||
logger.info(`Failed to parse error.error.responseBody as JSON`);
|
||||
// Failed to parse, continue to other checks
|
||||
}
|
||||
}
|
||||
|
||||
// Try to extract meaningful message from error object
|
||||
if ('message' in error && typeof (error as { message?: unknown }).message === 'string') {
|
||||
return new Error((error as { message: string }).message, { cause: error });
|
||||
}
|
||||
if ('error' in error && typeof (error as { error?: unknown }).error === 'string') {
|
||||
return new Error((error as { error: string }).error, { cause: error });
|
||||
}
|
||||
if ('details' in error && typeof (error as { details?: unknown }).details === 'string') {
|
||||
return new Error((error as { details: string }).details, { cause: error });
|
||||
}
|
||||
if (
|
||||
'description' in error &&
|
||||
typeof (error as { description?: unknown }).description === 'string'
|
||||
) {
|
||||
return new Error((error as { description: string }).description, { cause: error });
|
||||
}
|
||||
// Fallback to safe serialization for complex objects
|
||||
const serialized = safeStringify(error); // Uses truncation + circular-ref safety
|
||||
logger.info(`falling back to safe serialization for complex objects: ${serialized}`);
|
||||
return new Error(serialized);
|
||||
}
|
||||
|
||||
if (typeof error === 'string') {
|
||||
return new Error(error, { cause: error });
|
||||
}
|
||||
|
||||
// For primitives and other types
|
||||
return new Error(String(error), { cause: error as unknown });
|
||||
}
|
||||
93
dexto/packages/core/src/utils/execution-context.ts
Normal file
93
dexto/packages/core/src/utils/execution-context.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
// packages/core/src/utils/execution-context.ts
|
||||
// TODO: (migration) This file is duplicated in @dexto/agent-management for short-term compatibility
|
||||
// Remove from core once all services accept paths via initialization options
|
||||
|
||||
import { walkUpDirectories } from './fs-walk.js';
|
||||
import { readFileSync } from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
export type ExecutionContext = 'dexto-source' | 'dexto-project' | 'global-cli';
|
||||
|
||||
/**
|
||||
* Check if directory is the dexto source code itself
|
||||
* @param dirPath Directory to check
|
||||
* @returns True if directory contains the dexto source monorepo (top-level).
|
||||
*/
|
||||
function isDextoSourceDirectory(dirPath: string): boolean {
|
||||
const packageJsonPath = path.join(dirPath, 'package.json');
|
||||
|
||||
try {
|
||||
const pkg = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
|
||||
// Monorepo root must be named 'dexto-monorepo'. No other names are treated as source root.
|
||||
return pkg.name === 'dexto-monorepo';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if directory is a project that uses dexto as dependency (but is not dexto source)
|
||||
* @param dirPath Directory to check
|
||||
* @returns True if directory has dexto as dependency but is not dexto source
|
||||
*/
|
||||
function isDextoProjectDirectory(dirPath: string): boolean {
|
||||
const packageJsonPath = path.join(dirPath, 'package.json');
|
||||
|
||||
try {
|
||||
const pkg = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
|
||||
|
||||
// Not internal dexto packages themselves
|
||||
if (pkg.name === 'dexto' || pkg.name === '@dexto/core' || pkg.name === '@dexto/webui') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if has dexto or @dexto/core as dependency
|
||||
const allDeps = {
|
||||
...(pkg.dependencies ?? {}),
|
||||
...(pkg.devDependencies ?? {}),
|
||||
...(pkg.peerDependencies ?? {}),
|
||||
};
|
||||
|
||||
return 'dexto' in allDeps || '@dexto/core' in allDeps;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find dexto source root directory
|
||||
* @param startPath Starting directory path
|
||||
* @returns Dexto source root directory or null if not found
|
||||
*/
|
||||
export function findDextoSourceRoot(startPath: string = process.cwd()): string | null {
|
||||
return walkUpDirectories(startPath, isDextoSourceDirectory);
|
||||
}
|
||||
|
||||
/**
|
||||
* Find dexto project root directory (projects using dexto as dependency)
|
||||
* @param startPath Starting directory path
|
||||
* @returns Dexto project root directory or null if not found
|
||||
*/
|
||||
export function findDextoProjectRoot(startPath: string = process.cwd()): string | null {
|
||||
return walkUpDirectories(startPath, isDextoProjectDirectory);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect current execution context - standardized across codebase
|
||||
* @param startPath Starting directory path (defaults to process.cwd())
|
||||
* @returns Execution context
|
||||
*/
|
||||
export function getExecutionContext(startPath: string = process.cwd()): ExecutionContext {
|
||||
// Check for Dexto source context first (most specific)
|
||||
if (findDextoSourceRoot(startPath)) {
|
||||
return 'dexto-source';
|
||||
}
|
||||
|
||||
// Check for Dexto project context
|
||||
if (findDextoProjectRoot(startPath)) {
|
||||
return 'dexto-project';
|
||||
}
|
||||
|
||||
// Default to global CLI context
|
||||
return 'global-cli';
|
||||
}
|
||||
30
dexto/packages/core/src/utils/fs-walk.ts
Normal file
30
dexto/packages/core/src/utils/fs-walk.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
// TODO: (migration) This file is duplicated in @dexto/agent-management for short-term compatibility
|
||||
// Remove from core once path utilities are fully migrated
|
||||
|
||||
import * as path from 'path';
|
||||
|
||||
/**
|
||||
* Generic directory walker that searches up the directory tree
|
||||
* @param startPath Starting directory path
|
||||
* @param predicate Function that returns true when the desired condition is found
|
||||
* @returns The directory path where the condition was met, or null if not found
|
||||
*/
|
||||
export function walkUpDirectories(
|
||||
startPath: string,
|
||||
predicate: (dirPath: string) => boolean
|
||||
): string | null {
|
||||
let currentPath = path.resolve(startPath);
|
||||
const rootPath = path.parse(currentPath).root;
|
||||
|
||||
while (true) {
|
||||
if (predicate(currentPath)) {
|
||||
return currentPath;
|
||||
}
|
||||
if (currentPath === rootPath) break;
|
||||
const parent = path.dirname(currentPath);
|
||||
if (parent === currentPath) break; // safety for exotic paths
|
||||
currentPath = parent;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
26
dexto/packages/core/src/utils/index.ts
Normal file
26
dexto/packages/core/src/utils/index.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
// TODO: (migration) path.js, execution-context.js, fs-walk.js, env-file.js
|
||||
// are duplicated in @dexto/agent-management for Node-specific environment management.
|
||||
// Core still needs these for FilePromptProvider, MCPClient, and FileContributor functionality.
|
||||
// These will remain in core until we refactor those features to be dependency-injected.
|
||||
|
||||
export * from './path.js';
|
||||
export * from './service-initializer.js';
|
||||
export * from './zod-schema-converter.js';
|
||||
export * from './result.js';
|
||||
export * from './error-conversion.js';
|
||||
export * from './execution-context.js';
|
||||
export * from './fs-walk.js';
|
||||
export * from './redactor.js';
|
||||
export * from './debug.js';
|
||||
export * from './safe-stringify.js';
|
||||
export * from './api-key-resolver.js';
|
||||
export * from './defer.js';
|
||||
export * from './async-context.js';
|
||||
|
||||
// API key STORAGE has been moved to @dexto/agent-management
|
||||
// These functions write to .env files and are CLI/server concerns, not core runtime
|
||||
// Import from '@dexto/agent-management' instead:
|
||||
// - updateEnvFile
|
||||
// - saveProviderApiKey
|
||||
// - getProviderKeyStatus
|
||||
// - listProviderKeyStatus
|
||||
392
dexto/packages/core/src/utils/path.test.ts
Normal file
392
dexto/packages/core/src/utils/path.test.ts
Normal file
@@ -0,0 +1,392 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { tmpdir, homedir } from 'os';
|
||||
import { getDextoPath, getDextoGlobalPath, getDextoEnvPath, findPackageRoot } from './path.js';
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
|
||||
function createTempDir() {
|
||||
return fs.mkdtempSync(path.join(tmpdir(), 'dexto-test-'));
|
||||
}
|
||||
|
||||
function createTempDirStructure(structure: Record<string, any>, baseDir?: string): string {
|
||||
const tempDir = baseDir || createTempDir();
|
||||
|
||||
for (const [filePath, content] of Object.entries(structure)) {
|
||||
const fullPath = path.join(tempDir, filePath);
|
||||
const dir = path.dirname(fullPath);
|
||||
|
||||
// Create directory if it doesn't exist
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
|
||||
if (typeof content === 'string') {
|
||||
fs.writeFileSync(fullPath, content);
|
||||
} else if (typeof content === 'object') {
|
||||
fs.writeFileSync(fullPath, JSON.stringify(content, null, 2));
|
||||
}
|
||||
}
|
||||
|
||||
return tempDir;
|
||||
}
|
||||
|
||||
describe('getDextoPath', () => {
|
||||
let tempDir: string;
|
||||
|
||||
afterEach(() => {
|
||||
if (tempDir) {
|
||||
fs.rmSync(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
describe('in dexto project', () => {
|
||||
beforeEach(() => {
|
||||
tempDir = createTempDirStructure({
|
||||
'package.json': {
|
||||
name: 'test-project',
|
||||
dependencies: { dexto: '^1.0.0' },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('returns project-local path for logs', () => {
|
||||
const result = getDextoPath('logs', 'test.log', tempDir);
|
||||
expect(result).toBe(path.join(tempDir, '.dexto', 'logs', 'test.log'));
|
||||
});
|
||||
|
||||
it('returns project-local path for database', () => {
|
||||
const result = getDextoPath('database', 'dexto.db', tempDir);
|
||||
expect(result).toBe(path.join(tempDir, '.dexto', 'database', 'dexto.db'));
|
||||
});
|
||||
|
||||
it('returns directory path when no filename provided', () => {
|
||||
const result = getDextoPath('config', undefined, tempDir);
|
||||
expect(result).toBe(path.join(tempDir, '.dexto', 'config'));
|
||||
});
|
||||
|
||||
it('works from nested directories', () => {
|
||||
const nestedDir = path.join(tempDir, 'src', 'app');
|
||||
fs.mkdirSync(nestedDir, { recursive: true });
|
||||
|
||||
const result = getDextoPath('logs', 'app.log', nestedDir);
|
||||
expect(result).toBe(path.join(tempDir, '.dexto', 'logs', 'app.log'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('outside dexto project (global)', () => {
|
||||
beforeEach(() => {
|
||||
tempDir = createTempDirStructure({
|
||||
'package.json': {
|
||||
name: 'regular-project',
|
||||
dependencies: { express: '^4.0.0' },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('returns global path when not in dexto project', () => {
|
||||
const originalCwd = process.cwd();
|
||||
try {
|
||||
process.chdir(tempDir);
|
||||
const result = getDextoPath('logs', 'global.log');
|
||||
expect(result).toContain('.dexto');
|
||||
expect(result).toContain('logs');
|
||||
expect(result).toContain('global.log');
|
||||
expect(result).not.toContain(tempDir);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// getDextoGlobalPath must always resolve under ~/.dexto, regardless of whether
// the cwd is inside a dexto project.
describe('getDextoGlobalPath', () => {
    let tempDir: string;

    afterEach(() => {
        if (tempDir) {
            fs.rmSync(tempDir, { recursive: true, force: true });
        }
    });

    describe('basic functionality', () => {
        it('returns global agents directory', () => {
            const result = getDextoGlobalPath('agents');
            expect(result).toContain('.dexto');
            expect(result).toContain('agents');
            expect(path.isAbsolute(result)).toBe(true);
        });

        it('returns global path with filename', () => {
            const result = getDextoGlobalPath('agents', 'database-agent');
            expect(result).toContain('.dexto');
            expect(result).toContain('agents');
            expect(result).toContain('database-agent');
            expect(path.isAbsolute(result)).toBe(true);
        });

        it('handles different types correctly', () => {
            const agents = getDextoGlobalPath('agents');
            const logs = getDextoGlobalPath('logs');
            const cache = getDextoGlobalPath('cache');

            expect(agents).toContain('agents');
            expect(logs).toContain('logs');
            expect(cache).toContain('cache');
        });
    });

    describe('in dexto project context', () => {
        beforeEach(() => {
            tempDir = createTempDirStructure({
                'package.json': {
                    name: 'test-project',
                    dependencies: { dexto: '^1.0.0' },
                },
            });
        });

        it('always returns global path, never project-relative', () => {
            // getDextoPath returns project-relative
            const projectPath = getDextoPath('agents', 'test-agent', tempDir);
            expect(projectPath).toBe(path.join(tempDir, '.dexto', 'agents', 'test-agent'));

            // getDextoGlobalPath should ALWAYS return global, never project-relative
            const globalPath = getDextoGlobalPath('agents', 'test-agent');
            expect(globalPath).toContain('.dexto');
            expect(globalPath).toContain('agents');
            expect(globalPath).toContain('test-agent');
            expect(globalPath).not.toContain(tempDir); // Key difference!
            expect(path.isAbsolute(globalPath)).toBe(true);
        });
    });

    describe('outside dexto project context', () => {
        beforeEach(() => {
            tempDir = createTempDirStructure({
                'package.json': {
                    name: 'regular-project',
                    dependencies: { express: '^4.0.0' },
                },
            });
        });

        it('returns global path (same as in project context)', () => {
            const globalPath = getDextoGlobalPath('agents', 'test-agent');
            expect(globalPath).toContain('.dexto');
            expect(globalPath).toContain('agents');
            expect(globalPath).toContain('test-agent');
            expect(globalPath).not.toContain(tempDir);
            expect(path.isAbsolute(globalPath)).toBe(true);
        });
    });
});
|
||||
|
||||
// findPackageRoot walks up from a starting directory to the nearest package.json.
describe('findPackageRoot', () => {
    let tempDir: string;

    beforeEach(() => {
        tempDir = createTempDir();
    });

    afterEach(() => {
        fs.rmSync(tempDir, { recursive: true, force: true });
    });

    it('returns null if no package.json found', () => {
        const result = findPackageRoot(tempDir);
        expect(result).toBeNull();
    });

    it('returns the directory containing package.json', () => {
        fs.writeFileSync(path.join(tempDir, 'package.json'), JSON.stringify({ name: 'test-pkg' }));
        const result = findPackageRoot(tempDir);
        expect(result).toBe(tempDir);
    });

    it('finds package.json by walking up directories', () => {
        // package.json lives two levels above the starting directory.
        const nestedDir = path.join(tempDir, 'nested', 'deep');
        fs.mkdirSync(nestedDir, { recursive: true });
        fs.writeFileSync(path.join(tempDir, 'package.json'), JSON.stringify({ name: 'test-pkg' }));

        const result = findPackageRoot(nestedDir);
        expect(result).toBe(tempDir);
    });
});
|
||||
|
||||
// resolveBundledScript tests have been moved to @dexto/agent-management
|
||||
|
||||
// getDextoEnvPath resolution per execution context: project root .env,
// repo .env in dev mode, otherwise global ~/.dexto/.env.
describe('getDextoEnvPath', () => {
    describe('in dexto project', () => {
        let tempDir: string;
        let originalCwd: string;

        beforeEach(() => {
            originalCwd = process.cwd();
            tempDir = createTempDirStructure({
                'package.json': {
                    name: 'test-project',
                    dependencies: { dexto: '^1.0.0' },
                },
            });
        });

        afterEach(() => {
            process.chdir(originalCwd);
            fs.rmSync(tempDir, { recursive: true, force: true });
        });

        it('returns project root .env path', () => {
            process.chdir(tempDir);
            const result = getDextoEnvPath(tempDir);
            expect(result).toBe(path.join(tempDir, '.env'));
        });
    });

    describe('in dexto source', () => {
        let tempDir: string;
        let originalCwd: string;
        // Saved once so afterEach can restore whatever the runner had set.
        const originalDevMode = process.env.DEXTO_DEV_MODE;

        beforeEach(() => {
            originalCwd = process.cwd();
            tempDir = createTempDirStructure({
                'package.json': {
                    name: 'dexto-monorepo',
                    version: '1.0.0',
                },
                'agents/default-agent.yml': 'mcpServers: {}',
            });
        });

        afterEach(() => {
            process.chdir(originalCwd);
            fs.rmSync(tempDir, { recursive: true, force: true });
            // Restore original env
            if (originalDevMode === undefined) {
                delete process.env.DEXTO_DEV_MODE;
            } else {
                process.env.DEXTO_DEV_MODE = originalDevMode;
            }
        });

        it('returns repo .env when DEXTO_DEV_MODE=true', () => {
            process.chdir(tempDir);
            process.env.DEXTO_DEV_MODE = 'true';
            const result = getDextoEnvPath(tempDir);
            expect(result).toBe(path.join(tempDir, '.env'));
        });

        it('returns global ~/.dexto/.env when DEXTO_DEV_MODE is not set', () => {
            process.chdir(tempDir);
            delete process.env.DEXTO_DEV_MODE;
            const result = getDextoEnvPath(tempDir);
            expect(result).toBe(path.join(homedir(), '.dexto', '.env'));
        });

        it('returns global ~/.dexto/.env when DEXTO_DEV_MODE=false', () => {
            process.chdir(tempDir);
            process.env.DEXTO_DEV_MODE = 'false';
            const result = getDextoEnvPath(tempDir);
            expect(result).toBe(path.join(homedir(), '.dexto', '.env'));
        });
    });

    describe('in global-cli context', () => {
        let tempDir: string;

        beforeEach(() => {
            tempDir = createTempDirStructure({
                'package.json': {
                    name: 'regular-project',
                    dependencies: { express: '^4.0.0' },
                },
            });
        });

        afterEach(() => {
            fs.rmSync(tempDir, { recursive: true, force: true });
        });

        it('returns global ~/.dexto/.env path', () => {
            const result = getDextoEnvPath(tempDir);
            expect(result).toBe(path.join(homedir(), '.dexto', '.env'));
        });
    });
});
|
||||
|
||||
// End-to-end-style scenarios mirroring how the SDK and CLI actually resolve paths.
describe('real-world execution contexts', () => {
    describe('SDK usage in project', () => {
        let tempDir: string;

        beforeEach(() => {
            tempDir = createTempDirStructure({
                'package.json': {
                    name: 'my-app',
                    dependencies: { dexto: '^1.0.0' },
                },
                'src/dexto/agents/default-agent.yml': 'mcpServers: {}',
            });
        });

        afterEach(() => {
            fs.rmSync(tempDir, { recursive: true, force: true });
        });

        it('uses project-local storage', () => {
            const logPath = getDextoPath('logs', 'dexto.log', tempDir);
            const dbPath = getDextoPath('database', 'dexto.db', tempDir);

            expect(logPath).toBe(path.join(tempDir, '.dexto', 'logs', 'dexto.log'));
            expect(dbPath).toBe(path.join(tempDir, '.dexto', 'database', 'dexto.db'));
        });
    });

    describe('CLI in dexto source', () => {
        let tempDir: string;
        // Saved once so afterEach can restore whatever the runner had set.
        const originalDevMode = process.env.DEXTO_DEV_MODE;

        beforeEach(() => {
            tempDir = createTempDirStructure({
                'package.json': {
                    name: 'dexto-monorepo',
                    version: '1.0.0',
                },
                'agents/default-agent.yml': 'mcpServers: {}',
            });
        });

        afterEach(() => {
            fs.rmSync(tempDir, { recursive: true, force: true });
            // Restore original env
            if (originalDevMode === undefined) {
                delete process.env.DEXTO_DEV_MODE;
            } else {
                process.env.DEXTO_DEV_MODE = originalDevMode;
            }
        });

        it('uses local repo storage when DEXTO_DEV_MODE=true', () => {
            process.env.DEXTO_DEV_MODE = 'true';
            const logPath = getDextoPath('logs', 'dexto.log', tempDir);
            expect(logPath).toBe(path.join(tempDir, '.dexto', 'logs', 'dexto.log'));
        });

        it('uses global storage when DEXTO_DEV_MODE is not set', () => {
            delete process.env.DEXTO_DEV_MODE;
            const logPath = getDextoPath('logs', 'dexto.log', tempDir);
            expect(logPath).toContain('.dexto');
            expect(logPath).toContain('logs');
            expect(logPath).toContain('dexto.log');
            expect(logPath).not.toContain(tempDir); // Should be global, not local
        });

        it('uses global storage when DEXTO_DEV_MODE=false', () => {
            process.env.DEXTO_DEV_MODE = 'false';
            const logPath = getDextoPath('logs', 'dexto.log', tempDir);
            expect(logPath).toContain('.dexto');
            expect(logPath).toContain('logs');
            expect(logPath).toContain('dexto.log');
            expect(logPath).not.toContain(tempDir); // Should be global, not local
        });
    });
});
|
||||
195
dexto/packages/core/src/utils/path.ts
Normal file
195
dexto/packages/core/src/utils/path.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
// TODO: (migration) This file is duplicated in @dexto/agent-management for short-term compatibility
|
||||
// Remove from core once all services accept paths via initialization options
|
||||
|
||||
import * as path from 'path';
|
||||
import { existsSync } from 'fs';
|
||||
import { promises as fs } from 'fs';
|
||||
import { homedir } from 'os';
|
||||
import { walkUpDirectories } from './fs-walk.js';
|
||||
import {
|
||||
getExecutionContext,
|
||||
findDextoSourceRoot,
|
||||
findDextoProjectRoot,
|
||||
} from './execution-context.js';
|
||||
import type { IDextoLogger } from '../logger/v2/types.js';
|
||||
|
||||
/**
|
||||
* Standard path resolver for logs/db/config/anything in dexto projects
|
||||
* Context-aware with dev mode support:
|
||||
* - dexto-source + DEXTO_DEV_MODE=true: Use local repo .dexto (isolated testing)
|
||||
* - dexto-source (normal): Use global ~/.dexto (user experience)
|
||||
* - dexto-project: Use project-local .dexto
|
||||
* - global-cli: Use global ~/.dexto
|
||||
* @param type Path type (logs, database, config, etc.)
|
||||
* @param filename Optional filename to append
|
||||
* @param startPath Starting directory for project detection
|
||||
* @returns Absolute path to the requested location
|
||||
*/
|
||||
export function getDextoPath(type: string, filename?: string, startPath?: string): string {
|
||||
const context = getExecutionContext(startPath);
|
||||
|
||||
let basePath: string;
|
||||
|
||||
switch (context) {
|
||||
case 'dexto-source': {
|
||||
// Dev mode: use local repo .dexto for isolated testing
|
||||
// Normal mode: use global ~/.dexto for user experience
|
||||
const isDevMode = process.env.DEXTO_DEV_MODE === 'true';
|
||||
if (isDevMode) {
|
||||
const sourceRoot = findDextoSourceRoot(startPath);
|
||||
if (!sourceRoot) {
|
||||
throw new Error('Not in dexto source context');
|
||||
}
|
||||
basePath = path.join(sourceRoot, '.dexto', type);
|
||||
} else {
|
||||
basePath = path.join(homedir(), '.dexto', type);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'dexto-project': {
|
||||
const projectRoot = findDextoProjectRoot(startPath);
|
||||
if (!projectRoot) {
|
||||
throw new Error('Not in dexto project context');
|
||||
}
|
||||
basePath = path.join(projectRoot, '.dexto', type);
|
||||
break;
|
||||
}
|
||||
case 'global-cli': {
|
||||
basePath = path.join(homedir(), '.dexto', type);
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
throw new Error(`Unknown execution context: ${context}`);
|
||||
}
|
||||
}
|
||||
|
||||
return filename ? path.join(basePath, filename) : basePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Global path resolver that ALWAYS returns paths in the user's home directory
|
||||
* Used for agent registry and other global-only resources that should not be project-relative
|
||||
* @param type Path type (agents, cache, etc.)
|
||||
* @param filename Optional filename to append
|
||||
* @returns Absolute path to the global location (~/.dexto/...)
|
||||
*/
|
||||
export function getDextoGlobalPath(type: string, filename?: string): string {
|
||||
// ALWAYS return global path, ignore project context
|
||||
const basePath = path.join(homedir(), '.dexto', type);
|
||||
return filename ? path.join(basePath, filename) : basePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy entire directory recursively
|
||||
* @param src Source directory path
|
||||
* @param dest Destination directory path
|
||||
*/
|
||||
export async function copyDirectory(src: string, dest: string): Promise<void> {
|
||||
await fs.mkdir(dest, { recursive: true });
|
||||
|
||||
const entries = await fs.readdir(src, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const srcPath = path.join(src, entry.name);
|
||||
const destPath = path.join(dest, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await copyDirectory(srcPath, destPath);
|
||||
} else {
|
||||
await fs.copyFile(srcPath, destPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if string looks like a file path vs registry name
|
||||
* @param str String to check
|
||||
* @returns True if looks like a path, false if looks like a registry name
|
||||
*/
|
||||
export function isPath(str: string): boolean {
|
||||
// Absolute paths
|
||||
if (path.isAbsolute(str)) return true;
|
||||
|
||||
// Relative paths with separators
|
||||
if (/[\\/]/.test(str)) return true;
|
||||
|
||||
// File extensions
|
||||
if (/\.(ya?ml|json)$/i.test(str)) return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find package root (for other utilities)
|
||||
* @param startPath Starting directory path
|
||||
* @returns Directory containing package.json or null
|
||||
*/
|
||||
export function findPackageRoot(startPath: string = process.cwd()): string | null {
|
||||
return walkUpDirectories(startPath, (dirPath) => {
|
||||
const pkgPath = path.join(dirPath, 'package.json');
|
||||
return existsSync(pkgPath);
|
||||
});
|
||||
}
|
||||
|
||||
// resolveBundledScript has been moved to @dexto/agent-management
|
||||
// Core no longer needs to resolve bundled script paths - users should use
|
||||
// ${{dexto.agent_dir}} template variables in their configs instead
|
||||
|
||||
/**
|
||||
* Ensure ~/.dexto directory exists for global storage
|
||||
*/
|
||||
export async function ensureDextoGlobalDirectory(): Promise<void> {
|
||||
const dextoDir = path.join(homedir(), '.dexto');
|
||||
try {
|
||||
await fs.mkdir(dextoDir, { recursive: true });
|
||||
} catch (error) {
|
||||
// Directory might already exist, ignore EEXIST errors
|
||||
if ((error as NodeJS.ErrnoException).code !== 'EEXIST') {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the appropriate .env file path for saving API keys.
|
||||
* Uses the same project detection logic as other dexto paths.
|
||||
*
|
||||
* @param startPath Starting directory for project detection
|
||||
* @param logger Optional logger instance for logging
|
||||
* @returns Absolute path to .env file for saving
|
||||
*/
|
||||
export function getDextoEnvPath(startPath: string = process.cwd(), logger?: IDextoLogger): string {
|
||||
const context = getExecutionContext(startPath);
|
||||
let envPath = '';
|
||||
switch (context) {
|
||||
case 'dexto-source': {
|
||||
// Dev mode: use local repo .env for isolated testing
|
||||
// Normal mode: use global ~/.dexto/.env for user experience
|
||||
const isDevMode = process.env.DEXTO_DEV_MODE === 'true';
|
||||
if (isDevMode) {
|
||||
const sourceRoot = findDextoSourceRoot(startPath);
|
||||
if (!sourceRoot) {
|
||||
throw new Error('Not in dexto source context');
|
||||
}
|
||||
envPath = path.join(sourceRoot, '.env');
|
||||
} else {
|
||||
envPath = path.join(homedir(), '.dexto', '.env');
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'dexto-project': {
|
||||
const projectRoot = findDextoProjectRoot(startPath);
|
||||
if (!projectRoot) {
|
||||
throw new Error('Not in dexto project context');
|
||||
}
|
||||
envPath = path.join(projectRoot, '.env');
|
||||
break;
|
||||
}
|
||||
case 'global-cli': {
|
||||
envPath = path.join(homedir(), '.dexto', '.env');
|
||||
break;
|
||||
}
|
||||
}
|
||||
logger?.debug(`Dexto env path: ${envPath}, context: ${context}`);
|
||||
return envPath;
|
||||
}
|
||||
196
dexto/packages/core/src/utils/redactor.test.ts
Normal file
196
dexto/packages/core/src/utils/redactor.test.ts
Normal file
@@ -0,0 +1,196 @@
|
||||
import { describe, test, expect } from 'vitest';
|
||||
import { redactSensitiveData as redact } from './redactor.js';
|
||||
|
||||
// Unit tests for redactSensitiveData (aliased as `redact`): field-name redaction,
// value-pattern redaction, nesting, primitives, signed-URL exemption, and cycles.
describe('redact', () => {
    // Basic field redaction
    test('should redact a single sensitive field', () => {
        expect(redact({ apiKey: 'secret' })).toEqual({ apiKey: '[REDACTED]' });
    });

    test('should redact multiple sensitive fields', () => {
        expect(redact({ apiKey: 'secret', password: 'pass' })).toEqual({
            apiKey: '[REDACTED]',
            password: '[REDACTED]',
        });
    });

    test('should perform case-insensitive field matching', () => {
        expect(redact({ ApiKey: 'secret', PASSWORD: 'pass' })).toEqual({
            ApiKey: '[REDACTED]',
            PASSWORD: '[REDACTED]',
        });
    });

    test('should handle mixed sensitive and non-sensitive fields', () => {
        expect(redact({ apiKey: 'secret', name: 'john' })).toEqual({
            apiKey: '[REDACTED]',
            name: 'john',
        });
    });

    test('should handle field names with underscores', () => {
        expect(redact({ api_key: 'secret', access_token: 'token' })).toEqual({
            api_key: '[REDACTED]',
            access_token: '[REDACTED]',
        });
    });

    // Array Processing
    test('should handle array of objects with sensitive fields', () => {
        expect(redact([{ apiKey: 'secret' }, { password: 'pass' }])).toEqual([
            { apiKey: '[REDACTED]' },
            { password: '[REDACTED]' },
        ]);
    });

    test('should handle array of strings with patterns', () => {
        expect(redact(['sk-thisisafakekeyofsufficientlength', 'normal string'])).toEqual([
            '[REDACTED]',
            'normal string',
        ]);
    });

    test('should handle mixed array types', () => {
        expect(redact([{ apiKey: 'secret' }, 'sk-thisisafakekeyofsufficientlength', 42])).toEqual([
            { apiKey: '[REDACTED]' },
            '[REDACTED]',
            42,
        ]);
    });

    test('should handle an empty array', () => {
        expect(redact([])).toEqual([]);
    });

    test('should handle nested arrays', () => {
        expect(redact([[{ apiKey: 'secret' }]])).toEqual([[{ apiKey: '[REDACTED]' }]]);
    });

    // Object Nesting
    test('should handle deeply nested sensitive fields', () => {
        expect(redact({ user: { config: { apiKey: 'secret' } } })).toEqual({
            user: { config: { apiKey: '[REDACTED]' } },
        });
    });

    test('should handle mixed nesting levels', () => {
        expect(redact({ apiKey: 'secret', user: { password: 'pass' } })).toEqual({
            apiKey: '[REDACTED]',
            user: { password: '[REDACTED]' },
        });
    });

    test('should handle array within object', () => {
        expect(redact({ users: [{ apiKey: 'secret' }] })).toEqual({
            users: [{ apiKey: '[REDACTED]' }],
        });
    });

    test('should handle object within array', () => {
        expect(redact([{ nested: { apiKey: 'secret' } }])).toEqual([
            { nested: { apiKey: '[REDACTED]' } },
        ]);
    });

    // Primitive Types
    test('should return primitives unchanged', () => {
        expect(redact(null)).toBeNull();
        expect(redact(undefined)).toBeUndefined();
        expect(redact(42)).toBe(42);
        expect(redact(true)).toBe(true);
        const s = Symbol('foo');
        expect(redact(s)).toBe(s);
    });

    // Sensitive Patterns
    describe('Sensitive Patterns in Strings', () => {
        test('should redact OpenAI API keys', () => {
            const text = 'My API key is sk-thisisafakekeyofsufficientlength';
            expect(redact(text)).toBe('My API key is [REDACTED]');
        });

        test('should redact Bearer tokens', () => {
            const text = 'Authorization: Bearer my-secret-token-123';
            expect(redact(text)).toBe('Authorization: [REDACTED]');
        });

        test('should redact emails', () => {
            const text = 'Contact me at test@example.com';
            expect(redact(text)).toBe('Contact me at [REDACTED]');
        });

        test('should not redact normal strings', () => {
            const text = 'This is a normal sentence.';
            expect(redact(text)).toBe(text);
        });

        test('should redact standalone JWT tokens', () => {
            const jwt =
                'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIn0.dozjgNryP4J3jVmNHl0w5N_XgL0n3I9PlFUP0THsR8U';
            expect(redact(jwt)).toBe('[REDACTED]');
        });
    });

    // Signed URLs (should NOT be redacted)
    describe('Signed URLs', () => {
        test('should NOT redact Supabase signed URLs', () => {
            const url =
                'https://xxx.supabase.co/storage/v1/object/sign/bucket/file.dat?token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIn0.dozjgNryP4J3jVmNHl0w5N_XgL0n3I9PlFUP0THsR8U';
            expect(redact(url)).toBe(url);
        });

        test('should NOT redact AWS S3 presigned URLs', () => {
            const url =
                'https://bucket.s3.us-east-1.amazonaws.com/file.dat?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=xxx';
            expect(redact(url)).toBe(url);
        });

        test('should NOT redact Google Cloud Storage signed URLs', () => {
            const url =
                'https://storage.googleapis.com/bucket/file.dat?Expires=123&GoogleAccessId=xxx&Signature=xxx';
            expect(redact(url)).toBe(url);
        });

        test('should still redact JWT tokens in non-URL contexts', () => {
            const text =
                'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIn0.dozjgNryP4J3jVmNHl0w5N_XgL0n3I9PlFUP0THsR8U';
            expect(redact(text)).toBe('[REDACTED]');
        });
    });

    // Circular References
    describe('Circular References', () => {
        test('should handle circular references in objects', () => {
            const obj: any = { a: 1 };
            obj.b = obj; // Circular reference
            const redacted = redact(obj);
            expect(redacted).toEqual({ a: 1, b: '[REDACTED_CIRCULAR]' });
        });

        test('should handle circular references in arrays', () => {
            const arr: any[] = [1];
            arr.push(arr); // Circular reference
            const redacted = redact(arr);
            expect(redacted).toEqual([1, '[REDACTED_CIRCULAR]']);
        });

        test('should handle complex circular references', () => {
            const obj1: any = { name: 'obj1' };
            const obj2: any = { name: 'obj2' };
            obj1.child = obj2;
            obj2.parent = obj1; // Circular reference
            const redacted = redact(obj1);
            expect(redacted).toEqual({
                name: 'obj1',
                child: { name: 'obj2', parent: '[REDACTED_CIRCULAR]' },
            });
        });

        test('should handle circular references in nested arrays', () => {
            const arr: any[] = [1, [2]];
            (arr[1] as any[]).push(arr);
            const redacted = redact(arr);
            expect(redacted).toEqual([1, [2, '[REDACTED_CIRCULAR]']]);
        });
    });
});
|
||||
140
dexto/packages/core/src/utils/redactor.ts
Normal file
140
dexto/packages/core/src/utils/redactor.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
/**
|
||||
* Utility to redact sensitive information from objects, arrays, and strings.
|
||||
* - Redacts by field name (e.g., apiKey, token, password, etc.)
|
||||
* - Redacts by value pattern (e.g., OpenAI keys, Bearer tokens, emails)
|
||||
* - Handles deeply nested structures and circular references
|
||||
* - Recursive and preserves structure
|
||||
* - Easy to extend
|
||||
*/
|
||||
|
||||
// Sensitive field names redacted wholesale wherever they appear as object keys
// (matching is case-insensitive; see redactSensitiveData)
const SENSITIVE_FIELDS = [
    'apikey',
    'api_key',
    'token',
    'access_token',
    'refresh_token',
    'password',
    'secret',
];

// Field names whose large base64 values are truncated for logging rather than
// redacted. A bare 'data' key is only truncated when the parent object carries
// file metadata (mimeType/filename/fileName) — see truncateFileData
const FILE_DATA_FIELDS = [
    'base64',
    'filedata',
    'file_data',
    'imagedata',
    'image_data',
    'audiodata',
    'audio_data',
    'data',
];

// Regex patterns whose matches are replaced with [REDACTED] in any string value
const SENSITIVE_PATTERNS: RegExp[] = [
    /\bsk-[A-Za-z0-9]{20,}\b/g, // OpenAI API keys (at least 20 chars after sk-)
    /\bBearer\s+[A-Za-z0-9\-_.=]+\b/gi, // Bearer tokens
    /\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b/g, // Emails
];

// JWT pattern - applied selectively (not to signed URLs; see isSignedUrl)
const JWT_PATTERN = /\beyJ[A-Za-z0-9_-]*\.[A-Za-z0-9_-]*\.[A-Za-z0-9_-]*/g;

// Patterns that indicate a URL contains a signed token that should NOT be redacted
// These are legitimate shareable URLs, not sensitive credentials
const SIGNED_URL_PATTERNS = [
    /supabase\.co\/storage\/.*\?token=/i, // Supabase signed URLs
    /\.r2\.cloudflarestorage\.com\/.*\?/i, // Cloudflare R2 signed URLs
    /\.s3\..*amazonaws\.com\/.*\?(X-Amz-|AWSAccessKeyId)/i, // AWS S3 presigned URLs
    /storage\.googleapis\.com\/.*\?/i, // Google Cloud Storage signed URLs
];

// Replacement markers emitted by the redactor
const REDACTED = '[REDACTED]';
const REDACTED_CIRCULAR = '[REDACTED_CIRCULAR]';
const FILE_DATA_TRUNCATED = '[FILE_DATA_TRUNCATED]';
|
||||
|
||||
/**
|
||||
* Determines if a string looks like base64-encoded file data
|
||||
* @param value - String to check
|
||||
* @returns true if it appears to be large base64 data
|
||||
*/
|
||||
function isLargeBase64Data(value: string): boolean {
|
||||
// Check if it's a long string that looks like base64
|
||||
return value.length > 1000 && /^[A-Za-z0-9+/=]{1000,}$/.test(value.substring(0, 1000));
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncates large file data for logging purposes
|
||||
* @param value - The value to potentially truncate
|
||||
* @param key - The field name
|
||||
* @param parent - The parent object for context checking
|
||||
* @returns Truncated value with metadata or original value
|
||||
*/
|
||||
function truncateFileData(value: unknown, key: string, parent?: Record<string, unknown>): unknown {
|
||||
if (typeof value !== 'string') return value;
|
||||
const lowerKey = key.toLowerCase();
|
||||
// Gate "data" by presence of file-ish sibling metadata to avoid false positives
|
||||
const hasFileContext =
|
||||
!!parent && ('mimeType' in parent || 'filename' in parent || 'fileName' in parent);
|
||||
const looksLikeFileField =
|
||||
FILE_DATA_FIELDS.includes(lowerKey) || (lowerKey === 'data' && hasFileContext);
|
||||
if (looksLikeFileField && isLargeBase64Data(value)) {
|
||||
// Only log a concise marker + size; no content preview to prevent leakage
|
||||
return `${FILE_DATA_TRUNCATED} (${value.length} chars)`;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Redacts sensitive data from an object, array, or string.
|
||||
* Handles circular references gracefully.
|
||||
* @param input - The data to redact
|
||||
* @param seen - Internal set to track circular references
|
||||
* @returns The redacted data
|
||||
*/
|
||||
/**
|
||||
* Checks if a string is a signed URL that should not have its token redacted
|
||||
*/
|
||||
function isSignedUrl(value: string): boolean {
|
||||
return SIGNED_URL_PATTERNS.some((pattern) => pattern.test(value));
|
||||
}
|
||||
|
||||
export function redactSensitiveData(input: unknown, seen = new WeakSet()): unknown {
|
||||
if (typeof input === 'string') {
|
||||
let result = input;
|
||||
for (const pattern of SENSITIVE_PATTERNS) {
|
||||
result = result.replace(pattern, REDACTED);
|
||||
}
|
||||
// Only redact JWTs if they're not part of a signed URL
|
||||
// Signed URLs are meant to be shared and their tokens are not credentials
|
||||
if (!isSignedUrl(result)) {
|
||||
result = result.replace(JWT_PATTERN, REDACTED);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
if (Array.isArray(input)) {
|
||||
if (seen.has(input)) return REDACTED_CIRCULAR;
|
||||
seen.add(input);
|
||||
return input.map((item) => redactSensitiveData(item, seen));
|
||||
}
|
||||
if (input && typeof input === 'object') {
|
||||
if (seen.has(input)) return REDACTED_CIRCULAR;
|
||||
seen.add(input);
|
||||
const result: any = {};
|
||||
for (const [key, value] of Object.entries(input)) {
|
||||
if (SENSITIVE_FIELDS.includes(key.toLowerCase())) {
|
||||
result[key] = REDACTED;
|
||||
} else {
|
||||
// First truncate file data (with parent context), then recursively redact
|
||||
const truncatedValue = truncateFileData(
|
||||
value,
|
||||
key,
|
||||
input as Record<string, unknown>
|
||||
);
|
||||
result[key] = redactSensitiveData(truncatedValue, seen);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
return input;
|
||||
}
|
||||
296
dexto/packages/core/src/utils/result.test.ts
Normal file
296
dexto/packages/core/src/utils/result.test.ts
Normal file
@@ -0,0 +1,296 @@
|
||||
import { describe, test, expect } from 'vitest';
|
||||
import { z, ZodError } from 'zod';
|
||||
import { zodToIssues, ok, fail, hasErrors, splitIssues } from './result.js';
|
||||
import { ErrorScope, ErrorType } from '../errors/index.js';
|
||||
import type { Issue } from '../errors/index.js';
|
||||
|
||||
// Helper to create test issues with less boilerplate
|
||||
const makeIssue = (
|
||||
code: string,
|
||||
severity: 'error' | 'warning',
|
||||
message = `Test ${severity}`
|
||||
): Issue => ({
|
||||
code,
|
||||
message,
|
||||
severity,
|
||||
scope: ErrorScope.AGENT,
|
||||
type: ErrorType.USER,
|
||||
context: {},
|
||||
});
|
||||
|
||||
describe('zodToIssues', () => {
|
||||
describe('standard error handling', () => {
|
||||
test('should convert basic Zod validation error', () => {
|
||||
const schema = z.object({
|
||||
name: z.string(),
|
||||
age: z.number(),
|
||||
});
|
||||
|
||||
const result = schema.safeParse({ name: 'John', age: 'invalid' });
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (!result.success) {
|
||||
const issues = zodToIssues(result.error);
|
||||
expect(issues).toHaveLength(1);
|
||||
expect(issues[0]).toMatchObject({
|
||||
code: 'schema_validation',
|
||||
message: 'Expected number, received string',
|
||||
path: ['age'],
|
||||
severity: 'error',
|
||||
scope: ErrorScope.AGENT,
|
||||
type: ErrorType.USER,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
test('should handle multiple validation errors', () => {
|
||||
const schema = z.object({
|
||||
email: z.string().email(),
|
||||
age: z.number().positive(),
|
||||
});
|
||||
|
||||
const result = schema.safeParse({ email: 'invalid', age: -5 });
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (!result.success) {
|
||||
const issues = zodToIssues(result.error);
|
||||
expect(issues).toHaveLength(2);
|
||||
expect(issues[0]?.path).toEqual(['email']);
|
||||
expect(issues[1]?.path).toEqual(['age']);
|
||||
}
|
||||
});
|
||||
|
||||
test('should respect severity parameter', () => {
|
||||
const schema = z.string();
|
||||
const result = schema.safeParse(123);
|
||||
|
||||
if (!result.success) {
|
||||
const warningIssues = zodToIssues(result.error, 'warning');
|
||||
expect(warningIssues[0]?.severity).toBe('warning');
|
||||
|
||||
const errorIssues = zodToIssues(result.error, 'error');
|
||||
expect(errorIssues[0]?.severity).toBe('error');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Exercises invalid_union handling: errors from every failed union branch
// should be surfaced (including deeply nested unions), with a fallback to the
// invalid_union issue itself when no branch errors are available.
describe('union error handling', () => {
    test('should collect errors from 2-member union', () => {
        const schema = z.union([
            z.object({ type: z.literal('a'), value: z.string() }),
            z.object({ type: z.literal('b'), count: z.number() }),
        ]);

        const result = schema.safeParse({ type: 'a', value: 123 });
        expect(result.success).toBe(false);

        if (!result.success) {
            const issues = zodToIssues(result.error);
            // Should have issues from both union branches
            expect(issues.length).toBeGreaterThan(0);
            // At least one issue should mention the validation failure
            expect(
                issues.some((i) => i.path?.includes('value') || i.path?.includes('type'))
            ).toBe(true);
        }
    });

    test('should collect errors from 4-member union', () => {
        // Simulates ApprovalResponseSchema structure
        const schema = z.union([
            z.object({ type: z.literal('tool'), toolId: z.string() }),
            z.object({ type: z.literal('command'), commandId: z.string() }),
            z.object({ type: z.literal('elicit'), question: z.string() }),
            z.object({ type: z.literal('custom'), data: z.object({}) }),
        ]);

        const result = schema.safeParse({ type: 'tool', toolId: 123 });
        expect(result.success).toBe(false);

        if (!result.success) {
            const issues = zodToIssues(result.error);
            // Should collect errors from all union branches, not just the first two
            expect(issues.length).toBeGreaterThan(0);
        }
    });

    test('should handle deeply nested union errors', () => {
        const innerSchema = z.union([z.string(), z.number()]);
        const outerSchema = z.object({
            field: innerSchema,
        });

        const result = outerSchema.safeParse({ field: true });
        expect(result.success).toBe(false);

        if (!result.success) {
            const issues = zodToIssues(result.error);
            expect(issues.length).toBeGreaterThan(0);
            // The nested path must survive union flattening.
            expect(issues.some((i) => i.path?.includes('field'))).toBe(true);
        }
    });

    test('should handle union with all failing branches (no match)', () => {
        const schema = z.union([
            z.object({ type: z.literal('a'), data: z.string() }),
            z.object({ type: z.literal('b'), data: z.number() }),
            z.object({ type: z.literal('c'), data: z.boolean() }),
        ]);

        // Input doesn't match ANY branch - all 3 should fail
        const result = schema.safeParse({ type: 'x', data: 'invalid' });
        expect(result.success).toBe(false);

        if (!result.success) {
            const issues = zodToIssues(result.error);
            // Should collect errors from all 3 failed branches
            expect(issues.length).toBeGreaterThan(0);
            // Verify we got errors from multiple branches (not just the first)
            const uniquePaths = new Set(issues.map((i) => JSON.stringify(i.path)));
            expect(uniquePaths.size).toBeGreaterThan(0);
        }
    });

    test('should handle very deeply nested unions (3+ levels)', () => {
        // Union inside union inside union
        const innerUnion = z.union([z.string(), z.number()]);
        const middleUnion = z.union([innerUnion, z.boolean()]);
        const outerUnion = z.union([
            middleUnion,
            z.object({ foo: z.string() }),
            z.array(z.number()),
        ]);

        // This fails at multiple nesting levels
        const result = outerUnion.safeParse({ foo: 123 });
        expect(result.success).toBe(false);

        if (!result.success) {
            const issues = zodToIssues(result.error);
            // Should collect errors from deeply nested union branches
            expect(issues.length).toBeGreaterThan(0);
            // Should have errors mentioning the nested field
            expect(issues.some((i) => i.path?.includes('foo'))).toBe(true);
        }
    });

    test('should handle fallback when no union errors are collected', () => {
        // Create a manual ZodError with invalid_union but empty unionErrors
        const error = new ZodError([
            {
                code: 'invalid_union',
                unionErrors: [] as ZodError[],
                path: ['field'],
                message: 'Invalid union type',
            } as any,
        ]);

        const issues = zodToIssues(error);
        // With no branch errors available, the invalid_union issue itself is reported.
        expect(issues).toHaveLength(1);
        expect(issues[0]).toMatchObject({
            code: 'schema_validation',
            message: 'Invalid union type',
            path: ['field'],
            severity: 'error',
        });
    });
});
|
||||
|
||||
// Discriminated unions report errors only from the matched variant, so no
// branch flattening is needed; paths should point at the failing field.
describe('discriminated union error handling', () => {
    test('should handle discriminated union errors', () => {
        const schema = z.discriminatedUnion('type', [
            z.object({ type: z.literal('success'), data: z.string() }),
            z.object({ type: z.literal('error'), code: z.number() }),
        ]);

        const result = schema.safeParse({ type: 'success', data: 123 });
        expect(result.success).toBe(false);

        if (!result.success) {
            const issues = zodToIssues(result.error);
            expect(issues.length).toBeGreaterThan(0);
            expect(issues.some((i) => i.path?.includes('data'))).toBe(true);
        }
    });
});
|
||||
});
|
||||
|
||||
// Covers the small Result-pattern helpers: the ok/fail constructors and the
// issue-classification utilities hasErrors/splitIssues.
describe('Result helper functions', () => {
    describe('ok', () => {
        test('should create successful result without issues', () => {
            const result = ok({ value: 42 });
            expect(result.ok).toBe(true);
            if (result.ok) {
                expect(result.data).toEqual({ value: 42 });
            }
            // issues defaults to an empty array on success.
            expect(result.issues).toEqual([]);
        });

        test('should create successful result with warnings', () => {
            // Success may still carry non-blocking warning issues.
            const result = ok({ value: 42 }, [makeIssue('test_warning', 'warning')]);
            expect(result.ok).toBe(true);
            if (result.ok) {
                expect(result.data).toEqual({ value: 42 });
            }
            expect(result.issues).toHaveLength(1);
            expect(result.issues[0]?.severity).toBe('warning');
        });
    });

    describe('fail', () => {
        test('should create failed result', () => {
            const result = fail([makeIssue('test_error', 'error')]);
            expect(result.ok).toBe(false);
            expect(result.issues).toHaveLength(1);
            expect(result.issues[0]?.severity).toBe('error');
        });
    });

    describe('hasErrors', () => {
        test('should return true when issues contain errors', () => {
            expect(hasErrors([makeIssue('test_error', 'error')])).toBe(true);
        });

        test('should return false when issues only contain warnings', () => {
            // Warnings are non-blocking and do not count as errors.
            expect(hasErrors([makeIssue('test_warning', 'warning')])).toBe(false);
        });

        test('should return false for empty array', () => {
            expect(hasErrors([])).toBe(false);
        });
    });

    describe('splitIssues', () => {
        test('should split errors and warnings', () => {
            const issues = [makeIssue('test_error', 'error'), makeIssue('test_warning', 'warning')];
            const { errors, warnings } = splitIssues(issues);

            expect(errors).toHaveLength(1);
            expect(warnings).toHaveLength(1);
            expect(errors[0]?.severity).toBe('error');
            expect(warnings[0]?.severity).toBe('warning');
        });

        test('should handle all errors', () => {
            const issues = [
                makeIssue('err1', 'error', 'Error 1'),
                makeIssue('err2', 'error', 'Error 2'),
            ];
            const { errors, warnings } = splitIssues(issues);

            expect(errors).toHaveLength(2);
            expect(warnings).toHaveLength(0);
        });

        test('should handle all warnings', () => {
            const issues = [
                makeIssue('warn1', 'warning', 'Warning 1'),
                makeIssue('warn2', 'warning', 'Warning 2'),
            ];
            const { errors, warnings } = splitIssues(issues);

            expect(errors).toHaveLength(0);
            expect(warnings).toHaveLength(2);
        });
    });
});
|
||||
280
dexto/packages/core/src/utils/result.ts
Normal file
280
dexto/packages/core/src/utils/result.ts
Normal file
@@ -0,0 +1,280 @@
|
||||
// utils/result.ts — Result pattern types and shared Zod schema helpers
|
||||
import { z, type ZodError } from 'zod';
|
||||
import type { DextoErrorCode, Issue } from '@core/errors/types.js';
|
||||
import { ErrorScope, ErrorType } from '@core/errors/types.js';
|
||||
|
||||
/** Trim and require non-empty after trim */
|
||||
export const NonEmptyTrimmed = z
|
||||
.string()
|
||||
.transform((s) => s.trim())
|
||||
.refine((s) => s.length > 0, { message: 'Required' });
|
||||
|
||||
/** Simple URL check (so we don’t need preprocess JUST to trim before .url()) */
|
||||
function isValidUrl(s: string): boolean {
|
||||
try {
|
||||
// Allow only http/https (adjust if you want more)
|
||||
const u = new URL(s);
|
||||
return u.protocol === 'http:' || u.protocol === 'https:';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export const OptionalURL = z
|
||||
.string()
|
||||
.transform((s) => s.trim())
|
||||
.refine((s) => s === '' || isValidUrl(s), { message: 'Invalid URL' })
|
||||
.transform((s) => (s === '' ? undefined : s))
|
||||
.optional();
|
||||
|
||||
// Expand $VAR and ${VAR} using the provided env, then trim.
|
||||
export const EnvExpandedString = (env?: Record<string, string | undefined>) =>
|
||||
z.string().transform((input) => {
|
||||
if (typeof input !== 'string') return '';
|
||||
// Use current process.env if no env provided (reads fresh each time)
|
||||
const envToUse = env ?? process.env;
|
||||
const out = input.replace(
|
||||
/\$([A-Z_][A-Z0-9_]*)|\${([A-Z_][A-Z0-9_]*)}/gi,
|
||||
(_, a, b) => envToUse[a || b] ?? ''
|
||||
);
|
||||
return out.trim();
|
||||
});
|
||||
|
||||
// Zod type for non-empty environment expanded string
|
||||
export const NonEmptyEnvExpandedString = (env?: Record<string, string | undefined>) =>
|
||||
EnvExpandedString(env).refine((s) => s.length > 0, {
|
||||
message: 'Value is required',
|
||||
});
|
||||
|
||||
// Zod type for URL that could be pulled from env variables
|
||||
export const RequiredEnvURL = (env?: Record<string, string | undefined>) =>
|
||||
EnvExpandedString(env).refine(
|
||||
(s) => {
|
||||
try {
|
||||
const u = new URL(s);
|
||||
return u.protocol === 'http:' || u.protocol === 'https:';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
{ message: 'Invalid URL' }
|
||||
);
|
||||
|
||||
/**
 * A discriminated union result type that can be either successful or failed
 * Provides type safety by ensuring data is only available on success
 * @param T - The type of the data on success
 * @param C - The type of the context for issues
 */
export type Result<T, C = unknown> =
    // Success: data is present; issues may still carry non-blocking warnings.
    | { ok: true; data: T; issues: Issue<C>[] }
    // Failure: no data; issues explain why the operation failed.
    | { ok: false; issues: Issue<C>[] };
|
||||
|
||||
/**
|
||||
* Create a successful result with validated data and optional warnings.
|
||||
*
|
||||
* **Usage Guidelines:**
|
||||
* - Use for operations that completed successfully, even with warnings
|
||||
* - Include warnings for non-blocking issues (API key too short, fallback model used, etc.)
|
||||
* - DextoAgent methods should prefer this over throwing exceptions
|
||||
* - API layer maps this to 2xx status codes
|
||||
*
|
||||
* @param data - The successfully validated/processed data
|
||||
* @param issues - Optional warnings or informational issues (defaults to empty array)
|
||||
* @returns A successful Result with ok: true
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Success with no warnings
|
||||
* return ok(validatedConfig);
|
||||
*
|
||||
* // Success with warnings
|
||||
* return ok(validatedConfig, [
|
||||
* { code: 'llm_short_api_key', message: 'API key seems short', severity: 'warning', context: {} }
|
||||
* ]);
|
||||
* ```
|
||||
*/
|
||||
export const ok = <T, C = unknown>(data: T, issues: Issue<C>[] = []): Result<T, C> => ({
|
||||
ok: true,
|
||||
data,
|
||||
issues, // warnings live alongside errors here
|
||||
});
|
||||
|
||||
/**
|
||||
* Create a failed result with blocking errors that prevent operation completion.
|
||||
*
|
||||
* **Usage Guidelines:**
|
||||
* - Use for validation failures, business rule violations, or any error that should stop execution
|
||||
* - Do NOT mix with exceptions - choose Result pattern OR throwing, not both
|
||||
* - API layer maps this to 4xx status codes (user/validation errors)
|
||||
* - Issues should have severity: 'error' for blocking problems
|
||||
*
|
||||
* @param issues - Array of error issues that caused the failure (must not be empty)
|
||||
* @returns A failed Result with ok: false and no data
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Validation failure
|
||||
* return fail([
|
||||
* {
|
||||
* code: LLMErrorCode.SWITCH_INPUT_MISSING,
|
||||
* message: 'At least model or provider must be specified',
|
||||
* severity: 'error',
|
||||
* context: {}
|
||||
* }
|
||||
* ]);
|
||||
*
|
||||
* // Multiple validation errors
|
||||
* return fail([
|
||||
* { code: 'missing_api_key', message: 'API key required', severity: 'error', context: {} },
|
||||
* { code: 'invalid_model', message: 'Model not supported', severity: 'error', context: {} }
|
||||
* ]);
|
||||
* ```
|
||||
*/
|
||||
export const fail = <T = never, C = unknown>(issues: Issue<C>[]): Result<T, C> => ({
|
||||
ok: false,
|
||||
issues,
|
||||
});
|
||||
|
||||
/**
|
||||
* Check if a list of issues contains any blocking errors (non-warning severity).
|
||||
*
|
||||
* **Usage Guidelines:**
|
||||
* - Use to determine if a Result should be ok: false
|
||||
* - Warnings don't count as errors - operations can succeed with warnings
|
||||
* - Useful in validation functions to decide success vs failure
|
||||
*
|
||||
* @param issues - Array of issues to check
|
||||
* @returns true if any issue has severity other than 'warning', false otherwise
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const issues = [
|
||||
* { severity: 'warning', message: 'API key seems short' },
|
||||
* { severity: 'error', message: 'Model not found' }
|
||||
* ];
|
||||
*
|
||||
* if (hasErrors(issues)) {
|
||||
* return fail(issues); // Contains errors, operation fails
|
||||
* } else {
|
||||
* return ok(data, issues); // Only warnings, operation succeeds
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function hasErrors<C>(issues: Issue<C>[]) {
|
||||
return issues.some((i) => i.severity !== 'warning');
|
||||
}
|
||||
|
||||
/**
|
||||
* Separate issues into errors (blocking) and warnings (non-blocking) for different handling.
|
||||
*
|
||||
* **Usage Guidelines:**
|
||||
* - Use when you need to handle errors and warnings differently
|
||||
* - Errors should block operation, warnings should be logged/reported but allow success
|
||||
* - Useful in API responses to show both what failed and what succeeded with caveats
|
||||
*
|
||||
* @param issues - Array of mixed issues to categorize
|
||||
* @returns Object with separate arrays for errors and warnings
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const { errors, warnings } = splitIssues(allIssues);
|
||||
*
|
||||
* if (errors.length > 0) {
|
||||
* logger.error('Validation failed:', errors);
|
||||
* return fail(errors);
|
||||
* }
|
||||
*
|
||||
* if (warnings.length > 0) {
|
||||
* logger.warn('Validation succeeded with warnings:', warnings);
|
||||
* }
|
||||
*
|
||||
* return ok(data, warnings);
|
||||
* ```
|
||||
*/
|
||||
export function splitIssues<C>(issues: Issue<C>[]) {
|
||||
return {
|
||||
errors: issues.filter((i) => i.severity !== 'warning'),
|
||||
warnings: issues.filter((i) => i.severity === 'warning'),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Zod validation errors to standardized Issue format for Result pattern.
|
||||
*
|
||||
* **Usage Guidelines:**
|
||||
* - Use in schema validation functions to convert Zod errors to our Issue format
|
||||
* - Allows custom error codes via Zod's params.code field in custom refinements
|
||||
* - Falls back to SCHEMA_VALIDATION code for standard Zod validation errors
|
||||
* - Typically used with severity: 'error' for blocking validation failures
|
||||
*
|
||||
* @param err - ZodError from failed schema validation
|
||||
* @param severity - Issue severity level (defaults to 'error')
|
||||
* @returns Array of Issues in our standardized format
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // In a validation function
|
||||
* const result = MySchema.safeParse(data);
|
||||
* if (!result.success) {
|
||||
* const issues = zodToIssues(result.error);
|
||||
* return fail(issues);
|
||||
* }
|
||||
*
|
||||
* // Custom error codes in Zod schema
|
||||
* const schema = z.string().refine(val => val.length > 0, {
|
||||
* message: 'Field is required',
|
||||
* params: { code: LLMErrorCode.API_KEY_MISSING }
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export function zodToIssues<C = unknown>(
|
||||
err: ZodError,
|
||||
severity: 'error' | 'warning' = 'error'
|
||||
): Issue<C>[] {
|
||||
const issues: Issue<C>[] = [];
|
||||
|
||||
for (const e of err.errors) {
|
||||
// Handle invalid_union errors by extracting the actual validation errors from unionErrors
|
||||
if (e.code === 'invalid_union' && (e as any).unionErrors) {
|
||||
const unionErrors = (e as any).unionErrors as ZodError[];
|
||||
// Iterate through ALL union errors to capture validation issues from every union branch
|
||||
let hasCollectedErrors = false;
|
||||
for (const unionError of unionErrors) {
|
||||
if (unionError && unionError.errors && unionError.errors.length > 0) {
|
||||
// Recursively process each union branch's errors
|
||||
issues.push(...zodToIssues<C>(unionError, severity));
|
||||
hasCollectedErrors = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: if no union errors were collected, report the invalid_union error itself
|
||||
if (!hasCollectedErrors) {
|
||||
const params = (e as any).params || {};
|
||||
issues.push({
|
||||
code: (params.code ?? 'schema_validation') as DextoErrorCode,
|
||||
message: e.message,
|
||||
scope: params.scope ?? ErrorScope.AGENT,
|
||||
type: params.type ?? ErrorType.USER,
|
||||
path: e.path,
|
||||
severity,
|
||||
context: params as C,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// Standard error processing
|
||||
const params = (e as any).params || {};
|
||||
issues.push({
|
||||
code: (params.code ?? 'schema_validation') as DextoErrorCode,
|
||||
message: e.message,
|
||||
scope: params.scope ?? ErrorScope.AGENT,
|
||||
type: params.type ?? ErrorType.USER,
|
||||
path: e.path,
|
||||
severity,
|
||||
context: params as C,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
45
dexto/packages/core/src/utils/safe-stringify.ts
Normal file
45
dexto/packages/core/src/utils/safe-stringify.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { redactSensitiveData } from './redactor.js';
|
||||
|
||||
/**
|
||||
* Safe stringify that handles circular references and BigInt.
|
||||
* Also redacts sensitive data to prevent PII leaks.
|
||||
*
|
||||
* @param value - Value to stringify
|
||||
* @param maxLen - Optional maximum length. If provided, truncates with '…(truncated)' suffix.
|
||||
*/
|
||||
export function safeStringify(value: unknown, maxLen?: number): string {
|
||||
try {
|
||||
// Handle top-level BigInt without triggering JSON.stringify errors
|
||||
if (typeof value === 'bigint') {
|
||||
return value.toString();
|
||||
}
|
||||
// First redact sensitive data to prevent PII leaks
|
||||
const redacted = redactSensitiveData(value);
|
||||
const str = JSON.stringify(redacted, (_, v) => {
|
||||
if (v instanceof Error) {
|
||||
return { name: v.name, message: v.message, stack: v.stack };
|
||||
}
|
||||
if (typeof v === 'bigint') return v.toString();
|
||||
return v;
|
||||
});
|
||||
if (typeof str === 'string') {
|
||||
// Only truncate if maxLen is explicitly provided
|
||||
if (maxLen !== undefined && maxLen > 0 && str.length > maxLen) {
|
||||
const indicator = '…(truncated)';
|
||||
if (maxLen <= indicator.length) {
|
||||
return str.slice(0, maxLen);
|
||||
}
|
||||
const sliceLen = maxLen - indicator.length;
|
||||
return `${str.slice(0, sliceLen)}${indicator}`;
|
||||
}
|
||||
return str;
|
||||
}
|
||||
return String(value);
|
||||
} catch {
|
||||
try {
|
||||
return String(value);
|
||||
} catch {
|
||||
return '[Unserializable value]';
|
||||
}
|
||||
}
|
||||
}
|
||||
405
dexto/packages/core/src/utils/schema-metadata.ts
Normal file
405
dexto/packages/core/src/utils/schema-metadata.ts
Normal file
@@ -0,0 +1,405 @@
|
||||
/**
|
||||
* Schema metadata extraction utilities for Zod schemas
|
||||
*
|
||||
* This module provides utilities to extract metadata from Zod schemas at runtime.
|
||||
*
|
||||
* IMPORTANT: This uses Zod's private `._def` API which is not officially supported
|
||||
* and may break in future Zod versions. We use this approach because:
|
||||
* 1. No public Zod API exists for runtime schema introspection
|
||||
* 2. Benefits of schema-driven UI metadata outweigh version risk
|
||||
* 3. Changes would be caught by TypeScript/tests during upgrades
|
||||
*
|
||||
* TODO: Update web UI to use these helpers to reduce total code volume and improve maintainability. Also fix these helpers if needed.
|
||||
* See packages/webui/components/AgentEditor/CustomizePanel.tsx for the UI side TODO tracking this same goal.
|
||||
*
|
||||
* If Zod provides official introspection APIs in the future, migrate to those.
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
|
||||
/**
 * Metadata extracted from a Zod object schema.
 * Produced by extractSchemaMetadata; every record is keyed by field name.
 */
export interface SchemaMetadata {
    /** Default values for each field */
    defaults: Record<string, unknown>;
    /** Required fields (not optional, not with defaults) */
    requiredFields: string[];
    /** Field type information */
    fieldTypes: Record<string, string>;
    /** Field descriptions from .describe() calls */
    descriptions: Record<string, string>;
    /** Enum values for enum fields (e.g., provider: ['openai', 'anthropic']) */
    enumValues: Record<string, string[]>;
}
|
||||
|
||||
/**
 * Metadata describing a discriminated union schema
 * (e.g. McpServerConfigSchema: stdio | sse | http).
 * Produced by extractDiscriminatedUnionMetadata.
 */
export interface DiscriminatedUnionMetadata {
    /** The discriminator field name (e.g., "type") */
    discriminator: string;
    /** Possible discriminator values (e.g., ["stdio", "sse", "http"]) */
    options: string[];
    /** Metadata for each option */
    schemas: Record<string, SchemaMetadata>;
}
|
||||
|
||||
/**
|
||||
* Extract default value from a Zod schema
|
||||
* Returns undefined if no default is set
|
||||
*/
|
||||
function extractDefault(def: any): unknown {
|
||||
// Check for .default()
|
||||
if (def.defaultValue !== undefined) {
|
||||
return typeof def.defaultValue === 'function' ? def.defaultValue() : def.defaultValue;
|
||||
}
|
||||
|
||||
// Check for branded types (wraps the actual schema)
|
||||
if (def.typeName === 'ZodBranded' && def.type) {
|
||||
return extractDefault(def.type._def);
|
||||
}
|
||||
|
||||
// Check for optional types (unwrap to inner type)
|
||||
if (def.typeName === 'ZodOptional' && def.innerType) {
|
||||
return extractDefault(def.innerType._def);
|
||||
}
|
||||
|
||||
// Check for nullable types
|
||||
if (def.typeName === 'ZodNullable' && def.innerType) {
|
||||
return extractDefault(def.innerType._def);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract enum values from a Zod schema
|
||||
* Returns undefined if not an enum
|
||||
*/
|
||||
function extractEnumValues(def: any): string[] | undefined {
|
||||
// Handle branded types
|
||||
if (def.typeName === 'ZodBranded' && def.type) {
|
||||
return extractEnumValues(def.type._def);
|
||||
}
|
||||
|
||||
// Handle optional types
|
||||
if (def.typeName === 'ZodOptional' && def.innerType) {
|
||||
return extractEnumValues(def.innerType._def);
|
||||
}
|
||||
|
||||
// Handle nullable types
|
||||
if (def.typeName === 'ZodNullable' && def.innerType) {
|
||||
return extractEnumValues(def.innerType._def);
|
||||
}
|
||||
|
||||
// Handle effects (transforms, refinements, etc.)
|
||||
if (def.typeName === 'ZodEffects' && def.schema) {
|
||||
return extractEnumValues(def.schema._def);
|
||||
}
|
||||
|
||||
// Extract from ZodEnum
|
||||
if (def.typeName === 'ZodEnum') {
|
||||
return def.values as string[];
|
||||
}
|
||||
|
||||
// Extract from ZodLiteral (single value enum)
|
||||
if (def.typeName === 'ZodLiteral') {
|
||||
return [String(def.value)];
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract field type name from Zod schema
|
||||
*/
|
||||
function extractTypeName(def: any): string {
|
||||
// Handle branded types
|
||||
if (def.typeName === 'ZodBranded' && def.type) {
|
||||
return extractTypeName(def.type._def);
|
||||
}
|
||||
|
||||
// Handle optional types
|
||||
if (def.typeName === 'ZodOptional' && def.innerType) {
|
||||
return extractTypeName(def.innerType._def) + '?';
|
||||
}
|
||||
|
||||
// Handle nullable types
|
||||
if (def.typeName === 'ZodNullable' && def.innerType) {
|
||||
return extractTypeName(def.innerType._def) + '?';
|
||||
}
|
||||
|
||||
// Handle literal types
|
||||
if (def.typeName === 'ZodLiteral') {
|
||||
return `literal(${JSON.stringify(def.value)})`;
|
||||
}
|
||||
|
||||
// Handle enum types
|
||||
if (def.typeName === 'ZodEnum') {
|
||||
return `enum(${def.values.join('|')})`;
|
||||
}
|
||||
|
||||
// Handle array types
|
||||
if (def.typeName === 'ZodArray') {
|
||||
return `array<${extractTypeName(def.type._def)}>`;
|
||||
}
|
||||
|
||||
// Handle record types
|
||||
if (def.typeName === 'ZodRecord') {
|
||||
return `record<${extractTypeName(def.valueType._def)}>`;
|
||||
}
|
||||
|
||||
// Handle effects (transforms, refinements, etc.)
|
||||
if (def.typeName === 'ZodEffects' && def.schema) {
|
||||
return extractTypeName(def.schema._def);
|
||||
}
|
||||
|
||||
// Map Zod type names to simplified names
|
||||
const typeMap: Record<string, string> = {
|
||||
ZodString: 'string',
|
||||
ZodNumber: 'number',
|
||||
ZodBoolean: 'boolean',
|
||||
ZodObject: 'object',
|
||||
ZodArray: 'array',
|
||||
ZodRecord: 'record',
|
||||
ZodUnion: 'union',
|
||||
ZodDiscriminatedUnion: 'discriminatedUnion',
|
||||
};
|
||||
|
||||
return typeMap[def.typeName] || def.typeName?.replace('Zod', '').toLowerCase() || 'unknown';
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a field is required (not optional, no default)
|
||||
*/
|
||||
function isFieldRequired(def: any): boolean {
|
||||
// Has a default? Not required for user input
|
||||
if (def.defaultValue !== undefined) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Is optional? Not required
|
||||
if (def.typeName === 'ZodOptional') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Is nullable? Not required
|
||||
if (def.typeName === 'ZodNullable') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Handle branded types
|
||||
if (def.typeName === 'ZodBranded' && def.type) {
|
||||
return isFieldRequired(def.type._def);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract metadata from a Zod object schema
|
||||
*
|
||||
* @param schema - Zod schema to extract metadata from
|
||||
* @returns SchemaMetadata object with defaults, required fields, types, and descriptions
|
||||
*/
|
||||
export function extractSchemaMetadata(schema: z.ZodTypeAny): SchemaMetadata {
|
||||
const metadata: SchemaMetadata = {
|
||||
defaults: {},
|
||||
requiredFields: [],
|
||||
fieldTypes: {},
|
||||
descriptions: {},
|
||||
enumValues: {},
|
||||
};
|
||||
|
||||
let def = (schema as any)._def;
|
||||
|
||||
// Unwrap branded types
|
||||
if (def.typeName === 'ZodBranded' && def.type) {
|
||||
def = def.type._def;
|
||||
}
|
||||
|
||||
// Handle object schemas
|
||||
if (def.typeName !== 'ZodObject') {
|
||||
throw new Error(`Expected ZodObject, got ${def.typeName}`);
|
||||
}
|
||||
|
||||
const shape = def.shape();
|
||||
|
||||
for (const [fieldName, fieldSchema] of Object.entries(shape)) {
|
||||
const fieldDef = (fieldSchema as any)._def;
|
||||
|
||||
// Extract default value
|
||||
const defaultValue = extractDefault(fieldDef);
|
||||
if (defaultValue !== undefined) {
|
||||
metadata.defaults[fieldName] = defaultValue;
|
||||
}
|
||||
|
||||
// Check if required
|
||||
if (isFieldRequired(fieldDef)) {
|
||||
metadata.requiredFields.push(fieldName);
|
||||
}
|
||||
|
||||
// Extract type
|
||||
metadata.fieldTypes[fieldName] = extractTypeName(fieldDef);
|
||||
|
||||
// Extract description
|
||||
if (fieldDef.description) {
|
||||
metadata.descriptions[fieldName] = fieldDef.description;
|
||||
}
|
||||
|
||||
// Extract enum values
|
||||
const enumVals = extractEnumValues(fieldDef);
|
||||
if (enumVals) {
|
||||
metadata.enumValues[fieldName] = enumVals;
|
||||
}
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract metadata from a discriminated union schema
|
||||
*
|
||||
* @param schema - Zod discriminated union schema
|
||||
* @returns DiscriminatedUnionMetadata with info about each variant
|
||||
*/
|
||||
export function extractDiscriminatedUnionMetadata(
|
||||
schema: z.ZodTypeAny
|
||||
): DiscriminatedUnionMetadata {
|
||||
let def = (schema as any)._def;
|
||||
|
||||
// Unwrap branded types
|
||||
if (def.typeName === 'ZodBranded' && def.type) {
|
||||
def = def.type._def;
|
||||
}
|
||||
|
||||
// Handle effects (refinements, transforms, etc.)
|
||||
if (def.typeName === 'ZodEffects' && def.schema) {
|
||||
def = def.schema._def;
|
||||
}
|
||||
|
||||
if (def.typeName !== 'ZodDiscriminatedUnion') {
|
||||
throw new Error(`Expected ZodDiscriminatedUnion, got ${def.typeName}`);
|
||||
}
|
||||
|
||||
const discriminator = def.discriminator;
|
||||
const optionsMap = def.optionsMap;
|
||||
|
||||
const metadata: DiscriminatedUnionMetadata = {
|
||||
discriminator,
|
||||
options: Array.from(optionsMap.keys()) as string[],
|
||||
schemas: {},
|
||||
};
|
||||
|
||||
// Extract metadata for each option
|
||||
for (const [optionValue, optionSchema] of optionsMap.entries()) {
|
||||
metadata.schemas[optionValue as string] = extractSchemaMetadata(optionSchema);
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract common fields from a discriminated union (fields present in all variants)
|
||||
* Useful for extracting shared defaults like 'timeout' or 'connectionMode'
|
||||
*/
|
||||
export function extractCommonFields(metadata: DiscriminatedUnionMetadata): SchemaMetadata {
|
||||
const schemas = Object.values(metadata.schemas);
|
||||
if (schemas.length === 0) {
|
||||
return {
|
||||
defaults: {},
|
||||
requiredFields: [],
|
||||
fieldTypes: {},
|
||||
descriptions: {},
|
||||
enumValues: {},
|
||||
};
|
||||
}
|
||||
|
||||
const first = schemas[0]!; // Safe: we checked length > 0
|
||||
const rest = schemas.slice(1);
|
||||
const common: SchemaMetadata = {
|
||||
defaults: { ...first.defaults },
|
||||
requiredFields: [...first.requiredFields],
|
||||
fieldTypes: { ...first.fieldTypes },
|
||||
descriptions: { ...first.descriptions },
|
||||
enumValues: { ...first.enumValues },
|
||||
};
|
||||
|
||||
// Only keep fields that exist in ALL schemas
|
||||
for (const schema of rest) {
|
||||
// Filter defaults
|
||||
for (const key of Object.keys(common.defaults)) {
|
||||
if (!(key in schema.defaults) || schema.defaults[key] !== common.defaults[key]) {
|
||||
delete common.defaults[key];
|
||||
}
|
||||
}
|
||||
|
||||
// Filter required fields
|
||||
common.requiredFields = common.requiredFields.filter((field) =>
|
||||
schema.requiredFields.includes(field)
|
||||
);
|
||||
|
||||
// Filter field types (keep only if same in all schemas)
|
||||
for (const key of Object.keys(common.fieldTypes)) {
|
||||
if (!(key in schema.fieldTypes) || schema.fieldTypes[key] !== common.fieldTypes[key]) {
|
||||
delete common.fieldTypes[key];
|
||||
}
|
||||
}
|
||||
|
||||
// Filter descriptions (keep only if same in all schemas)
|
||||
for (const key of Object.keys(common.descriptions)) {
|
||||
if (
|
||||
!(key in schema.descriptions) ||
|
||||
schema.descriptions[key] !== common.descriptions[key]
|
||||
) {
|
||||
delete common.descriptions[key];
|
||||
}
|
||||
}
|
||||
|
||||
// Filter enum values (keep only if same in all schemas)
|
||||
for (const key of Object.keys(common.enumValues)) {
|
||||
if (
|
||||
!(key in schema.enumValues) ||
|
||||
JSON.stringify(schema.enumValues[key]) !== JSON.stringify(common.enumValues[key])
|
||||
) {
|
||||
delete common.enumValues[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return common;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get default value for a specific field in a discriminated union variant
|
||||
*
|
||||
* @param metadata - Discriminated union metadata
|
||||
* @param discriminatorValue - The discriminator value (e.g., 'stdio', 'http')
|
||||
* @param fieldName - The field to get default for
|
||||
* @returns The default value or undefined
|
||||
*/
|
||||
export function getFieldDefault(
|
||||
metadata: DiscriminatedUnionMetadata,
|
||||
discriminatorValue: string,
|
||||
fieldName: string
|
||||
): unknown {
|
||||
return metadata.schemas[discriminatorValue]?.defaults[fieldName];
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a field is required in a specific discriminated union variant
|
||||
*
|
||||
* @param metadata - Discriminated union metadata
|
||||
* @param discriminatorValue - The discriminator value (e.g., 'stdio', 'http')
|
||||
* @param fieldName - The field to check
|
||||
* @returns true if required, false otherwise
|
||||
*/
|
||||
export function isFieldRequiredInVariant(
|
||||
metadata: DiscriminatedUnionMetadata,
|
||||
discriminatorValue: string,
|
||||
fieldName: string
|
||||
): boolean {
|
||||
return metadata.schemas[discriminatorValue]?.requiredFields.includes(fieldName) ?? false;
|
||||
}
|
||||
27
dexto/packages/core/src/utils/schema.ts
Normal file
27
dexto/packages/core/src/utils/schema.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { zodToJsonSchema } from 'zod-to-json-schema';
|
||||
import type { IDextoLogger } from '../logger/v2/types.js';
|
||||
|
||||
/**
|
||||
* Convert Zod schema to JSON Schema format for tool parameters
|
||||
*
|
||||
* TODO: Replace zod-to-json-schema with Zod v4 native JSON schema support
|
||||
* The zod-to-json-schema package is deprecated and adds ~19MB due to a packaging bug
|
||||
* (includes test files with full Zod copies in dist-test-v3 and dist-test-v4 folders).
|
||||
* Zod v4 has native toJsonSchema() support - migrate when upgrading to Zod v4.
|
||||
* See: https://github.com/StefanTerdell/zod-to-json-schema
|
||||
*/
|
||||
export function convertZodSchemaToJsonSchema(zodSchema: any, logger: IDextoLogger): any {
|
||||
try {
|
||||
// Use proper library for Zod to JSON Schema conversion
|
||||
return zodToJsonSchema(zodSchema);
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
`Failed to convert Zod schema to JSON Schema: ${error instanceof Error ? error.message : String(error)}`
|
||||
);
|
||||
// Return basic object schema as fallback
|
||||
return {
|
||||
type: 'object',
|
||||
properties: {},
|
||||
};
|
||||
}
|
||||
}
|
||||
266
dexto/packages/core/src/utils/service-initializer.ts
Normal file
266
dexto/packages/core/src/utils/service-initializer.ts
Normal file
@@ -0,0 +1,266 @@
|
||||
/*
|
||||
* Service Initializer: Centralized Wiring for Dexto Core Services
|
||||
*
|
||||
* This module is responsible for initializing and wiring together all core agent services (LLM, client manager, message manager, event bus, etc.)
|
||||
* for the Dexto application. It provides a single entry point for constructing the service graph, ensuring consistent dependency injection
|
||||
* and configuration across CLI, web, and test environments.
|
||||
*
|
||||
* **Configuration Pattern:**
|
||||
* - The primary source of configuration is the config file (e.g., `agent.yml`), which allows users to declaratively specify both high-level
|
||||
* and low-level service options (such as compression strategies for ContextManager, LLM provider/model, etc.).
|
||||
* - For most use cases, the config file is sufficient and preferred, as it enables environment-specific, auditable, and user-friendly customization.
|
||||
*
|
||||
* **Service Architecture:**
|
||||
* - All services are initialized based on the provided configuration.
|
||||
* - For testing scenarios, mock the service dependencies directly using test frameworks rather than relying on service injection patterns.
|
||||
*
|
||||
* **Best Practice:**
|
||||
* - Use the config file for all user-facing and environment-specific configuration, including low-level service details.
|
||||
* - For testing, use proper mocking frameworks rather than service injection to ensure clean, maintainable tests.
|
||||
*
|
||||
* This pattern ensures a clean, scalable, and maintainable architecture, balancing flexibility with simplicity.
|
||||
*/
|
||||
|
||||
import { MCPManager } from '../mcp/manager.js';
|
||||
import { ToolManager } from '../tools/tool-manager.js';
|
||||
import { SystemPromptManager } from '../systemPrompt/manager.js';
|
||||
import { AgentStateManager } from '../agent/state-manager.js';
|
||||
import { SessionManager } from '../session/index.js';
|
||||
import { SearchService } from '../search/index.js';
|
||||
import { dirname, resolve } from 'path';
|
||||
import { createStorageManager, StorageManager } from '../storage/index.js';
|
||||
import { createAllowedToolsProvider } from '../tools/confirmation/allowed-tools-provider/factory.js';
|
||||
import type { IDextoLogger } from '../logger/v2/types.js';
|
||||
import type { ValidatedAgentConfig } from '@core/agent/schemas.js';
|
||||
import { AgentEventBus } from '../events/index.js';
|
||||
import { ResourceManager } from '../resources/manager.js';
|
||||
import { ApprovalManager } from '../approval/manager.js';
|
||||
import { MemoryManager } from '../memory/index.js';
|
||||
import { PluginManager } from '../plugins/manager.js';
|
||||
import { registerBuiltInPlugins } from '../plugins/registrations/builtins.js';
|
||||
|
||||
/**
 * Type for the core agent services returned by createAgentServices
 *
 * Each member is constructed and wired together by createAgentServices and
 * handed back to the caller as one fully initialized service graph.
 */
export type AgentServices = {
    mcpManager: MCPManager;
    toolManager: ToolManager;
    systemPromptManager: SystemPromptManager;
    agentEventBus: AgentEventBus;
    stateManager: AgentStateManager;
    sessionManager: SessionManager;
    searchService: SearchService;
    storageManager: StorageManager;
    resourceManager: ResourceManager;
    approvalManager: ApprovalManager;
    memoryManager: MemoryManager;
    pluginManager: PluginManager;
};
|
||||
|
||||
// High-level factory to load, validate, and wire up all agent services in one call
/**
 * Initializes all agent services from a validated configuration.
 *
 * Initialization order matters: telemetry first (decorators), then storage,
 * approval, MCP, search/memory, plugins, resources, tools, prompt/state, and
 * finally the session manager which consumes most of the earlier services.
 *
 * @param config The validated agent configuration object
 * @param configPath Optional path to the config file (for relative path resolution)
 * @param logger Logger instance for this agent (dependency injection)
 * @param agentEventBus Pre-created event bus from DextoAgent constructor
 * @returns All the initialized services required for a Dexto agent
 */
export async function createAgentServices(
    config: ValidatedAgentConfig,
    configPath: string | undefined,
    logger: IDextoLogger,
    agentEventBus: AgentEventBus
): Promise<AgentServices> {
    // 0. Initialize telemetry FIRST (before any decorated classes are instantiated)
    // This must happen before creating any services that use @InstrumentClass decorator
    if (config.telemetry?.enabled) {
        // Lazy import so telemetry code is only loaded when enabled.
        const { Telemetry } = await import('../telemetry/telemetry.js');
        await Telemetry.init(config.telemetry);
        logger.debug('Telemetry initialized');
    }

    // 1. Use the event bus provided by DextoAgent constructor
    logger.debug('Using pre-created agent event bus');

    // 2. Initialize storage manager (schema provides in-memory defaults, CLI enrichment adds filesystem paths)
    logger.debug('Initializing storage manager');
    const storageManager = await createStorageManager(config.storage, logger);

    logger.debug('Storage manager initialized', {
        cache: config.storage.cache.type,
        database: config.storage.database.type,
    });

    // 3. Initialize approval system (generalized user approval)
    // Created before MCP manager since MCP manager depends on it for elicitation support
    logger.debug('Initializing approval manager');
    const approvalManager = new ApprovalManager(
        {
            toolConfirmation: {
                mode: config.toolConfirmation.mode,
                // Spread pattern: only include timeout when explicitly configured.
                ...(config.toolConfirmation.timeout !== undefined && {
                    timeout: config.toolConfirmation.timeout,
                }),
            },
            elicitation: {
                enabled: config.elicitation.enabled,
                ...(config.elicitation.timeout !== undefined && {
                    timeout: config.elicitation.timeout,
                }),
            },
        },
        logger
    );
    logger.debug('Approval system initialized');

    // 4. Initialize MCP manager
    const mcpManager = new MCPManager(logger);
    await mcpManager.initializeFromConfig(config.mcpServers);

    // 4.1 - Wire approval manager into MCP manager for elicitation support
    mcpManager.setApprovalManager(approvalManager);
    logger.debug('Approval manager connected to MCP manager for elicitation support');

    // 5. Initialize search service
    const searchService = new SearchService(storageManager.getDatabase(), logger);

    // 6. Initialize memory manager
    const memoryManager = new MemoryManager(storageManager.getDatabase(), logger);
    logger.debug('Memory manager initialized');

    // 6.5 Initialize plugin manager
    // configDir is derived from the config file location; falls back to cwd.
    const configDir = configPath ? dirname(resolve(configPath)) : process.cwd();
    const pluginManager = new PluginManager(
        {
            agentEventBus,
            storageManager,
            configDir,
        },
        logger
    );

    // Register built-in plugins from registry
    registerBuiltInPlugins({ pluginManager, config });
    logger.debug('Built-in plugins registered');

    // Initialize plugin manager (loads custom and registry plugins, validates, calls initialize())
    await pluginManager.initialize(config.plugins.custom, config.plugins.registry);
    logger.info('Plugin manager initialized');

    // 7. Initialize resource manager (MCP + internal resources)
    // Moved before tool manager so it can be passed to internal tools
    const resourceManager = new ResourceManager(
        mcpManager,
        {
            internalResourcesConfig: config.internalResources,
            blobStore: storageManager.getBlobStore(),
        },
        logger
    );
    await resourceManager.initialize();

    // 8. Initialize tool manager with internal tools options
    // 8.1 - Create allowed tools provider based on configuration
    const allowedToolsProvider = createAllowedToolsProvider(
        {
            type: config.toolConfirmation.allowedToolsStorage,
            storageManager,
        },
        logger
    );

    // 8.2 - Initialize tool manager with direct ApprovalManager integration
    const toolManager = new ToolManager(
        mcpManager,
        approvalManager,
        allowedToolsProvider,
        config.toolConfirmation.mode,
        agentEventBus,
        config.toolConfirmation.toolPolicies,
        {
            internalToolsServices: {
                searchService,
                resourceManager,
            },
            internalToolsConfig: config.internalTools,
            customToolsConfig: config.customTools,
        },
        logger
    );
    // NOTE: toolManager.initialize() is called in DextoAgent.start() after agent reference is set
    // This allows custom tools to access the agent for bidirectional communication

    const mcpServerCount = Object.keys(config.mcpServers).length;
    if (mcpServerCount === 0) {
        logger.info('Agent initialized without MCP servers - only built-in capabilities available');
    } else {
        logger.debug(`MCPManager initialized with ${mcpServerCount} MCP server(s)`);
    }

    if (config.internalTools.length === 0) {
        logger.info('No internal tools enabled by configuration');
    } else {
        logger.info(`Internal tools enabled: ${config.internalTools.join(', ')}`);
    }

    // 9. Initialize prompt manager
    logger.debug(
        `[ServiceInitializer] Creating SystemPromptManager with configPath: ${configPath} → configDir: ${configDir}`
    );
    const systemPromptManager = new SystemPromptManager(
        config.systemPrompt,
        configDir,
        memoryManager,
        config.memories,
        logger
    );

    // 10. Initialize state manager for runtime state tracking
    const stateManager = new AgentStateManager(config, agentEventBus, logger);
    logger.debug('Agent state manager initialized');

    // 11. Initialize session manager
    const sessionManager = new SessionManager(
        {
            stateManager,
            systemPromptManager,
            toolManager,
            agentEventBus,
            storageManager, // Add storage manager to session services
            resourceManager, // Add resource manager for blob storage
            pluginManager, // Add plugin manager for plugin execution
            mcpManager, // Add MCP manager for ChatSession
        },
        {
            maxSessions: config.sessions?.maxSessions,
            sessionTTL: config.sessions?.sessionTTL,
        },
        logger
    );

    // Initialize the session manager with persistent storage
    await sessionManager.init();

    logger.debug('Session manager initialized with storage support');

    // 12.5 Wire up plugin support to ToolManager (after SessionManager is created)
    toolManager.setPluginSupport(pluginManager, sessionManager, stateManager);
    logger.debug('Plugin support connected to ToolManager');

    // 13. Return the core services
    return {
        mcpManager,
        toolManager,
        systemPromptManager,
        agentEventBus,
        stateManager,
        sessionManager,
        searchService,
        storageManager,
        resourceManager,
        approvalManager,
        memoryManager,
        pluginManager,
    };
}
|
||||
6
dexto/packages/core/src/utils/user-info.ts
Normal file
6
dexto/packages/core/src/utils/user-info.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
// Utility to get the current user ID
|
||||
// TODO: Update this logic to support multi-tenancy (e.g., from session, DB, or web app state)
|
||||
|
||||
export function getUserId(): string {
|
||||
return 'default-user';
|
||||
}
|
||||
106
dexto/packages/core/src/utils/zod-schema-converter.ts
Normal file
106
dexto/packages/core/src/utils/zod-schema-converter.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
/**
|
||||
* Converts a JSON Schema object to a Zod raw shape.
|
||||
* This is a simplified converter that handles common MCP tool schemas.
|
||||
*/
|
||||
export function jsonSchemaToZodShape(jsonSchema: any): z.ZodRawShape {
|
||||
if (!jsonSchema || typeof jsonSchema !== 'object' || jsonSchema.type !== 'object') {
|
||||
return {};
|
||||
}
|
||||
|
||||
const shape: z.ZodRawShape = {};
|
||||
|
||||
if (jsonSchema.properties) {
|
||||
for (const [key, property] of Object.entries(jsonSchema.properties)) {
|
||||
const propSchema = property as any;
|
||||
let zodType: z.ZodTypeAny;
|
||||
switch (propSchema.type) {
|
||||
case 'string':
|
||||
zodType = z.string();
|
||||
break;
|
||||
case 'number':
|
||||
zodType = z.number();
|
||||
break;
|
||||
case 'integer':
|
||||
zodType = z.number().int();
|
||||
break;
|
||||
case 'boolean':
|
||||
zodType = z.boolean();
|
||||
break;
|
||||
case 'array':
|
||||
if (propSchema.items) {
|
||||
const itemType = getZodTypeFromProperty(propSchema.items);
|
||||
zodType = z.array(itemType);
|
||||
} else {
|
||||
zodType = z.array(z.any());
|
||||
}
|
||||
break;
|
||||
case 'object':
|
||||
zodType = z.object(jsonSchemaToZodShape(propSchema));
|
||||
break;
|
||||
default:
|
||||
zodType = z.any();
|
||||
}
|
||||
|
||||
// Add description if present using custom metadata
|
||||
if (propSchema.description) {
|
||||
// Try to add description as custom property (this might get picked up by the SDK)
|
||||
(zodType as any)._def.description = propSchema.description;
|
||||
zodType = zodType.describe(propSchema.description);
|
||||
}
|
||||
|
||||
// Make optional if not in required array
|
||||
if (!jsonSchema.required || !jsonSchema.required.includes(key)) {
|
||||
zodType = zodType.optional();
|
||||
}
|
||||
|
||||
shape[key] = zodType;
|
||||
}
|
||||
}
|
||||
|
||||
return shape;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to get a Zod type from a property schema
|
||||
*/
|
||||
export function getZodTypeFromProperty(propSchema: any): z.ZodTypeAny {
|
||||
let zodType: z.ZodTypeAny;
|
||||
|
||||
switch (propSchema.type) {
|
||||
case 'string':
|
||||
zodType = z.string();
|
||||
break;
|
||||
case 'number':
|
||||
zodType = z.number();
|
||||
break;
|
||||
case 'integer':
|
||||
zodType = z.number().int();
|
||||
break;
|
||||
case 'boolean':
|
||||
zodType = z.boolean();
|
||||
break;
|
||||
case 'object':
|
||||
zodType = z.object(jsonSchemaToZodShape(propSchema));
|
||||
break;
|
||||
case 'array':
|
||||
if (propSchema.items) {
|
||||
zodType = z.array(getZodTypeFromProperty(propSchema.items));
|
||||
} else {
|
||||
zodType = z.array(z.any());
|
||||
}
|
||||
break;
|
||||
default:
|
||||
zodType = z.any();
|
||||
}
|
||||
|
||||
// Add description if present using custom metadata
|
||||
if (propSchema.description) {
|
||||
// Try to add description as custom property (this might get picked up by the SDK)
|
||||
(zodType as any)._def.description = propSchema.description;
|
||||
zodType = zodType.describe(propSchema.description);
|
||||
}
|
||||
|
||||
return zodType;
|
||||
}
|
||||
Reference in New Issue
Block a user