feat: Add intelligent auto-router and enhanced integrations
- Add intelligent-router.sh hook for automatic agent routing
- Add AUTO-TRIGGER-SUMMARY.md documentation
- Add FINAL-INTEGRATION-SUMMARY.md documentation
- Complete Prometheus integration (6 commands + 4 tools)
- Complete Dexto integration (12 commands + 5 tools)
- Enhance Ralph with access to all agents
- Fix /clawd command (removed disable-model-invocation)
- Update hooks.json to v5 with intelligent routing
- 291 total skills now available
- All 21 commands with automatic routing

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
13
dexto/packages/core/src/resources/error-codes.ts
Normal file
13
dexto/packages/core/src/resources/error-codes.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
/**
 * Stable, machine-readable error codes for the resource subsystem.
 * Every value is namespaced with a `resource_` prefix so codes are unique
 * across subsystems; `ResourceError` factories attach these to the errors
 * they build.
 */
export const ResourceErrorCodes = {
    /** Resource URI is syntactically malformed. */
    INVALID_URI_FORMAT: 'resource_invalid_uri_format',
    /** Resource URI was empty. */
    EMPTY_URI: 'resource_empty_uri',
    /** No resource exists at the given URI. */
    RESOURCE_NOT_FOUND: 'resource_not_found',
    /** A provider exists for the URI but has not been initialized yet. */
    PROVIDER_NOT_INITIALIZED: 'resource_provider_not_initialized',
    /** The requested provider is not available in this configuration. */
    PROVIDER_NOT_AVAILABLE: 'resource_provider_not_available',
    /** Reading the resource content failed. */
    READ_FAILED: 'resource_read_failed',
    /** Caller lacks permission to access the resource. */
    ACCESS_DENIED: 'resource_access_denied',
    /** No registered provider can handle the URI. */
    NO_SUITABLE_PROVIDER: 'resource_no_suitable_provider',
    /** A provider raised an internal error during an operation. */
    PROVIDER_ERROR: 'resource_provider_error',
} as const;

/** Union of all resource error code string values. */
export type ResourceErrorCode = (typeof ResourceErrorCodes)[keyof typeof ResourceErrorCodes];
|
||||
144
dexto/packages/core/src/resources/errors.ts
Normal file
144
dexto/packages/core/src/resources/errors.ts
Normal file
@@ -0,0 +1,144 @@
|
||||
import { DextoRuntimeError } from '@core/errors/DextoRuntimeError.js';
|
||||
import { ErrorScope, ErrorType } from '@core/errors/types.js';
|
||||
import { ResourceErrorCodes } from './error-codes.js';
|
||||
|
||||
/**
 * Resource management error factory.
 *
 * Creates properly typed errors for resource operations. Every factory method
 * returns a `DextoRuntimeError` tagged with a `ResourceErrorCodes` code,
 * `ErrorScope.RESOURCE`, an `ErrorType` classification, a human message,
 * a structured context object, and a remediation hint.
 *
 * NOTE(review): the factories return `DextoRuntimeError` instances — this
 * class itself is never instantiated — so `err instanceof ResourceError`
 * will never match a thrown error. Narrow on `DextoRuntimeError` or on the
 * error code instead.
 */
export class ResourceError {
    /**
     * Mask credentials and secret-looking query parameters in a URI so it is
     * safe to embed in error messages and context payloads.
     */
    private static redactUri(uri: string): string {
        try {
            const u = new URL(uri);
            // Mask userinfo credentials if present.
            if (u.username) u.username = '***';
            if (u.password) u.password = '***';
            // Mask query parameters whose names look secret-bearing.
            u.searchParams.forEach((_, k) => {
                if (/token|key|secret|sig|pwd|password/i.test(k)) u.searchParams.set(k, '***');
            });
            return u.toString();
        } catch {
            // Not a WHATWG-parseable URL: fall back to regex masking of the
            // '//user:pass@' userinfo section and secret-looking query values.
            return uri
                .replace(/\/\/([^@]+)@/, '//***@')
                .replace(/((?:token|key|secret|sig|pwd|password)=)[^&]*/gi, '$1***');
        }
    }

    /**
     * Normalize an unknown thrown value into a human-readable message plus a
     * structured raw form suitable for error context payloads.
     */
    private static toMessageAndRaw(reason: unknown): { message: string; raw: unknown } {
        if (reason instanceof Error) {
            return {
                message: reason.message,
                raw: { name: reason.name, message: reason.message, stack: reason.stack },
            };
        }
        if (typeof reason === 'string') return { message: reason, raw: reason };
        try {
            return { message: JSON.stringify(reason), raw: reason };
        } catch {
            // JSON.stringify can throw (e.g. on circular structures); fall back.
            return { message: String(reason), raw: reason };
        }
    }

    // URI format and parsing errors

    /** URI is syntactically malformed (USER error). */
    static invalidUriFormat(uri: string, expected?: string) {
        return new DextoRuntimeError(
            ResourceErrorCodes.INVALID_URI_FORMAT,
            ErrorScope.RESOURCE,
            ErrorType.USER,
            `Invalid resource URI format: '${ResourceError.redactUri(uri)}'${expected ? ` (expected ${expected})` : ''}`,
            { uri: ResourceError.redactUri(uri), uriRaw: uri, expected },
            expected ? `Use format: ${expected}` : 'Check the resource URI format'
        );
    }

    /** URI was empty (USER error). */
    static emptyUri() {
        return new DextoRuntimeError(
            ResourceErrorCodes.EMPTY_URI,
            ErrorScope.RESOURCE,
            ErrorType.USER,
            'Resource URI cannot be empty',
            {},
            'Provide a valid resource URI'
        );
    }

    // Resource discovery and access errors

    /** No resource exists at the URI (NOT_FOUND). */
    static resourceNotFound(uri: string) {
        return new DextoRuntimeError(
            ResourceErrorCodes.RESOURCE_NOT_FOUND,
            ErrorScope.RESOURCE,
            ErrorType.NOT_FOUND,
            `Resource not found: '${ResourceError.redactUri(uri)}'`,
            { uri: ResourceError.redactUri(uri), uriRaw: uri },
            'Check that the resource exists and is accessible'
        );
    }

    /** Provider exists but has not been initialized (SYSTEM error). */
    static providerNotInitialized(providerType: string, uri: string) {
        return new DextoRuntimeError(
            ResourceErrorCodes.PROVIDER_NOT_INITIALIZED,
            ErrorScope.RESOURCE,
            ErrorType.SYSTEM,
            `${providerType} resource provider not initialized for: '${ResourceError.redactUri(uri)}'`,
            { providerType, uri: ResourceError.redactUri(uri), uriRaw: uri },
            'Ensure the resource provider is properly configured'
        );
    }

    /** Provider is not available in this configuration (SYSTEM error). */
    static providerNotAvailable(providerType: string) {
        return new DextoRuntimeError(
            ResourceErrorCodes.PROVIDER_NOT_AVAILABLE,
            ErrorScope.RESOURCE,
            ErrorType.SYSTEM,
            `${providerType} resource provider is not available`,
            { providerType },
            'Check resource provider configuration and availability'
        );
    }

    // Content access errors

    /** Reading the resource content failed; wraps the underlying cause. */
    static readFailed(uri: string, reason: unknown) {
        const { message: reasonMsg, raw: reasonRaw } = ResourceError.toMessageAndRaw(reason);
        return new DextoRuntimeError(
            ResourceErrorCodes.READ_FAILED,
            ErrorScope.RESOURCE,
            ErrorType.SYSTEM,
            `Failed to read resource '${ResourceError.redactUri(uri)}': ${reasonMsg}`,
            { uri: ResourceError.redactUri(uri), uriRaw: uri, reason: reasonMsg, reasonRaw },
            'Check resource permissions and availability'
        );
    }

    /** Caller lacks permission to access the resource (FORBIDDEN). */
    static accessDenied(uri: string) {
        return new DextoRuntimeError(
            ResourceErrorCodes.ACCESS_DENIED,
            ErrorScope.RESOURCE,
            ErrorType.FORBIDDEN,
            `Access denied to resource: '${ResourceError.redactUri(uri)}'`,
            { uri: ResourceError.redactUri(uri), uriRaw: uri },
            'Ensure you have permission to access this resource'
        );
    }

    // Provider coordination errors

    /** No registered provider can handle the URI (NOT_FOUND). */
    static noSuitableProvider(uri: string) {
        return new DextoRuntimeError(
            ResourceErrorCodes.NO_SUITABLE_PROVIDER,
            ErrorScope.RESOURCE,
            ErrorType.NOT_FOUND,
            `No suitable provider found for resource: '${ResourceError.redactUri(uri)}'`,
            { uri: ResourceError.redactUri(uri), uriRaw: uri },
            'Check that the resource type is supported'
        );
    }

    /** Provider raised an internal error during an operation; wraps the cause. */
    static providerError(providerType: string, operation: string, reason: unknown) {
        const { message: reasonMsg, raw: reasonRaw } = ResourceError.toMessageAndRaw(reason);
        return new DextoRuntimeError(
            ResourceErrorCodes.PROVIDER_ERROR,
            ErrorScope.RESOURCE,
            ErrorType.SYSTEM,
            `${providerType} provider failed during ${operation}: ${reasonMsg}`,
            { providerType, operation, reason: reasonMsg, reasonRaw },
            'Check provider configuration and logs for details'
        );
    }
}
|
||||
259
dexto/packages/core/src/resources/handlers/blob-handler.ts
Normal file
259
dexto/packages/core/src/resources/handlers/blob-handler.ts
Normal file
@@ -0,0 +1,259 @@
|
||||
import type { IDextoLogger } from '../../logger/v2/types.js';
|
||||
import { DextoLogComponent } from '../../logger/v2/types.js';
|
||||
import { ResourceError } from '../errors.js';
|
||||
import type { ResourceMetadata } from '../types.js';
|
||||
import type { ReadResourceResult } from '@modelcontextprotocol/sdk/types.js';
|
||||
import type { BlobStore, StoredBlobMetadata } from '../../storage/blob/types.js';
|
||||
import type { ValidatedBlobResourceConfig } from '../schemas.js';
|
||||
import type { InternalResourceHandler, InternalResourceServices } from './types.js';
|
||||
|
||||
export class BlobResourceHandler implements InternalResourceHandler {
|
||||
private config: ValidatedBlobResourceConfig;
|
||||
private blobStore: BlobStore;
|
||||
private logger: IDextoLogger;
|
||||
|
||||
constructor(config: ValidatedBlobResourceConfig, blobStore: BlobStore, logger: IDextoLogger) {
|
||||
this.config = config;
|
||||
this.blobStore = blobStore;
|
||||
this.logger = logger.createChild(DextoLogComponent.RESOURCE);
|
||||
}
|
||||
|
||||
getType(): string {
|
||||
return 'blob';
|
||||
}
|
||||
|
||||
async initialize(_services: InternalResourceServices): Promise<void> {
|
||||
// Config and blobStore are set in constructor
|
||||
this.logger.debug('BlobResourceHandler initialized with BlobStore');
|
||||
}
|
||||
|
||||
async listResources(): Promise<ResourceMetadata[]> {
|
||||
this.logger.debug('🔍 BlobResourceHandler.listResources() called');
|
||||
|
||||
try {
|
||||
const stats = await this.blobStore.getStats();
|
||||
this.logger.debug(
|
||||
`📊 BlobStore stats: ${stats.count} blobs, backend: ${stats.backendType}`
|
||||
);
|
||||
const resources: ResourceMetadata[] = [];
|
||||
|
||||
// List individual blobs from the store
|
||||
try {
|
||||
const blobs = await this.blobStore.listBlobs();
|
||||
this.logger.debug(`📄 Found ${blobs.length} individual blobs`);
|
||||
|
||||
for (const blob of blobs) {
|
||||
// Filter out 'system' source blobs (prompt .md files, custom prompt attachments).
|
||||
// These are internal config accessed via prompt system, not @-referenceable resources.
|
||||
// Prevents clutter and confusion in resource autocomplete.
|
||||
if (blob.metadata.source === 'system') {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Generate a user-friendly name with proper extension
|
||||
const displayName = this.generateBlobDisplayName(blob.metadata, blob.id);
|
||||
const friendlyType = this.getFriendlyType(blob.metadata.mimeType);
|
||||
|
||||
resources.push({
|
||||
uri: blob.uri,
|
||||
name: displayName,
|
||||
description: `${friendlyType} (${this.formatSize(blob.metadata.size)})${blob.metadata.source ? ` • ${blob.metadata.source}` : ''}`,
|
||||
source: 'internal',
|
||||
size: blob.metadata.size,
|
||||
mimeType: blob.metadata.mimeType,
|
||||
lastModified: new Date(blob.metadata.createdAt),
|
||||
metadata: {
|
||||
type: 'blob',
|
||||
source: blob.metadata.source,
|
||||
hash: blob.metadata.hash,
|
||||
createdAt: blob.metadata.createdAt,
|
||||
originalName: blob.metadata.originalName,
|
||||
},
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn(`Failed to list individual blobs: ${String(error)}`);
|
||||
}
|
||||
|
||||
this.logger.debug(`✅ BlobResourceHandler returning ${resources.length} resources`);
|
||||
return resources;
|
||||
} catch (error) {
|
||||
this.logger.warn(`Failed to list blob resources: ${String(error)}`);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
canHandle(uri: string): boolean {
|
||||
return uri.startsWith('blob:');
|
||||
}
|
||||
|
||||
async readResource(uri: string): Promise<ReadResourceResult> {
|
||||
if (!this.canHandle(uri)) {
|
||||
throw ResourceError.noSuitableProvider(uri);
|
||||
}
|
||||
|
||||
try {
|
||||
// Extract blob ID from URI (remove 'blob:' prefix)
|
||||
const blobId = uri.substring(5);
|
||||
|
||||
// Validate blob ID
|
||||
if (!blobId) {
|
||||
throw ResourceError.readFailed(uri, new Error('Invalid blob URI: missing blob ID'));
|
||||
}
|
||||
|
||||
// Special case: blob store info
|
||||
if (blobId === 'store') {
|
||||
const stats = await this.blobStore.getStats();
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri,
|
||||
mimeType: 'application/json',
|
||||
text: JSON.stringify(stats, null, 2),
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
// Retrieve actual blob data using blob ID
|
||||
const result = await this.blobStore.retrieve(blobId, 'base64');
|
||||
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri,
|
||||
mimeType: result.metadata.mimeType || 'application/octet-stream',
|
||||
blob: result.data as string, // base64 data from retrieve call
|
||||
},
|
||||
],
|
||||
_meta: {
|
||||
size: result.metadata.size,
|
||||
createdAt: result.metadata.createdAt,
|
||||
originalName: result.metadata.originalName,
|
||||
source: result.metadata.source,
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
if (error instanceof ResourceError) {
|
||||
throw error;
|
||||
}
|
||||
throw ResourceError.readFailed(uri, error);
|
||||
}
|
||||
}
|
||||
|
||||
async refresh(): Promise<void> {
|
||||
// BlobStore doesn't need refresh as it's not file-system based scanning
|
||||
// But we can perform cleanup of old blobs if configured
|
||||
try {
|
||||
await this.blobStore.cleanup();
|
||||
this.logger.debug('Blob store cleanup completed');
|
||||
} catch (error) {
|
||||
this.logger.warn(`Blob store cleanup failed: ${String(error)}`);
|
||||
}
|
||||
}
|
||||
|
||||
getBlobStore(): BlobStore {
|
||||
return this.blobStore;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a user-friendly display name for a blob with proper file extension
|
||||
*/
|
||||
private generateBlobDisplayName(metadata: StoredBlobMetadata, _blobId: string): string {
|
||||
// If we have an original name with extension, use it
|
||||
if (metadata.originalName && metadata.originalName.includes('.')) {
|
||||
return metadata.originalName;
|
||||
}
|
||||
|
||||
// Generate a name based on MIME type and content
|
||||
let baseName =
|
||||
metadata.originalName || this.generateNameFromType(metadata.mimeType, metadata.source);
|
||||
const extension = this.getExtensionFromMimeType(metadata.mimeType);
|
||||
|
||||
// Add extension if not present
|
||||
if (extension && !baseName.toLowerCase().endsWith(extension)) {
|
||||
baseName += extension;
|
||||
}
|
||||
|
||||
return baseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a descriptive base name from MIME type and source
|
||||
*/
|
||||
private generateNameFromType(mimeType: string, source?: string): string {
|
||||
if (mimeType.startsWith('image/')) {
|
||||
if (source === 'user') return 'uploaded-image';
|
||||
if (source === 'tool') return 'generated-image';
|
||||
return 'image';
|
||||
}
|
||||
if (mimeType.startsWith('text/')) {
|
||||
if (source === 'tool') return 'tool-output';
|
||||
return 'text-file';
|
||||
}
|
||||
if (mimeType.startsWith('application/pdf')) {
|
||||
return 'document';
|
||||
}
|
||||
if (mimeType.startsWith('audio/')) {
|
||||
return 'audio-file';
|
||||
}
|
||||
if (mimeType.startsWith('video/')) {
|
||||
return 'video-file';
|
||||
}
|
||||
|
||||
// Default based on source
|
||||
if (source === 'user') return 'user-upload';
|
||||
if (source === 'tool') return 'tool-result';
|
||||
return 'file';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file extension from MIME type
|
||||
*/
|
||||
private getExtensionFromMimeType(mimeType: string): string {
|
||||
const mimeToExt: Record<string, string> = {
|
||||
'image/jpeg': '.jpg',
|
||||
'image/png': '.png',
|
||||
'image/gif': '.gif',
|
||||
'image/webp': '.webp',
|
||||
'image/svg+xml': '.svg',
|
||||
'text/plain': '.txt',
|
||||
'text/markdown': '.md',
|
||||
'text/html': '.html',
|
||||
'text/css': '.css',
|
||||
'application/json': '.json',
|
||||
'application/pdf': '.pdf',
|
||||
'application/xml': '.xml',
|
||||
'audio/mpeg': '.mp3',
|
||||
'audio/wav': '.wav',
|
||||
'video/mp4': '.mp4',
|
||||
'video/webm': '.webm',
|
||||
};
|
||||
|
||||
return mimeToExt[mimeType] || '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert MIME type to user-friendly type description
|
||||
*/
|
||||
private getFriendlyType(mimeType: string): string {
|
||||
if (mimeType.startsWith('image/')) return 'Image';
|
||||
if (mimeType.startsWith('text/')) return 'Text File';
|
||||
if (mimeType.startsWith('audio/')) return 'Audio File';
|
||||
if (mimeType.startsWith('video/')) return 'Video File';
|
||||
if (mimeType === 'application/pdf') return 'PDF Document';
|
||||
if (mimeType === 'application/json') return 'JSON Data';
|
||||
return 'File';
|
||||
}
|
||||
|
||||
/**
|
||||
* Format file size in human-readable format
|
||||
*/
|
||||
private formatSize(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const k = 1024;
|
||||
const sizes = ['B', 'KB', 'MB', 'GB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
|
||||
}
|
||||
}
|
||||
37
dexto/packages/core/src/resources/handlers/factory.ts
Normal file
37
dexto/packages/core/src/resources/handlers/factory.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { ResourceError } from '../errors.js';
|
||||
import { FileSystemResourceHandler } from './filesystem-handler.js';
|
||||
import { BlobResourceHandler } from './blob-handler.js';
|
||||
import type { InternalResourceServices, InternalResourceHandler } from './types.js';
|
||||
import type { ValidatedInternalResourceConfig } from '../schemas.js';
|
||||
import type { IDextoLogger } from '../../logger/v2/types.js';
|
||||
|
||||
/**
|
||||
* Factory function for creating internal resource handlers
|
||||
*/
|
||||
export function createInternalResourceHandler(
|
||||
config: ValidatedInternalResourceConfig,
|
||||
services: InternalResourceServices,
|
||||
logger: IDextoLogger
|
||||
): InternalResourceHandler {
|
||||
const type = config.type;
|
||||
if (type === 'filesystem') {
|
||||
// Pass blob storage path to filesystem handler to avoid scanning blob directories
|
||||
const blobStoragePath = services.blobStore.getStoragePath();
|
||||
return new FileSystemResourceHandler(config, logger, blobStoragePath);
|
||||
}
|
||||
if (type === 'blob') {
|
||||
return new BlobResourceHandler(config, services.blobStore, logger);
|
||||
}
|
||||
throw ResourceError.providerError(
|
||||
'Internal',
|
||||
'createInternalResourceHandler',
|
||||
`Unsupported internal resource handler type: ${type}`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all supported internal resource handler types
|
||||
*/
|
||||
export function getInternalResourceHandlerTypes(): string[] {
|
||||
return ['filesystem', 'blob'];
|
||||
}
|
||||
432
dexto/packages/core/src/resources/handlers/filesystem-handler.ts
Normal file
432
dexto/packages/core/src/resources/handlers/filesystem-handler.ts
Normal file
@@ -0,0 +1,432 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import type { IDextoLogger } from '../../logger/v2/types.js';
|
||||
import { DextoLogComponent } from '../../logger/v2/types.js';
|
||||
import { ResourceError } from '../errors.js';
|
||||
import type { ResourceMetadata } from '../types.js';
|
||||
import type { ReadResourceResult } from '@modelcontextprotocol/sdk/types.js';
|
||||
import type { ValidatedFileSystemResourceConfig } from '../schemas.js';
|
||||
import type { InternalResourceHandler, InternalResourceServices } from './types.js';
|
||||
|
||||
export class FileSystemResourceHandler implements InternalResourceHandler {
|
||||
private config: ValidatedFileSystemResourceConfig;
|
||||
private resourcesCache: Map<string, ResourceMetadata> = new Map();
|
||||
private visitedPaths: Set<string> = new Set();
|
||||
private fileCount: number = 0;
|
||||
private canonicalRoots: string[] = [];
|
||||
private blobStoragePath: string | undefined;
|
||||
private logger: IDextoLogger;
|
||||
|
||||
constructor(
|
||||
config: ValidatedFileSystemResourceConfig,
|
||||
logger: IDextoLogger,
|
||||
blobStoragePath?: string
|
||||
) {
|
||||
this.config = config;
|
||||
this.logger = logger.createChild(DextoLogComponent.RESOURCE);
|
||||
this.blobStoragePath = blobStoragePath;
|
||||
}
|
||||
|
||||
getType(): string {
|
||||
return 'filesystem';
|
||||
}
|
||||
|
||||
async initialize(_services: InternalResourceServices): Promise<void> {
|
||||
// Config is set in constructor, just do async initialization
|
||||
this.canonicalRoots = [];
|
||||
for (const configPath of this.config.paths) {
|
||||
try {
|
||||
const canonicalRoot = await fs.realpath(path.resolve(configPath));
|
||||
this.canonicalRoots.push(canonicalRoot);
|
||||
} catch (error) {
|
||||
this.logger.warn(
|
||||
`Failed to canonicalize root path '${configPath}': ${error instanceof Error ? error.message : String(error)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
await this.buildResourceCache();
|
||||
}
|
||||
|
||||
async listResources(): Promise<ResourceMetadata[]> {
|
||||
return Array.from(this.resourcesCache.values());
|
||||
}
|
||||
|
||||
canHandle(uri: string): boolean {
|
||||
return uri.startsWith('fs://');
|
||||
}
|
||||
|
||||
private isPathAllowed(canonicalPath: string): boolean {
|
||||
return this.canonicalRoots.some((root) => {
|
||||
const normalizedPath = path.normalize(canonicalPath);
|
||||
const normalizedRoot = path.normalize(root);
|
||||
return (
|
||||
normalizedPath.startsWith(normalizedRoot + path.sep) ||
|
||||
normalizedPath === normalizedRoot
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a path is a blob storage directory that should be excluded
|
||||
* from filesystem resource scanning to avoid conflicts with BlobResourceHandler
|
||||
*/
|
||||
private isBlobStorageDirectory(canonicalPath: string): boolean {
|
||||
if (!this.blobStoragePath) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if this path is under the actual blob storage directory
|
||||
const normalizedPath = path.normalize(canonicalPath);
|
||||
const normalizedBlobPath = path.normalize(this.blobStoragePath);
|
||||
|
||||
return (
|
||||
normalizedPath === normalizedBlobPath ||
|
||||
normalizedPath.startsWith(normalizedBlobPath + path.sep)
|
||||
);
|
||||
}
|
||||
|
||||
async readResource(uri: string): Promise<ReadResourceResult> {
|
||||
if (!this.canHandle(uri)) {
|
||||
throw ResourceError.noSuitableProvider(uri);
|
||||
}
|
||||
|
||||
const filePath = uri.replace('fs://', '');
|
||||
const resolvedPath = path.resolve(filePath);
|
||||
|
||||
let canonicalPath: string;
|
||||
try {
|
||||
canonicalPath = await fs.realpath(resolvedPath);
|
||||
} catch (_error) {
|
||||
throw ResourceError.resourceNotFound(uri);
|
||||
}
|
||||
|
||||
if (!this.isPathAllowed(canonicalPath)) {
|
||||
throw ResourceError.accessDenied(uri);
|
||||
}
|
||||
|
||||
try {
|
||||
const stat = await fs.stat(canonicalPath);
|
||||
if (stat.size > 10 * 1024 * 1024) {
|
||||
throw ResourceError.readFailed(uri, `File too large (${stat.size} bytes)`);
|
||||
}
|
||||
|
||||
if (this.isBinaryFile(canonicalPath)) {
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri,
|
||||
mimeType: 'text/plain',
|
||||
text: `[Binary file: ${path.basename(canonicalPath)} (${stat.size} bytes)]`,
|
||||
},
|
||||
],
|
||||
_meta: {
|
||||
isBinary: true,
|
||||
size: stat.size,
|
||||
originalMimeType: this.getMimeType(canonicalPath),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const content = await fs.readFile(canonicalPath, 'utf-8');
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri,
|
||||
mimeType: this.getMimeType(canonicalPath),
|
||||
text: content,
|
||||
},
|
||||
],
|
||||
_meta: { size: stat.size },
|
||||
};
|
||||
} catch (error) {
|
||||
throw ResourceError.readFailed(uri, error);
|
||||
}
|
||||
}
|
||||
|
||||
private isBinaryFile(filePath: string): boolean {
|
||||
const ext = path.extname(filePath).toLowerCase();
|
||||
const binaryExtensions = [
|
||||
'.exe',
|
||||
'.dll',
|
||||
'.so',
|
||||
'.dylib',
|
||||
'.bin',
|
||||
'.dat',
|
||||
'.db',
|
||||
'.sqlite',
|
||||
'.jpg',
|
||||
'.jpeg',
|
||||
'.png',
|
||||
'.gif',
|
||||
'.bmp',
|
||||
'.ico',
|
||||
'.tiff',
|
||||
'.webp',
|
||||
'.mp3',
|
||||
'.mp4',
|
||||
'.avi',
|
||||
'.mov',
|
||||
'.wmv',
|
||||
'.flv',
|
||||
'.mkv',
|
||||
'.webm',
|
||||
'.pdf',
|
||||
'.zip',
|
||||
'.tar',
|
||||
'.gz',
|
||||
'.7z',
|
||||
'.rar',
|
||||
'.dmg',
|
||||
'.iso',
|
||||
'.woff',
|
||||
'.woff2',
|
||||
'.ttf',
|
||||
'.otf',
|
||||
'.eot',
|
||||
'.class',
|
||||
'.jar',
|
||||
'.war',
|
||||
'.ear',
|
||||
'.o',
|
||||
'.obj',
|
||||
'.lib',
|
||||
'.a',
|
||||
];
|
||||
return binaryExtensions.includes(ext);
|
||||
}
|
||||
|
||||
async refresh(): Promise<void> {
|
||||
await this.buildResourceCache();
|
||||
}
|
||||
|
||||
private async buildResourceCache(): Promise<void> {
|
||||
if (!this.config) return;
|
||||
|
||||
this.resourcesCache.clear();
|
||||
this.visitedPaths.clear();
|
||||
this.fileCount = 0;
|
||||
|
||||
const { maxFiles, paths } = this.config;
|
||||
|
||||
for (const configPath of paths) {
|
||||
if (this.fileCount >= maxFiles) {
|
||||
this.logger.warn(`Reached maximum file limit (${maxFiles}), stopping scan`);
|
||||
break;
|
||||
}
|
||||
|
||||
try {
|
||||
const root = await fs.realpath(path.resolve(configPath));
|
||||
await this.scanPath(root, 0, root);
|
||||
} catch (error) {
|
||||
this.logger.warn(
|
||||
`Failed to scan path '${configPath}': ${error instanceof Error ? error.message : String(error)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.debug(
|
||||
`FileSystem resources cached: ${this.resourcesCache.size} resources (${this.fileCount} files scanned)`
|
||||
);
|
||||
}
|
||||
|
||||
private async scanPath(
|
||||
targetPath: string,
|
||||
currentDepth: number,
|
||||
rootBase?: string
|
||||
): Promise<void> {
|
||||
const resolvedPath = path.resolve(targetPath);
|
||||
let canonical: string;
|
||||
try {
|
||||
canonical = await fs.realpath(resolvedPath);
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
if (!this.isPathAllowed(canonical)) return;
|
||||
|
||||
// Skip blob storage directories to avoid conflicts with BlobResourceHandler
|
||||
if (this.isBlobStorageDirectory(canonical)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Config has defaults already applied by schema validation
|
||||
const { maxDepth, maxFiles, includeHidden, includeExtensions } = this.config;
|
||||
|
||||
if (this.fileCount >= maxFiles) return;
|
||||
if (currentDepth > maxDepth) {
|
||||
// silly to avoid spamming the logs
|
||||
this.logger.silly(`Skipping path due to depth limit (${maxDepth}): ${canonical}`);
|
||||
return;
|
||||
}
|
||||
if (this.visitedPaths.has(canonical)) return;
|
||||
this.visitedPaths.add(canonical);
|
||||
|
||||
try {
|
||||
const stat = await fs.stat(canonical);
|
||||
if (stat.isFile()) {
|
||||
if (!this.shouldIncludeFile(canonical, includeExtensions, includeHidden)) return;
|
||||
|
||||
// Use absolute canonical path to ensure readResource resolves correctly
|
||||
const uri = `fs://${canonical.replace(/\\/g, '/')}`;
|
||||
this.resourcesCache.set(uri, {
|
||||
uri,
|
||||
name: this.generateCleanFileName(canonical),
|
||||
description: 'Filesystem resource',
|
||||
source: 'internal',
|
||||
size: stat.size,
|
||||
lastModified: stat.mtime,
|
||||
});
|
||||
this.fileCount++;
|
||||
return;
|
||||
}
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
const entries = await fs.readdir(canonical);
|
||||
for (const entry of entries) {
|
||||
const entryPath = path.join(canonical, entry);
|
||||
await this.scanPath(
|
||||
entryPath,
|
||||
currentDepth + 1,
|
||||
rootBase ?? this.canonicalRoots.find((r) => canonical.startsWith(r))
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.debug(
|
||||
`Skipping inaccessible path: ${canonical} - ${error instanceof Error ? error.message : String(error)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private shouldIncludeFile(
|
||||
filePath: string,
|
||||
includeExtensions: string[],
|
||||
includeHidden: boolean
|
||||
): boolean {
|
||||
const basename = path.basename(filePath).toLowerCase();
|
||||
const ext = path.extname(filePath).toLowerCase();
|
||||
|
||||
if (basename.startsWith('.')) {
|
||||
if (!includeHidden) {
|
||||
const allowedDotfiles = [
|
||||
'.gitignore',
|
||||
'.env',
|
||||
'.env.example',
|
||||
'.npmignore',
|
||||
'.dockerignore',
|
||||
'.editorconfig',
|
||||
];
|
||||
if (!allowedDotfiles.includes(basename)) return false;
|
||||
}
|
||||
if (basename === '.env' || basename.startsWith('.env.')) return true;
|
||||
}
|
||||
|
||||
if (!ext) {
|
||||
const commonNoExtFiles = [
|
||||
'dockerfile',
|
||||
'makefile',
|
||||
'readme',
|
||||
'license',
|
||||
'changelog',
|
||||
'contributing',
|
||||
];
|
||||
return commonNoExtFiles.some((common) => basename.includes(common));
|
||||
}
|
||||
|
||||
if (basename === '.gitignore') return true;
|
||||
|
||||
return includeExtensions.includes(ext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a clean, user-friendly filename from a potentially messy path
|
||||
*/
|
||||
private generateCleanFileName(filePath: string): string {
|
||||
const basename = path.basename(filePath);
|
||||
|
||||
// For screenshot files with timestamps, clean them up
|
||||
if (basename.startsWith('Screenshot ') && basename.includes(' at ')) {
|
||||
// "Screenshot 2025-09-14 at 11.39.20 PM.png" -> "Screenshot 2025-09-14.png"
|
||||
const match = basename.match(/^Screenshot (\d{4}-\d{2}-\d{2}).*?(\.[^.]+)$/);
|
||||
if (match) {
|
||||
return `Screenshot ${match[1]}${match[2]}`;
|
||||
}
|
||||
}
|
||||
|
||||
// For other temp files, just use the basename as-is
|
||||
// but remove any weird prefixes or temp markers
|
||||
if (basename.length > 50) {
|
||||
// If filename is too long, try to extract meaningful parts
|
||||
const ext = path.extname(basename);
|
||||
const nameWithoutExt = path.basename(basename, ext);
|
||||
|
||||
// Look for recognizable patterns
|
||||
const patterns = [
|
||||
/Screenshot.*(\d{4}-\d{2}-\d{2})/,
|
||||
/([A-Za-z\s]+\d{4}-\d{2}-\d{2})/,
|
||||
/(image|photo|file).*(\d+)/i,
|
||||
];
|
||||
|
||||
for (const pattern of patterns) {
|
||||
const match = nameWithoutExt.match(pattern);
|
||||
if (match) {
|
||||
return `${match[1] || match[0]}${ext}`;
|
||||
}
|
||||
}
|
||||
|
||||
// If no pattern matches, truncate intelligently
|
||||
if (nameWithoutExt.length > 30) {
|
||||
return `${nameWithoutExt.substring(0, 30)}...${ext}`;
|
||||
}
|
||||
}
|
||||
|
||||
return basename;
|
||||
}
|
||||
|
||||
private getMimeType(filePath: string): string {
|
||||
const ext = path.extname(filePath).toLowerCase();
|
||||
const mimeTypes: Record<string, string> = {
|
||||
'.txt': 'text/plain',
|
||||
'.md': 'text/markdown',
|
||||
'.markdown': 'text/markdown',
|
||||
'.html': 'text/html',
|
||||
'.htm': 'text/html',
|
||||
'.css': 'text/css',
|
||||
'.js': 'text/javascript',
|
||||
'.mjs': 'text/javascript',
|
||||
'.jsx': 'text/javascript',
|
||||
'.ts': 'text/typescript',
|
||||
'.tsx': 'text/typescript',
|
||||
'.vue': 'text/x-vue',
|
||||
'.json': 'application/json',
|
||||
'.xml': 'text/xml',
|
||||
'.yaml': 'text/yaml',
|
||||
'.yml': 'text/yaml',
|
||||
'.toml': 'text/toml',
|
||||
'.ini': 'text/plain',
|
||||
'.cfg': 'text/plain',
|
||||
'.conf': 'text/plain',
|
||||
'.py': 'text/x-python',
|
||||
'.rb': 'text/x-ruby',
|
||||
'.php': 'text/x-php',
|
||||
'.java': 'text/x-java',
|
||||
'.kt': 'text/x-kotlin',
|
||||
'.swift': 'text/x-swift',
|
||||
'.go': 'text/x-go',
|
||||
'.rs': 'text/x-rust',
|
||||
'.cpp': 'text/x-c++',
|
||||
'.c': 'text/x-c',
|
||||
'.h': 'text/x-c',
|
||||
'.hpp': 'text/x-c++',
|
||||
'.sh': 'text/x-shellscript',
|
||||
'.bash': 'text/x-shellscript',
|
||||
'.zsh': 'text/x-shellscript',
|
||||
'.fish': 'text/x-shellscript',
|
||||
'.sql': 'text/x-sql',
|
||||
'.rst': 'text/x-rst',
|
||||
'.tex': 'text/x-tex',
|
||||
'.dockerfile': 'text/x-dockerfile',
|
||||
};
|
||||
return mimeTypes[ext] || 'text/plain';
|
||||
}
|
||||
}
|
||||
16
dexto/packages/core/src/resources/handlers/types.ts
Normal file
16
dexto/packages/core/src/resources/handlers/types.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import type { ResourceMetadata } from '../types.js';
|
||||
import type { ReadResourceResult } from '@modelcontextprotocol/sdk/types.js';
|
||||
import type { BlobStore } from '../../storage/blob/types.js';
|
||||
|
||||
export type InternalResourceServices = {
|
||||
blobStore: BlobStore;
|
||||
};
|
||||
|
||||
/**
 * Contract implemented by each built-in (non-MCP) resource handler.
 * Handlers are constructed by the factory, initialized once with shared
 * services, and then queried by URI.
 */
export interface InternalResourceHandler {
    /** Stable type identifier for this handler (used as the registry key). */
    getType(): string;
    /** One-time setup with shared services; called before any list/read. */
    initialize(services: InternalResourceServices): Promise<void>;
    /** Enumerate all resources this handler currently exposes. */
    listResources(): Promise<ResourceMetadata[]>;
    /** Read the content of a resource; should reject if the URI is unknown. */
    readResource(uri: string): Promise<ReadResourceResult>;
    /** Cheap synchronous check whether this handler owns the given URI. */
    canHandle(uri: string): boolean;
    /** Optional hook to re-scan underlying storage for changes. */
    refresh?(): Promise<void>;
}
|
||||
66
dexto/packages/core/src/resources/index.ts
Normal file
66
dexto/packages/core/src/resources/index.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
/**
 * Resource management module exports.
 * Organized into thematic sub-barrels for better tree-shaking and maintainability.
 */

// Core resource types and manager
export type { ResourceSource, ResourceMetadata, ResourceProvider, ResourceSet } from './types.js';

// Validated config shapes produced by the zod schemas in schemas.ts
export type {
    InternalResourcesConfig,
    ValidatedInternalResourcesConfig,
    ValidatedInternalResourceConfig,
    ValidatedFileSystemResourceConfig,
    ValidatedBlobResourceConfig,
} from './schemas.js';

export { ResourceManager } from './manager.js';
export { ResourceError } from './errors.js';
export { ResourceErrorCodes } from './error-codes.js';

// Internal resources provider and handlers
export type { InternalResourceHandler, InternalResourceServices } from './handlers/types.js';
export { InternalResourcesProvider } from './internal-provider.js';
export {
    createInternalResourceHandler,
    getInternalResourceHandlerTypes,
} from './handlers/factory.js';

// Resource reference parsing and expansion
/**
 * Resource References
 *
 * Resource references allow you to include resource content in messages using @ syntax.
 *
 * Syntax:
 * - Simple name: @filename.txt
 * - URI with brackets: @<file:///path/to/file.txt>
 * - Server-scoped: @servername:resource-identifier
 *
 * Important: @ symbols are ONLY treated as resource references if they:
 * 1. Are at the start of the message, OR
 * 2. Are preceded by whitespace (space, tab, newline)
 *
 * This means email addresses like "user@example.com" are automatically ignored
 * without requiring any escape sequences — no special handling needed.
 *
 * Examples:
 * - "@myfile.txt" → resource reference (at start)
 * - "Check @myfile.txt" → resource reference (after space)
 * - "user@example.com" → literal text (no leading space)
 * - "See @file1.txt and email user@example.com" → only @file1.txt is a reference
 */
export type { ResourceReference, ResourceExpansionResult } from './reference-parser.js';
export {
    parseResourceReferences,
    resolveResourceReferences,
    expandMessageReferences,
    formatResourceContent,
} from './reference-parser.js';

// Schemas and validation
export {
    InternalResourceConfigSchema,
    InternalResourcesSchema,
    isInternalResourcesEnabled,
} from './schemas.js';
|
||||
146
dexto/packages/core/src/resources/internal-provider.ts
Normal file
146
dexto/packages/core/src/resources/internal-provider.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import { ResourceProvider, ResourceMetadata, ResourceSource } from './types.js';
|
||||
import { ReadResourceResult } from '@modelcontextprotocol/sdk/types.js';
|
||||
import type { IDextoLogger } from '../logger/v2/types.js';
|
||||
import { DextoLogComponent } from '../logger/v2/types.js';
|
||||
import { createInternalResourceHandler } from './handlers/factory.js';
|
||||
import type { InternalResourceHandler, InternalResourceServices } from './handlers/types.js';
|
||||
import type {
|
||||
ValidatedInternalResourcesConfig,
|
||||
ValidatedInternalResourceConfig,
|
||||
} from './schemas.js';
|
||||
import { InternalResourceConfigSchema } from './schemas.js';
|
||||
import { ResourceError } from './errors.js';
|
||||
|
||||
export class InternalResourcesProvider implements ResourceProvider {
|
||||
private config: ValidatedInternalResourcesConfig;
|
||||
private handlers: Map<string, InternalResourceHandler> = new Map();
|
||||
private services: InternalResourceServices;
|
||||
private logger: IDextoLogger;
|
||||
|
||||
constructor(
|
||||
config: ValidatedInternalResourcesConfig,
|
||||
services: InternalResourceServices,
|
||||
logger: IDextoLogger
|
||||
) {
|
||||
this.config = config;
|
||||
this.services = services;
|
||||
this.logger = logger.createChild(DextoLogComponent.RESOURCE);
|
||||
this.logger.debug(
|
||||
`InternalResourcesProvider initialized with config: ${JSON.stringify(config)}`
|
||||
);
|
||||
}
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
if (!this.config.enabled || this.config.resources.length === 0) {
|
||||
this.logger.debug('Internal resources disabled or no resources configured');
|
||||
return;
|
||||
}
|
||||
|
||||
for (const resourceConfig of this.config.resources) {
|
||||
try {
|
||||
const parsedConfig = InternalResourceConfigSchema.parse(resourceConfig);
|
||||
const handler = createInternalResourceHandler(
|
||||
parsedConfig,
|
||||
this.services,
|
||||
this.logger
|
||||
);
|
||||
await handler.initialize(this.services);
|
||||
this.handlers.set(resourceConfig.type, handler);
|
||||
this.logger.debug(`Initialized ${resourceConfig.type} resource handler`);
|
||||
} catch (error) {
|
||||
this.logger.error(`Failed to initialize ${resourceConfig.type} resource handler`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.debug(
|
||||
`InternalResourcesProvider initialized with ${this.handlers.size} resource handlers`
|
||||
);
|
||||
}
|
||||
|
||||
getSource(): ResourceSource {
|
||||
return 'internal';
|
||||
}
|
||||
|
||||
async listResources(): Promise<ResourceMetadata[]> {
|
||||
const allResources: ResourceMetadata[] = [];
|
||||
for (const [type, handler] of this.handlers.entries()) {
|
||||
try {
|
||||
const resources = await handler.listResources();
|
||||
allResources.push(...resources);
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
`Failed to list resources from ${type} handler: ${error instanceof Error ? error.message : String(error)}`,
|
||||
{ error: error instanceof Error ? error.message : String(error) }
|
||||
);
|
||||
}
|
||||
}
|
||||
return allResources;
|
||||
}
|
||||
|
||||
async hasResource(uri: string): Promise<boolean> {
|
||||
for (const handler of this.handlers.values()) {
|
||||
if (handler.canHandle(uri)) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async readResource(uri: string): Promise<ReadResourceResult> {
|
||||
for (const [type, handler] of this.handlers.entries()) {
|
||||
if (handler.canHandle(uri)) {
|
||||
try {
|
||||
return await handler.readResource(uri);
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
`Failed to read resource ${uri} from ${type} handler: ${error instanceof Error ? error.message : String(error)}`,
|
||||
{ error: error instanceof Error ? error.message : String(error) }
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
throw ResourceError.noSuitableProvider(uri);
|
||||
}
|
||||
|
||||
async refresh(): Promise<void> {
|
||||
for (const [type, handler] of this.handlers.entries()) {
|
||||
if (handler.refresh) {
|
||||
try {
|
||||
await handler.refresh();
|
||||
this.logger.debug(`Refreshed ${type} resource handler`);
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
`Failed to refresh ${type} resource handler: ${error instanceof Error ? error.message : String(error)}`,
|
||||
{ error: error instanceof Error ? error.message : String(error) }
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async addResourceConfig(config: ValidatedInternalResourceConfig): Promise<void> {
|
||||
try {
|
||||
const parsedConfig = InternalResourceConfigSchema.parse(config);
|
||||
const handler = createInternalResourceHandler(parsedConfig, this.services, this.logger);
|
||||
await handler.initialize(this.services);
|
||||
this.handlers.set(config.type, handler);
|
||||
this.config.resources.push(parsedConfig);
|
||||
this.logger.info(`Added new ${config.type} resource handler`);
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
`Failed to add ${config.type} resource handler: ${error instanceof Error ? error.message : String(error)}`,
|
||||
{ error: error instanceof Error ? error.message : String(error) }
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async removeResourceHandler(type: string): Promise<void> {
|
||||
if (this.handlers.has(type)) {
|
||||
this.handlers.delete(type);
|
||||
this.config.resources = this.config.resources.filter((r) => r.type !== type);
|
||||
this.logger.info(`Removed ${type} resource handler`);
|
||||
}
|
||||
}
|
||||
}
|
||||
248
dexto/packages/core/src/resources/manager.ts
Normal file
248
dexto/packages/core/src/resources/manager.ts
Normal file
@@ -0,0 +1,248 @@
|
||||
import type { MCPManager } from '../mcp/manager.js';
|
||||
import type { ResourceSet, ResourceMetadata } from './types.js';
|
||||
import { InternalResourcesProvider } from './internal-provider.js';
|
||||
import type { ReadResourceResult } from '@modelcontextprotocol/sdk/types.js';
|
||||
import type { ValidatedInternalResourcesConfig } from './schemas.js';
|
||||
import type { InternalResourceServices } from './handlers/types.js';
|
||||
import type { IDextoLogger } from '../logger/v2/types.js';
|
||||
import { DextoLogComponent } from '../logger/v2/types.js';
|
||||
import { ResourceError } from './errors.js';
|
||||
import { eventBus } from '../events/index.js';
|
||||
import type { BlobStore } from '../storage/blob/types.js';
|
||||
|
||||
/**
 * Construction options for ResourceManager.
 */
export interface ResourceManagerOptions {
    /** Validated configuration for built-in (non-MCP) resource handlers. */
    internalResourcesConfig: ValidatedInternalResourcesConfig;
    /** Store used to resolve `blob:` URIs directly. */
    blobStore: BlobStore;
}
|
||||
|
||||
/**
 * Facade over all resource sources: MCP servers, `blob:` storage, and the
 * internal resources provider. Routes by URI scheme — `mcp:` goes to the MCP
 * manager, `blob:` straight to the blob store, everything else to the internal
 * provider — and relays MCP change notifications as cache-invalidation events.
 */
export class ResourceManager {
    private readonly mcpManager: MCPManager;
    // Optional in type, but the constructor always assigns one (see below).
    private internalResourcesProvider?: InternalResourcesProvider;
    private readonly blobStore: BlobStore;
    private logger: IDextoLogger;

    constructor(mcpManager: MCPManager, options: ResourceManagerOptions, logger: IDextoLogger) {
        this.mcpManager = mcpManager;
        this.blobStore = options.blobStore;
        this.logger = logger.createChild(DextoLogComponent.RESOURCE);

        const services: InternalResourceServices = {
            blobStore: this.blobStore,
        };

        const config = options.internalResourcesConfig;
        if (config.enabled || config.resources.length > 0) {
            this.internalResourcesProvider = new InternalResourcesProvider(
                config,
                services,
                this.logger
            );
        } else {
            // Always create provider to enable blob resources even if no other internal resources configured
            this.internalResourcesProvider = new InternalResourcesProvider(
                { enabled: true, resources: [] },
                services,
                this.logger
            );
        }

        // Listen for MCP resource notifications for real-time updates
        this.setupNotificationListeners();

        this.logger.debug('ResourceManager initialized');
    }

    /** Async second phase: initialize the internal provider's handlers. */
    async initialize(): Promise<void> {
        if (this.internalResourcesProvider) {
            await this.internalResourcesProvider.initialize();
        }
        this.logger.debug('ResourceManager initialization complete');
    }

    /** Expose the blob store for callers that need direct access. */
    getBlobStore(): BlobStore {
        return this.blobStore;
    }

    // Derive a display name from the last path segment of a URI
    // (handles both '/' and '\' separators); falls back to the URI itself.
    private deriveName(uri: string): string {
        const segments = uri.split(/[\\/]/).filter(Boolean);
        const lastSegment = segments[segments.length - 1];
        return lastSegment ?? uri;
    }

    /**
     * Enumerate resources from all sources, keyed by URI.
     * MCP and internal enumeration failures are logged but non-fatal, so a
     * partial listing is still returned. Internal entries win on key collision
     * (they are merged last).
     */
    async list(): Promise<ResourceSet> {
        const resources: ResourceSet = {};

        try {
            const mcpResources = await this.mcpManager.listAllResources();
            for (const resource of mcpResources) {
                const {
                    key,
                    serverName,
                    summary: { uri, name, description, mimeType },
                } = resource;
                const metadata: ResourceMetadata = {
                    uri: key,
                    name: name ?? this.deriveName(uri),
                    description: description ?? `Resource from MCP server: ${serverName}`,
                    source: 'mcp',
                    serverName,
                    metadata: {
                        originalUri: uri,
                        serverName,
                    },
                };
                if (mimeType) {
                    metadata.mimeType = mimeType;
                }
                resources[key] = metadata;
            }
            if (mcpResources.length > 0) {
                this.logger.debug(
                    `🗃️ Resource discovery (MCP): ${mcpResources.length} resources across ${
                        new Set(mcpResources.map((r) => r.serverName)).size
                    } server(s)`
                );
            }
        } catch (error) {
            this.logger.error(
                `Failed to enumerate MCP resources: ${error instanceof Error ? error.message : String(error)}`
            );
        }

        if (this.internalResourcesProvider) {
            try {
                const internalResources = await this.internalResourcesProvider.listResources();
                for (const resource of internalResources) {
                    resources[resource.uri] = resource;
                }
                if (internalResources.length > 0) {
                    this.logger.debug(
                        `🗃️ Resource discovery (internal): ${internalResources.length} resources`
                    );
                }
            } catch (error) {
                this.logger.error(
                    `Failed to enumerate internal resources: ${error instanceof Error ? error.message : String(error)}`
                );
            }
        }

        return resources;
    }

    /**
     * Check whether a resource exists, routing by URI scheme.
     * Blob-store lookup errors are downgraded to `false` (with a warning).
     */
    async has(uri: string): Promise<boolean> {
        if (uri.startsWith('mcp:')) {
            return this.mcpManager.hasResource(uri);
        }
        // Always short-circuit blob: URIs to use blobStore directly
        if (uri.startsWith('blob:')) {
            try {
                return await this.blobStore.exists(uri);
            } catch (error) {
                this.logger.warn(
                    `BlobService exists check failed for ${uri}: ${error instanceof Error ? error.message : String(error)}`
                );
                return false;
            }
        }
        if (!this.internalResourcesProvider) {
            return false;
        }
        return await this.internalResourcesProvider.hasResource(uri);
    }

    /**
     * Read a resource's content, routing by URI scheme.
     * Blob reads are returned base64-encoded with size/name/source metadata
     * attached under `_meta`. All failures are logged and rethrown.
     */
    async read(uri: string): Promise<ReadResourceResult> {
        this.logger.debug(`📖 Reading resource: ${uri}`);
        try {
            if (uri.startsWith('mcp:')) {
                const result = await this.mcpManager.readResource(uri);
                this.logger.debug(`✅ Successfully read MCP resource: ${uri}`);
                return result;
            }

            // Always short-circuit blob: URIs to use blobStore directly
            if (uri.startsWith('blob:')) {
                const blob = await this.blobStore.retrieve(uri, 'base64');
                return {
                    contents: [
                        {
                            uri,
                            mimeType: blob.metadata.mimeType,
                            blob: blob.data as string,
                        },
                    ],
                    _meta: {
                        size: blob.metadata.size,
                        createdAt: blob.metadata.createdAt,
                        originalName: blob.metadata.originalName,
                        source: blob.metadata.source,
                    },
                };
            }

            if (!this.internalResourcesProvider) {
                throw ResourceError.providerNotInitialized('Internal', uri);
            }

            const result = await this.internalResourcesProvider.readResource(uri);
            this.logger.debug(`✅ Successfully read internal resource: ${uri}`);
            return result;
        } catch (error) {
            this.logger.error(
                `❌ Failed to read resource '${uri}': ${error instanceof Error ? error.message : String(error)}`
            );
            throw error;
        }
    }

    /** Ask the internal provider to re-scan its handlers. */
    async refresh(): Promise<void> {
        if (this.internalResourcesProvider) {
            await this.internalResourcesProvider.refresh();
        }
        this.logger.info('ResourceManager refreshed');
    }

    /** Accessor for runtime handler add/remove operations. */
    getInternalResourcesProvider(): InternalResourcesProvider | undefined {
        return this.internalResourcesProvider;
    }

    /**
     * Set up listeners for MCP resource notifications to enable real-time
     * updates. Each MCP-level event is re-emitted as a
     * 'resource:cache-invalidated' event with an `action` discriminator so
     * consumers can refresh their cached resource lists.
     */
    private setupNotificationListeners(): void {
        // Listen for MCP resource updates
        eventBus.on('mcp:resource-updated', async (payload) => {
            this.logger.debug(
                `🔄 Resource updated notification: ${payload.resourceUri} from server '${payload.serverName}'`
            );

            // Emit a more specific event for components that need to refresh resource lists
            eventBus.emit('resource:cache-invalidated', {
                resourceUri: payload.resourceUri,
                serverName: payload.serverName,
                action: 'updated',
            });
        });

        // Listen for MCP server connection changes that affect resources
        eventBus.on('mcp:server-connected', async (payload) => {
            if (payload.success) {
                this.logger.debug(
                    `🔄 Server connected, resources may have changed: ${payload.name}`
                );
                eventBus.emit('resource:cache-invalidated', {
                    serverName: payload.name,
                    action: 'server_connected',
                });
            }
        });

        eventBus.on('mcp:server-removed', async (payload) => {
            this.logger.debug(`🔄 Server removed, resources invalidated: ${payload.serverName}`);
            eventBus.emit('resource:cache-invalidated', {
                serverName: payload.serverName,
                action: 'server_removed',
            });
        });
    }
}
|
||||
231
dexto/packages/core/src/resources/reference-parser.test.ts
Normal file
231
dexto/packages/core/src/resources/reference-parser.test.ts
Normal file
@@ -0,0 +1,231 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { parseResourceReferences, expandMessageReferences } from './reference-parser.js';
|
||||
import type { ResourceSet } from './types.js';
|
||||
import type { ReadResourceResult } from '@modelcontextprotocol/sdk/types.js';
|
||||
|
||||
// Parsing rules under test: an '@' only starts a reference at the beginning of
// the message or after whitespace, so email addresses are never matched.
describe('parseResourceReferences', () => {
    it('should parse reference at start of message', () => {
        const refs = parseResourceReferences('@myfile.txt is important');
        expect(refs).toHaveLength(1);
        expect(refs[0]).toBeDefined();
        expect(refs[0]).toMatchObject({
            originalRef: '@myfile.txt',
            type: 'name',
            identifier: 'myfile.txt',
        });
    });

    it('should parse reference with leading whitespace', () => {
        const refs = parseResourceReferences('Check @myfile.txt');
        expect(refs).toHaveLength(1);
        expect(refs[0]).toBeDefined();
        expect(refs[0]).toMatchObject({
            originalRef: '@myfile.txt',
            type: 'name',
            identifier: 'myfile.txt',
        });
    });

    it('should parse URI reference with brackets', () => {
        const refs = parseResourceReferences('Check @<file:///path/to/file.txt>');
        expect(refs).toHaveLength(1);
        expect(refs[0]).toBeDefined();
        expect(refs[0]).toMatchObject({
            originalRef: '@<file:///path/to/file.txt>',
            type: 'uri',
            identifier: 'file:///path/to/file.txt',
        });
    });

    it('should parse server-scoped reference', () => {
        const refs = parseResourceReferences('Check @filesystem:myfile.txt');
        expect(refs).toHaveLength(1);
        expect(refs[0]).toBeDefined();
        expect(refs[0]).toMatchObject({
            originalRef: '@filesystem:myfile.txt',
            type: 'server-scoped',
            serverName: 'filesystem',
            identifier: 'myfile.txt',
        });
    });

    it('should NOT parse @ in email addresses', () => {
        const refs = parseResourceReferences('Email me at user@example.com');
        expect(refs).toHaveLength(0);
    });

    it('should parse real references but skip email addresses', () => {
        const refs = parseResourceReferences('Check @myfile but email user@example.com');
        expect(refs).toHaveLength(1);
        expect(refs[0]).toBeDefined();
        expect(refs[0]!.identifier).toBe('myfile');
    });

    it('should handle multiple email addresses and references', () => {
        const refs = parseResourceReferences(
            'Contact user@example.com or admin@example.com for @support.txt'
        );
        expect(refs).toHaveLength(1);
        expect(refs[0]).toBeDefined();
        expect(refs[0]!.identifier).toBe('support.txt');
    });

    it('should NOT match @ without leading whitespace', () => {
        const refs = parseResourceReferences('user@example.com has @file.txt');
        expect(refs).toHaveLength(1);
        expect(refs[0]).toBeDefined();
        expect(refs[0]!.identifier).toBe('file.txt');
    });

    it('should parse multiple references with whitespace', () => {
        const refs = parseResourceReferences('Check @file1.txt and @file2.txt');
        expect(refs).toHaveLength(2);
        expect(refs[0]).toBeDefined();
        expect(refs[1]).toBeDefined();
        expect(refs[0]!.identifier).toBe('file1.txt');
        expect(refs[1]!.identifier).toBe('file2.txt');
    });

    it('should parse reference after newline', () => {
        const refs = parseResourceReferences('First line\n@myfile.txt');
        expect(refs).toHaveLength(1);
        expect(refs[0]).toBeDefined();
        expect(refs[0]!.identifier).toBe('myfile.txt');
    });

    it('should NOT parse @ in middle of word', () => {
        const refs = parseResourceReferences('test@something word @file.txt');
        expect(refs).toHaveLength(1);
        expect(refs[0]).toBeDefined();
        expect(refs[0]!.identifier).toBe('file.txt');
    });

    it('should handle @ at start with no space before', () => {
        const refs = parseResourceReferences('@start then more@text and @end');
        expect(refs).toHaveLength(2);
        expect(refs[0]).toBeDefined();
        expect(refs[1]).toBeDefined();
        expect(refs[0]!.identifier).toBe('start');
        expect(refs[1]!.identifier).toBe('end');
    });
});
|
||||
|
||||
// End-to-end expansion tests: references are resolved against a one-entry
// resource set and read via a stub reader that only knows file:///test.txt.
describe('expandMessageReferences', () => {
    const mockResourceSet: ResourceSet = {
        'file:///test.txt': {
            uri: 'file:///test.txt',
            name: 'test.txt',
            description: 'Test file',
            source: 'internal',
        },
    };

    // Stub reader: succeeds only for the known URI, throws otherwise.
    const mockResourceReader = async (uri: string): Promise<ReadResourceResult> => {
        if (uri === 'file:///test.txt') {
            return {
                contents: [
                    {
                        uri: 'file:///test.txt',
                        mimeType: 'text/plain',
                        text: 'File content here',
                    },
                ],
            };
        }
        throw new Error(`Resource not found: ${uri}`);
    };

    it('should expand resource reference', async () => {
        const result = await expandMessageReferences(
            'Check @test.txt for info',
            mockResourceSet,
            mockResourceReader
        );

        expect(result.expandedReferences).toHaveLength(1);
        expect(result.expandedMessage).toContain('File content here');
        expect(result.expandedMessage).toContain('test.txt');
    });

    it('should NOT treat email addresses as references', async () => {
        const result = await expandMessageReferences(
            'Email me at user@example.com',
            mockResourceSet,
            mockResourceReader
        );

        expect(result.expandedReferences).toHaveLength(0);
        expect(result.expandedMessage).toBe('Email me at user@example.com');
    });

    it('should handle mixed resource references and email addresses', async () => {
        const result = await expandMessageReferences(
            'Check @test.txt and email user@example.com',
            mockResourceSet,
            mockResourceReader
        );

        expect(result.expandedReferences).toHaveLength(1);
        expect(result.expandedMessage).toContain('File content here');
        expect(result.expandedMessage).toContain('user@example.com');
    });

    it('should preserve multiple email addresses', async () => {
        const result = await expandMessageReferences(
            'Contact user@example.com or admin@test.com',
            mockResourceSet,
            mockResourceReader
        );

        expect(result.expandedReferences).toHaveLength(0);
        expect(result.expandedMessage).toBe('Contact user@example.com or admin@test.com');
    });

    it('should preserve email addresses when resource expansion fails', async () => {
        const result = await expandMessageReferences(
            'Check @nonexistent.txt and email user@example.com',
            mockResourceSet,
            mockResourceReader
        );

        expect(result.expandedReferences).toHaveLength(0);
        expect(result.unresolvedReferences).toHaveLength(1);
        expect(result.expandedMessage).toContain('user@example.com');
    });

    it('should handle @ symbols in various contexts', async () => {
        const result = await expandMessageReferences(
            'Before @test.txt middle more@text after',
            mockResourceSet,
            mockResourceReader
        );

        expect(result.expandedReferences).toHaveLength(1);
        expect(result.expandedMessage).toContain('File content here');
        expect(result.expandedMessage).toContain('more@text');
    });

    it('should handle message with no references', async () => {
        const result = await expandMessageReferences(
            'Just email user@example.com',
            mockResourceSet,
            mockResourceReader
        );

        expect(result.expandedReferences).toHaveLength(0);
        expect(result.unresolvedReferences).toHaveLength(0);
        expect(result.expandedMessage).toBe('Just email user@example.com');
    });

    it('should handle @ at start and in middle of text', async () => {
        const result = await expandMessageReferences(
            '@test.txt and contact@email.com',
            mockResourceSet,
            mockResourceReader
        );

        expect(result.expandedReferences).toHaveLength(1);
        expect(result.expandedMessage).toContain('File content here');
        expect(result.expandedMessage).toContain('contact@email.com');
    });
});
|
||||
297
dexto/packages/core/src/resources/reference-parser.ts
Normal file
297
dexto/packages/core/src/resources/reference-parser.ts
Normal file
@@ -0,0 +1,297 @@
|
||||
import type { ResourceSet } from './types.js';
|
||||
import type { ReadResourceResult } from '@modelcontextprotocol/sdk/types.js';
|
||||
|
||||
// TODO: Implement Option A - pass logger as optional parameter for better observability
|
||||
// when we refactor to injectable logger pattern (see CLAUDE.md note about future logger architecture)
|
||||
|
||||
/**
 * A single @-reference parsed out of a user message.
 */
export interface ResourceReference {
    /** The raw matched text, including the leading '@'. */
    originalRef: string;
    /** Canonical resource URI, filled in once the reference is resolved. */
    resourceUri?: string;
    /** How the reference was written: bare name, @<uri>, or server:name. */
    type: 'name' | 'uri' | 'server-scoped';
    /** Present only for 'server-scoped' references. */
    serverName?: string;
    /** The name / URI / resource id portion (without '@' or brackets). */
    identifier: string;
}
|
||||
|
||||
/**
 * Outcome of expanding all @-references in a message.
 */
export interface ResourceExpansionResult {
    /** Message with each resolved reference replaced by resource content. */
    expandedMessage: string;
    /** References that resolved and were successfully read. */
    expandedReferences: ResourceReference[];
    /** References that could not be resolved or read (left in the text). */
    unresolvedReferences: ResourceReference[];
    /** Image resources extracted as attachments rather than inlined text. */
    extractedImages: Array<{ image: string; mimeType: string; name: string }>;
}
|
||||
|
||||
function escapeRegExp(literal: string): string {
|
||||
return literal.replace(/[-/\\^$*+?.()|[\]{}]/g, '\\$&');
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse resource references from a message.
|
||||
*
|
||||
* @ symbols are only treated as resource references if they:
|
||||
* 1. Are at the start of the message, OR
|
||||
* 2. Are preceded by whitespace
|
||||
*
|
||||
* This means email addresses like "user@example.com" are NOT treated as references.
|
||||
*/
|
||||
export function parseResourceReferences(message: string): ResourceReference[] {
|
||||
const references: ResourceReference[] = [];
|
||||
// Require whitespace before @ or start of string (^)
|
||||
// This prevents matching @ in email addresses like user@example.com
|
||||
const regex =
|
||||
/(?:^|(?<=\s))@(?:(<[^>]+>)|([a-zA-Z0-9_-]+):([a-zA-Z0-9._/-]+)|([a-zA-Z0-9._/-]+))(?![a-zA-Z0-9@.])/g;
|
||||
let match;
|
||||
while ((match = regex.exec(message)) !== null) {
|
||||
const [originalRef, uriWithBrackets, serverName, serverResource, simpleName] = match;
|
||||
if (uriWithBrackets) {
|
||||
references.push({ originalRef, type: 'uri', identifier: uriWithBrackets.slice(1, -1) });
|
||||
} else if (serverName && serverResource) {
|
||||
references.push({
|
||||
originalRef,
|
||||
type: 'server-scoped',
|
||||
serverName,
|
||||
identifier: serverResource,
|
||||
});
|
||||
} else if (simpleName) {
|
||||
references.push({ originalRef, type: 'name', identifier: simpleName });
|
||||
}
|
||||
}
|
||||
return references;
|
||||
}
|
||||
|
||||
export function resolveResourceReferences(
|
||||
references: ResourceReference[],
|
||||
availableResources: ResourceSet
|
||||
): ResourceReference[] {
|
||||
const resolvedRefs = references.map((ref) => ({ ...ref }));
|
||||
for (const ref of resolvedRefs) {
|
||||
switch (ref.type) {
|
||||
case 'uri': {
|
||||
// Try direct lookup first
|
||||
if (availableResources[ref.identifier]) {
|
||||
ref.resourceUri = ref.identifier;
|
||||
} else {
|
||||
// Fall back to searching by originalUri in metadata
|
||||
const uriMatchUri = findResourceByOriginalUri(
|
||||
availableResources,
|
||||
ref.identifier
|
||||
);
|
||||
if (uriMatchUri) ref.resourceUri = uriMatchUri;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'server-scoped': {
|
||||
const serverScopedUri = findResourceByServerAndName(
|
||||
availableResources,
|
||||
ref.serverName!,
|
||||
ref.identifier
|
||||
);
|
||||
if (serverScopedUri) ref.resourceUri = serverScopedUri;
|
||||
break;
|
||||
}
|
||||
case 'name': {
|
||||
const nameMatchUri = findResourceByName(availableResources, ref.identifier);
|
||||
if (nameMatchUri) ref.resourceUri = nameMatchUri;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return resolvedRefs;
|
||||
}
|
||||
|
||||
function findResourceByOriginalUri(resources: ResourceSet, uri: string): string | undefined {
|
||||
const normalizedUri = uri.trim().toLowerCase();
|
||||
|
||||
// Look for exact match in originalUri metadata
|
||||
for (const [resourceUri, resource] of Object.entries(resources)) {
|
||||
const originalUri =
|
||||
typeof resource.metadata?.originalUri === 'string'
|
||||
? resource.metadata.originalUri
|
||||
: undefined;
|
||||
if (originalUri && originalUri.toLowerCase() === normalizedUri) {
|
||||
return resourceUri;
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to partial match
|
||||
for (const [resourceUri, resource] of Object.entries(resources)) {
|
||||
const originalUri =
|
||||
typeof resource.metadata?.originalUri === 'string'
|
||||
? resource.metadata.originalUri
|
||||
: undefined;
|
||||
if (originalUri && originalUri.toLowerCase().includes(normalizedUri)) {
|
||||
return resourceUri;
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function findResourceByServerAndName(
|
||||
resources: ResourceSet,
|
||||
serverName: string,
|
||||
identifier: string
|
||||
): string | undefined {
|
||||
const normalizedIdentifier = identifier.trim().toLowerCase();
|
||||
const matchingResources = Object.entries(resources).filter(
|
||||
([, resource]) => resource.serverName === serverName
|
||||
);
|
||||
|
||||
for (const [uri, resource] of matchingResources) {
|
||||
if (!resource.name) continue;
|
||||
const normalizedName = resource.name.trim().toLowerCase();
|
||||
if (
|
||||
normalizedName === normalizedIdentifier ||
|
||||
normalizedName.includes(normalizedIdentifier)
|
||||
) {
|
||||
return uri;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [uri, resource] of matchingResources) {
|
||||
const metadataUri =
|
||||
typeof resource.metadata?.originalUri === 'string'
|
||||
? resource.metadata.originalUri
|
||||
: undefined;
|
||||
if (
|
||||
metadataUri?.toLowerCase().includes(normalizedIdentifier) ||
|
||||
uri.toLowerCase().includes(normalizedIdentifier)
|
||||
) {
|
||||
return uri;
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function findResourceByName(resources: ResourceSet, identifier: string): string | undefined {
|
||||
const normalizedIdentifier = identifier.trim().toLowerCase();
|
||||
|
||||
for (const [uri, resource] of Object.entries(resources)) {
|
||||
if (!resource.name) continue;
|
||||
const normalizedName = resource.name.trim().toLowerCase();
|
||||
if (
|
||||
normalizedName === normalizedIdentifier ||
|
||||
normalizedName.includes(normalizedIdentifier)
|
||||
) {
|
||||
return uri;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [uri, resource] of Object.entries(resources)) {
|
||||
const originalUri =
|
||||
typeof resource.metadata?.originalUri === 'string'
|
||||
? resource.metadata.originalUri
|
||||
: undefined;
|
||||
if (
|
||||
originalUri?.toLowerCase().includes(normalizedIdentifier) ||
|
||||
uri.toLowerCase().includes(normalizedIdentifier)
|
||||
) {
|
||||
return uri;
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function formatResourceContent(
|
||||
resourceUri: string,
|
||||
resourceName: string,
|
||||
content: ReadResourceResult
|
||||
): string {
|
||||
const contentParts: string[] = [];
|
||||
contentParts.push(`\n--- Content from resource: ${resourceName} (${resourceUri}) ---`);
|
||||
for (const item of content.contents) {
|
||||
if ('text' in item && item.text && typeof item.text === 'string') {
|
||||
contentParts.push(item.text);
|
||||
} else if ('blob' in item && item.blob) {
|
||||
const blobSize = typeof item.blob === 'string' ? item.blob.length : 'unknown';
|
||||
contentParts.push(`[Binary content: ${item.mimeType || 'unknown'}, ${blobSize} bytes]`);
|
||||
}
|
||||
}
|
||||
contentParts.push('--- End of resource content ---\n');
|
||||
return contentParts.join('\n');
|
||||
}
|
||||
|
||||
export async function expandMessageReferences(
|
||||
message: string,
|
||||
availableResources: ResourceSet,
|
||||
resourceReader: (uri: string) => Promise<ReadResourceResult>
|
||||
): Promise<ResourceExpansionResult> {
|
||||
// Note: Logging removed to keep this function browser-safe
|
||||
// TODO: Add logger as optional parameter when implementing Option A
|
||||
|
||||
const parsedRefs = parseResourceReferences(message);
|
||||
if (parsedRefs.length === 0) {
|
||||
return {
|
||||
expandedMessage: message,
|
||||
expandedReferences: [],
|
||||
unresolvedReferences: [],
|
||||
extractedImages: [],
|
||||
};
|
||||
}
|
||||
|
||||
const resolvedRefs = resolveResourceReferences(parsedRefs, availableResources);
|
||||
const expandedReferences = resolvedRefs.filter((ref) => ref.resourceUri);
|
||||
const unresolvedReferences = resolvedRefs.filter((ref) => !ref.resourceUri);
|
||||
|
||||
let expandedMessage = message;
|
||||
const failedRefs: ResourceReference[] = [];
|
||||
const extractedImages: Array<{ image: string; mimeType: string; name: string }> = [];
|
||||
|
||||
for (const ref of expandedReferences) {
|
||||
try {
|
||||
const content = await resourceReader(ref.resourceUri!);
|
||||
const resource = availableResources[ref.resourceUri!];
|
||||
|
||||
// Check if this is an image resource
|
||||
let isImageResource = false;
|
||||
for (const item of content.contents) {
|
||||
if (
|
||||
'blob' in item &&
|
||||
item.blob &&
|
||||
item.mimeType &&
|
||||
item.mimeType.startsWith('image/') &&
|
||||
typeof item.blob === 'string'
|
||||
) {
|
||||
extractedImages.push({
|
||||
image: item.blob,
|
||||
mimeType: item.mimeType,
|
||||
name: resource?.name || ref.identifier,
|
||||
});
|
||||
isImageResource = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (isImageResource) {
|
||||
// Remove the reference from the message for images
|
||||
const pattern = new RegExp(escapeRegExp(ref.originalRef), 'g');
|
||||
expandedMessage = expandedMessage
|
||||
.replace(pattern, ' ')
|
||||
.replace(/\s{2,}/g, ' ')
|
||||
.trim();
|
||||
} else {
|
||||
// For non-image resources, expand them inline as before
|
||||
const formattedContent = formatResourceContent(
|
||||
ref.resourceUri!,
|
||||
resource?.name || ref.identifier,
|
||||
content
|
||||
);
|
||||
const pattern = new RegExp(escapeRegExp(ref.originalRef), 'g');
|
||||
expandedMessage = expandedMessage.replace(pattern, formattedContent);
|
||||
}
|
||||
} catch (_error) {
|
||||
failedRefs.push(ref);
|
||||
}
|
||||
}
|
||||
|
||||
const failedRefSet = new Set(failedRefs);
|
||||
const finalExpandedReferences = expandedReferences.filter((ref) => !failedRefSet.has(ref));
|
||||
unresolvedReferences.push(...failedRefs);
|
||||
|
||||
return {
|
||||
expandedMessage,
|
||||
expandedReferences: finalExpandedReferences,
|
||||
unresolvedReferences,
|
||||
extractedImages,
|
||||
};
|
||||
}
|
||||
154
dexto/packages/core/src/resources/schemas.ts
Normal file
154
dexto/packages/core/src/resources/schemas.ts
Normal file
@@ -0,0 +1,154 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
/**
|
||||
* Schema for validating file extensions (must start with a dot)
|
||||
*/
|
||||
const FileExtensionSchema = z
|
||||
.string()
|
||||
.regex(
|
||||
/^\.[A-Za-z0-9][A-Za-z0-9._-]*$/,
|
||||
'Extensions must start with a dot and may include alphanumerics, dot, underscore, or hyphen (e.g., .d.ts, .tar.gz)'
|
||||
)
|
||||
.describe('File extension pattern starting with a dot; supports multi-part extensions');
|
||||
|
||||
/**
|
||||
* Schema for filesystem resource configuration
|
||||
*/
|
||||
const FileSystemResourceSchema = z
|
||||
.object({
|
||||
type: z.literal('filesystem'),
|
||||
paths: z
|
||||
.array(z.string())
|
||||
.min(1)
|
||||
.describe('File paths or directories to expose as resources (at least one required)'),
|
||||
maxDepth: z
|
||||
.number()
|
||||
.min(1)
|
||||
.max(10)
|
||||
.default(3)
|
||||
.describe('Maximum directory depth to traverse (default: 3)'),
|
||||
maxFiles: z
|
||||
.number()
|
||||
.min(1)
|
||||
.max(10000)
|
||||
.default(1000)
|
||||
.describe('Maximum number of files to include (default: 1000)'),
|
||||
includeHidden: z
|
||||
.boolean()
|
||||
.default(false)
|
||||
.describe('Include hidden files and directories (default: false)'),
|
||||
includeExtensions: z
|
||||
.array(FileExtensionSchema)
|
||||
.default([
|
||||
'.txt',
|
||||
'.md',
|
||||
'.js',
|
||||
'.ts',
|
||||
'.json',
|
||||
'.html',
|
||||
'.css',
|
||||
'.py',
|
||||
'.yaml',
|
||||
'.yml',
|
||||
'.xml',
|
||||
'.jsx',
|
||||
'.tsx',
|
||||
'.vue',
|
||||
'.php',
|
||||
'.rb',
|
||||
'.go',
|
||||
'.rs',
|
||||
'.java',
|
||||
'.kt',
|
||||
'.swift',
|
||||
'.sql',
|
||||
'.sh',
|
||||
'.bash',
|
||||
'.zsh',
|
||||
])
|
||||
.describe('File extensions to include (default: common text files)'),
|
||||
})
|
||||
.strict();
|
||||
|
||||
/**
|
||||
* Validated filesystem resource configuration type
|
||||
*/
|
||||
export type ValidatedFileSystemResourceConfig = z.output<typeof FileSystemResourceSchema>;
|
||||
|
||||
/**
 * Schema for blob storage resource configuration.
 *
 * NOTE: This only enables the blob resource provider.
 * Actual blob storage settings (size limits, backend, cleanup) are configured
 * in the 'storage.blob' section of the agent config.
 */
const BlobResourceSchema = z
    .object({
        type: z.literal('blob').describe('Enable blob storage resource provider'),
    })
    .strict() // reject unknown keys so config typos fail fast
    .describe(
        'Blob resource provider configuration - actual storage settings are in storage.blob section'
    );

/**
 * Validated blob resource configuration type (post-parse output of the schema).
 */
export type ValidatedBlobResourceConfig = z.output<typeof BlobResourceSchema>;
|
||||
|
||||
/**
 * Union schema for all internal resource types (composed from individual schemas).
 *
 * Discriminated on the 'type' field: 'filesystem' | 'blob'.
 */
export const InternalResourceConfigSchema = z.discriminatedUnion('type', [
    FileSystemResourceSchema,
    BlobResourceSchema,
]);

/**
 * Validated union type for all internal resource configurations
 * (post-parse output of InternalResourceConfigSchema).
 */
export type ValidatedInternalResourceConfig = z.output<typeof InternalResourceConfigSchema>;
|
||||
|
||||
/**
|
||||
* Schema for internal resources configuration with smart auto-enable logic
|
||||
*
|
||||
* Design principles:
|
||||
* - Clean input format: just specify resources array or object
|
||||
* - Auto-enable when resources are specified
|
||||
* - Backward compatibility with explicit enabled field
|
||||
* - Empty/omitted = disabled
|
||||
*/
|
||||
export const InternalResourcesSchema = z
|
||||
.union([
|
||||
z.array(InternalResourceConfigSchema), // array-only form
|
||||
z
|
||||
.object({
|
||||
enabled: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe('Explicit toggle; auto-enabled when resources are non-empty'),
|
||||
resources: z
|
||||
.array(InternalResourceConfigSchema)
|
||||
.default([])
|
||||
.describe('List of internal resource configurations'),
|
||||
})
|
||||
.strict(),
|
||||
])
|
||||
.default([])
|
||||
.describe(
|
||||
'Internal resource configuration. Can be an array of resources (auto-enabled) or object with enabled field'
|
||||
)
|
||||
.transform((input) => {
|
||||
if (Array.isArray(input)) {
|
||||
return { enabled: input.length > 0, resources: input };
|
||||
}
|
||||
const enabled = input.enabled !== undefined ? input.enabled : input.resources.length > 0;
|
||||
return { enabled, resources: input.resources };
|
||||
});
|
||||
|
||||
export type InternalResourcesConfig = z.input<typeof InternalResourcesSchema>;
|
||||
export type ValidatedInternalResourcesConfig = z.output<typeof InternalResourcesSchema>;
|
||||
|
||||
export function isInternalResourcesEnabled(config: ValidatedInternalResourcesConfig): boolean {
|
||||
return config.enabled && config.resources.length > 0;
|
||||
}
|
||||
64
dexto/packages/core/src/resources/types.ts
Normal file
64
dexto/packages/core/src/resources/types.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
/**
|
||||
* Core resource types and interfaces for the ResourceManager
|
||||
*/
|
||||
|
||||
import type { ReadResourceResult } from '@modelcontextprotocol/sdk/types.js';
|
||||
|
||||
/**
 * Supported resource sources.
 *
 * - 'mcp': resource exposed by a connected MCP server
 * - 'internal': resource served by an in-process provider
 */
export type ResourceSource = 'mcp' | 'internal';
|
||||
|
||||
/**
 * Resource metadata information.
 *
 * Describes a single addressable resource, keyed by its URI, regardless of
 * which source (MCP server or internal provider) supplies it.
 */
export interface ResourceMetadata {
    /** Unique URI/identifier for the resource */
    uri: string;
    /** Human-readable name for the resource */
    name?: string;
    /** Description of what this resource contains */
    description?: string;
    /** MIME type of the resource content */
    mimeType?: string;
    /** Source system that provides this resource */
    source: ResourceSource;
    /** Original server/provider name (for MCP resources) */
    serverName?: string;
    /** Size of the resource in bytes (if known) */
    size?: number;
    /** Last modified timestamp (ISO string or Date) */
    lastModified?: string | Date;
    /** Additional metadata specific to the resource type — presumably keys like `originalUri`; confirm per provider */
    metadata?: Record<string, unknown>;
}
|
||||
|
||||
/**
 * Resource provider interface - implemented by sources that can provide resources.
 *
 * Every method is async to accommodate providers backed by remote servers
 * or slow storage.
 */
export interface ResourceProvider {
    /**
     * List all available resources from this provider.
     */
    listResources(): Promise<ResourceMetadata[]>;

    /**
     * Read the content of a specific resource identified by its URI.
     */
    readResource(uri: string): Promise<ReadResourceResult>;

    /**
     * Check if a resource exists without reading its content.
     */
    hasResource(uri: string): Promise<boolean>;

    /**
     * Get the source type of this provider ('mcp' or 'internal').
     */
    getSource(): ResourceSource;
}
|
||||
|
||||
/**
 * Resource set mapping URIs to resource metadata.
 *
 * Keys are resource URIs; values are the corresponding ResourceMetadata.
 */
export type ResourceSet = Record<string, ResourceMetadata>;
|
||||
Reference in New Issue
Block a user