feat: Add intelligent auto-router and enhanced integrations
- Add intelligent-router.sh hook for automatic agent routing
- Add AUTO-TRIGGER-SUMMARY.md documentation
- Add FINAL-INTEGRATION-SUMMARY.md documentation
- Complete Prometheus integration (6 commands + 4 tools)
- Complete Dexto integration (12 commands + 5 tools)
- Enhanced Ralph with access to all agents
- Fix /clawd command (removed disable-model-invocation)
- Update hooks.json to v5 with intelligent routing
- 291 total skills now available
- All 21 commands with automatic routing

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
10
dexto/packages/server/src/a2a/adapters/index.ts
Normal file
10
dexto/packages/server/src/a2a/adapters/index.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
/**
 * A2A Protocol Adapters
 *
 * Server-layer adapters for converting between A2A protocol format
 * and Dexto's internal representation.
 */

// Session-as-task view (key invariant: taskId === sessionId).
export { TaskView, createTaskView } from './task-view.js';
// Bidirectional message converters (A2A wire format <-> internal format).
export { a2aToInternalMessage, internalToA2AMessage, internalMessagesToA2A } from './message.js';
// Task state computed from session message history (no stored state).
export { deriveTaskState, deriveTaskStateFromA2A } from './state.js';
|
||||
272
dexto/packages/server/src/a2a/adapters/message.ts
Normal file
272
dexto/packages/server/src/a2a/adapters/message.ts
Normal file
@@ -0,0 +1,272 @@
|
||||
/**
|
||||
* A2A Message Format Converters
|
||||
*
|
||||
* Bidirectional conversion between A2A protocol message format
|
||||
* and Dexto's internal message format.
|
||||
*
|
||||
* These converters live at the server boundary, translating between
|
||||
* wire format (A2A) and internal format (DextoAgent).
|
||||
*/
|
||||
|
||||
import type { InternalMessage } from '@dexto/core';
|
||||
import type { Message, Part, MessageRole, ConvertedMessage } from '../types.js';
|
||||
import { randomUUID } from 'crypto';
|
||||
|
||||
/**
|
||||
* Convert A2A message to internal format for agent.run().
|
||||
*
|
||||
* Extracts text, image, and file from A2A parts array.
|
||||
* agent.run() expects these as separate parameters.
|
||||
*
|
||||
* @param a2aMsg A2A protocol message
|
||||
* @returns Converted message parts for agent.run()
|
||||
*/
|
||||
export function a2aToInternalMessage(a2aMsg: Message): ConvertedMessage {
|
||||
let text = '';
|
||||
let image: ConvertedMessage['image'] | undefined;
|
||||
let file: ConvertedMessage['file'] | undefined;
|
||||
|
||||
for (const part of a2aMsg.parts) {
|
||||
switch (part.kind) {
|
||||
case 'text':
|
||||
text += (text ? ' ' : '') + part.text;
|
||||
break;
|
||||
|
||||
case 'file': {
|
||||
// Determine if this is an image or general file
|
||||
const fileData = part.file;
|
||||
const mimeType = fileData.mimeType || '';
|
||||
const isImage = mimeType.startsWith('image/');
|
||||
|
||||
if (isImage && !image) {
|
||||
// Treat as image (agent.run() supports one image)
|
||||
const data = 'bytes' in fileData ? fileData.bytes : fileData.uri;
|
||||
image = {
|
||||
image: data,
|
||||
mimeType: mimeType,
|
||||
};
|
||||
} else if (!file) {
|
||||
// Take first file only (agent.run() supports one file)
|
||||
const data = 'bytes' in fileData ? fileData.bytes : fileData.uri;
|
||||
const fileObj: { data: string; mimeType: string; filename?: string } = {
|
||||
data: data,
|
||||
mimeType: mimeType,
|
||||
};
|
||||
if (fileData.name) {
|
||||
fileObj.filename = fileData.name;
|
||||
}
|
||||
file = fileObj;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'data':
|
||||
// Convert structured data to JSON text
|
||||
text += (text ? '\n' : '') + JSON.stringify(part.data, null, 2);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return { text, image, file };
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert internal message to A2A format.
|
||||
*
|
||||
* Maps Dexto's internal message structure to A2A protocol format.
|
||||
*
|
||||
* Role mapping:
|
||||
* - 'user' → 'user'
|
||||
* - 'assistant' → 'agent'
|
||||
* - 'system' → filtered out (not part of A2A conversation)
|
||||
* - 'tool' → 'agent' (tool results presented as agent responses)
|
||||
*
|
||||
* @param msg Internal message from session history
|
||||
* @param taskId Optional task ID to associate message with
|
||||
* @param contextId Optional context ID to associate message with
|
||||
* @returns A2A protocol message or null if message should be filtered
|
||||
*/
|
||||
export function internalToA2AMessage(
|
||||
msg: InternalMessage,
|
||||
taskId?: string,
|
||||
contextId?: string
|
||||
): Message | null {
|
||||
// Filter out system messages (internal context, not part of A2A conversation)
|
||||
if (msg.role === 'system') {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Map role
|
||||
const role: MessageRole = msg.role === 'user' ? 'user' : 'agent';
|
||||
|
||||
// Convert content to parts
|
||||
const parts: Part[] = [];
|
||||
|
||||
if (typeof msg.content === 'string') {
|
||||
// Simple text content
|
||||
if (msg.content) {
|
||||
parts.push({ kind: 'text', text: msg.content });
|
||||
}
|
||||
} else if (msg.content === null) {
|
||||
// Null content (tool-only messages) - skip for A2A
|
||||
// These are internal details, not part of user-facing conversation
|
||||
} else if (Array.isArray(msg.content)) {
|
||||
// Multi-part content
|
||||
for (const part of msg.content) {
|
||||
switch (part.type) {
|
||||
case 'text':
|
||||
parts.push({ kind: 'text', text: part.text });
|
||||
break;
|
||||
|
||||
case 'image': {
|
||||
const imageData = part.image;
|
||||
const mimeType = part.mimeType || 'image/png';
|
||||
|
||||
// Convert different input types to base64 or URL
|
||||
let fileObj: any;
|
||||
if (
|
||||
imageData instanceof URL ||
|
||||
(typeof imageData === 'string' && imageData.startsWith('http'))
|
||||
) {
|
||||
// URL reference
|
||||
fileObj = {
|
||||
uri: imageData.toString(),
|
||||
mimeType,
|
||||
};
|
||||
} else if (Buffer.isBuffer(imageData)) {
|
||||
// Buffer -> base64
|
||||
fileObj = {
|
||||
bytes: imageData.toString('base64'),
|
||||
mimeType,
|
||||
};
|
||||
} else if (imageData instanceof Uint8Array) {
|
||||
// Uint8Array -> base64
|
||||
fileObj = {
|
||||
bytes: Buffer.from(imageData).toString('base64'),
|
||||
mimeType,
|
||||
};
|
||||
} else if (imageData instanceof ArrayBuffer) {
|
||||
// ArrayBuffer -> base64
|
||||
fileObj = {
|
||||
bytes: Buffer.from(imageData).toString('base64'),
|
||||
mimeType,
|
||||
};
|
||||
} else if (typeof imageData === 'string') {
|
||||
// Assume already base64 if string but not a URL
|
||||
fileObj = {
|
||||
bytes: imageData,
|
||||
mimeType,
|
||||
};
|
||||
}
|
||||
|
||||
if (fileObj) {
|
||||
parts.push({
|
||||
kind: 'file',
|
||||
file: fileObj,
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'file': {
|
||||
const fileData = part.data;
|
||||
const mimeType = part.mimeType;
|
||||
|
||||
// Convert different input types to base64 or URL
|
||||
let fileObj: any;
|
||||
if (
|
||||
fileData instanceof URL ||
|
||||
(typeof fileData === 'string' && fileData.startsWith('http'))
|
||||
) {
|
||||
// URL reference
|
||||
fileObj = {
|
||||
uri: fileData.toString(),
|
||||
mimeType,
|
||||
};
|
||||
} else if (Buffer.isBuffer(fileData)) {
|
||||
// Buffer -> base64
|
||||
fileObj = {
|
||||
bytes: fileData.toString('base64'),
|
||||
mimeType,
|
||||
};
|
||||
} else if (fileData instanceof Uint8Array) {
|
||||
// Uint8Array -> base64
|
||||
fileObj = {
|
||||
bytes: Buffer.from(fileData).toString('base64'),
|
||||
mimeType,
|
||||
};
|
||||
} else if (fileData instanceof ArrayBuffer) {
|
||||
// ArrayBuffer -> base64
|
||||
fileObj = {
|
||||
bytes: Buffer.from(fileData).toString('base64'),
|
||||
mimeType,
|
||||
};
|
||||
} else if (typeof fileData === 'string') {
|
||||
// Assume already base64 if string but not a URL
|
||||
fileObj = {
|
||||
bytes: fileData,
|
||||
mimeType,
|
||||
};
|
||||
}
|
||||
|
||||
if (fileObj) {
|
||||
// Add filename if present
|
||||
if (part.filename) {
|
||||
fileObj.name = part.filename;
|
||||
}
|
||||
|
||||
parts.push({
|
||||
kind: 'file',
|
||||
file: fileObj,
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no parts, return null (don't include empty messages in A2A)
|
||||
if (parts.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const message: Message = {
|
||||
role,
|
||||
parts,
|
||||
messageId: randomUUID(),
|
||||
kind: 'message',
|
||||
};
|
||||
|
||||
if (taskId) message.taskId = taskId;
|
||||
if (contextId) message.contextId = contextId;
|
||||
|
||||
return message;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert array of internal messages to A2A messages.
|
||||
*
|
||||
* Filters out system messages and empty messages.
|
||||
*
|
||||
* @param messages Internal messages from session history
|
||||
* @param taskId Optional task ID to associate messages with
|
||||
* @param contextId Optional context ID to associate messages with
|
||||
* @returns Array of A2A protocol messages
|
||||
*/
|
||||
export function internalMessagesToA2A(
|
||||
messages: InternalMessage[],
|
||||
taskId?: string,
|
||||
contextId?: string
|
||||
): Message[] {
|
||||
const a2aMessages: Message[] = [];
|
||||
|
||||
for (const msg of messages) {
|
||||
const a2aMsg = internalToA2AMessage(msg, taskId, contextId);
|
||||
if (a2aMsg !== null) {
|
||||
a2aMessages.push(a2aMsg);
|
||||
}
|
||||
}
|
||||
|
||||
return a2aMessages;
|
||||
}
|
||||
77
dexto/packages/server/src/a2a/adapters/state.ts
Normal file
77
dexto/packages/server/src/a2a/adapters/state.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
/**
|
||||
* A2A Task State Derivation
|
||||
*
|
||||
* Derives A2A task state from Dexto session state.
|
||||
* Tasks don't have their own state - state is computed from session history.
|
||||
*/
|
||||
|
||||
import type { InternalMessage } from '@dexto/core';
|
||||
import type { TaskState, Message } from '../types.js';
|
||||
|
||||
/**
|
||||
* Derive task state from session message history.
|
||||
*
|
||||
* Logic per A2A spec:
|
||||
* - submitted: Task has been submitted (no messages yet or only user message)
|
||||
* - working: Agent is processing the task
|
||||
* - completed: Task completed successfully (has complete exchange)
|
||||
* - failed: Session encountered an error (would need error tracking)
|
||||
* - canceled: Session was explicitly cancelled (would need cancellation tracking)
|
||||
*
|
||||
* Note: We derive from message patterns, not explicit state tracking.
|
||||
* This keeps tasks as pure views over sessions.
|
||||
*
|
||||
* @param messages Session message history
|
||||
* @returns Derived task state
|
||||
*/
|
||||
export function deriveTaskState(messages: InternalMessage[]): TaskState {
|
||||
// Empty session = submitted task
|
||||
if (messages.length === 0) {
|
||||
return 'submitted';
|
||||
}
|
||||
|
||||
// Check for user and assistant messages
|
||||
const hasUserMessage = messages.some((m) => m.role === 'user');
|
||||
const hasAssistantMessage = messages.some((m) => m.role === 'assistant');
|
||||
|
||||
// Complete exchange = completed task
|
||||
if (hasUserMessage && hasAssistantMessage) {
|
||||
return 'completed';
|
||||
}
|
||||
|
||||
// User message without response = working task
|
||||
if (hasUserMessage && !hasAssistantMessage) {
|
||||
return 'working';
|
||||
}
|
||||
|
||||
// Edge case: assistant message without user (shouldn't happen normally)
|
||||
return 'submitted';
|
||||
}
|
||||
|
||||
/**
|
||||
* Derive task state from A2A messages (already converted).
|
||||
*
|
||||
* This is a convenience function when you already have A2A messages
|
||||
* and don't want to go back to internal format.
|
||||
*
|
||||
* @param messages A2A protocol messages
|
||||
* @returns Derived task state
|
||||
*/
|
||||
export function deriveTaskStateFromA2A(messages: Message[]): TaskState {
|
||||
if (messages.length === 0) {
|
||||
return 'submitted';
|
||||
}
|
||||
|
||||
const hasUserMessage = messages.some((m) => m.role === 'user');
|
||||
const hasAgentMessage = messages.some((m) => m.role === 'agent');
|
||||
|
||||
if (hasUserMessage && hasAgentMessage) {
|
||||
return 'completed';
|
||||
}
|
||||
|
||||
if (hasUserMessage && !hasAgentMessage) {
|
||||
return 'working';
|
||||
}
|
||||
|
||||
return 'submitted';
|
||||
}
|
||||
103
dexto/packages/server/src/a2a/adapters/task-view.ts
Normal file
103
dexto/packages/server/src/a2a/adapters/task-view.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
/**
|
||||
* A2A TaskView Adapter
|
||||
*
|
||||
* Wraps a Dexto ChatSession to present it as an A2A Task.
|
||||
* This is a pure adapter - no storage, no persistence, just a view.
|
||||
*
|
||||
* Key principle: taskId === sessionId
|
||||
*/
|
||||
|
||||
import type { ChatSession } from '@dexto/core';
|
||||
import type { Task, TaskStatus } from '../types.js';
|
||||
import { internalMessagesToA2A } from './message.js';
|
||||
import { deriveTaskState } from './state.js';
|
||||
|
||||
/**
|
||||
* TaskView wraps a ChatSession to provide A2A-compliant task interface.
|
||||
*
|
||||
* This is a lightweight adapter that converts session state to A2A format
|
||||
* on-demand. No state is cached or stored.
|
||||
*
|
||||
* Usage:
|
||||
* ```typescript
|
||||
* const session = await agent.createSession(taskId);
|
||||
* const taskView = new TaskView(session);
|
||||
* const task = await taskView.toA2ATask();
|
||||
* ```
|
||||
*/
|
||||
export class TaskView {
|
||||
constructor(private session: ChatSession) {}
|
||||
|
||||
/**
|
||||
* Convert the wrapped session to an A2A Task.
|
||||
*
|
||||
* This reads the session history and converts it to A2A format.
|
||||
* State is derived from message patterns, not stored separately.
|
||||
*
|
||||
* @returns A2A protocol task structure
|
||||
*/
|
||||
async toA2ATask(): Promise<Task> {
|
||||
// Get session history
|
||||
const history = await this.session.getHistory();
|
||||
|
||||
// Convert internal messages to A2A format
|
||||
const a2aMessages = internalMessagesToA2A(history, this.session.id, this.session.id);
|
||||
|
||||
// Derive task state from session
|
||||
const state = deriveTaskState(history);
|
||||
|
||||
// Create TaskStatus object per A2A spec
|
||||
const status: TaskStatus = {
|
||||
state,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
// Construct A2A task
|
||||
const task: Task = {
|
||||
id: this.session.id, // taskId === sessionId
|
||||
contextId: this.session.id, // For now, contextId === taskId (could be enhanced for multi-task contexts)
|
||||
status,
|
||||
history: a2aMessages,
|
||||
kind: 'task',
|
||||
metadata: {
|
||||
dexto: {
|
||||
sessionId: this.session.id,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
return task;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the underlying session ID.
|
||||
* Since taskId === sessionId, this is the same as the task ID.
|
||||
*/
|
||||
get sessionId(): string {
|
||||
return this.session.id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the underlying session (for advanced use).
|
||||
*/
|
||||
get session_(): ChatSession {
|
||||
return this.session;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a TaskView from a session ID and agent.
|
||||
*
|
||||
* Convenience factory function.
|
||||
*
|
||||
* @param sessionId Session/Task ID
|
||||
* @param agent DextoAgent instance
|
||||
* @returns TaskView wrapper
|
||||
*/
|
||||
export async function createTaskView(
|
||||
sessionId: string,
|
||||
agent: { createSession(id: string): Promise<ChatSession> }
|
||||
): Promise<TaskView> {
|
||||
const session = await agent.createSession(sessionId);
|
||||
return new TaskView(session);
|
||||
}
|
||||
62
dexto/packages/server/src/a2a/index.ts
Normal file
62
dexto/packages/server/src/a2a/index.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
/**
 * A2A Protocol Implementation
 *
 * Server-layer implementation of A2A Protocol v0.3.0.
 * Exposes DextoAgent capabilities through A2A-compliant interfaces.
 *
 * Specification: https://a2a-protocol.org/latest/specification
 *
 * @module a2a
 */

// Type definitions (A2A Protocol v0.3.0 wire formats)
export type {
    Task,
    TaskState,
    TaskStatus,
    Message,
    MessageRole,
    Part,
    TextPart,
    FilePart,
    DataPart,
    FileWithBytes,
    FileWithUri,
    Artifact,
    TaskStatusUpdateEvent,
    TaskArtifactUpdateEvent,
    MessageSendParams,
    MessageSendConfiguration,
    TaskQueryParams,
    ListTasksParams,
    ListTasksResult,
    TaskIdParams,
    ConvertedMessage,
} from './types.js';

// Protocol adapters (session-as-task view, message converters, state derivation)
export {
    TaskView,
    createTaskView,
    a2aToInternalMessage,
    internalToA2AMessage,
    internalMessagesToA2A,
    deriveTaskState,
    deriveTaskStateFromA2A,
} from './adapters/index.js';

// JSON-RPC transport (server, A2A method handlers, error helpers)
export {
    JsonRpcServer,
    A2AMethodHandlers,
    JsonRpcErrorCode,
    isJsonRpcError,
    isJsonRpcSuccess,
} from './jsonrpc/index.js';
export type {
    JsonRpcRequest,
    JsonRpcResponse,
    JsonRpcError,
    JsonRpcMethodHandler,
    JsonRpcServerOptions,
} from './jsonrpc/index.js';
|
||||
19
dexto/packages/server/src/a2a/jsonrpc/index.ts
Normal file
19
dexto/packages/server/src/a2a/jsonrpc/index.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
/**
 * A2A JSON-RPC 2.0 Implementation
 *
 * JSON-RPC transport layer for the A2A Protocol: transport server,
 * A2A method handlers, and the wire-format request/response types.
 */

// Transport server (request parsing, dispatch, batching).
export { JsonRpcServer } from './server.js';
export type { JsonRpcMethodHandler, JsonRpcServerOptions } from './server.js';
// A2A Protocol method implementations (message/send, tasks/*, ...).
export { A2AMethodHandlers } from './methods.js';
// JSON-RPC 2.0 wire types and type-guard helpers.
export type {
    JsonRpcRequest,
    JsonRpcResponse,
    JsonRpcSuccessResponse,
    JsonRpcErrorResponse,
    JsonRpcError,
    JsonRpcBatchRequest,
    JsonRpcBatchResponse,
} from './types.js';
export { JsonRpcErrorCode, isJsonRpcError, isJsonRpcSuccess } from './types.js';
|
||||
245
dexto/packages/server/src/a2a/jsonrpc/methods.ts
Normal file
245
dexto/packages/server/src/a2a/jsonrpc/methods.ts
Normal file
@@ -0,0 +1,245 @@
|
||||
/**
|
||||
* A2A Protocol JSON-RPC Method Handlers
|
||||
*
|
||||
* Implements A2A Protocol v0.3.0 RPC methods by calling DextoAgent.
|
||||
* These are thin wrappers that translate between A2A protocol and DextoAgent API.
|
||||
*
|
||||
* Method names per spec:
|
||||
* - message/send - Send a message to the agent
|
||||
* - message/stream - Send a message with streaming response
|
||||
* - tasks/get - Retrieve a specific task
|
||||
* - tasks/list - List tasks with optional filtering
|
||||
* - tasks/cancel - Cancel an in-progress task
|
||||
*/
|
||||
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import type {
|
||||
Task,
|
||||
Message,
|
||||
MessageSendParams,
|
||||
TaskQueryParams,
|
||||
ListTasksParams,
|
||||
ListTasksResult,
|
||||
TaskIdParams,
|
||||
} from '../types.js';
|
||||
import { TaskView } from '../adapters/task-view.js';
|
||||
import { a2aToInternalMessage } from '../adapters/message.js';
|
||||
|
||||
/**
|
||||
* A2A Method Handlers
|
||||
*
|
||||
* Implements all A2A Protocol JSON-RPC methods.
|
||||
* Each method:
|
||||
* 1. Validates params
|
||||
* 2. Calls DextoAgent methods
|
||||
* 3. Converts response to A2A format using TaskView
|
||||
*
|
||||
* Usage:
|
||||
* ```typescript
|
||||
* const handlers = new A2AMethodHandlers(agent);
|
||||
* const server = new JsonRpcServer({
|
||||
* methods: handlers.getMethods()
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export class A2AMethodHandlers {
|
||||
constructor(private agent: DextoAgent) {}
|
||||
|
||||
/**
|
||||
* message/send - Send a message to the agent
|
||||
*
|
||||
* This is the primary method for interacting with an agent.
|
||||
* Creates a task if taskId not provided in message, or adds to existing task.
|
||||
*
|
||||
* @param params Message send parameters
|
||||
* @returns Task or Message depending on configuration.blocking
|
||||
*/
|
||||
async messageSend(params: MessageSendParams): Promise<Task | Message> {
|
||||
if (!params?.message) {
|
||||
throw new Error('message is required');
|
||||
}
|
||||
|
||||
const { message } = params;
|
||||
|
||||
// Extract taskId from message (or generate new one)
|
||||
const taskId = message.taskId;
|
||||
|
||||
// Create or get session
|
||||
const session = await this.agent.createSession(taskId);
|
||||
|
||||
// Convert A2A message to internal format and run
|
||||
const { text, image, file } = a2aToInternalMessage(message);
|
||||
await this.agent.run(text, image, file, session.id);
|
||||
|
||||
// Return task view
|
||||
const taskView = new TaskView(session);
|
||||
const task = await taskView.toA2ATask();
|
||||
|
||||
// If blocking=false, return just the message (non-blocking)
|
||||
// For now, always return task (blocking behavior)
|
||||
// TODO: Implement non-blocking mode that returns Message
|
||||
return task;
|
||||
}
|
||||
|
||||
/**
|
||||
* tasks/get - Retrieve a task by ID
|
||||
*
|
||||
* @param params Parameters containing task ID
|
||||
* @returns Task details
|
||||
* @throws Error if task not found
|
||||
*/
|
||||
async tasksGet(params: TaskQueryParams): Promise<Task> {
|
||||
if (!params?.id) {
|
||||
throw new Error('id is required');
|
||||
}
|
||||
|
||||
// Check if session exists (don't create if not found)
|
||||
const session = await this.agent.getSession(params.id);
|
||||
if (!session) {
|
||||
throw new Error(`Task not found: ${params.id}`);
|
||||
}
|
||||
|
||||
// Convert to task view
|
||||
const taskView = new TaskView(session);
|
||||
return await taskView.toA2ATask();
|
||||
}
|
||||
|
||||
/**
|
||||
* tasks/list - List all tasks (optional filters)
|
||||
*
|
||||
* Note: This implementation loads all sessions, applies filters, then paginates.
|
||||
* For production with many sessions, consider filtering at the session manager level.
|
||||
*
|
||||
* @param params Optional filter parameters
|
||||
* @returns List of tasks with pagination info
|
||||
*/
|
||||
async tasksList(params?: ListTasksParams): Promise<ListTasksResult> {
|
||||
// Get all session IDs
|
||||
const sessionIds = await this.agent.listSessions();
|
||||
|
||||
// Convert each session to task view and apply filters
|
||||
const allTasks: Task[] = [];
|
||||
for (const sessionId of sessionIds) {
|
||||
// Use getSession to only retrieve existing sessions (don't create)
|
||||
const session = await this.agent.getSession(sessionId);
|
||||
if (!session) {
|
||||
continue; // Skip if session no longer exists
|
||||
}
|
||||
|
||||
const taskView = new TaskView(session);
|
||||
const task = await taskView.toA2ATask();
|
||||
|
||||
// Filter by status if provided
|
||||
if (params?.status && task.status.state !== params.status) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Filter by contextId if provided
|
||||
if (params?.contextId && task.contextId !== params.contextId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
allTasks.push(task);
|
||||
}
|
||||
|
||||
// Apply pagination after filtering
|
||||
const pageSize = Math.min(params?.pageSize ?? 50, 100);
|
||||
const offset = 0; // TODO: Implement proper pagination with pageToken
|
||||
const paginatedTasks = allTasks.slice(offset, offset + pageSize);
|
||||
|
||||
return {
|
||||
tasks: paginatedTasks,
|
||||
totalSize: allTasks.length, // Total matching tasks before pagination
|
||||
pageSize,
|
||||
nextPageToken: '', // TODO: Implement pagination tokens
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* tasks/cancel - Cancel a running task
|
||||
*
|
||||
* @param params Parameters containing task ID
|
||||
* @returns Updated task (in canceled state)
|
||||
* @throws Error if task not found
|
||||
*/
|
||||
async tasksCancel(params: TaskIdParams): Promise<Task> {
|
||||
if (!params?.id) {
|
||||
throw new Error('id is required');
|
||||
}
|
||||
|
||||
// Check if session exists (don't create if not found)
|
||||
const session = await this.agent.getSession(params.id);
|
||||
if (!session) {
|
||||
throw new Error(`Task not found: ${params.id}`);
|
||||
}
|
||||
|
||||
// Cancel the session
|
||||
session.cancel();
|
||||
|
||||
// Return updated task view
|
||||
const taskView = new TaskView(session);
|
||||
return await taskView.toA2ATask();
|
||||
}
|
||||
|
||||
/**
|
||||
* message/stream - Send a message with streaming response
|
||||
*
|
||||
* This is a streaming variant of message/send. Instead of returning a complete Task,
|
||||
* it returns a stream of TaskStatusUpdateEvent and TaskArtifactUpdateEvent as the
|
||||
* agent processes the message.
|
||||
*
|
||||
* **ARCHITECTURE NOTE**: This method is designed as a lightweight handler that returns
|
||||
* a taskId immediately. The actual message processing happens at the transport layer:
|
||||
*
|
||||
* - **JSON-RPC Transport** (packages/server/src/hono/routes/a2a-jsonrpc.ts:72-112):
|
||||
* The route intercepts 'message/stream' requests BEFORE calling this handler,
|
||||
* processes the message directly (lines 96-99), and returns an SSE stream.
|
||||
* This handler is registered but never actually invoked for JSON-RPC streaming.
|
||||
*
|
||||
* - **REST Transport** (packages/server/src/hono/routes/a2a-tasks.ts:206-244):
|
||||
* Similar pattern - route processes message and returns SSE stream directly.
|
||||
*
|
||||
* This design separates concerns:
|
||||
* - Handler provides taskId for API compatibility
|
||||
* - Transport layer manages SSE streaming and message processing
|
||||
* - Event bus broadcasts updates to connected SSE clients
|
||||
*
|
||||
* @param params Message send parameters (same as message/send)
|
||||
* @returns Task ID for streaming (transport layer handles actual SSE stream and message processing)
|
||||
*/
|
||||
async messageStream(params: MessageSendParams): Promise<{ taskId: string }> {
|
||||
if (!params?.message) {
|
||||
throw new Error('message is required');
|
||||
}
|
||||
|
||||
const { message } = params;
|
||||
|
||||
// Extract taskId from message (or generate new one)
|
||||
const taskId = message.taskId;
|
||||
|
||||
// Create or get session
|
||||
const session = await this.agent.createSession(taskId);
|
||||
|
||||
// Return task ID immediately - the transport layer will handle
|
||||
// setting up the SSE stream and calling agent.run() with streaming
|
||||
// See architecture note above for where message processing occurs
|
||||
return { taskId: session.id };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all method handlers as a Record for JsonRpcServer
|
||||
*
|
||||
* Returns methods with A2A-compliant names (slash notation).
|
||||
*
|
||||
* @returns Map of method names to handlers
|
||||
*/
|
||||
getMethods(): Record<string, (params: any) => Promise<any>> {
|
||||
return {
|
||||
'message/send': this.messageSend.bind(this),
|
||||
'message/stream': this.messageStream.bind(this),
|
||||
'tasks/get': this.tasksGet.bind(this),
|
||||
'tasks/list': this.tasksList.bind(this),
|
||||
'tasks/cancel': this.tasksCancel.bind(this),
|
||||
};
|
||||
}
|
||||
}
|
||||
271
dexto/packages/server/src/a2a/jsonrpc/server.ts
Normal file
271
dexto/packages/server/src/a2a/jsonrpc/server.ts
Normal file
@@ -0,0 +1,271 @@
|
||||
/**
|
||||
* JSON-RPC 2.0 Server
|
||||
*
|
||||
* Handles JSON-RPC 2.0 request parsing, method dispatch, and response formatting.
|
||||
* Implements the full JSON-RPC 2.0 specification including batch requests.
|
||||
*/
|
||||
|
||||
import type {
|
||||
JsonRpcRequest,
|
||||
JsonRpcResponse,
|
||||
JsonRpcBatchRequest,
|
||||
JsonRpcBatchResponse,
|
||||
JsonRpcError,
|
||||
} from './types.js';
|
||||
import { JsonRpcErrorCode } from './types.js';
|
||||
|
||||
/**
 * Method handler function type.
 *
 * Handlers receive the request's `params` value verbatim and resolve with
 * the value to place in the JSON-RPC response's `result` field.
 */
export type JsonRpcMethodHandler = (params: any) => Promise<any>;

/**
 * JSON-RPC 2.0 Server Options
 */
export interface JsonRpcServerOptions {
    /** Method handlers map, keyed by JSON-RPC method name. */
    methods: Record<string, JsonRpcMethodHandler>;
    /** Optional error hook invoked server-side when a method handler throws. */
    onError?: (error: Error, request?: JsonRpcRequest) => void;
}
|
||||
|
||||
/**
|
||||
* JSON-RPC 2.0 Server
|
||||
*
|
||||
* Parses JSON-RPC requests, dispatches to handlers, and formats responses.
|
||||
*
|
||||
* Usage:
|
||||
* ```typescript
|
||||
* const server = new JsonRpcServer({
|
||||
* methods: {
|
||||
* 'agent.createTask': async (params) => { ... },
|
||||
* 'agent.getTask': async (params) => { ... },
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* const response = await server.handle(request);
|
||||
* ```
|
||||
*/
|
||||
export class JsonRpcServer {
|
||||
    /** Registered method handlers, keyed by JSON-RPC method name. */
    private methods: Record<string, JsonRpcMethodHandler>;
    /** Optional server-side error hook invoked when a handler throws. */
    private onError: ((error: Error, request?: JsonRpcRequest) => void) | undefined;

    /**
     * @param options Method handler map and optional error hook.
     */
    constructor(options: JsonRpcServerOptions) {
        this.methods = options.methods;
        this.onError = options.onError;
    }
|
||||
|
||||
/**
|
||||
* Handle a JSON-RPC request (single or batch).
|
||||
*
|
||||
* @param request Single request or batch array
|
||||
* @returns Single response, batch array, or undefined for notifications
|
||||
*/
|
||||
async handle(
|
||||
request: JsonRpcRequest | JsonRpcBatchRequest
|
||||
): Promise<JsonRpcResponse | JsonRpcBatchResponse | undefined> {
|
||||
// Handle batch requests
|
||||
if (Array.isArray(request)) {
|
||||
return await this.handleBatch(request);
|
||||
}
|
||||
|
||||
// Handle single request
|
||||
return await this.handleSingle(request);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle a batch of JSON-RPC requests.
|
||||
*
|
||||
* Processes all requests in parallel per JSON-RPC 2.0 spec.
|
||||
*
|
||||
* @param requests Array of requests
|
||||
* @returns Array of responses, or undefined if all were notifications
|
||||
*/
|
||||
private async handleBatch(
|
||||
requests: JsonRpcBatchRequest
|
||||
): Promise<JsonRpcBatchResponse | undefined> {
|
||||
// Empty batch is an error
|
||||
if (requests.length === 0) {
|
||||
return [
|
||||
this.createErrorResponse(null, JsonRpcErrorCode.INVALID_REQUEST, 'Empty batch'),
|
||||
];
|
||||
}
|
||||
|
||||
// Process all requests in parallel
|
||||
const responses = await Promise.all(requests.map((req) => this.handleSingle(req)));
|
||||
|
||||
// Filter out notification responses (undefined)
|
||||
const validResponses = responses.filter((res): res is JsonRpcResponse => res !== undefined);
|
||||
|
||||
// Per JSON-RPC 2.0 spec: if all requests were notifications, return undefined
|
||||
if (validResponses.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return validResponses;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle a single JSON-RPC request.
|
||||
*
|
||||
* @param request JSON-RPC request object
|
||||
* @returns JSON-RPC response object, or undefined for notifications
|
||||
*/
|
||||
private async handleSingle(request: JsonRpcRequest): Promise<JsonRpcResponse | undefined> {
|
||||
try {
|
||||
// Validate JSON-RPC version
|
||||
if (request.jsonrpc !== '2.0') {
|
||||
// Notifications must not receive any response, even on error
|
||||
if (request.id === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
return this.createErrorResponse(
|
||||
request.id ?? null,
|
||||
JsonRpcErrorCode.INVALID_REQUEST,
|
||||
'Invalid JSON-RPC version (must be "2.0")'
|
||||
);
|
||||
}
|
||||
|
||||
// Validate method exists
|
||||
if (typeof request.method !== 'string') {
|
||||
// Notifications must not receive any response, even on error
|
||||
if (request.id === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
return this.createErrorResponse(
|
||||
request.id ?? null,
|
||||
JsonRpcErrorCode.INVALID_REQUEST,
|
||||
'Method must be a string'
|
||||
);
|
||||
}
|
||||
|
||||
// Check if method exists
|
||||
const handler = this.methods[request.method];
|
||||
if (!handler) {
|
||||
// Notifications must not receive any response, even on error
|
||||
if (request.id === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
return this.createErrorResponse(
|
||||
request.id ?? null,
|
||||
JsonRpcErrorCode.METHOD_NOT_FOUND,
|
||||
`Method not found: ${request.method}`
|
||||
);
|
||||
}
|
||||
|
||||
// Execute method handler
|
||||
try {
|
||||
const result = await handler(request.params);
|
||||
|
||||
// Notifications (id is undefined) don't get responses
|
||||
if (request.id === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return this.createSuccessResponse(request.id ?? null, result);
|
||||
} catch (error) {
|
||||
// Call error handler if provided (always log server-side)
|
||||
if (this.onError) {
|
||||
this.onError(
|
||||
error instanceof Error ? error : new Error(String(error)),
|
||||
request
|
||||
);
|
||||
}
|
||||
|
||||
// Notifications must not receive any response, even on error
|
||||
if (request.id === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Method execution error - return error response
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
// Don't leak stack traces to clients (already logged via onError)
|
||||
const errorData = error instanceof Error ? { name: error.name } : undefined;
|
||||
|
||||
return this.createErrorResponse(
|
||||
request.id ?? null,
|
||||
JsonRpcErrorCode.INTERNAL_ERROR,
|
||||
errorMessage,
|
||||
errorData
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
// Request parsing/validation error - if notification, still no response
|
||||
if (request.id === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
return this.createErrorResponse(null, JsonRpcErrorCode.INVALID_REQUEST, errorMessage);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a success response.
|
||||
*/
|
||||
private createSuccessResponse(id: string | number | null, result: any): JsonRpcResponse {
|
||||
return {
|
||||
jsonrpc: '2.0',
|
||||
result,
|
||||
id,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an error response.
|
||||
*/
|
||||
private createErrorResponse(
|
||||
id: string | number | null,
|
||||
code: number,
|
||||
message: string,
|
||||
data?: any
|
||||
): JsonRpcResponse {
|
||||
const error: JsonRpcError = { code, message };
|
||||
if (data !== undefined) {
|
||||
error.data = data;
|
||||
}
|
||||
|
||||
return {
|
||||
jsonrpc: '2.0',
|
||||
error,
|
||||
id,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a new method handler.
|
||||
*
|
||||
* @param method Method name
|
||||
* @param handler Handler function
|
||||
*/
|
||||
registerMethod(method: string, handler: JsonRpcMethodHandler): void {
|
||||
this.methods[method] = handler;
|
||||
}
|
||||
|
||||
/**
|
||||
* Unregister a method handler.
|
||||
*
|
||||
* @param method Method name
|
||||
*/
|
||||
unregisterMethod(method: string): void {
|
||||
delete this.methods[method];
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a method is registered.
|
||||
*
|
||||
* @param method Method name
|
||||
* @returns True if method exists
|
||||
*/
|
||||
hasMethod(method: string): boolean {
|
||||
return method in this.methods;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of registered method names.
|
||||
*
|
||||
* @returns Array of method names
|
||||
*/
|
||||
getMethods(): string[] {
|
||||
return Object.keys(this.methods);
|
||||
}
|
||||
}
|
||||
104
dexto/packages/server/src/a2a/jsonrpc/types.ts
Normal file
104
dexto/packages/server/src/a2a/jsonrpc/types.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
/**
 * JSON-RPC 2.0 Type Definitions
 *
 * Implements JSON-RPC 2.0 specification for A2A Protocol transport.
 * @see https://www.jsonrpc.org/specification
 */

/**
 * JSON-RPC 2.0 Request
 */
export interface JsonRpcRequest {
    /** JSON-RPC version (must be "2.0") */
    jsonrpc: '2.0';
    /** Method name to invoke */
    method: string;
    /** Method parameters (optional) */
    params?: any;
    /**
     * Request ID: string, number, or null.
     * A request with this member omitted entirely is a notification and
     * receives no response (this is how the server implementation detects
     * notifications). Per the JSON-RPC 2.0 spec an explicit `id: null` is
     * still a regular request, not a notification.
     */
    id?: string | number | null;
}
|
||||
|
||||
/**
 * JSON-RPC 2.0 Response (Success)
 */
export interface JsonRpcSuccessResponse {
    /** JSON-RPC version (must be "2.0") */
    jsonrpc: '2.0';
    /** Result of the method invocation */
    result: any;
    /** Request ID (matches the originating request) */
    id: string | number | null;
}

/**
 * JSON-RPC 2.0 Response (Error)
 */
export interface JsonRpcErrorResponse {
    /** JSON-RPC version (must be "2.0") */
    jsonrpc: '2.0';
    /** Error object describing what went wrong */
    error: JsonRpcError;
    /** Request ID (matches request, or null if ID couldn't be determined) */
    id: string | number | null;
}

/**
 * JSON-RPC 2.0 Error Object
 */
export interface JsonRpcError {
    /** Error code (integer; see JsonRpcErrorCode for the standard values) */
    code: number;
    /** Error message (short description) */
    message: string;
    /** Optional additional error data */
    data?: any;
}
|
||||
|
||||
/**
 * Union type for JSON-RPC responses (success or error).
 * A response is exactly one of the two; use the type guards below to narrow.
 */
export type JsonRpcResponse = JsonRpcSuccessResponse | JsonRpcErrorResponse;

/**
 * JSON-RPC 2.0 Batch Request (array of requests; must be non-empty on the wire)
 */
export type JsonRpcBatchRequest = JsonRpcRequest[];

/**
 * JSON-RPC 2.0 Batch Response
 */
export type JsonRpcBatchResponse = JsonRpcResponse[];

/**
 * Standard JSON-RPC 2.0 Error Codes
 */
export enum JsonRpcErrorCode {
    /** Invalid JSON was received by the server */
    PARSE_ERROR = -32700,
    /** The JSON sent is not a valid Request object */
    INVALID_REQUEST = -32600,
    /** The method does not exist / is not available */
    METHOD_NOT_FOUND = -32601,
    /** Invalid method parameter(s) */
    INVALID_PARAMS = -32602,
    /** Internal JSON-RPC error */
    INTERNAL_ERROR = -32603,
    /** Reserved for implementation-defined server-errors (-32000 to -32099) */
    // START/END are named by magnitude of the reserved range, so START is the
    // numerically smaller bound (-32099) and END the larger (-32000).
    SERVER_ERROR_START = -32099,
    SERVER_ERROR_END = -32000,
}
|
||||
|
||||
/**
|
||||
* Type guard to check if response is an error
|
||||
*/
|
||||
export function isJsonRpcError(response: JsonRpcResponse): response is JsonRpcErrorResponse {
|
||||
return 'error' in response;
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard to check if response is success
|
||||
*/
|
||||
export function isJsonRpcSuccess(response: JsonRpcResponse): response is JsonRpcSuccessResponse {
|
||||
return 'result' in response;
|
||||
}
|
||||
262
dexto/packages/server/src/a2a/types.ts
Normal file
262
dexto/packages/server/src/a2a/types.ts
Normal file
@@ -0,0 +1,262 @@
|
||||
/**
 * TODO: fetch from a2a sdk to avoid drift over time
 * A2A Protocol Type Definitions
 *
 * Type definitions compliant with A2A Protocol v0.3.0 specification.
 * Based on: https://a2a-protocol.org/latest/specification
 *
 * @module a2a/types
 */

/**
 * Task state per A2A Protocol specification.
 *
 * States:
 * - submitted: Task has been submitted
 * - working: Task is being processed
 * - input-required: Task needs user input
 * - completed: Task completed successfully
 * - canceled: Task was canceled
 * - failed: Task failed with error
 * - rejected: Task was rejected
 * - auth-required: Authentication required
 * - unknown: State is unknown
 */
export type TaskState =
    | 'submitted'
    | 'working'
    | 'input-required'
    | 'completed'
    | 'canceled'
    | 'failed'
    | 'rejected'
    | 'auth-required'
    | 'unknown';

/**
 * Message role per A2A Protocol specification.
 * 'user' is the client side of the exchange; 'agent' is the serving agent.
 */
export type MessageRole = 'user' | 'agent';
|
||||
|
||||
/**
 * Base interface for all part types.
 * Carries the optional extension metadata shared by every part kind.
 */
export interface PartBase {
    metadata?: { [key: string]: any };
}

/**
 * Text part - contains text content.
 */
export interface TextPart extends PartBase {
    readonly kind: 'text';
    text: string;
}

/**
 * File base interface.
 */
export interface FileBase {
    name?: string;
    mimeType?: string;
}

/**
 * File with base64-encoded bytes.
 * `uri` is typed `never` so that `bytes` and `uri` stay mutually exclusive.
 */
export interface FileWithBytes extends FileBase {
    bytes: string; // Base64 encoded
    uri?: never;
}

/**
 * File with URI reference.
 * `bytes` is typed `never` so that `bytes` and `uri` stay mutually exclusive.
 */
export interface FileWithUri extends FileBase {
    uri: string;
    bytes?: never;
}

/**
 * File part - contains file data (inline base64 bytes or a URI reference).
 */
export interface FilePart extends PartBase {
    readonly kind: 'file';
    file: FileWithBytes | FileWithUri;
}

/**
 * Data part - contains structured JSON data.
 */
export interface DataPart extends PartBase {
    readonly kind: 'data';
    data: { [key: string]: any };
}

/**
 * Union of all part types per A2A specification.
 * Discriminated by the literal `kind` field.
 */
export type Part = TextPart | FilePart | DataPart;
|
||||
|
||||
/**
 * A2A Protocol message structure.
 * Discriminated from Task by `kind: 'message'`.
 */
export interface Message {
    readonly role: MessageRole;
    parts: Part[]; // Required: Array of message parts
    metadata?: { [key: string]: any }; // Optional: Extension metadata
    extensions?: string[]; // Optional: Extension identifiers
    referenceTaskIds?: string[]; // Optional: Referenced task IDs
    messageId: string; // Required: Unique message identifier
    taskId?: string; // Optional: Associated task ID
    contextId?: string; // Optional: Context identifier
    readonly kind: 'message'; // Required: Discriminator
}

/**
 * Task status structure.
 */
export interface TaskStatus {
    state: TaskState; // Required: Current state
    message?: Message; // Optional: Status message
    timestamp?: string; // Optional: ISO 8601 timestamp
}

/**
 * Artifact - generated output from the agent.
 */
export interface Artifact {
    artifactId: string; // Required: Unique artifact ID
    name?: string; // Optional: Artifact name
    description?: string; // Optional: Description
    parts: Part[]; // Required: Artifact content
    metadata?: { [key: string]: any }; // Optional: Metadata
    extensions?: string[]; // Optional: Extension IDs
}

/**
 * A2A Protocol task structure.
 * Discriminated from Message by `kind: 'task'`.
 */
export interface Task {
    id: string; // Required: Unique task identifier
    contextId: string; // Required: Context across related tasks
    status: TaskStatus; // Required: Current task status
    history?: Message[]; // Optional: Conversation history
    artifacts?: Artifact[]; // Optional: Task artifacts
    metadata?: { [key: string]: any }; // Optional: Extension metadata
    readonly kind: 'task'; // Required: Discriminator
}
|
||||
|
||||
/**
 * Task status update event (streaming).
 */
export interface TaskStatusUpdateEvent {
    taskId: string;
    contextId: string;
    readonly kind: 'status-update'; // Discriminator for streaming events
    status: TaskStatus;
    final: boolean; // True for final event in the stream
    metadata?: { [key: string]: any };
}

/**
 * Task artifact update event (streaming).
 */
export interface TaskArtifactUpdateEvent {
    taskId: string;
    contextId: string;
    readonly kind: 'artifact-update'; // Discriminator for streaming events
    artifact: Artifact;
    append?: boolean; // Append to existing artifact
    lastChunk?: boolean; // Final chunk
    metadata?: { [key: string]: any };
}

/**
 * Push notification configuration.
 * Target URL (and optional headers) for server-initiated callbacks.
 */
export interface PushNotificationConfig {
    url: string;
    headers?: { [key: string]: string };
}

/**
 * Message send configuration.
 */
export interface MessageSendConfiguration {
    acceptedOutputModes?: string[];
    historyLength?: number;
    pushNotificationConfig?: PushNotificationConfig;
    blocking?: boolean; // Wait for completion
}
|
||||
|
||||
/**
 * Parameters for message/send and message/stream methods.
 */
export interface MessageSendParams {
    message: Message; // Required
    configuration?: MessageSendConfiguration; // Optional
    metadata?: { [key: string]: any }; // Optional
}

/**
 * Parameters for tasks/get method.
 */
export interface TaskQueryParams {
    id: string; // Required: Task ID
    historyLength?: number; // Optional: Limit history items
    metadata?: { [key: string]: any };
}

/**
 * Parameters for tasks/list method.
 */
export interface ListTasksParams {
    contextId?: string;
    status?: TaskState;
    pageSize?: number; // 1-100, default 50
    pageToken?: string;
    historyLength?: number;
    lastUpdatedAfter?: number; // Unix timestamp
    includeArtifacts?: boolean;
    metadata?: { [key: string]: any };
}

/**
 * Result for tasks/list method.
 */
export interface ListTasksResult {
    tasks: Task[];
    totalSize: number;
    pageSize: number;
    nextPageToken: string; // Empty string presumably means no further pages — TODO confirm
}

/**
 * Parameters for tasks/cancel and tasks/resubscribe methods.
 */
export interface TaskIdParams {
    id: string; // Required: Task ID
    metadata?: { [key: string]: any };
}
|
||||
|
||||
/**
 * Converted message parts for internal use (compatibility layer).
 * Used by adapters to convert between A2A and Dexto internal format.
 * `image` and `file` are `undefined` when the source message carried no
 * corresponding part.
 */
export interface ConvertedMessage {
    text: string;
    image:
        | {
              image: string;
              mimeType: string;
          }
        | undefined;
    file:
        | {
              data: string;
              mimeType: string;
              filename?: string;
          }
        | undefined;
}
|
||||
90
dexto/packages/server/src/approval/approval-coordinator.ts
Normal file
90
dexto/packages/server/src/approval/approval-coordinator.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import { EventEmitter } from 'node:events';
|
||||
import type { ApprovalRequest, ApprovalResponse } from '@dexto/core';
|
||||
|
||||
/**
|
||||
* Event coordinator for approval request/response flow between handler and server.
|
||||
*
|
||||
* Provides explicit separation between agent lifecycle events (on AgentEventBus)
|
||||
* and server-mode coordination events (on ApprovalCoordinator).
|
||||
*
|
||||
* Used by:
|
||||
* - ManualApprovalHandler: Emits requests, listens for responses
|
||||
* - Streaming endpoints: Listens for requests, helps emit responses
|
||||
* - Approval routes: Emits responses from client submissions
|
||||
*/
|
||||
export class ApprovalCoordinator extends EventEmitter {
|
||||
// Track approvalId -> sessionId mapping for multi-client SSE routing
|
||||
private approvalSessions = new Map<string, string | undefined>();
|
||||
|
||||
/**
|
||||
* Emit an approval request.
|
||||
* Called by ManualApprovalHandler when tool/command needs approval.
|
||||
*/
|
||||
public emitRequest(request: ApprovalRequest): void {
|
||||
// Store sessionId mapping for later lookup when client submits response
|
||||
this.approvalSessions.set(request.approvalId, request.sessionId);
|
||||
this.emit('approval:request', request);
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit an approval response.
|
||||
* Called by API routes when user submits decision.
|
||||
*/
|
||||
public emitResponse(response: ApprovalResponse): void {
|
||||
this.emit('approval:response', response);
|
||||
// Clean up the mapping after response is emitted
|
||||
this.approvalSessions.delete(response.approvalId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the sessionId associated with an approval request.
|
||||
* Used by API routes to attach sessionId to responses for SSE routing.
|
||||
*/
|
||||
public getSessionId(approvalId: string): string | undefined {
|
||||
return this.approvalSessions.get(approvalId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Subscribe to approval requests.
|
||||
* Used by streaming endpoints to forward requests to SSE clients.
|
||||
*
|
||||
* @param handler Callback to handle approval requests
|
||||
* @param options Optional AbortSignal for cleanup
|
||||
*/
|
||||
public onRequest(
|
||||
handler: (request: ApprovalRequest) => void,
|
||||
options?: { signal?: AbortSignal }
|
||||
): void {
|
||||
const listener = (request: ApprovalRequest) => handler(request);
|
||||
this.on('approval:request', listener);
|
||||
|
||||
// Cleanup on abort signal
|
||||
if (options?.signal) {
|
||||
options.signal.addEventListener('abort', () => {
|
||||
this.off('approval:request', listener);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Subscribe to approval responses.
|
||||
* Used by ManualApprovalHandler to resolve pending approval promises.
|
||||
*
|
||||
* @param handler Callback to handle approval responses
|
||||
* @param options Optional AbortSignal for cleanup
|
||||
*/
|
||||
public onResponse(
|
||||
handler: (response: ApprovalResponse) => void,
|
||||
options?: { signal?: AbortSignal }
|
||||
): void {
|
||||
const listener = (response: ApprovalResponse) => handler(response);
|
||||
this.on('approval:response', listener);
|
||||
|
||||
// Cleanup on abort signal
|
||||
if (options?.signal) {
|
||||
options.signal.addEventListener('abort', () => {
|
||||
this.off('approval:response', listener);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,311 @@
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest';
|
||||
import type { ApprovalRequest, ApprovalResponse } from '@dexto/core';
|
||||
import { ApprovalType, ApprovalStatus, DenialReason } from '@dexto/core';
|
||||
import { createManualApprovalHandler } from './manual-approval-handler.js';
|
||||
import type { ApprovalCoordinator } from './approval-coordinator.js';
|
||||
|
||||
// Vitest suite for createManualApprovalHandler. The coordinator is a
// hand-rolled stub: `on` records listeners per event name so tests can fire
// 'approval:response' manually; emitRequest/emitResponse are plain spies.
describe('createManualApprovalHandler', () => {
    let mockCoordinator: ApprovalCoordinator;
    let listeners: Map<string, ((response: ApprovalResponse) => void)[]>;

    beforeEach(() => {
        listeners = new Map();

        mockCoordinator = {
            on: vi.fn((event: string, listener: (response: ApprovalResponse) => void) => {
                const eventListeners = listeners.get(event) || [];
                eventListeners.push(listener);
                listeners.set(event, eventListeners);
            }),
            off: vi.fn((event: string, listener: (response: ApprovalResponse) => void) => {
                const eventListeners = listeners.get(event) || [];
                const index = eventListeners.indexOf(listener);
                if (index > -1) {
                    eventListeners.splice(index, 1);
                }
            }),
            emitRequest: vi.fn(),
            emitResponse: vi.fn(),
        } as unknown as ApprovalCoordinator;
    });

    describe('Timeout Configuration', () => {
        it('should not timeout when timeout is undefined (infinite wait)', async () => {
            const handler = createManualApprovalHandler(mockCoordinator);

            const request: ApprovalRequest = {
                approvalId: 'test-infinite-1',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                // No timeout - should wait indefinitely
                metadata: {
                    toolName: 'test_tool',
                    toolCallId: 'test-call-id',
                    args: {},
                },
            };

            // Start the approval request (won't resolve until we emit a response)
            const approvalPromise = handler(request);

            // Verify the request was emitted
            expect(mockCoordinator.emitRequest).toHaveBeenCalledWith(request);

            // Wait a bit to ensure no timeout occurred
            await new Promise((resolve) => setTimeout(resolve, 100));

            // Manually resolve by emitting a response
            const eventListeners = listeners.get('approval:response') || [];
            eventListeners.forEach((listener) => {
                listener({
                    approvalId: 'test-infinite-1',
                    status: ApprovalStatus.APPROVED,
                });
            });

            const response = await approvalPromise;
            expect(response.status).toBe(ApprovalStatus.APPROVED);
        });

        it('should timeout when timeout is specified', async () => {
            const handler = createManualApprovalHandler(mockCoordinator);

            const request: ApprovalRequest = {
                approvalId: 'test-timeout-1',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                timeout: 50, // 50ms timeout
                metadata: {
                    toolName: 'test_tool',
                    toolCallId: 'test-call-id',
                    args: {},
                },
            };

            // No response is ever emitted, so the handler must time out on its own
            const response = await handler(request);

            expect(response.status).toBe(ApprovalStatus.CANCELLED);
            expect(response.reason).toBe(DenialReason.TIMEOUT);
            expect(response.message).toContain('timed out');
            expect(response.timeoutMs).toBe(50);
        });

        it('should emit timeout response to coordinator when timeout occurs', async () => {
            const handler = createManualApprovalHandler(mockCoordinator);

            const request: ApprovalRequest = {
                approvalId: 'test-timeout-emit',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                timeout: 50,
                metadata: {
                    toolName: 'test_tool',
                    toolCallId: 'test-call-id',
                    args: {},
                },
            };

            await handler(request);

            // Verify coordinator received the timeout response
            expect(mockCoordinator.emitResponse).toHaveBeenCalledWith(
                expect.objectContaining({
                    approvalId: 'test-timeout-emit',
                    status: ApprovalStatus.CANCELLED,
                    reason: DenialReason.TIMEOUT,
                })
            );
        });

        it('should clear timeout when response is received before timeout', async () => {
            vi.useFakeTimers();
            const handler = createManualApprovalHandler(mockCoordinator);

            const request: ApprovalRequest = {
                approvalId: 'test-clear-timeout',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                timeout: 5000, // 5 second timeout
                metadata: {
                    toolName: 'test_tool',
                    toolCallId: 'test-call-id',
                    args: {},
                },
            };

            const approvalPromise = handler(request);

            // Emit response before timeout (synchronously, so fake timers are fine)
            const eventListeners = listeners.get('approval:response') || [];
            eventListeners.forEach((listener) => {
                listener({
                    approvalId: 'test-clear-timeout',
                    status: ApprovalStatus.APPROVED,
                });
            });

            const response = await approvalPromise;
            expect(response.status).toBe(ApprovalStatus.APPROVED);

            // Advance time past the timeout - should not cause any issues
            vi.advanceTimersByTime(6000);

            vi.useRealTimers();
        });

        it('should handle elicitation with no timeout (infinite wait)', async () => {
            const handler = createManualApprovalHandler(mockCoordinator);

            const request: ApprovalRequest = {
                approvalId: 'test-elicitation-infinite',
                type: ApprovalType.ELICITATION,
                timestamp: new Date(),
                // No timeout for elicitation
                metadata: {
                    schema: { type: 'object' as const, properties: {} },
                    prompt: 'Enter data',
                    serverName: 'TestServer',
                },
            };

            const approvalPromise = handler(request);

            // Wait briefly
            await new Promise((resolve) => setTimeout(resolve, 100));

            // Resolve the elicitation
            const eventListeners = listeners.get('approval:response') || [];
            eventListeners.forEach((listener) => {
                listener({
                    approvalId: 'test-elicitation-infinite',
                    status: ApprovalStatus.APPROVED,
                    data: { formData: { name: 'test' } },
                });
            });

            const response = await approvalPromise;
            expect(response.status).toBe(ApprovalStatus.APPROVED);
        });
    });

    describe('Cancellation Support', () => {
        it('should support cancelling pending approvals', async () => {
            const handler = createManualApprovalHandler(mockCoordinator);

            const request: ApprovalRequest = {
                approvalId: 'test-cancel-1',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                metadata: {
                    toolName: 'test_tool',
                    toolCallId: 'test-call-id',
                    args: {},
                },
            };

            const approvalPromise = handler(request);

            // Cancel the approval (cancel is an optional handler capability)
            handler.cancel?.('test-cancel-1');

            const response = await approvalPromise;
            expect(response.status).toBe(ApprovalStatus.CANCELLED);
            expect(response.reason).toBe(DenialReason.SYSTEM_CANCELLED);
        });

        it('should track pending approvals', () => {
            const handler = createManualApprovalHandler(mockCoordinator);

            const request1: ApprovalRequest = {
                approvalId: 'pending-1',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                metadata: { toolName: 'tool1', toolCallId: 'test-call-id-1', args: {} },
            };

            const request2: ApprovalRequest = {
                approvalId: 'pending-2',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                metadata: { toolName: 'tool2', toolCallId: 'test-call-id-2', args: {} },
            };

            // Start both requests (don't await)
            handler(request1);
            handler(request2);

            const pending = handler.getPending?.() || [];
            expect(pending).toContain('pending-1');
            expect(pending).toContain('pending-2');
        });

        it('should cancel all pending approvals', async () => {
            const handler = createManualApprovalHandler(mockCoordinator);

            const request1: ApprovalRequest = {
                approvalId: 'cancel-all-1',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                metadata: { toolName: 'tool1', toolCallId: 'test-call-id-1', args: {} },
            };

            const request2: ApprovalRequest = {
                approvalId: 'cancel-all-2',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                metadata: { toolName: 'tool2', toolCallId: 'test-call-id-2', args: {} },
            };

            const promise1 = handler(request1);
            const promise2 = handler(request2);

            // Cancel all
            handler.cancelAll?.();

            const [response1, response2] = await Promise.all([promise1, promise2]);

            expect(response1.status).toBe(ApprovalStatus.CANCELLED);
            expect(response2.status).toBe(ApprovalStatus.CANCELLED);
        });
    });

    describe('Response Handling', () => {
        it('should only handle responses for matching approvalId', async () => {
            const handler = createManualApprovalHandler(mockCoordinator);

            const request: ApprovalRequest = {
                approvalId: 'test-match-1',
                type: ApprovalType.TOOL_CONFIRMATION,
                timestamp: new Date(),
                metadata: { toolName: 'test_tool', toolCallId: 'test-call-id', args: {} },
            };

            const approvalPromise = handler(request);

            // Emit response for different approvalId - should be ignored
            const eventListeners = listeners.get('approval:response') || [];
            eventListeners.forEach((listener) => {
                listener({
                    approvalId: 'different-id',
                    status: ApprovalStatus.APPROVED,
                });
            });

            // Wait a bit - request should still be pending
            await new Promise((resolve) => setTimeout(resolve, 50));

            // Now emit correct response
            eventListeners.forEach((listener) => {
                listener({
                    approvalId: 'test-match-1',
                    status: ApprovalStatus.DENIED,
                    reason: DenialReason.USER_DENIED,
                });
            });

            const response = await approvalPromise;
            expect(response.status).toBe(ApprovalStatus.DENIED);
            expect(response.reason).toBe(DenialReason.USER_DENIED);
        });
    });
});
|
||||
195
dexto/packages/server/src/approval/manual-approval-handler.ts
Normal file
195
dexto/packages/server/src/approval/manual-approval-handler.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
import type { ApprovalHandler, ApprovalRequest, ApprovalResponse } from '@dexto/core';
|
||||
import { ApprovalStatus, DenialReason } from '@dexto/core';
|
||||
import type { ApprovalCoordinator } from './approval-coordinator.js';
|
||||
|
||||
/**
|
||||
* Creates a manual approval handler that uses ApprovalCoordinator for server communication.
|
||||
*
|
||||
* This handler emits `approval:request` and waits for `approval:response` via the coordinator,
|
||||
* enabling SSE-based approval flows where:
|
||||
* 1. Handler emits approval:request → Coordinator → SSE endpoint forwards to client
|
||||
* 2. Client sends decision via POST /api/approvals/{approvalId}
|
||||
* 3. API route emits approval:response → Coordinator → Handler resolves
|
||||
*
|
||||
* The returned handler implements the optional cancellation methods (cancel, cancelAll, getPending)
|
||||
* for managing pending approval requests.
|
||||
*
|
||||
* Timeouts are handled per-request using the timeout value from ApprovalRequest, which
|
||||
* is set by ApprovalManager based on the request type (tool confirmation vs elicitation).
|
||||
*
|
||||
* @param coordinator The approval coordinator for request/response communication
|
||||
* @returns ApprovalHandler with cancellation support
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const coordinator = new ApprovalCoordinator();
|
||||
* const handler = createManualApprovalHandler(coordinator);
|
||||
* agent.setApprovalHandler(handler);
|
||||
*
|
||||
* // Later, cancel a specific approval (if handler supports it)
|
||||
* handler.cancel?.('approval-id-123');
|
||||
* ```
|
||||
*/
|
||||
export function createManualApprovalHandler(coordinator: ApprovalCoordinator): ApprovalHandler {
    // Track pending approvals for cancellation support.
    // Keyed by approvalId; each entry carries everything needed to settle the
    // promise from outside the original handleApproval call
    // (cancel / cancelAll / autoApprovePending).
    const pendingApprovals = new Map<
        string,
        {
            cleanup: () => void;
            resolve: (response: ApprovalResponse) => void;
            request: ApprovalRequest;
        }
    >();

    // Core handler: emits the request through the coordinator and resolves when
    // a matching approval:response arrives, the per-request timeout fires, or
    // the request is settled externally. Always resolves — never rejects — so
    // callers uniformly inspect response.status.
    const handleApproval = (request: ApprovalRequest): Promise<ApprovalResponse> => {
        return new Promise<ApprovalResponse>((resolve) => {
            // Use per-request timeout (optional - undefined means no timeout)
            // - Tool confirmations use config.toolConfirmation.timeout
            // - Elicitations use config.elicitation.timeout
            const effectiveTimeout = request.timeout;

            // Set timeout timer ONLY if timeout is specified
            // If undefined, wait indefinitely for user response
            let timer: NodeJS.Timeout | undefined;
            if (effectiveTimeout !== undefined) {
                // NOTE: `cleanup` is declared further down; referencing it here is
                // safe because this callback only runs asynchronously, after the
                // declaration has executed.
                timer = setTimeout(() => {
                    cleanup();
                    pendingApprovals.delete(request.approvalId);

                    // Emit timeout response so UI/clients can dismiss the prompt
                    const timeoutResponse: ApprovalResponse = {
                        approvalId: request.approvalId,
                        status: ApprovalStatus.CANCELLED,
                        sessionId: request.sessionId,
                        reason: DenialReason.TIMEOUT,
                        message: `Approval request timed out after ${effectiveTimeout}ms`,
                        timeoutMs: effectiveTimeout,
                    };
                    coordinator.emitResponse(timeoutResponse);

                    // Resolve with CANCELLED response (not reject) to match auto-approve/deny behavior
                    // Callers can uniformly check response.status instead of handling exceptions
                    resolve(timeoutResponse);
                }, effectiveTimeout);
            }

            // Cleanup function to remove listener and clear timeout.
            // Idempotent: clearTimeout on a fired timer is a no-op, and the
            // listener is only detached once (cleanupListener is nulled out).
            let cleanupListener: (() => void) | null = null;
            const cleanup = () => {
                if (timer !== undefined) {
                    clearTimeout(timer);
                }
                if (cleanupListener) {
                    cleanupListener();
                    cleanupListener = null;
                }
            };

            // Listen for approval:response events
            const listener = (res: ApprovalResponse) => {
                // Only handle responses for this specific approval
                if (res.approvalId === request.approvalId) {
                    cleanup();
                    pendingApprovals.delete(request.approvalId);
                    resolve(res);
                }
            };

            // Register listener
            coordinator.on('approval:response', listener);
            cleanupListener = () => coordinator.off('approval:response', listener);

            // Store for cancellation support
            pendingApprovals.set(request.approvalId, {
                cleanup,
                resolve,
                request,
            });

            // Emit the approval:request event via coordinator
            // SSE endpoints will subscribe to coordinator and forward to clients
            coordinator.emitRequest(request);
        });
    };

    // The handler is the callable handleApproval with management methods
    // attached, matching the optional members of ApprovalHandler.
    const handler: ApprovalHandler = Object.assign(handleApproval, {
        // Cancel a single pending approval by id; no-op if unknown/already settled.
        cancel: (approvalId: string): void => {
            const pending = pendingApprovals.get(approvalId);
            if (pending) {
                pending.cleanup();
                pendingApprovals.delete(approvalId);

                // Create cancellation response
                const cancelResponse: ApprovalResponse = {
                    approvalId,
                    status: ApprovalStatus.CANCELLED,
                    sessionId: pending.request.sessionId,
                    reason: DenialReason.SYSTEM_CANCELLED,
                    message: 'Approval request was cancelled',
                };

                // Emit cancellation event so UI listeners can dismiss the prompt
                coordinator.emitResponse(cancelResponse);

                // Resolve with CANCELLED response (not reject) to match auto-approve/deny behavior
                // Callers can uniformly check response.status instead of handling exceptions
                pending.resolve(cancelResponse);
            }
        },

        // Cancel every pending approval. Deleting entries (via cancel) while
        // iterating a Map is well-defined in JavaScript.
        cancelAll: (): void => {
            for (const [approvalId] of pendingApprovals) {
                handler.cancel?.(approvalId);
            }
        },

        // Snapshot of pending approval ids.
        getPending: (): string[] => {
            return Array.from(pendingApprovals.keys());
        },

        // Snapshot of pending approval requests.
        getPendingRequests: (): ApprovalRequest[] => {
            return Array.from(pendingApprovals.values()).map((p) => p.request);
        },

        /**
         * Auto-approve pending requests that match a predicate.
         * Used when a pattern is remembered to auto-approve other parallel requests
         * that would now match the same pattern.
         * Returns the number of requests that were auto-approved.
         */
        autoApprovePending: (
            predicate: (request: ApprovalRequest) => boolean,
            responseData?: Record<string, unknown>
        ): number => {
            let count = 0;

            // Find all pending approvals that match the predicate.
            // (Deleting during Map iteration is safe in JS.)
            for (const [approvalId, pending] of pendingApprovals) {
                if (predicate(pending.request)) {
                    // Clean up the pending state
                    pending.cleanup();
                    pendingApprovals.delete(approvalId);

                    // Create auto-approval response
                    const autoApproveResponse: ApprovalResponse = {
                        approvalId,
                        status: ApprovalStatus.APPROVED,
                        sessionId: pending.request.sessionId,
                        message: 'Auto-approved due to matching remembered pattern',
                        data: responseData,
                    };

                    // Emit response so UI can update
                    coordinator.emitResponse(autoApproveResponse);

                    // Resolve the pending promise
                    pending.resolve(autoApproveResponse);
                    count++;
                }
            }

            return count;
        },
    });

    return handler;
}
|
||||
@@ -0,0 +1,429 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import type { WebhookConfig } from '../webhook-types.js';
|
||||
import { AgentEventBus } from '@dexto/core';
|
||||
import { WebhookEventSubscriber } from '../webhook-subscriber.js';
|
||||
|
||||
// Create a mock fetch function, shared by the whole suite and reset in beforeEach.
const mockFetch = vi.fn();

// We'll use fake timers selectively for specific tests
// NOTE(review): the tests below appear to use real (short) timers throughout;
// presumably the fake-timer plan was dropped — confirm before relying on this.
// TODO: temporarily DUPE OF cli
|
||||
describe('WebhookEventSubscriber', () => {
    // Unit tests for webhook registration, event delivery, retry, HMAC signing
    // and error handling. HTTP is fully mocked via the injected mockFetch, so
    // no network is touched; short real-time sleeps let async delivery settle.
    let webhookSubscriber: WebhookEventSubscriber;
    let agentEventBus: AgentEventBus;

    beforeEach(() => {
        // Set test environment before creating subscriber
        process.env.NODE_ENV = 'test';

        // Completely reset the mock
        mockFetch.mockReset();

        // Set default mock implementation (no artificial delay needed with fake timers)
        mockFetch.mockResolvedValue({
            ok: true,
            status: 200,
            statusText: 'OK',
        } as any);

        // Create webhook subscriber with mocked fetch
        webhookSubscriber = new WebhookEventSubscriber({ fetchFn: mockFetch as any });
        agentEventBus = new AgentEventBus();
    });

    afterEach(() => {
        // Clean up webhook subscriber and abort controllers
        webhookSubscriber.cleanup();

        // Reset all mocks
        vi.resetAllMocks();

        // Clear the test environment
        delete process.env.NODE_ENV;
    });

    describe('Webhook Management', () => {
        it('should add a webhook', () => {
            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                secret: 'secret123',
                description: 'Test webhook',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);

            const retrievedWebhook = webhookSubscriber.getWebhook('wh_test_123');
            expect(retrievedWebhook).toEqual(webhook);
        });

        it('should remove a webhook', () => {
            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);
            expect(webhookSubscriber.getWebhook('wh_test_123')).toBeDefined();

            const removed = webhookSubscriber.removeWebhook('wh_test_123');
            expect(removed).toBe(true);
            expect(webhookSubscriber.getWebhook('wh_test_123')).toBeUndefined();
        });

        it('should return false when removing non-existent webhook', () => {
            const removed = webhookSubscriber.removeWebhook('non_existent');
            expect(removed).toBe(false);
        });

        it('should list all webhooks', () => {
            const webhook1: WebhookConfig = {
                id: 'wh_test_1',
                url: 'https://example.com/webhook1',
                createdAt: new Date(),
            };

            const webhook2: WebhookConfig = {
                id: 'wh_test_2',
                url: 'https://example.com/webhook2',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook1);
            webhookSubscriber.addWebhook(webhook2);

            const webhooks = webhookSubscriber.getWebhooks();
            expect(webhooks).toHaveLength(2);
            expect(webhooks).toContainEqual(webhook1);
            expect(webhooks).toContainEqual(webhook2);
        });
    });

    describe('Event Subscription', () => {
        it('should subscribe to agent events', () => {
            const mockOn = vi.spyOn(agentEventBus, 'on');

            webhookSubscriber.subscribe(agentEventBus);

            // Verify that all expected events are subscribed to
            expect(mockOn).toHaveBeenCalledWith('llm:thinking', expect.any(Function), {
                signal: expect.any(AbortSignal),
            });
            expect(mockOn).toHaveBeenCalledWith('llm:response', expect.any(Function), {
                signal: expect.any(AbortSignal),
            });
            expect(mockOn).toHaveBeenCalledWith('session:reset', expect.any(Function), {
                signal: expect.any(AbortSignal),
            });
        });

        it('should clean up event listeners on cleanup', () => {
            // Subscribe first to create abort controller
            webhookSubscriber.subscribe(agentEventBus);

            // Spy on the abort method of the actual abort controller
            const abortController = (webhookSubscriber as any).abortController;
            expect(abortController).toBeDefined();
            const mockAbort = vi.spyOn(abortController, 'abort');

            // Call cleanup
            webhookSubscriber.cleanup();

            // Verify abort was called
            expect(mockAbort).toHaveBeenCalled();

            // Verify abortController is cleaned up
            expect((webhookSubscriber as any).abortController).toBeUndefined();
        });
    });

    describe('Event Delivery', () => {
        // Default mock is already set up in parent beforeEach

        it('should deliver events to registered webhooks', async () => {
            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                secret: 'secret123',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);
            webhookSubscriber.subscribe(agentEventBus);

            // Emit event and wait for async delivery
            agentEventBus.emit('session:reset', { sessionId: 'test-session' });

            // Wait for async delivery to complete (much shorter in test env due to 1ms delays)
            await new Promise((resolve) => setTimeout(resolve, 10));

            // Check if fetch was called
            expect(mockFetch).toHaveBeenCalledWith(
                'https://example.com/webhook',
                expect.objectContaining({
                    method: 'POST',
                    headers: expect.objectContaining({
                        'Content-Type': 'application/json',
                        'User-Agent': 'DextoAgent/1.0',
                        'X-Dexto-Event-Type': 'session:reset',
                        'X-Dexto-Signature-256': expect.stringMatching(/^sha256=[a-f0-9]{64}$/),
                    }),
                    body: expect.stringContaining('"type":"session:reset"'),
                })
            );
        });

        it('should not deliver events when no webhooks are registered', async () => {
            webhookSubscriber.subscribe(agentEventBus);

            agentEventBus.emit('session:reset', { sessionId: 'test-session' });

            await new Promise((resolve) => setTimeout(resolve, 5));

            expect(mockFetch).not.toHaveBeenCalled();
        });

        it('should include proper webhook event structure', async () => {
            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);
            webhookSubscriber.subscribe(agentEventBus);

            agentEventBus.emit('llm:response', {
                content: 'Hello world',
                sessionId: 'test-session',
                tokenUsage: { totalTokens: 2 },
                model: 'test-model',
            });

            await new Promise((resolve) => setTimeout(resolve, 10));

            expect(mockFetch).toHaveBeenCalled();
            expect(mockFetch.mock.calls[0]).toBeDefined();
            const [_url, requestOptions] = mockFetch.mock.calls[0]!;
            const requestBody = JSON.parse((requestOptions as any).body);

            expect(requestBody).toMatchObject({
                id: expect.stringMatching(/^evt_/),
                type: 'llm:response',
                data: {
                    content: 'Hello world',
                    sessionId: 'test-session',
                    tokenUsage: { totalTokens: 2 },
                    model: 'test-model',
                },
                created: expect.any(String),
                apiVersion: '2025-07-03',
            });
        });
    });

    describe('Webhook Testing', () => {
        it('should test webhook successfully', async () => {
            // Use default mock which includes delay for responseTime

            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);

            const result = await webhookSubscriber.testWebhook('wh_test_123');

            expect(result.success).toBe(true);
            expect(result.statusCode).toBe(200);
            expect(result.responseTime).toBeGreaterThanOrEqual(0);
            expect(mockFetch).toHaveBeenCalledWith(
                'https://example.com/webhook',
                expect.objectContaining({
                    method: 'POST',
                    body: expect.stringContaining('"type":"tools:available-updated"'),
                })
            );
        });

        it('should throw error when testing non-existent webhook', async () => {
            await expect(webhookSubscriber.testWebhook('non_existent')).rejects.toThrow(
                'Webhook not found: non_existent'
            );
        });
    });

    describe('Retry Logic', () => {
        it('should retry failed requests', async () => {
            // First two calls fail, third succeeds
            mockFetch
                .mockRejectedValueOnce(new Error('Network error'))
                .mockRejectedValueOnce(new Error('Network error'))
                .mockResolvedValueOnce({
                    ok: true,
                    status: 200,
                    statusText: 'OK',
                } as any);

            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);

            const result = await webhookSubscriber.testWebhook('wh_test_123');

            expect(result.success).toBe(true);
            expect(mockFetch).toHaveBeenCalledTimes(3);
        });

        it('should fail after max retries', async () => {
            mockFetch.mockRejectedValue(new Error('Network error'));

            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);

            const result = await webhookSubscriber.testWebhook('wh_test_123');

            expect(result.success).toBe(false);
            expect(result.error).toContain('Network error');
            expect(mockFetch).toHaveBeenCalledTimes(3); // Default max retries
        });
    });

    describe('Security', () => {
        it('should generate HMAC signature when secret is provided', async () => {
            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                secret: 'test-secret',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);
            webhookSubscriber.subscribe(agentEventBus);

            agentEventBus.emit('session:reset', { sessionId: 'test-session' });

            await new Promise((resolve) => setTimeout(resolve, 10));

            expect(mockFetch).toHaveBeenCalled();
            expect(mockFetch.mock.calls[0]).toBeDefined();
            const [_url, requestOptions] = mockFetch.mock.calls[0]!;
            expect((requestOptions as any).headers['X-Dexto-Signature-256']).toMatch(
                /^sha256=[a-f0-9]{64}$/
            );
        });

        it('should not include signature when no secret is provided', async () => {
            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);
            webhookSubscriber.subscribe(agentEventBus);

            agentEventBus.emit('session:reset', { sessionId: 'test-session' });

            await new Promise((resolve) => setTimeout(resolve, 10));

            expect(mockFetch).toHaveBeenCalled();
            expect(mockFetch.mock.calls[0]).toBeDefined();
            const [_url, requestOptions] = mockFetch.mock.calls[0]!;
            expect((requestOptions as any).headers['X-Dexto-Signature-256']).toBeUndefined();
        });
    });

    describe('Error Handling', () => {
        it('should handle HTTP error responses', async () => {
            mockFetch.mockImplementation(async () => {
                await new Promise((resolve) => setTimeout(resolve, 1));
                return {
                    ok: false,
                    status: 404,
                    statusText: 'Not Found',
                } as any;
            });

            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);

            const result = await webhookSubscriber.testWebhook('wh_test_123');

            expect(result.success).toBe(false);
            expect(result.statusCode).toBe(404);
            expect(result.error).toBe('HTTP 404: Not Found');
        });

        it('should handle timeout errors', async () => {
            const abortError = new Error('The operation was aborted');
            abortError.name = 'AbortError';
            mockFetch.mockRejectedValue(abortError);

            const webhook: WebhookConfig = {
                id: 'wh_test_123',
                url: 'https://example.com/webhook',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook);

            const result = await webhookSubscriber.testWebhook('wh_test_123');

            expect(result.success).toBe(false);
            expect(result.error).toContain('aborted');
        });
    });

    describe('Multiple Webhooks', () => {
        it('should deliver events to multiple webhooks', async () => {
            const webhook1: WebhookConfig = {
                id: 'wh_test_1',
                url: 'https://example.com/webhook1',
                createdAt: new Date(),
            };

            const webhook2: WebhookConfig = {
                id: 'wh_test_2',
                url: 'https://example.com/webhook2',
                createdAt: new Date(),
            };

            webhookSubscriber.addWebhook(webhook1);
            webhookSubscriber.addWebhook(webhook2);
            webhookSubscriber.subscribe(agentEventBus);

            agentEventBus.emit('session:reset', { sessionId: 'test-session' });

            await new Promise((resolve) => setTimeout(resolve, 200));

            expect(mockFetch).toHaveBeenCalledTimes(2);
            expect(mockFetch).toHaveBeenCalledWith(
                'https://example.com/webhook1',
                expect.any(Object)
            );
            expect(mockFetch).toHaveBeenCalledWith(
                'https://example.com/webhook2',
                expect.any(Object)
            );
        });
    });
});
|
||||
337
dexto/packages/server/src/events/a2a-sse-subscriber.ts
Normal file
337
dexto/packages/server/src/events/a2a-sse-subscriber.ts
Normal file
@@ -0,0 +1,337 @@
|
||||
/**
|
||||
* A2A SSE (Server-Sent Events) Event Subscriber
|
||||
*
|
||||
* Subscribes to agent events and streams them to SSE clients for A2A tasks.
|
||||
* Uses standard SSE protocol (text/event-stream).
|
||||
*
|
||||
* Design:
|
||||
* - Filters events by taskId/sessionId for targeted streaming
|
||||
* - Uses standard SSE format: event: name\ndata: json\n\n
|
||||
* - Supports multiple concurrent SSE connections
|
||||
*/
|
||||
|
||||
/* eslint-disable no-undef */
|
||||
import { setMaxListeners } from 'events';
|
||||
import { AgentEventBus } from '@dexto/core';
|
||||
import { logger } from '@dexto/core';
|
||||
|
||||
/**
|
||||
* SSE connection state
|
||||
*/
|
||||
interface SSEConnection {
    /** Task/Session ID to filter events */
    taskId: string;
    /** Controller to write SSE events into the connection's ReadableStream */
    controller: ReadableStreamDefaultController;
    /** Abort controller used to tear down per-connection resources (e.g. keepalive) */
    abortController: AbortController;
    /** Connection timestamp (epoch milliseconds) */
    connectedAt: number;
}
|
||||
|
||||
/**
|
||||
* A2A SSE Event Subscriber
|
||||
*
|
||||
* Manages Server-Sent Events connections for A2A Protocol task streaming.
|
||||
*
|
||||
* Usage:
|
||||
* ```typescript
|
||||
* const sseSubscriber = new A2ASseEventSubscriber();
|
||||
* sseSubscriber.subscribe(agent.agentEventBus);
|
||||
*
|
||||
* // In route handler
|
||||
* const stream = sseSubscriber.createStream(taskId);
|
||||
* return new Response(stream, {
|
||||
* headers: {
|
||||
* 'Content-Type': 'text/event-stream',
|
||||
* 'Cache-Control': 'no-cache',
|
||||
* 'Connection': 'keep-alive'
|
||||
* }
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export class A2ASseEventSubscriber {
|
||||
private connections: Map<string, SSEConnection> = new Map();
|
||||
private eventBus?: AgentEventBus;
|
||||
private globalAbortController?: AbortController;
|
||||
|
||||
/**
|
||||
* Subscribe to agent event bus.
|
||||
* Sets up global event listeners that broadcast to all SSE connections.
|
||||
*
|
||||
* @param eventBus Agent event bus to subscribe to
|
||||
*/
|
||||
subscribe(eventBus: AgentEventBus): void {
|
||||
// Abort any previous subscription
|
||||
this.globalAbortController?.abort();
|
||||
|
||||
// Create new AbortController for this subscription
|
||||
this.globalAbortController = new AbortController();
|
||||
const { signal } = this.globalAbortController;
|
||||
|
||||
// Increase max listeners
|
||||
const MAX_SHARED_SIGNAL_LISTENERS = 20;
|
||||
setMaxListeners(MAX_SHARED_SIGNAL_LISTENERS, signal);
|
||||
|
||||
this.eventBus = eventBus;
|
||||
|
||||
// Subscribe to agent events
|
||||
eventBus.on(
|
||||
'llm:thinking',
|
||||
(payload) => {
|
||||
this.broadcastToTask(payload.sessionId, 'task.thinking', {
|
||||
taskId: payload.sessionId,
|
||||
});
|
||||
},
|
||||
{ signal }
|
||||
);
|
||||
|
||||
eventBus.on(
|
||||
'llm:chunk',
|
||||
(payload) => {
|
||||
this.broadcastToTask(payload.sessionId, 'task.chunk', {
|
||||
taskId: payload.sessionId,
|
||||
type: payload.chunkType,
|
||||
content: payload.content,
|
||||
isComplete: payload.isComplete,
|
||||
});
|
||||
},
|
||||
{ signal }
|
||||
);
|
||||
|
||||
eventBus.on(
|
||||
'llm:tool-call',
|
||||
(payload) => {
|
||||
this.broadcastToTask(payload.sessionId, 'task.toolCall', {
|
||||
taskId: payload.sessionId,
|
||||
toolName: payload.toolName,
|
||||
args: payload.args,
|
||||
callId: payload.callId,
|
||||
});
|
||||
},
|
||||
{ signal }
|
||||
);
|
||||
|
||||
eventBus.on(
|
||||
'llm:tool-result',
|
||||
(payload) => {
|
||||
const data: Record<string, unknown> = {
|
||||
taskId: payload.sessionId,
|
||||
toolName: payload.toolName,
|
||||
callId: payload.callId,
|
||||
success: payload.success,
|
||||
sanitized: payload.sanitized,
|
||||
};
|
||||
if (payload.rawResult !== undefined) {
|
||||
data.rawResult = payload.rawResult;
|
||||
}
|
||||
this.broadcastToTask(payload.sessionId, 'task.toolResult', data);
|
||||
},
|
||||
{ signal }
|
||||
);
|
||||
|
||||
eventBus.on(
|
||||
'llm:response',
|
||||
(payload) => {
|
||||
this.broadcastToTask(payload.sessionId, 'task.message', {
|
||||
taskId: payload.sessionId,
|
||||
message: {
|
||||
role: 'agent',
|
||||
content: [{ type: 'text', text: payload.content }],
|
||||
timestamp: new Date().toISOString(),
|
||||
},
|
||||
tokenUsage: payload.tokenUsage,
|
||||
provider: payload.provider,
|
||||
model: payload.model,
|
||||
});
|
||||
},
|
||||
{ signal }
|
||||
);
|
||||
|
||||
eventBus.on(
|
||||
'llm:error',
|
||||
(payload) => {
|
||||
this.broadcastToTask(payload.sessionId, 'task.error', {
|
||||
taskId: payload.sessionId,
|
||||
error: {
|
||||
message: payload.error.message,
|
||||
recoverable: payload.recoverable,
|
||||
},
|
||||
});
|
||||
},
|
||||
{ signal }
|
||||
);
|
||||
|
||||
eventBus.on(
|
||||
'session:reset',
|
||||
(payload) => {
|
||||
this.broadcastToTask(payload.sessionId, 'task.reset', {
|
||||
taskId: payload.sessionId,
|
||||
});
|
||||
},
|
||||
{ signal }
|
||||
);
|
||||
|
||||
logger.debug('A2ASseEventSubscriber subscribed to agent events');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new SSE stream for a specific task.
|
||||
*
|
||||
* Returns a ReadableStream that emits SSE events for the task.
|
||||
*
|
||||
* @param taskId Task/Session ID to stream events for
|
||||
* @returns ReadableStream for SSE connection
|
||||
*/
|
||||
createStream(taskId: string): ReadableStream<Uint8Array> {
|
||||
const connectionId = `${taskId}-${Date.now()}`;
|
||||
|
||||
return new ReadableStream({
|
||||
start: (controller) => {
|
||||
// Create connection state
|
||||
const connection: SSEConnection = {
|
||||
taskId,
|
||||
controller,
|
||||
abortController: new AbortController(),
|
||||
connectedAt: Date.now(),
|
||||
};
|
||||
|
||||
this.connections.set(connectionId, connection);
|
||||
logger.debug(`SSE connection opened for task ${taskId}`);
|
||||
|
||||
// Send initial connection event
|
||||
this.sendSSEEvent(controller, 'connected', {
|
||||
taskId,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
|
||||
// Send keepalive every 30 seconds
|
||||
const keepaliveInterval = setInterval(() => {
|
||||
try {
|
||||
this.sendSSEComment(controller, 'keepalive');
|
||||
} catch (_error) {
|
||||
clearInterval(keepaliveInterval);
|
||||
}
|
||||
}, 30000);
|
||||
|
||||
// Cleanup on abort
|
||||
connection.abortController.signal.addEventListener('abort', () => {
|
||||
clearInterval(keepaliveInterval);
|
||||
});
|
||||
},
|
||||
|
||||
cancel: () => {
|
||||
// Client disconnected, cleanup
|
||||
const connection = this.connections.get(connectionId);
|
||||
if (connection) {
|
||||
connection.abortController.abort();
|
||||
this.connections.delete(connectionId);
|
||||
logger.debug(`SSE connection closed for task ${taskId}`);
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Broadcast an event to a specific task's SSE connections.
|
||||
*
|
||||
* @param taskId Task ID to broadcast to
|
||||
* @param eventName SSE event name
|
||||
* @param data Event data
|
||||
*/
|
||||
private broadcastToTask(
|
||||
taskId: string,
|
||||
eventName: string,
|
||||
data: Record<string, unknown>
|
||||
): void {
|
||||
let sent = 0;
|
||||
for (const [connectionId, connection] of this.connections.entries()) {
|
||||
if (connection.taskId === taskId) {
|
||||
try {
|
||||
this.sendSSEEvent(connection.controller, eventName, data);
|
||||
sent++;
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
logger.warn(`Failed to send SSE event to ${connectionId}: ${errorMessage}`);
|
||||
// Clean up failed connection
|
||||
connection.abortController.abort();
|
||||
this.connections.delete(connectionId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (sent > 0) {
|
||||
logger.debug(`Broadcast ${eventName} to ${sent} SSE connection(s) for task ${taskId}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send an SSE event to a specific controller.
|
||||
*
|
||||
* Format: event: name\ndata: json\n\n
|
||||
*
|
||||
* @param controller Stream controller
|
||||
* @param eventName Event name
|
||||
* @param data Event data
|
||||
*/
|
||||
private sendSSEEvent(
|
||||
controller: ReadableStreamDefaultController,
|
||||
eventName: string,
|
||||
data: Record<string, unknown>
|
||||
): void {
|
||||
const event = `event: ${eventName}\ndata: ${JSON.stringify(data)}\n\n`;
|
||||
controller.enqueue(new TextEncoder().encode(event));
|
||||
}
|
||||
|
||||
/**
|
||||
* Send an SSE comment (for keepalive).
|
||||
*
|
||||
* Format: : comment\n
|
||||
*
|
||||
* @param controller Stream controller
|
||||
* @param comment Comment text
|
||||
*/
|
||||
private sendSSEComment(controller: ReadableStreamDefaultController, comment: string): void {
|
||||
const line = `: ${comment}\n`;
|
||||
controller.enqueue(new TextEncoder().encode(line));
|
||||
}
|
||||
|
||||
/**
|
||||
* Close all connections and cleanup.
|
||||
*/
|
||||
cleanup(): void {
|
||||
logger.debug(`Cleaning up ${this.connections.size} SSE connections`);
|
||||
|
||||
for (const [_connectionId, connection] of this.connections.entries()) {
|
||||
connection.abortController.abort();
|
||||
try {
|
||||
connection.controller.close();
|
||||
} catch (_error) {
|
||||
// Ignore errors on close
|
||||
}
|
||||
}
|
||||
|
||||
this.connections.clear();
|
||||
this.globalAbortController?.abort();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get active connection count.
|
||||
*/
|
||||
getConnectionCount(): number {
|
||||
return this.connections.size;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get connection count for a specific task.
|
||||
*/
|
||||
getTaskConnectionCount(taskId: string): number {
|
||||
let count = 0;
|
||||
for (const connection of this.connections.values()) {
|
||||
if (connection.taskId === taskId) {
|
||||
count++;
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
}
|
||||
16
dexto/packages/server/src/events/types.ts
Normal file
16
dexto/packages/server/src/events/types.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { AgentEventBus } from '@dexto/core';
|
||||
|
||||
/**
|
||||
* Generic interface for subscribing to core events.
|
||||
*/
|
||||
export interface EventSubscriber {
    /**
     * Attach event handlers to the given event bus.
     * Implementations in this package abort any previous subscription when
     * called again, so re-subscribing replaces rather than duplicates handlers.
     */
    subscribe(eventBus: AgentEventBus): void;

    /**
     * Clean up event listeners and resources.
     * Optional: implementations without resources to release may omit it.
     */
    cleanup?(): void;
}
|
||||
353
dexto/packages/server/src/events/webhook-subscriber.ts
Normal file
353
dexto/packages/server/src/events/webhook-subscriber.ts
Normal file
@@ -0,0 +1,353 @@
|
||||
import crypto from 'crypto';
|
||||
import { setMaxListeners } from 'events';
|
||||
import {
|
||||
AgentEventBus,
|
||||
INTEGRATION_EVENTS,
|
||||
type AgentEventMap,
|
||||
type AgentEventName,
|
||||
} from '@dexto/core';
|
||||
import { logger } from '@dexto/core';
|
||||
import { EventSubscriber } from './types.js';
|
||||
import {
|
||||
type WebhookConfig,
|
||||
type DextoWebhookEvent,
|
||||
type WebhookDeliveryResult,
|
||||
type WebhookDeliveryOptions,
|
||||
} from './webhook-types.js';
|
||||
|
||||
/**
|
||||
* Default configuration for webhook delivery
|
||||
*/
|
||||
const DEFAULT_DELIVERY_OPTIONS: Required<WebhookDeliveryOptions> = {
    // NOTE(review): appears to be the TOTAL number of attempts (tests expect
    // exactly 3 fetch calls with defaults) — confirm against deliverEvent.
    maxRetries: 3,
    timeout: 10000, // 10 seconds per HTTP request
    includeSignature: true, // HMAC-sign payloads when a webhook has a secret
};
|
||||
|
||||
/**
|
||||
* Webhook event subscriber that delivers agent events via HTTP POST
|
||||
*/
|
||||
export class WebhookEventSubscriber implements EventSubscriber {
|
||||
    // Registered webhook endpoints, keyed by webhook id.
    private webhooks: Map<string, WebhookConfig> = new Map();
    // Tears down the current event-bus subscription; recreated on subscribe().
    private abortController?: AbortController;
    // DEFAULT_DELIVERY_OPTIONS merged with constructor overrides.
    private deliveryOptions: Required<WebhookDeliveryOptions>;
    // Injectable fetch implementation (tests); defaults to global fetch.
    private fetchFn: typeof globalThis.fetch;
|
||||
|
||||
    /**
     * @param fetchFn Optional fetch implementation, injected in tests;
     *                defaults to the global fetch (Node.js 20+).
     *                All remaining options override DEFAULT_DELIVERY_OPTIONS.
     */
    constructor({
        fetchFn,
        ...deliveryOptions
    }: WebhookDeliveryOptions & { fetchFn?: typeof globalThis.fetch } = {}) {
        this.deliveryOptions = { ...DEFAULT_DELIVERY_OPTIONS, ...deliveryOptions };
        // Use native fetch (Node.js 20+) or injected implementation (tests)
        this.fetchFn = fetchFn || fetch;
        logger.debug('WebhookEventSubscriber initialized');
    }
|
||||
|
||||
/**
|
||||
* Subscribe to agent events and deliver them to registered webhooks
|
||||
*/
|
||||
    subscribe(eventBus: AgentEventBus): void {
        // Abort any previous subscription before creating a new one
        this.abortController?.abort();

        // Create new AbortController for this subscription
        this.abortController = new AbortController();
        const { signal } = this.abortController;

        // Increase max listeners since we intentionally share this signal across multiple events
        // This prevents the MaxListenersExceededWarning
        // INTEGRATION_EVENTS currently has 24 events, so we set this higher with buffer
        const MAX_SHARED_SIGNAL_LISTENERS = 50;
        setMaxListeners(MAX_SHARED_SIGNAL_LISTENERS, signal);

        // Subscribe to all INTEGRATION_EVENTS (tier 2 visibility)
        // This includes streaming events + lifecycle/state events
        INTEGRATION_EVENTS.forEach((eventName) => {
            eventBus.on(
                eventName,
                (payload) => {
                    // Fire-and-forget: NOTE(review) deliverEvent is assumed to
                    // handle its own retries/errors — confirm in its definition.
                    this.deliverEvent(eventName, payload);
                },
                { signal }
            );
        });

        logger.info(`Webhook subscriber active with ${this.webhooks.size} registered webhooks`);
    }
|
||||
|
||||
/**
|
||||
* Register a new webhook endpoint
|
||||
*/
|
||||
addWebhook(webhook: WebhookConfig): void {
|
||||
this.webhooks.set(webhook.id, webhook);
|
||||
logger.info(`Webhook registered: ${webhook.id} -> ${webhook.url}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a webhook endpoint
|
||||
*/
|
||||
removeWebhook(webhookId: string): boolean {
|
||||
const removed = this.webhooks.delete(webhookId);
|
||||
if (removed) {
|
||||
logger.info(`Webhook removed: ${webhookId}`);
|
||||
} else {
|
||||
logger.warn(`Attempted to remove non-existent webhook: ${webhookId}`);
|
||||
}
|
||||
return removed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all registered webhooks
|
||||
*/
|
||||
getWebhooks(): WebhookConfig[] {
|
||||
return Array.from(this.webhooks.values());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific webhook by ID
|
||||
*/
|
||||
getWebhook(webhookId: string): WebhookConfig | undefined {
|
||||
return this.webhooks.get(webhookId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test a webhook by sending a sample event
|
||||
*/
|
||||
async testWebhook(webhookId: string): Promise<WebhookDeliveryResult> {
|
||||
const webhook = this.webhooks.get(webhookId);
|
||||
if (!webhook) {
|
||||
throw new Error(`Webhook not found: ${webhookId}`);
|
||||
}
|
||||
|
||||
const testEvent: DextoWebhookEvent<'tools:available-updated'> = {
|
||||
id: `evt_test_${Date.now()}`,
|
||||
type: 'tools:available-updated',
|
||||
data: {
|
||||
tools: ['test-tool'],
|
||||
source: 'mcp',
|
||||
},
|
||||
created: new Date().toISOString(),
|
||||
apiVersion: '2025-07-03',
|
||||
};
|
||||
|
||||
return this.deliverToWebhook(webhook, testEvent);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up event listeners and resources
|
||||
*/
|
||||
cleanup(): void {
|
||||
if (this.abortController) {
|
||||
this.abortController.abort();
|
||||
delete (this as any).abortController;
|
||||
}
|
||||
|
||||
this.webhooks.clear();
|
||||
logger.debug('Webhook event subscriber cleaned up');
|
||||
}
|
||||
|
||||
/**
|
||||
* Unsubscribe from current event bus without clearing registered webhooks
|
||||
*/
|
||||
unsubscribe(): void {
|
||||
if (this.abortController) {
|
||||
const controller = this.abortController;
|
||||
delete this.abortController;
|
||||
try {
|
||||
controller.abort();
|
||||
} catch (error) {
|
||||
logger.debug(
|
||||
`Error aborting controller during unsubscribe: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`,
|
||||
{
|
||||
location: 'WebhookEventSubscriber.unsubscribe',
|
||||
...(error instanceof Error
|
||||
? { stack: error.stack }
|
||||
: { value: String(error) }),
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deliver an event to all registered webhooks
|
||||
*/
|
||||
private async deliverEvent<T extends AgentEventName>(
|
||||
eventType: T,
|
||||
eventData: AgentEventMap[T]
|
||||
): Promise<void> {
|
||||
if (this.webhooks.size === 0) {
|
||||
return; // No webhooks to deliver to
|
||||
}
|
||||
|
||||
const webhookEvent: DextoWebhookEvent<T> = {
|
||||
id: `evt_${Date.now()}_${Math.random().toString(36).substring(2, 11)}`,
|
||||
type: eventType,
|
||||
data: eventData,
|
||||
created: new Date().toISOString(),
|
||||
apiVersion: '2025-07-03',
|
||||
};
|
||||
|
||||
logger.debug(`Delivering webhook event: ${eventType} to ${this.webhooks.size} webhooks`);
|
||||
|
||||
// Deliver to all webhooks in parallel
|
||||
const deliveryPromises = Array.from(this.webhooks.values()).map((webhook) => ({
|
||||
webhook,
|
||||
promise: this.deliverToWebhook(webhook, webhookEvent),
|
||||
}));
|
||||
|
||||
const handleSettled = (results: PromiseSettledResult<WebhookDeliveryResult>[]) => {
|
||||
results.forEach((result, i) => {
|
||||
if (result.status === 'rejected') {
|
||||
const webhook = deliveryPromises[i]?.webhook;
|
||||
if (webhook) {
|
||||
logger.error(
|
||||
`Webhook delivery failed for ${webhook.id}: ${result.reason instanceof Error ? result.reason.message : String(result.reason)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// For testing purposes, we can await this if needed
|
||||
if (process.env.NODE_ENV === 'test') {
|
||||
const results = await Promise.allSettled(deliveryPromises.map((p) => p.promise));
|
||||
handleSettled(results);
|
||||
} else {
|
||||
// Fire-and-forget in production
|
||||
Promise.allSettled(deliveryPromises.map((p) => p.promise)).then(handleSettled);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deliver an event to a specific webhook with retry logic
|
||||
*/
|
||||
private async deliverToWebhook(
|
||||
webhook: WebhookConfig,
|
||||
event: DextoWebhookEvent
|
||||
): Promise<WebhookDeliveryResult> {
|
||||
const startTime = Date.now();
|
||||
let lastError: Error | undefined;
|
||||
let lastStatusCode: number | undefined;
|
||||
|
||||
for (let attempt = 1; attempt <= this.deliveryOptions.maxRetries; attempt++) {
|
||||
try {
|
||||
const result = await this.sendWebhookRequest(webhook, event, attempt);
|
||||
if (result.success) {
|
||||
return result;
|
||||
}
|
||||
// Don't duplicate "HTTP xxx:" prefix if it's already in the error message
|
||||
lastError = new Error(result.error || `HTTP ${result.statusCode}`);
|
||||
lastStatusCode = result.statusCode;
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error(String(error));
|
||||
logger.warn(
|
||||
`Webhook delivery attempt ${attempt}/${this.deliveryOptions.maxRetries} failed for ${webhook.id}: ${lastError.message}`
|
||||
);
|
||||
}
|
||||
|
||||
// Wait before retry (exponential backoff with jitter)
|
||||
if (attempt < this.deliveryOptions.maxRetries) {
|
||||
// Use shorter delays in test environment for faster tests
|
||||
const baseDelay = process.env.NODE_ENV === 'test' ? 1 : 1000;
|
||||
const exp = baseDelay * Math.pow(2, attempt - 1);
|
||||
const jitter = exp * 0.2 * Math.random(); // ±20%
|
||||
const backoffMs = Math.min(exp + jitter, 10000);
|
||||
await new Promise((resolve) => setTimeout(resolve, backoffMs));
|
||||
}
|
||||
}
|
||||
|
||||
// All attempts failed
|
||||
const totalTime = Date.now() - startTime;
|
||||
const result: WebhookDeliveryResult = {
|
||||
success: false,
|
||||
error: lastError?.message || 'Unknown error',
|
||||
responseTime: totalTime,
|
||||
attempt: this.deliveryOptions.maxRetries,
|
||||
...(lastStatusCode !== undefined && { statusCode: lastStatusCode }),
|
||||
};
|
||||
|
||||
logger.error(
|
||||
`Webhook delivery failed after ${this.deliveryOptions.maxRetries} attempts for ${webhook.id}: ${result.error}`
|
||||
);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send HTTP request to webhook endpoint
|
||||
*/
|
||||
private async sendWebhookRequest(
|
||||
webhook: WebhookConfig,
|
||||
event: DextoWebhookEvent,
|
||||
attempt: number
|
||||
): Promise<WebhookDeliveryResult> {
|
||||
const startTime = Date.now();
|
||||
const payload = JSON.stringify(event);
|
||||
|
||||
// Prepare headers
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
'User-Agent': 'DextoAgent/1.0',
|
||||
'X-Dexto-Event-Type': event.type,
|
||||
'X-Dexto-Event-Id': event.id,
|
||||
'X-Dexto-Delivery-Attempt': attempt.toString(),
|
||||
};
|
||||
|
||||
// Add signature if secret is provided
|
||||
if (webhook.secret && this.deliveryOptions.includeSignature) {
|
||||
const signature = this.generateSignature(payload, webhook.secret);
|
||||
headers['X-Dexto-Signature-256'] = signature;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await this.fetchFn(webhook.url, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: payload,
|
||||
signal: AbortSignal.timeout(this.deliveryOptions.timeout),
|
||||
});
|
||||
|
||||
const responseTime = Date.now() - startTime;
|
||||
const success = response.ok;
|
||||
|
||||
const result: WebhookDeliveryResult = {
|
||||
success,
|
||||
statusCode: response.status,
|
||||
responseTime,
|
||||
attempt,
|
||||
};
|
||||
|
||||
if (!success) {
|
||||
result.error = `HTTP ${response.status}: ${response.statusText}`;
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
`Webhook delivery ${success ? 'succeeded' : 'failed'} for ${webhook.id}: ${response.status} in ${responseTime}ms`
|
||||
);
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
const responseTime = Date.now() - startTime;
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
responseTime,
|
||||
attempt,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate HMAC signature for webhook verification
|
||||
*/
|
||||
private generateSignature(payload: string, secret: string): string {
|
||||
const hmac = crypto.createHmac('sha256', secret);
|
||||
hmac.update(payload, 'utf8');
|
||||
return `sha256=${hmac.digest('hex')}`;
|
||||
}
|
||||
}
|
||||
100
dexto/packages/server/src/events/webhook-types.ts
Normal file
100
dexto/packages/server/src/events/webhook-types.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import type { AgentEventMap, AgentEventName } from '@dexto/core';
|
||||
|
||||
/**
 * TODO: temporarily DUPE OF cli
 * Webhook configuration interface
 * Represents a registered webhook endpoint
 */
export interface WebhookConfig {
    /** Unique identifier for the webhook */
    id: string;
    /** The URL to send webhook events to */
    url: string;
    /** Optional secret for signature verification (HMAC of the payload) */
    secret?: string;
    /** When the webhook was registered */
    createdAt: Date;
    /** Optional description for the webhook */
    description?: string;
}

/**
 * Webhook event payload interface
 * Mirrors Stripe.Event structure for familiar developer experience
 */
export interface DextoWebhookEvent<T extends AgentEventName = AgentEventName> {
    /** Unique identifier for this webhook event */
    id: string;
    /** The type of event - provides IDE autocomplete */
    type: T;
    /** The event data payload - typed based on event type via AgentEventMap */
    data: AgentEventMap[T];
    /** When the event was created (ISO 8601 timestamp string) */
    created: string;
    /** API version for future compatibility */
    apiVersion: string;
}

/**
 * Webhook delivery attempt result
 */
export interface WebhookDeliveryResult {
    /** Whether the delivery was successful */
    success: boolean;
    /** HTTP status code received (absent when the request never completed) */
    statusCode?: number;
    /** Error message if delivery failed */
    error?: string;
    /** Response time in milliseconds */
    responseTime: number;
    /** Number of delivery attempts */
    attempt: number;
}

/**
 * Webhook registration request body
 */
export interface WebhookRegistrationRequest {
    /** The URL to send webhook events to */
    url: string;
    /** Optional secret for signature verification */
    secret?: string;
    /** Optional description for the webhook */
    description?: string;
}

/**
 * Webhook test event payload
 */
export interface WebhookTestEvent extends DextoWebhookEvent<'tools:available-updated'> {
    /** Indicates this is a test event */
    test: true;
}

/**
 * Type-safe webhook handler function signature
 * Provides autocomplete for event types and typed data payloads
 */
export type WebhookHandler<T extends AgentEventName = AgentEventName> = (
    event: DextoWebhookEvent<T>
) => Promise<void> | void;

/**
 * Webhook handler mapping for type-safe event routing
 * Provides IDE autocomplete for event names like Stripe webhooks
 */
export type WebhookEventHandlers = {
    [K in AgentEventName]?: WebhookHandler<K>;
};

/**
 * Webhook delivery options
 * All fields optional; defaults are applied by the delivering subscriber.
 */
export interface WebhookDeliveryOptions {
    /** Maximum number of retry attempts */
    maxRetries?: number;
    /** Timeout in milliseconds */
    timeout?: number;
    /** Include signature verification header */
    includeSignature?: boolean;
}
|
||||
@@ -0,0 +1,244 @@
|
||||
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
|
||||
import { createTestAgent, startTestServer, httpRequest, type TestServer } from './test-fixtures.js';
|
||||
import { DextoAgent } from '@dexto/core';
|
||||
import { AgentFactory } from '@dexto/agent-management';
|
||||
import type { CreateDextoAppOptions } from '../index.js';
|
||||
|
||||
// Integration suite for the agent-management and agent-config HTTP routes.
// A real test server is started once with a mocked AgentFactory.listAgents and
// an agentsContext whose closures share mutable switching state (activeAgent,
// activeAgentId, isSwitching) set up in beforeAll.
describe('Hono API Integration Tests - Agent Routes', () => {
    let testServer: TestServer | undefined;
    let initialAgent: DextoAgent;
    // Shape mirrors the installed-agent entries returned by AgentFactory.listAgents.
    let mockAgents: Array<{
        id: string;
        name: string;
        description: string;
        author: string;
        tags: string[];
        type: 'builtin' | 'custom';
    }> = [];

    beforeAll(async () => {
        initialAgent = await createTestAgent();

        // Mock AgentFactory.listAgents to return test agents
        mockAgents = [
            {
                id: 'test-agent-1',
                name: 'Test Agent 1',
                description: 'First test agent',
                author: 'Test Author',
                tags: ['test'],
                type: 'builtin' as const,
            },
            {
                id: 'test-agent-2',
                name: 'Test Agent 2',
                description: 'Second test agent',
                author: 'Test Author',
                tags: ['test'],
                type: 'builtin' as const,
            },
        ];

        vi.spyOn(AgentFactory, 'listAgents').mockResolvedValue({
            installed: mockAgents,
            available: [],
        });

        // Create agentsContext with switching functions.
        // These locals are captured by the closures below and mutated on switch.
        let activeAgent = initialAgent;
        let activeAgentId = 'test-agent-1';
        let isSwitching = false;

        const agentsContext: CreateDextoAppOptions['agentsContext'] = {
            switchAgentById: async (id: string) => {
                // Reject concurrent switches; isSwitching acts as a simple mutex.
                if (isSwitching) throw new Error('Agent switch in progress');
                isSwitching = true;
                try {
                    // Create a new test agent instance (no need to use AgentFactory.createAgent in tests)
                    const newAgent = await createTestAgent();
                    await newAgent.start();
                    // Stop the old agent only after the new one is running.
                    if (activeAgent.isStarted()) {
                        await activeAgent.stop();
                    }
                    activeAgent = newAgent;
                    activeAgentId = id;
                    return { id, name: mockAgents.find((a) => a.id === id)?.name ?? id };
                } finally {
                    isSwitching = false;
                }
            },
            switchAgentByPath: async (filePath: string) => {
                if (isSwitching) throw new Error('Agent switch in progress');
                isSwitching = true;
                try {
                    const newAgent = await createTestAgent();
                    await newAgent.start();
                    if (activeAgent.isStarted()) {
                        await activeAgent.stop();
                    }
                    activeAgent = newAgent;
                    // Synthetic id derived from the path; good enough for route tests.
                    activeAgentId = `agent-from-${filePath}`;
                    return { id: activeAgentId, name: 'Agent from Path' };
                } finally {
                    isSwitching = false;
                }
            },
            resolveAgentInfo: async (id: string) => {
                const agent = mockAgents.find((a) => a.id === id);
                return {
                    id,
                    name: agent?.name ?? id,
                };
            },
            ensureAgentAvailable: () => {
                if (isSwitching) throw new Error('Agent switch in progress');
                if (!activeAgent.isStarted()) throw new Error('Agent not started');
            },
            getActiveAgentId: () => activeAgentId,
        };

        testServer = await startTestServer(initialAgent, undefined, agentsContext);
    });

    afterAll(async () => {
        vi.restoreAllMocks();
        if (testServer) {
            await testServer.cleanup();
        }
    });

    describe('Agent Management Routes', () => {
        it('GET /api/agents returns list of agents', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/agents');
            expect(res.status).toBe(200);
            expect(Array.isArray((res.body as { installed: unknown[] }).installed)).toBe(true);
            expect(
                (res.body as { installed: Array<{ id: string }> }).installed.length
            ).toBeGreaterThan(0);
        });

        it('GET /api/agents/current returns current agent', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/agents/current');
            expect(res.status).toBe(200);
            expect((res.body as { id: string }).id).toBeDefined();
        });

        it('POST /api/agents/switch validates input', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Empty body must be rejected by request validation.
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/agents/switch', {});
            expect(res.status).toBeGreaterThanOrEqual(400);
        });

        it('POST /api/agents/switch switches agent by ID', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Note: Agent switching requires updating getAgent() closure which is complex
            // For now, we test the endpoint accepts valid input
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/agents/switch', {
                id: 'test-agent-2',
            });
            // May return 400 if validation fails or 200 if switch succeeds
            // The actual switch logic is complex and requires getAgent() to be dynamic
            expect([200, 400]).toContain(res.status);
            if (res.status === 200) {
                const body = res.body as { switched: boolean; id: string; name: string };
                expect(body.switched).toBe(true);
                expect(body.id).toBe('test-agent-2');
                expect(typeof body.name).toBe('string');
            }
        });

        it('POST /api/agents/validate-name validates agent name', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/agents/validate-name', {
                id: 'valid-agent-name-that-does-not-exist',
            });
            expect(res.status).toBe(200);
            const body = res.body as { valid: boolean; message?: string };
            expect(body.valid).toBe(true);
        });

        it('POST /api/agents/validate-name rejects invalid names', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/agents/validate-name', {
                id: 'test-agent-1', // This conflicts with our mock
            });
            expect(res.status).toBe(200);
            const body = res.body as { valid: boolean; conflict?: string; message?: string };
            expect(body.valid).toBe(false);
            expect(body.conflict).toBeDefined();
        });
    });

    describe('Agent Config Routes', () => {
        // Note: Agent path/config routes require agent to have configPath set
        // These are skipped in test environment as we use in-memory agents
        it.skip('GET /api/agent/path returns agent path', async () => {
            // Requires agent with configPath - test agents don't have this
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/agent/path');
            expect(res.status).toBe(200);
            const body = res.body as {
                path: string;
                relativePath: string;
                name: string;
                isDefault: boolean;
            };
            expect(typeof body.path).toBe('string');
            expect(typeof body.relativePath).toBe('string');
            expect(typeof body.name).toBe('string');
            expect(typeof body.isDefault).toBe('boolean');
        });

        it.skip('GET /api/agent/config returns agent config', async () => {
            // Requires agent with configPath - test agents don't have this
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/agent/config');
            expect(res.status).toBe(200);
            const body = res.body as { config: unknown; path: string; lastModified?: unknown };
            expect(body.config).toBeDefined();
            expect(typeof body.path).toBe('string');
        });

        it('GET /api/agent/config/export exports config', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/agent/config/export');
            expect(res.status).toBe(200);
            // Export returns YAML text, not JSON
            expect(res.headers['content-type']).toContain('yaml');
            expect(typeof res.text).toBe('string');
            expect(res.text.length).toBeGreaterThan(0);
        });

        it('POST /api/agent/validate validates config', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/agent/validate', {
                yaml: 'systemPrompt: "You are a helpful assistant."\ngreeting: Hello\nllm:\n  provider: openai\n  model: gpt-5\n  apiKey: sk-test-key-for-validation',
            });
            expect(res.status).toBe(200);
            const body = res.body as { valid: boolean; errors?: unknown[]; warnings?: unknown[] };
            expect(body.valid).toBe(true);
            // errors may be undefined or empty array
            expect(
                body.errors === undefined ||
                    (Array.isArray(body.errors) && body.errors.length === 0)
            ).toBe(true);
        });

        it('POST /api/agent/validate rejects invalid config', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/agent/validate', {
                yaml: 'invalid: yaml: content: [',
            });
            expect(res.status).toBe(200);
            const body = res.body as { valid: boolean; errors: unknown[]; warnings: unknown[] };
            expect(body.valid).toBe(false);
            expect(Array.isArray(body.errors)).toBe(true);
            expect(body.errors.length).toBeGreaterThan(0);
            // Each error entry carries a machine-readable code and a human message.
            const firstError = body.errors[0] as { code: string; message: string };
            expect(typeof firstError.code).toBe('string');
            expect(typeof firstError.message).toBe('string');
        });
    });
});
|
||||
706
dexto/packages/server/src/hono/__tests__/api.integration.test.ts
Normal file
706
dexto/packages/server/src/hono/__tests__/api.integration.test.ts
Normal file
@@ -0,0 +1,706 @@
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import { TextDecoder } from 'node:util';
|
||||
import type { StreamingEvent } from '@dexto/core';
|
||||
import {
|
||||
createTestAgent,
|
||||
startTestServer,
|
||||
httpRequest,
|
||||
type TestServer,
|
||||
expectResponseStructure,
|
||||
validators,
|
||||
} from './test-fixtures.js';
|
||||
|
||||
describe('Hono API Integration Tests', () => {
|
||||
let testServer: TestServer | undefined;
|
||||
|
||||
beforeAll(async () => {
|
||||
const agent = await createTestAgent();
|
||||
testServer = await startTestServer(agent);
|
||||
}, 30000); // 30 second timeout for server startup
|
||||
|
||||
afterAll(async () => {
|
||||
if (testServer) {
|
||||
await testServer.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
describe('Health', () => {
|
||||
it('GET /health returns OK', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'GET', '/health');
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.text).toBe('OK');
|
||||
});
|
||||
});
|
||||
|
||||
describe('LLM Routes', () => {
|
||||
it('GET /api/llm/current returns current LLM config', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'GET', '/api/llm/current');
|
||||
expect(res.status).toBe(200);
|
||||
expectResponseStructure(res.body, {
|
||||
config: validators.object,
|
||||
});
|
||||
const config = (
|
||||
res.body as {
|
||||
config: {
|
||||
provider: string;
|
||||
model: string;
|
||||
displayName?: string;
|
||||
};
|
||||
}
|
||||
).config;
|
||||
expect(config.provider).toBe('openai');
|
||||
expect(config.model).toBe('gpt-5-nano');
|
||||
expect(typeof config.displayName === 'string' || config.displayName === undefined).toBe(
|
||||
true
|
||||
);
|
||||
});
|
||||
|
||||
it('GET /api/llm/current with sessionId returns session-specific config', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
// Create a session first
|
||||
const createRes = await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
|
||||
sessionId: 'test-session-llm',
|
||||
});
|
||||
expect(createRes.status).toBe(201);
|
||||
|
||||
const res = await httpRequest(
|
||||
testServer.baseUrl,
|
||||
'GET',
|
||||
'/api/llm/current?sessionId=test-session-llm'
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
expect((res.body as { config: unknown }).config).toBeDefined();
|
||||
});
|
||||
|
||||
it('GET /api/llm/catalog returns LLM catalog', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'GET', '/api/llm/catalog');
|
||||
expect(res.status).toBe(200);
|
||||
expectResponseStructure(res.body, {
|
||||
providers: validators.object,
|
||||
});
|
||||
const providers = (res.body as { providers: Record<string, unknown> }).providers;
|
||||
expect(Object.keys(providers).length).toBeGreaterThan(0);
|
||||
// Validate provider structure
|
||||
const firstProvider = Object.values(providers)[0] as {
|
||||
models: unknown;
|
||||
};
|
||||
expect(firstProvider).toBeDefined();
|
||||
expect(typeof firstProvider === 'object').toBe(true);
|
||||
});
|
||||
|
||||
it('POST /api/llm/switch validates input', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'POST', '/api/llm/switch', {});
|
||||
expect(res.status).toBeGreaterThanOrEqual(400);
|
||||
});
|
||||
|
||||
it('POST /api/llm/switch with model update succeeds', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'POST', '/api/llm/switch', {
|
||||
model: 'gpt-5',
|
||||
});
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Sessions Routes', () => {
|
||||
it('GET /api/sessions returns empty list initially', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'GET', '/api/sessions');
|
||||
expect(res.status).toBe(200);
|
||||
expectResponseStructure(res.body, {
|
||||
sessions: validators.array,
|
||||
});
|
||||
const sessions = (res.body as { sessions: unknown[] }).sessions;
|
||||
// May have sessions from previous tests in integration suite
|
||||
expect(sessions.length).toBeGreaterThanOrEqual(0);
|
||||
});
|
||||
|
||||
it('POST /api/sessions creates a new session', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
|
||||
sessionId: 'test-session-1',
|
||||
});
|
||||
expect(res.status).toBe(201);
|
||||
expectResponseStructure(res.body, {
|
||||
session: validators.object,
|
||||
});
|
||||
const session = (
|
||||
res.body as {
|
||||
session: {
|
||||
id: string;
|
||||
createdAt: number | null;
|
||||
lastActivity: number | null;
|
||||
messageCount: number;
|
||||
title: string | null;
|
||||
};
|
||||
}
|
||||
).session;
|
||||
expect(session.id).toBe('test-session-1');
|
||||
expect(typeof session.messageCount).toBe('number');
|
||||
expect(session.createdAt === null || typeof session.createdAt === 'number').toBe(true);
|
||||
});
|
||||
|
||||
it('GET /api/sessions/:id returns session details', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
// Create session first
|
||||
await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
|
||||
sessionId: 'test-session-details',
|
||||
});
|
||||
|
||||
const res = await httpRequest(
|
||||
testServer.baseUrl,
|
||||
'GET',
|
||||
'/api/sessions/test-session-details'
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
expect((res.body as { session: { id: string } }).session.id).toBe(
|
||||
'test-session-details'
|
||||
);
|
||||
});
|
||||
|
||||
it('GET /api/sessions/:id returns 404 for non-existent session', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(
|
||||
testServer.baseUrl,
|
||||
'GET',
|
||||
'/api/sessions/non-existent-session'
|
||||
);
|
||||
expect(res.status).toBe(404);
|
||||
});
|
||||
|
||||
it('GET /api/sessions/:id/load validates and returns session info', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
// Create session first
|
||||
await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
|
||||
sessionId: 'test-session-load',
|
||||
});
|
||||
|
||||
const res = await httpRequest(
|
||||
testServer.baseUrl,
|
||||
'GET',
|
||||
'/api/sessions/test-session-load/load'
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.body).toHaveProperty('session');
|
||||
expect((res.body as { session: { id: string } }).session.id).toBe('test-session-load');
|
||||
});
|
||||
|
||||
it('GET /api/sessions/:id/history returns session history', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
// Create session first
|
||||
await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
|
||||
sessionId: 'test-session-history',
|
||||
});
|
||||
|
||||
const res = await httpRequest(
|
||||
testServer.baseUrl,
|
||||
'GET',
|
||||
'/api/sessions/test-session-history/history'
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
expect(Array.isArray((res.body as { history: unknown[] }).history)).toBe(true);
|
||||
});
|
||||
|
||||
it('DELETE /api/sessions/:id deletes session', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
// Create session first
|
||||
await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
|
||||
sessionId: 'test-session-delete',
|
||||
});
|
||||
|
||||
const res = await httpRequest(
|
||||
testServer.baseUrl,
|
||||
'DELETE',
|
||||
'/api/sessions/test-session-delete'
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
|
||||
// Verify deletion
|
||||
const getRes = await httpRequest(
|
||||
testServer.baseUrl,
|
||||
'GET',
|
||||
'/api/sessions/test-session-delete'
|
||||
);
|
||||
expect(getRes.status).toBe(404);
|
||||
});
|
||||
});
|
||||
|
||||
    // Full-text search endpoints over stored messages and sessions.
    describe('Search Routes', () => {
        it('GET /api/search/messages requires query parameter', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Missing ?q= must be rejected as a bad request
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/search/messages');
            expect(res.status).toBe(400);
        });

        it('GET /api/search/messages with query returns results', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/search/messages?q=test');
            expect(res.status).toBe(200);
            // The results array may be empty, but the field must be present
            expect((res.body as { results: unknown[] }).results).toBeDefined();
        });

        it('GET /api/search/sessions requires query parameter', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/search/sessions');
            expect(res.status).toBe(400);
        });

        it('GET /api/search/sessions with query returns results', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/search/sessions?q=test');
            expect(res.status).toBe(200);
            expect((res.body as { results: unknown[] }).results).toBeDefined();
        });
    });
|
||||
|
||||
    // CRUD coverage for the /api/memory endpoints.
    describe('Memory Routes', () => {
        it('GET /api/memory returns empty list initially', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/memory');
            expect(res.status).toBe(200);
            expect(Array.isArray((res.body as { memories: unknown[] }).memories)).toBe(true);
        });

        it('POST /api/memory creates a memory', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/memory', {
                content: 'Test memory content',
                tags: ['test'],
            });
            // 201 Created with the stored memory echoed back (including its id)
            expect(res.status).toBe(201);
            expect((res.body as { memory: { id: string } }).memory.id).toBeDefined();
            expect((res.body as { memory: { content: string } }).memory.content).toBe(
                'Test memory content'
            );
        });

        it('POST /api/memory validates required fields', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // An empty payload must be rejected with a 4xx/5xx status
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/memory', {});
            expect(res.status).toBeGreaterThanOrEqual(400);
        });

        it('GET /api/memory/:id returns memory details', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Create memory first
            const createRes = await httpRequest(testServer.baseUrl, 'POST', '/api/memory', {
                content: 'Memory to retrieve',
                tags: ['test'],
            });
            const memoryId = (createRes.body as { memory: { id: string } }).memory.id;

            const res = await httpRequest(testServer.baseUrl, 'GET', `/api/memory/${memoryId}`);
            expect(res.status).toBe(200);
            expect((res.body as { memory: { id: string } }).memory.id).toBe(memoryId);
        });

        it('PUT /api/memory/:id updates memory', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Create memory first
            const createRes = await httpRequest(testServer.baseUrl, 'POST', '/api/memory', {
                content: 'Original content',
                tags: ['test'],
            });
            const memoryId = (createRes.body as { memory: { id: string } }).memory.id;

            const res = await httpRequest(testServer.baseUrl, 'PUT', `/api/memory/${memoryId}`, {
                content: 'Updated content',
            });
            expect(res.status).toBe(200);
            expect((res.body as { memory: { content: string } }).memory.content).toBe(
                'Updated content'
            );
        });

        it('DELETE /api/memory/:id deletes memory', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Create memory first
            const createRes = await httpRequest(testServer.baseUrl, 'POST', '/api/memory', {
                content: 'Memory to delete',
                tags: ['test'],
            });
            const memoryId = (createRes.body as { memory: { id: string } }).memory.id;

            const res = await httpRequest(testServer.baseUrl, 'DELETE', `/api/memory/${memoryId}`);
            expect(res.status).toBe(200);

            // Verify deletion: a follow-up GET must fail (404 or similar)
            const getRes = await httpRequest(testServer.baseUrl, 'GET', `/api/memory/${memoryId}`);
            expect(getRes.status).toBeGreaterThanOrEqual(400);
        });
    });
|
||||
|
||||
describe('MCP Routes', () => {
|
||||
it('GET /api/mcp/servers returns server list', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'GET', '/api/mcp/servers');
|
||||
expect(res.status).toBe(200);
|
||||
expect(typeof res.body).toBe('object');
|
||||
});
|
||||
|
||||
it('POST /api/mcp/servers validates input', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'POST', '/api/mcp/servers', {});
|
||||
expect(res.status).toBeGreaterThanOrEqual(400);
|
||||
});
|
||||
});
|
||||
|
||||
    // Read-only prompt catalog endpoints.
    describe('Prompts Routes', () => {
        it('GET /api/prompts returns prompt list', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/prompts');
            expect(res.status).toBe(200);
            expect(typeof res.body).toBe('object');
        });

        it('GET /api/prompts/:name returns prompt details', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(
                testServer.baseUrl,
                'GET',
                '/api/prompts/non-existent-prompt'
            );
            // May return 404 or empty result depending on implementation
            expect([200, 404]).toContain(res.status);
        });
    });
|
||||
|
||||
describe('Resources Routes', () => {
|
||||
it('GET /api/resources returns resource list', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'GET', '/api/resources');
|
||||
expect(res.status).toBe(200);
|
||||
expect(typeof res.body).toBe('object');
|
||||
});
|
||||
});
|
||||
|
||||
    // Webhook registration endpoints.
    describe('Webhooks Routes', () => {
        it('GET /api/webhooks returns webhook list', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'GET', '/api/webhooks');
            expect(res.status).toBe(200);
            expect(Array.isArray((res.body as { webhooks: unknown[] }).webhooks)).toBe(true);
        });

        it('POST /api/webhooks validates URL', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // A malformed URL must be rejected
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/webhooks', {
                url: 'not-a-url',
            });
            expect(res.status).toBeGreaterThanOrEqual(400);
        });

        it('POST /api/webhooks creates webhook', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/webhooks', {
                url: 'https://example.com/webhook',
            });
            expect(res.status).toBe(201);
        });
    });
|
||||
|
||||
describe('Greeting Route', () => {
|
||||
it('GET /api/greeting returns greeting', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'GET', '/api/greeting');
|
||||
expect(res.status).toBe(200);
|
||||
// greeting might be undefined if not set in config, which is valid
|
||||
expect(res.body).toBeDefined();
|
||||
expect(
|
||||
typeof (res.body as { greeting?: unknown }).greeting === 'string' ||
|
||||
(res.body as { greeting?: unknown }).greeting === undefined
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('GET /api/greeting with sessionId returns session-specific greeting', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
// Create session first
|
||||
await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
|
||||
sessionId: 'test-session-greeting',
|
||||
});
|
||||
|
||||
const res = await httpRequest(
|
||||
testServer.baseUrl,
|
||||
'GET',
|
||||
'/api/greeting?sessionId=test-session-greeting'
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
// greeting might be undefined if not set in config, which is valid
|
||||
expect(res.body).toBeDefined();
|
||||
expect(
|
||||
typeof (res.body as { greeting?: unknown }).greeting === 'string' ||
|
||||
(res.body as { greeting?: unknown }).greeting === undefined
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('A2A Routes', () => {
|
||||
it('GET /.well-known/agent-card.json returns agent card', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(
|
||||
testServer.baseUrl,
|
||||
'GET',
|
||||
'/.well-known/agent-card.json'
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
expect((res.body as { name: unknown }).name).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
    // Message submission endpoints, including the SSE streaming variant.
    describe('Message Routes', () => {
        it('POST /api/message validates input', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/message', {});
            expect(res.status).toBeGreaterThanOrEqual(400);
        });

        it('POST /api/message-sync validates input', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/message-sync', {});
            expect(res.status).toBeGreaterThanOrEqual(400);
        });

        it('POST /api/reset resets conversation', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Create session first
            await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
                sessionId: 'test-session-reset',
            });
            const res = await httpRequest(testServer.baseUrl, 'POST', '/api/reset', {
                sessionId: 'test-session-reset',
            });
            expect(res.status).toBe(200);
        });

        it('POST /api/message-stream returns SSE stream directly', async () => {
            if (!testServer) throw new Error('Test server not initialized');

            const sessionId = 'stream-session';
            await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', { sessionId });

            // Stub agent.stream so the test controls the exact event sequence
            // without invoking a real LLM; restored in the finally block below.
            const agent = testServer.agent;
            const originalStream = agent.stream;
            const fakeEvents: StreamingEvent[] = [
                {
                    name: 'llm:thinking',
                    sessionId,
                },
                {
                    name: 'llm:chunk',
                    content: 'hello',
                    chunkType: 'text',
                    isComplete: false,
                    sessionId,
                },
                {
                    name: 'llm:response',
                    content: 'hello',
                    tokenUsage: { inputTokens: 1, outputTokens: 1, totalTokens: 2 },
                    sessionId,
                    provider: 'openai',
                    model: 'test-model',
                },
            ];

            agent.stream = async function (
                _message: string,
                _options
            ): Promise<AsyncIterableIterator<StreamingEvent>> {
                async function* generator() {
                    for (const event of fakeEvents) {
                        yield event;
                    }
                }
                return generator();
            } as typeof agent.stream;

            try {
                // POST to /api/message-stream - response IS the SSE stream
                const response = await fetch(`${testServer.baseUrl}/api/message-stream`, {
                    method: 'POST',
                    headers: { 'Content-Type': 'application/json' },
                    body: JSON.stringify({
                        sessionId,
                        content: 'Say hello',
                    }),
                });

                expect(response.status).toBe(200);
                expect(response.headers.get('content-type')).toBe('text/event-stream');

                const reader = response.body?.getReader();
                if (!reader) throw new Error('Response does not contain a readable body');

                // Read chunks until the terminal llm:response event appears,
                // capped at 50 reads so a misbehaving stream cannot hang the test.
                const decoder = new TextDecoder();
                let received = '';
                let chunks = 0;
                while (chunks < 50) {
                    const { done, value } = await reader.read();
                    if (done) {
                        break;
                    }
                    chunks++;
                    received += decoder.decode(value, { stream: true });
                    if (received.includes('event: llm:response')) {
                        break;
                    }
                }

                await reader.cancel();

                expect(received).toContain('event: llm:thinking');
                expect(received).toContain('event: llm:response');
            } finally {
                // Always restore the real stream implementation
                agent.stream = originalStream;
            }
        });
    });
|
||||
|
||||
    // Per-session message queue endpoints.
    describe('Queue Routes', () => {
        it('GET /api/queue/:sessionId returns empty queue initially', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Create session first
            await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
                sessionId: 'test-queue-session',
            });

            const res = await httpRequest(
                testServer.baseUrl,
                'GET',
                '/api/queue/test-queue-session'
            );
            expect(res.status).toBe(200);
            expect((res.body as { messages: unknown[]; count: number }).messages).toEqual([]);
            expect((res.body as { count: number }).count).toBe(0);
        });

        it('GET /api/queue/:sessionId returns 404 for non-existent session', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const res = await httpRequest(
                testServer.baseUrl,
                'GET',
                '/api/queue/non-existent-queue-session'
            );
            expect(res.status).toBe(404);
        });

        it('POST /api/queue/:sessionId queues a message', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Create session first
            await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
                sessionId: 'test-queue-post-session',
            });

            const res = await httpRequest(
                testServer.baseUrl,
                'POST',
                '/api/queue/test-queue-post-session',
                { content: 'Hello from queue' }
            );
            // 201 with a queue receipt: id plus a 1-based position
            expect(res.status).toBe(201);
            expect((res.body as { queued: boolean }).queued).toBe(true);
            expect((res.body as { id: string }).id).toBeDefined();
            expect((res.body as { position: number }).position).toBe(1);

            // Verify message is in queue
            const getRes = await httpRequest(
                testServer.baseUrl,
                'GET',
                '/api/queue/test-queue-post-session'
            );
            expect((getRes.body as { count: number }).count).toBe(1);
        });

        it('POST /api/queue/:sessionId validates input', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Create session first
            await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
                sessionId: 'test-queue-validate-session',
            });

            const res = await httpRequest(
                testServer.baseUrl,
                'POST',
                '/api/queue/test-queue-validate-session',
                {} // Empty body should fail validation
            );
            expect(res.status).toBeGreaterThanOrEqual(400);
        });

        it('DELETE /api/queue/:sessionId/:messageId removes a queued message', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            // Unique session id avoids cross-test interference on re-runs
            const sessionId = `queue-delete-msg-${Date.now()}`;

            // Create session and queue a message
            const createRes = await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
                sessionId,
            });
            expect(createRes.status).toBe(201);

            const queueRes = await httpRequest(
                testServer.baseUrl,
                'POST',
                `/api/queue/${sessionId}`,
                { content: 'Message to delete' }
            );
            expect(queueRes.status).toBe(201);
            const messageId = (queueRes.body as { id: string }).id;

            // Delete the message
            const res = await httpRequest(
                testServer.baseUrl,
                'DELETE',
                `/api/queue/${sessionId}/${messageId}`
            );
            expect(res.status).toBe(200);
            expect((res.body as { removed: boolean }).removed).toBe(true);

            // Verify queue is empty
            const getRes = await httpRequest(testServer.baseUrl, 'GET', `/api/queue/${sessionId}`);
            expect((getRes.body as { count: number }).count).toBe(0);
        });

        it('DELETE /api/queue/:sessionId clears all queued messages', async () => {
            if (!testServer) throw new Error('Test server not initialized');
            const sessionId = `queue-clear-${Date.now()}`;

            // Create session and queue multiple messages
            const createRes = await httpRequest(testServer.baseUrl, 'POST', '/api/sessions', {
                sessionId,
            });
            expect(createRes.status).toBe(201);

            const q1 = await httpRequest(testServer.baseUrl, 'POST', `/api/queue/${sessionId}`, {
                content: 'Message 1',
            });
            expect(q1.status).toBe(201);
            const q2 = await httpRequest(testServer.baseUrl, 'POST', `/api/queue/${sessionId}`, {
                content: 'Message 2',
            });
            expect(q2.status).toBe(201);

            // Clear the queue
            const res = await httpRequest(testServer.baseUrl, 'DELETE', `/api/queue/${sessionId}`);
            expect(res.status).toBe(200);
            expect((res.body as { cleared: boolean }).cleared).toBe(true);
            // count reports how many messages were removed
            expect((res.body as { count: number }).count).toBe(2);

            // Verify queue is empty
            const getRes = await httpRequest(testServer.baseUrl, 'GET', `/api/queue/${sessionId}`);
            expect((getRes.body as { count: number }).count).toBe(0);
        });
    });
|
||||
|
||||
describe('OpenAPI Schema', () => {
|
||||
it('GET /openapi.json returns OpenAPI schema', async () => {
|
||||
if (!testServer) throw new Error('Test server not initialized');
|
||||
const res = await httpRequest(testServer.baseUrl, 'GET', '/openapi.json');
|
||||
expect(res.status).toBe(200);
|
||||
expect((res.body as { openapi: string }).openapi).toBe('3.0.0');
|
||||
});
|
||||
});
|
||||
});
|
||||
294
dexto/packages/server/src/hono/__tests__/test-fixtures.ts
Normal file
294
dexto/packages/server/src/hono/__tests__/test-fixtures.ts
Normal file
@@ -0,0 +1,294 @@
|
||||
import { DextoAgent, createAgentCard } from '@dexto/core';
|
||||
import type { AgentConfig, AgentCard } from '@dexto/core';
|
||||
import type { Server as HttpServer } from 'node:http';
|
||||
import type { Context } from 'hono';
|
||||
import { createDextoApp } from '../index.js';
|
||||
import type { DextoApp } from '../types.js';
|
||||
import { createNodeServer, type NodeBridgeResult } from '../node/index.js';
|
||||
import type { CreateDextoAppOptions } from '../index.js';
|
||||
|
||||
/**
|
||||
* Test configuration for integration tests
|
||||
* Uses in-memory storage to avoid side effects
|
||||
*/
|
||||
export function createTestAgentConfig(): AgentConfig {
|
||||
return {
|
||||
systemPrompt: 'You are a test assistant.',
|
||||
llm: {
|
||||
provider: 'openai',
|
||||
model: 'gpt-5-nano',
|
||||
apiKey: 'test-key-123', // Mock key for testing
|
||||
maxIterations: 10,
|
||||
},
|
||||
mcpServers: {},
|
||||
storage: {
|
||||
cache: { type: 'in-memory' },
|
||||
database: { type: 'in-memory' },
|
||||
blob: { type: 'local', storePath: '/tmp/test-blobs' },
|
||||
},
|
||||
sessions: {
|
||||
maxSessions: 50, // Increased to accommodate all integration tests
|
||||
sessionTTL: 3600,
|
||||
},
|
||||
toolConfirmation: {
|
||||
mode: 'auto-approve',
|
||||
timeout: 120000,
|
||||
},
|
||||
elicitation: {
|
||||
enabled: false,
|
||||
timeout: 120000,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a real DextoAgent instance with in-memory storage
|
||||
* No mocks - uses real implementations
|
||||
*/
|
||||
export async function createTestAgent(config?: AgentConfig): Promise<DextoAgent> {
|
||||
const agentConfig = config ?? createTestAgentConfig();
|
||||
const agent = new DextoAgent(agentConfig);
|
||||
await agent.start();
|
||||
return agent;
|
||||
}
|
||||
|
||||
/**
 * Test server setup result.
 *
 * Bundles every handle a test needs: the raw Node server, the Hono app, the
 * Node bridge, the agent under test, its agent card, and connection details.
 * `cleanup` tears it all down (event subscribers, HTTP server, then agent).
 */
export interface TestServer {
    server: HttpServer; // underlying Node HTTP server
    app: DextoApp; // Hono application mounted on the server
    bridge: NodeBridgeResult; // Node <-> Hono bridge handles
    agent: DextoAgent; // agent instance backing the API
    agentCard: AgentCard; // agent card served by the app
    baseUrl: string; // e.g. http://localhost:<port>
    port: number; // bound TCP port
    cleanup: () => Promise<void>; // stops subscribers, server, and agent
}
|
||||
|
||||
/**
|
||||
* Starts a real HTTP server for testing
|
||||
* Uses createDextoApp and createNodeServer directly
|
||||
* @param agent - The agent instance to use
|
||||
* @param port - Optional port (auto-selected if not provided)
|
||||
* @param agentsContext - Optional agent switching context (enables /api/agents routes)
|
||||
*/
|
||||
export async function startTestServer(
|
||||
agent: DextoAgent,
|
||||
port?: number,
|
||||
agentsContext?: CreateDextoAppOptions['agentsContext']
|
||||
): Promise<TestServer> {
|
||||
// Use provided port or find an available port
|
||||
const serverPort = port ?? (await findAvailablePort());
|
||||
|
||||
// Create agent card
|
||||
const agentCard = createAgentCard({
|
||||
defaultName: 'test-agent',
|
||||
defaultVersion: '1.0.0',
|
||||
defaultBaseUrl: `http://localhost:${serverPort}`,
|
||||
});
|
||||
|
||||
// Create getter functions
|
||||
// Note: For agent switching tests, getAgent needs to reference activeAgent from agentsContext
|
||||
// This is handled by the agentsContext implementation itself
|
||||
const getAgent = (_ctx: Context) => agent;
|
||||
const getAgentCard = () => agentCard;
|
||||
|
||||
// Create event subscribers and approval coordinator for test
|
||||
const { WebhookEventSubscriber } = await import('../../events/webhook-subscriber.js');
|
||||
const { A2ASseEventSubscriber } = await import('../../events/a2a-sse-subscriber.js');
|
||||
const { ApprovalCoordinator } = await import('../../approval/approval-coordinator.js');
|
||||
|
||||
const webhookSubscriber = new WebhookEventSubscriber();
|
||||
const sseSubscriber = new A2ASseEventSubscriber();
|
||||
const approvalCoordinator = new ApprovalCoordinator();
|
||||
|
||||
// Subscribe to agent's event bus
|
||||
webhookSubscriber.subscribe(agent.agentEventBus);
|
||||
sseSubscriber.subscribe(agent.agentEventBus);
|
||||
|
||||
// Create Hono app
|
||||
const app = createDextoApp({
|
||||
getAgent,
|
||||
getAgentCard,
|
||||
approvalCoordinator,
|
||||
webhookSubscriber,
|
||||
sseSubscriber,
|
||||
...(agentsContext ? { agentsContext } : {}), // Include agentsContext only if provided
|
||||
});
|
||||
|
||||
// Create Node server bridge
|
||||
const bridge = createNodeServer(app, {
|
||||
getAgent: () => agent,
|
||||
port: serverPort,
|
||||
});
|
||||
|
||||
// Agent card (no updates needed after bridge creation in SSE migration)
|
||||
const updatedAgentCard = createAgentCard({
|
||||
defaultName: 'test-agent',
|
||||
defaultVersion: '1.0.0',
|
||||
defaultBaseUrl: `http://localhost:${serverPort}`,
|
||||
});
|
||||
|
||||
// Start the server
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
bridge.server.listen(serverPort, '0.0.0.0', () => {
|
||||
resolve();
|
||||
});
|
||||
bridge.server.on('error', reject);
|
||||
});
|
||||
|
||||
const baseUrl = `http://localhost:${serverPort}`;
|
||||
|
||||
return {
|
||||
server: bridge.server,
|
||||
app,
|
||||
bridge,
|
||||
agent,
|
||||
agentCard: updatedAgentCard,
|
||||
baseUrl,
|
||||
port: serverPort,
|
||||
cleanup: async () => {
|
||||
// Cleanup subscribers to prevent memory leaks
|
||||
webhookSubscriber.cleanup();
|
||||
sseSubscriber.cleanup();
|
||||
approvalCoordinator.removeAllListeners();
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
bridge.server.close((err) => {
|
||||
if (err) reject(err);
|
||||
else resolve();
|
||||
});
|
||||
});
|
||||
if (agent.isStarted()) {
|
||||
await agent.stop();
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds an available port starting from a random port in the ephemeral range
|
||||
* Uses ports 49152-65535 (IANA ephemeral port range)
|
||||
*/
|
||||
async function findAvailablePort(): Promise<number> {
|
||||
const { createServer } = await import('node:http');
|
||||
// Start from a random port in the ephemeral range to avoid conflicts
|
||||
const startPort = 49152 + Math.floor(Math.random() * 1000);
|
||||
|
||||
for (let port = startPort; port < 65535; port++) {
|
||||
try {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const server = createServer();
|
||||
server.on('error', (err: NodeJS.ErrnoException) => {
|
||||
if (err.code === 'EADDRINUSE') {
|
||||
reject(new Error(`Port ${port} is in use`));
|
||||
} else {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
server.listen(port, () => {
|
||||
server.close(() => resolve());
|
||||
});
|
||||
});
|
||||
return port;
|
||||
} catch {
|
||||
// Port is in use, try next
|
||||
continue;
|
||||
}
|
||||
}
|
||||
throw new Error(`Could not find an available port starting from ${startPort}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to make HTTP requests to the test server
|
||||
*/
|
||||
export async function httpRequest(
|
||||
baseUrl: string,
|
||||
method: string,
|
||||
path: string,
|
||||
body?: unknown,
|
||||
headers?: Record<string, string>
|
||||
): Promise<{
|
||||
status: number;
|
||||
headers: Record<string, string>;
|
||||
body: unknown;
|
||||
text: string;
|
||||
}> {
|
||||
const url = `${baseUrl}${path}`;
|
||||
const options: RequestInit = {
|
||||
method,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...headers,
|
||||
},
|
||||
};
|
||||
|
||||
if (body !== undefined) {
|
||||
options.body = JSON.stringify(body);
|
||||
}
|
||||
|
||||
const response = await fetch(url, options);
|
||||
const text = await response.text();
|
||||
let parsedBody: unknown;
|
||||
try {
|
||||
parsedBody = JSON.parse(text);
|
||||
} catch {
|
||||
parsedBody = text;
|
||||
}
|
||||
|
||||
// Convert Headers to plain object for serialization
|
||||
const headersObject: Record<string, string> = {};
|
||||
response.headers.forEach((value, key) => {
|
||||
headersObject[key] = value;
|
||||
});
|
||||
|
||||
return {
|
||||
status: response.status,
|
||||
headers: headersObject,
|
||||
body: parsedBody,
|
||||
text,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that a response has the expected structure
|
||||
*/
|
||||
export function expectResponseStructure(
|
||||
body: unknown,
|
||||
schema: Record<string, (value: unknown) => boolean>
|
||||
): void {
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
throw new Error(`Expected object response, got ${typeof body}`);
|
||||
}
|
||||
|
||||
const bodyObj = body as Record<string, unknown>;
|
||||
for (const [key, validator] of Object.entries(schema)) {
|
||||
if (!(key in bodyObj)) {
|
||||
throw new Error(`Missing required field: ${key}`);
|
||||
}
|
||||
if (!validator(bodyObj[key])) {
|
||||
throw new Error(
|
||||
`Invalid type for field '${key}': expected validator to return true, got false`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Common response validators
|
||||
*/
|
||||
export const validators = {
|
||||
string: (value: unknown): boolean => typeof value === 'string',
|
||||
number: (value: unknown): boolean => typeof value === 'number',
|
||||
boolean: (value: unknown): boolean => typeof value === 'boolean',
|
||||
array: (value: unknown): boolean => Array.isArray(value),
|
||||
object: (value: unknown): boolean =>
|
||||
typeof value === 'object' && value !== null && !Array.isArray(value),
|
||||
optionalString: (value: unknown): boolean => value === undefined || typeof value === 'string',
|
||||
optionalNumber: (value: unknown): boolean => value === undefined || typeof value === 'number',
|
||||
optionalArray: (value: unknown): boolean => value === undefined || Array.isArray(value),
|
||||
optionalObject: (value: unknown): boolean =>
|
||||
value === undefined ||
|
||||
(typeof value === 'object' && value !== null && !Array.isArray(value)),
|
||||
};
|
||||
305
dexto/packages/server/src/hono/index.ts
Normal file
305
dexto/packages/server/src/hono/index.ts
Normal file
@@ -0,0 +1,305 @@
|
||||
import { OpenAPIHono } from '@hono/zod-openapi';
|
||||
import type { Context } from 'hono';
|
||||
import type { DextoAgent, AgentCard } from '@dexto/core';
|
||||
import { logger } from '@dexto/core';
|
||||
import { createHealthRouter } from './routes/health.js';
|
||||
import { createGreetingRouter } from './routes/greeting.js';
|
||||
import { createMessagesRouter } from './routes/messages.js';
|
||||
import { createLlmRouter } from './routes/llm.js';
|
||||
import { createSessionsRouter } from './routes/sessions.js';
|
||||
import { createSearchRouter } from './routes/search.js';
|
||||
import { createMcpRouter } from './routes/mcp.js';
|
||||
import { createA2aRouter } from './routes/a2a.js';
|
||||
import { createA2AJsonRpcRouter } from './routes/a2a-jsonrpc.js';
|
||||
import { createA2ATasksRouter } from './routes/a2a-tasks.js';
|
||||
import { createWebhooksRouter } from './routes/webhooks.js';
|
||||
import { createPromptsRouter } from './routes/prompts.js';
|
||||
import { createResourcesRouter } from './routes/resources.js';
|
||||
import { createMemoryRouter } from './routes/memory.js';
|
||||
import { createAgentsRouter, type AgentsRouterContext } from './routes/agents.js';
|
||||
import { createApprovalsRouter } from './routes/approvals.js';
|
||||
import { createQueueRouter } from './routes/queue.js';
|
||||
import { createOpenRouterRouter } from './routes/openrouter.js';
|
||||
import { createKeyRouter } from './routes/key.js';
|
||||
import { createToolsRouter } from './routes/tools.js';
|
||||
import { createDiscoveryRouter } from './routes/discovery.js';
|
||||
import { createModelsRouter } from './routes/models.js';
|
||||
import { createDextoAuthRouter } from './routes/dexto-auth.js';
|
||||
import {
|
||||
createStaticRouter,
|
||||
createSpaFallbackHandler,
|
||||
type WebUIRuntimeConfig,
|
||||
} from './routes/static.js';
|
||||
import { WebhookEventSubscriber } from '../events/webhook-subscriber.js';
|
||||
import { A2ASseEventSubscriber } from '../events/a2a-sse-subscriber.js';
|
||||
import { handleHonoError } from './middleware/error.js';
|
||||
import { prettyJsonMiddleware, redactionMiddleware } from './middleware/redaction.js';
|
||||
import { createCorsMiddleware } from './middleware/cors.js';
|
||||
import { createAuthMiddleware } from './middleware/auth.js';
|
||||
import { ApprovalCoordinator } from '../approval/approval-coordinator.js';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { dirname, join } from 'node:path';
|
||||
|
||||
// ESM has no __filename/__dirname, so derive them from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Read the package version once at module load. The `as` cast asserts the
// expected shape without runtime validation.
// NOTE(review): consumer is further down this module (not visible here) —
// presumably version reporting; confirm before relying on this note.
const packageJson = JSON.parse(readFileSync(join(__dirname, '../../package.json'), 'utf-8')) as {
    version: string;
};
|
||||
|
||||
// Dummy context for type inference and runtime fallback
|
||||
// Used when running in single-agent mode (CLI, Docker, etc.) where multi-agent
|
||||
// features aren't available. Agents router is always mounted for consistent API
|
||||
// structure, but will return clear errors if multi-agent endpoints are called.
|
||||
// This ensures type safety across different deployment modes.
|
||||
const dummyAgentsContext: AgentsRouterContext = {
|
||||
switchAgentById: async () => {
|
||||
throw new Error('Multi-agent features not available in single-agent mode');
|
||||
},
|
||||
switchAgentByPath: async () => {
|
||||
throw new Error('Multi-agent features not available in single-agent mode');
|
||||
},
|
||||
resolveAgentInfo: async () => {
|
||||
throw new Error('Multi-agent features not available in single-agent mode');
|
||||
},
|
||||
ensureAgentAvailable: () => {},
|
||||
getActiveAgentId: () => undefined,
|
||||
};
|
||||
|
||||
// Type for async getAgent with context support
|
||||
export type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;

/** Options for createDextoApp. */
export type CreateDextoAppOptions = {
    /**
     * Prefix for API routes. Defaults to '/api'.
     */
    apiPrefix?: string;
    /** Resolves the DextoAgent serving the current request (may be async). */
    getAgent: GetAgentFn;
    /** Supplies the A2A agent card served at the discovery endpoint. */
    getAgentCard: () => AgentCard;
    /** Coordinates tool-approval flows between routers. */
    approvalCoordinator: ApprovalCoordinator;
    /** Forwards agent events to registered webhook endpoints. */
    webhookSubscriber: WebhookEventSubscriber;
    /** Bridges agent events onto A2A SSE streams. */
    sseSubscriber: A2ASseEventSubscriber;
    /** Multi-agent operations; when omitted, a stub that rejects every call is used. */
    agentsContext?: AgentsRouterContext;
    /** Absolute path to WebUI build output. If provided, static files will be served. */
    webRoot?: string;
    /** Runtime configuration to inject into WebUI (analytics, etc.) */
    webUIConfig?: WebUIRuntimeConfig;
    /** Disable built-in auth middleware. Use when you have your own auth layer. */
    disableAuth?: boolean;
};

// Default API prefix as a const literal for type inference
const DEFAULT_API_PREFIX = '/api' as const;
|
||||
|
||||
/**
 * Build the Dexto HTTP application: an OpenAPIHono app with CORS, optional
 * bearer auth, response redaction, centralized error handling, every API
 * router mounted under `apiPrefix`, A2A protocol endpoints at the root,
 * an OpenAPI document at /openapi.json, and (optionally) the static WebUI.
 *
 * The long `.route(...)` chain is deliberate: mounting each router directly
 * on one expression lets Hono accumulate route types for the typed RPC
 * client (see AppType export below). Do not refactor it into a loop.
 */
export function createDextoApp(options: CreateDextoAppOptions) {
    const {
        apiPrefix,
        getAgent,
        getAgentCard,
        approvalCoordinator,
        webhookSubscriber,
        sseSubscriber,
        agentsContext,
        webRoot,
        webUIConfig,
        disableAuth = false,
    } = options;

    // Security check: Warn when auth is disabled
    if (disableAuth) {
        logger.warn(
            `⚠️ Authentication disabled (disableAuth=true). createAuthMiddleware() skipped. Ensure external auth is in place.`
        );
    }

    // strict: false lets '/path' and '/path/' match the same route
    const app = new OpenAPIHono({ strict: false });

    // Global CORS middleware for cross-origin requests (must be first)
    app.use('*', createCorsMiddleware());

    // Global authentication middleware (after CORS, before routes)
    // Can be disabled when using an external auth layer
    if (!disableAuth) {
        app.use('*', createAuthMiddleware());
    }

    // Global error handling for all routes
    app.onError((err, ctx) => handleHonoError(ctx, err));

    // Normalize prefix: strip trailing slashes, treat '' as '/'
    const rawPrefix = apiPrefix ?? DEFAULT_API_PREFIX;
    const normalizedPrefix = rawPrefix === '' ? '/' : rawPrefix.replace(/\/+$/, '') || '/';
    const middlewarePattern = normalizedPrefix === '/' ? '/*' : `${normalizedPrefix}/*`;

    // Pretty-print + redaction only apply under the API prefix (not A2A/static)
    app.use(middlewarePattern, prettyJsonMiddleware);
    app.use(middlewarePattern, redactionMiddleware);

    // Cast to literal type for RPC client type inference (webui uses default '/api')
    // NOTE(review): this cast lies when a custom prefix is supplied — the RPC
    // client types will claim '/api' regardless; runtime routing is still correct.
    const routePrefix = normalizedPrefix as typeof DEFAULT_API_PREFIX;

    // Mount all API routers at the configured prefix for proper type inference
    // Each router is mounted individually so Hono can properly track route types
    const fullApp = app
        // Public health endpoint
        .route('/health', createHealthRouter(getAgent))
        // Follows A2A discovery protocol
        .route('/', createA2aRouter(getAgentCard))
        .route('/', createA2AJsonRpcRouter(getAgent, sseSubscriber))
        .route('/', createA2ATasksRouter(getAgent, sseSubscriber))
        // Add agent-specific routes
        .route(routePrefix, createGreetingRouter(getAgent))
        .route(routePrefix, createMessagesRouter(getAgent, approvalCoordinator))
        .route(routePrefix, createLlmRouter(getAgent))
        .route(routePrefix, createSessionsRouter(getAgent))
        .route(routePrefix, createSearchRouter(getAgent))
        .route(routePrefix, createMcpRouter(getAgent))
        .route(routePrefix, createWebhooksRouter(getAgent, webhookSubscriber))
        .route(routePrefix, createPromptsRouter(getAgent))
        .route(routePrefix, createResourcesRouter(getAgent))
        .route(routePrefix, createMemoryRouter(getAgent))
        .route(routePrefix, createApprovalsRouter(getAgent, approvalCoordinator))
        // Fall back to the rejecting stub when multi-agent context is absent
        .route(routePrefix, createAgentsRouter(getAgent, agentsContext || dummyAgentsContext))
        .route(routePrefix, createQueueRouter(getAgent))
        .route(routePrefix, createOpenRouterRouter())
        .route(routePrefix, createKeyRouter())
        .route(routePrefix, createToolsRouter(getAgent))
        .route(routePrefix, createDiscoveryRouter())
        .route(routePrefix, createModelsRouter())
        .route(routePrefix, createDextoAuthRouter(getAgent));

    // Expose OpenAPI document
    // Current approach uses @hono/zod-openapi's .doc() method for OpenAPI spec generation
    // Alternative: Use openAPIRouteHandler from hono-openapi (third-party) for auto-generation
    // Keeping current approach since:
    // 1. @hono/zod-openapi is official Hono package with first-class support
    // 2. We already generate spec via scripts/generate-openapi-spec.ts to docs/
    // 3. Switching would require adding hono-openapi dependency and migration effort
    // See: https://honohub.dev/docs/openapi/zod#generating-the-openapi-spec
    fullApp.doc('/openapi.json', {
        openapi: '3.0.0',
        info: {
            title: 'Dexto API',
            version: packageJson.version,
            description: 'OpenAPI spec for the Dexto REST API server',
        },
        servers: [
            {
                url: 'http://localhost:3001',
                description: 'Local development server (default port)',
            },
            {
                url: 'http://localhost:{port}',
                description: 'Local development server (custom port)',
                variables: {
                    port: {
                        default: '3001',
                        description: 'API server port',
                    },
                },
            },
        ],
        tags: [
            {
                name: 'system',
                description: 'System health and status endpoints',
            },
            {
                name: 'config',
                description: 'Agent configuration and greeting management',
            },
            {
                name: 'messages',
                description: 'Send messages to the agent and manage conversations',
            },
            {
                name: 'sessions',
                description: 'Create and manage conversation sessions',
            },
            {
                name: 'llm',
                description: 'Configure and switch between LLM providers and models',
            },
            {
                name: 'mcp',
                description: 'Manage Model Context Protocol (MCP) servers and tools',
            },
            {
                name: 'webhooks',
                description: 'Register and manage webhook endpoints for agent events',
            },
            {
                name: 'search',
                description: 'Search through messages and sessions',
            },
            {
                name: 'memory',
                description: 'Store and retrieve agent memories for context',
            },
            {
                name: 'prompts',
                description: 'Manage custom prompts and templates',
            },
            {
                name: 'resources',
                description: 'Access and manage resources from MCP servers and internal providers',
            },
            {
                name: 'agent',
                description: 'Current agent configuration and file operations',
            },
            {
                name: 'agents',
                description: 'Install, switch, and manage agent configurations',
            },
            {
                name: 'queue',
                description: 'Manage message queue for busy sessions',
            },
            {
                name: 'openrouter',
                description: 'OpenRouter model validation and cache management',
            },
            {
                name: 'discovery',
                description: 'Discover available providers and capabilities',
            },
            {
                name: 'tools',
                description:
                    'List and inspect available tools from internal, custom, and MCP sources',
            },
            {
                name: 'models',
                description: 'List and manage local GGUF models and Ollama models',
            },
            {
                name: 'auth',
                description: 'Dexto authentication status and management',
            },
        ],
    });

    // Mount static file router for WebUI if webRoot is provided
    if (webRoot) {
        fullApp.route('/', createStaticRouter(webRoot));
        // SPA fallback: serve index.html for unmatched routes without file extensions
        // Must be registered as notFound handler so it runs AFTER all routes (including /openapi.json)
        // webUIConfig is injected into index.html for runtime configuration (analytics, etc.)
        fullApp.notFound(createSpaFallbackHandler(webRoot, webUIConfig));
    }

    // NOTE: Subscribers and approval handler are wired in CLI layer before agent.start()
    // This ensures proper initialization order and validation
    // We attach webhookSubscriber as a property but don't include it in the return type
    // to preserve Hono's route type inference
    Object.assign(fullApp, { webhookSubscriber });

    return fullApp;
}
|
||||
|
||||
// Export inferred AppType
|
||||
// Routes are now properly typed since they're all mounted directly
|
||||
export type AppType = ReturnType<typeof createDextoApp>;
|
||||
|
||||
// Re-export types needed by CLI
|
||||
export type { WebUIRuntimeConfig } from './routes/static.js';
|
||||
89
dexto/packages/server/src/hono/middleware/auth.ts
Normal file
89
dexto/packages/server/src/hono/middleware/auth.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import type { MiddlewareHandler } from 'hono';
|
||||
import { logger } from '@dexto/core';
|
||||
|
||||
/**
|
||||
* Authentication middleware for API security
|
||||
*
|
||||
* Security model:
|
||||
* 1. Default (no env): Development mode - no auth required
|
||||
* 2. NODE_ENV=production: Production mode - auth required
|
||||
* 3. DEXTO_SERVER_REQUIRE_AUTH=true: Explicit auth enforcement
|
||||
* 4. Public routes (health check, A2A discovery) are always accessible
|
||||
*
|
||||
* Usage:
|
||||
* Development (default):
|
||||
* npm start # No auth needed, existing scripts work
|
||||
*
|
||||
* Production:
|
||||
* DEXTO_SERVER_API_KEY=your-key NODE_ENV=production npm start
|
||||
* Clients must send: Authorization: Bearer <DEXTO_SERVER_API_KEY>
|
||||
*/
|
||||
|
||||
const PUBLIC_ROUTES = ['/health', '/.well-known/agent-card.json', '/openapi.json'];
|
||||
|
||||
export function createAuthMiddleware(): MiddlewareHandler {
|
||||
const apiKey = process.env.DEXTO_SERVER_API_KEY;
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
const requireAuth = process.env.DEXTO_SERVER_REQUIRE_AUTH === 'true'; // Explicit opt-in
|
||||
|
||||
// Log security configuration on startup
|
||||
if (isProduction && !apiKey) {
|
||||
logger.warn(
|
||||
`⚠️ SECURITY WARNING: Running in production mode (NODE_ENV=production) without DEXTO_SERVER_API_KEY. Dexto Server API is UNPROTECTED. Set DEXTO_SERVER_API_KEY environment variable to secure your API.`
|
||||
);
|
||||
}
|
||||
|
||||
return async (ctx, next) => {
|
||||
const path = ctx.req.path;
|
||||
|
||||
// Always allow public routes
|
||||
if (PUBLIC_ROUTES.some((route) => path === route || path.startsWith(route))) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Default behavior: Development mode (no auth required)
|
||||
// This ensures existing dev scripts don't break
|
||||
if (!isProduction && !requireAuth) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Production mode or explicit DEXTO_SERVER_REQUIRE_AUTH=true
|
||||
// Requires API key to be set - fail closed for security
|
||||
if (!apiKey) {
|
||||
return ctx.json(
|
||||
{
|
||||
error: 'Configuration Error',
|
||||
message: requireAuth
|
||||
? 'DEXTO_SERVER_REQUIRE_AUTH=true but DEXTO_SERVER_API_KEY not set. Set DEXTO_SERVER_API_KEY environment variable.'
|
||||
: 'NODE_ENV=production requires DEXTO_SERVER_API_KEY. Set DEXTO_SERVER_API_KEY environment variable to secure your API.',
|
||||
},
|
||||
500
|
||||
);
|
||||
}
|
||||
|
||||
// API key is set - validate it
|
||||
const authHeader = ctx.req.header('Authorization');
|
||||
const providedKey = authHeader?.replace(/^Bearer\s+/i, '');
|
||||
|
||||
if (!providedKey || providedKey !== apiKey) {
|
||||
logger.warn('Unauthorized API access attempt', {
|
||||
path,
|
||||
hasKey: !!providedKey,
|
||||
origin: ctx.req.header('origin'),
|
||||
userAgent: ctx.req.header('user-agent'),
|
||||
});
|
||||
|
||||
return ctx.json(
|
||||
{
|
||||
error: 'Unauthorized',
|
||||
message:
|
||||
'Invalid or missing API key. Provide Authorization: Bearer <api-key> header.',
|
||||
},
|
||||
401
|
||||
);
|
||||
}
|
||||
|
||||
// Valid API key - proceed
|
||||
await next();
|
||||
};
|
||||
}
|
||||
49
dexto/packages/server/src/hono/middleware/cors.ts
Normal file
49
dexto/packages/server/src/hono/middleware/cors.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { cors } from 'hono/cors';
|
||||
import type { MiddlewareHandler } from 'hono';
|
||||
|
||||
/**
|
||||
* CORS middleware that allows:
|
||||
* 1. All localhost/127.0.0.1 origins on any port (for local development)
|
||||
* 2. Custom origins specified in DEXTO_ALLOWED_ORIGINS environment variable
|
||||
* 3. Server-to-server requests with no origin header
|
||||
*/
|
||||
export function createCorsMiddleware(): MiddlewareHandler {
|
||||
return cors({
|
||||
origin: (origin) => {
|
||||
// If no origin header (server-to-server), omit CORS headers
|
||||
// Returning null allows the request without Access-Control-Allow-Origin header
|
||||
// This is compatible with credentials: true (unlike '*')
|
||||
if (!origin) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const originUrl = new URL(origin);
|
||||
const hostname = originUrl.hostname;
|
||||
|
||||
// Always allow localhost/127.0.0.1 on any port for local development
|
||||
if (hostname === 'localhost' || hostname === '127.0.0.1' || hostname === '::1') {
|
||||
return origin;
|
||||
}
|
||||
|
||||
// Check custom allowed origins from environment variable
|
||||
const customOrigins = process.env.DEXTO_ALLOWED_ORIGINS;
|
||||
if (customOrigins) {
|
||||
const allowedList = customOrigins.split(',').map((o) => o.trim());
|
||||
if (allowedList.includes(origin)) {
|
||||
return origin;
|
||||
}
|
||||
}
|
||||
|
||||
// Origin not allowed
|
||||
return null;
|
||||
} catch {
|
||||
// Invalid URL format, reject
|
||||
return null;
|
||||
}
|
||||
},
|
||||
allowMethods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS', 'HEAD'],
|
||||
allowHeaders: ['Content-Type', 'Authorization'],
|
||||
credentials: true,
|
||||
});
|
||||
}
|
||||
129
dexto/packages/server/src/hono/middleware/error.ts
Normal file
129
dexto/packages/server/src/hono/middleware/error.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import { DextoRuntimeError, DextoValidationError, ErrorType, zodToIssues } from '@dexto/core';
|
||||
import { logger } from '@dexto/core';
|
||||
import { ZodError } from 'zod';
|
||||
|
||||
// TODO: Standardize error responses across all server routes.
|
||||
// Currently, routes use inconsistent error response formats:
|
||||
// - Some throw typed errors (approvals.ts, prompts.ts) → middleware handles → standard format
|
||||
// - Others return ad-hoc shapes like { error: '...' } or { ok: false, error: '...' }
|
||||
// (mcp.ts, webhooks.ts, sessions.ts, queue.ts, a2a-tasks.ts)
|
||||
//
|
||||
// Target: All routes should throw DextoRuntimeError/DextoValidationError for errors,
|
||||
// letting this middleware handle conversion to the standard response format.
|
||||
// See also: packages/server/src/hono/schemas/responses.ts for OpenAPI schema limitations.
|
||||
|
||||
export const mapErrorTypeToStatus = (type: ErrorType): number => {
|
||||
switch (type) {
|
||||
case ErrorType.USER:
|
||||
return 400;
|
||||
case ErrorType.PAYMENT_REQUIRED:
|
||||
return 402;
|
||||
case ErrorType.FORBIDDEN:
|
||||
return 403;
|
||||
case ErrorType.NOT_FOUND:
|
||||
return 404;
|
||||
case ErrorType.TIMEOUT:
|
||||
return 408;
|
||||
case ErrorType.CONFLICT:
|
||||
return 409;
|
||||
case ErrorType.RATE_LIMIT:
|
||||
return 429;
|
||||
case ErrorType.SYSTEM:
|
||||
return 500;
|
||||
case ErrorType.THIRD_PARTY:
|
||||
return 502;
|
||||
case ErrorType.UNKNOWN:
|
||||
default:
|
||||
return 500;
|
||||
}
|
||||
};
|
||||
|
||||
export const statusForValidation = (issues: ReturnType<typeof zodToIssues>): number => {
|
||||
const firstError = issues.find((i) => i.severity === 'error');
|
||||
const type = firstError?.type ?? ErrorType.USER;
|
||||
return mapErrorTypeToStatus(type);
|
||||
};
|
||||
|
||||
/**
 * Central error handler wired via app.onError(). Converts known error shapes
 * into the standard Dexto error response, annotated with endpoint + method.
 *
 * Branch order matters: DextoRuntimeError and DextoValidationError serialize
 * themselves; raw ZodErrors are wrapped into a DextoValidationError first;
 * SyntaxError covers invalid JSON bodies; everything else becomes a generic 500.
 *
 * NOTE(review): ctx is typed `any` — see the TODO above about standardizing
 * error responses; tightening to Hono's Context is a follow-up.
 */
export function handleHonoError(ctx: any, err: unknown) {
    // Extract endpoint information for better error context
    const endpoint = ctx.req.path || 'unknown';
    const method = ctx.req.method || 'unknown';

    // Typed runtime error: status derived from its error category
    if (err instanceof DextoRuntimeError) {
        return ctx.json(
            {
                ...err.toJSON(),
                endpoint,
                method,
            },
            mapErrorTypeToStatus(err.type)
        );
    }

    // Typed validation error: status derived from its first error-severity issue
    if (err instanceof DextoValidationError) {
        return ctx.json(
            {
                ...err.toJSON(),
                endpoint,
                method,
            },
            statusForValidation(err.issues)
        );
    }

    // Raw zod error (e.g. from schema.parse): normalize into the typed form first
    if (err instanceof ZodError) {
        const issues = zodToIssues(err);
        const dexErr = new DextoValidationError(issues);
        return ctx.json(
            {
                ...dexErr.toJSON(),
                endpoint,
                method,
            },
            statusForValidation(issues)
        );
    }

    // Some hono specific handlers (e.g., ctx.req.json()) may throw SyntaxError for invalid/empty JSON
    if (err instanceof SyntaxError) {
        return ctx.json(
            {
                code: 'invalid_json',
                message: err.message || 'Invalid JSON body',
                scope: 'agent',
                type: 'user',
                severity: 'error',
                endpoint,
                method,
            },
            400
        );
    }

    // Unknown error: log full details server-side, return a sanitized 500
    const errorMessage = err instanceof Error ? err.message : String(err);
    const errorStack = err instanceof Error ? err.stack : undefined;
    logger.error(
        `Unhandled error in API middleware: ${errorMessage}, endpoint: ${method} ${endpoint}, stack: ${errorStack}, type: ${typeof err}`
    );

    // Only expose error details in development, use generic message in production
    const isDevelopment = process.env.NODE_ENV === 'development';
    const userMessage = isDevelopment
        ? `An unexpected error occurred: ${errorMessage}`
        : 'An unexpected error occurred. Please try again later.';

    return ctx.json(
        {
            code: 'internal_error',
            message: userMessage,
            scope: 'system',
            type: 'system',
            severity: 'error',
            endpoint,
            method,
            // Only include stack traces in development to avoid exposing internals
            ...(isDevelopment && errorStack ? { stack: errorStack } : {}),
        },
        500
    );
}
|
||||
22
dexto/packages/server/src/hono/middleware/redaction.ts
Normal file
22
dexto/packages/server/src/hono/middleware/redaction.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { prettyJSON } from 'hono/pretty-json';
|
||||
import type { MiddlewareHandler } from 'hono';
|
||||
import { redactSensitiveData } from '@dexto/core';
|
||||
|
||||
export const prettyJsonMiddleware = prettyJSON();
|
||||
|
||||
export const redactionMiddleware: MiddlewareHandler = async (ctx, next) => {
|
||||
// TODO: tighten types once Hono exposes typed overrides for ctx.json/ctx.body
|
||||
const originalJson = ctx.json.bind(ctx) as any;
|
||||
ctx.json = ((data: any, status?: any, headers?: any) => {
|
||||
const redacted = redactSensitiveData(data);
|
||||
return originalJson(redacted, status, headers);
|
||||
}) as typeof ctx.json;
|
||||
|
||||
const originalBody = ctx.body.bind(ctx) as any;
|
||||
ctx.body = ((data: any, status?: any, headers?: any) => {
|
||||
const payload = typeof data === 'string' ? redactSensitiveData(data) : data;
|
||||
return originalBody(payload, status, headers);
|
||||
}) as typeof ctx.body;
|
||||
|
||||
await next();
|
||||
};
|
||||
154
dexto/packages/server/src/hono/node/index.ts
Normal file
154
dexto/packages/server/src/hono/node/index.ts
Normal file
@@ -0,0 +1,154 @@
|
||||
import { createServer, type IncomingMessage, type ServerResponse } from 'node:http';
|
||||
import { Readable } from 'node:stream';
|
||||
import type { ReadableStream as NodeReadableStream } from 'stream/web';
|
||||
import type { DextoApp } from '../types.js';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import { logger } from '@dexto/core';
|
||||
import type { WebhookEventSubscriber } from '../../events/webhook-subscriber.js';
|
||||
|
||||
type FetchRequest = globalThis.Request;
|
||||
type FetchBodyInit = globalThis.BodyInit;
|
||||
|
||||
export type NodeBridgeOptions = {
|
||||
getAgent: () => DextoAgent;
|
||||
port?: number;
|
||||
hostname?: string;
|
||||
mcpHandlers?: {
|
||||
handlePost: (
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
body: unknown
|
||||
) => Promise<void> | void;
|
||||
handleGet: (req: IncomingMessage, res: ServerResponse) => Promise<void> | void;
|
||||
} | null;
|
||||
};
|
||||
|
||||
export type NodeBridgeResult = {
|
||||
server: ReturnType<typeof createServer>;
|
||||
webhookSubscriber?: WebhookEventSubscriber;
|
||||
};
|
||||
|
||||
export function createNodeServer(app: DextoApp, options: NodeBridgeOptions): NodeBridgeResult {
|
||||
const { getAgent: _getAgent } = options;
|
||||
const webhookSubscriber = app.webhookSubscriber;
|
||||
|
||||
const server = createServer(async (req, res) => {
|
||||
try {
|
||||
if (options.mcpHandlers && req.url?.startsWith('/mcp')) {
|
||||
if (req.method === 'GET') {
|
||||
await options.mcpHandlers.handleGet(req, res);
|
||||
return;
|
||||
}
|
||||
if (req.method === 'POST') {
|
||||
req.setEncoding('utf8');
|
||||
let body = '';
|
||||
const MAX_BODY_SIZE = 10 * 1024 * 1024; // 10MB limit
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk;
|
||||
if (body.length > MAX_BODY_SIZE) {
|
||||
req.destroy();
|
||||
res.statusCode = 413;
|
||||
res.end('Payload too large');
|
||||
}
|
||||
});
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
const parsed = body.length > 0 ? JSON.parse(body) : undefined;
|
||||
await options.mcpHandlers!.handlePost(req, res, parsed);
|
||||
} catch (err) {
|
||||
logger.error(`Failed to process MCP POST body: ${String(err)}`);
|
||||
res.statusCode = 400;
|
||||
res.end('Invalid JSON body');
|
||||
}
|
||||
});
|
||||
req.on('error', (err: Error) => {
|
||||
logger.error(`Error reading MCP POST body: ${String(err)}`);
|
||||
res.statusCode = 500;
|
||||
res.end('Failed to read request body');
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const request = await toRequest(req);
|
||||
const response = await app.fetch(request);
|
||||
await sendNodeResponse(res, response);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
logger.error(`Unhandled error in Node bridge: ${message}`, { error });
|
||||
res.statusCode = 500;
|
||||
res.end('Internal Server Error');
|
||||
}
|
||||
});
|
||||
|
||||
server.on('close', () => {
|
||||
webhookSubscriber?.cleanup?.();
|
||||
});
|
||||
|
||||
if (typeof options.port === 'number') {
|
||||
const hostname = options.hostname ?? '0.0.0.0';
|
||||
server.listen(options.port, hostname, () => {
|
||||
logger.info(`Hono Node bridge listening on http://${hostname}:${options.port}`);
|
||||
});
|
||||
}
|
||||
|
||||
const result: NodeBridgeResult = {
|
||||
server,
|
||||
};
|
||||
|
||||
if (webhookSubscriber) {
|
||||
result.webhookSubscriber = webhookSubscriber;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function toRequest(req: IncomingMessage): Promise<FetchRequest> {
|
||||
const protocol = (req.socket as any)?.encrypted ? 'https' : 'http';
|
||||
const host = req.headers.host ?? 'localhost';
|
||||
const url = new URL(req.url ?? '/', `${protocol}://${host}`);
|
||||
|
||||
const headers = new globalThis.Headers();
|
||||
for (const [key, value] of Object.entries(req.headers)) {
|
||||
if (value === undefined) continue;
|
||||
if (Array.isArray(value)) {
|
||||
value.forEach((entry) => headers.append(key, entry));
|
||||
} else {
|
||||
headers.set(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
const method = req.method ?? 'GET';
|
||||
const body: FetchBodyInit | null =
|
||||
method === 'GET' || method === 'HEAD' ? null : (req as unknown as FetchBodyInit);
|
||||
|
||||
return new globalThis.Request(url, {
|
||||
method,
|
||||
headers,
|
||||
body: body ?? undefined,
|
||||
duplex: body ? 'half' : undefined,
|
||||
} as RequestInit);
|
||||
}
|
||||
|
||||
async function sendNodeResponse(res: ServerResponse, response: Response) {
|
||||
res.statusCode = response.status;
|
||||
response.headers.forEach((value, key) => {
|
||||
if (key.toLowerCase() === 'content-length') {
|
||||
return;
|
||||
}
|
||||
res.setHeader(key, value);
|
||||
});
|
||||
|
||||
if (!response.body) {
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
const webStream = response.body as unknown as NodeReadableStream<any>;
|
||||
const readable = Readable.fromWeb(webStream);
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
readable.on('error', reject);
|
||||
res.on('finish', resolve);
|
||||
readable.pipe(res);
|
||||
});
|
||||
}
|
||||
176
dexto/packages/server/src/hono/routes/a2a-jsonrpc.ts
Normal file
176
dexto/packages/server/src/hono/routes/a2a-jsonrpc.ts
Normal file
@@ -0,0 +1,176 @@
|
||||
/**
|
||||
* A2A JSON-RPC HTTP Endpoint
|
||||
*
|
||||
* Exposes A2A Protocol JSON-RPC methods via HTTP POST endpoint.
|
||||
* Implements JSON-RPC 2.0 over HTTP transport.
|
||||
*/
|
||||
|
||||
import { Hono } from 'hono';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import { JsonRpcServer } from '../../a2a/jsonrpc/server.js';
|
||||
import { A2AMethodHandlers } from '../../a2a/jsonrpc/methods.js';
|
||||
import { logger } from '@dexto/core';
|
||||
import type { A2ASseEventSubscriber } from '../../events/a2a-sse-subscriber.js';
|
||||
import { a2aToInternalMessage } from '../../a2a/adapters/message.js';
|
||||
import type { Context } from 'hono';
|
||||
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;
|
||||
|
||||
/**
|
||||
* Create A2A JSON-RPC router
|
||||
*
|
||||
* Exposes POST /jsonrpc endpoint for A2A Protocol communication.
|
||||
*
|
||||
* Usage:
|
||||
* ```typescript
|
||||
* const a2aRouter = createA2AJsonRpcRouter(getAgent, sseSubscriber);
|
||||
* app.route('/', a2aRouter);
|
||||
* ```
|
||||
*
|
||||
* Example request:
|
||||
* ```json
|
||||
* POST /jsonrpc
|
||||
* Content-Type: application/json
|
||||
*
|
||||
* {
|
||||
* "jsonrpc": "2.0",
|
||||
* "method": "message/send",
|
||||
* "params": {
|
||||
* "message": {
|
||||
* "role": "user",
|
||||
* "parts": [{ "kind": "text", "text": "Hello!" }],
|
||||
* "messageId": "msg-123",
|
||||
* "kind": "message"
|
||||
* }
|
||||
* },
|
||||
* "id": 1
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* @param getAgent Function to get current DextoAgent instance
|
||||
* @param sseSubscriber SSE event subscriber for streaming methods
|
||||
* @returns Hono router with /jsonrpc endpoint
|
||||
*/
|
||||
export function createA2AJsonRpcRouter(getAgent: GetAgentFn, sseSubscriber: A2ASseEventSubscriber) {
    const app = new Hono();

    /**
     * POST /jsonrpc - JSON-RPC 2.0 endpoint
     *
     * Accepts JSON-RPC requests (single or batch) and returns JSON-RPC responses.
     * For streaming methods (message/stream), returns SSE stream.
     */
    app.post('/jsonrpc', async (ctx) => {
        try {
            const agent = await getAgent(ctx);
            const requestBody = await ctx.req.json();

            // Check if this is a streaming method request
            // (batches never stream: a batch body is an array)
            const isStreamingRequest =
                !Array.isArray(requestBody) && requestBody.method === 'message/stream';

            if (isStreamingRequest) {
                // Handle streaming request with SSE
                logger.info('JSON-RPC streaming request: message/stream');

                const params = requestBody.params;
                if (!params?.message) {
                    // JSON-RPC "Invalid params" error code
                    return ctx.json({
                        jsonrpc: '2.0',
                        error: {
                            code: -32602,
                            message: 'Invalid params: message is required',
                        },
                        id: requestBody.id,
                    });
                }

                // Create or get session; the session id doubles as the task id
                // NOTE(review): taskId may be undefined here — presumably
                // createSession then generates a fresh id; confirm in DextoAgent.
                const taskId = params.message.taskId;
                const session = await agent.createSession(taskId);

                // Create SSE stream
                const stream = sseSubscriber.createStream(session.id);

                // Start agent processing in background; results flow out over
                // the SSE stream, so the returned promise is fire-and-forget
                const { text, image, file } = a2aToInternalMessage(params.message);
                agent.run(text, image, file, session.id).catch((error) => {
                    logger.error(`Error in streaming task ${session.id}: ${error}`);
                });

                logger.info(`JSON-RPC SSE stream opened for task ${session.id}`);

                // Return stream with SSE headers
                // (X-Accel-Buffering: no disables proxy buffering, e.g. nginx)
                return new Response(stream, {
                    headers: {
                        'Content-Type': 'text/event-stream',
                        'Cache-Control': 'no-cache',
                        Connection: 'keep-alive',
                        'X-Accel-Buffering': 'no',
                    },
                });
            }

            // Handle regular (non-streaming) JSON-RPC request
            const handlers = new A2AMethodHandlers(agent);
            const rpcServer = new JsonRpcServer({
                methods: handlers.getMethods(),
                onError: (error, request) => {
                    logger.error(`JSON-RPC error for method ${request?.method}: ${error.message}`, {
                        error,
                        request,
                    });
                },
            });

            logger.debug(`A2A JSON-RPC request received`, {
                method: Array.isArray(requestBody)
                    ? `batch(${requestBody.length})`
                    : requestBody.method,
            });

            const response = await rpcServer.handle(requestBody);
            return ctx.json(response);
        } catch (error) {
            // NOTE(review): every failure here reports -32700 (Parse error),
            // including post-parse failures — consider -32603 Internal error
            // for non-parse exceptions.
            logger.error(`Failed to process JSON-RPC request: ${error}`, { error });

            return ctx.json({
                jsonrpc: '2.0',
                error: {
                    code: -32700,
                    message: 'Parse error',
                    data: error instanceof Error ? error.message : String(error),
                },
                id: null,
            });
        }
    });

    /**
     * GET /jsonrpc - Info endpoint (non-standard, for debugging)
     *
     * Returns information about available JSON-RPC methods.
     */
    app.get('/jsonrpc', async (ctx) => {
        const agent = await getAgent(ctx);
        const handlers = new A2AMethodHandlers(agent);

        return ctx.json({
            service: 'A2A JSON-RPC 2.0',
            version: '0.3.0',
            endpoint: '/jsonrpc',
            methods: Object.keys(handlers.getMethods()),
            usage: {
                method: 'POST',
                contentType: 'application/json',
                example: {
                    jsonrpc: '2.0',
                    method: 'agent.getInfo',
                    params: {},
                    id: 1,
                },
            },
        });
    });

    return app;
}
|
||||
423
dexto/packages/server/src/hono/routes/a2a-tasks.ts
Normal file
423
dexto/packages/server/src/hono/routes/a2a-tasks.ts
Normal file
@@ -0,0 +1,423 @@
|
||||
/**
|
||||
* A2A REST Task API (Compliant with A2A Protocol v0.3.0)
|
||||
*
|
||||
* RESTful HTTP+JSON endpoints for A2A Protocol task management.
|
||||
* Follows the /v1/ URL pattern per A2A specification.
|
||||
*
|
||||
* Endpoint mappings per spec:
|
||||
* - POST /v1/message:send → message/send
|
||||
* - GET /v1/tasks/{id} → tasks/get
|
||||
* - GET /v1/tasks → tasks/list
|
||||
* - POST /v1/tasks/{id}:cancel → tasks/cancel
|
||||
*/
|
||||
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import { A2AMethodHandlers } from '../../a2a/jsonrpc/methods.js';
|
||||
import { logger } from '@dexto/core';
|
||||
import type { A2ASseEventSubscriber } from '../../events/a2a-sse-subscriber.js';
|
||||
import { a2aToInternalMessage } from '../../a2a/adapters/message.js';
|
||||
import type { Context } from 'hono';
|
||||
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;
|
||||
|
||||
// Request/Response Schemas for OpenAPI (using A2A-compliant schema)
|
||||
|
||||
// Zod schema for an A2A message part. Discriminated on `kind` so each variant
// (text | file | data) is validated independently; every variant carries an
// open `metadata` bag for protocol extensions.
const PartSchema = z
    .discriminatedUnion('kind', [
        z.object({
            kind: z.literal('text').describe('Part type discriminator'),
            text: z.string().describe('Text content'),
            metadata: z.record(z.any()).optional().describe('Extension metadata'),
        }),
        z.object({
            kind: z.literal('file').describe('Part type discriminator'),
            // A file part carries either inline base64 bytes or a URI reference
            // (two alternative shapes), never a merged form.
            file: z
                .union([
                    z.object({
                        bytes: z.string().describe('Base64-encoded file data'),
                        name: z.string().optional().describe('File name'),
                        mimeType: z.string().optional().describe('MIME type'),
                    }),
                    z.object({
                        uri: z.string().describe('File URI'),
                        name: z.string().optional().describe('File name'),
                        mimeType: z.string().optional().describe('MIME type'),
                    }),
                ])
                .describe('File data (bytes or URI)'),
            metadata: z.record(z.any()).optional().describe('Extension metadata'),
        }),
        z.object({
            kind: z.literal('data').describe('Part type discriminator'),
            data: z.record(z.any()).describe('Structured JSON data'),
            metadata: z.record(z.any()).optional().describe('Extension metadata'),
        }),
    ])
    .describe('Message part (text, file, or data)');

// A2A protocol message envelope: role + ordered parts, plus correlation
// identifiers (messageId is required; taskId/contextId tie the message to an
// existing task or conversation context when present).
const MessageSchema = z
    .object({
        role: z.enum(['user', 'agent']).describe('Message role'),
        parts: z.array(PartSchema).describe('Message parts'),
        messageId: z.string().describe('Unique message identifier'),
        taskId: z.string().optional().describe('Associated task ID'),
        contextId: z.string().optional().describe('Context identifier'),
        metadata: z.record(z.any()).optional().describe('Extension metadata'),
        extensions: z.array(z.string()).optional().describe('Extension identifiers'),
        referenceTaskIds: z.array(z.string()).optional().describe('Referenced task IDs'),
        kind: z.literal('message').describe('Object type discriminator'),
    })
    .describe('A2A Protocol message');

// Task lifecycle status. The state enum mirrors the A2A v0.3.0 task states;
// an optional message and timestamp may accompany a state change.
const TaskStatusSchema = z
    .object({
        state: z
            .enum([
                'submitted',
                'working',
                'input-required',
                'completed',
                'canceled',
                'failed',
                'rejected',
                'auth-required',
                'unknown',
            ])
            .describe('Current task state'),
        message: MessageSchema.optional().describe('Status message'),
        timestamp: z.string().optional().describe('ISO 8601 timestamp'),
    })
    .describe('Task status');

// A2A task object: identity, grouping context, current status, and optional
// conversation history / produced artifacts.
const TaskSchema = z
    .object({
        id: z.string().describe('Unique task identifier'),
        contextId: z.string().describe('Context identifier across related tasks'),
        status: TaskStatusSchema.describe('Current task status'),
        history: z.array(MessageSchema).optional().describe('Conversation history'),
        artifacts: z.array(z.any()).optional().describe('Task artifacts'),
        metadata: z.record(z.any()).optional().describe('Extension metadata'),
        kind: z.literal('task').describe('Object type discriminator'),
    })
    .describe('A2A Protocol task');

// Request body for message/send and message:stream. Only `message` is
// required; `configuration` tunes delivery (output modes, history limit,
// webhook push notifications, blocking behavior).
const MessageSendRequestSchema = z
    .object({
        message: MessageSchema.describe('Message to send to the agent'),
        configuration: z
            .object({
                acceptedOutputModes: z
                    .array(z.string())
                    .optional()
                    .describe('Accepted output MIME types'),
                historyLength: z.number().optional().describe('Limit conversation history length'),
                pushNotificationConfig: z
                    .object({
                        url: z.string().describe('Push notification webhook URL'),
                        headers: z
                            .record(z.string())
                            .optional()
                            .describe('HTTP headers for webhook'),
                    })
                    .optional()
                    .describe('Push notification configuration'),
                blocking: z.boolean().optional().describe('Wait for task completion'),
            })
            .optional()
            .describe('Optional configuration'),
        metadata: z.record(z.any()).optional().describe('Optional metadata'),
    })
    .describe('Request body for message/send');
||||
|
||||
const TaskListQuerySchema = z
|
||||
.object({
|
||||
contextId: z.string().optional().describe('Filter by context ID'),
|
||||
status: z
|
||||
.enum([
|
||||
'submitted',
|
||||
'working',
|
||||
'input-required',
|
||||
'completed',
|
||||
'canceled',
|
||||
'failed',
|
||||
'rejected',
|
||||
'auth-required',
|
||||
'unknown',
|
||||
])
|
||||
.optional()
|
||||
.describe('Filter by task state'),
|
||||
pageSize: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((v) => {
|
||||
if (!v) return undefined;
|
||||
const n = Number.parseInt(v, 10);
|
||||
// Enforce 1-100 range, return undefined for invalid values
|
||||
if (Number.isNaN(n) || n < 1 || n > 100) return undefined;
|
||||
return n;
|
||||
})
|
||||
.describe('Number of results (1-100, default 50)'),
|
||||
pageToken: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Pagination token (not yet implemented - reserved for future use)'),
|
||||
historyLength: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((v) => {
|
||||
if (!v) return undefined;
|
||||
const n = Number.parseInt(v, 10);
|
||||
return Number.isNaN(n) ? undefined : n;
|
||||
})
|
||||
.describe('Limit history items (not yet implemented - reserved for future use)'),
|
||||
lastUpdatedAfter: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((v) => {
|
||||
if (!v) return undefined;
|
||||
const n = Number.parseInt(v, 10);
|
||||
return Number.isNaN(n) ? undefined : n;
|
||||
})
|
||||
.describe('Unix timestamp filter (not yet implemented - reserved for future use)'),
|
||||
includeArtifacts: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((v) => v === 'true')
|
||||
.describe(
|
||||
'Include artifacts in response (not yet implemented - reserved for future use)'
|
||||
),
|
||||
})
|
||||
.describe('Query parameters for tasks/list');
|
||||
|
||||
/**
 * Create A2A REST Task router
 *
 * Exposes RESTful endpoints for A2A task management per v0.3.0 spec.
 *
 * Endpoints:
 * - POST /v1/message:send - Send message to agent
 * - POST /v1/message:stream - Send message with SSE streaming
 * - GET /v1/tasks/{id} - Get task
 * - GET /v1/tasks - List tasks
 * - POST /v1/tasks/{id}:cancel - Cancel task
 *
 * @param getAgent Function to get current DextoAgent instance
 * @param sseSubscriber SSE event subscriber for streaming
 * @returns OpenAPIHono router with REST task endpoints
 */
export function createA2ATasksRouter(getAgent: GetAgentFn, sseSubscriber: A2ASseEventSubscriber) {
    const app = new OpenAPIHono();

    // --- OpenAPI route definitions -------------------------------------------
    // Each createRoute() call only declares the route (path, schemas, docs);
    // handlers are attached later in the .openapi() chain at the bottom so the
    // returned router keeps full request/response type inference.

    // POST /v1/message:send - Send message to agent
    const messageSendRoute = createRoute({
        method: 'post',
        path: '/v1/message:send',
        summary: 'Send Message',
        description: 'Send a message to the agent (A2A message/send)',
        tags: ['a2a'],
        request: {
            body: {
                content: {
                    'application/json': {
                        schema: MessageSendRequestSchema,
                    },
                },
            },
        },
        responses: {
            200: {
                description: 'Task with agent response',
                content: {
                    'application/json': {
                        schema: TaskSchema,
                    },
                },
            },
        },
    });

    // GET /v1/tasks - List tasks
    const listTasksRoute = createRoute({
        method: 'get',
        path: '/v1/tasks',
        summary: 'List Tasks',
        description: 'List all A2A tasks with optional filtering (A2A tasks/list)',
        tags: ['a2a'],
        request: {
            query: TaskListQuerySchema,
        },
        responses: {
            200: {
                description: 'Task list',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                tasks: z.array(TaskSchema).describe('Array of tasks'),
                                totalSize: z.number().describe('Total number of tasks'),
                                pageSize: z.number().describe('Number of tasks in this page'),
                                nextPageToken: z.string().describe('Token for next page'),
                            })
                            .describe('Response body for tasks/list'),
                    },
                },
            },
        },
    });

    // GET /v1/tasks/{id} - Get a specific task
    const getTaskRoute = createRoute({
        method: 'get',
        path: '/v1/tasks/{id}',
        summary: 'Get Task',
        description: 'Retrieve a specific task by ID (A2A tasks/get)',
        tags: ['a2a'],
        request: {
            params: z.object({
                id: z.string().describe('Task ID'),
            }),
        },
        responses: {
            200: {
                description: 'Task details',
                content: {
                    'application/json': {
                        schema: TaskSchema,
                    },
                },
            },
            404: {
                description: 'Task not found',
            },
        },
    });

    // POST /v1/tasks/{id}:cancel - Cancel task
    const cancelTaskRoute = createRoute({
        method: 'post',
        path: '/v1/tasks/{id}:cancel',
        summary: 'Cancel Task',
        description: 'Cancel a running task (A2A tasks/cancel)',
        tags: ['a2a'],
        request: {
            params: z.object({
                id: z.string().describe('Task ID'),
            }),
        },
        responses: {
            200: {
                description: 'Task cancelled',
                content: {
                    'application/json': {
                        schema: TaskSchema,
                    },
                },
            },
            404: {
                description: 'Task not found',
            },
        },
    });

    // POST /v1/message:stream - Send message with streaming response.
    // Registered as a plain Hono handler (not .openapi) because the SSE
    // response cannot be described by a zod response schema; validation is
    // performed manually below with the same request schema.
    app.post('/v1/message:stream', async (ctx) => {
        try {
            const body = await ctx.req.json();

            // Validate with Zod schema
            const parseResult = MessageSendRequestSchema.safeParse(body);
            if (!parseResult.success) {
                return ctx.json(
                    {
                        error: 'Invalid request body',
                        details: parseResult.error.issues,
                    },
                    400
                );
            }

            const validatedBody = parseResult.data;
            logger.info('REST: message/stream', { hasMessage: !!validatedBody.message });

            // Create or get session
            // NOTE(review): the message's taskId (possibly undefined) is passed
            // straight to createSession — presumably undefined creates a fresh
            // session; confirm behavior when a client reuses an existing taskId.
            const taskId = validatedBody.message.taskId;
            const agent = await getAgent(ctx);
            const session = await agent.createSession(taskId);

            // Create SSE stream
            const stream = sseSubscriber.createStream(session.id);

            // Start agent processing in background
            // Note: Errors are automatically broadcast via the event bus (llm:error event)
            const { text, image, file } = a2aToInternalMessage(validatedBody.message as any);
            agent.run(text, image, file, session.id).catch((error) => {
                logger.error(`Error in streaming task ${session.id}: ${error}`);
            });

            logger.info(`REST SSE stream opened for task ${session.id}`);

            // Return stream with SSE headers
            // X-Accel-Buffering: no disables proxy buffering (nginx) so events flush immediately.
            return new Response(stream, {
                headers: {
                    'Content-Type': 'text/event-stream',
                    'Cache-Control': 'no-cache',
                    Connection: 'keep-alive',
                    'X-Accel-Buffering': 'no',
                },
            });
        } catch (error) {
            logger.error(`Failed to handle message:stream: ${error}`);
            return ctx.json({ error: 'Failed to initiate streaming' }, 500);
        }
    });

    // Attach typed handlers. The chained .openapi(...) return value is what
    // callers receive, so route types flow into the exported router type.
    return app
        .openapi(messageSendRoute, async (ctx) => {
            const handlers = new A2AMethodHandlers(await getAgent(ctx));
            const body = ctx.req.valid('json');

            logger.info('REST: message/send', { hasMessage: !!body.message });

            // Type cast required: Zod infers readonly modifiers and exactOptionalPropertyTypes differs
            // from mutable handler types. Structurally compatible at runtime.
            const result = await handlers.messageSend(body as any);

            return ctx.json(result as any);
        })
        .openapi(listTasksRoute, async (ctx) => {
            const handlers = new A2AMethodHandlers(await getAgent(ctx));
            const query = ctx.req.valid('query');

            // Type cast required: Zod infers readonly modifiers and exactOptionalPropertyTypes differs
            // from mutable handler types. Structurally compatible at runtime.
            const result = await handlers.tasksList(query as any);

            return ctx.json(result);
        })
        .openapi(getTaskRoute, async (ctx) => {
            const handlers = new A2AMethodHandlers(await getAgent(ctx));
            const { id } = ctx.req.valid('param');

            try {
                const task = await handlers.tasksGet({ id });
                return ctx.json(task);
            } catch (error) {
                // NOTE(review): any failure here maps to 404, including internal
                // errors — confirm tasksGet only throws not-found conditions.
                logger.warn(`Task ${id} not found: ${error}`);
                return ctx.json({ error: 'Task not found' }, 404);
            }
        })
        .openapi(cancelTaskRoute, async (ctx) => {
            const handlers = new A2AMethodHandlers(await getAgent(ctx));
            const { id } = ctx.req.valid('param');

            logger.info(`REST: tasks/cancel ${id}`);

            try {
                const task = await handlers.tasksCancel({ id });
                return ctx.json(task);
            } catch (error) {
                // NOTE(review): as above, all cancel failures surface as 404 —
                // a task that exists but cannot be canceled is indistinguishable
                // from a missing one. Verify against A2A spec expectations.
                logger.error(`Failed to cancel task ${id}: ${error}`);
                return ctx.json({ error: 'Task not found' }, 404);
            }
        });
}
|
||||
11
dexto/packages/server/src/hono/routes/a2a.ts
Normal file
11
dexto/packages/server/src/hono/routes/a2a.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { Hono } from 'hono';
|
||||
import type { AgentCard } from '@dexto/core';
|
||||
|
||||
export function createA2aRouter(getAgentCard: () => AgentCard) {
|
||||
const app = new Hono();
|
||||
app.get('/.well-known/agent-card.json', (ctx) => {
|
||||
const agentCard = getAgentCard();
|
||||
return ctx.json(agentCard, 200);
|
||||
});
|
||||
return app;
|
||||
}
|
||||
956
dexto/packages/server/src/hono/routes/agents.ts
Normal file
956
dexto/packages/server/src/hono/routes/agents.ts
Normal file
@@ -0,0 +1,956 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import {
|
||||
logger,
|
||||
safeStringify,
|
||||
AgentConfigSchema,
|
||||
type LLMProvider,
|
||||
zodToIssues,
|
||||
} from '@dexto/core';
|
||||
import {
|
||||
getPrimaryApiKeyEnvVar,
|
||||
saveProviderApiKey,
|
||||
reloadAgentConfigFromFile,
|
||||
enrichAgentConfig,
|
||||
deriveDisplayName,
|
||||
AgentFactory,
|
||||
} from '@dexto/agent-management';
|
||||
import { stringify as yamlStringify, parse as yamlParse } from 'yaml';
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
import { promises as fs } from 'fs';
|
||||
import { DextoValidationError, AgentErrorCode, ErrorScope, ErrorType } from '@dexto/core';
|
||||
import { AgentRegistryEntrySchema } from '../schemas/responses.js';
|
||||
import type { Context } from 'hono';
|
||||
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;
|
||||
|
||||
/**
 * OpenAPI-safe version of AgentConfigSchema
 *
 * This simplified schema is used ONLY for OpenAPI documentation generation.
 * Runtime validation still uses the full AgentConfigSchema with complete validation.
 *
 * Why: The real AgentConfigSchema uses z.lazy() for CustomToolConfigSchema,
 * which cannot be serialized to OpenAPI JSON by @hono/zod-openapi.
 *
 * See lines 780 and 854 where AgentConfigSchema.safeParse() is used for actual validation.
 */
const AgentConfigSchemaForOpenAPI = z
    .record(z.any())
    .describe(
        'Complete agent configuration. See AgentConfig type documentation for full schema details.'
    );

// Identifies an agent either by registry id or (optionally) by an absolute
// file path; .strict() rejects unexpected keys so typos surface as 400s.
const AgentIdentifierSchema = z
    .object({
        id: z
            .string()
            .min(1, 'Agent id is required')
            .describe('Unique agent identifier (e.g., "database-agent")'),
        path: z
            .string()
            .optional()
            .describe(
                'Optional absolute file path for file-based agents (e.g., "/path/to/agent.yml")'
            ),
    })
    .strict()
    .describe('Agent identifier for switching agents by ID or file path');

// Uninstall request: `force` defaults to false so the active agent is
// protected from accidental removal unless explicitly overridden.
const UninstallAgentSchema = z
    .object({
        id: z
            .string()
            .min(1, 'Agent id is required')
            .describe('Unique agent identifier to uninstall'),
        force: z
            .boolean()
            .default(false)
            .describe('Force uninstall even if agent is currently active'),
    })
    .strict()
    .describe('Request body for uninstalling an agent');
|
||||
|
||||
const CustomAgentInstallSchema = z
|
||||
.object({
|
||||
id: z.string().min(1, 'Agent id is required').describe('Unique agent identifier'),
|
||||
name: z.string().optional().describe('Display name (defaults to derived from id)'),
|
||||
sourcePath: z.string().min(1).describe('Path to agent configuration file or directory'),
|
||||
metadata: z
|
||||
.object({
|
||||
description: z.string().min(1).describe('Human-readable description of the agent'),
|
||||
author: z.string().min(1).describe('Agent author or organization name'),
|
||||
tags: z.array(z.string()).describe('Tags for categorizing the agent'),
|
||||
main: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Main configuration file name within source directory'),
|
||||
})
|
||||
.strict()
|
||||
.describe('Agent metadata including description, author, and tags'),
|
||||
})
|
||||
.strict()
|
||||
.describe('Request body for installing a custom agent from file system')
|
||||
.transform((value) => {
|
||||
const displayName = value.name?.trim() || deriveDisplayName(value.id);
|
||||
return {
|
||||
id: value.id,
|
||||
displayName,
|
||||
sourcePath: value.sourcePath,
|
||||
metadata: value.metadata,
|
||||
};
|
||||
});
|
||||
|
||||
// Request body for creating a custom agent from scratch (UI/API flow).
// Combines registry metadata (id/name/description/author/tags) with the full
// agent configuration; the config is only loosely typed here — see
// AgentConfigSchemaForOpenAPI for why.
const CustomAgentCreateSchema = z
    .object({
        // Registry metadata
        id: z
            .string()
            .min(1, 'Agent ID is required')
            .regex(
                /^[a-z0-9-]+$/,
                'Agent ID must contain only lowercase letters, numbers, and hyphens'
            )
            .describe('Unique agent identifier'),
        name: z.string().min(1, 'Agent name is required').describe('Display name for the agent'),
        description: z
            .string()
            .min(1, 'Description is required')
            .describe('One-line description of the agent'),
        author: z.string().optional().describe('Author or organization'),
        tags: z.array(z.string()).default([]).describe('Tags for discovery'),
        // Full agent configuration
        config: AgentConfigSchemaForOpenAPI.describe('Complete agent configuration'),
    })
    .strict()
    .describe('Request body for creating a new custom agent with full configuration');

// Validation-only request: raw YAML text, checked but never persisted.
const AgentConfigValidateSchema = z
    .object({
        yaml: z.string().describe('YAML agent configuration content to validate'),
    })
    .describe('Request body for validating agent configuration YAML');

// Save request: like validate, but requires non-empty content since it is
// written to disk and applied.
const AgentConfigSaveSchema = z
    .object({
        yaml: z
            .string()
            .min(1, 'YAML content is required')
            .describe('YAML agent configuration content to save'),
    })
    .describe('Request body for saving agent configuration YAML');
|
||||
|
||||
// Response schemas for agent endpoints

// Basic agent identity; both fields are null when no agent is active yet.
const AgentInfoNullableSchema = z
    .object({
        id: z.string().nullable().describe('Agent identifier (null if no active agent)'),
        name: z.string().nullable().describe('Agent display name (null if no active agent)'),
    })
    .strict()
    .describe('Basic agent information (nullable)');

// GET /agents response: locally installed agents, registry-available agents,
// and the currently active one (nullable when none is selected).
const ListAgentsResponseSchema = z
    .object({
        installed: z.array(AgentRegistryEntrySchema).describe('Agents installed locally'),
        available: z.array(AgentRegistryEntrySchema).describe('Agents available from registry'),
        current: AgentInfoNullableSchema.describe('Currently active agent'),
    })
    .strict()
    .describe('List of all agents');

// POST /agents/install response. `installed` is a literal true so the shape
// itself encodes success; failures use error responses instead.
const InstallAgentResponseSchema = z
    .object({
        installed: z.literal(true).describe('Indicates successful installation'),
        id: z.string().describe('Installed agent ID'),
        name: z.string().describe('Installed agent name'),
        type: z.enum(['builtin', 'custom']).describe('Type of agent installed'),
    })
    .strict()
    .describe('Agent installation response');

// POST /agents/switch response; same literal-true success pattern.
const SwitchAgentResponseSchema = z
    .object({
        switched: z.literal(true).describe('Indicates successful agent switch'),
        id: z.string().describe('New active agent ID'),
        name: z.string().describe('New active agent name'),
    })
    .strict()
    .describe('Agent switch response');

// POST /agents/validate-name response; `conflict`/`message` only appear when
// the name is invalid.
const ValidateNameResponseSchema = z
    .object({
        valid: z.boolean().describe('Whether the agent name is valid'),
        conflict: z.string().optional().describe('Type of conflict if name is invalid'),
        message: z.string().optional().describe('Validation message'),
    })
    .strict()
    .describe('Agent name validation result');

// POST /agents/uninstall response.
const UninstallAgentResponseSchema = z
    .object({
        uninstalled: z.literal(true).describe('Indicates successful uninstallation'),
        id: z.string().describe('Uninstalled agent ID'),
    })
    .strict()
    .describe('Agent uninstallation response');

// GET /agent/path response: where the active agent's config file lives.
const AgentPathResponseSchema = z
    .object({
        path: z.string().describe('Absolute path to agent configuration file'),
        relativePath: z.string().describe('Relative path or basename'),
        name: z.string().describe('Agent configuration filename without extension'),
        isDefault: z.boolean().describe('Whether this is the default agent'),
    })
    .strict()
    .describe('Agent file path information');
|
||||
|
||||
const AgentConfigResponseSchema = z
|
||||
.object({
|
||||
yaml: z.string().describe('Raw YAML configuration content'),
|
||||
path: z.string().describe('Absolute path to configuration file'),
|
||||
relativePath: z.string().describe('Relative path or basename'),
|
||||
lastModified: z.date().describe('Last modification timestamp'),
|
||||
warnings: z.array(z.string()).describe('Configuration warnings'),
|
||||
})
|
||||
.strict()
|
||||
.describe('Agent configuration content');
|
||||
|
||||
// POST /agent/config response: reports exactly what happened after a save —
// whether config was hot-reloaded, whether a full restart occurred, and the
// list of individual changes applied.
const SaveConfigResponseSchema = z
    .object({
        ok: z.literal(true).describe('Indicates successful save'),
        path: z.string().describe('Path to saved configuration file'),
        reloaded: z.boolean().describe('Whether configuration was reloaded'),
        restarted: z.boolean().describe('Whether agent was restarted'),
        changesApplied: z.array(z.string()).describe('List of changes that were applied'),
        message: z.string().describe('Success message'),
    })
    .strict()
    .describe('Configuration save result');

/**
 * Callbacks the agents router needs from its host (the server bootstrap).
 * Keeping them injected — rather than imported — lets the host own agent
 * lifecycle state while the router stays stateless.
 */
export type AgentsRouterContext = {
    // Activate an installed agent by registry id; resolves to its identity.
    switchAgentById: (agentId: string) => Promise<{ id: string; name: string }>;
    // Activate a file-based agent by absolute config path.
    switchAgentByPath: (filePath: string) => Promise<{ id: string; name: string }>;
    // Look up display identity (id + name) for an agent id.
    resolveAgentInfo: (agentId: string) => Promise<{ id: string; name: string }>;
    // Throws (or otherwise signals) when no agent is currently available.
    ensureAgentAvailable: () => void;
    // Currently active agent id, or undefined when none is active.
    getActiveAgentId: () => string | undefined;
};
|
||||
|
||||
export function createAgentsRouter(getAgent: GetAgentFn, context: AgentsRouterContext) {
|
||||
const app = new OpenAPIHono();
|
||||
const { switchAgentById, switchAgentByPath, resolveAgentInfo, getActiveAgentId } = context;
|
||||
|
||||
const listRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/agents',
|
||||
summary: 'List Agents',
|
||||
description: 'Retrieves all agents (installed, available, and current active agent)',
|
||||
tags: ['agents'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'List all agents',
|
||||
content: { 'application/json': { schema: ListAgentsResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const currentRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/agents/current',
|
||||
summary: 'Get Current Agent',
|
||||
description: 'Retrieves the currently active agent',
|
||||
tags: ['agents'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Current agent',
|
||||
content: { 'application/json': { schema: AgentInfoNullableSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const installRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/agents/install',
|
||||
summary: 'Install Agent',
|
||||
description: 'Installs an agent from the registry or from a custom source',
|
||||
tags: ['agents'],
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z.union([CustomAgentInstallSchema, AgentIdentifierSchema]),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
201: {
|
||||
description: 'Agent installed',
|
||||
content: { 'application/json': { schema: InstallAgentResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const switchRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/agents/switch',
|
||||
summary: 'Switch Agent',
|
||||
description: 'Switches to a different agent by ID or file path',
|
||||
tags: ['agents'],
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: AgentIdentifierSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Agent switched',
|
||||
content: { 'application/json': { schema: SwitchAgentResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const validateNameRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/agents/validate-name',
|
||||
summary: 'Validate Agent Name',
|
||||
description: 'Checks if an agent ID conflicts with existing agents',
|
||||
tags: ['agents'],
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: AgentIdentifierSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Name validation result',
|
||||
content: { 'application/json': { schema: ValidateNameResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const uninstallRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/agents/uninstall',
|
||||
summary: 'Uninstall Agent',
|
||||
description:
|
||||
'Removes an agent from the system. Custom agents are removed from registry; builtin agents can be reinstalled',
|
||||
tags: ['agents'],
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: UninstallAgentSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Agent uninstalled',
|
||||
content: { 'application/json': { schema: UninstallAgentResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const customCreateRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/agents/custom/create',
|
||||
summary: 'Create Custom Agent',
|
||||
description: 'Creates a new custom agent from scratch via the UI/API',
|
||||
tags: ['agents'],
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: CustomAgentCreateSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
201: {
|
||||
description: 'Custom agent created',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
created: z.literal(true).describe('Creation success indicator'),
|
||||
id: z.string().describe('Agent identifier'),
|
||||
name: z.string().describe('Agent name'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const getPathRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/agent/path',
|
||||
summary: 'Get Agent File Path',
|
||||
description: 'Retrieves the file path of the currently active agent configuration',
|
||||
tags: ['agent'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Agent file path',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: AgentPathResponseSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const getConfigRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/agent/config',
|
||||
summary: 'Get Agent Configuration',
|
||||
description: 'Retrieves the raw YAML configuration of the currently active agent',
|
||||
tags: ['agent'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Agent configuration',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: AgentConfigResponseSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const validateConfigRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/agent/validate',
|
||||
summary: 'Validate Agent Configuration',
|
||||
description: 'Validates YAML agent configuration without saving it',
|
||||
tags: ['agent'],
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: AgentConfigValidateSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Validation result',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
valid: z.boolean().describe('Whether configuration is valid'),
|
||||
errors: z
|
||||
.array(
|
||||
z
|
||||
.object({
|
||||
line: z
|
||||
.number()
|
||||
.int()
|
||||
.optional()
|
||||
.describe('Line number'),
|
||||
column: z
|
||||
.number()
|
||||
.int()
|
||||
.optional()
|
||||
.describe('Column number'),
|
||||
path: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Configuration path'),
|
||||
message: z.string().describe('Error message'),
|
||||
code: z.string().describe('Error code'),
|
||||
})
|
||||
.passthrough()
|
||||
)
|
||||
.describe('Validation errors'),
|
||||
warnings: z
|
||||
.array(
|
||||
z
|
||||
.object({
|
||||
path: z.string().describe('Configuration path'),
|
||||
message: z.string().describe('Warning message'),
|
||||
code: z.string().describe('Warning code'),
|
||||
})
|
||||
.strict()
|
||||
)
|
||||
.describe('Configuration warnings'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const saveConfigRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/agent/config',
|
||||
summary: 'Save Agent Configuration',
|
||||
description: 'Saves and applies YAML agent configuration. Creates backup before saving',
|
||||
tags: ['agent'],
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: AgentConfigSaveSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Configuration saved',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: SaveConfigResponseSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const exportConfigRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/agent/config/export',
|
||||
summary: 'Export Agent Configuration',
|
||||
description: 'Exports the effective agent configuration with sensitive values redacted',
|
||||
tags: ['agent'],
|
||||
request: {
|
||||
query: z.object({
|
||||
sessionId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Session identifier to export session-specific configuration'),
|
||||
}),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Exported configuration',
|
||||
content: { 'application/x-yaml': { schema: z.string() } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
    // Attach all agent-management handlers to the router and return it.
    // Each .openapi() call pairs a route definition (declared above) with its handler.
    return app
        // List installed + available agents, plus the currently active one.
        .openapi(listRoute, async (ctx) => {
            const agents = await AgentFactory.listAgents();
            const currentId = getActiveAgentId() ?? null;
            return ctx.json({
                installed: agents.installed,
                available: agents.available,
                current: currentId ? await resolveAgentInfo(currentId) : { id: null, name: null },
            });
        })
        // Report the currently active agent (null id/name when none is active).
        .openapi(currentRoute, async (ctx) => {
            const currentId = getActiveAgentId() ?? null;
            if (!currentId) {
                return ctx.json({ id: null, name: null });
            }
            return ctx.json(await resolveAgentInfo(currentId));
        })
        // Install either a custom agent (from a local source path) or a registry agent.
        .openapi(installRoute, async (ctx) => {
            const body = ctx.req.valid('json');

            // Check if this is a custom agent installation (has sourcePath and metadata)
            if ('sourcePath' in body && 'metadata' in body) {
                const { id, displayName, sourcePath, metadata } = body as ReturnType<
                    typeof CustomAgentInstallSchema.parse
                >;

                await AgentFactory.installCustomAgent(id, sourcePath, {
                    name: displayName,
                    description: metadata.description,
                    author: metadata.author,
                    tags: metadata.tags,
                });
                return ctx.json(
                    { installed: true as const, id, name: displayName, type: 'custom' as const },
                    201
                );
            } else {
                // Registry agent installation
                const { id } = body as z.output<typeof AgentIdentifierSchema>;
                await AgentFactory.installAgent(id);
                const agentInfo = await resolveAgentInfo(id);
                return ctx.json(
                    {
                        installed: true as const,
                        ...agentInfo,
                        type: 'builtin' as const,
                    },
                    201
                );
            }
        })
        // Switch the active agent, either by installed id or by explicit file path.
        .openapi(switchRoute, async (ctx) => {
            const { id, path: filePath } = ctx.req.valid('json');

            // Route based on presence of path parameter
            const result = filePath ? await switchAgentByPath(filePath) : await switchAgentById(id);

            return ctx.json({ switched: true as const, ...result });
        })
        // Check whether a proposed agent id collides with an installed or registry agent.
        .openapi(validateNameRoute, async (ctx) => {
            const { id } = ctx.req.valid('json');
            const agents = await AgentFactory.listAgents();

            // Check if name exists in installed agents
            const installedAgent = agents.installed.find((a) => a.id === id);
            if (installedAgent) {
                return ctx.json({
                    valid: false,
                    conflict: installedAgent.type,
                    message: `Agent id '${id}' already exists (${installedAgent.type})`,
                });
            }

            // Check if name exists in available agents (registry)
            const availableAgent = agents.available.find((a) => a.id === id);
            if (availableAgent) {
                return ctx.json({
                    valid: false,
                    conflict: availableAgent.type,
                    message: `Agent id '${id}' conflicts with ${availableAgent.type} agent`,
                });
            }

            return ctx.json({ valid: true });
        })
        // Uninstall an agent; `force` is forwarded to AgentFactory untouched.
        .openapi(uninstallRoute, async (ctx) => {
            const { id, force } = ctx.req.valid('json');
            await AgentFactory.uninstallAgent(id, force);
            return ctx.json({ uninstalled: true as const, id });
        })
        // Create a brand-new custom agent from an inline config object.
        // Raw API keys are persisted to the env store and replaced with $VAR references
        // before the config is written to disk.
        .openapi(customCreateRoute, async (ctx) => {
            const { id, name, description, author, tags, config } = ctx.req.valid('json');

            // Handle API key: if it's a raw key, store securely and use env var reference
            const provider: LLMProvider = config.llm.provider;
            let agentConfig = config;

            if (config.llm.apiKey && !config.llm.apiKey.startsWith('$')) {
                // Raw API key provided - store securely and get env var reference
                const meta = await saveProviderApiKey(provider, config.llm.apiKey, process.cwd());
                const apiKeyRef = `$${meta.envVar}`;
                logger.info(
                    `Stored API key securely for ${provider}, using env var: ${meta.envVar}`
                );
                // Update config with env var reference
                agentConfig = {
                    ...config,
                    llm: {
                        ...config.llm,
                        apiKey: apiKeyRef,
                    },
                };
            } else if (!config.llm.apiKey) {
                // No API key provided, use default env var
                agentConfig = {
                    ...config,
                    llm: {
                        ...config.llm,
                        apiKey: `$${getPrimaryApiKeyEnvVar(provider)}`,
                    },
                };
            }

            const yamlContent = yamlStringify(agentConfig);
            // NOTE(review): this logs the full serialized config — presumably safe because raw
            // keys were replaced with $VAR references above, but confirm no other secrets leak.
            logger.info(
                `Creating agent config for ${id}: agentConfig=${safeStringify(agentConfig)}, yamlContent=${yamlContent}`
            );

            // Create temporary file
            const tmpDir = os.tmpdir();
            const tmpFile = path.join(tmpDir, `${id}-${Date.now()}.yml`);
            await fs.writeFile(tmpFile, yamlContent, 'utf-8');

            try {
                // Install the custom agent
                await AgentFactory.installCustomAgent(id, tmpFile, {
                    name,
                    description,
                    author: author || 'Custom',
                    tags: tags || [],
                });

                // Clean up temp file
                await fs.unlink(tmpFile).catch(() => {});

                return ctx.json({ created: true as const, id, name }, 201);
            } catch (installError) {
                // Clean up temp file on error
                await fs.unlink(tmpFile).catch(() => {});
                throw installError;
            }
        })
        // Return the active agent's config file path and derived name parts.
        .openapi(getPathRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const agentPath = agent.getAgentFilePath();

            const relativePath = path.basename(agentPath);
            const ext = path.extname(agentPath);
            const name = path.basename(agentPath, ext);

            return ctx.json({
                path: agentPath,
                relativePath,
                name,
                // 'coding-agent' is treated as the default agent file name.
                isDefault: name === 'coding-agent',
            });
        })
        // Return the raw on-disk YAML (env vars unexpanded) plus file metadata.
        .openapi(getConfigRoute, async (ctx) => {
            const agent = await getAgent(ctx);

            // Get the agent file path being used
            const agentPath = agent.getAgentFilePath();

            // Read raw YAML from file (not expanded env vars)
            const yamlContent = await fs.readFile(agentPath, 'utf-8');

            // Get metadata
            const stats = await fs.stat(agentPath);

            return ctx.json({
                yaml: yamlContent,
                path: agentPath,
                relativePath: path.basename(agentPath),
                lastModified: stats.mtime,
                warnings: [
                    'Environment variables ($VAR) will be resolved at runtime',
                    'API keys should use environment variables',
                ],
            });
        })
        // Dry-run validation of a YAML config: parse, shape-check, enrich, schema-validate.
        // Always responds 200 with { valid, errors, warnings } rather than throwing.
        .openapi(validateConfigRoute, async (ctx) => {
            const { yaml } = ctx.req.valid('json');

            // Parse YAML
            let parsed;
            try {
                parsed = yamlParse(yaml);
            } catch (parseError: any) {
                // linePos is the yaml library's parse-error location, when available.
                return ctx.json({
                    valid: false,
                    errors: [
                        {
                            line: parseError.linePos?.[0]?.line || 1,
                            column: parseError.linePos?.[0]?.col || 1,
                            message: parseError.message,
                            code: 'YAML_PARSE_ERROR',
                        },
                    ],
                    warnings: [],
                });
            }

            // Check that parsed content is a valid object (not null, array, or primitive)
            if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) {
                return ctx.json({
                    valid: false,
                    errors: [
                        {
                            line: 1,
                            column: 1,
                            message: 'Configuration must be a valid YAML object',
                            code: 'INVALID_CONFIG_TYPE',
                        },
                    ],
                    warnings: [],
                });
            }

            // Enrich config with defaults/paths to satisfy schema requirements
            // Pass undefined for validation-only (no real file path)
            // AgentId will be derived from agentCard.name or fall back to 'coding-agent'
            const enriched = enrichAgentConfig(parsed, undefined);

            // Validate against schema
            const result = AgentConfigSchema.safeParse(enriched);

            if (!result.success) {
                // Use zodToIssues to extract detailed validation errors (handles union errors properly)
                const issues = zodToIssues(result.error);
                const errors = issues.map((issue) => ({
                    path: issue.path?.join('.') ?? 'root',
                    message: issue.message,
                    code: 'SCHEMA_VALIDATION_ERROR',
                }));

                return ctx.json({
                    valid: false,
                    errors,
                    warnings: [],
                });
            }

            // Check for warnings (e.g., plain text API keys)
            const warnings: Array<{ path: string; message: string; code: string }> = [];
            if (parsed.llm?.apiKey && !parsed.llm.apiKey.startsWith('$')) {
                warnings.push({
                    path: 'llm.apiKey',
                    message: 'Consider using environment variable instead of plain text',
                    code: 'SECURITY_WARNING',
                });
            }

            return ctx.json({
                valid: true,
                errors: [],
                warnings,
            });
        })
        // Persist a new YAML config: validate (throwing DextoValidationError on failure),
        // back up the current file, write, reload into the running agent, and restore the
        // backup if anything after the write throws.
        .openapi(saveConfigRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { yaml } = ctx.req.valid('json');

            // Validate YAML syntax first
            let parsed;
            try {
                parsed = yamlParse(yaml);
            } catch (parseError: any) {
                throw new DextoValidationError([
                    {
                        code: AgentErrorCode.INVALID_CONFIG,
                        message: `Invalid YAML syntax: ${parseError.message}`,
                        scope: ErrorScope.AGENT,
                        type: ErrorType.USER,
                        severity: 'error',
                    },
                ]);
            }

            // Check that parsed content is a valid object (not null, array, or primitive)
            if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) {
                throw new DextoValidationError([
                    {
                        code: AgentErrorCode.INVALID_CONFIG,
                        message: 'Configuration must be a valid YAML object',
                        scope: ErrorScope.AGENT,
                        type: ErrorType.USER,
                        severity: 'error',
                    },
                ]);
            }

            // Get target file path for enrichment
            const agentPath = agent.getAgentFilePath();

            // Enrich config with defaults/paths before validation (same as validation endpoint)
            const enriched = enrichAgentConfig(parsed, agentPath);

            // Validate schema
            const validationResult = AgentConfigSchema.safeParse(enriched);

            if (!validationResult.success) {
                throw new DextoValidationError(
                    validationResult.error.errors.map((err) => ({
                        code: AgentErrorCode.INVALID_CONFIG,
                        message: `${err.path.join('.')}: ${err.message}`,
                        scope: ErrorScope.AGENT,
                        type: ErrorType.USER,
                        severity: 'error',
                    }))
                );
            }

            // Create backup
            const backupPath = `${agentPath}.backup`;
            await fs.copyFile(agentPath, backupPath);

            try {
                // Write new config
                await fs.writeFile(agentPath, yaml, 'utf-8');

                // Load from file (agent-management's job)
                const newConfig = await reloadAgentConfigFromFile(agentPath);

                // Enrich config before reloading into agent (core expects enriched config with paths)
                const enrichedConfig = enrichAgentConfig(newConfig, agentPath);

                // Reload into agent (core's job - handles restart automatically)
                const reloadResult = await agent.reload(enrichedConfig);

                if (reloadResult.restarted) {
                    logger.info(
                        `Agent restarted to apply changes: ${reloadResult.changesApplied.join(', ')}`
                    );
                } else if (reloadResult.changesApplied.length === 0) {
                    logger.info('Configuration saved (no changes detected)');
                }

                // Clean up backup file after successful save
                await fs.unlink(backupPath).catch(() => {
                    // Ignore errors if backup file doesn't exist
                });

                logger.info(`Agent configuration saved and applied: ${agentPath}`);

                return ctx.json({
                    ok: true as const,
                    path: agentPath,
                    reloaded: true,
                    restarted: reloadResult.restarted,
                    changesApplied: reloadResult.changesApplied,
                    message: reloadResult.restarted
                        ? 'Configuration saved and applied successfully (agent restarted)'
                        : 'Configuration saved successfully (no changes detected)',
                });
            } catch (error) {
                // Restore backup on error
                // NOTE(review): the .backup file is intentionally left on disk here so the
                // previous config survives even if the restore copy fails.
                await fs.copyFile(backupPath, agentPath).catch(() => {
                    // Ignore errors if backup restore fails
                });
                throw error;
            }
        })
        // Export the effective config as YAML with apiKey and stdio env values redacted.
        .openapi(exportConfigRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { sessionId } = ctx.req.valid('query');
            const config = agent.getEffectiveConfig(sessionId);

            // Redact sensitive values
            const maskedConfig = {
                ...config,
                llm: {
                    ...config.llm,
                    apiKey: config.llm.apiKey ? '[REDACTED]' : undefined,
                },
                // Redact every env value of stdio MCP servers; other server types pass through.
                mcpServers: config.mcpServers
                    ? Object.fromEntries(
                          Object.entries(config.mcpServers).map(([name, serverConfig]) => [
                              name,
                              serverConfig.type === 'stdio' && serverConfig.env
                                  ? {
                                        ...serverConfig,
                                        env: Object.fromEntries(
                                            Object.keys(serverConfig.env).map((key) => [
                                                key,
                                                '[REDACTED]',
                                            ])
                                        ),
                                    }
                                  : serverConfig,
                          ])
                      )
                    : undefined,
            };

            const yamlStr = yamlStringify(maskedConfig);
            ctx.header('Content-Type', 'application/x-yaml');
            return ctx.body(yamlStr);
        });
|
||||
}
|
||||
213
dexto/packages/server/src/hono/routes/approvals.ts
Normal file
213
dexto/packages/server/src/hono/routes/approvals.ts
Normal file
@@ -0,0 +1,213 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import { type DextoAgent, DenialReason, ApprovalStatus, ApprovalError } from '@dexto/core';
|
||||
import type { ApprovalCoordinator } from '../../approval/approval-coordinator.js';
|
||||
import type { Context } from 'hono';
|
||||
// Resolver that yields the DextoAgent for a request context (sync or async).
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;

// Body accepted by POST /approvals/{approvalId}.
const ApprovalBodySchema = z
    .object({
        status: z
            .enum([ApprovalStatus.APPROVED, ApprovalStatus.DENIED])
            .describe('The user decision'),
        formData: z
            .record(z.unknown())
            .optional()
            .describe('Optional form data provided by the user (for elicitation)'),
        rememberChoice: z
            .boolean()
            .optional()
            .describe('Whether to remember this choice for future requests'),
    })
    .describe('Request body for submitting an approval decision');

// Success/failure envelope returned after a decision is processed.
const ApprovalResponseSchema = z
    .object({
        ok: z.boolean().describe('Whether the approval was successfully processed'),
        approvalId: z.string().describe('The ID of the processed approval'),
        status: z
            .enum([ApprovalStatus.APPROVED, ApprovalStatus.DENIED])
            .describe('The final status'),
    })
    .describe('Response after processing approval');

// One pending approval, as listed by GET /approvals.
const PendingApprovalSchema = z
    .object({
        approvalId: z.string().describe('The unique ID of the approval request'),
        type: z.string().describe('The type of approval (tool_confirmation, elicitation, etc.)'),
        sessionId: z.string().optional().describe('The session ID if applicable'),
        timeout: z.number().optional().describe('Timeout in milliseconds'),
        timestamp: z.string().describe('ISO timestamp when the request was created'),
        metadata: z.record(z.unknown()).describe('Type-specific metadata'),
    })
    .describe('A pending approval request');

// Envelope for the pending-approvals listing.
const PendingApprovalsResponseSchema = z
    .object({
        ok: z.literal(true).describe('Success indicator'),
        approvals: z.array(PendingApprovalSchema).describe('List of pending approval requests'),
    })
    .describe('Response containing pending approval requests');
|
||||
|
||||
/**
 * Builds the /approvals router.
 *
 * @param getAgent - Resolves the DextoAgent for each request.
 * @param approvalCoordinator - Optional coordinator that relays decisions to the
 *   ManualApprovalHandler; when absent, POST /approvals/{id} answers 503.
 * @returns The configured OpenAPIHono app with both approval routes attached.
 */
export function createApprovalsRouter(
    getAgent: GetAgentFn,
    approvalCoordinator?: ApprovalCoordinator
) {
    const app = new OpenAPIHono();

    // GET /approvals - Fetch pending approval requests
    // Useful for restoring UI state after page refresh
    const getPendingApprovalsRoute = createRoute({
        method: 'get',
        path: '/approvals',
        summary: 'Get Pending Approvals',
        description:
            'Fetch all pending approval requests for a session. Use this to restore UI state after page refresh.',
        tags: ['approvals'],
        request: {
            query: z.object({
                sessionId: z.string().describe('The session ID to fetch pending approvals for'),
            }),
        },
        responses: {
            200: {
                description: 'List of pending approval requests',
                content: {
                    'application/json': {
                        schema: PendingApprovalsResponseSchema,
                    },
                },
            },
        },
    });

    // TODO: Consider adding auth & idempotency for production deployments
    // See: https://github.com/truffle-ai/dexto/pull/450#discussion_r2545039760
    // - Auth: Open-source framework should allow flexible auth (reverse proxy, API gateway, etc.)
    // - Idempotency: Already documented in schema; platform can add tracking separately
    const submitApprovalRoute = createRoute({
        method: 'post',
        path: '/approvals/{approvalId}',
        summary: 'Submit Approval Decision',
        description: 'Submit a user decision for a pending approval request',
        tags: ['approvals'],
        request: {
            params: z.object({
                approvalId: z.string().describe('The ID of the approval request'),
            }),
            body: {
                content: { 'application/json': { schema: ApprovalBodySchema } },
            },
            headers: z.object({
                'Idempotency-Key': z
                    .string()
                    .optional()
                    .describe('Optional key to ensure idempotent processing'),
            }),
        },
        responses: {
            200: {
                description: 'Approval processed successfully',
                content: {
                    'application/json': {
                        schema: ApprovalResponseSchema,
                    },
                },
            },
            404: {
                description: 'Approval request not found or expired',
            },
            400: {
                description: 'Validation error',
            },
            503: {
                description:
                    'Approval coordinator unavailable (server not initialized for approvals)',
            },
        },
    });

    return app
        // List pending approval IDs known to the approval manager.
        .openapi(getPendingApprovalsRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { sessionId } = ctx.req.valid('query');

            agent.logger.debug(`Fetching pending approvals for session ${sessionId}`);

            // Get all pending approval IDs from the approval manager
            const pendingIds = agent.services.approvalManager.getPendingApprovals();

            // For now, return basic approval info
            // Full metadata would require storing approval requests in the coordinator
            // NOTE(review): IDs are not filtered by sessionId here — every pending approval
            // is echoed with the caller's sessionId; confirm this is intended.
            const approvals = pendingIds.map((approvalId) => ({
                approvalId,
                type: 'tool_confirmation', // Default type
                sessionId,
                timestamp: new Date().toISOString(),
                metadata: {},
            }));

            return ctx.json({
                ok: true as const,
                approvals,
            });
        })
        // Accept a user decision and relay it through the approval coordinator.
        .openapi(submitApprovalRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { approvalId } = ctx.req.valid('param');
            const { status, formData, rememberChoice } = ctx.req.valid('json');

            agent.logger.info(`Received approval decision for ${approvalId}: ${status}`);

            if (!approvalCoordinator) {
                agent.logger.error('ApprovalCoordinator not available');
                return ctx.json({ ok: false as const, approvalId, status }, 503);
            }

            // Validate that the approval exists
            const pendingApprovals = agent.services.approvalManager.getPendingApprovals();
            if (!pendingApprovals.includes(approvalId)) {
                throw ApprovalError.notFound(approvalId);
            }

            try {
                // Build data object for approved requests
                const data: Record<string, unknown> = {};
                if (status === ApprovalStatus.APPROVED) {
                    if (formData !== undefined) {
                        data.formData = formData;
                    }
                    if (rememberChoice !== undefined) {
                        data.rememberChoice = rememberChoice;
                    }
                }

                // Construct response payload
                // Get sessionId from coordinator's mapping (stored when request was emitted)
                const sessionId = approvalCoordinator.getSessionId(approvalId);
                const responsePayload = {
                    approvalId,
                    status,
                    sessionId, // Attach sessionId for SSE routing to correct client streams
                    ...(status === ApprovalStatus.DENIED
                        ? {
                              reason: DenialReason.USER_DENIED,
                              message: 'User denied the request via API',
                          }
                        : {}),
                    ...(Object.keys(data).length > 0 ? { data } : {}),
                };

                // Emit via approval coordinator which ManualApprovalHandler listens to
                approvalCoordinator.emitResponse(responsePayload);

                return ctx.json({
                    ok: true,
                    approvalId,
                    status,
                });
            } catch (error) {
                agent.logger.error('Error processing approval', { approvalId, error });
                return ctx.json({ ok: false as const, approvalId, status }, 500);
            }
        });
}
|
||||
65
dexto/packages/server/src/hono/routes/dexto-auth.ts
Normal file
65
dexto/packages/server/src/hono/routes/dexto-auth.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { GetAgentFn } from '../index.js';
|
||||
import {
|
||||
isDextoAuthEnabled,
|
||||
isDextoAuthenticated,
|
||||
canUseDextoProvider,
|
||||
} from '@dexto/agent-management';
|
||||
|
||||
/**
|
||||
* Dexto authentication status routes.
|
||||
* Provides endpoints to check dexto auth status for Web UI.
|
||||
*/
|
||||
export function createDextoAuthRouter(_getAgent: GetAgentFn) {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const statusRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/dexto-auth/status',
|
||||
summary: 'Dexto Auth Status',
|
||||
description:
|
||||
'Returns dexto authentication status. Used by Web UI to check if user can use dexto features.',
|
||||
tags: ['auth'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Dexto auth status',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z.object({
|
||||
enabled: z.boolean().describe('Whether dexto auth feature is enabled'),
|
||||
authenticated: z
|
||||
.boolean()
|
||||
.describe('Whether user is authenticated with dexto'),
|
||||
canUse: z
|
||||
.boolean()
|
||||
.describe(
|
||||
'Whether user can use dexto (authenticated AND has API key)'
|
||||
),
|
||||
}),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return app.openapi(statusRoute, async (c) => {
|
||||
const enabled = isDextoAuthEnabled();
|
||||
|
||||
if (!enabled) {
|
||||
return c.json({
|
||||
enabled: false,
|
||||
authenticated: false,
|
||||
canUse: false,
|
||||
});
|
||||
}
|
||||
|
||||
const authenticated = await isDextoAuthenticated();
|
||||
const canUse = await canUseDextoProvider();
|
||||
|
||||
return c.json({
|
||||
enabled,
|
||||
authenticated,
|
||||
canUse,
|
||||
});
|
||||
});
|
||||
}
|
||||
64
dexto/packages/server/src/hono/routes/discovery.ts
Normal file
64
dexto/packages/server/src/hono/routes/discovery.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import { listAllProviders } from '@dexto/core';
|
||||
|
||||
// Shape of a single registered provider entry (blob/database/compaction/customTools).
const DiscoveredProviderSchema = z
    .object({
        type: z.string().describe('Provider type identifier'),
        category: z
            .enum(['blob', 'database', 'compaction', 'customTools'])
            .describe('Provider category'),
        metadata: z
            .object({
                displayName: z.string().optional().describe('Human-readable display name'),
                description: z.string().optional().describe('Provider description'),
            })
            // Providers may carry extra metadata keys; let them through.
            .passthrough()
            .optional()
            .describe('Optional metadata about the provider'),
    })
    .describe('Information about a registered provider');

// Shape of one built-in tool exposed for configuration UIs.
const InternalToolSchema = z
    .object({
        name: z
            .string()
            .describe('Internal tool name identifier (e.g., "search_history", "ask_user")'),
        description: z.string().describe('Human-readable description of what the tool does'),
    })
    .describe('Information about an internal tool');

// Full GET /discovery response: providers grouped by category plus internal tools.
const DiscoveryResponseSchema = z
    .object({
        blob: z.array(DiscoveredProviderSchema).describe('Blob storage providers'),
        database: z.array(DiscoveredProviderSchema).describe('Database providers'),
        compaction: z.array(DiscoveredProviderSchema).describe('Compaction strategy providers'),
        customTools: z.array(DiscoveredProviderSchema).describe('Custom tool providers'),
        internalTools: z
            .array(InternalToolSchema)
            .describe('Internal tools available for configuration'),
    })
    .describe('Discovery response with providers grouped by category');
|
||||
|
||||
export function createDiscoveryRouter() {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const discoveryRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/discovery',
|
||||
summary: 'Discover Available Providers and Tools',
|
||||
description:
|
||||
'Returns all registered providers (blob storage, database, compaction, custom tools) and available internal tools. Useful for building UIs that need to display configurable options.',
|
||||
tags: ['discovery'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Available providers grouped by category',
|
||||
content: { 'application/json': { schema: DiscoveryResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return app.openapi(discoveryRoute, async (ctx) => {
|
||||
const providers = listAllProviders();
|
||||
return ctx.json(providers);
|
||||
});
|
||||
}
|
||||
48
dexto/packages/server/src/hono/routes/greeting.ts
Normal file
48
dexto/packages/server/src/hono/routes/greeting.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { GetAgentFn } from '../index.js';
|
||||
|
||||
// Query parameters accepted by GET /greeting; sessionId selects a
// session-specific effective configuration when provided.
const querySchema = z
    .object({
        sessionId: z
            .string()
            .optional()
            .describe('Session identifier to retrieve session-specific greeting'),
    })
    .describe('Query parameters for greeting endpoint');
|
||||
|
||||
export function createGreetingRouter(getAgent: GetAgentFn) {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const greetingRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/greeting',
|
||||
summary: 'Get Greeting Message',
|
||||
description: 'Retrieves the greeting message from the agent configuration',
|
||||
tags: ['config'],
|
||||
request: { query: querySchema.pick({ sessionId: true }) },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Greeting',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
greeting: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Greeting message from agent configuration'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return app.openapi(greetingRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { sessionId } = ctx.req.valid('query');
|
||||
const cfg = agent.getEffectiveConfig(sessionId);
|
||||
return ctx.json({ greeting: cfg.greeting });
|
||||
});
|
||||
}
|
||||
25
dexto/packages/server/src/hono/routes/health.ts
Normal file
25
dexto/packages/server/src/hono/routes/health.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { GetAgentFn } from '../index.js';
|
||||
|
||||
/**
|
||||
* NOTE: If we introduce a transport-agnostic handler layer later, the logic in this module can move
|
||||
* into that layer. For now we keep the implementation inline for simplicity.
|
||||
*/
|
||||
export function createHealthRouter(_getAgent: GetAgentFn) {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const route = createRoute({
|
||||
method: 'get',
|
||||
path: '/',
|
||||
summary: 'Health Check',
|
||||
description: 'Returns server health status',
|
||||
tags: ['system'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Server health',
|
||||
content: { 'text/plain': { schema: z.string().openapi({ example: 'OK' }) } },
|
||||
},
|
||||
},
|
||||
});
|
||||
return app.openapi(route, (c) => c.text('OK'));
|
||||
}
|
||||
136
dexto/packages/server/src/hono/routes/key.ts
Normal file
136
dexto/packages/server/src/hono/routes/key.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
/**
|
||||
* API Key Management Routes
|
||||
*
|
||||
* Endpoints for managing LLM provider API keys.
|
||||
*
|
||||
* TODO: For hosted deployments, these endpoints should integrate with a secure
|
||||
* key management service (e.g., AWS Secrets Manager, HashiCorp Vault) rather
|
||||
* than storing keys in local .env files.
|
||||
*/
|
||||
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import { LLM_PROVIDERS } from '@dexto/core';
|
||||
import {
|
||||
getProviderKeyStatus,
|
||||
saveProviderApiKey,
|
||||
resolveApiKeyForProvider,
|
||||
} from '@dexto/agent-management';
|
||||
|
||||
/**
|
||||
* Masks an API key for safe display, showing only prefix and suffix.
|
||||
* @example maskApiKey('sk-proj-abc123xyz789') → 'sk-proj...z789'
|
||||
*/
|
||||
function maskApiKey(key: string): string {
|
||||
if (!key) return '';
|
||||
if (key.length < 12) {
|
||||
return key.slice(0, 4) + '...' + key.slice(-4);
|
||||
}
|
||||
return key.slice(0, 7) + '...' + key.slice(-4);
|
||||
}
|
||||
|
||||
// Path parameter for GET /llm/key/{provider}.
const GetKeyParamsSchema = z
    .object({
        provider: z.enum(LLM_PROVIDERS).describe('LLM provider identifier'),
    })
    .describe('Path parameters for API key operations');

// Body for POST /llm/key; apiKey is writeOnly so it never appears in responses.
const SaveKeySchema = z
    .object({
        provider: z
            .enum(LLM_PROVIDERS)
            .describe('LLM provider identifier (e.g., openai, anthropic)'),
        apiKey: z
            .string()
            .min(1, 'API key is required')
            .describe('API key for the provider (writeOnly - never returned in responses)')
            .openapi({ writeOnly: true }),
    })
    .describe('Request body for saving a provider API key');
|
||||
|
||||
/**
 * Builds the /llm/key router: read masked key status per provider, and save
 * a new provider key to the environment store.
 */
export function createKeyRouter() {
    const app = new OpenAPIHono();

    // GET /llm/key/{provider} — status + masked value for UI display.
    const getKeyRoute = createRoute({
        method: 'get',
        path: '/llm/key/{provider}',
        summary: 'Get Provider API Key Status',
        description:
            'Retrieves the API key status for a provider. Returns a masked key value (e.g., sk-proj...xyz4) for UI display purposes.',
        tags: ['llm'],
        request: { params: GetKeyParamsSchema },
        responses: {
            200: {
                description: 'API key status and value',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                provider: z.enum(LLM_PROVIDERS).describe('Provider identifier'),
                                envVar: z.string().describe('Environment variable name'),
                                hasKey: z.boolean().describe('Whether API key is configured'),
                                keyValue: z
                                    .string()
                                    .optional()
                                    .describe(
                                        'Masked API key value if configured (e.g., sk-proj...xyz4)'
                                    ),
                            })
                            .strict()
                            .describe('API key status response'),
                    },
                },
            },
        },
    });

    // POST /llm/key — persist a key to .env and make it usable immediately.
    const saveKeyRoute = createRoute({
        method: 'post',
        path: '/llm/key',
        summary: 'Save Provider API Key',
        description: 'Stores an API key for a provider in .env and makes it available immediately',
        tags: ['llm'],
        request: { body: { content: { 'application/json': { schema: SaveKeySchema } } } },
        responses: {
            200: {
                description: 'API key saved',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                ok: z.literal(true).describe('Operation success indicator'),
                                provider: z
                                    .enum(LLM_PROVIDERS)
                                    .describe('Provider for which the key was saved'),
                                envVar: z
                                    .string()
                                    .describe('Environment variable name where key was stored'),
                            })
                            .strict()
                            .describe('API key save response'),
                    },
                },
            },
        },
    });

    return app
        // Never return the raw key — only a masked preview when one is configured.
        .openapi(getKeyRoute, (ctx) => {
            const { provider } = ctx.req.valid('param');
            const keyStatus = getProviderKeyStatus(provider);
            const apiKey = resolveApiKeyForProvider(provider);
            const maskedKey = apiKey ? maskApiKey(apiKey) : undefined;

            return ctx.json({
                provider,
                envVar: keyStatus.envVar,
                hasKey: keyStatus.hasApiKey,
                // Omit keyValue entirely (strict schema) when no key is configured.
                ...(maskedKey && { keyValue: maskedKey }),
            });
        })
        .openapi(saveKeyRoute, async (ctx) => {
            const { provider, apiKey } = ctx.req.valid('json');
            // saveProviderApiKey uses getDextoEnvPath internally for context-aware .env resolution
            const meta = await saveProviderApiKey(provider, apiKey);
            return ctx.json({ ok: true as const, provider, envVar: meta.envVar });
        });
}
|
||||
555
dexto/packages/server/src/hono/routes/llm.ts
Normal file
555
dexto/packages/server/src/hono/routes/llm.ts
Normal file
@@ -0,0 +1,555 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import { DextoRuntimeError, ErrorScope, ErrorType } from '@dexto/core';
|
||||
import {
|
||||
LLM_REGISTRY,
|
||||
LLM_PROVIDERS,
|
||||
SUPPORTED_FILE_TYPES,
|
||||
supportsBaseURL,
|
||||
getAllModelsForProvider,
|
||||
getSupportedFileTypesForModel,
|
||||
type ProviderInfo,
|
||||
type LLMProvider,
|
||||
type SupportedFileType,
|
||||
LLMUpdatesSchema,
|
||||
} from '@dexto/core';
|
||||
import {
|
||||
getProviderKeyStatus,
|
||||
loadCustomModels,
|
||||
saveCustomModel,
|
||||
deleteCustomModel,
|
||||
CustomModelSchema,
|
||||
isDextoAuthEnabled,
|
||||
} from '@dexto/agent-management';
|
||||
import type { Context } from 'hono';
|
||||
import {
|
||||
ProviderCatalogSchema,
|
||||
ModelFlatSchema,
|
||||
LLMConfigResponseSchema,
|
||||
} from '../schemas/responses.js';
|
||||
|
||||
// Resolves the DextoAgent for a request; may be synchronous or async
// (e.g. lazy agent initialization).
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;

// Query parameters for GET /llm/current.
const CurrentQuerySchema = z
    .object({
        sessionId: z
            .string()
            .optional()
            .describe('Session identifier to retrieve session-specific LLM configuration'),
    })
    .describe('Query parameters for getting current LLM configuration');
|
||||
|
||||
const CatalogQuerySchema = z
|
||||
.object({
|
||||
provider: z
|
||||
.union([z.string(), z.array(z.string())])
|
||||
.optional()
|
||||
.transform((value): string[] | undefined =>
|
||||
Array.isArray(value) ? value : value ? value.split(',') : undefined
|
||||
)
|
||||
.describe('Comma-separated list of LLM providers to filter by'),
|
||||
hasKey: z
|
||||
.union([z.literal('true'), z.literal('false'), z.literal('1'), z.literal('0')])
|
||||
.optional()
|
||||
.transform((raw): boolean | undefined =>
|
||||
raw === 'true' || raw === '1'
|
||||
? true
|
||||
: raw === 'false' || raw === '0'
|
||||
? false
|
||||
: undefined
|
||||
)
|
||||
.describe('Filter by API key presence (true or false)'),
|
||||
fileType: z
|
||||
.enum(SUPPORTED_FILE_TYPES)
|
||||
.optional()
|
||||
.describe('Filter by supported file type (audio, pdf, or image)'),
|
||||
defaultOnly: z
|
||||
.union([z.literal('true'), z.literal('false'), z.literal('1'), z.literal('0')])
|
||||
.optional()
|
||||
.transform((raw): boolean | undefined =>
|
||||
raw === 'true' || raw === '1'
|
||||
? true
|
||||
: raw === 'false' || raw === '0'
|
||||
? false
|
||||
: undefined
|
||||
)
|
||||
.describe('Include only default models (true or false)'),
|
||||
mode: z
|
||||
.enum(['grouped', 'flat'])
|
||||
.default('grouped')
|
||||
.describe('Response format mode (grouped by provider or flat list)'),
|
||||
})
|
||||
.describe('Query parameters for filtering and formatting the LLM catalog');
|
||||
|
||||
// Combine LLM updates schema with sessionId for API requests
// LLMUpdatesSchema is no longer strict, so it accepts extra fields like sessionId
// NOTE: `.and()` builds a zod intersection, so sessionId is validated alongside
// the LLM update fields; the /llm/switch handler destructures sessionId out
// before passing the remaining fields to agent.switchLLM().
const SwitchLLMBodySchema = LLMUpdatesSchema.and(
    z.object({
        sessionId: z
            .string()
            .optional()
            .describe('Session identifier for session-specific LLM configuration'),
    })
).describe('LLM switch request body with optional session ID and LLM fields');
|
||||
|
||||
/**
 * Builds the Hono sub-router for LLM configuration endpoints.
 *
 * Routes:
 * - GET    /llm/current              current agent- or session-level LLM config
 * - GET    /llm/catalog              providers/models catalog with filtering
 * - POST   /llm/switch               switch the active LLM (optionally per session)
 * - GET    /llm/custom-models        list custom openai-compatible models
 * - POST   /llm/custom-models        save a custom model
 * - DELETE /llm/custom-models/{name} delete a custom model by name
 * - GET    /llm/capabilities         supported file types for a provider/model
 *
 * API keys are never returned by any handler here; responses expose only a
 * boolean `hasApiKey`.
 *
 * @param getAgent - Resolves the DextoAgent for each request (may be async).
 */
export function createLlmRouter(getAgent: GetAgentFn) {
    const app = new OpenAPIHono();

    // GET /llm/current — route definition.
    const currentRoute = createRoute({
        method: 'get',
        path: '/llm/current',
        summary: 'Get Current LLM Config',
        description: 'Retrieves the current LLM configuration for the agent or a specific session',
        tags: ['llm'],
        request: { query: CurrentQuerySchema },
        responses: {
            200: {
                description: 'Current LLM config',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                config: LLMConfigResponseSchema.partial({
                                    maxIterations: true,
                                }).extend({
                                    displayName: z
                                        .string()
                                        .optional()
                                        .describe('Human-readable model display name'),
                                }),
                                routing: z
                                    .object({
                                        viaDexto: z
                                            .boolean()
                                            .describe(
                                                'Whether requests route through Dexto gateway'
                                            ),
                                    })
                                    .describe(
                                        'Routing information for the current LLM configuration'
                                    ),
                            })
                            .describe('Response containing current LLM configuration'),
                    },
                },
            },
        },
    });
    // GET /llm/catalog — route definition; response shape depends on ?mode=.
    const catalogRoute = createRoute({
        method: 'get',
        path: '/llm/catalog',
        summary: 'LLM Catalog',
        description: 'Providers, models, capabilities, and API key status',
        tags: ['llm'],
        request: { query: CatalogQuerySchema },
        responses: {
            200: {
                description: 'LLM catalog',
                content: {
                    'application/json': {
                        schema: z
                            .union([
                                z
                                    .object({
                                        providers: z
                                            .record(z.enum(LLM_PROVIDERS), ProviderCatalogSchema)
                                            .describe(
                                                'Providers grouped by ID with their models and capabilities'
                                            ),
                                    })
                                    .strict()
                                    .describe('Grouped catalog response (mode=grouped)'),
                                z
                                    .object({
                                        models: z
                                            .array(ModelFlatSchema)
                                            .describe(
                                                'Flat list of all models with provider information'
                                            ),
                                    })
                                    .strict()
                                    .describe('Flat catalog response (mode=flat)'),
                            ])
                            .describe(
                                'LLM catalog in grouped or flat format based on mode query parameter'
                            ),
                    },
                },
            },
        },
    });

    // POST /llm/switch — route definition.
    const switchRoute = createRoute({
        method: 'post',
        path: '/llm/switch',
        summary: 'Switch LLM',
        description: 'Switches the LLM configuration for the agent or a specific session',
        tags: ['llm'],
        request: {
            body: {
                content: {
                    'application/json': {
                        schema: SwitchLLMBodySchema,
                    },
                },
            },
        },
        responses: {
            200: {
                description: 'LLM switch result',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                config: LLMConfigResponseSchema.describe(
                                    'New LLM configuration with all defaults applied (apiKey omitted)'
                                ),
                                sessionId: z
                                    .string()
                                    .optional()
                                    .describe('Session ID if session-specific switch'),
                            })
                            .describe('LLM switch result'),
                    },
                },
            },
        },
    });

    // Custom models routes
    const listCustomModelsRoute = createRoute({
        method: 'get',
        path: '/llm/custom-models',
        summary: 'List Custom Models',
        description: 'Returns all saved custom openai-compatible model configurations',
        tags: ['llm'],
        responses: {
            200: {
                description: 'List of custom models',
                content: {
                    'application/json': {
                        schema: z.object({
                            models: z.array(CustomModelSchema).describe('List of custom models'),
                        }),
                    },
                },
            },
        },
    });

    const createCustomModelRoute = createRoute({
        method: 'post',
        path: '/llm/custom-models',
        summary: 'Create Custom Model',
        description: 'Saves a new custom openai-compatible model configuration',
        tags: ['llm'],
        request: {
            body: { content: { 'application/json': { schema: CustomModelSchema } } },
        },
        responses: {
            200: {
                description: 'Custom model saved',
                content: {
                    'application/json': {
                        schema: z.object({
                            ok: z.literal(true).describe('Success indicator'),
                            model: CustomModelSchema,
                        }),
                    },
                },
            },
        },
    });

    const deleteCustomModelRoute = createRoute({
        method: 'delete',
        path: '/llm/custom-models/{name}',
        summary: 'Delete Custom Model',
        description: 'Deletes a custom model by name',
        tags: ['llm'],
        request: {
            params: z.object({
                name: z.string().min(1).describe('Model name to delete'),
            }),
        },
        responses: {
            200: {
                description: 'Custom model deleted',
                content: {
                    'application/json': {
                        schema: z.object({
                            ok: z.literal(true).describe('Success indicator'),
                            deleted: z.string().describe('Name of the deleted model'),
                        }),
                    },
                },
            },
            404: {
                description: 'Custom model not found',
                content: {
                    'application/json': {
                        schema: z.object({
                            ok: z.literal(false).describe('Failure indicator'),
                            error: z.string().describe('Error message'),
                        }),
                    },
                },
            },
        },
    });

    // Model capabilities endpoint - resolves gateway providers to underlying model capabilities
    const capabilitiesRoute = createRoute({
        method: 'get',
        path: '/llm/capabilities',
        summary: 'Get Model Capabilities',
        description:
            'Returns the capabilities (supported file types) for a specific provider/model combination. ' +
            'Handles gateway providers (dexto, openrouter) by resolving to the underlying model capabilities.',
        tags: ['llm'],
        request: {
            query: z.object({
                provider: z.enum(LLM_PROVIDERS).describe('LLM provider name'),
                model: z
                    .string()
                    .min(1)
                    .describe('Model name (supports both native and OpenRouter format)'),
            }),
        },
        responses: {
            200: {
                description: 'Model capabilities',
                content: {
                    'application/json': {
                        schema: z.object({
                            provider: z.enum(LLM_PROVIDERS).describe('Provider name'),
                            model: z.string().describe('Model name as provided'),
                            supportedFileTypes: z
                                .array(z.enum(SUPPORTED_FILE_TYPES))
                                .describe('File types supported by this model'),
                        }),
                    },
                },
            },
        },
    });

    return app
        .openapi(currentRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { sessionId } = ctx.req.valid('query');

            // Session-scoped config when sessionId is given, agent default otherwise.
            const currentConfig = sessionId
                ? agent.getEffectiveConfig(sessionId).llm
                : agent.getCurrentLLMConfig();

            // Best-effort displayName lookup; failures never fail the request.
            let displayName: string | undefined;
            try {
                // First check registry for built-in models
                const model = LLM_REGISTRY[currentConfig.provider]?.models.find(
                    (m) => m.name.toLowerCase() === String(currentConfig.model).toLowerCase()
                );
                displayName = model?.displayName || undefined;

                // If not found in registry, check custom models
                if (!displayName) {
                    const customModels = await loadCustomModels();
                    const customModel = customModels.find(
                        (cm) => cm.name.toLowerCase() === String(currentConfig.model).toLowerCase()
                    );
                    displayName = customModel?.displayName || undefined;
                }
            } catch {
                // ignore lookup errors
            }

            // Omit apiKey from response for security
            const { apiKey, ...configWithoutKey } = currentConfig;

            // With explicit providers, viaDexto is simply whether the provider is 'dexto'
            // Only report viaDexto when the feature is enabled
            const viaDexto = isDextoAuthEnabled() && currentConfig.provider === 'dexto';

            return ctx.json({
                config: {
                    ...configWithoutKey,
                    hasApiKey: !!apiKey,
                    ...(displayName && { displayName }),
                },
                routing: {
                    viaDexto,
                },
            });
        })
        .openapi(catalogRoute, (ctx) => {
            // Local view of a provider's catalog entry built from the registry
            // plus runtime key status.
            type ProviderCatalog = Pick<ProviderInfo, 'models' | 'supportedFileTypes'> & {
                name: string;
                hasApiKey: boolean;
                primaryEnvVar: string;
                supportsBaseURL: boolean;
            };

            type ModelFlat = ProviderCatalog['models'][number] & { provider: LLMProvider };

            const queryParams = ctx.req.valid('query');

            // Build the full catalog first, then narrow it filter by filter.
            const providers: Record<string, ProviderCatalog> = {};

            for (const provider of LLM_PROVIDERS) {
                // Skip dexto provider when feature is not enabled
                if (provider === 'dexto' && !isDextoAuthEnabled()) {
                    continue;
                }

                const info = LLM_REGISTRY[provider];
                const displayName = provider.charAt(0).toUpperCase() + provider.slice(1);
                const keyStatus = getProviderKeyStatus(provider);

                // Use getAllModelsForProvider to get inherited models for gateway providers
                // like 'dexto' that have supportsAllRegistryModels: true
                const models = getAllModelsForProvider(provider);

                providers[provider] = {
                    name: displayName,
                    hasApiKey: keyStatus.hasApiKey,
                    primaryEnvVar: keyStatus.envVar,
                    supportsBaseURL: supportsBaseURL(provider),
                    models,
                    supportedFileTypes: info.supportedFileTypes,
                };
            }

            let filtered: Record<string, ProviderCatalog> = { ...providers };

            // Filter 1: explicit provider list (unknown provider names are dropped).
            if (queryParams.provider && queryParams.provider.length > 0) {
                const allowed = new Set(
                    queryParams.provider.filter((p) =>
                        (LLM_PROVIDERS as readonly string[]).includes(p)
                    )
                );
                const filteredByProvider: Record<string, ProviderCatalog> = {};
                for (const [id, catalog] of Object.entries(filtered)) {
                    if (allowed.has(id)) {
                        filteredByProvider[id] = catalog;
                    }
                }
                filtered = filteredByProvider;
            }

            // Filter 2: API key presence.
            if (typeof queryParams.hasKey === 'boolean') {
                const byKey: Record<string, ProviderCatalog> = {};
                for (const [id, catalog] of Object.entries(filtered)) {
                    if (catalog.hasApiKey === queryParams.hasKey) {
                        byKey[id] = catalog;
                    }
                }
                filtered = byKey;
            }

            // Filter 3: supported file type; providers with no matching models
            // are removed entirely.
            if (queryParams.fileType) {
                const byFileType: Record<string, ProviderCatalog> = {};
                for (const [id, catalog] of Object.entries(filtered)) {
                    const models = catalog.models.filter((model) => {
                        // Model-level file types take precedence; fall back to
                        // the provider-level list when the model declares none.
                        const modelTypes =
                            Array.isArray(model.supportedFileTypes) &&
                            model.supportedFileTypes.length > 0
                                ? model.supportedFileTypes
                                : catalog.supportedFileTypes || [];
                        return modelTypes.includes(queryParams.fileType!);
                    });
                    if (models.length > 0) {
                        byFileType[id] = { ...catalog, models };
                    }
                }
                filtered = byFileType;
            }

            // Filter 4: default models only.
            if (queryParams.defaultOnly) {
                const byDefault: Record<string, ProviderCatalog> = {};
                for (const [id, catalog] of Object.entries(filtered)) {
                    const models = catalog.models.filter((model) => model.default === true);
                    if (models.length > 0) {
                        byDefault[id] = { ...catalog, models };
                    }
                }
                filtered = byDefault;
            }

            // mode=flat: flatten the grouped catalog into one model list.
            if (queryParams.mode === 'flat') {
                const flat: ModelFlat[] = [];
                for (const [id, catalog] of Object.entries(filtered)) {
                    for (const model of catalog.models) {
                        flat.push({ provider: id as LLMProvider, ...model });
                    }
                }
                return ctx.json({ models: flat });
            }

            return ctx.json({ providers: filtered });
        })
        .openapi(switchRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const raw = ctx.req.valid('json');
            // sessionId is transport-only; everything else goes to switchLLM.
            const { sessionId, ...llmUpdates } = raw;

            const config = await agent.switchLLM(llmUpdates, sessionId);

            // Omit apiKey from response for security
            const { apiKey, ...configWithoutKey } = config;
            return ctx.json({
                config: {
                    ...configWithoutKey,
                    hasApiKey: !!apiKey,
                },
                sessionId,
            });
        })
        .openapi(listCustomModelsRoute, async (ctx) => {
            const models = await loadCustomModels();
            return ctx.json({ models });
        })
        .openapi(createCustomModelRoute, async (ctx) => {
            const model = ctx.req.valid('json');
            await saveCustomModel(model);
            return ctx.json({ ok: true as const, model });
        })
        .openapi(deleteCustomModelRoute, async (ctx) => {
            const { name: encodedName } = ctx.req.valid('param');
            // Decode URL-encoded name to handle OpenRouter model IDs with slashes
            const name = decodeURIComponent(encodedName);
            const deleted = await deleteCustomModel(name);
            if (!deleted) {
                // Surfaced as the documented 404 response by the error middleware.
                throw new DextoRuntimeError(
                    'custom_model_not_found',
                    ErrorScope.LLM,
                    ErrorType.NOT_FOUND,
                    `Custom model '${name}' not found`,
                    { modelName: name }
                );
            }
            return ctx.json({ ok: true as const, deleted: name } as const, 200);
        })
        .openapi(capabilitiesRoute, (ctx) => {
            const { provider, model } = ctx.req.valid('query');

            // getSupportedFileTypesForModel handles:
            // 1. Gateway providers (dexto, openrouter) - resolves via resolveModelOrigin to underlying model
            // 2. Native providers - direct lookup in registry
            // 3. Custom model providers (openai-compatible) - returns provider-level capabilities
            // Falls back to provider-level supportedFileTypes if model not found
            let supportedFileTypes: SupportedFileType[];
            try {
                supportedFileTypes = getSupportedFileTypesForModel(provider, model);
            } catch {
                // If model lookup fails, fall back to provider-level capabilities
                const providerInfo = LLM_REGISTRY[provider];
                supportedFileTypes = providerInfo?.supportedFileTypes ?? [];
            }

            return ctx.json({
                provider,
                model,
                supportedFileTypes,
            });
        });
}
|
||||
446
dexto/packages/server/src/hono/routes/mcp.ts
Normal file
446
dexto/packages/server/src/hono/routes/mcp.ts
Normal file
@@ -0,0 +1,446 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import { logger, McpServerConfigSchema, MCP_CONNECTION_STATUSES } from '@dexto/core';
|
||||
import { updateAgentConfigFile } from '@dexto/agent-management';
|
||||
import { ResourceSchema } from '../schemas/responses.js';
|
||||
import type { GetAgentFn } from '../index.js';
|
||||
|
||||
// Request body for POST /mcp/servers: a unique server name plus its full
// connection config, with an opt-in flag to persist it to the agent config file.
const McpServerRequestSchema = z
    .object({
        name: z.string().min(1, 'Server name is required').describe('A unique name for the server'),
        config: McpServerConfigSchema.describe('The server configuration object'),
        persistToAgent: z
            .boolean()
            .optional()
            .describe('If true, saves the server to agent configuration file'),
    })
    .describe('Request body for adding or updating an MCP server');

// Tool arguments are tool-specific, so the body is an open record; the actual
// shape is defined and validated by the target tool's own inputSchema.
const ExecuteToolBodySchema = z
    .record(z.unknown())
    .describe(
        "Tool execution parameters as JSON object. The specific fields depend on the tool being executed and are defined by the tool's inputSchema."
    );
|
||||
|
||||
// Response schemas

// Result of adding a server: a free-form status string plus the server name
// echoed back.
const ServerStatusResponseSchema = z
    .object({
        status: z.string().describe('Connection status'),
        name: z.string().describe('Server name'),
    })
    .strict()
    .describe('Server status response');

// One entry in the server list; status is constrained to the known MCP
// connection states.
const ServerInfoSchema = z
    .object({
        id: z.string().describe('Server identifier'),
        name: z.string().describe('Server name'),
        status: z.enum(MCP_CONNECTION_STATUSES).describe('Server status'),
    })
    .strict()
    .describe('MCP server information');

// Envelope for GET /mcp/servers.
const ServersListResponseSchema = z
    .object({
        servers: z.array(ServerInfoSchema).describe('Array of server information'),
    })
    .strict()
    .describe('List of MCP servers');
|
||||
|
||||
// JSON Schema definition for tool input parameters (based on MCP SDK Tool type)
// .passthrough() preserves JSON Schema keywords that are not modeled here.
const JsonSchemaProperty = z
    .object({
        type: z
            .enum(['string', 'number', 'integer', 'boolean', 'object', 'array'])
            .optional()
            .describe('Property type'),
        description: z.string().optional().describe('Property description'),
        enum: z
            .array(z.union([z.string(), z.number(), z.boolean()]))
            .optional()
            .describe('Enum values'),
        default: z.any().optional().describe('Default value'),
    })
    .passthrough()
    .describe('JSON Schema property definition');

// Top-level input schema of a tool; also passthrough so unmodeled keywords
// survive round-tripping.
const ToolInputSchema = z
    .object({
        type: z.literal('object').optional().describe('Schema type, always "object" when present'),
        properties: z.record(JsonSchemaProperty).optional().describe('Property definitions'),
        required: z.array(z.string()).optional().describe('Required property names'),
    })
    .passthrough()
    .describe('JSON Schema for tool input parameters');

// One entry in the tool list returned for a server.
const ToolInfoSchema = z
    .object({
        id: z.string().describe('Tool identifier'),
        name: z.string().describe('Tool name'),
        description: z.string().describe('Tool description'),
        inputSchema: ToolInputSchema.optional().describe('JSON Schema for tool input parameters'),
    })
    .strict()
    .describe('Tool information');

// Envelope for GET /mcp/servers/{serverId}/tools.
const ToolsListResponseSchema = z
    .object({
        tools: z.array(ToolInfoSchema).describe('Array of available tools'),
    })
    .strict()
    .describe('List of tools from MCP server');
|
||||
|
||||
// Response for DELETE /mcp/servers/{serverId}.
const DisconnectResponseSchema = z
    .object({
        status: z.literal('disconnected').describe('Disconnection status'),
        id: z.string().describe('Server identifier'),
    })
    .strict()
    .describe('Server disconnection response');

// Response for POST /mcp/servers/{serverId}/restart.
const RestartResponseSchema = z
    .object({
        status: z.literal('restarted').describe('Restart status'),
        id: z.string().describe('Server identifier'),
    })
    .strict()
    .describe('Server restart response');

// Result of a direct tool execution: on success `data` carries the tool
// output; on failure `error` carries the message.
const ToolExecutionResponseSchema = z
    .object({
        success: z.boolean().describe('Whether tool execution succeeded'),
        data: z.any().optional().describe('Tool execution result data'),
        error: z.string().optional().describe('Error message if execution failed'),
    })
    .strict()
    .describe('Tool execution response');
|
||||
|
||||
// Envelope for GET /mcp/servers/{serverId}/resources.
const ResourcesListResponseSchema = z
    .object({
        success: z.boolean().describe('Success indicator'),
        resources: z.array(ResourceSchema).describe('Array of available resources'),
    })
    .strict()
    .describe('List of resources from MCP server');

// Raw resource content; shape is resource-specific, hence z.any().
const ResourceContentSchema = z
    .object({
        content: z.any().describe('Resource content data'),
    })
    .strict()
    .describe('Resource content wrapper');

// Envelope for GET /mcp/servers/{serverId}/resources/{resourceId}/content.
const ResourceContentResponseSchema = z
    .object({
        success: z.boolean().describe('Success indicator'),
        data: ResourceContentSchema.describe('Resource content'),
    })
    .strict()
    .describe('Resource content response');
|
||||
|
||||
export function createMcpRouter(getAgent: GetAgentFn) {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const addServerRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/mcp/servers',
|
||||
summary: 'Add MCP Server',
|
||||
description: 'Connects a new MCP server dynamically',
|
||||
tags: ['mcp'],
|
||||
request: { body: { content: { 'application/json': { schema: McpServerRequestSchema } } } },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Server connected',
|
||||
content: { 'application/json': { schema: ServerStatusResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
const listServersRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/mcp/servers',
|
||||
summary: 'List MCP Servers',
|
||||
description: 'Gets a list of all connected and failed MCP servers',
|
||||
tags: ['mcp'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Servers list',
|
||||
content: { 'application/json': { schema: ServersListResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const toolsRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/mcp/servers/{serverId}/tools',
|
||||
summary: 'List Server Tools',
|
||||
description: 'Retrieves the list of tools available on a specific MCP server',
|
||||
tags: ['mcp'],
|
||||
request: {
|
||||
params: z.object({ serverId: z.string().describe('The ID of the MCP server') }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Tools list',
|
||||
content: { 'application/json': { schema: ToolsListResponseSchema } },
|
||||
},
|
||||
404: { description: 'Not found' },
|
||||
},
|
||||
});
|
||||
|
||||
const deleteServerRoute = createRoute({
|
||||
method: 'delete',
|
||||
path: '/mcp/servers/{serverId}',
|
||||
summary: 'Remove MCP Server',
|
||||
description: 'Disconnects and removes an MCP server',
|
||||
tags: ['mcp'],
|
||||
request: {
|
||||
params: z.object({ serverId: z.string().describe('The ID of the MCP server') }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Disconnected',
|
||||
content: { 'application/json': { schema: DisconnectResponseSchema } },
|
||||
},
|
||||
404: { description: 'Not found' },
|
||||
},
|
||||
});
|
||||
|
||||
const restartServerRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/mcp/servers/{serverId}/restart',
|
||||
summary: 'Restart MCP Server',
|
||||
description: 'Restarts a connected MCP server',
|
||||
tags: ['mcp'],
|
||||
request: {
|
||||
params: z.object({ serverId: z.string().describe('The ID of the MCP server') }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Server restarted',
|
||||
content: { 'application/json': { schema: RestartResponseSchema } },
|
||||
},
|
||||
404: { description: 'Not found' },
|
||||
},
|
||||
});
|
||||
|
||||
const execToolRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/mcp/servers/{serverId}/tools/{toolName}/execute',
|
||||
summary: 'Execute MCP Tool',
|
||||
description: 'Executes a tool on an MCP server directly',
|
||||
tags: ['mcp'],
|
||||
request: {
|
||||
params: z.object({
|
||||
serverId: z.string().describe('The ID of the MCP server'),
|
||||
toolName: z.string().describe('The name of the tool to execute'),
|
||||
}),
|
||||
body: { content: { 'application/json': { schema: ExecuteToolBodySchema } } },
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Tool executed',
|
||||
content: { 'application/json': { schema: ToolExecutionResponseSchema } },
|
||||
},
|
||||
404: { description: 'Not found' },
|
||||
},
|
||||
});
|
||||
|
||||
const listResourcesRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/mcp/servers/{serverId}/resources',
|
||||
summary: 'List Server Resources',
|
||||
description: 'Retrieves all resources available from a specific MCP server',
|
||||
tags: ['mcp'],
|
||||
request: {
|
||||
params: z.object({ serverId: z.string().describe('The ID of the MCP server') }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Server resources',
|
||||
content: { 'application/json': { schema: ResourcesListResponseSchema } },
|
||||
},
|
||||
404: { description: 'Not found' },
|
||||
},
|
||||
});
|
||||
|
||||
const getResourceContentRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/mcp/servers/{serverId}/resources/{resourceId}/content',
|
||||
summary: 'Read Server Resource Content',
|
||||
description:
|
||||
'Reads content from a specific resource on an MCP server. This endpoint automatically constructs the qualified URI format (mcp:serverId:resourceId)',
|
||||
tags: ['mcp'],
|
||||
request: {
|
||||
params: z.object({
|
||||
serverId: z.string().describe('The ID of the MCP server'),
|
||||
resourceId: z
|
||||
.string()
|
||||
.min(1, 'Resource ID is required')
|
||||
.transform((encoded) => decodeURIComponent(encoded))
|
||||
.describe('The URI-encoded resource identifier on that server'),
|
||||
}),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Resource content',
|
||||
content: { 'application/json': { schema: ResourceContentResponseSchema } },
|
||||
},
|
||||
404: { description: 'Not found' },
|
||||
},
|
||||
});
|
||||
|
||||
return app
|
||||
.openapi(addServerRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { name, config, persistToAgent } = ctx.req.valid('json');
|
||||
|
||||
// Add the server (connects if enabled, otherwise just registers)
|
||||
await agent.addMcpServer(name, config);
|
||||
const isConnected = config.enabled !== false;
|
||||
logger.info(
|
||||
isConnected
|
||||
? `Successfully connected to new server '${name}' via API request.`
|
||||
: `Registered server '${name}' (disabled) via API request.`
|
||||
);
|
||||
|
||||
// If persistToAgent is true, save to agent config file
|
||||
if (persistToAgent === true) {
|
||||
try {
|
||||
// Get the current effective config to read existing mcpServers
|
||||
const currentConfig = agent.getEffectiveConfig();
|
||||
|
||||
// Create update with new server added to mcpServers
|
||||
const updates = {
|
||||
mcpServers: {
|
||||
...(currentConfig.mcpServers || {}),
|
||||
[name]: config,
|
||||
},
|
||||
};
|
||||
|
||||
// Write to file (agent-management's job)
|
||||
const newConfig = await updateAgentConfigFile(
|
||||
agent.getAgentFilePath(),
|
||||
updates
|
||||
);
|
||||
|
||||
// Reload into agent (core's job - handles restart automatically)
|
||||
const reloadResult = await agent.reload(newConfig);
|
||||
if (reloadResult.restarted) {
|
||||
logger.info(
|
||||
`Agent restarted to apply changes: ${reloadResult.changesApplied.join(', ')}`
|
||||
);
|
||||
}
|
||||
logger.info(`Saved server '${name}' to agent configuration file`);
|
||||
} catch (saveError) {
|
||||
const errorMessage =
|
||||
saveError instanceof Error ? saveError.message : String(saveError);
|
||||
logger.warn(
|
||||
`Failed to save server '${name}' to agent config: ${errorMessage}`,
|
||||
{
|
||||
error: saveError,
|
||||
}
|
||||
);
|
||||
// Don't fail the request if saving fails - server is still connected
|
||||
}
|
||||
}
|
||||
|
||||
const status = isConnected ? 'connected' : 'registered';
|
||||
return ctx.json({ status, name }, 200);
|
||||
})
|
||||
.openapi(listServersRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const clientsMap = agent.getMcpClients();
|
||||
const failedConnections = agent.getMcpFailedConnections();
|
||||
const servers: z.output<typeof ServerInfoSchema>[] = [];
|
||||
for (const name of clientsMap.keys()) {
|
||||
servers.push({ id: name, name, status: 'connected' });
|
||||
}
|
||||
for (const name of Object.keys(failedConnections)) {
|
||||
servers.push({ id: name, name, status: 'error' });
|
||||
}
|
||||
return ctx.json({ servers });
|
||||
})
|
||||
.openapi(toolsRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { serverId } = ctx.req.valid('param');
|
||||
const client = agent.getMcpClients().get(serverId);
|
||||
if (!client) {
|
||||
return ctx.json({ error: `Server '${serverId}' not found` }, 404);
|
||||
}
|
||||
const toolsMap = await client.getTools();
|
||||
const tools = Object.entries(toolsMap).map(([toolName, toolDef]) => ({
|
||||
id: toolName,
|
||||
name: toolName,
|
||||
description: toolDef.description || '',
|
||||
inputSchema: toolDef.parameters,
|
||||
}));
|
||||
return ctx.json({ tools });
|
||||
})
|
||||
.openapi(deleteServerRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { serverId } = ctx.req.valid('param');
|
||||
const clientExists =
|
||||
agent.getMcpClients().has(serverId) || agent.getMcpFailedConnections()[serverId];
|
||||
if (!clientExists) {
|
||||
return ctx.json({ error: `Server '${serverId}' not found.` }, 404);
|
||||
}
|
||||
|
||||
await agent.removeMcpServer(serverId);
|
||||
return ctx.json({ status: 'disconnected', id: serverId });
|
||||
})
|
||||
.openapi(restartServerRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { serverId } = ctx.req.valid('param');
|
||||
logger.info(`Received request to POST /api/mcp/servers/${serverId}/restart`);
|
||||
|
||||
const clientExists = agent.getMcpClients().has(serverId);
|
||||
if (!clientExists) {
|
||||
logger.warn(`Attempted to restart non-existent server: ${serverId}`);
|
||||
return ctx.json({ error: `Server '${serverId}' not found.` }, 404);
|
||||
}
|
||||
|
||||
await agent.restartMcpServer(serverId);
|
||||
return ctx.json({ status: 'restarted', id: serverId });
|
||||
})
|
||||
.openapi(execToolRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { serverId, toolName } = ctx.req.valid('param');
|
||||
const body = ctx.req.valid('json');
|
||||
const client = agent.getMcpClients().get(serverId);
|
||||
if (!client) {
|
||||
return ctx.json({ success: false, error: `Server '${serverId}' not found` }, 404);
|
||||
}
|
||||
// Execute tool directly on the specified server (matches Express implementation)
|
||||
try {
|
||||
const rawResult = await client.callTool(toolName, body);
|
||||
return ctx.json({ success: true, data: rawResult });
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
logger.error(
|
||||
`Tool execution failed for '${toolName}' on server '${serverId}': ${errorMessage}`,
|
||||
{ error }
|
||||
);
|
||||
return ctx.json({ success: false, error: errorMessage }, 200);
|
||||
}
|
||||
})
|
||||
.openapi(listResourcesRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { serverId } = ctx.req.valid('param');
|
||||
const client = agent.getMcpClients().get(serverId);
|
||||
if (!client) {
|
||||
return ctx.json({ error: `Server '${serverId}' not found` }, 404);
|
||||
}
|
||||
const resources = await agent.listResourcesForServer(serverId);
|
||||
return ctx.json({ success: true, resources });
|
||||
})
|
||||
.openapi(getResourceContentRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { serverId, resourceId } = ctx.req.valid('param');
|
||||
const client = agent.getMcpClients().get(serverId);
|
||||
if (!client) {
|
||||
return ctx.json({ error: `Server '${serverId}' not found` }, 404);
|
||||
}
|
||||
const qualifiedUri = `mcp:${serverId}:${resourceId}`;
|
||||
const content = await agent.readResource(qualifiedUri);
|
||||
return ctx.json({ success: true, data: { content } });
|
||||
});
|
||||
}
|
||||
233
dexto/packages/server/src/hono/routes/memory.ts
Normal file
233
dexto/packages/server/src/hono/routes/memory.ts
Normal file
@@ -0,0 +1,233 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import { CreateMemoryInputSchema, UpdateMemoryInputSchema } from '@dexto/core';
|
||||
import { MemorySchema } from '../schemas/responses.js';
|
||||
import type { Context } from 'hono';
|
||||
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;
|
||||
|
||||
// Path parameter for /memory/{id} routes; rejects empty IDs at validation time.
const MemoryIdParamSchema = z
    .object({
        id: z.string().min(1, 'Memory ID is required').describe('Memory unique identifier'),
    })
    .describe('Path parameters for memory endpoints');
|
||||
|
||||
const ListMemoriesQuerySchema = z
|
||||
.object({
|
||||
tags: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((val) => (val ? val.split(',').map((t) => t.trim()) : undefined))
|
||||
.describe('Comma-separated list of tags to filter by'),
|
||||
source: z.enum(['user', 'system']).optional().describe('Filter by source (user or system)'),
|
||||
pinned: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((val) => (val === 'true' ? true : val === 'false' ? false : undefined))
|
||||
.describe('Filter by pinned status (true or false)'),
|
||||
limit: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((val) => (val ? parseInt(val, 10) : undefined))
|
||||
.describe('Maximum number of memories to return'),
|
||||
offset: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((val) => (val ? parseInt(val, 10) : undefined))
|
||||
.describe('Number of memories to skip'),
|
||||
})
|
||||
.describe('Query parameters for listing and filtering memories');
|
||||
|
||||
// Response schemas
// All three are `.strict()`: responses carrying unknown extra keys fail validation,
// keeping the OpenAPI contract honest.

// Envelope for a single memory (create / get / update responses).
const MemoryResponseSchema = z
    .object({
        ok: z.literal(true).describe('Indicates successful response'),
        memory: MemorySchema.describe('The created or retrieved memory'),
    })
    .strict()
    .describe('Single memory response');

// Envelope for list responses.
const MemoriesListResponseSchema = z
    .object({
        ok: z.literal(true).describe('Indicates successful response'),
        memories: z.array(MemorySchema).describe('List of memories'),
    })
    .strict()
    .describe('Multiple memories response');

// Envelope for delete confirmations (no memory payload, just a message).
const MemoryDeleteResponseSchema = z
    .object({
        ok: z.literal(true).describe('Indicates successful response'),
        message: z.string().describe('Deletion confirmation message'),
    })
    .strict()
    .describe('Memory deletion response');
|
||||
|
||||
/**
 * Builds the Hono router for memory CRUD endpoints.
 *
 * Routes:
 * - POST   /memory       create a memory
 * - GET    /memory       list memories (filterable via query params)
 * - GET    /memory/{id}  fetch a single memory
 * - PUT    /memory/{id}  partial update (only provided fields change)
 * - DELETE /memory/{id}  permanent delete
 *
 * All handlers delegate to `agent.memoryManager`; the agent itself is resolved
 * per-request through `getAgent(ctx)`.
 *
 * @param getAgent - Resolves the DextoAgent for the current request context.
 */
export function createMemoryRouter(getAgent: GetAgentFn) {
    const app = new OpenAPIHono();

    const createMemoryRoute = createRoute({
        method: 'post',
        path: '/memory',
        summary: 'Create Memory',
        description: 'Creates a new memory',
        tags: ['memory'],
        request: {
            body: {
                content: {
                    'application/json': {
                        schema: CreateMemoryInputSchema,
                    },
                },
            },
        },
        responses: {
            201: {
                description: 'Memory created',
                content: { 'application/json': { schema: MemoryResponseSchema } },
            },
        },
    });

    const listRoute = createRoute({
        method: 'get',
        path: '/memory',
        summary: 'List Memories',
        description: 'Retrieves a list of all memories with optional filtering',
        tags: ['memory'],
        request: { query: ListMemoriesQuerySchema },
        responses: {
            200: {
                description: 'List memories',
                content: { 'application/json': { schema: MemoriesListResponseSchema } },
            },
        },
    });

    const getRoute = createRoute({
        method: 'get',
        path: '/memory/{id}',
        summary: 'Get Memory by ID',
        description: 'Retrieves a specific memory by its unique identifier',
        tags: ['memory'],
        request: {
            params: MemoryIdParamSchema,
        },
        responses: {
            200: {
                description: 'Memory details',
                content: { 'application/json': { schema: MemoryResponseSchema } },
            },
        },
    });

    const updateRoute = createRoute({
        method: 'put',
        path: '/memory/{id}',
        summary: 'Update Memory',
        description: 'Updates an existing memory. Only provided fields will be updated',
        tags: ['memory'],
        request: {
            params: MemoryIdParamSchema,
            body: {
                content: {
                    'application/json': {
                        schema: UpdateMemoryInputSchema,
                    },
                },
            },
        },
        responses: {
            200: {
                description: 'Memory updated',
                content: { 'application/json': { schema: MemoryResponseSchema } },
            },
        },
    });

    const deleteRoute = createRoute({
        method: 'delete',
        path: '/memory/{id}',
        summary: 'Delete Memory',
        description: 'Permanently deletes a memory. This action cannot be undone',
        tags: ['memory'],
        request: {
            params: MemoryIdParamSchema,
        },
        responses: {
            200: {
                description: 'Memory deleted',
                content: { 'application/json': { schema: MemoryDeleteResponseSchema } },
            },
        },
    });

    return app
        .openapi(createMemoryRoute, async (ctx) => {
            const input = ctx.req.valid('json');

            // Filter out undefined values for exactOptionalPropertyTypes compatibility
            const createInput: {
                content: string;
                tags?: string[];
                metadata?: Record<string, unknown>;
            } = {
                content: input.content,
            };
            if (input.tags !== undefined && Array.isArray(input.tags)) {
                createInput.tags = input.tags;
            }
            if (input.metadata !== undefined) {
                createInput.metadata = input.metadata;
            }
            const agent = await getAgent(ctx);
            const memory = await agent.memoryManager.create(createInput);
            // 201 Created, matching the route's declared response.
            return ctx.json({ ok: true as const, memory }, 201);
        })
        .openapi(listRoute, async (ctx) => {
            const query = ctx.req.valid('query');
            // Copy only defined filters (exactOptionalPropertyTypes compatibility).
            const options: {
                tags?: string[];
                source?: 'user' | 'system';
                pinned?: boolean;
                limit?: number;
                offset?: number;
            } = {};
            if (query.tags !== undefined) options.tags = query.tags;
            if (query.source !== undefined) options.source = query.source;
            if (query.pinned !== undefined) options.pinned = query.pinned;
            if (query.limit !== undefined) options.limit = query.limit;
            if (query.offset !== undefined) options.offset = query.offset;

            const agent = await getAgent(ctx);
            const memories = await agent.memoryManager.list(options);
            return ctx.json({ ok: true as const, memories });
        })
        .openapi(getRoute, async (ctx) => {
            const { id } = ctx.req.valid('param');
            const agent = await getAgent(ctx);
            // NOTE(review): no explicit 404 path here — presumably memoryManager.get
            // throws for unknown IDs; confirm against the manager's contract.
            const memory = await agent.memoryManager.get(id);
            return ctx.json({ ok: true as const, memory });
        })
        .openapi(updateRoute, async (ctx) => {
            const { id } = ctx.req.valid('param');
            const updatesRaw = ctx.req.valid('json');
            // Build updates object only with defined properties for exactOptionalPropertyTypes
            const updates: {
                content?: string;
                metadata?: Record<string, unknown>;
                tags?: string[];
            } = {};
            if (updatesRaw.content !== undefined) updates.content = updatesRaw.content;
            if (updatesRaw.metadata !== undefined) updates.metadata = updatesRaw.metadata;
            if (updatesRaw.tags !== undefined) updates.tags = updatesRaw.tags;
            const agent = await getAgent(ctx);
            const memory = await agent.memoryManager.update(id, updates);
            return ctx.json({ ok: true as const, memory });
        })
        .openapi(deleteRoute, async (ctx) => {
            const { id } = ctx.req.valid('param');
            const agent = await getAgent(ctx);
            await agent.memoryManager.delete(id);
            return ctx.json({ ok: true as const, message: 'Memory deleted successfully' });
        });
}
|
||||
397
dexto/packages/server/src/hono/routes/messages.ts
Normal file
397
dexto/packages/server/src/hono/routes/messages.ts
Normal file
@@ -0,0 +1,397 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import { streamSSE } from 'hono/streaming';
|
||||
import type { ContentInput } from '@dexto/core';
|
||||
import { LLM_PROVIDERS } from '@dexto/core';
|
||||
import type { ApprovalCoordinator } from '../../approval/approval-coordinator.js';
|
||||
import { TokenUsageSchema } from '../schemas/responses.js';
|
||||
import type { GetAgentFn } from '../index.js';
|
||||
|
||||
// ContentPart schemas matching @dexto/core types
// TODO: The Zod-inferred types don't exactly match core's ContentInput due to
// exactOptionalPropertyTypes (Zod infers `mimeType?: string | undefined` vs core's `mimeType?: string`).
// We cast to ContentInput after validation. Fix by either:
// 1. Export Zod schemas from @dexto/core and reuse here
// 2. Use .transform() to convert to exact types
// 3. Relax exactOptionalPropertyTypes in tsconfig
const TextPartSchema = z
    .object({
        type: z.literal('text').describe('Content type identifier'),
        text: z.string().describe('Text content'),
    })
    .describe('Text content part');

const ImagePartSchema = z
    .object({
        type: z.literal('image').describe('Content type identifier'),
        image: z.string().describe('Base64-encoded image data or URL'),
        mimeType: z.string().optional().describe('MIME type (e.g., image/png)'),
    })
    .describe('Image content part');

const FilePartSchema = z
    .object({
        type: z.literal('file').describe('Content type identifier'),
        data: z.string().describe('Base64-encoded file data or URL'),
        mimeType: z.string().describe('MIME type (e.g., application/pdf)'),
        filename: z.string().optional().describe('Optional filename'),
    })
    .describe('File content part');

// Discriminated on `type`, so validation errors point at the matching variant.
const ContentPartSchema = z
    .discriminatedUnion('type', [TextPartSchema, ImagePartSchema, FilePartSchema])
    .describe('Content part - text, image, or file');

// Shared request body for /message, /message-sync and /message-stream.
const MessageBodySchema = z
    .object({
        content: z
            .union([z.string(), z.array(ContentPartSchema)])
            .describe('Message content - string for text, or ContentPart[] for multimodal'),
        sessionId: z
            .string()
            .min(1, 'Session ID is required')
            .describe('The session to use for this message'),
    })
    .describe('Request body for sending a message to the agent');

const ResetBodySchema = z
    .object({
        sessionId: z
            .string()
            .min(1, 'Session ID is required')
            .describe('The ID of the session to reset'),
    })
    .describe('Request body for resetting a conversation');
|
||||
|
||||
/**
 * Builds the Hono router for message endpoints.
 *
 * Routes:
 * - POST /message        fire-and-forget; results delivered over SSE (deprecated pattern)
 * - POST /message-sync   waits for the full agent response
 * - POST /reset          resets a session's conversation history
 * - POST /message-stream streams llm:* events as Server-Sent Events
 *
 * @param getAgent - Resolves the DextoAgent for the current request context.
 * @param approvalCoordinator - Optional; when present, approval request/response
 *   events for the session are interleaved into the /message-stream SSE output.
 */
export function createMessagesRouter(
    getAgent: GetAgentFn,
    approvalCoordinator?: ApprovalCoordinator
) {
    const app = new OpenAPIHono();

    // TODO: Deprecate this endpoint - this async pattern is problematic and should be replaced
    // with a proper job queue or streaming-only approach. Consider removing in next major version.
    // Users should use /message-sync for synchronous responses or SSE for streaming.
    const messageRoute = createRoute({
        method: 'post',
        path: '/message',
        summary: 'Send Message (async)',
        description:
            'Sends a message and returns immediately. The full response will be sent over SSE',
        tags: ['messages'],
        request: {
            body: {
                content: { 'application/json': { schema: MessageBodySchema } },
            },
        },
        responses: {
            202: {
                description: 'Message accepted for async processing; subscribe to SSE for results',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                accepted: z
                                    .literal(true)
                                    .describe('Indicates request was accepted'),
                                sessionId: z.string().describe('Session ID used for this message'),
                            })
                            .strict(),
                    },
                },
            },
            400: { description: 'Validation error' },
        },
    });
    const messageSyncRoute = createRoute({
        method: 'post',
        path: '/message-sync',
        summary: 'Send Message (sync)',
        description: 'Sends a message and waits for the full response',
        tags: ['messages'],
        request: {
            body: { content: { 'application/json': { schema: MessageBodySchema } } },
        },
        responses: {
            200: {
                description: 'Synchronous response',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                response: z.string().describe('Agent response text'),
                                sessionId: z.string().describe('Session ID used for this message'),
                                tokenUsage:
                                    TokenUsageSchema.optional().describe('Token usage statistics'),
                                reasoning: z
                                    .string()
                                    .optional()
                                    .describe('Extended thinking content from reasoning models'),
                                model: z
                                    .string()
                                    .optional()
                                    .describe('Model used for this response'),
                                provider: z.enum(LLM_PROVIDERS).optional().describe('LLM provider'),
                            })
                            .strict(),
                    },
                },
            },
            400: { description: 'Validation error' },
        },
    });

    const resetRoute = createRoute({
        method: 'post',
        path: '/reset',
        summary: 'Reset Conversation',
        description: 'Resets the conversation history for a given session',
        tags: ['messages'],
        request: {
            body: { content: { 'application/json': { schema: ResetBodySchema } } },
        },
        responses: {
            200: {
                description: 'Reset initiated',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                status: z
                                    .string()
                                    .describe('Status message indicating reset was initiated'),
                                sessionId: z.string().describe('Session ID that was reset'),
                            })
                            .strict(),
                    },
                },
            },
        },
    });

    const messageStreamRoute = createRoute({
        method: 'post',
        path: '/message-stream',
        summary: 'Stream message response',
        description:
            'Sends a message and streams the response via Server-Sent Events (SSE). Returns SSE stream directly in response. Events include llm:thinking, llm:chunk, llm:tool-call, llm:tool-result, llm:response, and llm:error. If the session is busy processing another message, returns 202 with queue information.',
        tags: ['messages'],
        request: {
            body: {
                content: { 'application/json': { schema: MessageBodySchema } },
            },
        },
        responses: {
            200: {
                description:
                    'SSE stream of agent events. Standard SSE format with event type and JSON data.',
                headers: {
                    'Content-Type': {
                        description: 'SSE content type',
                        schema: { type: 'string', example: 'text/event-stream' },
                    },
                    'Cache-Control': {
                        description: 'Disable caching for stream',
                        schema: { type: 'string', example: 'no-cache' },
                    },
                    Connection: {
                        description: 'Keep connection alive for streaming',
                        schema: { type: 'string', example: 'keep-alive' },
                    },
                    'X-Accel-Buffering': {
                        description: 'Disable nginx buffering',
                        schema: { type: 'string', example: 'no' },
                    },
                },
                content: {
                    'text/event-stream': {
                        schema: z
                            .string()
                            .describe(
                                'Server-Sent Events stream. Events: llm:thinking (start), llm:chunk (text fragments), llm:tool-call (tool execution), llm:tool-result (tool output), llm:response (final), llm:error (errors)'
                            ),
                    },
                },
            },
            202: {
                description:
                    'Session is busy processing another message. Use the queue endpoints to manage pending messages.',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                busy: z.literal(true).describe('Indicates session is busy'),
                                sessionId: z.string().describe('The session ID'),
                                queueLength: z
                                    .number()
                                    .describe('Current number of messages in queue'),
                                hint: z.string().describe('Instructions for the client'),
                            })
                            .strict(),
                    },
                },
            },
            400: { description: 'Validation error' },
        },
    });

    return app
        .openapi(messageRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            agent.logger.info('Received message via POST /api/message');
            const { content, sessionId } = ctx.req.valid('json');

            agent.logger.info(`Message for session: ${sessionId}`);

            // Fire and forget - start processing asynchronously
            // Results will be delivered via SSE
            agent.generate(content as ContentInput, sessionId).catch((error) => {
                agent.logger.error(
                    `Error in async message processing: ${error instanceof Error ? error.message : String(error)}`
                );
            });

            return ctx.json({ accepted: true, sessionId }, 202);
        })
        .openapi(messageSyncRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            agent.logger.info('Received message via POST /api/message-sync');
            const { content, sessionId } = ctx.req.valid('json');

            agent.logger.info(`Message for session: ${sessionId}`);

            // Blocks until the agent finishes the full turn.
            const result = await agent.generate(content as ContentInput, sessionId);

            // Get the session's current LLM config to include model/provider info
            const llmConfig = agent.stateManager.getLLMConfig(sessionId);

            return ctx.json({
                response: result.content,
                sessionId: result.sessionId,
                tokenUsage: result.usage,
                reasoning: result.reasoning,
                model: llmConfig.model,
                provider: llmConfig.provider,
            });
        })
        .openapi(resetRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            agent.logger.info('Received request via POST /api/reset');
            const { sessionId } = ctx.req.valid('json');
            await agent.resetConversation(sessionId);
            return ctx.json({ status: 'reset initiated', sessionId });
        })
        .openapi(messageStreamRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { content, sessionId } = ctx.req.valid('json');

            // Check if session is busy before starting stream
            const isBusy = await agent.isSessionBusy(sessionId);
            if (isBusy) {
                const queuedMessages = await agent.getQueuedMessages(sessionId);
                return ctx.json(
                    {
                        busy: true as const,
                        sessionId,
                        queueLength: queuedMessages.length,
                        hint: 'Use POST /api/queue/{sessionId} to queue this message, or wait for the current request to complete.',
                    },
                    202
                );
            }

            // Create abort controller for cleanup
            const abortController = new AbortController();
            const { signal } = abortController;

            // Start agent streaming
            const iterator = await agent.stream(content as ContentInput, sessionId, { signal });

            // Use Hono's streamSSE helper which handles backpressure correctly
            return streamSSE(ctx, async (stream) => {
                // Store pending approval events to be written to stream (only if coordinator available)
                // NOTE: pending events are only flushed when the iterator yields the next
                // event (or after it completes), so approval events can lag behind the
                // moment they were raised.
                const pendingApprovalEvents: Array<{ event: string; data: unknown }> = [];

                // Subscribe to approval events from coordinator (if available)
                if (approvalCoordinator) {
                    approvalCoordinator.onRequest(
                        (request) => {
                            // Only forward events for this stream's session.
                            if (request.sessionId === sessionId) {
                                // No transformation needed - SSE uses 'name' discriminant, payload keeps 'type'
                                pendingApprovalEvents.push({
                                    event: 'approval:request',
                                    data: request,
                                });
                            }
                        },
                        { signal }
                    );

                    approvalCoordinator.onResponse(
                        (response) => {
                            if (response.sessionId === sessionId) {
                                pendingApprovalEvents.push({
                                    event: 'approval:response',
                                    data: response,
                                });
                            }
                        },
                        { signal }
                    );
                }

                try {
                    // Stream LLM/tool events from iterator
                    for await (const event of iterator) {
                        // First, write any pending approval events
                        while (pendingApprovalEvents.length > 0) {
                            const approvalEvent = pendingApprovalEvents.shift()!;
                            await stream.writeSSE({
                                event: approvalEvent.event,
                                data: JSON.stringify(approvalEvent.data),
                            });
                        }

                        // Then write the LLM/tool event
                        // Serialize errors properly since Error objects don't JSON.stringify well
                        const eventData =
                            event.name === 'llm:error' && event.error instanceof Error
                                ? {
                                      ...event,
                                      error: {
                                          message: event.error.message,
                                          name: event.error.name,
                                          stack: event.error.stack,
                                      },
                                  }
                                : event;
                        await stream.writeSSE({
                            event: event.name,
                            data: JSON.stringify(eventData),
                        });
                    }

                    // Write any remaining approval events
                    while (pendingApprovalEvents.length > 0) {
                        const approvalEvent = pendingApprovalEvents.shift()!;
                        await stream.writeSSE({
                            event: approvalEvent.event,
                            data: JSON.stringify(approvalEvent.data),
                        });
                    }
                } catch (error) {
                    // Surface iterator failures to the client as a terminal llm:error event.
                    await stream.writeSSE({
                        event: 'llm:error',
                        data: JSON.stringify({
                            error: {
                                message: error instanceof Error ? error.message : String(error),
                            },
                            recoverable: false,
                            sessionId,
                        }),
                    });
                } finally {
                    abortController.abort(); // Cleanup subscriptions
                }
            });
        });
}
|
||||
413
dexto/packages/server/src/hono/routes/models.ts
Normal file
413
dexto/packages/server/src/hono/routes/models.ts
Normal file
@@ -0,0 +1,413 @@
|
||||
/**
|
||||
* Models Routes
|
||||
*
|
||||
* API endpoints for listing and managing local/ollama models.
|
||||
* These endpoints expose model discovery that CLI does directly via function calls.
|
||||
*/
|
||||
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import { promises as fs } from 'fs';
|
||||
import {
|
||||
getLocalModelById,
|
||||
listOllamaModels,
|
||||
DEFAULT_OLLAMA_URL,
|
||||
checkOllamaStatus,
|
||||
logger,
|
||||
} from '@dexto/core';
|
||||
import {
|
||||
getAllInstalledModels,
|
||||
getInstalledModel,
|
||||
removeInstalledModel,
|
||||
} from '@dexto/agent-management';
|
||||
|
||||
// ============================================================================
// Schemas
// ============================================================================

// An entry from the installed-models state (GGUF files on local disk).
const LocalModelSchema = z
    .object({
        id: z.string().describe('Model identifier'),
        displayName: z.string().describe('Human-readable model name'),
        filePath: z.string().describe('Absolute path to the GGUF file'),
        sizeBytes: z.number().describe('File size in bytes'),
        contextLength: z.number().optional().describe('Maximum context length in tokens'),
        source: z
            .enum(['huggingface', 'manual'])
            .optional()
            .describe('Where the model was downloaded from'),
    })
    .describe('An installed local GGUF model');

// A model reported by a running Ollama server; most fields are optional
// because availability depends on the server's response.
const OllamaModelSchema = z
    .object({
        name: z.string().describe('Ollama model name (e.g., llama3.2:latest)'),
        size: z.number().optional().describe('Model size in bytes'),
        digest: z.string().optional().describe('Model digest/hash'),
        modifiedAt: z.string().optional().describe('Last modified timestamp'),
    })
    .describe('An Ollama model');

// Request body for POST /models/local/validate.
const ValidateFileRequestSchema = z
    .object({
        filePath: z.string().min(1).describe('Absolute path to the GGUF file to validate'),
    })
    .describe('File validation request');

// Result of a file-existence/readability check; `error` is set when invalid.
const ValidateFileResponseSchema = z
    .object({
        valid: z.boolean().describe('Whether the file exists and is readable'),
        sizeBytes: z.number().optional().describe('File size in bytes if valid'),
        error: z.string().optional().describe('Error message if invalid'),
    })
    .describe('File validation response');
|
||||
|
||||
// ============================================================================
|
||||
// Route Definitions
|
||||
// ============================================================================
|
||||
|
||||
const listLocalModelsRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/models/local',
|
||||
summary: 'List Local Models',
|
||||
description:
|
||||
'Returns all installed local GGUF models from ~/.dexto/models/state.json. ' +
|
||||
'These are models downloaded from HuggingFace or manually registered.',
|
||||
tags: ['models'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'List of installed local models',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z.object({
|
||||
models: z
|
||||
.array(LocalModelSchema)
|
||||
.describe('List of installed local models'),
|
||||
}),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const listOllamaModelsRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/models/ollama',
|
||||
summary: 'List Ollama Models',
|
||||
description:
|
||||
'Returns available models from the local Ollama server. ' +
|
||||
'Returns empty list with available=false if Ollama is not running.',
|
||||
tags: ['models'],
|
||||
request: {
|
||||
query: z.object({
|
||||
baseURL: z
|
||||
.string()
|
||||
.url()
|
||||
.optional()
|
||||
.describe(`Ollama server URL (default: ${DEFAULT_OLLAMA_URL})`),
|
||||
}),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'List of Ollama models',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z.object({
|
||||
available: z.boolean().describe('Whether Ollama server is running'),
|
||||
version: z.string().optional().describe('Ollama server version'),
|
||||
models: z
|
||||
.array(OllamaModelSchema)
|
||||
.describe('List of available Ollama models'),
|
||||
error: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Error message if Ollama not available'),
|
||||
}),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const validateLocalFileRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/models/local/validate',
|
||||
summary: 'Validate GGUF File',
|
||||
description:
|
||||
'Validates that a GGUF file exists and is readable. ' +
|
||||
'Used by Web UI to validate custom file paths before saving.',
|
||||
tags: ['models'],
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: ValidateFileRequestSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Validation result',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: ValidateFileResponseSchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const DeleteModelRequestSchema = z
|
||||
.object({
|
||||
deleteFile: z
|
||||
.boolean()
|
||||
.default(true)
|
||||
.describe('Whether to also delete the GGUF file from disk'),
|
||||
})
|
||||
.describe('Delete model request options');
|
||||
|
||||
const DeleteModelResponseSchema = z
|
||||
.object({
|
||||
success: z.boolean().describe('Whether the deletion was successful'),
|
||||
modelId: z.string().describe('The deleted model ID'),
|
||||
fileDeleted: z.boolean().describe('Whether the GGUF file was deleted'),
|
||||
error: z.string().optional().describe('Error message if deletion failed'),
|
||||
})
|
||||
.describe('Delete model response');
|
||||
|
||||
/**
 * DELETE /models/local/{modelId}
 *
 * OpenAPI route definition for removing an installed local model.
 * The JSON body is optional (`required: false`), which is why the handler
 * in createModelsRouter parses it manually instead of using ctx.req.valid('json').
 */
const deleteLocalModelRoute = createRoute({
    method: 'delete',
    path: '/models/local/{modelId}',
    summary: 'Delete Installed Model',
    description:
        'Removes an installed local model from state.json. ' +
        'Optionally deletes the GGUF file from disk (default: true).',
    tags: ['models'],
    request: {
        params: z.object({
            modelId: z.string().describe('The model ID to delete'),
        }),
        body: {
            content: {
                'application/json': {
                    schema: DeleteModelRequestSchema,
                },
            },
            // Body may be omitted entirely; defaults then apply (deleteFile: true).
            required: false,
        },
    },
    responses: {
        200: {
            description: 'Model deleted successfully',
            content: {
                'application/json': {
                    schema: DeleteModelResponseSchema,
                },
            },
        },
        // 404 reuses the same schema with success=false and an error message.
        404: {
            description: 'Model not found',
            content: {
                'application/json': {
                    schema: DeleteModelResponseSchema,
                },
            },
        },
    },
});
|
||||
|
||||
// ============================================================================
|
||||
// Router
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Build the Hono sub-router for local model management.
 *
 * Registers handlers for:
 *  - listing installed local models (enriched from the local model registry),
 *  - listing models from a running Ollama server,
 *  - validating a GGUF file path,
 *  - deleting an installed model (optionally removing its file from disk).
 *
 * Takes no dependencies; helpers (getAllInstalledModels, checkOllamaStatus, ...)
 * are imported at module level.
 */
export function createModelsRouter() {
    const app = new OpenAPIHono();

    return app
        .openapi(listLocalModelsRoute, async (ctx) => {
            const installedModels = await getAllInstalledModels();

            const models = installedModels.map((model) => {
                // Get display name from registry if available
                const registryInfo = getLocalModelById(model.id);

                return {
                    id: model.id,
                    // Fall back to the raw id when the registry has no entry.
                    displayName: registryInfo?.name || model.id,
                    filePath: model.filePath,
                    sizeBytes: model.sizeBytes,
                    contextLength: registryInfo?.contextLength,
                    source: model.source,
                };
            });

            return ctx.json({ models });
        })
        .openapi(listOllamaModelsRoute, async (ctx) => {
            const { baseURL } = ctx.req.valid('query');
            // Query param overrides the default server URL.
            const ollamaURL = baseURL || DEFAULT_OLLAMA_URL;

            try {
                // Check if Ollama is running
                const status = await checkOllamaStatus(ollamaURL);

                if (!status.running) {
                    // Not an error at the HTTP level: respond 200 with available=false.
                    return ctx.json({
                        available: false,
                        models: [],
                        error: 'Ollama server is not running',
                    });
                }

                // List available models
                const ollamaModels = await listOllamaModels(ollamaURL);

                return ctx.json({
                    available: true,
                    version: status.version,
                    models: ollamaModels.map((m) => ({
                        name: m.name,
                        size: m.size,
                        digest: m.digest,
                        modifiedAt: m.modifiedAt,
                    })),
                });
            } catch (error) {
                // Connection/parse failures are reported in-band, not as a 5xx.
                return ctx.json({
                    available: false,
                    models: [],
                    error:
                        error instanceof Error
                            ? error.message
                            : 'Failed to connect to Ollama server',
                });
            }
        })
        .openapi(validateLocalFileRoute, async (ctx) => {
            const { filePath } = ctx.req.valid('json');

            // Security: Basic path validation
            // Prevent path traversal attacks by ensuring absolute path
            // NOTE(review): this check assumes POSIX paths; Windows absolute
            // paths (e.g. C:\...) would be rejected — confirm intended platforms.
            if (!filePath.startsWith('/')) {
                return ctx.json({
                    valid: false,
                    error: 'File path must be absolute (start with /)',
                });
            }

            // Validate file extension
            if (!filePath.endsWith('.gguf')) {
                return ctx.json({
                    valid: false,
                    error: 'File must have .gguf extension',
                });
            }

            try {
                const stats = await fs.stat(filePath);

                if (!stats.isFile()) {
                    return ctx.json({
                        valid: false,
                        error: 'Path is not a file',
                    });
                }

                // Check file is readable
                await fs.access(filePath, fs.constants.R_OK);

                return ctx.json({
                    valid: true,
                    sizeBytes: stats.size,
                });
            } catch (error) {
                // Map the common filesystem error codes to friendly messages.
                const nodeError = error as NodeJS.ErrnoException;

                if (nodeError.code === 'ENOENT') {
                    return ctx.json({
                        valid: false,
                        error: 'File not found',
                    });
                }

                if (nodeError.code === 'EACCES') {
                    return ctx.json({
                        valid: false,
                        error: 'File is not readable (permission denied)',
                    });
                }

                return ctx.json({
                    valid: false,
                    error: error instanceof Error ? error.message : 'Failed to access file',
                });
            }
        })
        .openapi(deleteLocalModelRoute, async (ctx) => {
            const { modelId } = ctx.req.valid('param');

            // Get body if provided, default to deleteFile: true
            // (manual parse because the route declares the body as optional)
            let deleteFile = true;
            try {
                const body = await ctx.req.json();
                if (body && typeof body.deleteFile === 'boolean') {
                    deleteFile = body.deleteFile;
                }
            } catch {
                // No body or invalid JSON - use default (deleteFile: true)
            }

            // Get the model info first (need filePath for deletion)
            const model = await getInstalledModel(modelId);
            if (!model) {
                return ctx.json(
                    {
                        success: false,
                        modelId,
                        fileDeleted: false,
                        error: `Model '${modelId}' not found`,
                    },
                    404
                );
            }

            const filePath = model.filePath;
            let fileDeleted = false;

            // Delete the GGUF file if requested
            if (deleteFile && filePath) {
                try {
                    await fs.unlink(filePath);
                    fileDeleted = true;
                } catch (error) {
                    const nodeError = error as NodeJS.ErrnoException;
                    // File already deleted or doesn't exist - that's fine
                    if (nodeError.code === 'ENOENT') {
                        fileDeleted = true; // Consider it deleted
                    } else {
                        // Permission error or other issue - report but continue
                        logger.warn(
                            `Failed to delete GGUF file ${filePath}: ${error instanceof Error ? error.message : String(error)}`
                        );
                    }
                }
            }

            // Remove from state.json
            const removed = await removeInstalledModel(modelId);
            if (!removed) {
                return ctx.json({
                    success: false,
                    modelId,
                    fileDeleted,
                    error: 'Failed to remove model from state',
                });
            }

            return ctx.json({
                success: true,
                modelId,
                fileDeleted,
            });
        });
}
|
||||
172
dexto/packages/server/src/hono/routes/openrouter.ts
Normal file
172
dexto/packages/server/src/hono/routes/openrouter.ts
Normal file
@@ -0,0 +1,172 @@
|
||||
/**
|
||||
* OpenRouter Validation Routes
|
||||
*
|
||||
* Standalone routes for validating OpenRouter model IDs against the registry.
|
||||
* Decoupled from agent runtime - can be used independently.
|
||||
*/
|
||||
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import {
|
||||
logger,
|
||||
lookupOpenRouterModel,
|
||||
refreshOpenRouterModelCache,
|
||||
getOpenRouterModelInfo,
|
||||
} from '@dexto/core';
|
||||
|
||||
const ValidateModelParamsSchema = z
|
||||
.object({
|
||||
modelId: z
|
||||
.string()
|
||||
.min(1)
|
||||
.describe('OpenRouter model ID to validate (e.g., anthropic/claude-3.5-sonnet)'),
|
||||
})
|
||||
.describe('Path parameters for model validation');
|
||||
|
||||
// Response schema for the OpenRouter validation endpoint.
// `status` distinguishes a definitive miss ('invalid') from an inconclusive
// check ('unknown', e.g. registry cache could not be refreshed); `info` is
// present only when the model is valid and registry info is available.
const ValidateModelResponseSchema = z
    .object({
        valid: z.boolean().describe('Whether the model ID is valid'),
        modelId: z.string().describe('The model ID that was validated'),
        status: z
            .enum(['valid', 'invalid', 'unknown'])
            .describe('Validation status: valid, invalid, or unknown (cache empty)'),
        error: z.string().optional().describe('Error message if invalid'),
        info: z
            .object({
                contextLength: z.number().describe('Model context length in tokens'),
            })
            .optional()
            .describe('Model information if valid'),
    })
    .describe('Model validation response');
|
||||
|
||||
/**
 * Create OpenRouter validation router.
 * No agent dependency - purely utility routes.
 *
 * Exposes two endpoints:
 *  - GET  /openrouter/validate/{modelId}: validates a model ID against the
 *    cached registry, refreshing the cache once if the result is 'unknown'.
 *  - POST /openrouter/refresh-cache: forces a registry cache refresh.
 */
export function createOpenRouterRouter() {
    const app = new OpenAPIHono();

    // Route definition for model-ID validation.
    const validateRoute = createRoute({
        method: 'get',
        path: '/openrouter/validate/{modelId}',
        summary: 'Validate OpenRouter Model',
        description:
            'Validates an OpenRouter model ID against the cached model registry. Refreshes cache if stale.',
        tags: ['openrouter'],
        request: {
            params: ValidateModelParamsSchema,
        },
        responses: {
            200: {
                description: 'Validation result',
                content: {
                    'application/json': {
                        schema: ValidateModelResponseSchema,
                    },
                },
            },
        },
    });

    // Route definition for forcing a cache refresh.
    const refreshRoute = createRoute({
        method: 'post',
        path: '/openrouter/refresh-cache',
        summary: 'Refresh OpenRouter Model Cache',
        description: 'Forces a refresh of the OpenRouter model registry cache from the API.',
        tags: ['openrouter'],
        responses: {
            200: {
                description: 'Cache refreshed successfully',
                content: {
                    'application/json': {
                        schema: z.object({
                            ok: z.literal(true).describe('Success indicator'),
                            message: z.string().describe('Status message'),
                        }),
                    },
                },
            },
            500: {
                description: 'Cache refresh failed',
                content: {
                    'application/json': {
                        schema: z.object({
                            ok: z.literal(false).describe('Failure indicator'),
                            message: z.string().describe('Error message'),
                        }),
                    },
                },
            },
        },
    });

    return app
        .openapi(validateRoute, async (ctx) => {
            const { modelId: encodedModelId } = ctx.req.valid('param');
            // Decode URL-encoded model ID to handle slashes (e.g., anthropic/claude-3.5-sonnet)
            // NOTE(review): Hono decodes path params once already (see prompts router
            // comment); this second decode supports double-encoded IDs but would throw
            // a URIError on a model ID containing a literal '%' — confirm intent.
            const modelId = decodeURIComponent(encodedModelId);

            // First lookup against current cache
            let status = lookupOpenRouterModel(modelId);

            // If unknown (cache empty/stale), try refreshing
            if (status === 'unknown') {
                try {
                    await refreshOpenRouterModelCache();
                    status = lookupOpenRouterModel(modelId);
                } catch (error) {
                    // Network failed - return unknown status
                    logger.warn(
                        `OpenRouter cache refresh failed during validation: ${error instanceof Error ? error.message : String(error)}`
                    );
                    return ctx.json({
                        valid: false,
                        modelId,
                        status: 'unknown' as const,
                        error: 'Could not validate model - cache refresh failed',
                    });
                }
            }

            if (status === 'invalid') {
                return ctx.json({
                    valid: false,
                    modelId,
                    status: 'invalid' as const,
                    error: `Model '${modelId}' not found in OpenRouter. Check the model ID at https://openrouter.ai/models`,
                });
            }

            // Valid - include model info
            // `info` is spread conditionally so the key is absent (not undefined) when missing.
            const info = getOpenRouterModelInfo(modelId);
            return ctx.json({
                valid: true,
                modelId,
                status: 'valid' as const,
                ...(info && { info: { contextLength: info.contextLength } }),
            });
        })
        .openapi(refreshRoute, async (ctx) => {
            try {
                await refreshOpenRouterModelCache();
                return ctx.json(
                    {
                        ok: true as const,
                        message: 'OpenRouter model cache refreshed successfully',
                    },
                    200
                );
            } catch (error) {
                // Refresh failures are surfaced as a 500 with a generic message;
                // the detailed cause goes to the server log only.
                logger.error(
                    `Failed to refresh OpenRouter cache: ${error instanceof Error ? error.message : String(error)}`
                );
                return ctx.json(
                    {
                        ok: false as const,
                        message: 'Failed to refresh OpenRouter model cache',
                    },
                    500
                );
            }
        });
}
|
||||
294
dexto/packages/server/src/hono/routes/prompts.ts
Normal file
294
dexto/packages/server/src/hono/routes/prompts.ts
Normal file
@@ -0,0 +1,294 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import { PromptError } from '@dexto/core';
|
||||
import { PromptInfoSchema, PromptDefinitionSchema } from '../schemas/responses.js';
|
||||
import type { Context } from 'hono';
|
||||
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;
|
||||
|
||||
// Request body for POST /prompts/custom.
// `.strict()` at every object level rejects unknown keys so malformed clients
// fail fast. `content` may reference arguments via {{argumentName}} placeholders,
// and a single base64 resource can optionally be attached.
const CustomPromptRequestSchema = z
    .object({
        name: z
            .string()
            .min(1, 'Prompt name is required')
            .describe('Unique name for the custom prompt'),
        title: z.string().optional().describe('Display title for the prompt'),
        description: z.string().optional().describe('Description of what the prompt does'),
        content: z
            .string()
            .min(1, 'Prompt content is required')
            .describe('The prompt content text (can include {{argumentName}} placeholders)'),
        arguments: z
            .array(
                z
                    .object({
                        name: z
                            .string()
                            .min(1, 'Argument name is required')
                            .describe('Argument name'),
                        description: z.string().optional().describe('Argument description'),
                        required: z
                            .boolean()
                            .optional()
                            .describe('Whether the argument is required'),
                    })
                    .strict()
            )
            .optional()
            .describe('Array of argument definitions'),
        resource: z
            .object({
                data: z
                    .string()
                    .min(1, 'Resource data is required')
                    .describe('Base64-encoded resource data'),
                mimeType: z
                    .string()
                    .min(1, 'Resource MIME type is required')
                    .describe('MIME type of the resource (e.g., text/plain, application/pdf)'),
                filename: z.string().optional().describe('Resource filename'),
            })
            .strict()
            .optional()
            .describe('Attach a resource to this prompt'),
    })
    .strict()
    .describe('Request body for creating a custom prompt with optional resource attachment');
|
||||
|
||||
const PromptNameParamSchema = z
|
||||
.object({
|
||||
name: z.string().min(1, 'Prompt name is required').describe('The prompt name'),
|
||||
})
|
||||
.describe('Path parameters for prompt endpoints');
|
||||
|
||||
const ResolvePromptQuerySchema = z
|
||||
.object({
|
||||
context: z.string().optional().describe('Additional context for prompt resolution'),
|
||||
args: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Arguments to substitute in the prompt template (pass as a JSON string)'),
|
||||
})
|
||||
.describe('Query parameters for resolving prompt templates');
|
||||
|
||||
/**
 * Build the Hono sub-router for prompt management.
 *
 * Endpoints: list all prompts, create/delete custom prompts, fetch a prompt
 * definition, and resolve a prompt template against arguments.
 *
 * @param getAgent Resolves the DextoAgent for the current request context
 *                 (may be async, e.g. per-session agent lookup).
 */
export function createPromptsRouter(getAgent: GetAgentFn) {
    const app = new OpenAPIHono();

    // GET /prompts — list built-in and custom prompts.
    const listRoute = createRoute({
        method: 'get',
        path: '/prompts',
        summary: 'List Prompts',
        description: 'Retrieves all available prompts, including both built-in and custom prompts',
        tags: ['prompts'],
        responses: {
            200: {
                description: 'List all prompts',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                prompts: z
                                    .array(PromptInfoSchema)
                                    .describe('Array of available prompts'),
                            })
                            .strict()
                            .describe('Prompts list response'),
                    },
                },
            },
        },
    });

    // POST /prompts/custom — create a custom prompt (optionally with a resource).
    const createCustomRoute = createRoute({
        method: 'post',
        path: '/prompts/custom',
        summary: 'Create Custom Prompt',
        description:
            'Creates a new custom prompt with optional resource attachment. Maximum request size: 10MB',
        tags: ['prompts'],
        request: {
            body: {
                content: {
                    'application/json': {
                        schema: CustomPromptRequestSchema,
                    },
                },
            },
        },
        responses: {
            201: {
                description: 'Custom prompt created',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                prompt: PromptInfoSchema.describe('Created prompt information'),
                            })
                            .strict()
                            .describe('Create prompt response'),
                    },
                },
            },
        },
    });

    // DELETE /prompts/custom/{name} — remove a custom prompt.
    const deleteCustomRoute = createRoute({
        method: 'delete',
        path: '/prompts/custom/{name}',
        summary: 'Delete Custom Prompt',
        description: 'Permanently deletes a custom prompt. Built-in prompts cannot be deleted',
        tags: ['prompts'],
        request: {
            params: z.object({
                name: z.string().min(1, 'Prompt name is required').describe('The prompt name'),
            }),
        },
        responses: {
            204: { description: 'Prompt deleted' },
        },
    });

    // GET /prompts/{name} — fetch a prompt's definition.
    const getPromptRoute = createRoute({
        method: 'get',
        path: '/prompts/{name}',
        summary: 'Get Prompt Definition',
        description: 'Fetches the definition for a specific prompt',
        tags: ['prompts'],
        request: {
            params: PromptNameParamSchema,
        },
        responses: {
            200: {
                description: 'Prompt definition',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                definition: PromptDefinitionSchema.describe('Prompt definition'),
                            })
                            .strict()
                            .describe('Get prompt definition response'),
                    },
                },
            },
            404: { description: 'Prompt not found' },
        },
    });

    // GET /prompts/{name}/resolve — expand a template into final text + resources.
    const resolvePromptRoute = createRoute({
        method: 'get',
        path: '/prompts/{name}/resolve',
        summary: 'Resolve Prompt',
        description:
            'Resolves a prompt template with provided arguments and returns the final text with resources',
        tags: ['prompts'],
        request: {
            params: PromptNameParamSchema,
            query: ResolvePromptQuerySchema,
        },
        responses: {
            200: {
                description: 'Resolved prompt content',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                text: z.string().describe('Resolved prompt text'),
                                resources: z
                                    .array(z.string())
                                    .describe('Array of resource identifiers'),
                            })
                            .strict()
                            .describe('Resolve prompt response'),
                    },
                },
            },
            404: { description: 'Prompt not found' },
        },
    });

    return app
        .openapi(listRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const prompts = await agent.listPrompts();
            // listPrompts returns a keyed map; clients get a flat array.
            const list = Object.values(prompts);
            return ctx.json({ prompts: list });
        })
        .openapi(createCustomRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const payload = ctx.req.valid('json');
            // Strip undefined optional fields so the core payload has no
            // explicitly-undefined keys (exactOptionalPropertyTypes friendly).
            // NOTE(review): .filter(Boolean) is a no-op here — map always
            // yields truthy object literals; likely vestigial.
            const promptArguments = payload.arguments
                ?.map((arg) => ({
                    name: arg.name,
                    ...(arg.description ? { description: arg.description } : {}),
                    ...(typeof arg.required === 'boolean' ? { required: arg.required } : {}),
                }))
                .filter(Boolean);

            const createPayload = {
                name: payload.name,
                content: payload.content,
                ...(payload.title ? { title: payload.title } : {}),
                ...(payload.description ? { description: payload.description } : {}),
                ...(promptArguments && promptArguments.length > 0
                    ? { arguments: promptArguments }
                    : {}),
                ...(payload.resource
                    ? {
                          resource: {
                              data: payload.resource.data,
                              mimeType: payload.resource.mimeType,
                              ...(payload.resource.filename
                                  ? { filename: payload.resource.filename }
                                  : {}),
                          },
                      }
                    : {}),
            };
            const prompt = await agent.createCustomPrompt(createPayload);
            return ctx.json({ prompt }, 201);
        })
        .openapi(deleteCustomRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { name } = ctx.req.valid('param');
            // Hono automatically decodes path parameters, no manual decode needed
            await agent.deleteCustomPrompt(name);
            return ctx.body(null, 204);
        })
        .openapi(getPromptRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { name } = ctx.req.valid('param');
            const definition = await agent.getPromptDefinition(name);
            // Thrown PromptError is mapped to a 404 by the app's error handling.
            if (!definition) throw PromptError.notFound(name);
            return ctx.json({ definition });
        })
        .openapi(resolvePromptRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { name } = ctx.req.valid('param');
            const { context, args: argsString } = ctx.req.valid('query');

            // Optional structured args in `args` query param as JSON
            let parsedArgs: Record<string, unknown> | undefined;
            if (argsString) {
                try {
                    const parsed = JSON.parse(argsString);
                    if (parsed && typeof parsed === 'object') {
                        parsedArgs = parsed as Record<string, unknown>;
                    }
                } catch {
                    // Ignore malformed args JSON; continue with whatever we have
                }
            }

            // Build options object with only defined values
            const options: {
                context?: string;
                args?: Record<string, unknown>;
            } = {};
            if (context !== undefined) options.context = context;
            if (parsedArgs !== undefined) options.args = parsedArgs;

            // Use DextoAgent's resolvePrompt method
            const result = await agent.resolvePrompt(name, options);
            return ctx.json({ text: result.text, resources: result.resources });
        });
}
|
||||
238
dexto/packages/server/src/hono/routes/queue.ts
Normal file
238
dexto/packages/server/src/hono/routes/queue.ts
Normal file
@@ -0,0 +1,238 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { DextoAgent, ContentPart } from '@dexto/core';
|
||||
import { ContentPartSchema } from '../schemas/responses.js';
|
||||
import type { Context } from 'hono';
|
||||
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;
|
||||
|
||||
// Schema for queued message in responses
|
||||
const QueuedMessageSchema = z
|
||||
.object({
|
||||
id: z.string().describe('Unique identifier for the queued message'),
|
||||
content: z.array(ContentPartSchema).describe('Message content parts'),
|
||||
queuedAt: z.number().describe('Unix timestamp when message was queued'),
|
||||
metadata: z.record(z.unknown()).optional().describe('Optional metadata'),
|
||||
})
|
||||
.strict()
|
||||
.describe('A message waiting in the queue');
|
||||
|
||||
// ContentPart schemas matching @dexto/core types
|
||||
// TODO: Same as messages.ts - Zod-inferred types don't exactly match core's ContentInput
|
||||
// due to exactOptionalPropertyTypes. We cast to ContentPart after validation.
|
||||
const TextPartSchema = z
|
||||
.object({
|
||||
type: z.literal('text').describe('Content type identifier'),
|
||||
text: z.string().describe('Text content'),
|
||||
})
|
||||
.describe('Text content part');
|
||||
|
||||
const ImagePartSchema = z
|
||||
.object({
|
||||
type: z.literal('image').describe('Content type identifier'),
|
||||
image: z.string().describe('Base64-encoded image data or URL'),
|
||||
mimeType: z.string().optional().describe('MIME type (e.g., image/png)'),
|
||||
})
|
||||
.describe('Image content part');
|
||||
|
||||
const FilePartSchema = z
|
||||
.object({
|
||||
type: z.literal('file').describe('Content type identifier'),
|
||||
data: z.string().describe('Base64-encoded file data or URL'),
|
||||
mimeType: z.string().describe('MIME type (e.g., application/pdf)'),
|
||||
filename: z.string().optional().describe('Optional filename'),
|
||||
})
|
||||
.describe('File content part');
|
||||
|
||||
const QueueContentPartSchema = z
|
||||
.discriminatedUnion('type', [TextPartSchema, ImagePartSchema, FilePartSchema])
|
||||
.describe('Content part - text, image, or file');
|
||||
|
||||
// Schema for queue message request body - matches messages.ts MessageBodySchema
|
||||
const QueueMessageBodySchema = z
|
||||
.object({
|
||||
content: z
|
||||
.union([z.string(), z.array(QueueContentPartSchema)])
|
||||
.describe('Message content - string for text, or ContentPart[] for multimodal'),
|
||||
})
|
||||
.describe('Request body for queueing a message');
|
||||
|
||||
/**
 * Build the Hono sub-router for the per-session message queue.
 *
 * Endpoints: list queued messages, enqueue a message, remove a single queued
 * message, and clear the whole queue for a session.
 *
 * @param getAgent Resolves the DextoAgent for the current request context.
 */
export function createQueueRouter(getAgent: GetAgentFn) {
    const app = new OpenAPIHono();

    // GET /queue/:sessionId - Get all queued messages
    const getQueueRoute = createRoute({
        method: 'get',
        path: '/queue/{sessionId}',
        summary: 'Get queued messages',
        description: 'Returns all messages waiting in the queue for a session',
        tags: ['queue'],
        request: {
            params: z.object({
                sessionId: z.string().min(1).describe('Session ID'),
            }),
        },
        responses: {
            200: {
                description: 'List of queued messages',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                messages: z.array(QueuedMessageSchema).describe('Queued messages'),
                                count: z.number().describe('Number of messages in queue'),
                            })
                            .strict(),
                    },
                },
            },
            404: { description: 'Session not found' },
        },
    });

    // POST /queue/:sessionId - Queue a new message
    const queueMessageRoute = createRoute({
        method: 'post',
        path: '/queue/{sessionId}',
        summary: 'Queue a message',
        description:
            'Adds a message to the queue for processing when the session is no longer busy',
        tags: ['queue'],
        request: {
            params: z.object({
                sessionId: z.string().min(1).describe('Session ID'),
            }),
            body: {
                content: { 'application/json': { schema: QueueMessageBodySchema } },
            },
        },
        responses: {
            201: {
                description: 'Message queued successfully',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                queued: z.literal(true).describe('Indicates message was queued'),
                                id: z.string().describe('ID of the queued message'),
                                position: z.number().describe('Position in the queue (1-based)'),
                            })
                            .strict(),
                    },
                },
            },
            404: { description: 'Session not found' },
        },
    });

    // DELETE /queue/:sessionId/:messageId - Remove a specific queued message
    const removeQueuedMessageRoute = createRoute({
        method: 'delete',
        path: '/queue/{sessionId}/{messageId}',
        summary: 'Remove queued message',
        description: 'Removes a specific message from the queue',
        tags: ['queue'],
        request: {
            params: z.object({
                sessionId: z.string().min(1).describe('Session ID'),
                messageId: z.string().min(1).describe('ID of the queued message to remove'),
            }),
        },
        responses: {
            200: {
                description: 'Message removed successfully',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                removed: z.literal(true).describe('Indicates message was removed'),
                                id: z.string().describe('ID of the removed message'),
                            })
                            .strict(),
                    },
                },
            },
            // NOTE(review): the 404 handler below returns a JSON error body,
            // but no content schema is declared here — consider documenting it.
            404: { description: 'Session or message not found' },
        },
    });

    // DELETE /queue/:sessionId - Clear all queued messages
    const clearQueueRoute = createRoute({
        method: 'delete',
        path: '/queue/{sessionId}',
        summary: 'Clear message queue',
        description: 'Removes all messages from the queue for a session',
        tags: ['queue'],
        request: {
            params: z.object({
                sessionId: z.string().min(1).describe('Session ID'),
            }),
        },
        responses: {
            200: {
                description: 'Queue cleared successfully',
                content: {
                    'application/json': {
                        schema: z
                            .object({
                                cleared: z.literal(true).describe('Indicates queue was cleared'),
                                count: z.number().describe('Number of messages that were removed'),
                            })
                            .strict(),
                    },
                },
            },
            404: { description: 'Session not found' },
        },
    });

    return app
        .openapi(getQueueRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { sessionId } = ctx.req.valid('param');

            const messages = await agent.getQueuedMessages(sessionId);
            return ctx.json({
                messages,
                count: messages.length,
            });
        })
        .openapi(queueMessageRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { sessionId } = ctx.req.valid('param');
            const { content: rawContent } = ctx.req.valid('json');

            // Normalize content to array format and cast to ContentPart[]
            // (same exactOptionalPropertyTypes issue as messages.ts - see TODO there)
            const content = (
                typeof rawContent === 'string'
                    ? [{ type: 'text' as const, text: rawContent }]
                    : rawContent
            ) as ContentPart[];

            const result = await agent.queueMessage(sessionId, { content });
            return ctx.json(
                {
                    queued: result.queued,
                    id: result.id,
                    position: result.position,
                },
                201
            );
        })
        .openapi(removeQueuedMessageRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { sessionId, messageId } = ctx.req.valid('param');

            const removed = await agent.removeQueuedMessage(sessionId, messageId);
            if (!removed) {
                // Message not in queue (or already processed).
                return ctx.json({ error: 'Message not found in queue' }, 404);
            }
            return ctx.json({ removed: true, id: messageId });
        })
        .openapi(clearQueueRoute, async (ctx) => {
            const agent = await getAgent(ctx);
            const { sessionId } = ctx.req.valid('param');

            // Returns how many messages were dropped.
            const count = await agent.clearMessageQueue(sessionId);
            return ctx.json({ cleared: true, count });
        });
}
|
||||
128
dexto/packages/server/src/hono/routes/resources.ts
Normal file
128
dexto/packages/server/src/hono/routes/resources.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import { ResourceSchema } from '../schemas/responses.js';
|
||||
import type { GetAgentFn } from '../index.js';
|
||||
|
||||
const ResourceIdParamSchema = z
|
||||
.object({
|
||||
resourceId: z
|
||||
.string()
|
||||
.min(1, 'Resource ID is required')
|
||||
.transform((encoded) => decodeURIComponent(encoded))
|
||||
.describe('The URI-encoded resource identifier'),
|
||||
})
|
||||
.describe('Path parameters for resource endpoints');
|
||||
|
||||
// Response schemas for resources endpoints
|
||||
|
||||
const ListResourcesResponseSchema = z
|
||||
.object({
|
||||
ok: z.literal(true).describe('Indicates successful response'),
|
||||
resources: z
|
||||
.array(ResourceSchema)
|
||||
.describe('Array of all available resources from all sources'),
|
||||
})
|
||||
.strict()
|
||||
.describe('List of all resources');
|
||||
|
||||
const ResourceContentItemSchema = z
|
||||
.object({
|
||||
uri: z.string().describe('Resource URI'),
|
||||
mimeType: z.string().optional().describe('MIME type of the content'),
|
||||
text: z.string().optional().describe('Text content (for text resources)'),
|
||||
blob: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Base64-encoded binary content (for binary resources)'),
|
||||
})
|
||||
.strict()
|
||||
.describe('Resource content item');
|
||||
|
||||
const ReadResourceResponseSchema = z
|
||||
.object({
|
||||
ok: z.literal(true).describe('Indicates successful response'),
|
||||
content: z
|
||||
.object({
|
||||
contents: z
|
||||
.array(ResourceContentItemSchema)
|
||||
.describe('Array of content items (typically one item)'),
|
||||
_meta: z
|
||||
.record(z.any())
|
||||
.optional()
|
||||
.describe('Optional metadata about the resource'),
|
||||
})
|
||||
.strict()
|
||||
.describe('Resource content from MCP ReadResourceResult'),
|
||||
})
|
||||
.strict()
|
||||
.describe('Resource content response');
|
||||
|
||||
export function createResourcesRouter(getAgent: GetAgentFn) {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const listRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/resources',
|
||||
summary: 'List All Resources',
|
||||
description:
|
||||
'Retrieves a list of all available resources from all sources (MCP servers and internal providers)',
|
||||
tags: ['resources'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'List all resources',
|
||||
content: { 'application/json': { schema: ListResourcesResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const getContentRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/resources/{resourceId}/content',
|
||||
summary: 'Read Resource Content',
|
||||
description:
|
||||
'Reads the content of a specific resource by its URI. The resource ID in the URL must be URI-encoded',
|
||||
tags: ['resources'],
|
||||
request: {
|
||||
params: ResourceIdParamSchema,
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Resource content',
|
||||
content: { 'application/json': { schema: ReadResourceResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const headRoute = createRoute({
|
||||
method: 'head',
|
||||
path: '/resources/{resourceId}',
|
||||
summary: 'Check Resource Exists',
|
||||
description: 'Checks if a resource exists by its URI without retrieving its content',
|
||||
tags: ['resources'],
|
||||
request: {
|
||||
params: ResourceIdParamSchema,
|
||||
},
|
||||
responses: {
|
||||
200: { description: 'Resource exists' },
|
||||
404: { description: 'Resource not found' },
|
||||
},
|
||||
});
|
||||
|
||||
return app
|
||||
.openapi(listRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const resources = await agent.listResources();
|
||||
return ctx.json({ ok: true, resources: Object.values(resources) });
|
||||
})
|
||||
.openapi(getContentRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { resourceId } = ctx.req.valid('param');
|
||||
const content = await agent.readResource(resourceId);
|
||||
return ctx.json({ ok: true, content });
|
||||
})
|
||||
.openapi(headRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { resourceId } = ctx.req.valid('param');
|
||||
const exists = await agent.hasResource(resourceId);
|
||||
return ctx.body(null, exists ? 200 : 404);
|
||||
});
|
||||
}
|
||||
87
dexto/packages/server/src/hono/routes/search.ts
Normal file
87
dexto/packages/server/src/hono/routes/search.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import { MessageSearchResponseSchema, SessionSearchResponseSchema } from '../schemas/responses.js';
|
||||
import type { Context } from 'hono';
|
||||
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;
|
||||
|
||||
const MessageSearchQuery = z.object({
|
||||
q: z.string().min(1, 'Search query is required').describe('Search query string'),
|
||||
limit: z.coerce
|
||||
.number()
|
||||
.min(1)
|
||||
.max(100)
|
||||
.optional()
|
||||
.describe('Maximum number of results to return (default: 20)'),
|
||||
offset: z.coerce
|
||||
.number()
|
||||
.min(0)
|
||||
.optional()
|
||||
.describe('Number of results to skip for pagination (default: 0)'),
|
||||
sessionId: z.string().optional().describe('Limit search to a specific session'),
|
||||
role: z
|
||||
.enum(['user', 'assistant', 'system', 'tool'])
|
||||
.optional()
|
||||
.describe('Filter by message role'),
|
||||
});
|
||||
|
||||
const SessionSearchQuery = z.object({
|
||||
q: z.string().min(1, 'Search query is required').describe('Search query string'),
|
||||
});
|
||||
|
||||
export function createSearchRouter(getAgent: GetAgentFn) {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const messagesRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/search/messages',
|
||||
summary: 'Search Messages',
|
||||
description: 'Searches for messages across all sessions or within a specific session',
|
||||
tags: ['search'],
|
||||
request: { query: MessageSearchQuery },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Message search results',
|
||||
content: { 'application/json': { schema: MessageSearchResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const sessionsRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/search/sessions',
|
||||
summary: 'Search Sessions',
|
||||
description: 'Searches for sessions that contain the specified query',
|
||||
tags: ['search'],
|
||||
request: { query: SessionSearchQuery },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Session search results',
|
||||
content: { 'application/json': { schema: SessionSearchResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return app
|
||||
.openapi(messagesRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { q, limit, offset, sessionId, role } = ctx.req.valid('query');
|
||||
const options = {
|
||||
limit: limit || 20,
|
||||
offset: offset || 0,
|
||||
...(sessionId && { sessionId }),
|
||||
...(role && { role }),
|
||||
};
|
||||
|
||||
const searchResults = await agent.searchMessages(q, options);
|
||||
// TODO: Improve type alignment between core and server schemas.
|
||||
// Core's InternalMessage has union types for binary data, but JSON responses are strings.
|
||||
return ctx.json(searchResults as z.output<typeof MessageSearchResponseSchema>);
|
||||
})
|
||||
.openapi(sessionsRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { q } = ctx.req.valid('query');
|
||||
const searchResults = await agent.searchSessions(q);
|
||||
// TODO: Improve type alignment between core and server schemas.
|
||||
return ctx.json(searchResults as z.output<typeof SessionSearchResponseSchema>);
|
||||
});
|
||||
}
|
||||
492
dexto/packages/server/src/hono/routes/sessions.ts
Normal file
492
dexto/packages/server/src/hono/routes/sessions.ts
Normal file
@@ -0,0 +1,492 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import { SessionMetadataSchema, InternalMessageSchema } from '../schemas/responses.js';
|
||||
import type { GetAgentFn } from '../index.js';
|
||||
|
||||
const CreateSessionSchema = z
|
||||
.object({
|
||||
sessionId: z.string().optional().describe('A custom ID for the new session'),
|
||||
})
|
||||
.describe('Request body for creating a new session');
|
||||
|
||||
export function createSessionsRouter(getAgent: GetAgentFn) {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const listRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/sessions',
|
||||
summary: 'List Sessions',
|
||||
description: 'Retrieves a list of all active sessions',
|
||||
tags: ['sessions'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'List of all active sessions',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
sessions: z
|
||||
.array(SessionMetadataSchema)
|
||||
.describe('Array of session metadata objects'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const createRouteDef = createRoute({
|
||||
method: 'post',
|
||||
path: '/sessions',
|
||||
summary: 'Create Session',
|
||||
description: 'Creates a new session',
|
||||
tags: ['sessions'],
|
||||
request: { body: { content: { 'application/json': { schema: CreateSessionSchema } } } },
|
||||
responses: {
|
||||
201: {
|
||||
description: 'Session created successfully',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
session: SessionMetadataSchema.describe(
|
||||
'Newly created session metadata'
|
||||
),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const getRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/sessions/{sessionId}',
|
||||
summary: 'Get Session Details',
|
||||
description: 'Fetches details for a specific session',
|
||||
tags: ['sessions'],
|
||||
request: { params: z.object({ sessionId: z.string().describe('Session identifier') }) },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Session details with metadata',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
session: SessionMetadataSchema.extend({
|
||||
history: z
|
||||
.number()
|
||||
.int()
|
||||
.nonnegative()
|
||||
.describe('Number of messages in history'),
|
||||
})
|
||||
.strict()
|
||||
.describe('Session metadata with history count'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const historyRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/sessions/{sessionId}/history',
|
||||
summary: 'Get Session History',
|
||||
description:
|
||||
'Retrieves the conversation history for a session along with processing status',
|
||||
tags: ['sessions'],
|
||||
request: { params: z.object({ sessionId: z.string().describe('Session identifier') }) },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Session conversation history',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
history: z
|
||||
.array(InternalMessageSchema)
|
||||
.describe('Array of messages in conversation history'),
|
||||
isBusy: z
|
||||
.boolean()
|
||||
.describe(
|
||||
'Whether the session is currently processing a message'
|
||||
),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const deleteRoute = createRoute({
|
||||
method: 'delete',
|
||||
path: '/sessions/{sessionId}',
|
||||
summary: 'Delete Session',
|
||||
description:
|
||||
'Permanently deletes a session and all its conversation history. This action cannot be undone',
|
||||
tags: ['sessions'],
|
||||
request: { params: z.object({ sessionId: z.string().describe('Session identifier') }) },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Session deleted successfully',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
status: z.literal('deleted').describe('Deletion status'),
|
||||
sessionId: z.string().describe('ID of the deleted session'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const cancelRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/sessions/{sessionId}/cancel',
|
||||
summary: 'Cancel Session Run',
|
||||
description:
|
||||
'Cancels an in-flight agent run for the specified session. ' +
|
||||
'By default (soft cancel), only the current LLM call is cancelled and queued messages continue processing. ' +
|
||||
'Set clearQueue=true for hard cancel to also clear any queued messages.',
|
||||
tags: ['sessions'],
|
||||
request: {
|
||||
params: z.object({ sessionId: z.string().describe('Session identifier') }),
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
clearQueue: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.default(false)
|
||||
.describe(
|
||||
'If true (hard cancel), clears queued messages. If false (soft cancel, default), queued messages continue processing.'
|
||||
),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
required: false,
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Cancel operation result',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
cancelled: z.boolean().describe('Whether a run was cancelled'),
|
||||
sessionId: z.string().describe('Session ID'),
|
||||
queueCleared: z
|
||||
.boolean()
|
||||
.describe('Whether queued messages were cleared'),
|
||||
clearedCount: z
|
||||
.number()
|
||||
.describe(
|
||||
'Number of queued messages cleared (0 if soft cancel)'
|
||||
),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const loadRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/sessions/{sessionId}/load',
|
||||
summary: 'Load Session',
|
||||
description:
|
||||
'Validates and retrieves session information including processing status. The client should track the active session.',
|
||||
tags: ['sessions'],
|
||||
request: {
|
||||
params: z.object({ sessionId: z.string().describe('Session identifier') }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Session information retrieved successfully',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
session: SessionMetadataSchema.extend({
|
||||
isBusy: z
|
||||
.boolean()
|
||||
.describe(
|
||||
'Whether the session is currently processing a message'
|
||||
),
|
||||
}).describe('Session metadata with processing status'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
404: {
|
||||
description: 'Session not found',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
error: z.string().describe('Error message'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const patchRoute = createRoute({
|
||||
method: 'patch',
|
||||
path: '/sessions/{sessionId}',
|
||||
summary: 'Update Session Title',
|
||||
description: 'Updates the title of an existing session',
|
||||
tags: ['sessions'],
|
||||
request: {
|
||||
params: z.object({ sessionId: z.string().describe('Session identifier') }),
|
||||
body: {
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z.object({
|
||||
title: z
|
||||
.string()
|
||||
.min(1, 'Title is required')
|
||||
.max(120, 'Title too long')
|
||||
.describe('New title for the session (maximum 120 characters)'),
|
||||
}),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Session updated successfully',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
session: SessionMetadataSchema.describe('Updated session metadata'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const generateTitleRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/sessions/{sessionId}/generate-title',
|
||||
summary: 'Generate Session Title',
|
||||
description:
|
||||
'Generates a descriptive title for the session using the first user message. Returns existing title if already set.',
|
||||
tags: ['sessions'],
|
||||
request: {
|
||||
params: z.object({ sessionId: z.string().describe('Session identifier') }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Title generated successfully',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
title: z
|
||||
.string()
|
||||
.nullable()
|
||||
.describe('Generated title, or null if generation failed'),
|
||||
sessionId: z.string().describe('Session ID'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
404: {
|
||||
description: 'Session not found (error format handled by middleware)',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return app
|
||||
.openapi(listRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const sessionIds = await agent.listSessions();
|
||||
const sessions = await Promise.all(
|
||||
sessionIds.map(async (id) => {
|
||||
try {
|
||||
const metadata = await agent.getSessionMetadata(id);
|
||||
return {
|
||||
id,
|
||||
createdAt: metadata?.createdAt || null,
|
||||
lastActivity: metadata?.lastActivity || null,
|
||||
messageCount: metadata?.messageCount || 0,
|
||||
title: metadata?.title || null,
|
||||
};
|
||||
} catch {
|
||||
// Skip sessions that no longer exist
|
||||
return {
|
||||
id,
|
||||
createdAt: null,
|
||||
lastActivity: null,
|
||||
messageCount: 0,
|
||||
title: null,
|
||||
};
|
||||
}
|
||||
})
|
||||
);
|
||||
return ctx.json({ sessions });
|
||||
})
|
||||
.openapi(createRouteDef, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { sessionId } = ctx.req.valid('json');
|
||||
const session = await agent.createSession(sessionId);
|
||||
const metadata = await agent.getSessionMetadata(session.id);
|
||||
return ctx.json(
|
||||
{
|
||||
session: {
|
||||
id: session.id,
|
||||
createdAt: metadata?.createdAt || Date.now(),
|
||||
lastActivity: metadata?.lastActivity || Date.now(),
|
||||
messageCount: metadata?.messageCount || 0,
|
||||
title: metadata?.title || null,
|
||||
},
|
||||
},
|
||||
201
|
||||
);
|
||||
})
|
||||
.openapi(getRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { sessionId } = ctx.req.param();
|
||||
const metadata = await agent.getSessionMetadata(sessionId);
|
||||
const history = await agent.getSessionHistory(sessionId);
|
||||
return ctx.json({
|
||||
session: {
|
||||
id: sessionId,
|
||||
createdAt: metadata?.createdAt || null,
|
||||
lastActivity: metadata?.lastActivity || null,
|
||||
messageCount: metadata?.messageCount || 0,
|
||||
title: metadata?.title || null,
|
||||
history: history.length,
|
||||
},
|
||||
});
|
||||
})
|
||||
.openapi(historyRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { sessionId } = ctx.req.param();
|
||||
const [history, isBusy] = await Promise.all([
|
||||
agent.getSessionHistory(sessionId),
|
||||
agent.isSessionBusy(sessionId),
|
||||
]);
|
||||
// TODO: Improve type alignment between core and server schemas.
|
||||
// Core's InternalMessage has union types (string | Uint8Array | Buffer | URL)
|
||||
// for binary data, but JSON responses are always base64 strings.
|
||||
return ctx.json({
|
||||
history: history as z.output<typeof InternalMessageSchema>[],
|
||||
isBusy,
|
||||
});
|
||||
})
|
||||
.openapi(deleteRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { sessionId } = ctx.req.param();
|
||||
await agent.deleteSession(sessionId);
|
||||
return ctx.json({ status: 'deleted', sessionId });
|
||||
})
|
||||
.openapi(cancelRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { sessionId } = ctx.req.valid('param');
|
||||
|
||||
// Get clearQueue from body, default to false (soft cancel)
|
||||
let clearQueue = false;
|
||||
try {
|
||||
const body = ctx.req.valid('json');
|
||||
clearQueue = body?.clearQueue ?? false;
|
||||
} catch {
|
||||
// No body or invalid body - use default (soft cancel)
|
||||
}
|
||||
|
||||
// If hard cancel, clear the queue first
|
||||
let clearedCount = 0;
|
||||
if (clearQueue) {
|
||||
try {
|
||||
clearedCount = await agent.clearMessageQueue(sessionId);
|
||||
agent.logger.debug(
|
||||
`Hard cancel: cleared ${clearedCount} queued message(s) for session: ${sessionId}`
|
||||
);
|
||||
} catch {
|
||||
// Session might not exist or queue not accessible - continue with cancel
|
||||
}
|
||||
}
|
||||
|
||||
// Then cancel the current run
|
||||
const cancelled = await agent.cancel(sessionId);
|
||||
if (!cancelled) {
|
||||
agent.logger.debug(`No in-flight run to cancel for session: ${sessionId}`);
|
||||
}
|
||||
|
||||
return ctx.json({
|
||||
cancelled,
|
||||
sessionId,
|
||||
queueCleared: clearQueue,
|
||||
clearedCount,
|
||||
});
|
||||
})
|
||||
.openapi(loadRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { sessionId } = ctx.req.valid('param');
|
||||
|
||||
// Validate that session exists
|
||||
const sessionIds = await agent.listSessions();
|
||||
if (!sessionIds.includes(sessionId)) {
|
||||
return ctx.json({ error: `Session not found: ${sessionId}` }, 404);
|
||||
}
|
||||
|
||||
// Return session metadata with processing status
|
||||
const metadata = await agent.getSessionMetadata(sessionId);
|
||||
const isBusy = await agent.isSessionBusy(sessionId);
|
||||
return ctx.json(
|
||||
{
|
||||
session: {
|
||||
id: sessionId,
|
||||
createdAt: metadata?.createdAt || null,
|
||||
lastActivity: metadata?.lastActivity || null,
|
||||
messageCount: metadata?.messageCount || 0,
|
||||
title: metadata?.title || null,
|
||||
isBusy,
|
||||
},
|
||||
},
|
||||
200
|
||||
);
|
||||
})
|
||||
.openapi(patchRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { sessionId } = ctx.req.valid('param');
|
||||
const { title } = ctx.req.valid('json');
|
||||
await agent.setSessionTitle(sessionId, title);
|
||||
const metadata = await agent.getSessionMetadata(sessionId);
|
||||
return ctx.json({
|
||||
session: {
|
||||
id: sessionId,
|
||||
createdAt: metadata?.createdAt || null,
|
||||
lastActivity: metadata?.lastActivity || null,
|
||||
messageCount: metadata?.messageCount || 0,
|
||||
title: metadata?.title || title,
|
||||
},
|
||||
});
|
||||
})
|
||||
.openapi(generateTitleRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { sessionId } = ctx.req.valid('param');
|
||||
const title = await agent.generateSessionTitle(sessionId);
|
||||
return ctx.json({ title, sessionId });
|
||||
});
|
||||
}
|
||||
118
dexto/packages/server/src/hono/routes/static.ts
Normal file
118
dexto/packages/server/src/hono/routes/static.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
import { Hono } from 'hono';
|
||||
import type { NotFoundHandler } from 'hono';
|
||||
import { serveStatic } from '@hono/node-server/serve-static';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { join } from 'node:path';
|
||||
|
||||
/**
 * Runtime configuration injected into the WebUI via `window` globals.
 * This replaces the Next.js SSR injection that was lost in the Vite migration.
 *
 * TODO: This injection only works in production mode where Hono serves index.html.
 * In dev mode (`pnpm dev`), Vite serves index.html directly, bypassing this injection.
 * To support dev mode analytics, add a `/api/config/analytics` endpoint that the
 * WebUI can fetch as a fallback when `window.__DEXTO_ANALYTICS__` is undefined.
 */
export interface WebUIRuntimeConfig {
    // Analytics settings exposed to the browser; null/omitted disables injection.
    analytics?: {
        distinctId: string;
        posthogKey: string;
        posthogHost: string;
        appVersion: string;
    } | null;
}
|
||||
|
||||
/**
|
||||
* Create a static file router for serving WebUI assets.
|
||||
*
|
||||
* Serves static files from the specified webRoot directory.
|
||||
* Note: SPA fallback is handled separately via createSpaFallbackHandler.
|
||||
*
|
||||
* @param webRoot - Absolute path to the directory containing WebUI build output
|
||||
*/
|
||||
export function createStaticRouter(webRoot: string) {
|
||||
const app = new Hono();
|
||||
|
||||
// Serve static assets from /assets/
|
||||
app.use('/assets/*', serveStatic({ root: webRoot }));
|
||||
|
||||
// Serve static files from /logos/
|
||||
app.use('/logos/*', serveStatic({ root: webRoot }));
|
||||
|
||||
// Serve other static files (favicon, etc.)
|
||||
app.use('/favicon.ico', serveStatic({ root: webRoot }));
|
||||
|
||||
return app;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the injection script for runtime config.
|
||||
* Escapes values to prevent XSS and script injection.
|
||||
*/
|
||||
function buildInjectionScript(config: WebUIRuntimeConfig): string {
|
||||
const scripts: string[] = [];
|
||||
|
||||
if (config.analytics) {
|
||||
// Escape < to prevent script injection via JSON values
|
||||
const safeJson = JSON.stringify(config.analytics).replace(/</g, '\\u003c');
|
||||
scripts.push(`window.__DEXTO_ANALYTICS__ = ${safeJson};`);
|
||||
}
|
||||
|
||||
if (scripts.length === 0) return '';
|
||||
return `<script>${scripts.join('\n')}</script>`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a notFound handler for SPA fallback.
|
||||
*
|
||||
* This handler serves index.html for client-side routes (paths without file extensions).
|
||||
* For paths with file extensions (like /openapi.json), it returns a standard 404.
|
||||
*
|
||||
* This should be registered as app.notFound() to run after all routes fail to match.
|
||||
*
|
||||
* @param webRoot - Absolute path to the directory containing WebUI build output
|
||||
* @param runtimeConfig - Optional runtime configuration to inject into the HTML
|
||||
*/
|
||||
export function createSpaFallbackHandler(
|
||||
webRoot: string,
|
||||
runtimeConfig?: WebUIRuntimeConfig
|
||||
): NotFoundHandler {
|
||||
// Pre-build the injection script once (not per-request)
|
||||
const injectionScript = runtimeConfig ? buildInjectionScript(runtimeConfig) : '';
|
||||
|
||||
return async (c) => {
|
||||
const path = c.req.path;
|
||||
|
||||
// If path ends with a file extension, it's a real 404 (not an SPA route)
|
||||
// This allows /openapi.json, /.well-known/agent-card.json etc. to 404 properly
|
||||
// Uses regex to avoid false positives like /session/2024.01.01
|
||||
if (/\.[a-zA-Z0-9]+$/.test(path)) {
|
||||
return c.json({ error: 'Not Found', path }, 404);
|
||||
}
|
||||
|
||||
// SPA fallback - serve index.html for client-side routes
|
||||
try {
|
||||
let html = await readFile(join(webRoot, 'index.html'), 'utf-8');
|
||||
|
||||
// Inject runtime config into <head> if provided
|
||||
if (injectionScript) {
|
||||
html = html.replace('</head>', `${injectionScript}</head>`);
|
||||
}
|
||||
|
||||
return c.html(html);
|
||||
} catch {
|
||||
// index.html not found - WebUI not available
|
||||
return c.html(
|
||||
`<!DOCTYPE html>
|
||||
<html>
|
||||
<head><title>Dexto API Server</title></head>
|
||||
<body>
|
||||
<h1>Dexto API Server</h1>
|
||||
<p>WebUI is not available. API endpoints are accessible at <code>/api/*</code></p>
|
||||
</body>
|
||||
</html>`,
|
||||
200
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
147
dexto/packages/server/src/hono/routes/tools.ts
Normal file
147
dexto/packages/server/src/hono/routes/tools.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import type { Context } from 'hono';
|
||||
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;
|
||||
|
||||
// JSON Schema definition for tool input parameters
|
||||
const JsonSchemaProperty = z
|
||||
.object({
|
||||
type: z
|
||||
.enum(['string', 'number', 'integer', 'boolean', 'object', 'array'])
|
||||
.optional()
|
||||
.describe('Property type'),
|
||||
description: z.string().optional().describe('Property description'),
|
||||
enum: z
|
||||
.array(z.union([z.string(), z.number(), z.boolean()]))
|
||||
.optional()
|
||||
.describe('Enum values'),
|
||||
default: z.any().optional().describe('Default value'),
|
||||
})
|
||||
.passthrough()
|
||||
.describe('JSON Schema property definition');
|
||||
|
||||
const ToolInputSchema = z
|
||||
.object({
|
||||
type: z.literal('object').optional().describe('Schema type, always "object" when present'),
|
||||
properties: z.record(JsonSchemaProperty).optional().describe('Property definitions'),
|
||||
required: z.array(z.string()).optional().describe('Required property names'),
|
||||
})
|
||||
.passthrough()
|
||||
.describe('JSON Schema for tool input parameters');
|
||||
|
||||
const ToolInfoSchema = z
|
||||
.object({
|
||||
id: z.string().describe('Tool identifier'),
|
||||
name: z.string().describe('Tool name'),
|
||||
description: z.string().describe('Tool description'),
|
||||
source: z
|
||||
.enum(['internal', 'custom', 'mcp'])
|
||||
.describe('Source of the tool (internal, custom, or mcp)'),
|
||||
serverName: z.string().optional().describe('MCP server name (if source is mcp)'),
|
||||
inputSchema: ToolInputSchema.optional().describe('JSON Schema for tool input parameters'),
|
||||
})
|
||||
.strict()
|
||||
.describe('Tool information');
|
||||
|
||||
const AllToolsResponseSchema = z
|
||||
.object({
|
||||
tools: z.array(ToolInfoSchema).describe('Array of all available tools'),
|
||||
totalCount: z.number().describe('Total number of tools'),
|
||||
internalCount: z.number().describe('Number of internal tools'),
|
||||
customCount: z.number().describe('Number of custom tools'),
|
||||
mcpCount: z.number().describe('Number of MCP tools'),
|
||||
})
|
||||
.strict()
|
||||
.describe('All available tools from all sources');
|
||||
|
||||
export function createToolsRouter(getAgent: GetAgentFn) {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const allToolsRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/tools',
|
||||
summary: 'List All Tools',
|
||||
description:
|
||||
'Retrieves all available tools from all sources (internal, custom, and MCP servers)',
|
||||
tags: ['tools'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'All tools',
|
||||
content: { 'application/json': { schema: AllToolsResponseSchema } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return app.openapi(allToolsRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
|
||||
// Get all tools from all sources
|
||||
const allTools = await agent.getAllTools();
|
||||
|
||||
// Get MCP tools with server metadata for proper grouping
|
||||
const mcpToolsWithServerInfo = agent.getAllMcpToolsWithServerInfo();
|
||||
|
||||
const toolList: z.output<typeof ToolInfoSchema>[] = [];
|
||||
|
||||
let internalCount = 0;
|
||||
let customCount = 0;
|
||||
let mcpCount = 0;
|
||||
|
||||
for (const [toolName, toolInfo] of Object.entries(allTools)) {
|
||||
// Determine source and extract server name
|
||||
let source: 'internal' | 'custom' | 'mcp';
|
||||
let serverName: string | undefined;
|
||||
|
||||
if (toolName.startsWith('mcp--')) {
|
||||
// MCP tool - strip the mcp-- prefix to look up in cache
|
||||
const mcpToolName = toolName.substring(5); // Remove 'mcp--' prefix
|
||||
const mcpToolInfo = mcpToolsWithServerInfo.get(mcpToolName);
|
||||
if (mcpToolInfo) {
|
||||
source = 'mcp';
|
||||
serverName = mcpToolInfo.serverName;
|
||||
mcpCount++;
|
||||
} else {
|
||||
// Fallback if not found in cache
|
||||
source = 'mcp';
|
||||
mcpCount++;
|
||||
}
|
||||
} else if (toolName.startsWith('internal--')) {
|
||||
source = 'internal';
|
||||
internalCount++;
|
||||
} else if (toolName.startsWith('custom--')) {
|
||||
source = 'custom';
|
||||
customCount++;
|
||||
} else {
|
||||
// Default to internal
|
||||
source = 'internal';
|
||||
internalCount++;
|
||||
}
|
||||
|
||||
toolList.push({
|
||||
id: toolName,
|
||||
name: toolName,
|
||||
description: toolInfo.description || 'No description available',
|
||||
source,
|
||||
serverName,
|
||||
inputSchema: toolInfo.parameters as z.output<typeof ToolInputSchema> | undefined,
|
||||
});
|
||||
}
|
||||
|
||||
// Sort: internal first, then custom, then MCP
|
||||
toolList.sort((a, b) => {
|
||||
const sourceOrder = { internal: 0, custom: 1, mcp: 2 };
|
||||
if (a.source !== b.source) {
|
||||
return sourceOrder[a.source] - sourceOrder[b.source];
|
||||
}
|
||||
return a.name.localeCompare(b.name);
|
||||
});
|
||||
|
||||
return ctx.json({
|
||||
tools: toolList,
|
||||
totalCount: toolList.length,
|
||||
internalCount,
|
||||
customCount,
|
||||
mcpCount,
|
||||
});
|
||||
});
|
||||
}
|
||||
260
dexto/packages/server/src/hono/routes/webhooks.ts
Normal file
260
dexto/packages/server/src/hono/routes/webhooks.ts
Normal file
@@ -0,0 +1,260 @@
|
||||
import { OpenAPIHono, createRoute, z } from '@hono/zod-openapi';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import { WebhookEventSubscriber } from '../../events/webhook-subscriber.js';
|
||||
import type { WebhookConfig } from '../../events/webhook-types.js';
|
||||
import type { Context } from 'hono';
|
||||
// Resolves the DextoAgent for the current request; may be sync or async so
// both pre-bound and per-request agent lookups can be plugged in.
type GetAgentFn = (ctx: Context) => DextoAgent | Promise<DextoAgent>;

// Response schemas
// Wire shape for a webhook returned to clients. Note that `secret` is
// intentionally absent here — it must never be echoed back in responses.
const WebhookResponseSchema = z
    .object({
        id: z.string().describe('Unique webhook identifier'),
        url: z.string().url().describe('Webhook URL'),
        description: z.string().optional().describe('Webhook description'),
        // NOTE(review): over JSON a Date serializes to an ISO-8601 string,
        // which matches neither union branch — confirm clients tolerate this
        // or normalize to Unix ms before responding.
        createdAt: z.union([z.date(), z.number()]).describe('Creation timestamp (Date or Unix ms)'),
    })
    .strict()
    .describe('Webhook response object');

// Outcome of a connectivity test dispatched by WebhookEventSubscriber.testWebhook.
const WebhookTestResultSchema = z
    .object({
        success: z.boolean().describe('Whether the webhook test succeeded'),
        statusCode: z.number().optional().describe('HTTP status code from webhook'),
        responseTime: z.number().optional().describe('Response time in milliseconds'),
        error: z.string().optional().describe('Error message if test failed'),
    })
    .strict()
    .describe('Webhook test result');

// Request body for POST /webhooks. Unlike the schemas above this one is not
// .strict(), so unknown keys in the request body are silently dropped.
const WebhookBodySchema = z
    .object({
        url: z
            .string()
            .url('Invalid URL format')
            .describe('The URL to send webhook events to (must be a valid HTTP/HTTPS URL)'),
        secret: z.string().optional().describe('A secret key for HMAC signature verification'),
        description: z.string().optional().describe('A description of the webhook for reference'),
    })
    .describe('Request body for registering a webhook');
||||
|
||||
export function createWebhooksRouter(
|
||||
getAgent: GetAgentFn,
|
||||
webhookSubscriber: WebhookEventSubscriber
|
||||
) {
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const registerRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/webhooks',
|
||||
summary: 'Register Webhook',
|
||||
description: 'Registers a new webhook endpoint to receive agent events',
|
||||
tags: ['webhooks'],
|
||||
request: { body: { content: { 'application/json': { schema: WebhookBodySchema } } } },
|
||||
responses: {
|
||||
201: {
|
||||
description: 'Webhook registered',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
webhook: WebhookResponseSchema.describe(
|
||||
'Registered webhook details'
|
||||
),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const listRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/webhooks',
|
||||
summary: 'List Webhooks',
|
||||
description: 'Retrieves a list of all registered webhooks',
|
||||
tags: ['webhooks'],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'List webhooks',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
webhooks: z
|
||||
.array(WebhookResponseSchema)
|
||||
.describe('Array of registered webhooks'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const getRoute = createRoute({
|
||||
method: 'get',
|
||||
path: '/webhooks/{webhookId}',
|
||||
summary: 'Get Webhook Details',
|
||||
description: 'Fetches details for a specific webhook',
|
||||
tags: ['webhooks'],
|
||||
request: { params: z.object({ webhookId: z.string().describe('The webhook identifier') }) },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Webhook',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
webhook: WebhookResponseSchema.describe('Webhook details'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
404: { description: 'Not found' },
|
||||
},
|
||||
});
|
||||
|
||||
const deleteRoute = createRoute({
|
||||
method: 'delete',
|
||||
path: '/webhooks/{webhookId}',
|
||||
summary: 'Delete Webhook',
|
||||
description: 'Permanently removes a webhook endpoint. This action cannot be undone',
|
||||
tags: ['webhooks'],
|
||||
request: { params: z.object({ webhookId: z.string().describe('The webhook identifier') }) },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Removed',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
status: z
|
||||
.literal('removed')
|
||||
.describe('Operation status indicating successful removal'),
|
||||
webhookId: z.string().describe('ID of the removed webhook'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
404: { description: 'Not found' },
|
||||
},
|
||||
});
|
||||
|
||||
const testRoute = createRoute({
|
||||
method: 'post',
|
||||
path: '/webhooks/{webhookId}/test',
|
||||
summary: 'Test Webhook',
|
||||
description: 'Sends a sample event to test webhook connectivity and configuration',
|
||||
tags: ['webhooks'],
|
||||
request: { params: z.object({ webhookId: z.string().describe('The webhook identifier') }) },
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Test result',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: z
|
||||
.object({
|
||||
test: z
|
||||
.literal('completed')
|
||||
.describe('Test status indicating completion'),
|
||||
result: WebhookTestResultSchema.describe('Test execution results'),
|
||||
})
|
||||
.strict(),
|
||||
},
|
||||
},
|
||||
},
|
||||
404: { description: 'Not found' },
|
||||
},
|
||||
});
|
||||
|
||||
return app
|
||||
.openapi(registerRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { url, secret, description } = ctx.req.valid('json');
|
||||
|
||||
const webhookId = `wh_${Date.now()}_${Math.random().toString(36).substring(2, 11)}`;
|
||||
const webhook: WebhookConfig = {
|
||||
id: webhookId,
|
||||
url,
|
||||
createdAt: new Date(),
|
||||
...(secret && { secret }),
|
||||
...(description && { description }),
|
||||
};
|
||||
|
||||
webhookSubscriber.addWebhook(webhook);
|
||||
agent.logger.info(`Webhook registered: ${webhookId} -> ${url}`);
|
||||
|
||||
return ctx.json(
|
||||
{
|
||||
webhook: {
|
||||
id: webhook.id,
|
||||
url: webhook.url,
|
||||
description: webhook.description,
|
||||
createdAt: webhook.createdAt,
|
||||
},
|
||||
},
|
||||
201
|
||||
);
|
||||
})
|
||||
.openapi(listRoute, async (ctx) => {
|
||||
const webhooks = webhookSubscriber.getWebhooks().map((webhook) => ({
|
||||
id: webhook.id,
|
||||
url: webhook.url,
|
||||
description: webhook.description,
|
||||
createdAt: webhook.createdAt,
|
||||
}));
|
||||
|
||||
return ctx.json({ webhooks });
|
||||
})
|
||||
.openapi(getRoute, (ctx) => {
|
||||
const { webhookId } = ctx.req.valid('param');
|
||||
const webhook = webhookSubscriber.getWebhook(webhookId);
|
||||
if (!webhook) {
|
||||
return ctx.json({ error: 'Webhook not found' }, 404);
|
||||
}
|
||||
|
||||
return ctx.json({
|
||||
webhook: {
|
||||
id: webhook.id,
|
||||
url: webhook.url,
|
||||
description: webhook.description,
|
||||
createdAt: webhook.createdAt,
|
||||
},
|
||||
});
|
||||
})
|
||||
.openapi(deleteRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { webhookId } = ctx.req.valid('param');
|
||||
const removed = webhookSubscriber.removeWebhook(webhookId);
|
||||
if (!removed) {
|
||||
return ctx.json({ error: 'Webhook not found' }, 404);
|
||||
}
|
||||
agent.logger.info(`Webhook removed: ${webhookId}`);
|
||||
return ctx.json({ status: 'removed', webhookId });
|
||||
})
|
||||
.openapi(testRoute, async (ctx) => {
|
||||
const agent = await getAgent(ctx);
|
||||
const { webhookId } = ctx.req.valid('param');
|
||||
const webhook = webhookSubscriber.getWebhook(webhookId);
|
||||
|
||||
if (!webhook) {
|
||||
return ctx.json({ error: 'Webhook not found' }, 404);
|
||||
}
|
||||
|
||||
agent.logger.info(`Testing webhook: ${webhookId}`);
|
||||
const result = await webhookSubscriber.testWebhook(webhookId);
|
||||
|
||||
return ctx.json({
|
||||
test: 'completed',
|
||||
result: {
|
||||
success: result.success,
|
||||
statusCode: result.statusCode,
|
||||
responseTime: result.responseTime,
|
||||
error: result.error,
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
577
dexto/packages/server/src/hono/schemas/responses.ts
Normal file
577
dexto/packages/server/src/hono/schemas/responses.ts
Normal file
@@ -0,0 +1,577 @@
|
||||
/**
|
||||
* Response schemas for OpenAPI documentation
|
||||
*
|
||||
* This file defines Zod schemas for all API response types, following these principles:
|
||||
* 1. Import reusable schemas from @dexto/core where available
|
||||
* 2. Define message/context schemas HERE (not in core) - see note below
|
||||
* 3. All schemas follow Zod best practices from CLAUDE.md (strict, describe, etc.)
|
||||
*
|
||||
* TYPE BOUNDARY: Core vs Server Schemas
|
||||
* -------------------------------------
|
||||
* Core's TypeScript interfaces use rich union types for binary data:
|
||||
* `image: string | Uint8Array | Buffer | ArrayBuffer | URL`
|
||||
*
|
||||
* This allows internal code to work with various binary formats before serialization.
|
||||
* However, JSON API responses can only contain strings (base64-encoded).
|
||||
*
|
||||
* Server schemas use `z.string()` for these fields because:
|
||||
* 1. JSON serialization converts all binary data to base64 strings
|
||||
* 2. Hono client type inference works correctly with concrete types
|
||||
* 3. WebUI receives properly typed `string` instead of `JSONValue`
|
||||
*
|
||||
* CONSEQUENCE: Route handlers that return core types (e.g., `InternalMessage[]`)
|
||||
* need type casts when passing to `ctx.json()` because TypeScript sees the union
|
||||
* type from core but the schema expects just `string`. At runtime the data IS
|
||||
* already strings - the cast just bridges the static type mismatch.
|
||||
*
|
||||
* See routes/sessions.ts, routes/search.ts for examples with TODO comments.
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
import { LLMConfigBaseSchema as CoreLLMConfigBaseSchema, LLM_PROVIDERS } from '@dexto/core';
|
||||
|
||||
// TODO: Implement shared error response schemas for OpenAPI documentation.
|
||||
// Currently, 404 and other error responses lack body schemas because @hono/zod-openapi
|
||||
// enforces strict type matching between route definitions and handlers. When a 404 schema
|
||||
// is defined, TypeScript expects handler return types to be a union of all response types,
|
||||
// but the type system tries to match every return against every schema instead of by status code.
|
||||
//
|
||||
// Solution: Create a typed helper or wrapper that:
|
||||
// 1. Defines a shared ErrorResponseSchema (e.g., { error: string, code?: string })
|
||||
// 2. Properly types handlers to return discriminated unions by status code
|
||||
// 3. Can be reused across all routes for consistent error documentation
|
||||
//
|
||||
// See: https://github.com/honojs/middleware/tree/main/packages/zod-openapi for patterns
|
||||
|
||||
// ============================================================================
|
||||
// Imports from @dexto/core - Reusable schemas
|
||||
// ============================================================================
|
||||
|
||||
// Memory schemas
|
||||
export { MemorySchema } from '@dexto/core';
|
||||
|
||||
// LLM schemas
|
||||
export { LLMConfigBaseSchema, type ValidatedLLMConfig } from '@dexto/core';
|
||||
|
||||
// ============================================================================
|
||||
// Message/Context Schemas (defined here, not in core - see header comment)
|
||||
// ============================================================================
|
||||
|
||||
// A single plain-text segment of a multi-part message body.
export const TextPartSchema = z
    .object({
        type: z.literal('text').describe('Part type: text'),
        text: z.string().describe('Text content'),
    })
    .strict()
    .describe('Text content part');

// Image payload. Binary data is always base64 at this boundary — see the
// file-header note on the core/server type split.
export const ImagePartSchema = z
    .object({
        type: z.literal('image').describe('Part type: image'),
        image: z.string().describe('Base64-encoded image data'),
        mimeType: z.string().optional().describe('MIME type of the image'),
    })
    .strict()
    .describe('Image content part');

// Generic file payload; mimeType is required here (unlike ImagePartSchema)
// because there is no way to sniff an arbitrary file's type downstream.
export const FilePartSchema = z
    .object({
        type: z.literal('file').describe('Part type: file'),
        data: z.string().describe('Base64-encoded file data'),
        mimeType: z.string().describe('MIME type of the file'),
        filename: z.string().optional().describe('Optional filename'),
    })
    .strict()
    .describe('File content part');

// Interactive MCP-UI component. Exactly one of `content` / `blob` is expected
// to carry the payload — presumably mutually exclusive, though the schema does
// not enforce it (TODO confirm).
export const UIResourcePartSchema = z
    .object({
        type: z.literal('ui-resource').describe('Part type: ui-resource'),
        uri: z.string().describe('URI identifying the UI resource (must start with ui://)'),
        mimeType: z
            .string()
            .describe('MIME type: text/html, text/uri-list, or application/vnd.mcp-ui.remote-dom'),
        content: z.string().optional().describe('Inline HTML content or URL'),
        blob: z.string().optional().describe('Base64-encoded content (alternative to content)'),
        metadata: z
            .object({
                title: z.string().optional().describe('Display title for the UI resource'),
                preferredSize: z
                    .object({
                        width: z.number().describe('Preferred width in pixels'),
                        height: z.number().describe('Preferred height in pixels'),
                    })
                    .strict()
                    .optional()
                    .describe('Preferred rendering size'),
            })
            .strict()
            .optional()
            .describe('Optional metadata for the UI resource'),
    })
    .strict()
    .describe('UI Resource content part for MCP-UI interactive components');

// Discriminates on `type`, so parse failures point at the failing variant
// instead of reporting every union member.
export const ContentPartSchema = z
    .discriminatedUnion('type', [
        TextPartSchema,
        ImagePartSchema,
        FilePartSchema,
        UIResourcePartSchema,
    ])
    .describe('Message content part (text, image, file, or UI resource)');

// A tool invocation emitted by the assistant. Note `arguments` arrives as a
// JSON *string*, not a parsed object — consumers must JSON.parse it.
export const ToolCallSchema = z
    .object({
        id: z.string().describe('Unique identifier for this tool call'),
        type: z
            .literal('function')
            .describe('Tool call type (currently only function is supported)'),
        function: z
            .object({
                name: z.string().describe('Name of the function to call'),
                arguments: z.string().describe('Arguments for the function in JSON string format'),
            })
            .strict()
            .describe('Function call details'),
    })
    .strict()
    .describe('Tool call made by the assistant');

// Every counter is optional because providers differ in which usage fields
// they report.
export const TokenUsageSchema = z
    .object({
        inputTokens: z.number().int().nonnegative().optional().describe('Number of input tokens'),
        outputTokens: z.number().int().nonnegative().optional().describe('Number of output tokens'),
        reasoningTokens: z
            .number()
            .int()
            .nonnegative()
            .optional()
            .describe('Number of reasoning tokens'),
        totalTokens: z.number().int().nonnegative().optional().describe('Total tokens used'),
    })
    .strict()
    .describe('Token usage accounting');

// Wire-format counterpart of core's InternalMessage (see the header comment on
// the core/server type boundary). One shape covers all roles; role-specific
// fields (toolCalls, toolCallId, name, success) are optional rather than being
// modelled as a discriminated union on `role`.
export const InternalMessageSchema = z
    .object({
        id: z.string().uuid().optional().describe('Unique message identifier (UUID)'),
        role: z
            .enum(['system', 'user', 'assistant', 'tool'])
            .describe('Role of the message sender'),
        timestamp: z.number().int().positive().optional().describe('Creation timestamp (Unix ms)'),
        content: z
            .union([z.string(), z.null(), z.array(ContentPartSchema)])
            .describe('Message content (string, null, or array of parts)'),
        reasoning: z.string().optional().describe('Optional model reasoning text'),
        tokenUsage: TokenUsageSchema.optional().describe('Optional token usage accounting'),
        model: z.string().optional().describe('Model identifier for assistant messages'),
        provider: z
            .enum(LLM_PROVIDERS)
            .optional()
            .describe('Provider identifier for assistant messages'),
        toolCalls: z.array(ToolCallSchema).optional().describe('Tool calls made by the assistant'),
        toolCallId: z.string().optional().describe('ID of the tool call this message responds to'),
        name: z.string().optional().describe('Name of the tool that produced this result'),
        success: z
            .boolean()
            .optional()
            .describe('Whether tool execution succeeded (present for role=tool messages)'),
    })
    .strict()
    .describe('Internal message representation');

// Derived types for consumers — always derive from the schema with z.output
// rather than redeclaring the shape by hand.
export type TextPart = z.output<typeof TextPartSchema>;
export type ImagePart = z.output<typeof ImagePartSchema>;
export type FilePart = z.output<typeof FilePartSchema>;
export type ContentPart = z.output<typeof ContentPartSchema>;
export type ToolCall = z.output<typeof ToolCallSchema>;
export type TokenUsage = z.output<typeof TokenUsageSchema>;
export type InternalMessage = z.output<typeof InternalMessageSchema>;
|
||||
|
||||
// ============================================================================
|
||||
// LLM Config Schemas
|
||||
// ============================================================================
|
||||
|
||||
// LLM config response schema - omits apiKey for security.
// API keys should never be returned in responses; `hasApiKey` lets clients
// know a key is configured without ever seeing its value.
export const LLMConfigResponseSchema = CoreLLMConfigBaseSchema.omit({ apiKey: true })
    .extend({
        hasApiKey: z.boolean().optional().describe('Whether an API key is configured'),
    })
    .describe('LLM configuration (apiKey omitted for security)');

// Full LLM config schema for requests (includes apiKey with writeOnly).
// Use this only for request bodies, never for response payloads.
export const LLMConfigSchema = CoreLLMConfigBaseSchema.describe('LLM configuration with API key');

export type LLMConfigResponse = z.output<typeof LLMConfigResponseSchema>;
|
||||
|
||||
// Agent schemas
|
||||
export { AgentCardSchema, type AgentCard } from '@dexto/core';
|
||||
|
||||
// MCP schemas
|
||||
export {
|
||||
McpServerConfigSchema,
|
||||
StdioServerConfigSchema,
|
||||
SseServerConfigSchema,
|
||||
HttpServerConfigSchema,
|
||||
type McpServerConfig,
|
||||
type ValidatedMcpServerConfig,
|
||||
} from '@dexto/core';
|
||||
|
||||
// Tool schemas
|
||||
export { ToolConfirmationConfigSchema } from '@dexto/core';
|
||||
|
||||
// Resource schemas
|
||||
export { InternalResourceConfigSchema } from '@dexto/core';
|
||||
|
||||
// ============================================================================
|
||||
// New schemas for types that don't have Zod equivalents in core
|
||||
// ============================================================================
|
||||
|
||||
// --- Session Schemas ---

// Summary record for a chat session. Timestamps are nullable (not optional):
// the key is always present, but the value may be unknown for old sessions.
export const SessionMetadataSchema = z
    .object({
        id: z.string().describe('Unique session identifier'),
        createdAt: z
            .number()
            .int()
            .positive()
            .nullable()
            .describe('Creation timestamp (Unix ms, null if unavailable)'),
        lastActivity: z
            .number()
            .int()
            .positive()
            .nullable()
            .describe('Last activity timestamp (Unix ms, null if unavailable)'),
        messageCount: z
            .number()
            .int()
            .nonnegative()
            .describe('Total number of messages in session'),
        title: z.string().optional().nullable().describe('Optional session title'),
    })
    .strict()
    .describe('Session metadata');

export type SessionMetadata = z.output<typeof SessionMetadataSchema>;
|
||||
|
||||
// --- Search Schemas ---

// One matching message within a session, plus enough context to render a
// search-result preview.
export const SearchResultSchema = z
    .object({
        sessionId: z.string().describe('Session ID where the message was found'),
        message: InternalMessageSchema.describe('The message that matched the search'),
        matchedText: z.string().describe('The specific text that matched the search query'),
        context: z.string().describe('Context around the match for preview'),
        messageIndex: z
            .number()
            .int()
            .nonnegative()
            .describe('Index of the message within the session'),
    })
    .strict()
    .describe('Result of a message search');

export type SearchResult = z.output<typeof SearchResultSchema>;

// Per-session aggregate for session-level search.
// NOTE(review): the inline `metadata` object below duplicates (and diverges
// from) SessionMetadataSchema — it omits id/title and makes the timestamps
// non-nullable. Confirm whether it should reuse SessionMetadataSchema instead.
export const SessionSearchResultSchema = z
    .object({
        sessionId: z.string().describe('Session ID'),
        matchCount: z
            .number()
            .int()
            .nonnegative()
            .describe('Number of messages that matched in this session'),
        firstMatch: SearchResultSchema.describe('Preview of the first matching message'),
        metadata: z
            .object({
                createdAt: z.number().int().positive().describe('Session creation timestamp'),
                lastActivity: z.number().int().positive().describe('Last activity timestamp'),
                messageCount: z.number().int().nonnegative().describe('Total messages in session'),
            })
            .strict()
            .describe('Session metadata'),
    })
    .strict()
    .describe('Result of a session search');

export type SessionSearchResult = z.output<typeof SessionSearchResultSchema>;

// Paginated envelope for message-level search results.
export const MessageSearchResponseSchema = z
    .object({
        results: z.array(SearchResultSchema).describe('Array of search results'),
        total: z.number().int().nonnegative().describe('Total number of results available'),
        hasMore: z.boolean().describe('Whether there are more results beyond the current page'),
        query: z.string().describe('Query that was searched'),
    })
    .strict()
    .describe('Message search response');

export type MessageSearchResponse = z.output<typeof MessageSearchResponseSchema>;

// Envelope for session-level search; unpaginated, so hasMore is always false
// (kept for shape parity with MessageSearchResponseSchema).
export const SessionSearchResponseSchema = z
    .object({
        results: z.array(SessionSearchResultSchema).describe('Array of session search results'),
        total: z.number().int().nonnegative().describe('Total number of sessions with matches'),
        hasMore: z
            .boolean()
            .describe(
                'Always false - session search returns all matching sessions without pagination'
            ),
        query: z.string().describe('Query that was searched'),
    })
    .strict()
    .describe('Session search response');

export type SessionSearchResponse = z.output<typeof SessionSearchResponseSchema>;
|
||||
|
||||
// --- Webhook Schemas ---

// NOTE(review): this shape does not match the webhook objects actually
// returned by routes/webhooks.ts, which have no `events` field and a
// Date-or-number `createdAt` (WebhookResponseSchema there). Confirm which
// definition is canonical before relying on this one.
export const WebhookSchema = z
    .object({
        id: z.string().describe('Unique webhook identifier'),
        url: z.string().url().describe('Webhook URL to send events to'),
        events: z.array(z.string()).describe('Array of event types this webhook subscribes to'),
        createdAt: z.number().int().positive().describe('Creation timestamp (Unix ms)'),
    })
    .strict()
    .describe('Webhook subscription');

export type Webhook = z.output<typeof WebhookSchema>;
|
||||
|
||||
// --- LLM Provider/Model Schemas ---

// Schema for ModelInfo from core registry.
// NOTE(review): the nested `pricing` object is not .strict() unlike its
// sibling schemas — confirm whether extra pricing keys should be allowed.
export const CatalogModelInfoSchema = z
    .object({
        name: z.string().describe('Model name identifier'),
        maxInputTokens: z.number().int().positive().describe('Maximum input tokens'),
        default: z.boolean().optional().describe('Whether this is a default model'),
        supportedFileTypes: z
            .array(z.enum(['audio', 'pdf', 'image']))
            .describe('File types this model supports'),
        displayName: z.string().optional().describe('Human-readable display name'),
        pricing: z
            .object({
                inputPerM: z.number().describe('Input cost per million tokens (USD)'),
                outputPerM: z.number().describe('Output cost per million tokens (USD)'),
                cacheReadPerM: z.number().optional().describe('Cache read cost per million tokens'),
                cacheWritePerM: z
                    .number()
                    .optional()
                    .describe('Cache write cost per million tokens'),
                currency: z.literal('USD').optional().describe('Currency'),
                unit: z.literal('per_million_tokens').optional().describe('Unit'),
            })
            .optional()
            .describe('Pricing information in USD per million tokens'),
    })
    .strict()
    .describe('Model information from LLM registry');

export type CatalogModelInfo = z.output<typeof CatalogModelInfoSchema>;

// Schema for ProviderCatalog returned by /llm/catalog (grouped mode).
export const ProviderCatalogSchema = z
    .object({
        name: z.string().describe('Provider display name'),
        hasApiKey: z.boolean().describe('Whether API key is configured'),
        primaryEnvVar: z.string().describe('Primary environment variable for API key'),
        supportsBaseURL: z.boolean().describe('Whether custom base URLs are supported'),
        models: z.array(CatalogModelInfoSchema).describe('Models available from this provider'),
        supportedFileTypes: z
            .array(z.enum(['audio', 'pdf', 'image']))
            .describe('Provider-level file type support'),
    })
    .strict()
    .describe('Provider catalog entry with models and capabilities');

export type ProviderCatalog = z.output<typeof ProviderCatalogSchema>;

// Schema for flat model list (includes provider field) — used when the
// catalog is requested ungrouped.
export const ModelFlatSchema = CatalogModelInfoSchema.extend({
    provider: z.string().describe('Provider identifier for this model'),
}).describe('Flattened model entry with provider information');

export type ModelFlat = z.output<typeof ModelFlatSchema>;

// --- Agent Registry Schemas ---

// One entry in the agent registry, covering both bundled and user-installed
// agents (discriminated by `type`).
export const AgentRegistryEntrySchema = z
    .object({
        id: z.string().describe('Unique agent identifier'),
        name: z.string().describe('Agent name'),
        description: z.string().describe('Agent description'),
        author: z.string().optional().describe('Agent author'),
        tags: z.array(z.string()).optional().describe('Agent tags'),
        type: z.enum(['builtin', 'custom']).describe('Agent type'),
    })
    .strict()
    .describe('Agent registry entry');

export type AgentRegistryEntry = z.output<typeof AgentRegistryEntrySchema>;
|
||||
|
||||
// --- Resource Schemas ---

// TODO: Consider refactoring to use discriminated union for better type safety:
// - MCP resources (source: 'mcp') should require serverName field
// - Internal resources (source: 'internal') should not have serverName field
// This would require updating core's ResourceMetadata interface to also use discriminated union
export const ResourceSchema = z
    .object({
        uri: z.string().describe('Resource URI'),
        name: z.string().optional().describe('Resource name'),
        description: z.string().optional().describe('Resource description'),
        mimeType: z.string().optional().describe('MIME type of the resource'),
        source: z.enum(['mcp', 'internal']).describe('Source system that provides this resource'),
        serverName: z
            .string()
            .optional()
            .describe('Original server/provider name (for MCP resources)'),
        size: z.number().optional().describe('Size of the resource in bytes (if known)'),
        lastModified: z
            .string()
            .datetime()
            .optional()
            .describe('Last modified timestamp (ISO 8601 string)'),
        metadata: z
            .record(z.unknown())
            .optional()
            .describe('Additional metadata specific to the resource type'),
    })
    .strict()
    .describe('Resource metadata');

export type Resource = z.output<typeof ResourceSchema>;

// --- Tool Schemas ---

// Minimal tool descriptor; inputSchema is an opaque JSON Schema object rather
// than a Zod schema, since it is produced by external tool providers.
export const ToolSchema = z
    .object({
        name: z.string().describe('Tool name'),
        description: z.string().describe('Tool description'),
        inputSchema: z.record(z.unknown()).describe('JSON Schema for tool input parameters'),
    })
    .strict()
    .describe('Tool metadata');

export type Tool = z.output<typeof ToolSchema>;

// --- Prompt Schemas ---

// One named argument accepted by a prompt template.
export const PromptArgumentSchema = z
    .object({
        name: z.string().describe('Argument name'),
        description: z.string().optional().describe('Argument description'),
        required: z.boolean().optional().describe('Whether the argument is required'),
    })
    .strict()
    .describe('Prompt argument definition');

export type PromptArgument = z.output<typeof PromptArgumentSchema>;

// Bare MCP-compliant prompt definition (name + arguments, no source info).
export const PromptDefinitionSchema = z
    .object({
        name: z.string().describe('Prompt name'),
        title: z.string().optional().describe('Prompt title'),
        description: z.string().optional().describe('Prompt description'),
        arguments: z
            .array(PromptArgumentSchema)
            .optional()
            .describe('Array of argument definitions'),
    })
    .strict()
    .describe('Prompt definition (MCP-compliant)');

export type PromptDefinition = z.output<typeof PromptDefinitionSchema>;

// PromptDefinition enriched with provenance (`source`) and free-form metadata.
export const PromptInfoSchema = z
    .object({
        name: z.string().describe('Prompt name'),
        title: z.string().optional().describe('Prompt title'),
        description: z.string().optional().describe('Prompt description'),
        arguments: z
            .array(PromptArgumentSchema)
            .optional()
            .describe('Array of argument definitions'),
        source: z.enum(['mcp', 'config', 'custom']).describe('Source of the prompt'),
        metadata: z.record(z.unknown()).optional().describe('Additional metadata'),
    })
    .strict()
    .describe('Enhanced prompt information');

export type PromptInfo = z.output<typeof PromptInfoSchema>;

// A stored prompt template with its raw content and declared variables.
// NOTE(review): unlike PromptInfoSchema this uses `variables: string[]`
// instead of structured PromptArgument entries — confirm the two are meant
// to coexist.
export const PromptSchema = z
    .object({
        id: z.string().describe('Unique prompt identifier'),
        name: z.string().describe('Prompt name'),
        description: z.string().optional().describe('Prompt description'),
        content: z.string().describe('Prompt template content'),
        variables: z
            .array(z.string())
            .optional()
            .describe('List of variable placeholders in the prompt'),
    })
    .strict()
    .describe('Prompt template');

export type Prompt = z.output<typeof PromptSchema>;
|
||||
|
||||
// ============================================================================
// Common Response Patterns
// ============================================================================

// Generic success response with data. A schema *factory*: call it with the
// payload schema to get the `{ ok: true, data: ... }` envelope.
export const OkResponseSchema = <T extends z.ZodTypeAny>(dataSchema: T) =>
    z
        .object({
            ok: z.literal(true).describe('Indicates successful response'),
            data: dataSchema.describe('Response data'),
        })
        .strict()
        .describe('Successful API response');

// Generic error response — the `ok: false` counterpart of OkResponseSchema,
// so the two form a discriminated union on `ok`.
export const ErrorResponseSchema = z
    .object({
        ok: z.literal(false).describe('Indicates failed response'),
        error: z
            .object({
                message: z.string().describe('Error message'),
                code: z.string().optional().describe('Error code'),
                details: z.unknown().optional().describe('Additional error details'),
            })
            .strict()
            .describe('Error details'),
    })
    .strict()
    .describe('Error API response');

export type ErrorResponse = z.output<typeof ErrorResponseSchema>;

// Status response (for operations that don't return data).
export const StatusResponseSchema = z
    .object({
        status: z.string().describe('Operation status'),
        message: z.string().optional().describe('Optional status message'),
    })
    .strict()
    .describe('Status response');

export type StatusResponse = z.output<typeof StatusResponseSchema>;

// Delete response — `status` is a literal so the schema doubles as
// documentation of the only value handlers may return.
export const DeleteResponseSchema = z
    .object({
        status: z.literal('deleted').describe('Indicates successful deletion'),
        id: z.string().optional().describe('ID of the deleted resource'),
    })
    .strict()
    .describe('Delete operation response');

export type DeleteResponse = z.output<typeof DeleteResponseSchema>;
|
||||
165
dexto/packages/server/src/hono/start-server.ts
Normal file
165
dexto/packages/server/src/hono/start-server.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import type { Server } from 'node:http';
|
||||
import type { Context } from 'hono';
|
||||
import type { DextoAgent, AgentCard } from '@dexto/core';
|
||||
import { createAgentCard, logger } from '@dexto/core';
|
||||
import { createDextoApp } from './index.js';
|
||||
import { createNodeServer } from './node/index.js';
|
||||
import type { DextoApp } from './types.js';
|
||||
import type { WebUIRuntimeConfig } from './routes/static.js';
|
||||
import { WebhookEventSubscriber } from '../events/webhook-subscriber.js';
|
||||
import { A2ASseEventSubscriber } from '../events/a2a-sse-subscriber.js';
|
||||
import { ApprovalCoordinator } from '../approval/approval-coordinator.js';
|
||||
import { createManualApprovalHandler } from '../approval/manual-approval-handler.js';
|
||||
|
||||
/** Options accepted by startDextoServer. Every field is optional. */
export type StartDextoServerOptions = {
    /** Port to listen on. Defaults to 3000 or process.env.PORT */
    port?: number;
    /** Hostname to bind to. Defaults to 0.0.0.0 */
    hostname?: string;
    /** Override agent card metadata (name, version, etc.) */
    agentCard?: Partial<AgentCard>;
    /** Absolute path to WebUI build output. If provided, static files will be served. */
    webRoot?: string;
    /** Runtime configuration to inject into WebUI (analytics, etc.) */
    webUIConfig?: WebUIRuntimeConfig;
    /** Base URL for agent card. Defaults to http://localhost:{port} */
    baseUrl?: string;
};
|
||||
|
||||
/** Handles returned by startDextoServer for controlling the running server. */
export type StartDextoServerResult = {
    /** HTTP server instance */
    server: Server;
    /** Hono app instance */
    app: DextoApp;
    /** Stop the server and agent gracefully */
    stop: () => Promise<void>;
    /** Agent card with resolved metadata */
    agentCard: AgentCard;
};
|
||||
|
||||
/**
|
||||
* Start a Dexto server with minimal configuration.
|
||||
*
|
||||
* This is a high-level helper that:
|
||||
* 1. Creates event subscribers and approval coordinator
|
||||
* 2. Creates and configures the Hono app
|
||||
* 3. Wires up all the infrastructure (SSE, webhooks, approvals)
|
||||
* 4. Starts the agent
|
||||
* 5. Starts the HTTP server
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Register providers (filesystem-tools, process-tools, etc.)
|
||||
* import '@dexto/image-local';
|
||||
*
|
||||
* import { DextoAgent } from '@dexto/core';
|
||||
* import { loadAgentConfig } from '@dexto/agent-management';
|
||||
* import { startDextoServer } from '@dexto/server';
|
||||
*
|
||||
* const config = await loadAgentConfig('./agents/default.yml');
|
||||
* const agent = new DextoAgent(config, './agents/default.yml');
|
||||
*
|
||||
* const { server, stop } = await startDextoServer(agent, {
|
||||
* port: 3000,
|
||||
* agentCard: { name: 'My Agent' }
|
||||
* });
|
||||
*
|
||||
* // Server is now running at http://localhost:3000
|
||||
* // To stop: await stop();
|
||||
* ```
|
||||
*/
|
||||
export async function startDextoServer(
|
||||
agent: DextoAgent,
|
||||
options: StartDextoServerOptions = {}
|
||||
): Promise<StartDextoServerResult> {
|
||||
const {
|
||||
port: requestedPort,
|
||||
hostname = '0.0.0.0',
|
||||
agentCard: agentCardOverride = {},
|
||||
webRoot,
|
||||
webUIConfig,
|
||||
baseUrl: baseUrlOverride,
|
||||
} = options;
|
||||
|
||||
// Resolve port from options, env, or default
|
||||
const resolvedPort = requestedPort ?? (process.env.PORT ? Number(process.env.PORT) : 3000);
|
||||
const baseUrl = baseUrlOverride ?? `http://localhost:${resolvedPort}`;
|
||||
|
||||
logger.info(`Initializing Dexto server on ${hostname}:${resolvedPort}...`);
|
||||
|
||||
// Create agent card with overrides
|
||||
const agentCard = createAgentCard(
|
||||
{
|
||||
defaultName: agentCardOverride.name ?? 'dexto-agent',
|
||||
defaultVersion: agentCardOverride.version ?? '1.0.0',
|
||||
defaultBaseUrl: baseUrl,
|
||||
},
|
||||
agentCardOverride
|
||||
);
|
||||
|
||||
// Create event subscribers and approval coordinator
|
||||
logger.debug('Creating event infrastructure...');
|
||||
const webhookSubscriber = new WebhookEventSubscriber();
|
||||
const sseSubscriber = new A2ASseEventSubscriber();
|
||||
const approvalCoordinator = new ApprovalCoordinator();
|
||||
|
||||
// Create Hono app
|
||||
logger.debug('Creating Hono application...');
|
||||
const app = createDextoApp({
|
||||
getAgent: (_ctx: Context) => agent,
|
||||
getAgentCard: () => agentCard,
|
||||
approvalCoordinator,
|
||||
webhookSubscriber,
|
||||
sseSubscriber,
|
||||
...(webRoot ? { webRoot } : {}),
|
||||
...(webUIConfig ? { webUIConfig } : {}),
|
||||
});
|
||||
|
||||
// Create Node.js HTTP server
|
||||
logger.debug('Creating Node.js HTTP server...');
|
||||
const { server, webhookSubscriber: bridgeWebhookSubscriber } = createNodeServer(app, {
|
||||
getAgent: () => agent,
|
||||
port: resolvedPort,
|
||||
hostname,
|
||||
});
|
||||
|
||||
// Register webhook subscriber with agent for LLM streaming events
|
||||
if (bridgeWebhookSubscriber) {
|
||||
logger.debug('Registering webhook subscriber with agent...');
|
||||
agent.registerSubscriber(bridgeWebhookSubscriber);
|
||||
}
|
||||
|
||||
// Set approval handler if manual mode OR elicitation enabled
|
||||
const needsHandler =
|
||||
agent.config.toolConfirmation?.mode === 'manual' || agent.config.elicitation.enabled;
|
||||
|
||||
if (needsHandler) {
|
||||
logger.debug('Setting up manual approval handler...');
|
||||
const handler = createManualApprovalHandler(approvalCoordinator);
|
||||
agent.setApprovalHandler(handler);
|
||||
}
|
||||
|
||||
// Wire SSE subscribers to agent event bus
|
||||
logger.debug('Wiring event subscribers to agent...');
|
||||
webhookSubscriber.subscribe(agent.agentEventBus);
|
||||
sseSubscriber.subscribe(agent.agentEventBus);
|
||||
|
||||
// Start the agent
|
||||
logger.info('Starting agent...');
|
||||
await agent.start();
|
||||
|
||||
logger.info(`Server running at http://${hostname}:${resolvedPort}`, null, 'green');
|
||||
|
||||
// Return result with stop function
|
||||
return {
|
||||
server,
|
||||
app,
|
||||
agentCard,
|
||||
stop: async () => {
|
||||
logger.info('Stopping Dexto server...');
|
||||
await agent.stop();
|
||||
server.close();
|
||||
logger.info('Server stopped', null, 'yellow');
|
||||
},
|
||||
};
|
||||
}
|
||||
6
dexto/packages/server/src/hono/types.ts
Normal file
6
dexto/packages/server/src/hono/types.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import type { OpenAPIHono } from '@hono/zod-openapi';
|
||||
import type { WebhookEventSubscriber } from '../events/webhook-subscriber.js';
|
||||
|
||||
/**
 * The OpenAPI-enabled Hono app served by Dexto. `webhookSubscriber` is
 * optionally attached by the Node server bridge — TODO confirm against
 * createNodeServer, which returns a webhookSubscriber alongside the server.
 */
export type DextoApp = OpenAPIHono & {
    webhookSubscriber?: WebhookEventSubscriber;
};
|
||||
11
dexto/packages/server/src/index.ts
Normal file
11
dexto/packages/server/src/index.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
// Hono application factory, Node adapter, and high-level server bootstrap.
export * from './hono/index.js';
export * from './hono/node/index.js';
export * from './hono/start-server.js';
export type { DextoApp } from './hono/types.js';
// Event delivery: webhook and A2A SSE subscribers plus their shared types.
export * from './events/webhook-subscriber.js';
export * from './events/a2a-sse-subscriber.js';
export * from './events/webhook-types.js';
export * from './events/types.js';
// MCP protocol handler and tool-approval infrastructure.
export * from './mcp/mcp-handler.js';
export * from './approval/manual-approval-handler.js';
export * from './approval/approval-coordinator.js';
||||
146
dexto/packages/server/src/mcp/mcp-handler.ts
Normal file
146
dexto/packages/server/src/mcp/mcp-handler.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import type { IncomingMessage, ServerResponse } from 'node:http';
|
||||
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
||||
import type { ReadResourceCallback } from '@modelcontextprotocol/sdk/server/mcp.js';
|
||||
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';
|
||||
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
|
||||
import type { Transport } from '@modelcontextprotocol/sdk/shared/transport.js';
|
||||
import type { AgentCard, IDextoLogger } from '@dexto/core';
|
||||
import { logger } from '@dexto/core';
|
||||
import { z } from 'zod';
|
||||
import type { DextoAgent } from '@dexto/core';
|
||||
import { randomUUID } from 'crypto';
|
||||
|
||||
/** Supported MCP server transports; 'http' (streamable HTTP) is the default. */
export type McpTransportType = 'stdio' | 'sse' | 'http';
|
||||
|
||||
export async function createMcpTransport(
|
||||
transportType: McpTransportType = 'http'
|
||||
): Promise<Transport> {
|
||||
logger.info(`Creating MCP transport of type: ${transportType}`);
|
||||
|
||||
switch (transportType) {
|
||||
case 'stdio':
|
||||
return new StdioServerTransport();
|
||||
case 'sse':
|
||||
throw new Error(
|
||||
'SSE transport requires HTTP response context and should be created per-request'
|
||||
);
|
||||
default: {
|
||||
// Use stateless mode (no session management) for better compatibility
|
||||
// with clients like OpenAI that may not properly handle Mcp-Session-Id headers
|
||||
return new StreamableHTTPServerTransport({
|
||||
enableJsonResponse: true,
|
||||
}) as Transport;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function initializeMcpServer(
|
||||
agent: DextoAgent,
|
||||
agentCardData: AgentCard,
|
||||
mcpTransport: Transport
|
||||
): Promise<McpServer> {
|
||||
const mcpServer = new McpServer(
|
||||
{ name: agentCardData.name, version: agentCardData.version },
|
||||
{
|
||||
capabilities: {
|
||||
resources: {},
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const toolName = 'chat_with_agent';
|
||||
const toolDescription = 'Allows you to chat with the an AI agent. Send a message to interact.';
|
||||
|
||||
mcpServer.tool(
|
||||
toolName,
|
||||
toolDescription,
|
||||
{ message: z.string() },
|
||||
async ({ message }: { message: string }) => {
|
||||
agent.logger.info(
|
||||
`MCP tool '${toolName}' received message: ${message.substring(0, 100)}${message.length > 100 ? '...' : ''}`
|
||||
);
|
||||
// Create ephemeral session for this MCP tool call (stateless MCP interactions)
|
||||
const session = await agent.createSession(`mcp-${randomUUID()}`);
|
||||
try {
|
||||
const text = await agent.run(message, undefined, undefined, session.id);
|
||||
agent.logger.info(
|
||||
`MCP tool '${toolName}' sending response: ${text?.substring(0, 100)}${(text?.length ?? 0) > 100 ? '...' : ''}`
|
||||
);
|
||||
return { content: [{ type: 'text', text: text ?? '' }] };
|
||||
} finally {
|
||||
// Always clean up ephemeral session to prevent accumulation
|
||||
await agent
|
||||
.deleteSession(session.id)
|
||||
.catch((err) =>
|
||||
agent.logger.warn(`Failed to cleanup MCP session ${session.id}: ${err}`)
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
agent.logger.info(`Registered MCP tool: '${toolName}'`);
|
||||
|
||||
await initializeAgentCardResource(mcpServer, agentCardData, agent.logger);
|
||||
|
||||
agent.logger.info(`Initializing MCP protocol server connection...`);
|
||||
await mcpServer.connect(mcpTransport);
|
||||
agent.logger.info(`✅ MCP server protocol connected via transport.`);
|
||||
return mcpServer;
|
||||
}
|
||||
|
||||
export async function initializeAgentCardResource(
|
||||
mcpServer: McpServer,
|
||||
agentCardData: AgentCard,
|
||||
agentLogger: IDextoLogger
|
||||
): Promise<void> {
|
||||
const agentCardResourceProgrammaticName = 'agentCard';
|
||||
const agentCardResourceUri = 'dexto://agent/card';
|
||||
try {
|
||||
const readCallback: ReadResourceCallback = async (uri, _extra) => {
|
||||
agentLogger.info(`MCP client requesting resource at ${uri.href}`);
|
||||
return {
|
||||
contents: [
|
||||
{
|
||||
uri: uri.href,
|
||||
type: 'application/json',
|
||||
text: JSON.stringify(agentCardData, null, 2),
|
||||
},
|
||||
],
|
||||
};
|
||||
};
|
||||
mcpServer.resource(agentCardResourceProgrammaticName, agentCardResourceUri, readCallback);
|
||||
agentLogger.info(
|
||||
`Registered MCP Resource: '${agentCardResourceProgrammaticName}' at URI '${agentCardResourceUri}'`
|
||||
);
|
||||
} catch (e: any) {
|
||||
agentLogger.warn(
|
||||
`Error attempting to register MCP Resource '${agentCardResourceProgrammaticName}': ${e.message}. Check SDK.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function createMcpHttpHandlers(mcpTransport: Transport) {
|
||||
if (!(mcpTransport instanceof StreamableHTTPServerTransport)) {
|
||||
logger.info('Non-HTTP transport detected. Skipping HTTP route setup.');
|
||||
return null;
|
||||
}
|
||||
|
||||
const handlePost = async (req: IncomingMessage, res: ServerResponse, body: unknown) => {
|
||||
logger.info(`MCP POST /mcp received request body: ${JSON.stringify(body)}`);
|
||||
try {
|
||||
await mcpTransport.handleRequest(req, res, body);
|
||||
} catch (err) {
|
||||
logger.error(`MCP POST error: ${JSON.stringify(err, null, 2)}`);
|
||||
}
|
||||
};
|
||||
|
||||
const handleGet = async (req: IncomingMessage, res: ServerResponse) => {
|
||||
logger.info('MCP GET /mcp received request, attempting to establish SSE connection.');
|
||||
try {
|
||||
await mcpTransport.handleRequest(req, res);
|
||||
} catch (err) {
|
||||
logger.error(`MCP GET error: ${JSON.stringify(err, null, 2)}`);
|
||||
}
|
||||
};
|
||||
|
||||
return { handlePost, handleGet };
|
||||
}
|
||||
Reference in New Issue
Block a user