feat(channels): add streaming response hooks to ChannelBase

- Add onResponseChunk hook for progressive text display during streaming
- Add onResponseComplete hook for customizing response delivery
- Update mock plugin channel to support streaming chunks

This enables channels to display AI responses progressively as they stream,
improving user experience with real-time feedback.

Co-authored-by: Qwen-Coder <qwen-coder@alibabacloud.com>
tanzhenxin 2026-03-27 14:08:09 +00:00
parent 0ca8cf86f6
commit f7979aa902
4 changed files with 117 additions and 17 deletions


@@ -72,6 +72,30 @@ export abstract class ChannelBase {
   onToolCall(_chatId: string, _event: ToolCallEvent): void {}
 
+  /**
+   * Called for each text chunk as the agent streams its response.
+   * Override to implement progressive display (e.g., updating an AI card in-place).
+   * Default: no-op (chunks are collected internally and delivered via onResponseComplete).
+   */
+  protected onResponseChunk(
+    _chatId: string,
+    _chunk: string,
+    _sessionId: string,
+  ): void {}
+
+  /**
+   * Called when the agent's full response is ready.
+   * Override to customize delivery (e.g., finalize an AI card).
+   * Default: calls sendMessage() with the full response text.
+   */
+  protected async onResponseComplete(
+    chatId: string,
+    fullText: string,
+    _sessionId: string,
+  ): Promise<void> {
+    await this.sendMessage(chatId, fullText);
+  }
+
   /**
    * Register a slash command handler. Subclasses can call this to add
    * platform-specific commands (e.g., /start for Telegram).
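
For illustration only (not part of this commit), a subclass might override the two hooks roughly as in the sketch below. `editMessage` is a hypothetical platform helper and the channel's other required members are omitted; treat this as a usage sketch, not this repo's code.

// Sketch only: one way a subclass might implement progressive display.
// `editMessage` is hypothetical; other required ChannelBase members are
// omitted for brevity.
class StreamingDemoChannel extends ChannelBase {
  private drafts = new Map<string, string>(); // sessionId -> accumulated text

  protected onResponseChunk(chatId: string, chunk: string, sessionId: string): void {
    const text = (this.drafts.get(sessionId) ?? '') + chunk;
    this.drafts.set(sessionId, text);
    void this.editMessage(chatId, text); // hypothetical: update the AI card in place
  }

  protected async onResponseComplete(chatId: string, fullText: string, sessionId: string): Promise<void> {
    this.drafts.delete(sessionId);
    await this.editMessage(chatId, fullText); // hypothetical: finalize the card
  }

  private async editMessage(_chatId: string, _text: string): Promise<void> {
    // a platform-specific edit call would go here
  }
}
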
@@ -223,13 +247,25 @@ export abstract class ChannelBase {
     // pollution when concurrent messages hit the same session.
     const prev = this.sessionQueues.get(sessionId) ?? Promise.resolve();
     const current = prev.then(async () => {
-      const response = await this.bridge.prompt(sessionId, promptText, {
-        imageBase64: envelope.imageBase64,
-        imageMimeType: envelope.imageMimeType,
-      });
+      // Forward streaming chunks to the subclass hook
+      const onChunk = (sid: string, chunk: string) => {
+        if (sid === sessionId) {
+          this.onResponseChunk(envelope.chatId, chunk, sessionId);
+        }
+      };
+      this.bridge.on('textChunk', onChunk);
 
-      if (response) {
-        await this.sendMessage(envelope.chatId, response);
+      try {
+        const response = await this.bridge.prompt(sessionId, promptText, {
+          imageBase64: envelope.imageBase64,
+          imageMimeType: envelope.imageMimeType,
+        });
+        if (response) {
+          await this.onResponseComplete(envelope.chatId, response, sessionId);
+        }
+      } finally {
+        this.bridge.off('textChunk', onChunk);
       }
     });
     this.sessionQueues.set(
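
For context, here is a minimal sketch of the bridge contract the code above assumes (not this repo's actual bridge): an EventEmitter-style object whose prompt() resolves with the full response text while emitting 'textChunk' events with (sessionId, chunk) along the way.

// Sketch of the assumed bridge contract (assumptions, not this repo's implementation).
import { EventEmitter } from 'node:events';

class SketchBridge extends EventEmitter {
  async prompt(
    sessionId: string,
    _text: string,
    _opts?: { imageBase64?: string; imageMimeType?: string },
  ): Promise<string> {
    let full = '';
    for (const chunk of ['Hello', ', ', 'world']) { // stand-in for a real streaming source
      this.emit('textChunk', sessionId, chunk); // (sid, chunk) — matches the onChunk handler above
      full += chunk;
    }
    return full;
  }
}

A channel wired to a bridge like this would receive three onResponseChunk calls followed by a single onResponseComplete call with the full text.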