fix: enhance event hook handling to return status and prevent propagation
@@ -23,7 +23,12 @@ class PipelineContext:
         event: AstrMessageEvent,
         hook_type: EventType,
         *args,
-    ):
+    ) -> bool:
+        """Invoke the event hook handlers.
+
+        Returns:
+            bool: True if the event has been stopped.
+        """
         platform_id = event.get_platform_id()
         handlers = star_handlers_registry.get_handlers_by_event_type(
             hook_type, platform_id=platform_id
@@ -41,7 +46,8 @@ class PipelineContext:
                 logger.info(
                     f"{star_map[handler.handler_module_path].name} - {handler.handler_name} stopped event propagation."
                 )
-                return
+                return True
+        return event.is_stopped()

     async def call_handler(
         self,
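Taken together, the two hunks above make PipelineContext.call_event_hook a predicate: it returns True as soon as a handler stops the event, and otherwise reports the event's final stopped state. Below is a minimal, self-contained sketch of the same dispatch pattern; Event, Handler, and the handler list are simplified stand-ins for AstrMessageEvent and AstrBot's handler registry, not the project's real classes.

import asyncio
from typing import Awaitable, Callable


class Event:
    """Stand-in for AstrMessageEvent: only the stop flag matters here."""

    def __init__(self) -> None:
        self._stopped = False

    def stop_event(self) -> None:
        self._stopped = True

    def is_stopped(self) -> bool:
        return self._stopped


Handler = Callable[[Event], Awaitable[None]]


async def call_event_hook(event: Event, handlers: list[Handler]) -> bool:
    """Run each hook in order; report whether the event ended up stopped."""
    for handler in handlers:
        await handler(event)
        if event.is_stopped():
            # A handler terminated propagation: skip the remaining hooks.
            return True
    return event.is_stopped()


async def main() -> None:
    async def vetoing_hook(event: Event) -> None:
        event.stop_event()  # e.g. a content filter rejecting the message

    if await call_event_hook(Event(), [vetoing_hook]):
        print("event stopped; the caller should bail out")


asyncio.run(main())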
@@ -127,9 +127,10 @@ class ToolLoopAgent(BaseAgentRunner):
             self._transition_state(AgentState.DONE)

             # Run the event hook.
-            await self.pipeline_ctx.call_event_hook(
+            if await self.pipeline_ctx.call_event_hook(
                 self.event, EventType.OnLLMResponseEvent, llm_resp
-            )
+            ):
+                return

             # Return the LLM result.
             if llm_resp.result_chain:
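With the boolean return in place, this call site treats the hook as a guard: if any OnLLMResponseEvent handler stops the event, the agent returns before the LLM result is delivered. A self-contained check of that behaviour with fake event and pipeline-context objects (the fakes below are illustrative, not AstrBot classes):

import asyncio


class FakeEvent:
    """Illustrative stand-in for AstrMessageEvent."""

    def __init__(self) -> None:
        self.stopped = False
        self.delivered: list[str] = []


class VetoingPipelineCtx:
    """Pretends every OnLLMResponseEvent hook stops the event."""

    async def call_event_hook(self, event: FakeEvent, hook_type: str, *args) -> bool:
        event.stopped = True
        return True


async def finish(event: FakeEvent, ctx: VetoingPipelineCtx, llm_resp: str) -> None:
    # Mirrors the new guard: a stopped event never reaches delivery.
    if await ctx.call_event_hook(event, "OnLLMResponseEvent", llm_resp):
        return
    event.delivered.append(llm_resp)


async def main() -> None:
    event = FakeEvent()
    await finish(event, VetoingPipelineCtx(), "hello")
    assert event.delivered == []  # the vetoed response was never sent


asyncio.run(main())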
@@ -112,7 +112,8 @@ class LLMRequestSubStage(Stage):
             return

         # Run the pre-LLM-request event hooks.
-        await self.ctx.call_event_hook(event, EventType.OnLLMRequestEvent, req)
+        if await self.ctx.call_event_hook(event, EventType.OnLLMRequestEvent, req):
+            return

         if isinstance(req.contexts, str):
             req.contexts = json.loads(req.contexts)
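The same guard now protects the request path: if a hook stops the event during OnLLMRequestEvent, the stage returns before the provider is ever called. What triggers that is a handler calling event.stop_event(). The sketch below shows such a handler written against AstrBot's decorator-based plugin API as I understand it; the import paths, the filter.on_llm_request decorator, and the ProviderRequest fields are assumptions to verify against the plugin docs, not guaranteed signatures.

from astrbot.api.event import AstrMessageEvent, filter
from astrbot.api.provider import ProviderRequest
from astrbot.api.star import Context, Star, register


@register("request_guard", "example", "Blocks LLM requests containing a keyword", "0.1.0")
class RequestGuard(Star):
    def __init__(self, context: Context):
        super().__init__(context)

    @filter.on_llm_request()
    async def block_sensitive_requests(self, event: AstrMessageEvent, req: ProviderRequest):
        # Assumed API: stopping the event should make call_event_hook return True,
        # so LLMRequestSubStage returns before contacting the provider.
        if "forbidden" in (req.prompt or ""):
            event.stop_event()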
@@ -159,6 +160,8 @@ class LLMRequestSubStage(Stage):
                 step_idx += 1
                 try:
                     async for resp in tool_loop_agent.step():
+                        if event.is_stopped():
+                            return
                         if resp.type == "tool_call_result":
                             continue  # skip tool call results
                         if resp.type == "tool_call":
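The second hunk in this stage adds an early exit inside the agent step loop: once the event is stopped (by a hook or a tool), the stage stops consuming further steps. A self-contained sketch of that pattern over an async generator (the Event class and agent_steps generator are stand-ins, not the real tool_loop_agent):

import asyncio
from typing import AsyncIterator


class Event:
    def __init__(self) -> None:
        self._stopped = False

    def stop(self) -> None:
        self._stopped = True

    def is_stopped(self) -> bool:
        return self._stopped


async def agent_steps(event: Event) -> AsyncIterator[str]:
    """Fake agent: the second step stops the event (e.g. a vetoing tool)."""
    yield "tool_call"
    event.stop()
    yield "tool_call_result"
    yield "llm_result"  # never consumed once the event is stopped


async def run_stage(event: Event) -> list[str]:
    consumed: list[str] = []
    async for resp in agent_steps(event):
        if event.is_stopped():
            # Mirrors the new check: abandon the loop as soon as the event stops.
            return consumed
        consumed.append(resp)
    return consumed


async def main() -> None:
    assert await run_stage(Event()) == ["tool_call"]


asyncio.run(main())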