🐛 fix: 修复 dify 下某些修改了 LLM 响应的插件可能不生效的问题 #876

This commit is contained in:
Soulter
2025-03-19 01:05:28 +08:00
parent d891801c5a
commit e2283d1453
4 changed files with 57 additions and 9 deletions

View File

@@ -581,7 +581,7 @@ CONFIG_METADATA_2 = {
"dify_api_type": "chat",
"dify_api_key": "",
"dify_api_base": "https://api.dify.ai/v1",
"dify_workflow_output_key": "",
"dify_workflow_output_key": "astrbot_wf_output",
"dify_query_input_key": "astrbot_text_query",
"variables": {},
"timeout": 60,

View File

@@ -77,6 +77,10 @@ class MessageChain:
self.use_t2i_ = use_t2i
return self
def get_plain_text(self) -> str:
    """Return this chain as plain text.

    Concatenates the ``text`` of every ``Plain`` component in ``self.chain``,
    separated by single spaces; all non-``Plain`` components are ignored.
    """
    plain_texts = (comp.text for comp in self.chain if isinstance(comp, Plain))
    return " ".join(plain_texts)
class EventResultType(enum.Enum):
"""用于描述事件处理的结果类型。
@@ -147,9 +151,4 @@ class MessageEventResult(MessageChain):
"""是否为 LLM 结果。"""
return self.result_content_type == ResultContentType.LLM_RESULT
def get_plain_text(self) -> str:
"""获取纯文本消息。这个方法将获取所有 Plain 组件的文本并拼接成一条消息。空格分隔。"""
return " ".join([comp.text for comp in self.chain if isinstance(comp, Plain)])
CommandResult = MessageEventResult

View File

@@ -5,6 +5,7 @@ from .func_tool_manager import FuncCall
from openai.types.chat.chat_completion import ChatCompletion
from astrbot.core.db.po import Conversation
from astrbot.core.message.message_event_result import MessageChain
import astrbot.core.message.components as Comp
class ProviderType(enum.Enum):
@@ -59,8 +60,6 @@ class LLMResponse:
"""角色, assistant, tool, err"""
result_chain: MessageChain = None
"""返回的消息链"""
completion_text: str = ""
"""LLM 返回的文本, 已经废弃但仍然兼容。使用 result_chain 替代"""
tools_call_args: List[Dict[str, any]] = field(default_factory=list)
"""工具调用参数"""
tools_call_name: List[str] = field(default_factory=list)
@@ -68,3 +67,51 @@ class LLMResponse:
raw_completion: ChatCompletion = None
_new_record: Dict[str, any] = None
_completion_text: str = ""
def __init__(
    self,
    role: str,
    completion_text: str = "",
    result_chain: MessageChain = None,
    tools_call_args: List[Dict[str, any]] = None,
    tools_call_name: List[str] = None,
    raw_completion: ChatCompletion = None,
    _new_record: Dict[str, any] = None,
):
    """Initialize an LLMResponse.

    Args:
        role (str): Role of the response: "assistant", "tool" or "err".
        completion_text (str, optional): Plain-text result. Deprecated;
            prefer ``result_chain``. Defaults to "".
        result_chain (MessageChain, optional): Result message chain.
            Defaults to None.
        tools_call_args (List[Dict[str, any]], optional): Tool-call
            arguments. Defaults to an empty list.
        tools_call_name (List[str], optional): Tool-call names.
            Defaults to an empty list.
        raw_completion (ChatCompletion, optional): Raw response in OpenAI
            format. Defaults to None.
        _new_record (Dict[str, any], optional): Internal record payload.
            Defaults to None.
    """
    self.role = role
    # NOTE: completion_text is a property. Assigning it *before*
    # result_chain means the value lands in _completion_text (result_chain
    # still holds the class-level default None at this point). Keep this
    # order — swapping it would route the text into the chain instead.
    self.completion_text = completion_text
    self.result_chain = result_chain
    # The class declares these fields with field(default_factory=list);
    # normalize None to [] here so instances built via __init__ honor the
    # same contract and callers can append without a None check.
    self.tools_call_args = tools_call_args if tools_call_args is not None else []
    self.tools_call_name = tools_call_name if tools_call_name is not None else []
    self.raw_completion = raw_completion
    self._new_record = _new_record
@property
def completion_text(self):
    """Plain text of this response.

    When a result chain is present it is the source of truth: the text of
    its Plain components is returned. Otherwise the stored legacy text
    (``_completion_text``) is returned.
    """
    chain = self.result_chain
    if not chain:
        return self._completion_text
    return chain.get_plain_text()
@completion_text.setter
def completion_text(self, value):
    """Set the plain-text content of this response.

    When a result chain exists, every existing Plain component is removed
    and a single ``Plain(value)`` is prepended, keeping the chain as the
    source of truth. Otherwise the legacy text field is updated directly.
    """
    chain = self.result_chain
    if not chain:
        self._completion_text = value
        return
    # Drop all Plain components, then prepend the new text.
    non_plain = []
    for comp in chain.chain:
        if not isinstance(comp, Comp.Plain):
            non_plain.append(comp)
    chain.chain = non_plain
    chain.chain.insert(0, Comp.Plain(value))

View File

@@ -44,9 +44,11 @@ class ProviderDify(Provider):
self.dify_query_input_key = provider_config.get(
"dify_query_input_key", "astrbot_text_query"
)
self.variables: dict = provider_config.get("variables", {})
if not self.dify_query_input_key:
self.dify_query_input_key = "astrbot_text_query"
if not self.workflow_output_key:
self.workflow_output_key = "astrbot_wf_output"
self.variables: dict = provider_config.get("variables", {})
self.timeout = provider_config.get("timeout", 120)
if isinstance(self.timeout, str):
self.timeout = int(self.timeout)