feat(llm): add AI conversation history management

Author: fccckaug, 2025-06-17 21:36:52 +08:00 (committed by webjoin111)
parent 17dd2bb22e
commit 2ac3baf63d


@@ -75,9 +75,23 @@ class AI:
     3. 高级方法通过get_model_instance()直接访问
     """
 
-    def __init__(self, config: AIConfig | None = None):
-        """初始化AI服务"""
+    def __init__(
+        self, config: AIConfig | None = None, history: list[LLMMessage] | None = None
+    ):
+        """
+        初始化AI服务
+
+        Args:
+            config: AI 配置.
+            history: 可选的初始对话历史.
+        """
         self.config = config or AIConfig()
+        self.history = history or []
+
+    def clear_history(self):
+        """清空当前会话的历史记录"""
+        self.history = []
+        logger.info("AI session history cleared.")
 
     async def chat(
         self,
@@ -86,17 +100,19 @@ class AI:
         model: ModelName = None,
         **kwargs: Any,
     ) -> str:
-        """聊天对话 - 支持简单多模态输入"""
-        llm_messages: list[LLMMessage]
+        """
+        进行一次聊天对话
+
+        此方法会自动使用和更新会话内的历史记录
+        """
+        current_message: LLMMessage
         if isinstance(message, str):
-            llm_messages = [LLMMessage.user(message)]
+            current_message = LLMMessage.user(message)
         elif isinstance(message, list) and all(
             isinstance(part, LLMContentPart) for part in message
         ):
-            llm_messages = [LLMMessage.user(message)]
+            current_message = LLMMessage.user(message)
         elif isinstance(message, LLMMessage):
-            llm_messages = [message]
+            current_message = message
         else:
             raise LLMException(
                 f"AI.chat 不支持的消息类型: {type(message)}. "
@@ -105,9 +121,15 @@
                 code=LLMErrorCode.API_REQUEST_FAILED,
             )
 
+        final_messages = [*self.history, current_message]
+
         response = await self._execute_generation(
-            llm_messages, model, "聊天失败", kwargs
+            final_messages, model, "聊天失败", kwargs
         )
+
+        self.history.append(current_message)
+        self.history.append(LLMMessage.assistant_text_response(response.text))
+
         return response.text
 
     async def code(
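
The diff above turns AI.chat from a stateless call into a session-aware one: each call sends [*self.history, current_message] to the model and, after a successful generation, appends both the user message and the assistant reply to the session. Below is a minimal usage sketch of that behavior; it is not part of the commit. The import path my_project.llm is an assumption, while AI, AIConfig, LLMMessage.user, LLMMessage.assistant_text_response, chat and clear_history are taken directly from the diff.

    import asyncio

    # Hypothetical import path; the diff does not show which module exposes these names.
    from my_project.llm import AI, AIConfig, LLMMessage


    async def main() -> None:
        ai = AI(config=AIConfig())

        # Each chat() call sends the accumulated history plus the new message,
        # then appends both the user message and the assistant reply to ai.history.
        await ai.chat("My name is Alice.")
        reply = await ai.chat("What is my name?")  # the model now sees the first exchange as context
        print(reply, len(ai.history))  # history holds 4 messages after two turns

        # A session can also be seeded with prior history instead of starting empty.
        seeded = AI(history=[
            LLMMessage.user("Hello"),
            LLMMessage.assistant_text_response("Hi, how can I help?"),
        ])
        await seeded.chat("Please continue.")

        # Reset the session when the accumulated context should no longer carry over.
        ai.clear_history()


    asyncio.run(main())

Note that the history is only mutated after the awaited generation returns, so a request that raises does not leave a half-recorded turn in the session.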