"""
|
||
LLM服务 - 兼容层,使用多模型服务
|
||
"""
from typing import Optional, List, Dict, Any

from app.services.multi_llm_service import multi_llm_service
from app.utils.logger import logger
class LLMService:
    """LLM service (compatibility layer).

    Thin wrapper kept so callers of the old single-model API keep working;
    every call is delegated to the shared ``multi_llm_service`` instance.
    """

    def __init__(self) -> None:
        """Bind the multi-model service and expose a legacy ``client`` alias."""
        self.multi_service = multi_llm_service
        # Legacy attribute: older callers accessed ``client`` directly.
        self.client = multi_llm_service.current_model  # 兼容性

    def chat(
        self,
        messages: List[Dict[str, str]],
        model: Optional[str] = None,
        temperature: float = 0.7,
        max_tokens: int = 2000,
        model_override: Optional[str] = None,
    ) -> Optional[str]:
        """Run a (non-streaming) chat completion via the multi-model service.

        Args:
            messages: Chat messages as ``{"role": ..., "content": ...}`` dicts.
            model: Deprecated and ignored — it is NOT forwarded to the
                backend; use ``model_override`` instead.
            temperature: Sampling temperature.
            max_tokens: Maximum number of tokens to generate.
            model_override: Backend model to use (zhipu/deepseek).

        Returns:
            The LLM response text, or ``None`` when the backend yields nothing.
        """
        return self.multi_service.chat(
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
            model_override=model_override,
        )

    def analyze_intent(self, user_message: str) -> Dict[str, Any]:
        """Analyze the user's intent with the LLM (delegates to multi-service)."""
        return self.multi_service.analyze_intent(user_message)

    def chat_stream(
        self,
        messages: List[Dict[str, str]],
        temperature: float = 0.7,
        max_tokens: int = 2000,
        model_override: Optional[str] = None,
    ):
        """Run a streaming chat completion via the multi-model service.

        Args:
            messages: Chat messages as ``{"role": ..., "content": ...}`` dicts.
            temperature: Sampling temperature.
            max_tokens: Maximum number of tokens to generate.
            model_override: Backend model to use (zhipu/deepseek).

        Yields:
            Text fragments of the LLM response (whatever the underlying
            ``multi_service.chat_stream`` yields is returned as-is).
        """
        return self.multi_service.chat_stream(
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
            model_override=model_override,
        )
# Create the shared global instance (legacy import point for existing callers).
llm_service = LLMService()