# Chat service module: routes chat-completion requests to GPT / DeepSeek / Doubao backends.
import json
|
||
import requests
|
||
import asyncio
|
||
from typing import Dict, Optional, List, Union
|
||
from settings import settings
|
||
|
||
# Default model configuration
default_model = "deepseek"  # default backend; one of "gpt", "deepseek" or "doubao"

gpt_api_model = "gpt-4o"  # ChatGPT model name

deepseek_api_model = "deepseek-chat"  # DeepSeek model name

doubao_api_model = "doubao-1-5-thinking-pro-250415"  # Doubao model name


# Model alias mapping: public-facing alias -> backend "type" plus the
# concrete "model" id sent to that backend's API.
model_mapping = {
    "jingrow-chat": {
        "type": "deepseek",
        "model": "deepseek-chat"
    },
    "jingrow-chat-lite": {
        "type": "doubao",
        "model": "doubao-1-5-lite-32k-250115"
    },
    "jingrow-chat-think": {
        "type": "doubao",
        "model": "doubao-1-5-thinking-pro-250415"
    },
    "jingrow-chat-vision": {
        "type": "doubao",
        "model": "doubao-1.5-vision-pro-250328"
    }
}

# Default system prompt. (Chinese; translates to: "You are a helpful AI
# assistant; please give clear, accurate answers to the user's questions.")
# Runtime string — intentionally left untranslated.
default_system_message = """
你是一个有用的AI助手,请根据用户的问题提供清晰、准确的回答。
"""
|
||
|
||
class ChatService:
    """Routes chat-completion requests to an OpenAI-compatible backend.

    Supported backend types are "gpt", "deepseek" and "doubao"; endpoint
    URLs and credentials are read from the module-level ``settings`` object,
    and public model aliases are resolved via ``model_mapping``.
    """

    def __init__(self, model: Optional[str] = None, temperature: float = 0.7, top_p: float = 0.9, max_tokens: int = 2048):
        """Initialize the chat service.

        Args:
            model: Model name or alias to use; falls back to ``default_model``
                when None.
            temperature: Sampling temperature forwarded to the API.
            top_p: Nucleus-sampling parameter forwarded to the API.
            max_tokens: Maximum number of tokens to generate.
        """
        self.model = model
        self.temperature = temperature
        self.top_p = top_p
        self.max_tokens = max_tokens

    def _get_model_config(self, model: str) -> Dict:
        """Resolve a model name/alias to its backend type and concrete model id.

        Args:
            model: Model name or alias (e.g. "jingrow-chat").

        Returns:
            Dict with keys "type" ("gpt"/"deepseek"/"doubao") and "model"
            (the concrete model id to send to the backend).
        """
        # Explicit aliases take precedence over name-based heuristics.
        if model in model_mapping:
            return model_mapping[model]

        # Otherwise infer the backend from the model name itself; anything
        # unrecognized is treated as a GPT model.
        model_lower = model.lower()
        if "deepseek" in model_lower:
            return {"type": "deepseek", "model": model}
        if "doubao" in model_lower:
            return {"type": "doubao", "model": model}
        return {"type": "gpt", "model": model}

    def _get_api_config(self, model_type: str) -> Dict:
        """Return the API configuration (url/key/model) for a backend type.

        Args:
            model_type: One of "gpt", "deepseek" or "doubao".

        Returns:
            Dict with "url", "key" and "model" entries. Unknown types fall
            back to the GPT configuration.
        """
        config = {
            "gpt": {
                "url": settings.chatgpt_api_url,
                "key": settings.chatgpt_api_key,
                "model": settings.chatgpt_api_model
            },
            "deepseek": {
                "url": settings.deepseek_api_url,
                "key": settings.deepseek_api_key,
                "model": settings.deepseek_api_model
            },
            "doubao": {
                "url": settings.doubao_api_url,
                "key": settings.doubao_api_key,
                "model": settings.doubao_api_model
            }
        }
        return config.get(model_type, config["gpt"])

    def _prepare_payload(self, messages: List[Dict], model_type: str, model_name: str) -> Dict:
        """Build the JSON payload for a chat-completion request.

        Args:
            messages: List of message dicts (each with "role" and "content").
            model_type: Backend type; unused here, kept for interface
                compatibility with callers.
            model_name: Concrete model id to request.

        Returns:
            The request payload dict.
        """
        # Fix: the original also called self._get_api_config(model_type) here
        # and discarded the result — the payload only needs the model name.
        return {
            "model": model_name,  # the mapped, concrete model name
            "messages": messages,
            "temperature": self.temperature,
            "top_p": self.top_p,
            "max_tokens": self.max_tokens
        }

    def _send_request(self, messages: List[Dict], model_type: str, model_name: str) -> Optional[Dict]:
        """POST the chat request to the selected backend.

        Args:
            messages: List of message dicts.
            model_type: Backend type ("gpt"/"deepseek"/"doubao").
            model_name: Concrete model id.

        Returns:
            The parsed JSON response on HTTP 200, otherwise None. Failures
            are deliberately swallowed (best-effort); callers translate None
            into an error result.
        """
        api_config = self._get_api_config(model_type)
        payload = self._prepare_payload(messages, model_type, model_name)

        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_config['key']}"
        }

        try:
            # (connect timeout, read timeout): generous read timeout for
            # slow generations.
            response = requests.post(
                api_config["url"],
                headers=headers,
                json=payload,
                timeout=(10, 300)
            )
            if response.status_code != 200:
                return None
            return response.json()
        except Exception:
            # Deliberate best-effort: any network/parse failure maps to None.
            return None

    def chat_sync(self, messages: List[Dict]) -> Dict:
        """Synchronously handle a chat request.

        Args:
            messages: List of message dicts, each with "role" and "content".

        Returns:
            Dict with "status" ("success"/"error") and either "data" (the
            assistant's reply text) or "message" (a Chinese error string).
        """
        try:
            model_config = self._get_model_config(self.model or default_model)
            model_type = model_config["type"]
            model_name = model_config["model"]

            ai_response = self._send_request(messages, model_type, model_name)

            if ai_response is None:
                return {
                    "status": "error",
                    "message": "AI服务请求失败"
                }

            choices = ai_response.get("choices", [])
            if not choices:
                return {
                    "status": "error",
                    "message": "AI响应无效"
                }

            message = choices[0].get("message", {}).get("content", "")
            if not message:
                return {
                    "status": "error",
                    "message": "AI响应内容为空"
                }

            return {
                "status": "success",
                "data": message
            }

        except Exception as e:
            return {
                "status": "error",
                "message": f"处理聊天任务时发生错误: {str(e)}"
            }

    async def chat(self, messages: List[Dict]) -> Dict:
        """Asynchronously handle a chat request.

        Runs the blocking ``chat_sync`` in the default executor so the event
        loop is not blocked by the HTTP call.

        Args:
            messages: List of message dicts, each with "role" and "content".

        Returns:
            Same result dict as ``chat_sync``.
        """
        try:
            # Fix: asyncio.get_event_loop() is deprecated inside coroutines
            # (Python 3.10+); get_running_loop() is the correct equivalent.
            loop = asyncio.get_running_loop()
            return await loop.run_in_executor(None, self.chat_sync, messages)
        except Exception as e:
            return {
                "status": "error",
                "message": f"聊天请求失败: {str(e)}"
            }