AIChatRoom/backend/adapters/minimax_adapter.py
Claude Code edbddf855d feat: AI chat room multi-agent collaborative discussion platform
- Implement agent management, with AI-assisted generation of system prompts
- Support multiple AI providers (OpenRouter, Zhipu, MiniMax, etc.); a provider-selection sketch follows this header
- Implement chat rooms and the discussion engine
- Real-time message push over WebSocket
- Frontend built with React + Ant Design
- Backend built with FastAPI + MongoDB

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-03 19:20:02 +08:00
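
The commit description above mentions a common adapter layer over multiple AI providers. Below is a minimal sketch of how a provider name might be mapped to an adapter class; the ADAPTER_REGISTRY dict, the create_adapter helper, and the OpenRouter/Zhipu adapter modules are illustrative assumptions and are not part of this repository snapshot. Only MiniMaxAdapter is defined in the file that follows.

# Hypothetical provider-selection sketch (not part of minimax_adapter.py).
from adapters.minimax_adapter import MiniMaxAdapter
# from adapters.openrouter_adapter import OpenRouterAdapter  # assumed module
# from adapters.zhipu_adapter import ZhipuAdapter            # assumed module

ADAPTER_REGISTRY = {
    "minimax": MiniMaxAdapter,
    # "openrouter": OpenRouterAdapter,
    # "zhipu": ZhipuAdapter,
}

def create_adapter(provider: str, **config):
    """Instantiate the adapter registered for the given provider name."""
    try:
        adapter_cls = ADAPTER_REGISTRY[provider]
    except KeyError:
        raise ValueError(f"Unknown provider: {provider}")
    return adapter_cls(**config)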


"""
MiniMax适配器
支持MiniMax大模型API
"""
import json
from datetime import datetime
from typing import List, Dict, Any, Optional, AsyncGenerator
from loguru import logger
from .base_adapter import BaseAdapter, ChatMessage, AdapterResponse
from utils.proxy_handler import get_http_client


class MiniMaxAdapter(BaseAdapter):
    """
    MiniMax API adapter.
    Supports the abab model family.
    """

    DEFAULT_BASE_URL = "https://api.minimax.chat/v1"

    def __init__(
        self,
        api_key: str,
        base_url: str = "",
        model: str = "abab6.5-chat",
        use_proxy: bool = False,
        proxy_config: Optional[Dict[str, Any]] = None,
        timeout: int = 60,
        **kwargs
    ):
        super().__init__(
            api_key=api_key,
            base_url=base_url or self.DEFAULT_BASE_URL,
            model=model,
            use_proxy=use_proxy,
            proxy_config=proxy_config,
            timeout=timeout,
            **kwargs
        )
        # MiniMax additionally requires a group_id, passed via kwargs
        self.group_id = kwargs.get("group_id", "")

    async def chat(
        self,
        messages: List[ChatMessage],
        temperature: float = 0.7,
        max_tokens: int = 2000,
        **kwargs
    ) -> AdapterResponse:
        """Send a chat completion request."""
        start_time = datetime.utcnow()
        try:
            async with get_http_client(
                use_proxy=self.use_proxy,
                proxy_config=self.proxy_config,
                timeout=self.timeout
            ) as client:
                headers = {
                    "Authorization": f"Bearer {self.api_key}",
                    "Content-Type": "application/json"
                }
                # MiniMax uses its own message format: system messages go into
                # bot_setting, everything else becomes a USER/BOT entry.
                minimax_messages = []
                bot_setting = []
                for msg in messages:
                    if msg.role == "system":
                        bot_setting.append({
                            "bot_name": "assistant",
                            "content": msg.content
                        })
                    else:
                        minimax_messages.append({
                            "sender_type": "USER" if msg.role == "user" else "BOT",
                            "sender_name": msg.name or ("User" if msg.role == "user" else "assistant"),
                            "text": msg.content
                        })
                payload = {
                    "model": self.model,
                    "messages": minimax_messages,
                    "bot_setting": bot_setting if bot_setting else [
                        {"bot_name": "assistant", "content": "You are a helpful assistant"}
                    ],
                    "temperature": temperature,
                    "tokens_to_generate": max_tokens,
                    "mask_sensitive_info": False,
                    **kwargs
                }
                url = f"{self.base_url}/text/chatcompletion_v2"
                if self.group_id:
                    url = f"{url}?GroupId={self.group_id}"
                response = await client.post(
                    url,
                    headers=headers,
                    json=payload
                )
                if response.status_code != 200:
                    error_text = response.text
                    logger.error(f"MiniMax API error: {response.status_code} - {error_text}")
                    return AdapterResponse(
                        success=False,
                        error=f"API error: {response.status_code} - {error_text}",
                        latency_ms=self._calculate_latency(start_time)
                    )
                data = response.json()
                # Check for an error reported inside the response body
                if data.get("base_resp", {}).get("status_code", 0) != 0:
                    error_msg = data.get("base_resp", {}).get("status_msg", "unknown error")
                    return AdapterResponse(
                        success=False,
                        error=f"API error: {error_msg}",
                        latency_ms=self._calculate_latency(start_time)
                    )
                reply = data.get("reply", "")
                usage = data.get("usage", {})
                return AdapterResponse(
                    success=True,
                    content=reply,
                    model=self.model,
                    finish_reason="content_filter" if data.get("output_sensitive", False) else "stop",
                    prompt_tokens=usage.get("prompt_tokens", 0),
                    completion_tokens=usage.get("completion_tokens", 0),
                    total_tokens=usage.get("total_tokens", 0),
                    latency_ms=self._calculate_latency(start_time)
                )
        except Exception as e:
            logger.error(f"MiniMax request failed: {e}")
            return AdapterResponse(
                success=False,
                error=str(e),
                latency_ms=self._calculate_latency(start_time)
            )

    async def chat_stream(
        self,
        messages: List[ChatMessage],
        temperature: float = 0.7,
        max_tokens: int = 2000,
        **kwargs
    ) -> AsyncGenerator[str, None]:
        """Send a streaming chat completion request."""
        try:
            async with get_http_client(
                use_proxy=self.use_proxy,
                proxy_config=self.proxy_config,
                timeout=self.timeout
            ) as client:
                headers = {
                    "Authorization": f"Bearer {self.api_key}",
                    "Content-Type": "application/json"
                }
                # Same message conversion as in chat()
                minimax_messages = []
                bot_setting = []
                for msg in messages:
                    if msg.role == "system":
                        bot_setting.append({
                            "bot_name": "assistant",
                            "content": msg.content
                        })
                    else:
                        minimax_messages.append({
                            "sender_type": "USER" if msg.role == "user" else "BOT",
                            "sender_name": msg.name or ("User" if msg.role == "user" else "assistant"),
                            "text": msg.content
                        })
                payload = {
                    "model": self.model,
                    "messages": minimax_messages,
                    "bot_setting": bot_setting if bot_setting else [
                        {"bot_name": "assistant", "content": "You are a helpful assistant"}
                    ],
                    "temperature": temperature,
                    "tokens_to_generate": max_tokens,
                    "stream": True,
                    **kwargs
                }
                url = f"{self.base_url}/text/chatcompletion_v2"
                if self.group_id:
                    url = f"{url}?GroupId={self.group_id}"
                async with client.stream(
                    "POST",
                    url,
                    headers=headers,
                    json=payload
                ) as response:
                    # Server-sent events: payload lines are prefixed with "data: "
                    async for line in response.aiter_lines():
                        if line.startswith("data: "):
                            data_str = line[6:]
                            if data_str.strip() == "[DONE]":
                                break
                            try:
                                data = json.loads(data_str)
                                delta = data.get("choices", [{}])[0].get("delta", {})
                                content = delta.get("content", "")
                                if content:
                                    yield content
                            except json.JSONDecodeError:
                                continue
        except Exception as e:
            logger.error(f"MiniMax streaming request failed: {e}")
            yield f"[Error: {str(e)}]"

    async def test_connection(self) -> Dict[str, Any]:
        """Test connectivity to the API."""
        start_time = datetime.utcnow()
        try:
            test_messages = [
                ChatMessage(role="user", content="Hello, please reply 'OK'")
            ]
            response = await self.chat(
                messages=test_messages,
                temperature=0,
                max_tokens=10
            )
            if response.success:
                return {
                    "success": True,
                    "message": "Connection successful",
                    "model": response.model,
                    "latency_ms": response.latency_ms
                }
            else:
                return {
                    "success": False,
                    "message": response.error,
                    "latency_ms": response.latency_ms
                }
        except Exception as e:
            return {
                "success": False,
                "message": str(e),
                "latency_ms": self._calculate_latency(start_time)
            }
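
A minimal usage sketch for this adapter, kept separate from the file above. It assumes the package layout implied by the imports (adapters.base_adapter and adapters.minimax_adapter are importable from the backend root); the API key and group_id values are placeholders.

# Usage sketch (not part of minimax_adapter.py); key and group_id are placeholders.
import asyncio

from adapters.base_adapter import ChatMessage
from adapters.minimax_adapter import MiniMaxAdapter


async def main():
    adapter = MiniMaxAdapter(api_key="YOUR_API_KEY", group_id="YOUR_GROUP_ID")

    # Non-streaming request
    response = await adapter.chat(
        messages=[ChatMessage(role="user", content="Hello")],
        temperature=0.7,
        max_tokens=256,
    )
    if response.success:
        print(response.content, response.total_tokens)
    else:
        print("error:", response.error)

    # Streaming request: chunks are yielded as they arrive
    async for chunk in adapter.chat_stream(
        messages=[ChatMessage(role="user", content="Hello")]
    ):
        print(chunk, end="", flush=True)


asyncio.run(main())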