diff --git a/src/minenasai/webtui/server.py b/src/minenasai/webtui/server.py
index e7b8cb2..e065152 100644
--- a/src/minenasai/webtui/server.py
+++ b/src/minenasai/webtui/server.py
@@ -390,7 +390,7 @@ async def update_proxy_config(data: ProxyConfigUpdate) -> dict[str, Any]:
 
 @app.post("/api/llm/test/{provider}")
 async def test_llm_connection(provider: str) -> dict[str, Any]:
-    """测试 LLM 连接"""
+    """测试 LLM 连接(使用已保存配置)"""
     try:
         from minenasai.llm import get_llm_manager
 
@@ -406,6 +406,160 @@ async def test_llm_connection(provider: str) -> dict[str, Any]:
         return {"success": False, "error": str(e)}
 
 
+class LLMTestRequest(BaseModel):
+    """LLM 测试请求"""
+    provider: str
+    api_key: str
+    base_url: str | None = None
+    model: str | None = None
+
+
+@app.post("/api/llm/test")
+async def test_llm_with_config(request: LLMTestRequest) -> dict[str, Any]:
+    """使用指定配置测试 LLM 连接"""
+    import time
+
+    try:
+        provider = request.provider
+        api_key = request.api_key
+        base_url = request.base_url
+        model = request.model
+
+        # 根据提供商进行测试
+        start = time.time()
+
+        if provider in ["openai-compatible", "openai", "deepseek"]:
+            # OpenAI 兼容接口
+            import httpx
+
+            url = base_url or "https://api.openai.com/v1"
+            model_name = model or "gpt-3.5-turbo"
+
+            # 获取代理设置
+            settings = get_settings()
+            proxy_url = None
+            if settings.proxy.enabled and provider not in ["deepseek"]:
+                proxy_url = settings.proxy.http or settings.proxy.https
+
+            async with httpx.AsyncClient(
+                proxy=proxy_url,
+                timeout=30
+            ) as client:
+                resp = await client.post(
+                    f"{url.rstrip('/')}/chat/completions",
+                    headers={
+                        "Authorization": f"Bearer {api_key}",
+                        "Content-Type": "application/json"
+                    },
+                    json={
+                        "model": model_name,
+                        "messages": [{"role": "user", "content": "Say hi"}],
+                        "max_tokens": 5
+                    }
+                )
+
+            latency = int((time.time() - start) * 1000)
+
+            if resp.status_code == 200:
+                return {"success": True, "message": "连接正常", "latency": latency}
+            elif resp.status_code == 401:
+                return {"success": False, "error": "401 Unauthorized - API Key 无效"}
+            elif resp.status_code == 403:
+                return {"success": False, "error": "403 Forbidden - 无权访问"}
+            elif resp.status_code == 404:
+                return {"success": False, "error": "404 Not Found - API URL 不正确"}
+            else:
+                error_text = resp.text[:200] if resp.text else str(resp.status_code)
+                return {"success": False, "error": f"{resp.status_code}: {error_text}"}
+
+        elif provider == "anthropic":
+            import httpx
+
+            url = base_url or "https://api.anthropic.com"
+            model_name = model or "claude-sonnet-4-20250514"
+
+            settings = get_settings()
+            proxy_url = None
+            if settings.proxy.enabled:
+                proxy_url = settings.proxy.http or settings.proxy.https
+
+            async with httpx.AsyncClient(
+                proxy=proxy_url,
+                timeout=30
+            ) as client:
+                resp = await client.post(
+                    f"{url.rstrip('/')}/v1/messages",
+                    headers={
+                        "x-api-key": api_key,
+                        "anthropic-version": "2023-06-01",
+                        "Content-Type": "application/json"
+                    },
+                    json={
+                        "model": model_name,
+                        "messages": [{"role": "user", "content": "Say hi"}],
+                        "max_tokens": 5
+                    }
+                )
+
+            latency = int((time.time() - start) * 1000)
+
+            if resp.status_code == 200:
+                return {"success": True, "message": "连接正常", "latency": latency}
+            elif resp.status_code == 401:
+                return {"success": False, "error": "401 - API Key 无效或格式不正确"}
+            else:
+                error_text = resp.text[:200] if resp.text else str(resp.status_code)
+                return {"success": False, "error": f"{resp.status_code}: {error_text}"}
+
+        elif provider == "gemini":
+            import httpx
+
+            model_name = model or "gemini-2.0-flash"
+            url = f"https://generativelanguage.googleapis.com/v1beta/models/{model_name}:generateContent?key={api_key}"
+
+            settings = get_settings()
+            proxy_url = None
+            if settings.proxy.enabled:
+                proxy_url = settings.proxy.http or settings.proxy.https
+
+            async with httpx.AsyncClient(
+                proxy=proxy_url,
+                timeout=30
+            ) as client:
+                resp = await client.post(
+                    url,
+                    headers={"Content-Type": "application/json"},
+                    json={
+                        "contents": [{"parts": [{"text": "Say hi"}]}],
+                        "generationConfig": {"maxOutputTokens": 5}
+                    }
+                )
+
+            latency = int((time.time() - start) * 1000)
+
+            if resp.status_code == 200:
+                return {"success": True, "message": "连接正常", "latency": latency}
+            elif resp.status_code == 400:
+                return {"success": False, "error": "400 - API Key 无效或模型名称错误"}
+            else:
+                error_text = resp.text[:200] if resp.text else str(resp.status_code)
+                return {"success": False, "error": f"{resp.status_code}: {error_text}"}
+        else:
+            return {"success": False, "error": f"不支持的提供商: {provider}"}
+
+    except Exception as e:
+        error_msg = str(e)
+        # 处理常见的网络错误
+        if "ConnectError" in error_msg or "ECONNREFUSED" in error_msg:
+            return {"success": False, "error": "连接被拒绝 - 请检查 URL 或代理设置"}
+        elif "TimeoutException" in error_msg or "timed out" in error_msg:
+            return {"success": False, "error": "请求超时 - 请检查网络或代理设置"}
+        elif "SSLError" in error_msg:
+            return {"success": False, "error": "SSL 错误 - 请检查代理配置"}
+        else:
+            return {"success": False, "error": error_msg}
+
+
 @app.post("/api/proxy/test")
 async def test_proxy() -> dict[str, Any]:
     """测试代理连接"""
diff --git a/src/minenasai/webtui/static/webui/css/webui.css b/src/minenasai/webtui/static/webui/css/webui.css
index b03e05b..c2af379 100644
--- a/src/minenasai/webtui/static/webui/css/webui.css
+++ b/src/minenasai/webtui/static/webui/css/webui.css
@@ -1,5 +1,6 @@
 /* MineNASAI WebUI 样式 */
 
+/* 暗色主题(默认) */
 :root {
     --bg-primary: #1a1b26;
     --bg-secondary: #24283b;
@@ -13,6 +14,20 @@
     --border-color: #3b4261;
 }
 
+/* 亮色主题 */
+.theme-light {
+    --bg-primary: #ffffff;
+    --bg-secondary: #f5f7fa;
+    --bg-tertiary: #fafafa;
+    --text-primary: #303133;
+    --text-secondary: #909399;
+    --accent-primary: #409eff;
+    --accent-success: #67c23a;
+    --accent-warning: #e6a23c;
+    --accent-error: #f56c6c;
+    --border-color: #dcdfe6;
+}
+
 * {
     margin: 0;
     padding: 0;
@@ -196,7 +211,8 @@ body {
     border: 1px solid var(--border-color);
     border-radius: 8px;
     overflow: hidden;
-    height: calc(100vh - 180px);
+    height: calc(100vh - 340px);
+    min-height: 300px;
 }
 
 .terminal-header {
@@ -368,6 +384,67 @@ body {
     color: var(--accent-success);
 }
 
+/* LLM 配置状态卡片 */
+.status-chip {
+    display: inline-flex;
+    align-items: center;
+    gap: 6px;
+    padding: 8px 12px;
+    border: 1px solid var(--border-color);
+    border-radius: 8px;
+    cursor: pointer;
+    transition: all 0.2s;
+    background-color: var(--bg-tertiary);
+}
+
+.status-chip:hover {
+    border-color: var(--accent-primary);
+    background-color: rgba(122, 162, 247, 0.1);
+}
+
+.status-chip.selected {
+    border-color: var(--accent-primary);
+    background-color: rgba(122, 162, 247, 0.15);
+}
+
+.status-chip.active {
+    border-color: var(--accent-success);
+}
+
+.status-chip.active .status-icon {
+    opacity: 1;
+}
+
+.status-icon {
+    font-size: 16px;
+    opacity: 0.8;
+}
+
+.status-name {
+    font-size: 13px;
+    font-weight: 500;
+}
+
+/* 测试结果样式 */
+.test-result {
+    display: flex;
+    align-items: center;
+    gap: 6px;
+    padding: 6px 12px;
+    border-radius: 4px;
+    font-size: 14px;
+}
+
+.test-result.success {
+    color: var(--accent-success);
+    background-color: rgba(158, 206, 106, 0.15);
+}
+
+.test-result.error {
+    color: var(--accent-error);
+    background-color: rgba(247, 118, 142, 0.15);
+}
+
 /* 响应式 */
 @media (max-width: 768px) {
     .sidebar {
@@ -380,3 +457,100 @@ body {
         left: 0;
     }
 }
+
+/* 亮色主题额外样式覆盖 */
+.theme-light .el-menu {
+    background-color: var(--bg-primary) !important;
+}
+
+.theme-light .el-menu-item,
+.theme-light .el-sub-menu__title {
+    color: var(--text-primary) !important;
+}
+
+.theme-light .el-menu-item.is-active {
+    background-color: var(--bg-secondary) !important;
+    color: var(--accent-primary) !important;
+}
+
+.theme-light .el-menu-item:hover,
+.theme-light .el-sub-menu__title:hover {
+    background-color: var(--bg-tertiary) !important;
+}
+
+.theme-light .el-card {
+    background-color: var(--bg-secondary);
+    border-color: var(--border-color);
+}
+
+.theme-light .el-input__inner,
+.theme-light .el-textarea__inner {
+    background-color: var(--bg-primary) !important;
+    border-color: var(--border-color) !important;
+    color: var(--text-primary) !important;
+}
+
+.theme-light .el-select-dropdown {
+    background-color: var(--bg-primary) !important;
+}
+
+.theme-light .el-select-dropdown__item {
+    color: var(--text-primary) !important;
+}
+
+.theme-light .config-card {
+    background-color: var(--bg-secondary);
+}
+
+.theme-light .provider-card {
+    background-color: var(--bg-primary);
+}
+
+.theme-light .stat-card {
+    background-color: var(--bg-secondary);
+}
+
+.theme-light .terminal-container,
+.theme-light .logs-container {
+    background-color: var(--bg-primary);
+}
+
+.theme-light .terminal-header,
+.theme-light .logs-header {
+    background-color: var(--bg-secondary);
+}
+
+.theme-light .el-breadcrumb__inner {
+    color: var(--text-primary) !important;
+}
+
+.theme-light .el-dialog {
+    --el-dialog-bg-color: var(--bg-primary);
+}
+
+.theme-light .el-table {
+    --el-table-bg-color: var(--bg-secondary);
+    --el-table-header-bg-color: var(--bg-tertiary);
+    --el-table-tr-bg-color: var(--bg-secondary);
+    --el-table-row-hover-bg-color: var(--bg-tertiary);
+    --el-table-text-color: var(--text-primary);
+    --el-table-header-text-color: var(--text-secondary);
+    --el-table-border-color: var(--border-color);
+}
+
+.theme-light .status-chip {
+    background-color: var(--bg-primary);
+}
+
+.theme-light .status-chip:hover,
+.theme-light .status-chip.selected {
+    background-color: rgba(64, 158, 255, 0.1);
+}
+
+.theme-light .test-result.success {
+    background-color: rgba(103, 194, 58, 0.15);
+}
+
+.theme-light .test-result.error {
+    background-color: rgba(245, 108, 108, 0.15);
+}
diff --git a/src/minenasai/webtui/static/webui/index.html b/src/minenasai/webtui/static/webui/index.html
index c64eaa9..c830e53 100644
--- a/src/minenasai/webtui/static/webui/index.html
+++ b/src/minenasai/webtui/static/webui/index.html
@@ -12,7 +12,7 @@
-
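
For reference, a minimal client-side sketch of exercising the new /api/llm/test endpoint added above. It assumes the WebUI server is reachable at http://127.0.0.1:8000 (placeholder host and port, adjust to your deployment); the request fields mirror LLMTestRequest, and the response shape matches what the handler returns ("success" plus "message"/"latency" or "error").

    import asyncio

    import httpx


    async def main() -> None:
        # Body mirrors LLMTestRequest: provider and api_key are required,
        # base_url and model are optional (provider defaults apply when omitted).
        payload = {
            "provider": "openai-compatible",
            "api_key": "sk-REPLACE-ME",                # placeholder key
            "base_url": "https://api.example.com/v1",  # placeholder base URL
            "model": "gpt-3.5-turbo",
        }
        async with httpx.AsyncClient(timeout=40) as client:
            resp = await client.post("http://127.0.0.1:8000/api/llm/test", json=payload)
            data = resp.json()
        if data.get("success"):
            print(f"ok, latency={data.get('latency')} ms")
        else:
            print(f"failed: {data.get('error')}")


    if __name__ == "__main__":
        asyncio.run(main())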