diff --git a/apps/glm-free-api/1.0.0/data.yml b/apps/glm-free-api/1.0.0/data.yml new file mode 100644 index 0000000..6f5a333 --- /dev/null +++ b/apps/glm-free-api/1.0.0/data.yml @@ -0,0 +1,10 @@ +additionalProperties: + formFields: + - default: "8004" + edit: true + envKey: PANEL_APP_PORT_HTTP + labelEn: Service Port + labelZh: 服务端口 + required: true + rule: paramPort + type: number diff --git a/apps/glm-free-api/1.0.0/docker-compose.yml b/apps/glm-free-api/1.0.0/docker-compose.yml new file mode 100644 index 0000000..607571c --- /dev/null +++ b/apps/glm-free-api/1.0.0/docker-compose.yml @@ -0,0 +1,16 @@ +services: + glm-free-api: + image: akashrajpuroh1t/glm-free-api-fix:1.0.0 + container_name: ${CONTAINER_NAME} + ports: + - ${PANEL_APP_PORT_HTTP}:8000 + networks: + - 1panel-network + environment: + - TZ=Asia/Shanghai + labels: + createdBy: Apps + restart: always +networks: + 1panel-network: + external: true diff --git a/apps/glm-free-api/README.md b/apps/glm-free-api/README.md new file mode 100644 index 0000000..0a68943 --- /dev/null +++ b/apps/glm-free-api/README.md @@ -0,0 +1,45 @@ +### 工具介绍 + +🚀 智谱清言ChatGLM-4-Plus大模型逆向API【特长:超强智能体】,支持高速流式输出、支持智能体对话、支持多轮对话、支持沉思模型、支持Zero思考推理模型、支持视频生成、支持AI绘图、支持联网搜索、支持长文档解读、支持代码调用、支持图像解析,零配置部署,多路token支持,自动清理会话痕迹,仅供测试,如需商用请前往官方开放平台。 + +![](https://cdn.jsdelivr.net/gh/xiaoY233/PicList@main/public/assets/Free-API.png) + +![](https://img.shields.io/badge/Copyright-arch3rPro-ff9800?style=flat&logo=github&logoColor=white) + +### 风险说明 + +- 逆向API是不稳定的,建议前往智谱AI官方 https://open.bigmodel.cn/ 付费使用API,避免封禁的风险。 + +- 本组织和个人不接受任何资金捐助和交易,此项目是纯粹研究交流学习性质! + +- 仅限自用,禁止对外提供服务或商用,避免对官方造成服务压力,否则风险自担! + +### 修改原因: + +1. 原项目作者账号被封,无法更新了 +2. 已去除原项目中包含的恶意代码,欢迎对本项目源码进行审查 + +### 更新说明 +1. 更新models.ts 模型列表,支持glm-4.5、glm-4.5-x、glm-4.5-air、glm-4.6等最新模型 +2. 重新打包新版本的docker镜像,akashrajpuroh1t/glm-free-api-fix:latest +3. 
已修复源码中恶意代码问题,并重新打包,原项目包含混淆代码在src/api/chat.js文件末尾处 + +> PS:模型名称实际上并没啥用,只是方便和好看,实际上线上Chat调用是啥模型,就用的啥模型,模型名称随便填都可以。 + +### 使用说明 + + +从 [智谱清言](https://chatglm.cn/) 获取refresh_token + +进入智谱清言随便发起一个对话,然后F12打开开发者工具,从Application > Cookies中找到chatglm_refresh_token的值,复制这个值填写到Lobechat或者CherryStudio等工具中,作为API密钥,API地址是你部署应用的IP加端口,例如:`https://192.168.1.105:8001/v1/chat/completions`,注意某些工具只需要填写`https://192.168.1.105:8001/`即可。 + +![获取chatglm_refresh_token](https://cdn.jsdelivr.net/gh/LLM-Red-Team/glm-free-api@master/doc/example-0.png) + +### 多账号接入 + +目前同个账号同时只能有*一路*输出,你可以通过提供多个账号的userToken value并使用`,`拼接提供: + +``` +API密钥:TOKEN1,TOKEN2,TOKEN3 +``` +每次请求服务会从中挑选一个。 diff --git a/apps/glm-free-api/data.yml b/apps/glm-free-api/data.yml new file mode 100644 index 0000000..13d9c39 --- /dev/null +++ b/apps/glm-free-api/data.yml @@ -0,0 +1,24 @@ +name: GLM-Free-API +tags: + - AI / 大模型 +title: 🚀 智谱清言ChatGLM-4-Plus大模型逆向API +description: 智谱AI (智谱清言) 接口转API +additionalProperties: + key: glm-free-api + name: GLM-Free-API + tags: + - AI + - Tools + shortDescZh: 🚀 智谱清言ChatGLM-4-Plus大模型逆向API + shortDescEn: A 1Panel deployment for glm-free-api + type: website + crossVersionUpdate: true + limit: 0 + recommend: 0 + architectures: + - amd64 + - arm64 + + website: https://open.bigmodel.cn/ + github: https://github.com/LLM-Red-Team/glm-free-api + document: https://github.com/LLM-Red-Team/glm-free-api \ No newline at end of file diff --git a/apps/glm-free-api/latest/data.yml b/apps/glm-free-api/latest/data.yml new file mode 100644 index 0000000..6f5a333 --- /dev/null +++ b/apps/glm-free-api/latest/data.yml @@ -0,0 +1,10 @@ +additionalProperties: + formFields: + - default: "8004" + edit: true + envKey: PANEL_APP_PORT_HTTP + labelEn: Service Port + labelZh: 服务端口 + required: true + rule: paramPort + type: number diff --git a/apps/glm-free-api/latest/docker-compose.yml b/apps/glm-free-api/latest/docker-compose.yml new file mode 100644 index 0000000..0fe6975 --- /dev/null +++ 
b/apps/glm-free-api/latest/docker-compose.yml @@ -0,0 +1,16 @@ +services: + glm-free-api: + image: akashrajpuroh1t/glm-free-api-fix:latest + container_name: ${CONTAINER_NAME} + ports: + - ${PANEL_APP_PORT_HTTP}:8000 + networks: + - 1panel-network + environment: + - TZ=Asia/Shanghai + labels: + createdBy: Apps + restart: always +networks: + 1panel-network: + external: true diff --git a/apps/glm-free-api/logo.png b/apps/glm-free-api/logo.png new file mode 100644 index 0000000..8a08de4 Binary files /dev/null and b/apps/glm-free-api/logo.png differ diff --git a/apps/kimi-free-api/1.0.0/data.yml b/apps/kimi-free-api/1.0.0/data.yml new file mode 100644 index 0000000..2b7be3c --- /dev/null +++ b/apps/kimi-free-api/1.0.0/data.yml @@ -0,0 +1,10 @@ +additionalProperties: + formFields: + - default: "8002" + edit: true + envKey: PANEL_APP_PORT_HTTP + labelEn: Service Port + labelZh: 服务端口 + required: true + rule: paramPort + type: number diff --git a/apps/kimi-free-api/1.0.0/docker-compose.yml b/apps/kimi-free-api/1.0.0/docker-compose.yml new file mode 100644 index 0000000..7b03037 --- /dev/null +++ b/apps/kimi-free-api/1.0.0/docker-compose.yml @@ -0,0 +1,16 @@ +services: + kimi-free-api: + image: akashrajpuroh1t/kimi-free-api-fix:1.0.0 + container_name: ${CONTAINER_NAME} + ports: + - ${PANEL_APP_PORT_HTTP}:8000 + networks: + - 1panel-network + environment: + - TZ=Asia/Shanghai + labels: + createdBy: Apps + restart: always +networks: + 1panel-network: + external: true diff --git a/apps/kimi-free-api/README.md b/apps/kimi-free-api/README.md new file mode 100644 index 0000000..a765b9d --- /dev/null +++ b/apps/kimi-free-api/README.md @@ -0,0 +1,60 @@ +### 工具介绍 + +🚀 KIMI AI 长文本大模型逆向API【特长:长文本解读整理】,支持高速流式输出、智能体对话、联网搜索、探索版、K1思考模型、长文档解读、图像解析、多轮对话,零配置部署,多路token支持,自动清理会话痕迹,仅供测试,如需商用请前往官方开放平台。 + +![](https://cdn.jsdelivr.net/gh/xiaoY233/PicList@main/public/assets/Free-API.png) + +![](https://img.shields.io/badge/Copyright-arch3rPro-ff9800?style=flat&logo=github&logoColor=white) + +### 
风险说明 + +- 逆向API是不稳定的,建议前往Kimi官方 https://platform.moonshot.cn/ 付费使用API,避免封禁的风险。 + +- 本组织和个人不接受任何资金捐助和交易,此项目是纯粹研究交流学习性质! + +- 仅限自用,禁止对外提供服务或商用,避免对官方造成服务压力,否则风险自担! + + +### 修改原因 + +- 原项目作者账号被封,无法更新了 +- 已去除原项目中包含的恶意代码,欢迎对本项目源码进行审查 + +### 更新说明 +1. 更新models.ts 模型列表,支持kimi-k2-0905-preview、kimi-k2-thinking、kimi-latest等最新模型 +2. 重新打包新版本的docker镜像,akashrajpuroh1t/kimi-free-api-fix:latest +3. 已修复源码中恶意代码问题,并重新打包,原项目包含混淆代码在src/api/chat.js文件末尾处 + +> PS:模型名称实际上并没啥用,只是方便和好看,实际上线上Chat调用是啥模型,就用的啥模型,模型名称随便填都可以。 + +### 使用说明 + +从 [kimi.moonshot.cn](https://kimi.moonshot.cn/) 获取refresh_token + +进入kimi随便发起一个对话,然后F12打开开发者工具,从Application > Local Storage中找到`refresh_token`的值,复制这个值填写到Lobechat或者CherryStudio等工具中,作为API密钥,API地址是你部署应用的IP加端口,例如:`https://192.168.1.105:8001/v1/chat/completions`,注意某些工具只需要填写`https://192.168.1.105:8001/`即可。 + +![获取token](https://cdn.jsdelivr.net/gh/LLM-Red-Team/kimi-free-api@master/doc/example-0.png) + +如果你看到的`refresh_token`是一个数组,请使用`.`拼接起来再使用。 + +![example8](https://cdn.jsdelivr.net/gh/LLM-Red-Team/kimi-free-api@master/doc/example-8.jpg) + + +### 支持K2模型 + +Kimi K2 是一款先进的混合专家 (MoE) 语言模型,拥有 320 亿个激活参数和 1 万亿个总参数。Kimi K2 采用 Muon 优化器进行训练,在前沿知识、推理和编码任务中表现出色,同时针对代理能力进行了精心优化 + +当前版本支持K2模型使用,需要手动添加模型,模型ID为`kimi-k2-0711-preview`, 模型名称随便填写 + +![](https://cdn.jsdelivr.net/gh/xiaoY233/PicList@main/public/assets/Kimi-K2.png) + + +### 多账号接入 + +目前同个账号同时只能有*一路*输出,你可以通过提供多个账号的userToken value并使用`,`拼接提供: + +``` +API密钥:TOKEN1,TOKEN2,TOKEN3 +``` + +每次请求服务会从中挑选一个。 diff --git a/apps/kimi-free-api/data.yml b/apps/kimi-free-api/data.yml new file mode 100644 index 0000000..a919fae --- /dev/null +++ b/apps/kimi-free-api/data.yml @@ -0,0 +1,24 @@ +name: Kimi-Free-API +tags: + - AI / 大模型 +title: 🚀 KIMI AI 长文本大模型逆向API +description: Moonshot AI(Kimi.ai)接口转API +additionalProperties: + key: kimi-free-api + name: Kimi-Free-API + tags: + - AI + - Tools + shortDescZh: 🚀 KIMI AI 长文本大模型逆向API + shortDescEn: A 1Panel deployment for kimi-free-api + type: website + crossVersionUpdate: true + limit: 0 + recommend: 
0 + architectures: + - amd64 + - arm64 + + website: https://platform.moonshot.cn/ + github: https://github.com/LLM-Red-Team/kimi-free-api + document: https://github.com/LLM-Red-Team/kimi-free-api \ No newline at end of file diff --git a/apps/kimi-free-api/latest/data.yml b/apps/kimi-free-api/latest/data.yml new file mode 100644 index 0000000..2b7be3c --- /dev/null +++ b/apps/kimi-free-api/latest/data.yml @@ -0,0 +1,10 @@ +additionalProperties: + formFields: + - default: "8002" + edit: true + envKey: PANEL_APP_PORT_HTTP + labelEn: Service Port + labelZh: 服务端口 + required: true + rule: paramPort + type: number diff --git a/apps/kimi-free-api/latest/docker-compose.yml b/apps/kimi-free-api/latest/docker-compose.yml new file mode 100644 index 0000000..dc8443e --- /dev/null +++ b/apps/kimi-free-api/latest/docker-compose.yml @@ -0,0 +1,16 @@ +services: + kimi-free-api: + image: akashrajpuroh1t/kimi-free-api-fix:latest + container_name: ${CONTAINER_NAME} + ports: + - ${PANEL_APP_PORT_HTTP}:8000 + networks: + - 1panel-network + environment: + - TZ=Asia/Shanghai + labels: + createdBy: Apps + restart: always +networks: + 1panel-network: + external: true diff --git a/apps/kimi-free-api/logo.png b/apps/kimi-free-api/logo.png new file mode 100644 index 0000000..6e9e3a4 Binary files /dev/null and b/apps/kimi-free-api/logo.png differ diff --git a/apps/minimax-free-api/1.0.0/data.yml b/apps/minimax-free-api/1.0.0/data.yml new file mode 100644 index 0000000..218c06b --- /dev/null +++ b/apps/minimax-free-api/1.0.0/data.yml @@ -0,0 +1,10 @@ +additionalProperties: + formFields: + - default: "8007" + edit: true + envKey: PANEL_APP_PORT_HTTP + labelEn: Service Port + labelZh: 服务端口 + required: true + rule: paramPort + type: number diff --git a/apps/minimax-free-api/1.0.0/docker-compose.yml b/apps/minimax-free-api/1.0.0/docker-compose.yml new file mode 100644 index 0000000..5d0f28d --- /dev/null +++ b/apps/minimax-free-api/1.0.0/docker-compose.yml @@ -0,0 +1,16 @@ +services: + 
minimax-free-api: + image: akashrajpuroh1t/minimax-free-api-fix:1.0.0 + container_name: ${CONTAINER_NAME} + ports: + - ${PANEL_APP_PORT_HTTP}:8000 + networks: + - 1panel-network + environment: + - TZ=Asia/Shanghai + labels: + createdBy: Apps + restart: always +networks: + 1panel-network: + external: true diff --git a/apps/minimax-free-api/README.md b/apps/minimax-free-api/README.md new file mode 100644 index 0000000..bd757f3 --- /dev/null +++ b/apps/minimax-free-api/README.md @@ -0,0 +1,36 @@ +### 工具介绍 + +🚀 MiniMax大模型海螺AI逆向API【特长:超自然语音】,支持MiniMax Text-01、MiniMax-VL-01模型,支持高速流式输出、语音合成、联网搜索、长文档解读、图像解析、多轮对话,零配置部署,多路token支持,自动清理会话痕迹,仅供测试,如需商用请前往官方开放平台。 + +![](https://cdn.jsdelivr.net/gh/xiaoY233/PicList@main/public/assets/Free-API.png) + +![](https://img.shields.io/badge/Copyright-arch3rPro-ff9800?style=flat&logo=github&logoColor=white) + +### 风险说明 + +- 逆向API是不稳定的,建议前往MiniMax官方 https://www.minimaxi.com/platform 付费使用API,避免封禁的风险。 + +- 本组织和个人不接受任何资金捐助和交易,此项目是纯粹研究交流学习性质! + +- 仅限自用,禁止对外提供服务或商用,避免对官方造成服务压力,否则风险自担! 
+ +### 修改原因 + +- 原项目中官方接口https://hailuoai.com/已更新,接口参数发生变化,API已经不可用了 +- 原项目作者账号被封,无法更新了 +- 经过排查,这个项目的原项目暂未发现恶意代码 + +### 使用说明 + +从 [MiniMax-Agent](https://agent.minimaxi.com) 获取token + +进入MiniMax-Agent随便发起一个对话,然后F12打开开发者工具,从Application > LocalStorage中找到_token的值,复制该值填写到Lobechat或者CherryStudio等工具中,作为API密钥,API地址是你部署应用的IP加端口,例如:`https://192.168.1.105:8001/v1/chat/completions`,注意某些工具只需要填写`https://192.168.1.105:8001/`即可。 + +### 多账号接入 + +目前同个账号同时只能有*一路*输出,你可以通过提供多个账号的userToken value并使用`,`拼接提供: + +``` +API密钥:TOKEN1,TOKEN2,TOKEN3 +``` +每次请求服务会从中挑选一个。 \ No newline at end of file diff --git a/apps/minimax-free-api/data.yml b/apps/minimax-free-api/data.yml new file mode 100644 index 0000000..08f5671 --- /dev/null +++ b/apps/minimax-free-api/data.yml @@ -0,0 +1,24 @@ +name: Minimax-Free-API +tags: + - AI / 大模型 +title: 🚀 MiniMax大模型海螺AI逆向API【特长:超自然语音】 +description: 支持最新MiniMax-Text-01、MiniMax-VL-01模型,支持高速流式输出、支持多轮对话、支持语音合成等 +additionalProperties: + key: minimax-free-api + name: Minimax-Free-API + tags: + - AI + - Tools + shortDescZh: 🚀 MiniMax大模型海螺AI逆向API【特长:超自然语音】 + shortDescEn: A 1Panel deployment for minimax-free-api + type: website + crossVersionUpdate: true + limit: 0 + recommend: 0 + architectures: + - amd64 + - arm64 + + website: https://platform.minimaxi.com/ + github: https://github.com/LLM-Red-Team/minimax-free-api + document: https://github.com/LLM-Red-Team/minimax-free-api \ No newline at end of file diff --git a/apps/minimax-free-api/latest/data.yml b/apps/minimax-free-api/latest/data.yml new file mode 100644 index 0000000..218c06b --- /dev/null +++ b/apps/minimax-free-api/latest/data.yml @@ -0,0 +1,10 @@ +additionalProperties: + formFields: + - default: "8007" + edit: true + envKey: PANEL_APP_PORT_HTTP + labelEn: Service Port + labelZh: 服务端口 + required: true + rule: paramPort + type: number diff --git a/apps/minimax-free-api/latest/docker-compose.yml b/apps/minimax-free-api/latest/docker-compose.yml new file mode 100644 index 0000000..4aefe23 --- /dev/null +++ 
b/apps/minimax-free-api/latest/docker-compose.yml @@ -0,0 +1,16 @@ +services: + minimax-free-api: + image: akashrajpuroh1t/minimax-free-api-fix:latest + container_name: ${CONTAINER_NAME} + ports: + - ${PANEL_APP_PORT_HTTP}:8000 + networks: + - 1panel-network + environment: + - TZ=Asia/Shanghai + labels: + createdBy: Apps + restart: always +networks: + 1panel-network: + external: true diff --git a/apps/minimax-free-api/logo.png b/apps/minimax-free-api/logo.png new file mode 100644 index 0000000..75cf6ba Binary files /dev/null and b/apps/minimax-free-api/logo.png differ diff --git a/apps/qwen-free-api/1.0.0/data.yml b/apps/qwen-free-api/1.0.0/data.yml new file mode 100644 index 0000000..6719ea8 --- /dev/null +++ b/apps/qwen-free-api/1.0.0/data.yml @@ -0,0 +1,10 @@ +additionalProperties: + formFields: + - default: "8003" + edit: true + envKey: PANEL_APP_PORT_HTTP + labelEn: Service Port + labelZh: 服务端口 + required: true + rule: paramPort + type: number diff --git a/apps/qwen-free-api/1.0.0/docker-compose.yml b/apps/qwen-free-api/1.0.0/docker-compose.yml new file mode 100644 index 0000000..10ecdb5 --- /dev/null +++ b/apps/qwen-free-api/1.0.0/docker-compose.yml @@ -0,0 +1,16 @@ +services: + qwen-free-api: + image: akashrajpuroh1t/qwen-free-api-fix:1.0.0 + container_name: ${CONTAINER_NAME} + ports: + - ${PANEL_APP_PORT_HTTP}:8000 + networks: + - 1panel-network + environment: + - TZ=Asia/Shanghai + labels: + createdBy: Apps + restart: always +networks: + 1panel-network: + external: true diff --git a/apps/qwen-free-api/README.md b/apps/qwen-free-api/README.md new file mode 100644 index 0000000..783f383 --- /dev/null +++ b/apps/qwen-free-api/README.md @@ -0,0 +1,54 @@ +### 工具介绍 + +🚀 阿里通义千问3大模型逆向API【特长:六边形战士】,支持高速流式输出、无水印AI绘图、长文档解读、图像解析、联网检索、多轮对话,零配置部署,多路token支持,自动清理会话痕迹,仅供测试,如需商用请前往官方开放平台。 + +![](https://cdn.jsdelivr.net/gh/xiaoY233/PicList@main/public/assets/Free-API.png) + +![](https://img.shields.io/badge/Copyright-arch3rPro-ff9800?style=flat&logo=github&logoColor=white) 
+ +### 风险说明 + +- 逆向API是不稳定的,建议前往阿里云官方 https://dashscope.console.aliyun.com/ 付费使用API,避免封禁的风险。 + +- 本组织和个人不接受任何资金捐助和交易,此项目是纯粹研究交流学习性质! + +- 仅限自用,禁止对外提供服务或商用,避免对官方造成服务压力,否则风险自担! + +### 更新说明 + +重要提示:原项目由于供应链攻击,提交的代码内包含恶意代码,强烈建议不再继续使用。 + +修改原因: + +- 原项目中官方接口修改导致回答乱序,API基本不可用 +- 原项目作者基本不咋更新了 +- 已去除原项目中包含的恶意代码,欢迎对本项目源码进行审查 + +### 使用说明 + +从 [通义千问](https://tongyi.aliyun.com/qianwen) 登录 + +进入通义千问随便发起一个对话,然后F12打开开发者工具,从Application > Cookies中找到tongyi_sso_ticket的值,复制这个值填写到Lobechat或者CherryStudio等工具中,作为API密钥,API地址是你部署应用的IP加端口,例如:`https://192.168.1.105:8001/v1/chat/completions`,注意某些工具只需要填写`https://192.168.1.105:8001/`即可。 + +![获取tongyi_sso_ticket](https://cdn.jsdelivr.net/gh/LLM-Red-Team/qwen-free-api@master/doc/example-0.png) + +### 支持qwen3模型 + +Qwen3是阿里云Qwen团队研发的大型语言模型系列。 + +当前版本支持Qwen3和Qwen3-Coder等模型使用,需要手动添加模型,模型ID为`qwen3-coder-plus`, 模型名称随便填写 + +![](https://cdn.jsdelivr.net/gh/xiaoY233/PicList@main/public/assets/Qwen3-Coder.png) + +详细模型ID参考 +[阿里云百炼官方文档](https://bailian.console.aliyun.com/?tab=doc#/doc/?type=model&url=https%3A%2F%2Fhelp.aliyun.com%2Fdocument_detail%2F2840914.html&renderType=iframe) + + +### 多账号接入 + +目前同个账号同时只能有*一路*输出,你可以通过提供多个账号的userToken value并使用`,`拼接提供: + +``` +API密钥:TOKEN1,TOKEN2,TOKEN3 +``` +每次请求服务会从中挑选一个。 diff --git a/apps/qwen-free-api/data.yml b/apps/qwen-free-api/data.yml new file mode 100644 index 0000000..f5c6185 --- /dev/null +++ b/apps/qwen-free-api/data.yml @@ -0,0 +1,24 @@ +name: Qwen-Free-API +tags: + - AI / 大模型 +title: 🚀 阿里通义千问3大模型逆向API +description: 阿里通义 (Qwen) 接口转API +additionalProperties: + key: qwen-free-api + name: Qwen-Free-API + tags: + - AI + - Tools + shortDescZh: 🚀 阿里通义千问3大模型逆向API + shortDescEn: A 1Panel deployment for qwen-free-api + type: website + crossVersionUpdate: true + limit: 0 + recommend: 0 + architectures: + - amd64 + - arm64 + + website: https://www.aliyun.com/product/bailian + github: https://github.com/LLM-Red-Team/qwen-free-api + document: https://github.com/LLM-Red-Team/qwen-free-api \ No newline at end of file diff 
--git a/apps/qwen-free-api/latest/data.yml b/apps/qwen-free-api/latest/data.yml new file mode 100644 index 0000000..6719ea8 --- /dev/null +++ b/apps/qwen-free-api/latest/data.yml @@ -0,0 +1,10 @@ +additionalProperties: + formFields: + - default: "8003" + edit: true + envKey: PANEL_APP_PORT_HTTP + labelEn: Service Port + labelZh: 服务端口 + required: true + rule: paramPort + type: number diff --git a/apps/qwen-free-api/latest/docker-compose.yml b/apps/qwen-free-api/latest/docker-compose.yml new file mode 100644 index 0000000..4d916cf --- /dev/null +++ b/apps/qwen-free-api/latest/docker-compose.yml @@ -0,0 +1,16 @@ +services: + qwen-free-api: + image: akashrajpuroh1t/qwen-free-api-fix:latest + container_name: ${CONTAINER_NAME} + ports: + - ${PANEL_APP_PORT_HTTP}:8000 + networks: + - 1panel-network + environment: + - TZ=Asia/Shanghai + labels: + createdBy: Apps + restart: always +networks: + 1panel-network: + external: true diff --git a/apps/qwen-free-api/logo.png b/apps/qwen-free-api/logo.png new file mode 100644 index 0000000..83f5bf4 Binary files /dev/null and b/apps/qwen-free-api/logo.png differ