Compare commits
124 Commits
| SHA1 |
|---|
| 542d4bc703 |
| e3640fdac9 |
| f64ab4b190 |
| bd571e1577 |
| e4a5cbd893 |
| 7a9fd7fd1e |
| d9b60108db |
| 8455c8b4ed |
| 5c2e7099fc |
| 1fd1d55895 |
| 5ce4137e75 |
| d49179541e |
| 676f258981 |
| fa44749240 |
| 6c856f9da2 |
| e8773cea7f |
| 4d36ffcb08 |
| c653e492c4 |
| f08de1f404 |
| 1218691b61 |
| 61fc27ff79 |
| 123ee24f7e |
| 52c9045a28 |
| f00f1e8933 |
| 8da4433e57 |
| 7babb87934 |
| f67b171385 |
| 1780d1355d |
| 5a3390e4f3 |
| 337d96b41d |
| 38a1dfea98 |
| fbef73aeec |
| d6214c2b7c |
| d58c86f6fc |
| ea34c20198 |
| 934ca94e62 |
| 1775327c2e |
| 707fcad8b4 |
| f143c5afc6 |
| 99f94b2611 |
| e39c1f9116 |
| 235e0b9b8f |
| d5a9bed8a4 |
| d7dc8a7612 |
| 08cd3ca40c |
| a13562dcea |
| d7a0c0d1d0 |
| c0729b2d29 |
| a80f474290 |
| 699207dd54 |
| e7708010c9 |
| f66091e08f |
| 03bb932f8f |
| fbf8b349e0 |
| e9278fce6a |
| 9a7db956d5 |
| 13196dd667 |
| 52b80e24d2 |
| 7dff87e65d |
| 31ee64d1b2 |
| 8e865b6918 |
| 66f91e5832 |
| cd2d368f9c |
| 7736c1c9bd |
| 6728c0b7b5 |
| 344f92e0e7 |
| fdabfef6a7 |
| 6c5718f134 |
| edfde51434 |
| 3fc1347bba |
| e643eea365 |
| 1af481f5f9 |
| 317d1c4c41 |
| a703860512 |
| 1cd1c8ea0d |
| 53ef3bbf4f |
| ab7b8aad7c |
| c49213282b |
| 3c87fc5b31 |
| 9684508e1d |
| bb0edae200 |
| acb68a4a1e |
| 46dd6f3243 |
| ecab072890 |
| 148534d3c2 |
| 1278f16973 |
| 7d9b3c6c5c |
| 83dcb5165c |
| 30862bb82f |
| 6c0bda8feb |
| e14dece206 |
| 680593d636 |
| 144440214f |
| 6667b58a3f |
| b55d9533be |
| 3484fc60e6 |
| eac0265522 |
| ac74431633 |
| 4c098200be |
| 2cf18972f3 |
| d522d2a6a9 |
| 7079ce096f |
| 5e8c5067b1 |
| 570ff4e8b6 |
| e2f1362a1f |
| 3519e38211 |
| 08734250f7 |
| e8407f6449 |
| 04f3400f83 |
| 89c8b3e7fc |
| 66294100ec |
| 8ed8a23c8b |
| 449b0b03b5 |
| d93754bf1d |
| a007a61ecc |
| e481377317 |
| 4c5831c7b4 |
| fc54b5237f |
| f8f42678d1 |
| 38b1f4128c |
| 04fb4f88ad |
| 4675f5df08 |
| 34ee358d40 |
| c4cfd1a3e2 |
.github/workflows/docker-image.yml (vendored, 14 lines changed)

@@ -1,12 +1,8 @@
name: Docker Image CI/CD

on:
  push:
    branches:
      - master
      - dev_dashboard
    paths-ignore:
      - '**/*.md'
  release:
    types: [published]
  workflow_dispatch:
jobs:
  publish-latest-docker-image:

@@ -18,8 +14,10 @@ jobs:
      uses: actions/checkout@v2
    - name: Build image
      run: |
        docker build -t ${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:v1 .
        git clone https://github.com/Soulter/AstrBot
        cd AstrBot
        docker build -t ${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:latest .
    - name: Publish image
      run: |
        docker login -u ${{ secrets.DOCKER_HUB_USERNAME }} -p ${{ secrets.DOCKER_HUB_PASSWORD }}
        docker push ${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:v1
        docker push ${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:latest

.github/workflows/stale.yml (vendored, new file, 27 lines)

@@ -0,0 +1,27 @@
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: Mark stale issues and pull requests

on:
  schedule:
    - cron: '21 23 * * *'

jobs:
  stale:

    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write

    steps:
    - uses: actions/stale@v5
      with:
        repo-token: ${{ secrets.GITHUB_TOKEN }}
        stale-issue-message: 'Stale issue message'
        stale-pr-message: 'Stale pull request message'
        stale-issue-label: 'no-issue-activity'
        stale-pr-label: 'no-pr-activity'

.gitignore (vendored, 5 lines changed)

@@ -6,3 +6,8 @@ configs/session
configs/config.yaml
**/.DS_Store
temp
cmd_config.json
addons/plugins/
data/*
cookies.json
logs/

README.md (68 lines changed)

@@ -6,63 +6,38 @@

# AstrBot

*✨ 2024 - Aiming to be a cross-platform, easy-to-use, stable and secure bot project. ✨*

[](https://github.com/Soulter/AstrBot/releases/latest)
<img src="https://wakatime.com/badge/user/915e5316-99c6-4563-a483-ef186cf000c9/project/34412545-2e37-400f-bedc-42348713ac1f.svg" alt="wakatime">
<img src="https://img.shields.io/badge/python-3.9+-blue.svg" alt="python">
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg"/></a>
<a href="https://qm.qq.com/cgi-bin/qm/qr?k=EYGsuUTfe00_iOu9JTXS7_TEpMkXOvwv&jump_from=webapi&authKey=uUEMKCROfsseS+8IzqPjzV3y1tzy4AkykwTib2jNkOFdzezF9s9XknqnIaf3CDft">
<img alt="Static Badge" src="https://img.shields.io/badge/QQ群-322154837-purple">
</a>
<img alt="Static Badge" src="https://img.shields.io/badge/频道-x42d56aki2-purple">

<a href="https://astrbot.soulter.top/center">Project homepage (under development)</a> |
<a href="https://github.com/Soulter/QQChannelChatGPT/wiki">Deployment docs</a> |
<a href="https://github.com/Soulter/QQChannelChatGPT/issues">Issues</a> |
<a href="https://astrbot.soulter.top/center/docs/%E5%BC%80%E5%8F%91/%E6%8F%92%E4%BB%B6%E5%BC%80%E5%8F%91">Plugin development (as few as 25 lines, really not hard!)</a>

<a href="https://astrbot.soulter.top/center">Deployment</a> |
<a href="https://github.com/Soulter/AstrBot/issues">Issues</a> |
<a href="https://astrbot.soulter.top/center/docs/%E5%BC%80%E5%8F%91/%E6%8F%92%E4%BB%B6%E5%BC%80%E5%8F%91">Plugin development</a>
</div>


## 🤔 What you may want to know
- **How to deploy?** [Help docs](https://github.com/Soulter/QQChannelChatGPT/wiki) (if deployment fails, feel free to join the QQ group and ask for help <3)
- **go-cqhttp won't start or reports a login failure?** [Search for solutions here](https://github.com/Mrs4s/go-cqhttp/issues)
- **Program crashes / bot won't start?** [Open an issue or report it in the group](https://github.com/Soulter/QQChannelChatGPT/issues)
- **How to enable language models such as ChatGPT, Bard and Claude?** [See the help page](https://github.com/Soulter/QQChannelChatGPT/wiki/%E8%A1%A5%E5%85%85%EF%BC%9A%E5%A6%82%E4%BD%95%E5%BC%80%E5%90%AFChatGPT%E3%80%81Bard%E3%80%81Claude%E7%AD%89%E8%AF%AD%E8%A8%80%E6%A8%A1%E5%9E%8B%EF%BC%9F)

## 🧩 Features

✨ Recent features:
1. Branch switching: send `/update checkout <分支名>` to switch the code branch.
2. A visual dashboard is in testing: send `/update checkout dev_dashboard` and follow the prompts to try it.
🌍 Supported messaging platforms
- QQ groups and QQ guilds (OneBot, official QQ API)
- Telegram (via the [astrbot_plugin_telegram](https://github.com/Soulter/astrbot_plugin_telegram) plugin)

🌍 Supported AI language models:
🌍 Supported large language models:

**Text models / image understanding**

- OpenAI GPT-3 (native support)
- OpenAI GPT-3.5 (native support)
- OpenAI GPT-4 (native support)
- OpenAI GPT, DallE series
- Claude (free, via the [LLMs plugin](https://github.com/Soulter/llms))
- HuggingChat (free, via the [LLMs plugin](https://github.com/Soulter/llms))

**Image generation**

- NovelAI/Naifu (free, via the [AIDraw plugin](https://github.com/Soulter/aidraw))

- Gemini (free, via the [LLMs plugin](https://github.com/Soulter/llms))

🌍 Bot capabilities:
- Visual dashboard (beta)
- Deploy the bot to QQ and QQ guilds at the same time
- LLM chat
- LLM web search **(currently OpenAI-family models only; enable with the web on command in the latest version)**
- Plugins (send `plugin` in a QQ or QQ guild chat for details)
- Text-to-image reply rendering (replies are sent as markdown rendered into an image, **greatly reducing the chance of being flagged**; enable qq_pic_mode manually in `cmd_config.json`)
- Persona settings
- Keyword replies
- Hot update (to update this project, **just** send `update latest r` in a QQ or QQ guild chat)
- One-click Windows deployment: https://github.com/Soulter/QQChatGPTLauncher/releases/latest
- LLM chat, personas, web search
- Visual admin dashboard
- Handles messages from multiple platforms at once
- Per-user session isolation
- Plugin support
- Text-to-image replies

<!--
### Basic features

@@ -129,13 +104,11 @@
> Install a plugin with `plugin i 插件GitHub链接`.

Plugin development tutorial: https://github.com/Soulter/QQChannelChatGPT/wiki/%E5%9B%9B%E3%80%81%E5%BC%80%E5%8F%91%E6%8F%92%E4%BB%B6

Some plugins:

- `LLMS`: https://github.com/Soulter/llms | Claude and HuggingChat language model integration.

- `GoodPlugins`: https://github.com/Soulter/goodplugins | Random anime images, anime search, "good news" generator, and more
- `GoodPlugins`: https://github.com/Soulter/goodplugins | Random anime images, anime search, "good news" generator, and more.

- `sysstat`: https://github.com/Soulter/sysstatqcbot | View system status

@@ -143,6 +116,8 @@

- `liferestart`: https://github.com/Soulter/liferestart | Life Restart simulator

- `astrbot_plugin_aiocqhttp`: https://github.com/Soulter/astrbot_plugin_aiocqhttp | aiocqhttp adapter; supports OneBot implementations with reverse WS, such as Lagrange.OneBot and Shamrock.


<img width="900" alt="image" src="https://github.com/Soulter/AstrBot/assets/37870767/824d1ff3-7b85-481c-b795-8e62dedb9fd7">

@@ -162,15 +137,12 @@
- `/key` Add keys dynamically
- `/set` Persona settings panel
- `/keyword nihao 你好` Set a keyword reply: nihao -> 你好
- `/bing` Switch to Bing
- `/revgpt` Switch to the reverse-engineered ChatGPT library
- `/画` Draw a picture

#### Reverse-engineered ChatGPT library models
- `/gpt` Switch to the official OpenAI API
- `/bing` Switch to Bing

* Model-switching commands support one-off use, e.g. `/bing 你好` uses the Bing model for a single reply -->
* Model-switching commands support one-off use, e.g. `/a 你好` uses the Bing model for a single reply -->
<!--
## 🙇 Acknowledgements

@@ -1,14 +1,17 @@
from aip import AipContentCensor


class BaiduJudge:
    def __init__(self, baidu_configs) -> None:
        if 'app_id' in baidu_configs and 'api_key' in baidu_configs and 'secret_key' in baidu_configs:
            self.app_id = str(baidu_configs['app_id'])
            self.api_key = baidu_configs['api_key']
            self.secret_key = baidu_configs['secret_key']
            self.client = AipContentCensor(self.app_id, self.api_key, self.secret_key)
            self.client = AipContentCensor(
                self.app_id, self.api_key, self.secret_key)
        else:
            raise ValueError("Baidu configs error! 请填写百度内容审核服务相关配置!")

    def judge(self, text):
        res = self.client.textCensorUserDefined(text)
        if 'conclusionType' not in res:

@@ -23,4 +26,4 @@ class BaiduJudge:
        for i in res['data']:
            info += f"{i['msg']};\n"
        info += "\n判断结果:"+res['conclusion']
        return False, info
        return False, info

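For context on the hunk above: BaiduJudge wraps Baidu's AipContentCensor text-censoring client. The constructor expects app_id, api_key and secret_key in its config dict, and judge() returns a pass/fail flag plus a human-readable summary. A minimal usage sketch follows; the import path and credential values are placeholders, not taken from this diff.

```python
# Minimal usage sketch of the BaiduJudge wrapper shown above.
# The module path and credentials are placeholders (assumptions), not repo facts.
from baidu_judge import BaiduJudge  # hypothetical import path

baidu_configs = {
    "app_id": "your_app_id",        # the three keys __init__ checks for
    "api_key": "your_api_key",
    "secret_key": "your_secret_key",
}

judge = BaiduJudge(baidu_configs)
ok, info = judge.judge("text to check")  # calls textCensorUserDefined under the hood
if not ok:
    print(info)  # censor conclusion plus per-item messages
```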
@@ -1 +1 @@
import{x as i,o as l,c as _,w as s,a as e,f as a,J as m,V as r,b as t,t as u,ab as p,B as n,ac as o,j as f}from"./index-7c8bc001.js";const b={class:"text-h3"},h={class:"d-flex align-center"},g={class:"d-flex align-center"},V=i({__name:"BaseBreadcrumb",props:{title:String,breadcrumbs:Array,icon:String},setup(d){const c=d;return(x,B)=>(l(),_(r,{class:"page-breadcrumb mb-1 mt-1"},{default:s(()=>[e(a,{cols:"12",md:"12"},{default:s(()=>[e(m,{variant:"outlined",elevation:"0",class:"px-4 py-3 withbg"},{default:s(()=>[e(r,{"no-gutters":"",class:"align-center"},{default:s(()=>[e(a,{md:"5"},{default:s(()=>[t("h3",b,u(c.title),1)]),_:1}),e(a,{md:"7",sm:"12",cols:"12"},{default:s(()=>[e(p,{items:c.breadcrumbs,class:"text-h5 justify-md-end pa-1"},{divider:s(()=>[t("div",h,[e(n(o),{size:"17"})])]),prepend:s(()=>[e(f,{size:"small",icon:"mdi-home",class:"text-secondary mr-2"}),t("div",g,[e(n(o),{size:"17"})])]),_:1},8,["items"])]),_:1})]),_:1})]),_:1})]),_:1})]),_:1}))}});export{V as _};
import{x as i,o as l,c as _,w as s,a as e,f as a,J as m,V as c,b as t,t as u,ad as p,B as n,ae as o,j as f}from"./index-dc96e1be.js";const b={class:"text-h3"},h={class:"d-flex align-center"},g={class:"d-flex align-center"},V=i({__name:"BaseBreadcrumb",props:{title:String,breadcrumbs:Array,icon:String},setup(d){const r=d;return(x,B)=>(l(),_(c,{class:"page-breadcrumb mb-1 mt-1"},{default:s(()=>[e(a,{cols:"12",md:"12"},{default:s(()=>[e(m,{variant:"outlined",elevation:"0",class:"px-4 py-3 withbg"},{default:s(()=>[e(c,{"no-gutters":"",class:"align-center"},{default:s(()=>[e(a,{md:"5"},{default:s(()=>[t("h3",b,u(r.title),1)]),_:1}),e(a,{md:"7",sm:"12",cols:"12"},{default:s(()=>[e(p,{items:r.breadcrumbs,class:"text-h5 justify-md-end pa-1"},{divider:s(()=>[t("div",h,[e(n(o),{size:"17"})])]),prepend:s(()=>[e(f,{size:"small",icon:"mdi-home",class:"text-secondary mr-2"}),t("div",g,[e(n(o),{size:"17"})])]),_:1},8,["items"])]),_:1})]),_:1})]),_:1})]),_:1})]),_:1}))}});export{V as _};
addons/dashboard/dist/assets/BlankLayout-1694b31c.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
import{x as e,o as a,c as t,w as o,a as s,B as n,Z as r,W as c}from"./index-dc96e1be.js";const f=e({__name:"BlankLayout",setup(p){return(u,_)=>(a(),t(c,null,{default:o(()=>[s(n(r))]),_:1}))}});export{f as default};
@@ -1 +0,0 @@
import{x as e,o as a,c as t,w as o,a as s,B as n,X as r,T as c}from"./index-7c8bc001.js";const f=e({__name:"BlankLayout",setup(p){return(u,_)=>(a(),t(c,null,{default:o(()=>[s(n(r))]),_:1}))}});export{f as default};
addons/dashboard/dist/assets/ColorPage-757221a5.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
import{_ as m}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-e31f96f8.js";import{_}from"./UiParentCard.vue_vue_type_script_setup_true_lang-f2b2db58.js";import{x as p,D as a,o as r,s,a as e,w as t,f as o,V as i,F as n,u as g,c as h,a0 as b,e as x,t as y}from"./index-dc96e1be.js";const P=p({__name:"ColorPage",setup(C){const c=a({title:"Colors Page"}),d=a([{title:"Utilities",disabled:!1,href:"#"},{title:"Colors",disabled:!0,href:"#"}]),u=a(["primary","lightprimary","secondary","lightsecondary","info","success","accent","warning","error","darkText","lightText","borderLight","inputBorder","containerBg"]);return(V,k)=>(r(),s(n,null,[e(m,{title:c.value.title,breadcrumbs:d.value},null,8,["title","breadcrumbs"]),e(i,null,{default:t(()=>[e(o,{cols:"12",md:"12"},{default:t(()=>[e(_,{title:"Color Palette"},{default:t(()=>[e(i,null,{default:t(()=>[(r(!0),s(n,null,g(u.value,(l,f)=>(r(),h(o,{md:"3",cols:"12",key:f},{default:t(()=>[e(b,{rounded:"md",class:"align-center justify-center d-flex",height:"100",width:"100%",color:l},{default:t(()=>[x("class: "+y(l),1)]),_:2},1032,["color"])]),_:2},1024))),128))]),_:1})]),_:1})]),_:1})]),_:1})],64))}});export{P as default};
@@ -1 +0,0 @@
import{_ as m}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-89ca5198.js";import{_}from"./UiParentCard.vue_vue_type_script_setup_true_lang-03a5c441.js";import{x as p,D as a,o as r,s,a as e,w as t,f as o,V as i,F as n,u as g,c as h,_ as b,e as x,t as y}from"./index-7c8bc001.js";const P=p({__name:"ColorPage",setup(C){const c=a({title:"Colors Page"}),d=a([{title:"Utilities",disabled:!1,href:"#"},{title:"Colors",disabled:!0,href:"#"}]),u=a(["primary","lightprimary","secondary","lightsecondary","info","success","accent","warning","error","darkText","lightText","borderLight","inputBorder","containerBg"]);return(V,k)=>(r(),s(n,null,[e(m,{title:c.value.title,breadcrumbs:d.value},null,8,["title","breadcrumbs"]),e(i,null,{default:t(()=>[e(o,{cols:"12",md:"12"},{default:t(()=>[e(_,{title:"Color Palette"},{default:t(()=>[e(i,null,{default:t(()=>[(r(!0),s(n,null,g(u.value,(l,f)=>(r(),h(o,{md:"3",cols:"12",key:f},{default:t(()=>[e(b,{rounded:"md",class:"align-center justify-center d-flex",height:"100",width:"100%",color:l},{default:t(()=>[x("class: "+y(l),1)]),_:2},1032,["color"])]),_:2},1024))),128))]),_:1})]),_:1})]),_:1})]),_:1})],64))}});export{P as default};
addons/dashboard/dist/assets/ConfigDetailCard-8467c848.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
import{o as l,s as o,u as c,c as n,w as u,Q as g,b as s,R as k,F as t,ab as h,O as p,t as m,a as V,ac as f,i as C,q as x,k as v,A as U}from"./index-dc96e1be.js";import{_ as w}from"./UiParentCard.vue_vue_type_script_setup_true_lang-f2b2db58.js";const S={__name:"ConfigDetailCard",props:{config:Array},setup(d){return(y,B)=>(l(!0),o(t,null,c(d.config,r=>(l(),n(w,{key:r.name,title:r.name,style:{"margin-bottom":"16px"}},{default:u(()=>[g(s("a",null,"No data",512),[[k,d.config.length===0]]),(l(!0),o(t,null,c(r.body,e=>(l(),o(t,null,[e.config_type==="item"?(l(),o(t,{key:0},[e.val_type==="bool"?(l(),n(h,{key:0,modelValue:e.value,"onUpdate:modelValue":a=>e.value=a,label:e.name,hint:e.description,color:"primary",inset:""},null,8,["modelValue","onUpdate:modelValue","label","hint"])):e.val_type==="str"?(l(),n(p,{key:1,modelValue:e.value,"onUpdate:modelValue":a=>e.value=a,label:e.name,hint:e.description,style:{"margin-bottom":"8px"},variant:"outlined"},null,8,["modelValue","onUpdate:modelValue","label","hint"])):e.val_type==="int"?(l(),n(p,{key:2,modelValue:e.value,"onUpdate:modelValue":a=>e.value=a,label:e.name,hint:e.description,style:{"margin-bottom":"8px"},variant:"outlined"},null,8,["modelValue","onUpdate:modelValue","label","hint"])):e.val_type==="list"?(l(),o(t,{key:3},[s("span",null,m(e.name),1),V(f,{modelValue:e.value,"onUpdate:modelValue":a=>e.value=a,chips:"",clearable:"",label:"请添加",multiple:"","prepend-icon":"mdi-tag-multiple-outline"},{selection:u(({attrs:a,item:i,select:b,selected:_})=>[V(C,x(a,{"model-value":_,closable:"",onClick:b,"onClick:close":D=>y.remove(i)}),{default:u(()=>[s("strong",null,m(i),1)]),_:2},1040,["model-value","onClick","onClick:close"])]),_:2},1032,["modelValue","onUpdate:modelValue"])],64)):v("",!0)],64)):e.config_type==="divider"?(l(),n(U,{key:1,style:{"margin-top":"8px","margin-bottom":"8px"}})):v("",!0)],64))),256))]),_:2},1032,["title"]))),128))}};export{S as _};
addons/dashboard/dist/assets/ConfigPage-164b73e7.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
import{_ as y}from"./UiParentCard.vue_vue_type_script_setup_true_lang-f2b2db58.js";import{x as h,o,c as u,w as t,a,a8 as b,b as c,K as x,e as f,t as g,G as V,A as w,L as S,a9 as $,J as B,s as _,d as v,F as d,u as p,f as G,V as T,aa as j,T as l}from"./index-dc96e1be.js";import{_ as m}from"./ConfigDetailCard-8467c848.js";const D={class:"d-sm-flex align-center justify-space-between"},C=h({__name:"ConfigGroupCard",props:{title:String},setup(e){const s=e;return(i,n)=>(o(),u(B,{variant:"outlined",elevation:"0",class:"withbg",style:{width:"50%"}},{default:t(()=>[a(b,{style:{padding:"10px 20px"}},{default:t(()=>[c("div",D,[a(x,null,{default:t(()=>[f(g(s.title),1)]),_:1}),a(V)])]),_:1}),a(w),a(S,null,{default:t(()=>[$(i.$slots,"default")]),_:3})]),_:3}))}}),I={style:{display:"flex","flex-direction":"row","justify-content":"space-between","align-items":"center","margin-bottom":"12px"}},N={style:{display:"flex","flex-direction":"row"}},R={style:{"margin-right":"10px",color:"black"}},F={style:{color:"#222"}},k=h({__name:"ConfigGroupItem",props:{title:String,desc:String,btnRoute:String,namespace:String},setup(e){const s=e;return(i,n)=>(o(),_("div",I,[c("div",N,[c("h3",R,g(s.title),1),c("p",F,g(s.desc),1)]),a(v,{to:s.btnRoute,color:"primary",class:"ml-2",style:{"border-radius":"10px"}},{default:t(()=>[f("配置")]),_:1},8,["to"])]))}}),L={style:{display:"flex","flex-direction":"row",padding:"16px",gap:"16px",width:"100%"}},P={name:"ConfigPage",components:{UiParentCard:y,ConfigGroupCard:C,ConfigGroupItem:k,ConfigDetailCard:m},data(){return{config_data:[],config_base:[],save_message_snack:!1,save_message:"",save_message_success:"",config_outline:[],namespace:""}},mounted(){this.getConfig()},methods:{switchConfig(e){l.get("/api/configs?namespace="+e).then(s=>{this.namespace=e,this.config_data=s.data.data,console.log(this.config_data)}).catch(s=>{save_message=s,save_message_snack=!0,save_message_success="error"})},getConfig(){l.get("/api/config_outline").then(e=>{this.config_outline=e.data.data,console.log(this.config_outline)}).catch(e=>{save_message=e,save_message_snack=!0,save_message_success="error"}),l.get("/api/configs").then(e=>{this.config_base=e.data.data,console.log(this.config_data)}).catch(e=>{save_message=e,save_message_snack=!0,save_message_success="error"})},updateConfig(){l.post("/api/configs",{base_config:this.config_base,config:this.config_data,namespace:this.namespace}).then(e=>{e.data.status==="success"?(this.save_message=e.data.message,this.save_message_snack=!0,this.save_message_success="success"):(this.save_message=e.data.message,this.save_message_snack=!0,this.save_message_success="error")}).catch(e=>{this.save_message=e,this.save_message_snack=!0,this.save_message_success="error"})}}},J=Object.assign(P,{setup(e){return(s,i)=>(o(),_(d,null,[a(T,null,{default:t(()=>[c("div",L,[(o(!0),_(d,null,p(s.config_outline,n=>(o(),u(C,{key:n.name,title:n.name},{default:t(()=>[(o(!0),_(d,null,p(n.body,r=>(o(),u(k,{title:r.title,desc:r.desc,namespace:r.namespace,onClick:U=>s.switchConfig(r.namespace)},null,8,["title","desc","namespace","onClick"]))),256))]),_:2},1032,["title"]))),128))]),a(G,{cols:"12",md:"12"},{default:t(()=>[a(m,{config:s.config_data},null,8,["config"]),a(m,{config:s.config_base},null,8,["config"])]),_:1})]),_:1}),a(v,{icon:"mdi-content-save",size:"x-large",style:{position:"fixed",right:"52px",bottom:"52px"},color:"darkprimary",onClick:s.updateConfig},null,8,["onClick"]),a(j,{timeout:2e3,elevation:"24",color:s.save_message_success,modelValue:s.save_message_snack,"onUpdate:modelValue":
i[0]||(i[0]=n=>s.save_message_snack=n)},{default:t(()=>[f(g(s.save_message),1)]),_:1},8,["color","modelValue"])],64))}});export{J as default};
@@ -1 +0,0 @@
import{_ as h}from"./UiParentCard.vue_vue_type_script_setup_true_lang-03a5c441.js";import{o as a,s as t,a as n,w as i,f as b,F as d,u as g,V as C,d as U,e as x,t as c,a8 as B,R as _,c as r,a9 as w,O as v,b as V,aa as N,i as F,q as P,k as f,A as S}from"./index-7c8bc001.js";const D={name:"ConfigPage",components:{UiParentCard:h},data(){return{config_data:{data:[]},save_message_snack:!1,save_message:"",save_message_success:""}},mounted(){this.getConfig()},methods:{getConfig(){_.get("/api/configs").then(o=>{this.config_data=o.data.data,console.log(this.config_data)})},updateConfig(){_.post("/api/configs",this.config_data).then(o=>{console.log(this.config_data),o.data.status==="success"?(this.save_message=o.data.message,this.save_message_snack=!0,this.save_message_success="success"):(this.save_message=o.data.message,this.save_message_snack=!0,this.save_message_success="error")})}}},$=Object.assign(D,{setup(o){return(s,m)=>(a(),t(d,null,[n(C,null,{default:i(()=>[n(b,{cols:"12",md:"12"},{default:i(()=>[(a(!0),t(d,null,g(s.config_data.data,u=>(a(),r(h,{key:u.name,title:u.name,style:{"margin-bottom":"16px"}},{default:i(()=>[(a(!0),t(d,null,g(u.body,e=>(a(),t(d,null,[e.config_type==="item"?(a(),t(d,{key:0},[e.val_type==="bool"?(a(),r(w,{key:0,modelValue:e.value,"onUpdate:modelValue":l=>e.value=l,label:e.name,hint:e.description,color:"primary",inset:""},null,8,["modelValue","onUpdate:modelValue","label","hint"])):e.val_type==="string"?(a(),r(v,{key:1,modelValue:e.value,"onUpdate:modelValue":l=>e.value=l,label:e.name,hint:e.description,style:{"margin-bottom":"8px"},variant:"outlined"},null,8,["modelValue","onUpdate:modelValue","label","hint"])):e.val_type==="int"?(a(),r(v,{key:2,modelValue:e.value,"onUpdate:modelValue":l=>e.value=l,label:e.name,hint:e.description,style:{"margin-bottom":"8px"},variant:"outlined"},null,8,["modelValue","onUpdate:modelValue","label","hint"])):e.val_type==="list"?(a(),t(d,{key:3},[V("span",null,c(e.name),1),n(N,{modelValue:e.value,"onUpdate:modelValue":l=>e.value=l,chips:"",clearable:"",label:"请添加",multiple:"","prepend-icon":"mdi-tag-multiple-outline"},{selection:i(({attrs:l,item:p,select:k,selected:y})=>[n(F,P(l,{"model-value":y,closable:"",onClick:k,"onClick:close":O=>s.remove(p)}),{default:i(()=>[V("strong",null,c(p),1)]),_:2},1040,["model-value","onClick","onClick:close"])]),_:2},1032,["modelValue","onUpdate:modelValue"])],64)):f("",!0)],64)):e.config_type==="divider"?(a(),r(S,{key:1,style:{"margin-top":"8px","margin-bottom":"8px"}})):f("",!0)],64))),256))]),_:2},1032,["title"]))),128))]),_:1})]),_:1}),n(U,{icon:"mdi-content-save",size:"x-large",style:{position:"fixed",right:"52px",bottom:"52px"},color:"darkprimary",onClick:s.updateConfig},null,8,["onClick"]),n(B,{timeout:2e3,elevation:"24",color:s.save_message_success,modelValue:s.save_message_snack,"onUpdate:modelValue":m[0]||(m[0]=u=>s.save_message_snack=u)},{default:i(()=>[x(c(s.save_message),1)]),_:1},8,["color","modelValue"])],64))}});export{$ as default};
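The new ConfigPage bundle above drives its UI from a small config HTTP API on the dashboard backend: GET /api/config_outline for the group outline, GET /api/configs (optionally with a namespace query) for config data, and POST /api/configs with base_config, config and namespace to save. A rough Python sketch of those calls, assuming a local dashboard address and a placeholder namespace:

```python
# Sketch of the dashboard config API calls visible in the ConfigPage bundle above.
# BASE and the namespace value are assumptions for illustration only.
import requests

BASE = "http://localhost:6185"  # assumed dashboard address; adjust to your deployment

outline = requests.get(f"{BASE}/api/config_outline").json()["data"]   # config groups
base_config = requests.get(f"{BASE}/api/configs").json()["data"]      # base config
ns_config = requests.get(f"{BASE}/api/configs", params={"namespace": "example_ns"}).json()["data"]

# The frontend saves by posting base config, namespace config and namespace together.
resp = requests.post(f"{BASE}/api/configs", json={
    "base_config": base_config,
    "config": ns_config,
    "namespace": "example_ns",
}).json()
print(resp.get("status"), resp.get("message"))
```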
File diff suppressed because one or more lines are too long
addons/dashboard/dist/assets/ConsolePage-ff373be6.css (vendored, new file, 32 lines)

@@ -0,0 +1,32 @@
/**
 * Copyright (c) 2014 The xterm.js authors. All rights reserved.
 * Copyright (c) 2012-2013, Christopher Jeffrey (MIT License)
 * https://github.com/chjj/term.js
 * @license MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 * Originally forked from (with the author's permission):
 *   Fabrice Bellard's javascript vt100 for jslinux:
 *   http://bellard.org/jslinux/
 *   Copyright (c) 2011 Fabrice Bellard
 *   The original design remains. The terminal itself
 *   has been extended to include xterm CSI codes, among
 *   other features.
*/.xterm{cursor:text;position:relative;user-select:none;-ms-user-select:none;-webkit-user-select:none}.xterm.focus,.xterm:focus{outline:none}.xterm .xterm-helpers{position:absolute;top:0;z-index:5}.xterm .xterm-helper-textarea{padding:0;border:0;margin:0;position:absolute;opacity:0;left:-9999em;top:0;width:0;height:0;z-index:-5;white-space:nowrap;overflow:hidden;resize:none}.xterm .composition-view{background:#000;color:#fff;display:none;position:absolute;white-space:nowrap;z-index:1}.xterm .composition-view.active{display:block}.xterm .xterm-viewport{background-color:#000;overflow-y:scroll;cursor:default;position:absolute;right:0;left:0;top:0;bottom:0}.xterm .xterm-screen{position:relative}.xterm .xterm-screen canvas{position:absolute;left:0;top:0}.xterm .xterm-scroll-area{visibility:hidden}.xterm-char-measure-element{display:inline-block;visibility:hidden;position:absolute;top:0;left:-9999em;line-height:normal}.xterm.enable-mouse-events{cursor:default}.xterm.xterm-cursor-pointer,.xterm .xterm-cursor-pointer{cursor:pointer}.xterm.column-select.focus{cursor:crosshair}.xterm .xterm-accessibility,.xterm .xterm-message{position:absolute;left:0;top:0;bottom:0;right:0;z-index:10;color:transparent;pointer-events:none}.xterm .live-region{position:absolute;left:-9999px;width:1px;height:1px;overflow:hidden}.xterm-dim{opacity:1!important}.xterm-underline-1{text-decoration:underline}.xterm-underline-2{text-decoration:double underline}.xterm-underline-3{text-decoration:wavy underline}.xterm-underline-4{text-decoration:dotted underline}.xterm-underline-5{text-decoration:dashed underline}.xterm-overline{text-decoration:overline}.xterm-overline.xterm-underline-1{text-decoration:overline underline}.xterm-overline.xterm-underline-2{text-decoration:overline double underline}.xterm-overline.xterm-underline-3{text-decoration:overline wavy underline}.xterm-overline.xterm-underline-4{text-decoration:overline dotted underline}.xterm-overline.xterm-underline-5{text-decoration:overline dashed underline}.xterm-strikethrough{text-decoration:line-through}.xterm-screen .xterm-decoration-container .xterm-decoration{z-index:6;position:absolute}.xterm-screen .xterm-decoration-container .xterm-decoration.xterm-decoration-top-layer{z-index:7}.xterm-decoration-overview-ruler{z-index:8;position:absolute;top:0;right:0;pointer-events:none}.xterm-decoration-top{z-index:2;position:relative}
addons/dashboard/dist/assets/DefaultDashboard-8752691b.js (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
import{_ as t}from"./_plugin-vue_export-helper-c27b6911.js";import{o,c,w as s,V as i,a as r,b as e,d as l,e as a,f as d}from"./index-7c8bc001.js";const n="/assets/img-error-bg-ab6474a0.svg",_="/assets/img-error-blue-2675a7a9.svg",m="/assets/img-error-text-a6aebfa0.svg",g="/assets/img-error-purple-edee3fbc.svg";const p={},u={class:"text-center"},f=e("div",{class:"CardMediaWrapper"},[e("img",{src:n,alt:"grid",class:"w-100"}),e("img",{src:_,alt:"grid",class:"CardMediaParts"}),e("img",{src:m,alt:"build",class:"CardMediaBuild"}),e("img",{src:g,alt:"build",class:"CardMediaBuild"})],-1),h=e("h1",{class:"text-h1"},"Something is wrong",-1),v=e("p",null,[e("small",null,[a("The page you are looking was moved, removed, "),e("br"),a("renamed, or might never exist! ")])],-1);function x(b,V){return o(),c(i,{"no-gutters":"",class:"h-100vh"},{default:s(()=>[r(d,{class:"d-flex align-center justify-center"},{default:s(()=>[e("div",u,[f,h,v,r(l,{variant:"flat",color:"primary",class:"mt-4",to:"/","prepend-icon":"mdi-home"},{default:s(()=>[a(" Home")]),_:1})])]),_:1})]),_:1})}const C=t(p,[["render",x]]);export{C as default};
import{_ as t}from"./_plugin-vue_export-helper-c27b6911.js";import{o,c,w as s,V as i,a as r,b as e,d as l,e as a,f as d}from"./index-dc96e1be.js";const n="/assets/img-error-bg-ab6474a0.svg",_="/assets/img-error-blue-2675a7a9.svg",m="/assets/img-error-text-a6aebfa0.svg",g="/assets/img-error-purple-edee3fbc.svg";const p={},u={class:"text-center"},f=e("div",{class:"CardMediaWrapper"},[e("img",{src:n,alt:"grid",class:"w-100"}),e("img",{src:_,alt:"grid",class:"CardMediaParts"}),e("img",{src:m,alt:"build",class:"CardMediaBuild"}),e("img",{src:g,alt:"build",class:"CardMediaBuild"})],-1),h=e("h1",{class:"text-h1"},"Something is wrong",-1),v=e("p",null,[e("small",null,[a("The page you are looking was moved, removed, "),e("br"),a("renamed, or might never exist! ")])],-1);function x(b,V){return o(),c(i,{"no-gutters":"",class:"h-100vh"},{default:s(()=>[r(d,{class:"d-flex align-center justify-center"},{default:s(()=>[e("div",u,[f,h,v,r(l,{variant:"flat",color:"primary",class:"mt-4",to:"/","prepend-icon":"mdi-home"},{default:s(()=>[a(" Home")]),_:1})])]),_:1})]),_:1})}const C=t(p,[["render",x]]);export{C as default};
addons/dashboard/dist/assets/ExtensionPage-9b03c00a.js (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
import{x as b,o as d,c as h,w as e,a,a6 as C,b as i,K as x,e as o,t as u,G as m,d as r,A as E,L as V,a7 as y,J as w,s as p,f as c,F as f,u as $,V as k,q as S,N as B,O as N,P as T,H as j,a8 as D,R as g,j as F}from"./index-7c8bc001.js";const G={class:"d-sm-flex align-center justify-space-between"},v=b({__name:"ExtensionCard",props:{title:String,link:String},setup(n){const s=n,l=t=>{window.open(t,"_blank")};return(t,_)=>(d(),h(w,{variant:"outlined",elevation:"0",class:"withbg"},{default:e(()=>[a(C,{style:{padding:"10px 20px"}},{default:e(()=>[i("div",G,[a(x,null,{default:e(()=>[o(u(s.title),1)]),_:1}),a(m),a(r,{icon:"mdi-link",variant:"plain",onClick:_[0]||(_[0]=z=>l(s.link))})])]),_:1}),a(E),a(V,null,{default:e(()=>[y(t.$slots,"default")]),_:3})]),_:3}))}}),P=i("div",{style:{"background-color":"white",width:"100%",padding:"16px","border-radius":"10px"}},[i("h3",null,"🧩 已安装的插件")],-1),U={style:{"min-height":"180px","max-height":"180px",overflow:"hidden"}},q={class:"d-flex align-center gap-3"},A=i("div",{style:{"background-color":"white",width:"100%",padding:"16px","border-radius":"10px"}},[i("h3",null,"🧩 插件市场 [待开发]")],-1),I=i("span",{class:"text-h5"},"从 Git 仓库链接安装插件",-1),L=i("small",null,"github, gitee, gitlab 等公开的仓库都行。",-1),O=i("br",null,null,-1),R={name:"ExtensionPage",components:{ExtensionCard:v},data(){return{extension_data:{data:[]},save_message_snack:!1,save_message:"",save_message_success:"",extension_url:"",status:"",dialog:!1,snack_message:"",snack_show:!1,snack_success:"success",install_loading:!1,uninstall_loading:!1}},mounted(){this.getExtensions()},methods:{getExtensions(){g.get("/api/extensions").then(n=>{this.extension_data.data=n.data.data,console.log(this.extension_data)})},newExtension(){this.install_loading=!0,console.log(this.install_loading),g.post("/api/extensions/install",{url:this.extension_url}).then(n=>{if(this.install_loading=!1,n.data.status==="error"){this.snack_message=n.data.message,this.snack_show=!0,this.snack_success="error";return}this.extension_data.data=n.data.data,console.log(this.extension_data),this.extension_url="",this.snack_message=n.data.message,this.snack_show=!0,this.snack_success="success",this.dialog=!1,this.getExtensions()}).catch(n=>{this.install_loading=!1,this.snack_message=n,this.snack_show=!0,this.snack_success="error"})},uninstallExtension(n){this.uninstall_loading=!0,g.post("/api/extensions/uninstall",{name:n}).then(s=>{if(this.uninstall_loading=!1,s.data.status==="error"){this.snack_message=s.data.message,this.snack_show=!0,this.snack_success="error";return}this.extension_data.data=s.data.data,console.log(this.extension_data),this.snack_message=s.data.message,this.snack_show=!0,this.snack_success="success",this.dialog=!1,this.getExtensions()}).catch(s=>{this.uninstall_loading=!1,this.snack_message=s,this.snack_show=!0,this.snack_success="error"})}}},J=Object.assign(R,{setup(n){return(s,l)=>(d(),p(f,null,[a(k,null,{default:e(()=>[a(c,{cols:"12",md:"12"},{default:e(()=>[P]),_:1}),(d(!0),p(f,null,$(s.extension_data.data,t=>(d(),h(c,{cols:"12",md:"6",lg:"4"},{default:e(()=>[(d(),h(v,{key:t.name,title:t.name,link:t.repo,style:{"margin-bottom":"16px"}},{default:e(()=>[i("p",U,u(t.desc),1),i("div",q,[a(F,null,{default:e(()=>[o("mdi-account")]),_:1}),i("span",null,u(t.author),1),a(m),a(r,{variant:"plain",onClick:_=>s.uninstallExtension(t.name),loading:s.uninstall_loading},{default:e(()=>[o("卸 
载")]),_:2},1032,["onClick","loading"])])]),_:2},1032,["title","link"]))]),_:2},1024))),256)),a(c,{cols:"12",md:"12"},{default:e(()=>[A]),_:1})]),_:1}),a(j,{modelValue:s.dialog,"onUpdate:modelValue":l[3]||(l[3]=t=>s.dialog=t),persistent:"",width:"700"},{activator:e(({props:t})=>[a(r,S(t,{icon:"mdi-plus",size:"x-large",style:{position:"fixed",right:"52px",bottom:"52px"},color:"darkprimary"}),null,16)]),default:e(()=>[a(w,null,{default:e(()=>[a(x,null,{default:e(()=>[I]),_:1}),a(V,null,{default:e(()=>[a(B,null,{default:e(()=>[a(k,null,{default:e(()=>[a(c,{cols:"12"},{default:e(()=>[a(N,{label:"Git 库链接",modelValue:s.extension_url,"onUpdate:modelValue":l[0]||(l[0]=t=>s.extension_url=t),required:""},null,8,["modelValue"])]),_:1})]),_:1})]),_:1}),L,O,i("small",null,u(s.status),1)]),_:1}),a(T,null,{default:e(()=>[a(m),a(r,{color:"blue-darken-1",variant:"text",onClick:l[1]||(l[1]=t=>s.dialog=!1)},{default:e(()=>[o(" 关闭 ")]),_:1}),a(r,{color:"blue-darken-1",variant:"text",loading:s.install_loading,onClick:l[2]||(l[2]=t=>s.newExtension(s.extension_url))},{default:e(()=>[o(" 安装 ")]),_:1},8,["loading"])]),_:1})]),_:1})]),_:1},8,["modelValue"]),a(D,{timeout:2e3,elevation:"24",color:s.snack_success,modelValue:s.snack_show,"onUpdate:modelValue":l[4]||(l[4]=t=>s.snack_show=t)},{default:e(()=>[o(u(s.snack_message),1)]),_:1},8,["color","modelValue"])],64))}});export{J as default};
addons/dashboard/dist/assets/FullLayout-58c19404.js (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
import{at as _,x as d,D as n,o as c,s as m,a as f,w as p,au as r,b as a,av as o,B as t,aw as h}from"./index-7c8bc001.js";const s={Sidebar_drawer:!0,Customizer_drawer:!1,mini_sidebar:!1,fontTheme:"Roboto",inputBg:!1},l=_({id:"customizer",state:()=>({Sidebar_drawer:s.Sidebar_drawer,Customizer_drawer:s.Customizer_drawer,mini_sidebar:s.mini_sidebar,fontTheme:"Poppins",inputBg:s.inputBg}),getters:{},actions:{SET_SIDEBAR_DRAWER(){this.Sidebar_drawer=!this.Sidebar_drawer},SET_MINI_SIDEBAR(e){this.mini_sidebar=e},SET_FONT(e){this.fontTheme=e}}}),u={class:"logo",style:{display:"flex","align-items":"center"}},b={style:{"font-size":"24px","font-weight":"1000"}},w={style:{"font-size":"20px","font-weight":"1000"}},S={style:{"font-size":"20px"}},z=d({__name:"LogoDark",setup(e){n("rgb(var(--v-theme-primary))"),n("rgb(var(--v-theme-secondary))");const i=l();return(g,B)=>(c(),m("div",u,[f(t(h),{to:"/",style:{"text-decoration":"none",color:"black"}},{default:p(()=>[r(a("span",b,"AstrBot 仪表盘",512),[[o,!t(i).mini_sidebar]]),r(a("span",w,"Astr",512),[[o,t(i).mini_sidebar]]),r(a("span",S,"Bot",512),[[o,t(i).mini_sidebar]])]),_:1})]))}});export{z as _,l as u};
addons/dashboard/dist/assets/LogoDark.vue_vue_type_script_setup_true_lang-7df35c25.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
import{av as _,x as d,D as n,o as c,s as m,a as f,w as p,Q as r,b as a,R as o,B as t,aw as h}from"./index-dc96e1be.js";const s={Sidebar_drawer:!0,Customizer_drawer:!1,mini_sidebar:!1,fontTheme:"Roboto",inputBg:!1},l=_({id:"customizer",state:()=>({Sidebar_drawer:s.Sidebar_drawer,Customizer_drawer:s.Customizer_drawer,mini_sidebar:s.mini_sidebar,fontTheme:"Poppins",inputBg:s.inputBg}),getters:{},actions:{SET_SIDEBAR_DRAWER(){this.Sidebar_drawer=!this.Sidebar_drawer},SET_MINI_SIDEBAR(e){this.mini_sidebar=e},SET_FONT(e){this.fontTheme=e}}}),u={class:"logo",style:{display:"flex","align-items":"center"}},b={style:{"font-size":"24px","font-weight":"1000"}},w={style:{"font-size":"20px","font-weight":"1000"}},S={style:{"font-size":"20px"}},z=d({__name:"LogoDark",setup(e){n("rgb(var(--v-theme-primary))"),n("rgb(var(--v-theme-secondary))");const i=l();return(g,B)=>(c(),m("div",u,[f(t(h),{to:"/",style:{"text-decoration":"none",color:"black"}},{default:p(()=>[r(a("span",b,"AstrBot 仪表盘",512),[[o,!t(i).mini_sidebar]]),r(a("span",w,"Astr",512),[[o,t(i).mini_sidebar]]),r(a("span",S,"Bot",512),[[o,t(i).mini_sidebar]])]),_:1})]))}});export{z as _,l as u};
@@ -1 +1 @@
import{_ as o}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-89ca5198.js";import{_ as i}from"./UiParentCard.vue_vue_type_script_setup_true_lang-03a5c441.js";import{x as n,D as a,o as c,s as m,a as e,w as t,f as d,b as f,V as _,F as u}from"./index-7c8bc001.js";const p=["innerHTML"],v=n({__name:"MaterialIcons",setup(b){const s=a({title:"Material Icons"}),r=a('<iframe src="https://materialdesignicons.com/" frameborder="0" width="100%" height="1000"></iframe>'),l=a([{title:"Icons",disabled:!1,href:"#"},{title:"Material Icons",disabled:!0,href:"#"}]);return(h,M)=>(c(),m(u,null,[e(o,{title:s.value.title,breadcrumbs:l.value},null,8,["title","breadcrumbs"]),e(_,null,{default:t(()=>[e(d,{cols:"12",md:"12"},{default:t(()=>[e(i,{title:"Material Icons"},{default:t(()=>[f("div",{innerHTML:r.value},null,8,p)]),_:1})]),_:1})]),_:1})],64))}});export{v as default};
import{_ as o}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-e31f96f8.js";import{_ as i}from"./UiParentCard.vue_vue_type_script_setup_true_lang-f2b2db58.js";import{x as n,D as a,o as c,s as m,a as e,w as t,f as d,b as f,V as _,F as u}from"./index-dc96e1be.js";const p=["innerHTML"],v=n({__name:"MaterialIcons",setup(b){const s=a({title:"Material Icons"}),r=a('<iframe src="https://materialdesignicons.com/" frameborder="0" width="100%" height="1000"></iframe>'),l=a([{title:"Icons",disabled:!1,href:"#"},{title:"Material Icons",disabled:!0,href:"#"}]);return(h,M)=>(c(),m(u,null,[e(o,{title:s.value.title,breadcrumbs:l.value},null,8,["title","breadcrumbs"]),e(_,null,{default:t(()=>[e(d,{cols:"12",md:"12"},{default:t(()=>[e(i,{title:"Material Icons"},{default:t(()=>[f("div",{innerHTML:r.value},null,8,p)]),_:1})]),_:1})]),_:1})],64))}});export{v as default};
@@ -1 +1 @@
import{_ as B}from"./LogoDark.vue_vue_type_script_setup_true_lang-4faa128a.js";import{x as y,D as o,o as b,s as U,a as e,w as a,b as n,B as $,d as u,f as d,A as _,e as f,V as r,O as m,an as A,as as E,F,c as T,N as q,J as V,L as P}from"./index-7c8bc001.js";const z="/assets/social-google-a359a253.svg",N=["src"],S=n("span",{class:"ml-2"},"Sign up with Google",-1),D=n("h5",{class:"text-h5 text-center my-4 mb-8"},"Sign up with Email address",-1),G={class:"d-sm-inline-flex align-center mt-2 mb-7 mb-sm-0 font-weight-bold"},L=n("a",{href:"#",class:"ml-1 text-lightText"},"Terms and Condition",-1),O={class:"mt-5 text-right"},j=y({__name:"AuthRegister",setup(w){const c=o(!1),i=o(!1),p=o(""),v=o(""),g=o(),h=o(""),x=o(""),k=o([s=>!!s||"Password is required",s=>s&&s.length<=10||"Password must be less than 10 characters"]),C=o([s=>!!s||"E-mail is required",s=>/.+@.+\..+/.test(s)||"E-mail must be valid"]);function R(){g.value.validate()}return(s,l)=>(b(),U(F,null,[e(u,{block:"",color:"primary",variant:"outlined",class:"text-lightText googleBtn"},{default:a(()=>[n("img",{src:$(z),alt:"google"},null,8,N),S]),_:1}),e(r,null,{default:a(()=>[e(d,{class:"d-flex align-center"},{default:a(()=>[e(_,{class:"custom-devider"}),e(u,{variant:"outlined",class:"orbtn",rounded:"md",size:"small"},{default:a(()=>[f("OR")]),_:1}),e(_,{class:"custom-devider"})]),_:1})]),_:1}),D,e(E,{ref_key:"Regform",ref:g,"lazy-validation":"",action:"/dashboards/analytical",class:"mt-7 loginForm"},{default:a(()=>[e(r,null,{default:a(()=>[e(d,{cols:"12",sm:"6"},{default:a(()=>[e(m,{modelValue:h.value,"onUpdate:modelValue":l[0]||(l[0]=t=>h.value=t),density:"comfortable","hide-details":"auto",variant:"outlined",color:"primary",label:"Firstname"},null,8,["modelValue"])]),_:1}),e(d,{cols:"12",sm:"6"},{default:a(()=>[e(m,{modelValue:x.value,"onUpdate:modelValue":l[1]||(l[1]=t=>x.value=t),density:"comfortable","hide-details":"auto",variant:"outlined",color:"primary",label:"Lastname"},null,8,["modelValue"])]),_:1})]),_:1}),e(m,{modelValue:v.value,"onUpdate:modelValue":l[2]||(l[2]=t=>v.value=t),rules:C.value,label:"Email Address / Username",class:"mt-4 mb-4",required:"",density:"comfortable","hide-details":"auto",variant:"outlined",color:"primary"},null,8,["modelValue","rules"]),e(m,{modelValue:p.value,"onUpdate:modelValue":l[3]||(l[3]=t=>p.value=t),rules:k.value,label:"Password",required:"",density:"comfortable",variant:"outlined",color:"primary","hide-details":"auto","append-icon":i.value?"mdi-eye":"mdi-eye-off",type:i.value?"text":"password","onClick:append":l[4]||(l[4]=t=>i.value=!i.value),class:"pwdInput"},null,8,["modelValue","rules","append-icon","type"]),n("div",G,[e(A,{modelValue:c.value,"onUpdate:modelValue":l[5]||(l[5]=t=>c.value=t),rules:[t=>!!t||"You must agree to continue!"],label:"Agree with?",required:"",color:"primary",class:"ms-n2","hide-details":""},null,8,["modelValue","rules"]),L]),e(u,{color:"secondary",block:"",class:"mt-2",variant:"flat",size:"large",onClick:l[6]||(l[6]=t=>R())},{default:a(()=>[f("Sign Up")]),_:1})]),_:1},512),n("div",O,[e(_),e(u,{variant:"plain",to:"/auth/login",class:"mt-2 text-capitalize mr-n2"},{default:a(()=>[f("Already have an account?")]),_:1})])],64))}});const I={class:"pa-7 pa-sm-12"},J=n("h2",{class:"text-secondary text-h2 mt-8"},"Sign up",-1),Y=n("h4",{class:"text-disabled text-h4 mt-3"},"Enter credentials to continue",-1),M=y({__name:"RegisterPage",setup(w){return(c,i)=>(b(),T(r,{class:"h-100vh","no-gutters":""},{default:a(()=>[e(d,{cols:"12",class:"d-flex align-center 
bg-lightprimary"},{default:a(()=>[e(q,null,{default:a(()=>[n("div",I,[e(r,{justify:"center"},{default:a(()=>[e(d,{cols:"12",lg:"10",xl:"6",md:"7"},{default:a(()=>[e(V,{elevation:"0",class:"loginBox"},{default:a(()=>[e(V,{variant:"outlined"},{default:a(()=>[e(P,{class:"pa-9"},{default:a(()=>[e(r,null,{default:a(()=>[e(d,{cols:"12",class:"text-center"},{default:a(()=>[e(B),J,Y]),_:1})]),_:1}),e(j)]),_:1})]),_:1})]),_:1})]),_:1})]),_:1})])]),_:1})]),_:1})]),_:1}))}});export{M as default};
import{_ as B}from"./LogoDark.vue_vue_type_script_setup_true_lang-7df35c25.js";import{x as y,D as o,o as b,s as U,a as e,w as a,b as n,B as $,d as u,f as d,A as _,e as f,V as r,O as m,ap as A,au as E,F,c as T,N as q,J as V,L as P}from"./index-dc96e1be.js";const z="/assets/social-google-a359a253.svg",N=["src"],S=n("span",{class:"ml-2"},"Sign up with Google",-1),D=n("h5",{class:"text-h5 text-center my-4 mb-8"},"Sign up with Email address",-1),G={class:"d-sm-inline-flex align-center mt-2 mb-7 mb-sm-0 font-weight-bold"},L=n("a",{href:"#",class:"ml-1 text-lightText"},"Terms and Condition",-1),O={class:"mt-5 text-right"},j=y({__name:"AuthRegister",setup(w){const c=o(!1),i=o(!1),p=o(""),v=o(""),g=o(),h=o(""),x=o(""),k=o([s=>!!s||"Password is required",s=>s&&s.length<=10||"Password must be less than 10 characters"]),C=o([s=>!!s||"E-mail is required",s=>/.+@.+\..+/.test(s)||"E-mail must be valid"]);function R(){g.value.validate()}return(s,l)=>(b(),U(F,null,[e(u,{block:"",color:"primary",variant:"outlined",class:"text-lightText googleBtn"},{default:a(()=>[n("img",{src:$(z),alt:"google"},null,8,N),S]),_:1}),e(r,null,{default:a(()=>[e(d,{class:"d-flex align-center"},{default:a(()=>[e(_,{class:"custom-devider"}),e(u,{variant:"outlined",class:"orbtn",rounded:"md",size:"small"},{default:a(()=>[f("OR")]),_:1}),e(_,{class:"custom-devider"})]),_:1})]),_:1}),D,e(E,{ref_key:"Regform",ref:g,"lazy-validation":"",action:"/dashboards/analytical",class:"mt-7 loginForm"},{default:a(()=>[e(r,null,{default:a(()=>[e(d,{cols:"12",sm:"6"},{default:a(()=>[e(m,{modelValue:h.value,"onUpdate:modelValue":l[0]||(l[0]=t=>h.value=t),density:"comfortable","hide-details":"auto",variant:"outlined",color:"primary",label:"Firstname"},null,8,["modelValue"])]),_:1}),e(d,{cols:"12",sm:"6"},{default:a(()=>[e(m,{modelValue:x.value,"onUpdate:modelValue":l[1]||(l[1]=t=>x.value=t),density:"comfortable","hide-details":"auto",variant:"outlined",color:"primary",label:"Lastname"},null,8,["modelValue"])]),_:1})]),_:1}),e(m,{modelValue:v.value,"onUpdate:modelValue":l[2]||(l[2]=t=>v.value=t),rules:C.value,label:"Email Address / Username",class:"mt-4 mb-4",required:"",density:"comfortable","hide-details":"auto",variant:"outlined",color:"primary"},null,8,["modelValue","rules"]),e(m,{modelValue:p.value,"onUpdate:modelValue":l[3]||(l[3]=t=>p.value=t),rules:k.value,label:"Password",required:"",density:"comfortable",variant:"outlined",color:"primary","hide-details":"auto","append-icon":i.value?"mdi-eye":"mdi-eye-off",type:i.value?"text":"password","onClick:append":l[4]||(l[4]=t=>i.value=!i.value),class:"pwdInput"},null,8,["modelValue","rules","append-icon","type"]),n("div",G,[e(A,{modelValue:c.value,"onUpdate:modelValue":l[5]||(l[5]=t=>c.value=t),rules:[t=>!!t||"You must agree to continue!"],label:"Agree with?",required:"",color:"primary",class:"ms-n2","hide-details":""},null,8,["modelValue","rules"]),L]),e(u,{color:"secondary",block:"",class:"mt-2",variant:"flat",size:"large",onClick:l[6]||(l[6]=t=>R())},{default:a(()=>[f("Sign Up")]),_:1})]),_:1},512),n("div",O,[e(_),e(u,{variant:"plain",to:"/auth/login",class:"mt-2 text-capitalize mr-n2"},{default:a(()=>[f("Already have an account?")]),_:1})])],64))}});const I={class:"pa-7 pa-sm-12"},J=n("h2",{class:"text-secondary text-h2 mt-8"},"Sign up",-1),Y=n("h4",{class:"text-disabled text-h4 mt-3"},"Enter credentials to continue",-1),M=y({__name:"RegisterPage",setup(w){return(c,i)=>(b(),T(r,{class:"h-100vh","no-gutters":""},{default:a(()=>[e(d,{cols:"12",class:"d-flex align-center 
bg-lightprimary"},{default:a(()=>[e(q,null,{default:a(()=>[n("div",I,[e(r,{justify:"center"},{default:a(()=>[e(d,{cols:"12",lg:"10",xl:"6",md:"7"},{default:a(()=>[e(V,{elevation:"0",class:"loginBox"},{default:a(()=>[e(V,{variant:"outlined"},{default:a(()=>[e(P,{class:"pa-9"},{default:a(()=>[e(r,null,{default:a(()=>[e(d,{cols:"12",class:"text-center"},{default:a(()=>[e(B),J,Y]),_:1})]),_:1}),e(j)]),_:1})]),_:1})]),_:1})]),_:1})]),_:1})])]),_:1})]),_:1})]),_:1}))}});export{M as default};
@@ -1 +1 @@
import{_ as c}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-89ca5198.js";import{_ as f}from"./UiParentCard.vue_vue_type_script_setup_true_lang-03a5c441.js";import{x as m,D as s,o as l,s as r,a as e,w as a,f as i,V as o,F as d,u as _,J as p,U as b,b as h,t as g}from"./index-7c8bc001.js";const v=m({__name:"ShadowPage",setup(w){const n=s({title:"Shadow Page"}),u=s([{title:"Utilities",disabled:!1,href:"#"},{title:"Shadow",disabled:!0,href:"#"}]);return(V,x)=>(l(),r(d,null,[e(c,{title:n.value.title,breadcrumbs:u.value},null,8,["title","breadcrumbs"]),e(o,null,{default:a(()=>[e(i,{cols:"12",md:"12"},{default:a(()=>[e(f,{title:"Basic Shadow"},{default:a(()=>[e(o,{justify:"center"},{default:a(()=>[(l(),r(d,null,_(25,t=>e(i,{key:t,cols:"auto"},{default:a(()=>[e(p,{height:"100",width:"100",class:b(["mb-5",["d-flex justify-center align-center bg-primary",`elevation-${t}`]])},{default:a(()=>[h("div",null,g(t-1),1)]),_:2},1032,["class"])]),_:2},1024)),64))]),_:1})]),_:1})]),_:1})]),_:1})],64))}});export{v as default};
import{_ as c}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-e31f96f8.js";import{_ as f}from"./UiParentCard.vue_vue_type_script_setup_true_lang-f2b2db58.js";import{x as m,D as s,o as l,s as r,a as e,w as a,f as i,V as o,F as d,u as _,J as p,X as b,b as h,t as g}from"./index-dc96e1be.js";const v=m({__name:"ShadowPage",setup(w){const n=s({title:"Shadow Page"}),u=s([{title:"Utilities",disabled:!1,href:"#"},{title:"Shadow",disabled:!0,href:"#"}]);return(V,x)=>(l(),r(d,null,[e(c,{title:n.value.title,breadcrumbs:u.value},null,8,["title","breadcrumbs"]),e(o,null,{default:a(()=>[e(i,{cols:"12",md:"12"},{default:a(()=>[e(f,{title:"Basic Shadow"},{default:a(()=>[e(o,{justify:"center"},{default:a(()=>[(l(),r(d,null,_(25,t=>e(i,{key:t,cols:"auto"},{default:a(()=>[e(p,{height:"100",width:"100",class:b(["mb-5",["d-flex justify-center align-center bg-primary",`elevation-${t}`]])},{default:a(()=>[h("div",null,g(t-1),1)]),_:2},1032,["class"])]),_:2},1024)),64))]),_:1})]),_:1})]),_:1})]),_:1})],64))}});export{v as default};
@@ -1 +1 @@
import{_ as o}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-89ca5198.js";import{_ as n}from"./UiParentCard.vue_vue_type_script_setup_true_lang-03a5c441.js";import{x as c,D as a,o as i,s as m,a as e,w as t,f as d,b as f,V as _,F as u}from"./index-7c8bc001.js";const b=["innerHTML"],w=c({__name:"TablerIcons",setup(p){const s=a({title:"Tabler Icons"}),r=a('<iframe src="https://tablericons.com/" frameborder="0" width="100%" height="600"></iframe>'),l=a([{title:"Icons",disabled:!1,href:"#"},{title:"Tabler Icons",disabled:!0,href:"#"}]);return(h,T)=>(i(),m(u,null,[e(o,{title:s.value.title,breadcrumbs:l.value},null,8,["title","breadcrumbs"]),e(_,null,{default:t(()=>[e(d,{cols:"12",md:"12"},{default:t(()=>[e(n,{title:"Tabler Icons"},{default:t(()=>[f("div",{innerHTML:r.value},null,8,b)]),_:1})]),_:1})]),_:1})],64))}});export{w as default};
import{_ as o}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-e31f96f8.js";import{_ as n}from"./UiParentCard.vue_vue_type_script_setup_true_lang-f2b2db58.js";import{x as c,D as a,o as i,s as m,a as e,w as t,f as d,b as f,V as _,F as u}from"./index-dc96e1be.js";const b=["innerHTML"],w=c({__name:"TablerIcons",setup(p){const s=a({title:"Tabler Icons"}),r=a('<iframe src="https://tablericons.com/" frameborder="0" width="100%" height="600"></iframe>'),l=a([{title:"Icons",disabled:!1,href:"#"},{title:"Tabler Icons",disabled:!0,href:"#"}]);return(h,T)=>(i(),m(u,null,[e(o,{title:s.value.title,breadcrumbs:l.value},null,8,["title","breadcrumbs"]),e(_,null,{default:t(()=>[e(d,{cols:"12",md:"12"},{default:t(()=>[e(n,{title:"Tabler Icons"},{default:t(()=>[f("div",{innerHTML:r.value},null,8,b)]),_:1})]),_:1})]),_:1})],64))}});export{w as default};
@@ -1 +1 @@
import{_ as m}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-89ca5198.js";import{_ as v}from"./UiParentCard.vue_vue_type_script_setup_true_lang-03a5c441.js";import{x as f,o as i,c as g,w as e,a,a6 as y,K as b,e as w,t as d,A as C,L as V,a7 as L,J as _,D as o,s as h,f as k,b as t,F as x,u as B,U as H,V as T}from"./index-7c8bc001.js";const s=f({__name:"UiChildCard",props:{title:String},setup(r){const l=r;return(n,c)=>(i(),g(_,{variant:"outlined"},{default:e(()=>[a(y,{class:"py-3"},{default:e(()=>[a(b,{class:"text-h5"},{default:e(()=>[w(d(l.title),1)]),_:1})]),_:1}),a(C),a(V,null,{default:e(()=>[L(n.$slots,"default")]),_:3})]),_:3}))}}),D={class:"d-flex flex-column gap-1"},S={class:"text-caption pa-2 bg-lightprimary"},z=t("div",{class:"text-grey"},"Class",-1),N={class:"font-weight-medium"},U=t("div",null,[t("p",{class:"text-left"},"Left aligned on all viewport sizes."),t("p",{class:"text-center"},"Center aligned on all viewport sizes."),t("p",{class:"text-right"},"Right aligned on all viewport sizes."),t("p",{class:"text-sm-left"},"Left aligned on viewports SM (small) or wider."),t("p",{class:"text-right text-md-left"},"Left aligned on viewports MD (medium) or wider."),t("p",{class:"text-right text-lg-left"},"Left aligned on viewports LG (large) or wider."),t("p",{class:"text-right text-xl-left"},"Left aligned on viewports XL (extra-large) or wider.")],-1),$=t("div",{class:"d-flex justify-space-between flex-row"},[t("a",{href:"#",class:"text-decoration-none"},"Non-underlined link"),t("div",{class:"text-decoration-line-through"},"Line-through text"),t("div",{class:"text-decoration-overline"},"Overline text"),t("div",{class:"text-decoration-underline"},"Underline text")],-1),M=t("div",null,[t("p",{class:"text-high-emphasis"},"High-emphasis has an opacity of 87% in light theme and 100% in dark."),t("p",{class:"text-medium-emphasis"},"Medium-emphasis text and hint text have opacities of 60% in light theme and 70% in dark."),t("p",{class:"text-disabled"},"Disabled text has an opacity of 38% in light theme and 50% in dark.")],-1),A=f({__name:"TypographyPage",setup(r){const l=o({title:"Typography Page"}),n=o([["Heading 1","text-h1"],["Heading 2","text-h2"],["Heading 3","text-h3"],["Heading 4","text-h4"],["Heading 5","text-h5"],["Heading 6","text-h6"],["Subtitle 1","text-subtitle-1"],["Subtitle 2","text-subtitle-2"],["Body 1","text-body-1"],["Body 2","text-body-2"],["Button","text-button"],["Caption","text-caption"],["Overline","text-overline"]]),c=o([{title:"Utilities",disabled:!1,href:"#"},{title:"Typography",disabled:!0,href:"#"}]);return(O,F)=>(i(),h(x,null,[a(m,{title:l.value.title,breadcrumbs:c.value},null,8,["title","breadcrumbs"]),a(T,null,{default:e(()=>[a(k,{cols:"12",md:"12"},{default:e(()=>[a(v,{title:"Basic Typography"},{default:e(()=>[a(s,{title:"Heading"},{default:e(()=>[t("div",D,[(i(!0),h(x,null,B(n.value,([p,u])=>(i(),g(_,{variant:"outlined",key:p,class:"my-4"},{default:e(()=>[t("div",{class:H([u,"pa-2"])},d(p),3),t("div",S,[z,t("div",N,d(u),1)])]),_:2},1024))),128))])]),_:1}),a(s,{title:"Text-alignment",class:"mt-8"},{default:e(()=>[U]),_:1}),a(s,{title:"Decoration",class:"mt-8"},{default:e(()=>[$]),_:1}),a(s,{title:"Opacity",class:"mt-8"},{default:e(()=>[M]),_:1})]),_:1})]),_:1})]),_:1})],64))}});export{A as default};
import{_ as m}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-e31f96f8.js";import{_ as v}from"./UiParentCard.vue_vue_type_script_setup_true_lang-f2b2db58.js";import{x as f,o as i,c as g,w as e,a,a8 as y,K as b,e as w,t as d,A as C,L as V,a9 as L,J as _,D as o,s as h,f as k,b as t,F as x,u as B,X as H,V as T}from"./index-dc96e1be.js";const s=f({__name:"UiChildCard",props:{title:String},setup(r){const l=r;return(n,c)=>(i(),g(_,{variant:"outlined"},{default:e(()=>[a(y,{class:"py-3"},{default:e(()=>[a(b,{class:"text-h5"},{default:e(()=>[w(d(l.title),1)]),_:1})]),_:1}),a(C),a(V,null,{default:e(()=>[L(n.$slots,"default")]),_:3})]),_:3}))}}),D={class:"d-flex flex-column gap-1"},S={class:"text-caption pa-2 bg-lightprimary"},z=t("div",{class:"text-grey"},"Class",-1),N={class:"font-weight-medium"},$=t("div",null,[t("p",{class:"text-left"},"Left aligned on all viewport sizes."),t("p",{class:"text-center"},"Center aligned on all viewport sizes."),t("p",{class:"text-right"},"Right aligned on all viewport sizes."),t("p",{class:"text-sm-left"},"Left aligned on viewports SM (small) or wider."),t("p",{class:"text-right text-md-left"},"Left aligned on viewports MD (medium) or wider."),t("p",{class:"text-right text-lg-left"},"Left aligned on viewports LG (large) or wider."),t("p",{class:"text-right text-xl-left"},"Left aligned on viewports XL (extra-large) or wider.")],-1),M=t("div",{class:"d-flex justify-space-between flex-row"},[t("a",{href:"#",class:"text-decoration-none"},"Non-underlined link"),t("div",{class:"text-decoration-line-through"},"Line-through text"),t("div",{class:"text-decoration-overline"},"Overline text"),t("div",{class:"text-decoration-underline"},"Underline text")],-1),O=t("div",null,[t("p",{class:"text-high-emphasis"},"High-emphasis has an opacity of 87% in light theme and 100% in dark."),t("p",{class:"text-medium-emphasis"},"Medium-emphasis text and hint text have opacities of 60% in light theme and 70% in dark."),t("p",{class:"text-disabled"},"Disabled text has an opacity of 38% in light theme and 50% in dark.")],-1),j=f({__name:"TypographyPage",setup(r){const l=o({title:"Typography Page"}),n=o([["Heading 1","text-h1"],["Heading 2","text-h2"],["Heading 3","text-h3"],["Heading 4","text-h4"],["Heading 5","text-h5"],["Heading 6","text-h6"],["Subtitle 1","text-subtitle-1"],["Subtitle 2","text-subtitle-2"],["Body 1","text-body-1"],["Body 2","text-body-2"],["Button","text-button"],["Caption","text-caption"],["Overline","text-overline"]]),c=o([{title:"Utilities",disabled:!1,href:"#"},{title:"Typography",disabled:!0,href:"#"}]);return(U,F)=>(i(),h(x,null,[a(m,{title:l.value.title,breadcrumbs:c.value},null,8,["title","breadcrumbs"]),a(T,null,{default:e(()=>[a(k,{cols:"12",md:"12"},{default:e(()=>[a(v,{title:"Basic Typography"},{default:e(()=>[a(s,{title:"Heading"},{default:e(()=>[t("div",D,[(i(!0),h(x,null,B(n.value,([p,u])=>(i(),g(_,{variant:"outlined",key:p,class:"my-4"},{default:e(()=>[t("div",{class:H([u,"pa-2"])},d(p),3),t("div",S,[z,t("div",N,d(u),1)])]),_:2},1024))),128))])]),_:1}),a(s,{title:"Text-alignment",class:"mt-8"},{default:e(()=>[$]),_:1}),a(s,{title:"Decoration",class:"mt-8"},{default:e(()=>[M]),_:1}),a(s,{title:"Opacity",class:"mt-8"},{default:e(()=>[O]),_:1})]),_:1})]),_:1})]),_:1})],64))}});export{j as default};
|
||||
@@ -1 +1 @@
|
||||
import{x as n,o,c as i,w as e,a,a6 as d,b as c,K as u,e as p,t as _,a7 as s,A as f,L as V,J as m}from"./index-7c8bc001.js";const C={class:"d-sm-flex align-center justify-space-between"},h=n({__name:"UiParentCard",props:{title:String},setup(l){const r=l;return(t,x)=>(o(),i(m,{variant:"outlined",elevation:"0",class:"withbg"},{default:e(()=>[a(d,null,{default:e(()=>[c("div",C,[a(u,null,{default:e(()=>[p(_(r.title),1)]),_:1}),s(t.$slots,"action")])]),_:3}),a(f),a(V,null,{default:e(()=>[s(t.$slots,"default")]),_:3})]),_:3}))}});export{h as _};
|
||||
import{x as n,o,c as i,w as e,a,a8 as d,b as c,K as u,e as p,t as _,a9 as s,A as f,L as V,J as m}from"./index-dc96e1be.js";const C={class:"d-sm-flex align-center justify-space-between"},h=n({__name:"UiParentCard",props:{title:String},setup(l){const r=l;return(t,x)=>(o(),i(m,{variant:"outlined",elevation:"0",class:"withbg"},{default:e(()=>[a(d,null,{default:e(()=>[c("div",C,[a(u,null,{default:e(()=>[p(_(r.title),1)]),_:1}),s(t.$slots,"action")])]),_:3}),a(f),a(V,null,{default:e(()=>[s(t.$slots,"default")]),_:3})]),_:3}))}});export{h as _};
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
2
addons/dashboard/dist/index.html
vendored
@@ -11,7 +11,7 @@
|
||||
href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=Poppins:wght@400;500;600;700&family=Roboto:wght@400;500;700&display=swap"
|
||||
/>
|
||||
<title>AstrBot - 仪表盘</title>
|
||||
<script type="module" crossorigin src="/assets/index-7c8bc001.js"></script>
|
||||
<script type="module" crossorigin src="/assets/index-dc96e1be.js"></script>
|
||||
<link rel="stylesheet" href="/assets/index-0f1523f3.css">
|
||||
</head>
|
||||
<body>
|
||||
|
||||
@@ -9,67 +9,75 @@ import sys
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import asyncio
|
||||
from util.plugin_dev.api.v1.config import update_config
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
def shutdown_bot(delay_s: int):
|
||||
time.sleep(delay_s)
|
||||
py = sys.executable
|
||||
os.execl(py, py, *sys.argv)
|
||||
|
||||
@dataclass
|
||||
class DashBoardConfig():
|
||||
config_type: str
|
||||
name: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
path: Optional[str] = None # 仅 item 才需要
|
||||
body: Optional[list['DashBoardConfig']] = None # 仅 group 才需要
|
||||
value: Optional[Union[list, dict, str, int, bool]] = None # 仅 item 才需要
|
||||
val_type: Optional[str] = None # 仅 item 才需要
|
||||
path: Optional[str] = None # 仅 item 才需要
|
||||
body: Optional[list['DashBoardConfig']] = None # 仅 group 才需要
|
||||
value: Optional[Union[list, dict, str, int, bool]] = None # 仅 item 才需要
|
||||
val_type: Optional[str] = None # 仅 item 才需要
|
||||
|
||||
|
||||
class DashBoardHelper():
|
||||
def __init__(self, dashboard_data: DashBoardData, config: dict):
|
||||
def __init__(self, global_object, config: dict):
|
||||
self.loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self.loop)
|
||||
dashboard_data = global_object.dashboard_data
|
||||
dashboard_data.configs = {
|
||||
"data": []
|
||||
}
|
||||
self.parse_default_config(dashboard_data, config)
|
||||
self.dashboard_data: DashBoardData = dashboard_data
|
||||
self.dashboard = AstrBotDashBoard(self.dashboard_data)
|
||||
self.key_map = {} # key: uuid, value: config key name
|
||||
self.dashboard = AstrBotDashBoard(global_object)
|
||||
self.key_map = {} # key: uuid, value: config key name
|
||||
self.cc = CmdConfig()
|
||||
|
||||
|
||||
@self.dashboard.register("post_configs")
|
||||
def on_post_configs(post_configs: dict):
|
||||
try:
|
||||
gu.log(f"收到配置更新请求", gu.LEVEL_INFO, tag="可视化面板")
|
||||
self.save_config(post_configs)
|
||||
self.parse_default_config(self.dashboard_data, self.cc.get_all())
|
||||
if 'base_config' in post_configs:
|
||||
self.save_config(
|
||||
post_configs['base_config'], namespace='') # 基础配置
|
||||
self.save_config(
|
||||
post_configs['config'], namespace=post_configs['namespace']) # 选定配置
|
||||
self.parse_default_config(
|
||||
self.dashboard_data, self.cc.get_all())
|
||||
# 重启
|
||||
threading.Thread(target=shutdown_bot, args=(2,), daemon=True).start()
|
||||
threading.Thread(target=self.dashboard.shutdown_bot,
|
||||
args=(2,), daemon=True).start()
|
||||
except Exception as e:
|
||||
gu.log(f"在保存配置时发生错误:{e}", gu.LEVEL_ERROR, tag="可视化面板")
|
||||
raise e
|
||||
|
||||
|
||||
# 将 config.yaml、 中的配置解析到 dashboard_data.configs 中
|
||||
def parse_default_config(self, dashboard_data: DashBoardData, config: dict):
|
||||
|
||||
|
||||
try:
|
||||
bot_platform_group = DashBoardConfig(
|
||||
qq_official_platform_group = DashBoardConfig(
|
||||
config_type="group",
|
||||
name="机器人平台配置",
|
||||
description="机器人平台配置描述",
|
||||
name="QQ_OFFICIAL 平台配置",
|
||||
description="",
|
||||
body=[
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="bool",
|
||||
name="启用 QQ 频道平台",
|
||||
description="就是你想到的那个 QQ 频道平台。详见 q.qq.com",
|
||||
name="启用 QQ_OFFICIAL 平台",
|
||||
description="官方的接口,仅支持 QQ 频道。详见 q.qq.com",
|
||||
value=config['qqbot']['enable'],
|
||||
path="qqbot.enable",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="QQ机器人APPID",
|
||||
description="详见 q.qq.com",
|
||||
value=config['qqbot']['appid'],
|
||||
@@ -77,7 +85,7 @@ class DashBoardHelper():
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="QQ机器人令牌",
|
||||
description="详见 q.qq.com",
|
||||
value=config['qqbot']['token'],
|
||||
@@ -85,129 +93,46 @@ class DashBoardHelper():
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="QQ机器人 Secret",
|
||||
description="详见 q.qq.com",
|
||||
value=config['qqbot_secret'],
|
||||
path="qqbot_secret",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="divider"
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="bool",
|
||||
name="启用 GO-CQHTTP 平台",
|
||||
description="gocq 是一个基于 HTTP 协议的 CQHTTP 协议的实现。详见 github.com/Mrs4s/go-cqhttp",
|
||||
value=config['gocqbot']['enable'],
|
||||
path="gocqbot.enable",
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
proxy_group = DashBoardConfig(
|
||||
config_type="group",
|
||||
name="代理配置",
|
||||
description="代理配置描述",
|
||||
body=[
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
name="HTTP 代理地址",
|
||||
description="建议上下一致",
|
||||
value=config['http_proxy'],
|
||||
path="proxy",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
name="HTTPS 代理地址",
|
||||
description="建议上下一致",
|
||||
value=config['https_proxy'],
|
||||
path="proxy",
|
||||
)
|
||||
]
|
||||
)
|
||||
general_platform_detail_group = DashBoardConfig(
|
||||
config_type="group",
|
||||
name="通用平台配置",
|
||||
description="",
|
||||
body=[
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="bool",
|
||||
name="启动消息文字转图片",
|
||||
description="启动后,机器人会将消息转换为图片发送,以降低风控风险。",
|
||||
value=config['qq_pic_mode'],
|
||||
path="qq_pic_mode",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="int",
|
||||
name="消息限制时间",
|
||||
description="在此时间内,机器人不会回复同一个用户的消息。单位:秒",
|
||||
value=config['limit']['time'],
|
||||
path="limit.time",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="int",
|
||||
name="消息限制次数",
|
||||
description="在上面的时间内,如果用户发送消息超过此次数,则机器人不会回复。单位:次",
|
||||
value=config['limit']['count'],
|
||||
path="limit.count",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
name="回复前缀",
|
||||
description="[xxxx] 你好! 其中xxxx是你可以填写的前缀。如果为空则不显示。",
|
||||
value=config['reply_prefix'],
|
||||
path="reply_prefix",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
name="管理员用户 ID",
|
||||
description="对机器人 !myid 即可获得。如果此功能不可用,请加群 322154837",
|
||||
value=config['gocq_qqchan_admin'],
|
||||
path="gocq_qqchan_admin",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="list",
|
||||
name="通用管理员用户 ID(同上,此项支持多个管理员)",
|
||||
description="",
|
||||
value=config['other_admins'],
|
||||
path="other_admins",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="bool",
|
||||
name="独立会话",
|
||||
description="是否启用独立会话模式,即 1 个用户自然账号 1 个会话。",
|
||||
value=config['uniqueSessionMode'],
|
||||
path="uniqueSessionMode",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="bool",
|
||||
name="是否允许 QQ 频道私聊",
|
||||
description="仅针对 QQ 频道 SDK,而非 GO-CQHTTP。如果启用,那么机器人会响应私聊消息。",
|
||||
description="如果启用,机器人会响应私聊消息。",
|
||||
value=config['direct_message_mode'],
|
||||
path="direct_message_mode",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="bool",
|
||||
name="是否接收QQ群消息",
|
||||
description="需要机器人有相应的群消息接收权限。在 q.qq.com 上查看。",
|
||||
value=config['qqofficial_enable_group_message'],
|
||||
path="qqofficial_enable_group_message",
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
gocq_platform_detail_group = DashBoardConfig(
|
||||
qq_gocq_platform_group = DashBoardConfig(
|
||||
config_type="group",
|
||||
name="GO-CQHTTP 平台配置",
|
||||
name="go-cqhttp",
|
||||
description="",
|
||||
body=[
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="bool",
|
||||
name="启用",
|
||||
description="",
|
||||
value=config['gocqbot']['enable'],
|
||||
path="gocqbot.enable",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="str",
|
||||
name="HTTP 服务器地址",
|
||||
description="",
|
||||
value=config['gocq_host'],
|
||||
@@ -225,7 +150,7 @@ class DashBoardHelper():
|
||||
config_type="item",
|
||||
val_type="int",
|
||||
name="WebSocket 服务器端口",
|
||||
description="",
|
||||
description="目前仅支持正向 WebSocket",
|
||||
value=config['gocq_websocket_port'],
|
||||
path="gocq_websocket_port",
|
||||
),
|
||||
@@ -272,39 +197,103 @@ class DashBoardHelper():
|
||||
]
|
||||
)
|
||||
|
||||
llm_group = DashBoardConfig(
|
||||
general_platform_detail_group = DashBoardConfig(
|
||||
config_type="group",
|
||||
name="LLM 配置",
|
||||
name="通用平台配置",
|
||||
description="",
|
||||
body=[
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="bool",
|
||||
name="启动消息文字转图片",
|
||||
description="启动后,机器人会将消息转换为图片发送,以降低风控风险。",
|
||||
value=config['qq_pic_mode'],
|
||||
path="qq_pic_mode",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="int",
|
||||
name="消息限制时间",
|
||||
description="在此时间内,机器人不会回复同一个用户的消息。单位:秒",
|
||||
value=config['limit']['time'],
|
||||
path="limit.time",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="int",
|
||||
name="消息限制次数",
|
||||
description="在上面的时间内,如果用户发送消息超过此次数,则机器人不会回复。单位:次",
|
||||
value=config['limit']['count'],
|
||||
path="limit.count",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="str",
|
||||
name="回复前缀",
|
||||
description="[xxxx] 你好! 其中xxxx是你可以填写的前缀。如果为空则不显示。",
|
||||
value=config['reply_prefix'],
|
||||
path="reply_prefix",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="list",
|
||||
name="通用管理员用户 ID(支持多个管理员)。通过 !myid 指令获取。",
|
||||
description="",
|
||||
value=config['other_admins'],
|
||||
path="other_admins",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="bool",
|
||||
name="独立会话",
|
||||
description="是否启用独立会话模式,即 1 个用户自然账号 1 个会话。",
|
||||
value=config['uniqueSessionMode'],
|
||||
path="uniqueSessionMode",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="str",
|
||||
name="LLM 唤醒词",
|
||||
description="如果不为空, 那么只有当消息以此词开头时,才会调用大语言模型进行回复。如设置为 /chat,那么只有当消息以 /chat 开头时,才会调用大语言模型进行回复。",
|
||||
value=config['llm_wake_prefix'],
|
||||
path="llm_wake_prefix",
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
openai_official_llm_group = DashBoardConfig(
|
||||
config_type="group",
|
||||
name="OpenAI 官方接口类设置",
|
||||
description="",
|
||||
body=[
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="list",
|
||||
name="OpenAI API KEY",
|
||||
description="OpenAI API 的 KEY。支持使用非官方但是兼容的 API。",
|
||||
name="OpenAI API Key",
|
||||
description="OpenAI API 的 Key。支持使用非官方但兼容的 API(第三方中转key)。",
|
||||
value=config['openai']['key'],
|
||||
path="openai.key",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
name="OpenAI API 节点地址",
|
||||
val_type="str",
|
||||
name="OpenAI API 节点地址(api base)",
|
||||
description="OpenAI API 的节点地址,配合非官方 API 使用。如果不想填写,那么请填写 none",
|
||||
value=config['openai']['api_base'],
|
||||
path="openai.api_base",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
name="OpenAI 模型",
|
||||
description="OpenAI 模型。详见 https://platform.openai.com/docs/api-reference/chat",
|
||||
val_type="str",
|
||||
name="OpenAI model",
|
||||
description="OpenAI LLM 模型。详见 https://platform.openai.com/docs/api-reference/chat",
|
||||
value=config['openai']['chatGPTConfigs']['model'],
|
||||
path="openai.chatGPTConfigs.model",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="int",
|
||||
name="OpenAI 最大生成长度",
|
||||
name="OpenAI max_tokens",
|
||||
description="OpenAI 最大生成长度。详见 https://platform.openai.com/docs/api-reference/chat",
|
||||
value=config['openai']['chatGPTConfigs']['max_tokens'],
|
||||
path="openai.chatGPTConfigs.max_tokens",
|
||||
@@ -312,7 +301,7 @@ class DashBoardHelper():
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="float",
|
||||
name="OpenAI 温度",
|
||||
name="OpenAI temperature",
|
||||
description="OpenAI 温度。详见 https://platform.openai.com/docs/api-reference/chat",
|
||||
value=config['openai']['chatGPTConfigs']['temperature'],
|
||||
path="openai.chatGPTConfigs.temperature",
|
||||
@@ -351,7 +340,7 @@ class DashBoardHelper():
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="OpenAI 图像生成模型",
|
||||
description="OpenAI 图像生成模型。",
|
||||
value=config['openai_image_generate']['model'],
|
||||
@@ -359,7 +348,7 @@ class DashBoardHelper():
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="OpenAI 图像生成大小",
|
||||
description="OpenAI 图像生成大小。",
|
||||
value=config['openai_image_generate']['size'],
|
||||
@@ -367,7 +356,7 @@ class DashBoardHelper():
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="OpenAI 图像生成风格",
|
||||
description="OpenAI 图像生成风格。修改前请参考 OpenAI 官方文档",
|
||||
value=config['openai_image_generate']['style'],
|
||||
@@ -375,7 +364,7 @@ class DashBoardHelper():
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="OpenAI 图像生成质量",
|
||||
description="OpenAI 图像生成质量。修改前请参考 OpenAI 官方文档",
|
||||
value=config['openai_image_generate']['quality'],
|
||||
@@ -383,15 +372,23 @@ class DashBoardHelper():
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
name="大语言模型问题题首提示词",
|
||||
val_type="str",
|
||||
name="问题题首提示词",
|
||||
description="如果填写了此项,在每个对大语言模型的请求中,都会在问题前加上此提示词。",
|
||||
value=config['llm_env_prompt'],
|
||||
path="llm_env_prompt",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="str",
|
||||
name="默认人格文本",
|
||||
description="默认人格文本",
|
||||
value=config['default_personality_str'],
|
||||
path="default_personality_str",
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
baidu_aip_group = DashBoardConfig(
|
||||
config_type="group",
|
||||
name="百度内容审核",
|
||||
@@ -405,12 +402,9 @@ class DashBoardHelper():
|
||||
value=config['baidu_aip']['enable'],
|
||||
path="baidu_aip.enable"
|
||||
),
|
||||
# "app_id": null,
|
||||
# "api_key": null,
|
||||
# "secret_key": null
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="APP ID",
|
||||
description="",
|
||||
value=config['baidu_aip']['app_id'],
|
||||
@@ -418,7 +412,7 @@ class DashBoardHelper():
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="API KEY",
|
||||
description="",
|
||||
value=config['baidu_aip']['api_key'],
|
||||
@@ -426,7 +420,7 @@ class DashBoardHelper():
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="SECRET KEY",
|
||||
description="",
|
||||
value=config['baidu_aip']['secret_key'],
|
||||
@@ -435,25 +429,30 @@ class DashBoardHelper():
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
|
||||
other_group = DashBoardConfig(
|
||||
config_type="group",
|
||||
name="其他配置",
|
||||
description="其他配置描述",
|
||||
body=[
|
||||
# 人格
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
name="默认人格文本",
|
||||
description="默认人格文本",
|
||||
value=config['default_personality_str'],
|
||||
path="default_personality_str",
|
||||
val_type="str",
|
||||
name="HTTP 代理地址",
|
||||
description="建议上下一致",
|
||||
value=config['http_proxy'],
|
||||
path="http_proxy",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="string",
|
||||
val_type="str",
|
||||
name="HTTPS 代理地址",
|
||||
description="建议上下一致",
|
||||
value=config['https_proxy'],
|
||||
path="https_proxy",
|
||||
),
|
||||
DashBoardConfig(
|
||||
config_type="item",
|
||||
val_type="str",
|
||||
name="面板用户名",
|
||||
description="是的,就是你理解的这个面板的用户名",
|
||||
value=config['dashboard_username'],
|
||||
@@ -461,31 +460,26 @@ class DashBoardHelper():
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
dashboard_data.configs['data'] = [
|
||||
bot_platform_group,
|
||||
qq_official_platform_group,
|
||||
qq_gocq_platform_group,
|
||||
general_platform_detail_group,
|
||||
gocq_platform_detail_group,
|
||||
proxy_group,
|
||||
llm_group,
|
||||
openai_official_llm_group,
|
||||
other_group,
|
||||
baidu_aip_group
|
||||
]
|
||||
|
||||
|
||||
except Exception as e:
|
||||
gu.log(f"配置文件解析错误:{e}", gu.LEVEL_ERROR)
|
||||
logger.error(f"配置文件解析错误:{e}")
|
||||
raise e
|
||||
|
||||
|
||||
def save_config(self, post_config: dict):
|
||||
|
||||
def save_config(self, post_config: list, namespace: str):
|
||||
'''
|
||||
根据 path 解析并保存配置
|
||||
'''
|
||||
|
||||
queue = []
|
||||
for config in post_config['data']:
|
||||
queue.append(config)
|
||||
|
||||
|
||||
queue = post_config
|
||||
while len(queue) > 0:
|
||||
config = queue.pop(0)
|
||||
if config['config_type'] == "group":
|
||||
@@ -494,33 +488,46 @@ class DashBoardHelper():
|
||||
elif config['config_type'] == "item":
|
||||
if config['path'] is None or config['path'] == "":
|
||||
continue
|
||||
|
||||
|
||||
path = config['path'].split('.')
|
||||
if len(path) == 0:
|
||||
continue
|
||||
|
||||
|
||||
if config['val_type'] == "bool":
|
||||
self.cc.put_by_dot_str(config['path'], config['value'])
|
||||
elif config['val_type'] == "string":
|
||||
self.cc.put_by_dot_str(config['path'], config['value'])
|
||||
self._write_config(
|
||||
namespace, config['path'], config['value'])
|
||||
elif config['val_type'] == "str":
|
||||
self._write_config(
|
||||
namespace, config['path'], config['value'])
|
||||
elif config['val_type'] == "int":
|
||||
try:
|
||||
self.cc.put_by_dot_str(config['path'], int(config['value']))
|
||||
self._write_config(
|
||||
namespace, config['path'], int(config['value']))
|
||||
except:
|
||||
raise ValueError(f"配置项 {config['name']} 的值必须是整数")
|
||||
elif config['val_type'] == "float":
|
||||
try:
|
||||
self.cc.put_by_dot_str(config['path'], float(config['value']))
|
||||
self._write_config(
|
||||
namespace, config['path'], float(config['value']))
|
||||
except:
|
||||
raise ValueError(f"配置项 {config['name']} 的值必须是浮点数")
|
||||
elif config['val_type'] == "list":
|
||||
if config['value'] is None:
|
||||
self.cc.put_by_dot_str(config['path'], [])
|
||||
self._write_config(namespace, config['path'], [])
|
||||
elif not isinstance(config['value'], list):
|
||||
raise ValueError(f"配置项 {config['name']} 的值必须是列表")
|
||||
self.cc.put_by_dot_str(config['path'], config['value'])
|
||||
self._write_config(
|
||||
namespace, config['path'], config['value'])
|
||||
else:
|
||||
raise NotImplementedError(f"未知或者未实现的的配置项类型:{config['val_type']}")
|
||||
|
||||
raise NotImplementedError(
|
||||
f"未知或者未实现的配置项类型:{config['val_type']}")
|
||||
|
||||
def _write_config(self, namespace: str, key: str, value):
|
||||
if namespace == "" or namespace.startswith("internal_"):
|
||||
# 机器人自带配置,存到 config.yaml
|
||||
self.cc.put_by_dot_str(key, value)
|
||||
else:
|
||||
update_config(namespace, key, value)
|
||||
|
||||
def run(self):
|
||||
self.dashboard.run()
|
||||
self.dashboard.run()
|
||||
|
||||
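The reworked save_config above walks the posted config tree (groups are expanded into a queue, items are written one by one), and _write_config then routes each item by namespace: an empty or internal_-prefixed namespace is written into the bot's own config with a dotted-key put, while any other namespace is handed to the plugin config API (update_config). CmdConfig.put_by_dot_str itself is not shown in this diff, so the following is only a minimal sketch of what such a dotted-key write typically looks like; the names here are illustrative, not the project's.

def put_by_dot_str(store: dict, dotted_key: str, value):
    # "openai.chatGPTConfigs.model" -> store["openai"]["chatGPTConfigs"]["model"] = value
    parts = dotted_key.split(".")
    node = store
    for part in parts[:-1]:
        node = node.setdefault(part, {})
    node[parts[-1]] = value

# e.g. an item posted by the dashboard as {"path": "limit.time", "val_type": "int", "value": "60"}
cfg = {}
put_by_dot_str(cfg, "limit.time", int("60"))
print(cfg)  # {'limit': {'time': 60}}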
@@ -1,42 +1,76 @@
|
||||
import util.plugin_util as putil
|
||||
import websockets
|
||||
import json
|
||||
import threading
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
from flask import Flask, request
|
||||
from flask.logging import default_handler
|
||||
from werkzeug.serving import make_server
|
||||
import datetime
|
||||
from util import general_utils as gu
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from cores.database.conn import dbConn
|
||||
from persist.session import dbConn
|
||||
from type.register import RegisteredPlugin
|
||||
from typing import List
|
||||
from util.cmd_config import CmdConfig
|
||||
import util.plugin_util as putil
|
||||
from util.updator import check_update, update_project, request_release_info, _reboot
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
@dataclass
|
||||
class DashBoardData():
|
||||
stats: dict
|
||||
configs: dict
|
||||
logs: dict
|
||||
plugins: list[dict]
|
||||
plugins: List[RegisteredPlugin]
|
||||
|
||||
|
||||
@dataclass
|
||||
class Response():
|
||||
status: str
|
||||
message: str
|
||||
data: dict
|
||||
|
||||
|
||||
|
||||
class AstrBotDashBoard():
|
||||
def __init__(self, dashboard_data: DashBoardData):
|
||||
self.dashboard_data = dashboard_data
|
||||
self.dashboard_be = Flask(__name__, static_folder="dist", static_url_path="/")
|
||||
log = logging.getLogger('werkzeug')
|
||||
log.setLevel(logging.ERROR)
|
||||
def __init__(self, global_object: 'gu.GlobalObject'):
|
||||
self.global_object = global_object
|
||||
self.loop = asyncio.get_event_loop()
|
||||
asyncio.set_event_loop(self.loop)
|
||||
self.dashboard_data: DashBoardData = global_object.dashboard_data
|
||||
self.dashboard_be = Flask(
|
||||
__name__, static_folder="dist", static_url_path="/")
|
||||
self.funcs = {}
|
||||
self.cc = CmdConfig()
|
||||
|
||||
|
||||
self.ws_clients = {} # remote_ip: ws
|
||||
# 启动 websocket 服务器
|
||||
self.ws_server = websockets.serve(self.__handle_msg, "0.0.0.0", 6186)
|
||||
|
||||
@self.dashboard_be.get("/")
|
||||
def index():
|
||||
# 返回页面
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.get("/config")
|
||||
def rt_config():
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.get("/logs")
|
||||
def rt_logs():
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.get("/extension")
|
||||
def rt_extension():
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.get("/dashboard/default")
|
||||
def rt_dashboard():
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.post("/api/authenticate")
|
||||
def authenticate():
|
||||
username = self.cc.get("dashboard_username", "")
|
||||
@@ -58,7 +92,7 @@ class AstrBotDashBoard():
|
||||
message="用户名或密码错误。",
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
|
||||
@self.dashboard_be.post("/api/change_password")
|
||||
def change_password():
|
||||
password = self.cc.get("dashboard_password", "")
|
||||
@@ -86,9 +120,11 @@ class AstrBotDashBoard():
|
||||
# last_24_platform = db_inst.get_last_24h_stat_platform()
|
||||
platforms = db_inst.get_platform_cnt_total()
|
||||
self.dashboard_data.stats["session"] = []
|
||||
self.dashboard_data.stats["session_total"] = db_inst.get_session_cnt_total()
|
||||
self.dashboard_data.stats["session_total"] = db_inst.get_session_cnt_total(
|
||||
)
|
||||
self.dashboard_data.stats["message"] = last_24_message
|
||||
self.dashboard_data.stats["message_total"] = db_inst.get_message_cnt_total()
|
||||
self.dashboard_data.stats["message_total"] = db_inst.get_message_cnt_total(
|
||||
)
|
||||
self.dashboard_data.stats["platform"] = platforms
|
||||
|
||||
return Response(
|
||||
@@ -96,15 +132,28 @@ class AstrBotDashBoard():
|
||||
message="",
|
||||
data=self.dashboard_data.stats
|
||||
).__dict__
|
||||
|
||||
|
||||
@self.dashboard_be.get("/api/configs")
|
||||
def get_configs():
|
||||
# 如果params中有namespace,则返回该namespace下的配置
|
||||
# 否则返回所有配置
|
||||
namespace = "" if "namespace" not in request.args else request.args["namespace"]
|
||||
conf = self._get_configs(namespace)
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=self.dashboard_data.configs
|
||||
data=conf
|
||||
).__dict__
|
||||
|
||||
|
||||
@self.dashboard_be.get("/api/config_outline")
|
||||
def get_config_outline():
|
||||
outline = self._generate_outline()
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=outline
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/configs")
|
||||
def post_configs():
|
||||
post_configs = request.json
|
||||
@@ -121,35 +170,18 @@ class AstrBotDashBoard():
|
||||
message=e.__str__(),
|
||||
data=self.dashboard_data.configs
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.get("/api/logs")
|
||||
def get_logs():
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=self.dashboard_data.logs
|
||||
).__dict__
|
||||
|
||||
|
||||
@self.dashboard_be.get("/api/extensions")
|
||||
def get_plugins():
|
||||
"""
|
||||
{
|
||||
"name": "GoodPlugins",
|
||||
"repo": "https://gitee.com/soulter/goodplugins",
|
||||
"author": "soulter",
|
||||
"desc": "一些好用的插件",
|
||||
"version": "1.0"
|
||||
}
|
||||
"""
|
||||
_plugin_resp = []
|
||||
for plugin in self.dashboard_data.plugins:
|
||||
_p = self.dashboard_data.plugins[plugin]
|
||||
_p = plugin.metadata
|
||||
_t = {
|
||||
"name": _p["info"]["name"],
|
||||
"repo": '' if "repo" not in _p["info"] else _p["info"]["repo"],
|
||||
"author": _p["info"]["author"],
|
||||
"desc": _p["info"]["desc"],
|
||||
"version": _p["info"]["version"]
|
||||
"name": _p.plugin_name,
|
||||
"repo": '' if _p.repo is None else _p.repo,
|
||||
"author": _p.author,
|
||||
"desc": _p.desc,
|
||||
"version": _p.version
|
||||
}
|
||||
_plugin_resp.append(_t)
|
||||
return Response(
|
||||
@@ -157,15 +189,15 @@ class AstrBotDashBoard():
|
||||
message="",
|
||||
data=_plugin_resp
|
||||
).__dict__
|
||||
|
||||
|
||||
@self.dashboard_be.post("/api/extensions/install")
|
||||
def install_plugin():
|
||||
post_data = request.json
|
||||
repo_url = post_data["url"]
|
||||
try:
|
||||
gu.log(f"正在安装插件 {repo_url}", tag="可视化面板")
|
||||
putil.install_plugin(repo_url, self.dashboard_data.plugins)
|
||||
gu.log(f"安装插件 {repo_url} 成功", tag="可视化面板")
|
||||
logger.info(f"正在安装插件 {repo_url}")
|
||||
putil.install_plugin(repo_url, global_object)
|
||||
logger.info(f"安装插件 {repo_url} 成功")
|
||||
return Response(
|
||||
status="success",
|
||||
message="安装成功~",
|
||||
@@ -177,15 +209,16 @@ class AstrBotDashBoard():
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
|
||||
@self.dashboard_be.post("/api/extensions/uninstall")
|
||||
def uninstall_plugin():
|
||||
post_data = request.json
|
||||
plugin_name = post_data["name"]
|
||||
try:
|
||||
gu.log(f"正在卸载插件 {plugin_name}", tag="可视化面板")
|
||||
putil.uninstall_plugin(plugin_name, self.dashboard_data.plugins)
|
||||
gu.log(f"卸载插件 {plugin_name} 成功", tag="可视化面板")
|
||||
logger.info(f"正在卸载插件 {plugin_name}")
|
||||
putil.uninstall_plugin(
|
||||
plugin_name, self.dashboard_data.plugins)
|
||||
logger.info(f"卸载插件 {plugin_name} 成功")
|
||||
return Response(
|
||||
status="success",
|
||||
message="卸载成功~",
|
||||
@@ -197,15 +230,15 @@ class AstrBotDashBoard():
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
|
||||
@self.dashboard_be.post("/api/extensions/update")
|
||||
def update_plugin():
|
||||
post_data = request.json
|
||||
plugin_name = post_data["name"]
|
||||
try:
|
||||
gu.log(f"正在更新插件 {plugin_name}", tag="可视化面板")
|
||||
putil.update_plugin(plugin_name, self.dashboard_data.plugins)
|
||||
gu.log(f"更新插件 {plugin_name} 成功", tag="可视化面板")
|
||||
logger.info(f"正在更新插件 {plugin_name}")
|
||||
putil.update_plugin(plugin_name, global_object)
|
||||
logger.info(f"更新插件 {plugin_name} 成功")
|
||||
return Response(
|
||||
status="success",
|
||||
message="更新成功~",
|
||||
@@ -217,17 +250,226 @@ class AstrBotDashBoard():
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
|
||||
@self.dashboard_be.post("/api/log")
|
||||
def log():
|
||||
for item in self.ws_clients:
|
||||
try:
|
||||
asyncio.run_coroutine_threadsafe(
|
||||
self.ws_clients[item].send(request.data.decode()), self.loop)
|
||||
except Exception as e:
|
||||
pass
|
||||
return 'ok'
|
||||
|
||||
@self.dashboard_be.get("/api/check_update")
|
||||
def get_update_info():
|
||||
try:
|
||||
ret = check_update()
|
||||
return Response(
|
||||
status="success",
|
||||
message=ret,
|
||||
data={
|
||||
"has_new_version": ret != "当前已经是最新版本。" # 先这样吧,累了=.=
|
||||
}
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/update_project")
|
||||
def update_project_api():
|
||||
version = request.json['version']
|
||||
if version == "" or version == "latest":
|
||||
latest = True
|
||||
version = ''
|
||||
else:
|
||||
latest = False
|
||||
try:
|
||||
update_project(request_release_info(latest),
|
||||
latest=latest, version=version)
|
||||
threading.Thread(target=self.shutdown_bot, args=(3,)).start()
|
||||
return Response(
|
||||
status="success",
|
||||
message="更新成功,机器人将在 3 秒内重启。",
|
||||
data=None
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.get("/api/llm/list")
|
||||
def llm_list():
|
||||
ret = []
|
||||
for llm in self.global_object.llms:
|
||||
ret.append(llm.llm_name)
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=ret
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.get("/api/llm")
|
||||
def llm():
|
||||
text = request.args["text"]
|
||||
llm = request.args["llm"]
|
||||
for llm_ in self.global_object.llms:
|
||||
if llm_.llm_name == llm:
|
||||
try:
|
||||
# ret = await llm_.llm_instance.text_chat(text)
|
||||
ret = asyncio.run_coroutine_threadsafe(
|
||||
llm_.llm_instance.text_chat(text), self.loop).result()
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=ret
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
return Response(
|
||||
status="error",
|
||||
message="LLM not found.",
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
def shutdown_bot(self, delay_s: int):
|
||||
time.sleep(delay_s)
|
||||
_reboot()
|
||||
|
||||
def _get_configs(self, namespace: str):
|
||||
if namespace == "":
|
||||
ret = [self.dashboard_data.configs['data'][4],
|
||||
self.dashboard_data.configs['data'][5],]
|
||||
elif namespace == "internal_platform_qq_official":
|
||||
ret = [self.dashboard_data.configs['data'][0],]
|
||||
elif namespace == "internal_platform_qq_gocq":
|
||||
ret = [self.dashboard_data.configs['data'][1],]
|
||||
elif namespace == "internal_platform_general": # 全局平台配置
|
||||
ret = [self.dashboard_data.configs['data'][2],]
|
||||
elif namespace == "internal_llm_openai_official":
|
||||
ret = [self.dashboard_data.configs['data'][3],]
|
||||
else:
|
||||
path = f"data/config/{namespace}.json"
|
||||
if not os.path.exists(path):
|
||||
return []
|
||||
with open(path, "r", encoding="utf-8-sig") as f:
|
||||
ret = [{
|
||||
"config_type": "group",
|
||||
"name": namespace + " 插件配置",
|
||||
"description": "",
|
||||
"body": list(json.load(f).values())
|
||||
},]
|
||||
return ret
|
||||
|
||||
def _generate_outline(self):
|
||||
'''
|
||||
生成配置大纲。目前分为 platform(消息平台配置) 和 llm(语言模型配置) 两大类。
|
||||
插件的info函数中如果带了plugin_type字段,则会被归类到对应的大纲中。目前仅支持 platform 和 llm 两种类型。
|
||||
'''
|
||||
outline = [
|
||||
{
|
||||
"type": "platform",
|
||||
"name": "配置通用消息平台",
|
||||
"body": [
|
||||
{
|
||||
"title": "通用",
|
||||
"desc": "通用平台配置",
|
||||
"namespace": "internal_platform_general",
|
||||
"tag": ""
|
||||
},
|
||||
{
|
||||
"title": "QQ_OFFICIAL",
|
||||
"desc": "QQ官方API。支持频道、群(需获得群权限)",
|
||||
"namespace": "internal_platform_qq_official",
|
||||
"tag": ""
|
||||
},
|
||||
{
|
||||
"title": "go-cqhttp",
|
||||
"desc": "第三方 QQ 协议实现。支持频道、群",
|
||||
"namespace": "internal_platform_qq_gocq",
|
||||
"tag": ""
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "llm",
|
||||
"name": "配置 LLM",
|
||||
"body": [
|
||||
{
|
||||
"title": "OpenAI Official",
|
||||
"desc": "也支持使用官方接口的中转服务",
|
||||
"namespace": "internal_llm_openai_official",
|
||||
"tag": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
for plugin in self.global_object.cached_plugins:
|
||||
for item in outline:
|
||||
if item['type'] == plugin.metadata.plugin_type:
|
||||
item['body'].append({
|
||||
"title": plugin.metadata.plugin_name,
|
||||
"desc": plugin.metadata.desc,
|
||||
"namespace": plugin.metadata.plugin_name,
|
||||
"tag": plugin.metadata.plugin_name
|
||||
})
|
||||
return outline
|
||||
|
||||
def register(self, name: str):
|
||||
def decorator(func):
|
||||
self.funcs[name] = func
|
||||
return func
|
||||
return decorator
|
||||
|
||||
async def get_log_history(self):
|
||||
try:
|
||||
with open("logs/astrbot-core/astrbot-core.log", "r", encoding="utf-8") as f:
|
||||
return f.readlines()[-100:]
|
||||
except Exception as e:
|
||||
logger.warning(f"读取日志历史失败: {e.__str__()}")
|
||||
return []
|
||||
|
||||
async def __handle_msg(self, websocket, path):
|
||||
address = websocket.remote_address
|
||||
self.ws_clients[address] = websocket
|
||||
data = await self.get_log_history()
|
||||
data = ''.join(data).replace('\n', '\r\n')
|
||||
await websocket.send(data)
|
||||
while True:
|
||||
try:
|
||||
msg = await websocket.recv()
|
||||
except websockets.exceptions.ConnectionClosedError:
|
||||
# logger.info(f"和 {address} 的 websocket 连接已断开")
|
||||
del self.ws_clients[address]
|
||||
break
|
||||
except Exception as e:
|
||||
# logger.info(f"和 {path} 的 websocket 连接发生了错误: {e.__str__()}")
|
||||
del self.ws_clients[address]
|
||||
break
|
||||
|
||||
def run_ws_server(self, loop):
|
||||
asyncio.set_event_loop(loop)
|
||||
loop.run_until_complete(self.ws_server)
|
||||
loop.run_forever()
|
||||
|
||||
def run(self):
|
||||
threading.Thread(target=self.run_ws_server, args=(self.loop,)).start()
|
||||
logger.info("已启动 websocket 服务器")
|
||||
ip_address = gu.get_local_ip_addresses()
|
||||
ip_str = f"http://{ip_address}:6185\n\thttp://localhost:6185"
|
||||
gu.log(f"\n\n==================\n您可以访问:\n\n\t{ip_str}\n\n来登录可视化面板。\n注意: 所有配置项现已全量迁移至 cmd_config.json 文件下。您可以登录可视化面板在线修改配置。\n==================\n\n", tag="可视化面板")
|
||||
# self.dashboard_be.run(host="0.0.0.0", port=6185)
|
||||
http_server = make_server('0.0.0.0', 6185, self.dashboard_be)
|
||||
logger.info(
|
||||
f"\n==================\n您可访问:\n\n\t{ip_str}\n\n来登录可视化面板,默认账号密码为空。\n注意: 所有配置项现已全量迁移至 cmd_config.json 文件下,可登录可视化面板在线修改配置。\n==================\n")
|
||||
|
||||
http_server = make_server(
|
||||
'0.0.0.0', 6185, self.dashboard_be, threaded=True)
|
||||
http_server.serve_forever()
|
||||
|
||||
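The dashboard backend above runs Flask and a websockets server side by side: run_ws_server drives the websocket event loop in a dedicated thread, and the Flask /api/log handler forwards every posted log line to all connected clients with asyncio.run_coroutine_threadsafe, since the HTTP handler runs outside that loop. Below is a stripped-down sketch of this cross-thread broadcast, assuming a recent websockets release whose handlers take a single connection argument; the names (on_connect, push_log, clients) are illustrative rather than the project's.

import asyncio
import threading
import websockets
from flask import Flask, request

app = Flask(__name__)
ws_loop = asyncio.new_event_loop()
clients = set()

async def on_connect(websocket):
    # register the client and keep the connection open until it drops
    clients.add(websocket)
    try:
        async for _ in websocket:
            pass  # the dashboard only pushes; incoming messages are ignored
    finally:
        clients.discard(websocket)

def run_ws_server():
    asyncio.set_event_loop(ws_loop)
    ws_loop.run_until_complete(websockets.serve(on_connect, "0.0.0.0", 6186))
    ws_loop.run_forever()

@app.post("/api/log")
def push_log():
    line = request.data.decode()
    for ws in list(clients):
        # the Flask worker thread is not the websocket loop, so schedule the send there
        asyncio.run_coroutine_threadsafe(ws.send(line), ws_loop)
    return "ok"

if __name__ == "__main__":
    threading.Thread(target=run_ws_server, daemon=True).start()
    app.run(host="0.0.0.0", port=6185)

Any websocket client pointed at port 6186 then receives the log stream while producers POST lines to /api/log on port 6185, matching the two ports used in the diff.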
@@ -1,5 +0,0 @@
|
||||
# helloworld
|
||||
|
||||
QQChannelChatGPT项目的测试插件
|
||||
|
||||
A test plugin for QQChannelChatGPT plugin feature
|
||||
@@ -1,25 +1,38 @@
|
||||
import os
|
||||
import shutil
|
||||
from nakuru.entities.components import *
|
||||
from nakuru import (
|
||||
GroupMessage,
|
||||
FriendMessage
|
||||
)
|
||||
from botpy.message import Message, DirectMessage
|
||||
from model.platform.qq import QQ
|
||||
from cores.qqbot.global_object import (
|
||||
AstrMessageEvent,
|
||||
CommandResult
|
||||
)
|
||||
flag_not_support = False
|
||||
try:
|
||||
from util.plugin_dev.api.v1.config import *
|
||||
from util.plugin_dev.api.v1.bot import (
|
||||
AstrMessageEvent,
|
||||
CommandResult,
|
||||
)
|
||||
except ImportError:
|
||||
flag_not_support = True
|
||||
print("导入接口失败。请升级到 AstrBot 最新版本。")
|
||||
|
||||
|
||||
'''
|
||||
注意改插件名噢!格式:XXXPlugin 或 Main
|
||||
小提示:把此模板仓库 fork 之后 clone 到机器人文件夹下的 addons/plugins/ 目录下,然后用 Pycharm/VSC 等工具打开可获更棒的编程体验(自动补全等)
|
||||
'''
|
||||
|
||||
|
||||
class HelloWorldPlugin:
|
||||
"""
|
||||
初始化函数, 可以选择直接pass
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
print("hello, world!")
|
||||
# 复制旧配置文件到 data 目录下。
|
||||
if os.path.exists("keyword.json"):
|
||||
shutil.move("keyword.json", "data/keyword.json")
|
||||
self.keywords = {}
|
||||
if os.path.exists("data/keyword.json"):
|
||||
self.keywords = json.load(open("data/keyword.json", "r"))
|
||||
else:
|
||||
self.save_keyword()
|
||||
|
||||
"""
|
||||
机器人程序会调用此函数。
|
||||
@@ -27,22 +40,110 @@ class HelloWorldPlugin:
|
||||
Tuple: None 或者长度为 3 的元组。如果不响应, 返回 None; 如果响应, 第 1 个参数为指令是否调用成功, 第 2 个参数为返回的消息链列表, 第 3 个参数为指令名称
|
||||
例子:一个名为"yuanshen"的插件;当接收到消息为“原神 可莉”, 如果不想要处理此消息,则返回False, None;如果想要处理,但是执行失败了,返回True, tuple([False, "请求失败。", "yuanshen"]) ;执行成功了,返回True, tuple([True, "结果文本", "yuanshen"])
|
||||
"""
|
||||
|
||||
def run(self, ame: AstrMessageEvent):
|
||||
if ame.message_str == "helloworld":
|
||||
# return True, tuple([True, "Hello World!!", "helloworld"])
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain=[Plain("Hello World!!")],
|
||||
command_name="helloworld"
|
||||
)
|
||||
if ame.message_str.startswith("/keyword") or ame.message_str.startswith("keyword"):
|
||||
return self.handle_keyword_command(ame)
|
||||
|
||||
ret = self.check_keyword(ame.message_str)
|
||||
if ret:
|
||||
return ret
|
||||
|
||||
return CommandResult(
|
||||
hit=False,
|
||||
success=False,
|
||||
message_chain=None,
|
||||
command_name=None
|
||||
)
|
||||
|
||||
def handle_keyword_command(self, ame: AstrMessageEvent):
|
||||
l = ame.message_str.split(" ")
|
||||
|
||||
# 获取图片
|
||||
image_url = ""
|
||||
for comp in ame.message_obj.message:
|
||||
if isinstance(comp, Image) and image_url == "":
|
||||
if comp.url is None:
|
||||
image_url = comp.file
|
||||
else:
|
||||
image_url = comp.url
|
||||
|
||||
command_result = CommandResult(
|
||||
hit=True,
|
||||
success=False,
|
||||
message_chain=None,
|
||||
command_name="keyword"
|
||||
)
|
||||
if len(l) == 1 or (len(l) == 2 and image_url == ""):
|
||||
ret = """【设置关键词回复】
|
||||
示例:
|
||||
1. keyword <触发词> <回复词>
|
||||
keyword hi 你好
|
||||
发送 hi 回复你好
|
||||
* 回复词支持图片
|
||||
|
||||
2. keyword d <触发词>
|
||||
keyword d hi
|
||||
删除 hi 触发词产生的回复"""
|
||||
command_result.success = True
|
||||
command_result.message_chain = [Plain(ret)]
|
||||
return command_result
|
||||
elif len(l) == 3 and l[1] == "d":
|
||||
if l[2] not in self.keywords:
|
||||
command_result.message_chain = [Plain(f"关键词 {l[2]} 不存在")]
|
||||
return command_result
|
||||
self.keywords.pop(l[2])
|
||||
self.save_keyword()
|
||||
command_result.success = True
|
||||
command_result.message_chain = [Plain("删除成功")]
|
||||
return command_result
|
||||
else:
|
||||
return CommandResult(
|
||||
hit=False,
|
||||
success=False,
|
||||
message_chain=None,
|
||||
command_name=None
|
||||
)
|
||||
self.keywords[l[1]] = {
|
||||
"plain_text": " ".join(l[2:]),
|
||||
"image_url": image_url
|
||||
}
|
||||
self.save_keyword()
|
||||
command_result.success = True
|
||||
command_result.message_chain = [Plain("设置成功")]
|
||||
return command_result
|
||||
|
||||
def save_keyword(self):
|
||||
json.dump(self.keywords, open(
|
||||
"data/keyword.json", "w"), ensure_ascii=False)
|
||||
|
||||
def check_keyword(self, message_str: str):
|
||||
for k in self.keywords:
|
||||
if message_str == k:
|
||||
plain_text = ""
|
||||
if 'plain_text' in self.keywords[k]:
|
||||
plain_text = self.keywords[k]['plain_text']
|
||||
else:
|
||||
plain_text = self.keywords[k]
|
||||
image_url = ""
|
||||
if 'image_url' in self.keywords[k]:
|
||||
image_url = self.keywords[k]['image_url']
|
||||
if image_url != "":
|
||||
res = [Plain(plain_text), Image.fromURL(image_url)]
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain=res,
|
||||
command_name="keyword"
|
||||
)
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain=[Plain(plain_text)],
|
||||
command_name="keyword"
|
||||
)
|
||||
|
||||
"""
|
||||
插件元信息。
|
||||
当用户输入 plugin v 插件名称 时,会调用此函数,返回帮助信息。
|
||||
@@ -55,12 +156,13 @@ class HelloWorldPlugin:
|
||||
"repo": str, # 插件仓库地址 [ 可选 ]
|
||||
"homepage": str, # 插件主页 [ 可选 ]
|
||||
}
|
||||
"""
|
||||
"""
|
||||
|
||||
def info(self):
|
||||
return {
|
||||
"name": "helloworld",
|
||||
"desc": "测试插件",
|
||||
"help": "测试插件, 回复 helloworld 即可触发",
|
||||
"version": "v1.2",
|
||||
"desc": "这是 AstrBot 的默认插件,支持关键词回复。",
|
||||
"help": "输入 /keyword 查看关键词回复帮助。",
|
||||
"version": "v1.3",
|
||||
"author": "Soulter"
|
||||
}
|
||||
}
|
||||
|
||||
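The reworked helloworld plugin stores keyword replies as a dict keyed by trigger word, each value holding a plain_text reply and an optional image_url, persisted to data/keyword.json (older entries may still be bare strings, which check_keyword tolerates). A minimal, self-contained sketch of that storage shape follows; it drops the nakuru Plain/Image message components and returns plain strings, so it illustrates the data format rather than the plugin itself.

import json
import os

KEYWORD_FILE = "data/keyword.json"

def load_keywords() -> dict:
    if os.path.exists(KEYWORD_FILE):
        with open(KEYWORD_FILE, "r", encoding="utf-8") as f:
            return json.load(f)
    return {}

def set_keyword(trigger: str, reply: str, image_url: str = ""):
    keywords = load_keywords()
    keywords[trigger] = {"plain_text": reply, "image_url": image_url}
    os.makedirs("data", exist_ok=True)
    with open(KEYWORD_FILE, "w", encoding="utf-8") as f:
        json.dump(keywords, f, ensure_ascii=False)

def answer(message: str):
    entry = load_keywords().get(message)
    if entry is None:
        return None
    if isinstance(entry, str):
        # legacy format: the stored value was just the reply text
        return entry, ""
    return entry.get("plain_text", ""), entry.get("image_url", "")

set_keyword("hi", "你好")
print(answer("hi"))  # ('你好', '')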
474
astrbot/core.py
Normal file
@@ -0,0 +1,474 @@
|
||||
import re
|
||||
import threading
|
||||
import asyncio
|
||||
import time
|
||||
import util.unfit_words as uw
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
import util.agent.web_searcher as web_searcher
|
||||
import util.plugin_util as putil
|
||||
|
||||
from nakuru.entities.components import Plain, At, Image
|
||||
|
||||
from addons.baidu_aip_judge import BaiduJudge
|
||||
from model.provider.provider import Provider
|
||||
from model.command.command import Command
|
||||
from util import general_utils as gu
|
||||
from util.general_utils import upload, run_monitor
|
||||
from util.cmd_config import CmdConfig as cc
|
||||
from util.cmd_config import init_astrbot_config_items
|
||||
from type.types import GlobalObject
|
||||
from type.register import *
|
||||
from type.message import AstrBotMessage
|
||||
from type.config import *
|
||||
from addons.dashboard.helper import DashBoardHelper
|
||||
from addons.dashboard.server import DashBoardData
|
||||
from persist.session import dbConn
|
||||
from model.platform._message_result import MessageResult
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
# 用户发言频率
|
||||
user_frequency = {}
|
||||
# 时间默认值
|
||||
frequency_time = 60
|
||||
# 计数默认值
|
||||
frequency_count = 10
|
||||
|
||||
# 语言模型
|
||||
OPENAI_OFFICIAL = 'openai_official'
|
||||
NONE_LLM = 'none_llm'
|
||||
chosen_provider = None
|
||||
# 语言模型对象
|
||||
llm_instance: dict[str, Provider] = {}
|
||||
llm_command_instance: dict[str, Command] = {}
|
||||
llm_wake_prefix = ""
|
||||
|
||||
# 百度内容审核实例
|
||||
baidu_judge = None
|
||||
|
||||
# CLI
|
||||
PLATFORM_CLI = 'cli'
|
||||
|
||||
# 全局对象
|
||||
_global_object: GlobalObject = None
|
||||
|
||||
|
||||
def privider_chooser(cfg):
|
||||
l = []
|
||||
if 'openai' in cfg and len(cfg['openai']['key']) > 0 and cfg['openai']['key'][0] is not None:
|
||||
l.append('openai_official')
|
||||
return l
|
||||
|
||||
def init():
|
||||
'''
|
||||
初始化机器人
|
||||
'''
|
||||
global llm_instance, llm_command_instance
|
||||
global baidu_judge, chosen_provider
|
||||
global frequency_count, frequency_time
|
||||
global _global_object
|
||||
|
||||
init_astrbot_config_items()
|
||||
cfg = cc.get_all()
|
||||
|
||||
_event_loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(_event_loop)
|
||||
|
||||
# 初始化 global_object
|
||||
_global_object = GlobalObject()
|
||||
_global_object.version = VERSION
|
||||
_global_object.base_config = cfg
|
||||
_global_object.logger = logger
|
||||
logger.info("AstrBot v" + VERSION)
|
||||
|
||||
if 'reply_prefix' in cfg:
|
||||
# 适配旧版配置
|
||||
if isinstance(cfg['reply_prefix'], dict):
|
||||
_global_object.reply_prefix = ""
|
||||
cfg['reply_prefix'] = ""
|
||||
cc.put("reply_prefix", "")
|
||||
else:
|
||||
_global_object.reply_prefix = cfg['reply_prefix']
|
||||
|
||||
default_personality_str = cc.get("default_personality_str", "")
|
||||
if default_personality_str == "":
|
||||
_global_object.default_personality = None
|
||||
else:
|
||||
_global_object.default_personality = {
|
||||
"name": "default",
|
||||
"prompt": default_personality_str,
|
||||
}
|
||||
|
||||
# 语言模型提供商
|
||||
logger.info("正在载入语言模型...")
|
||||
prov = privider_chooser(cfg)
|
||||
if OPENAI_OFFICIAL in prov:
|
||||
logger.info("初始化:OpenAI官方")
|
||||
if cfg['openai']['key'] is not None and cfg['openai']['key'] != [None]:
|
||||
from model.provider.openai_official import ProviderOpenAIOfficial
|
||||
from model.command.openai_official import CommandOpenAIOfficial
|
||||
llm_instance[OPENAI_OFFICIAL] = ProviderOpenAIOfficial(
|
||||
cfg['openai'])
|
||||
llm_command_instance[OPENAI_OFFICIAL] = CommandOpenAIOfficial(
|
||||
llm_instance[OPENAI_OFFICIAL], _global_object)
|
||||
_global_object.llms.append(RegisteredLLM(
|
||||
llm_name=OPENAI_OFFICIAL, llm_instance=llm_instance[OPENAI_OFFICIAL], origin="internal"))
|
||||
chosen_provider = OPENAI_OFFICIAL
|
||||
|
||||
instance = llm_instance[OPENAI_OFFICIAL]
|
||||
assert isinstance(instance, ProviderOpenAIOfficial)
|
||||
instance.DEFAULT_PERSONALITY = _global_object.default_personality
|
||||
instance.curr_personality = instance.DEFAULT_PERSONALITY
|
||||
|
||||
# 检查provider设置偏好
|
||||
p = cc.get("chosen_provider", None)
|
||||
if p is not None and p in llm_instance:
|
||||
chosen_provider = p
|
||||
|
||||
# 百度内容审核
|
||||
if 'baidu_aip' in cfg and 'enable' in cfg['baidu_aip'] and cfg['baidu_aip']['enable']:
|
||||
try:
|
||||
baidu_judge = BaiduJudge(cfg['baidu_aip'])
|
||||
logger.info("百度内容审核初始化成功")
|
||||
except BaseException as e:
|
||||
logger.info("百度内容审核初始化失败")
|
||||
|
||||
threading.Thread(target=upload, args=(
|
||||
_global_object, ), daemon=True).start()
|
||||
|
||||
# 得到发言频率配置
|
||||
if 'limit' in cfg:
|
||||
if 'count' in cfg['limit']:
|
||||
frequency_count = cfg['limit']['count']
|
||||
if 'time' in cfg['limit']:
|
||||
frequency_time = cfg['limit']['time']
|
||||
|
||||
try:
|
||||
if 'uniqueSessionMode' in cfg and cfg['uniqueSessionMode']:
|
||||
_global_object.unique_session = True
|
||||
else:
|
||||
_global_object.unique_session = False
|
||||
except BaseException as e:
|
||||
logger.info("独立会话配置错误: "+str(e))
|
||||
|
||||
nick_qq = cc.get("nick_qq", None)
|
||||
if nick_qq == None:
|
||||
nick_qq = ("ai", "!", "!")
|
||||
if isinstance(nick_qq, str):
|
||||
nick_qq = (nick_qq,)
|
||||
if isinstance(nick_qq, list):
|
||||
nick_qq = tuple(nick_qq)
|
||||
_global_object.nick = nick_qq
|
||||
|
||||
# 语言模型唤醒词
|
||||
global llm_wake_prefix
|
||||
llm_wake_prefix = cc.get("llm_wake_prefix", "")
|
||||
|
||||
logger.info("正在载入插件...")
|
||||
# 加载插件
|
||||
_command = Command(None, _global_object)
|
||||
ok, err = putil.plugin_reload(_global_object)
|
||||
if ok:
|
||||
logger.info(
|
||||
f"成功载入 {len(_global_object.cached_plugins)} 个插件")
|
||||
else:
|
||||
logger.info(err)
|
||||
|
||||
if chosen_provider is None:
|
||||
llm_command_instance[NONE_LLM] = _command
|
||||
chosen_provider = NONE_LLM
|
||||
|
||||
logger.info("正在载入机器人消息平台")
|
||||
# logger.info("提示:需要添加管理员 ID 才能使用 update/plugin 等指令),可在可视化面板添加。(如已添加可忽略)")
|
||||
platform_str = ""
|
||||
# GOCQ
|
||||
if 'gocqbot' in cfg and cfg['gocqbot']['enable']:
|
||||
logger.info("启用 QQ_GOCQ 机器人消息平台")
|
||||
threading.Thread(target=run_gocq_bot, args=(
|
||||
cfg, _global_object), daemon=True).start()
|
||||
platform_str += "QQ_GOCQ,"
|
||||
|
||||
# QQ频道
|
||||
if 'qqbot' in cfg and cfg['qqbot']['enable'] and cfg['qqbot']['appid'] != None:
|
||||
logger.info("启用 QQ_OFFICIAL 机器人消息平台")
|
||||
threading.Thread(target=run_qqchan_bot, args=(
|
||||
cfg, _global_object), daemon=True).start()
|
||||
platform_str += "QQ_OFFICIAL,"
|
||||
|
||||
# 初始化dashboard
|
||||
_global_object.dashboard_data = DashBoardData(
|
||||
stats={},
|
||||
configs={},
|
||||
logs={},
|
||||
plugins=_global_object.cached_plugins,
|
||||
)
|
||||
dashboard_helper = DashBoardHelper(_global_object, config=cc.get_all())
|
||||
dashboard_thread = threading.Thread(
|
||||
target=dashboard_helper.run, daemon=True)
|
||||
dashboard_thread.start()
|
||||
|
||||
# 运行 monitor
|
||||
threading.Thread(target=run_monitor, args=(
|
||||
_global_object,), daemon=True).start()
|
||||
|
||||
logger.info(
|
||||
"如果有任何问题, 请在 https://github.com/Soulter/AstrBot 上提交 issue 或加群 322154837。")
|
||||
logger.info("请给 https://github.com/Soulter/AstrBot 点个 star。")
|
||||
if platform_str == '':
|
||||
platform_str = "(未启动任何平台,请前往面板添加)"
|
||||
logger.info(f"🎉 项目启动完成")
|
||||
|
||||
dashboard_thread.join()
|
||||
|
||||
|
||||
'''
|
||||
运行 QQ_OFFICIAL 机器人
|
||||
'''
|
||||
|
||||
|
||||
def run_qqchan_bot(cfg: dict, global_object: GlobalObject):
|
||||
try:
|
||||
from model.platform.qq_official import QQOfficial
|
||||
qqchannel_bot = QQOfficial(
|
||||
cfg=cfg, message_handler=oper_msg, global_object=global_object)
|
||||
global_object.platforms.append(RegisteredPlatform(
|
||||
platform_name="qqchan", platform_instance=qqchannel_bot, origin="internal"))
|
||||
qqchannel_bot.run()
|
||||
except BaseException as e:
|
||||
logger.error("启动 QQ 频道机器人时出现错误, 原因如下: " + str(e))
|
||||
logger.error(r"如果您是初次启动,请前往可视化面板填写配置。详情请看:https://astrbot.soulter.top/center/。")
|
||||
|
||||
|
||||
'''
|
||||
运行 QQ_GOCQ 机器人
|
||||
'''
|
||||
|
||||
|
||||
def run_gocq_bot(cfg: dict, _global_object: GlobalObject):
|
||||
from model.platform.qq_gocq import QQGOCQ
|
||||
|
||||
noticed = False
|
||||
host = cc.get("gocq_host", "127.0.0.1")
|
||||
port = cc.get("gocq_websocket_port", 6700)
|
||||
http_port = cc.get("gocq_http_port", 5700)
|
||||
logger.info(
|
||||
f"正在检查连接...host: {host}, ws port: {port}, http port: {http_port}")
|
||||
while True:
|
||||
if not gu.port_checker(port=port, host=host) or not gu.port_checker(port=http_port, host=host):
|
||||
if not noticed:
|
||||
noticed = True
|
||||
logger.warning(
|
||||
f"连接到{host}:{port}(或{http_port})失败。程序会每隔 5s 自动重试。")
|
||||
time.sleep(5)
|
||||
else:
|
||||
logger.info("已连接到 gocq。")
|
||||
break
|
||||
try:
|
||||
qq_gocq = QQGOCQ(cfg=cfg, message_handler=oper_msg,
|
||||
global_object=_global_object)
|
||||
_global_object.platforms.append(RegisteredPlatform(
|
||||
platform_name="gocq", platform_instance=qq_gocq, origin="internal"))
|
||||
qq_gocq.run()
|
||||
except BaseException as e:
|
||||
input("启动QQ机器人出现错误"+str(e))
|
||||
|
||||
|
||||
'''
|
||||
检查发言频率
|
||||
'''
|
||||
|
||||
|
||||
def check_frequency(id) -> bool:
|
||||
ts = int(time.time())
|
||||
if id in user_frequency:
|
||||
if ts-user_frequency[id]['time'] > frequency_time:
|
||||
user_frequency[id]['time'] = ts
|
||||
user_frequency[id]['count'] = 1
|
||||
return True
|
||||
else:
|
||||
if user_frequency[id]['count'] >= frequency_count:
|
||||
return False
|
||||
else:
|
||||
user_frequency[id]['count'] += 1
|
||||
return True
|
||||
else:
|
||||
t = {'time': ts, 'count': 1}
|
||||
user_frequency[id] = t
|
||||
return True
|
||||
|
||||
|
||||
async def record_message(platform: str, session_id: str):
|
||||
# TODO: 这里会非常吃资源。然而 sqlite3 不支持多线程,所以暂时这样写。
|
||||
curr_ts = int(time.time())
|
||||
db_inst = dbConn()
|
||||
db_inst.increment_stat_session(platform, session_id, 1)
|
||||
db_inst.increment_stat_message(curr_ts, 1)
|
||||
db_inst.increment_stat_platform(curr_ts, platform, 1)
|
||||
|
||||
|
||||
async def oper_msg(message: AstrBotMessage,
|
||||
session_id: str,
|
||||
role: str = 'member',
|
||||
platform: str = None,
|
||||
) -> MessageResult:
|
||||
"""
|
||||
处理消息。
|
||||
message: 消息对象
|
||||
session_id: 该消息源的唯一识别号
|
||||
role: member | admin
|
||||
platform: str 所注册的平台的名称。如果没有注册,将抛出一个异常。
|
||||
"""
|
||||
global chosen_provider, _global_object
|
||||
message_str = ''
|
||||
session_id = session_id
|
||||
role = role
|
||||
hit = False # 是否命中指令
|
||||
command_result = () # 调用指令返回的结果
|
||||
|
||||
# 获取平台实例
|
||||
reg_platform: RegisteredPlatform = None
|
||||
for p in _global_object.platforms:
|
||||
if p.platform_name == platform:
|
||||
reg_platform = p
|
||||
break
|
||||
if not reg_platform:
|
||||
raise Exception(f"未找到平台 {platform} 的实例。")
|
||||
|
||||
# 统计数据,如频道消息量
|
||||
await record_message(platform, session_id)
|
||||
|
||||
for i in message.message:
|
||||
if isinstance(i, Plain):
|
||||
message_str += i.text.strip()
|
||||
if message_str == "":
|
||||
return MessageResult("Hi~")
|
||||
|
||||
# 检查发言频率
|
||||
if not check_frequency(message.sender.user_id):
|
||||
return MessageResult(f'你的发言超过频率限制(╯▔皿▔)╯。\n管理员设置{frequency_time}秒内只能提问{frequency_count}次。')
|
||||
|
||||
# 检查是否是更换语言模型的请求
|
||||
temp_switch = ""
|
||||
if message_str.startswith('/gpt'):
|
||||
target = chosen_provider
|
||||
if message_str.startswith('/gpt'):
|
||||
target = OPENAI_OFFICIAL
|
||||
l = message_str.split(' ')
|
||||
if len(l) > 1 and l[1] != "":
|
||||
# 临时对话模式,先记录下之前的语言模型,回答完毕后再切回
|
||||
temp_switch = chosen_provider
|
||||
chosen_provider = target
|
||||
message_str = l[1]
|
||||
else:
|
||||
chosen_provider = target
|
||||
cc.put("chosen_provider", chosen_provider)
|
||||
return MessageResult(f"已切换至【{chosen_provider}】")
|
||||
|
||||
llm_result_str = ""
|
||||
|
||||
# check commands and plugins
|
||||
message_str_no_wake_prefix = message_str
|
||||
for wake_prefix in _global_object.nick: # nick: tuple
|
||||
if message_str.startswith(wake_prefix):
|
||||
message_str_no_wake_prefix = message_str.removeprefix(wake_prefix)
|
||||
break
|
||||
hit, command_result = await llm_command_instance[chosen_provider].check_command(
|
||||
message_str_no_wake_prefix,
|
||||
session_id,
|
||||
role,
|
||||
reg_platform,
|
||||
message,
|
||||
)
|
||||
|
||||
# 没触发指令
|
||||
if not hit:
|
||||
# 关键词拦截
|
||||
for i in uw.unfit_words_q:
|
||||
matches = re.match(i, message_str.strip(), re.I | re.M)
|
||||
if matches:
|
||||
return MessageResult(f"你的提问得到的回复未通过【默认关键词拦截】服务, 不予回复。")
|
||||
if baidu_judge != None:
|
||||
check, msg = await asyncio.to_thread(baidu_judge.judge, message_str)
|
||||
if not check:
|
||||
return MessageResult(f"你的提问得到的回复未通过【百度AI内容审核】服务, 不予回复。\n\n{msg}")
|
||||
if chosen_provider == NONE_LLM:
|
||||
logger.info("一条消息由于 Bot 未启动任何语言模型并且未触发指令而将被忽略。")
|
||||
return
|
||||
try:
|
||||
if llm_wake_prefix != "" and not message_str.startswith(llm_wake_prefix):
|
||||
return
|
||||
# check image url
|
||||
image_url = None
|
||||
for comp in message.message:
|
||||
if isinstance(comp, Image):
|
||||
if comp.url is None:
|
||||
image_url = comp.file
|
||||
break
|
||||
else:
|
||||
image_url = comp.url
|
||||
break
|
||||
# web search keyword
|
||||
web_sch_flag = False
|
||||
if message_str.startswith("ws ") and message_str != "ws ":
|
||||
message_str = message_str[3:]
|
||||
web_sch_flag = True
|
||||
else:
|
||||
message_str += " " + cc.get("llm_env_prompt", "")
|
||||
if chosen_provider == OPENAI_OFFICIAL:
|
||||
if _global_object.web_search or web_sch_flag:
|
||||
official_fc = chosen_provider == OPENAI_OFFICIAL
|
||||
llm_result_str = await web_searcher.web_search(message_str, llm_instance[chosen_provider], session_id, official_fc)
|
||||
else:
|
||||
llm_result_str = await llm_instance[chosen_provider].text_chat(message_str, session_id, image_url)
|
||||
|
||||
llm_result_str = _global_object.reply_prefix + llm_result_str
|
||||
except BaseException as e:
|
||||
logger.error(f"调用异常:{traceback.format_exc()}")
|
||||
return MessageResult(f"调用异常。详细原因:{str(e)}")
|
||||
|
||||
# 切换回原来的语言模型
|
||||
if temp_switch != "":
|
||||
chosen_provider = temp_switch
|
||||
|
||||
if hit:
|
||||
# 有指令或者插件触发
|
||||
# command_result 是一个元组:(指令调用是否成功, 指令返回的文本结果, 指令类型)
|
||||
if command_result == None:
|
||||
return
|
||||
command = command_result[2]
|
||||
|
||||
if not command_result[0]:
|
||||
return MessageResult(f"指令调用错误: \n{str(command_result[1])}")
|
||||
|
||||
# 画图指令
|
||||
if command == 'draw':
|
||||
# 保存到本地
|
||||
path = await gu.download_image_by_url(command_result[1])
|
||||
return MessageResult([Image.fromFileSystem(path)])
|
||||
# 其他指令
|
||||
else:
|
||||
try:
|
||||
return MessageResult(command_result[1])
|
||||
except BaseException as e:
|
||||
return MessageResult(f"回复消息出错: {str(e)}")
|
||||
return
|
||||
|
||||
# 敏感过滤
|
||||
# 过滤不合适的词
|
||||
for i in uw.unfit_words:
|
||||
llm_result_str = re.sub(i, "***", llm_result_str)
|
||||
# 百度内容审核服务二次审核
|
||||
if baidu_judge != None:
|
||||
check, msg = await asyncio.to_thread(baidu_judge.judge, llm_result_str)
|
||||
if not check:
|
||||
return MessageResult(f"你的提问得到的回复【百度内容审核】未通过,不予回复。\n\n{msg}")
|
||||
# 发送信息
|
||||
try:
|
||||
return MessageResult(llm_result_str)
|
||||
except BaseException as e:
|
||||
logger.info("回复消息错误: \n"+str(e))
|
||||
@@ -1,23 +0,0 @@
'''
监测机器性能
- Bot 内存使用量
- CPU 占用率
'''

import psutil
from cores.qqbot.global_object import GlobalObject
import time


def run_monitor(global_object: GlobalObject):
    '''运行监测'''
    start_time = time.time()
    while True:
        stat = global_object.dashboard_data.stats
        # 程序占用的内存大小
        mem = psutil.Process().memory_info().rss / 1024 / 1024  # MB
        stat['sys_perf'] = {
            'memory': mem,
            'cpu': psutil.cpu_percent()
        }
        stat['sys_start_time'] = start_time
        time.sleep(30)
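A quick note on the two psutil calls polled above: `memory_info().rss` is the process resident set size in bytes (hence the division down to MB) and `cpu_percent()` is the system-wide CPU percentage since the previous call. A minimal standalone check, with illustrative output values:

```python
import psutil

rss_mb = psutil.Process().memory_info().rss / 1024 / 1024  # resident set size, MB
cpu = psutil.cpu_percent(interval=1)                        # system-wide CPU % over 1 s
print(f"memory: {rss_mb:.1f} MB, cpu: {cpu}%")
```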
@@ -1,937 +0,0 @@
|
||||
import botpy
|
||||
from botpy.message import Message, DirectMessage
|
||||
import re
|
||||
import json
|
||||
import threading
|
||||
import asyncio
|
||||
import time
|
||||
import requests
|
||||
import util.unfit_words as uw
|
||||
import os
|
||||
import sys
|
||||
from cores.qqbot.personality import personalities
|
||||
from addons.baidu_aip_judge import BaiduJudge
|
||||
from model.platform.qqchan import QQChan, NakuruGuildMember, NakuruGuildMessage
|
||||
from model.platform.qq import QQ
|
||||
from model.platform.qqgroup import (
|
||||
UnofficialQQBotSDK,
|
||||
Event as QQEvent,
|
||||
Message as QQMessage,
|
||||
MessageChain,
|
||||
PlainText
|
||||
)
|
||||
from nakuru import (
|
||||
CQHTTP,
|
||||
GroupMessage,
|
||||
GroupMemberIncrease,
|
||||
FriendMessage,
|
||||
GuildMessage,
|
||||
Notify
|
||||
)
|
||||
from nakuru.entities.components import Plain,At,Image
|
||||
from model.provider.provider import Provider
|
||||
from model.command.command import Command
|
||||
from util import general_utils as gu
|
||||
from util.cmd_config import CmdConfig as cc
|
||||
import util.function_calling.gplugin as gplugin
|
||||
import util.plugin_util as putil
|
||||
from PIL import Image as PILImage
|
||||
import io
|
||||
import traceback
|
||||
from . global_object import GlobalObject
|
||||
from typing import Union, Callable
|
||||
from addons.dashboard.helper import DashBoardHelper
|
||||
from addons.dashboard.server import DashBoardData
|
||||
from cores.monitor.perf import run_monitor
|
||||
from cores.database.conn import dbConn
|
||||
|
||||
# 缓存的会话
|
||||
session_dict = {}
|
||||
# 统计信息
|
||||
count = {}
|
||||
# 统计信息
|
||||
stat_file = ''
|
||||
|
||||
# 用户发言频率
|
||||
user_frequency = {}
|
||||
# 时间默认值
|
||||
frequency_time = 60
|
||||
# 计数默认值
|
||||
frequency_count = 2
|
||||
|
||||
# 公告(可自定义):
|
||||
announcement = ""
|
||||
|
||||
# 机器人私聊模式
|
||||
direct_message_mode = True
|
||||
|
||||
# 版本
|
||||
version = '3.1.0'
|
||||
|
||||
# 语言模型
|
||||
REV_CHATGPT = 'rev_chatgpt'
|
||||
OPENAI_OFFICIAL = 'openai_official'
|
||||
REV_ERNIE = 'rev_ernie'
|
||||
REV_EDGEGPT = 'rev_edgegpt'
|
||||
NONE_LLM = 'none_llm'
|
||||
chosen_provider = None
|
||||
|
||||
# 语言模型对象
|
||||
llm_instance: dict[str, Provider] = {}
|
||||
llm_command_instance: dict[str, Command] = {}
|
||||
|
||||
# 百度内容审核实例
|
||||
baidu_judge = None
|
||||
# 关键词回复
|
||||
keywords = {}
|
||||
|
||||
# QQ频道机器人
|
||||
qqchannel_bot: QQChan = None
|
||||
PLATFORM_QQCHAN = 'qqchan'
|
||||
qqchan_loop = None
|
||||
client = None
|
||||
|
||||
# QQ群机器人
|
||||
PLATFROM_QQBOT = 'qqbot'
|
||||
|
||||
# CLI
|
||||
PLATFORM_CLI = 'cli'
|
||||
|
||||
# 加载默认配置
|
||||
cc.init_attributes("qq_forward_threshold", 200)
|
||||
cc.init_attributes("qq_welcome", "欢迎加入本群!\n欢迎给https://github.com/Soulter/QQChannelChatGPT项目一个Star😊~\n输入help查看帮助~\n")
|
||||
cc.init_attributes("bing_proxy", "")
|
||||
cc.init_attributes("qq_pic_mode", False)
|
||||
cc.init_attributes("rev_chatgpt_model", "")
|
||||
cc.init_attributes("rev_chatgpt_plugin_ids", [])
|
||||
cc.init_attributes("rev_chatgpt_PUID", "")
|
||||
cc.init_attributes("rev_chatgpt_unverified_plugin_domains", [])
|
||||
cc.init_attributes("gocq_host", "127.0.0.1")
|
||||
cc.init_attributes("gocq_http_port", 5700)
|
||||
cc.init_attributes("gocq_websocket_port", 6700)
|
||||
cc.init_attributes("gocq_react_group", True)
|
||||
cc.init_attributes("gocq_react_guild", True)
|
||||
cc.init_attributes("gocq_react_friend", True)
|
||||
cc.init_attributes("gocq_react_group_increase", True)
|
||||
cc.init_attributes("gocq_qqchan_admin", "")
|
||||
cc.init_attributes("other_admins", [])
|
||||
cc.init_attributes("CHATGPT_BASE_URL", "")
|
||||
cc.init_attributes("qqbot_appid", "")
|
||||
cc.init_attributes("qqbot_secret", "")
|
||||
cc.init_attributes("llm_env_prompt", "> hint: 末尾根据内容和心情添加 1-2 个emoji")
|
||||
cc.init_attributes("default_personality_str", "")
|
||||
cc.init_attributes("openai_image_generate", {
|
||||
"model": "dall-e-3",
|
||||
"size": "1024x1024",
|
||||
"style": "vivid",
|
||||
"quality": "standard",
|
||||
})
|
||||
cc.init_attributes("http_proxy", "")
|
||||
cc.init_attributes("https_proxy", "")
|
||||
cc.init_attributes("dashboard_username", "")
|
||||
cc.init_attributes("dashboard_password", "")
|
||||
# cc.init_attributes(["qq_forward_mode"], False)
|
||||
|
||||
# QQ机器人
|
||||
gocq_bot = None
|
||||
PLATFORM_GOCQ = 'gocq'
|
||||
gocq_app = CQHTTP(
|
||||
host=cc.get("gocq_host", "127.0.0.1"),
|
||||
port=cc.get("gocq_websocket_port", 6700),
|
||||
http_port=cc.get("gocq_http_port", 5700),
|
||||
)
|
||||
qq_bot: UnofficialQQBotSDK = UnofficialQQBotSDK(
|
||||
cc.get("qqbot_appid", None),
|
||||
cc.get("qqbot_secret", None)
|
||||
)
|
||||
|
||||
gocq_loop: asyncio.AbstractEventLoop = None
|
||||
qqbot_loop: asyncio.AbstractEventLoop = None
|
||||
|
||||
|
||||
# 全局对象
|
||||
_global_object: GlobalObject = None
|
||||
|
||||
def new_sub_thread(func, args=()):
|
||||
thread = threading.Thread(target=_runner, args=(func, args), daemon=True)
|
||||
thread.start()
|
||||
|
||||
def _runner(func: Callable, args: tuple):
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
loop.run_until_complete(func(*args))
|
||||
loop.close()
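Because the platform SDK callbacks in this file are synchronous, each async handler is run on a throwaway event loop inside a daemon thread via `new_sub_thread`/`_runner` above. A minimal usage sketch of that pattern; the `greet` coroutine is invented for illustration and stands in for `oper_msg`:

```python
import asyncio
import time

async def greet(name: str):      # hypothetical coroutine standing in for oper_msg
    await asyncio.sleep(0.1)
    print(f"hello, {name}")

# every call gets its own thread and its own event loop, so handlers never share a loop
new_sub_thread(greet, ("CLI user",))
new_sub_thread(greet, ("group user",))
time.sleep(0.5)                  # keep the main thread alive long enough for the daemons
```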
|
||||
|
||||
# 统计消息数据
|
||||
def upload():
|
||||
global version, gocq_bot, qqchannel_bot
|
||||
while True:
|
||||
addr = ''
|
||||
addr_ip = ''
|
||||
session_dict_dump = '{}'
|
||||
try:
|
||||
addr = requests.get('http://myip.ipip.net', timeout=5).text
|
||||
addr_ip = re.findall(r'\d+.\d+.\d+.\d+', addr)[0]
|
||||
except BaseException as e:
|
||||
pass
|
||||
try:
|
||||
gocq_cnt = 0
|
||||
qqchan_cnt = 0
|
||||
if gocq_bot is not None:
|
||||
gocq_cnt = gocq_bot.get_cnt()
|
||||
if qqchannel_bot is not None:
|
||||
qqchan_cnt = qqchannel_bot.get_cnt()
|
||||
o = {"cnt_total": _global_object.cnt_total,"admin": _global_object.admin_qq,"addr": addr, 's': session_dict_dump}
|
||||
o_j = json.dumps(o)
|
||||
res = {"version": version, "count": gocq_cnt+qqchan_cnt, "ip": addr_ip, "others": o_j, "cntqc": qqchan_cnt, "cntgc": gocq_cnt}
|
||||
gu.log(res, gu.LEVEL_DEBUG, tag="Upload", fg = gu.FG_COLORS['yellow'], bg=gu.BG_COLORS['black'])
|
||||
resp = requests.post('https://api.soulter.top/upload', data=json.dumps(res), timeout=5)
|
||||
# print(resp.text)
|
||||
if resp.status_code == 200:
|
||||
ok = resp.json()
|
||||
if ok['status'] == 'ok':
|
||||
_global_object.cnt_total = 0
|
||||
if gocq_bot is not None:
|
||||
gocq_cnt = gocq_bot.set_cnt(0)
|
||||
if qqchannel_bot is not None:
|
||||
qqchan_cnt = qqchannel_bot.set_cnt(0)
|
||||
|
||||
except BaseException as e:
|
||||
gu.log("上传统计信息时出现错误: " + str(e), gu.LEVEL_ERROR, tag="Upload")
|
||||
pass
|
||||
time.sleep(10*60)
|
||||
|
||||
|
||||
# 语言模型选择
|
||||
def privider_chooser(cfg):
|
||||
l = []
|
||||
if 'rev_ChatGPT' in cfg and cfg['rev_ChatGPT']['enable']:
|
||||
l.append('rev_chatgpt')
|
||||
if 'rev_ernie' in cfg and cfg['rev_ernie']['enable']:
|
||||
l.append('rev_ernie')
|
||||
if 'rev_edgegpt' in cfg and cfg['rev_edgegpt']['enable']:
|
||||
l.append('rev_edgegpt')
|
||||
if 'openai' in cfg and len(cfg['openai']['key']) > 0 and cfg['openai']['key'][0] is not None:
|
||||
l.append('openai_official')
|
||||
return l
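`privider_chooser` only inspects a handful of keys in the parsed config. A hedged sketch of the shape it expects and what it returns; the values below are illustrative, not taken from a real config.yaml:

```python
cfg = {
    "rev_ChatGPT": {"enable": False},
    "rev_ernie":   {"enable": False},
    "rev_edgegpt": {"enable": True},
    "openai":      {"key": ["sk-..."]},  # a non-empty key list enables the official API
}

print(privider_chooser(cfg))  # -> ['rev_edgegpt', 'openai_official']
```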
|
||||
|
||||
'''
|
||||
初始化机器人
|
||||
'''
|
||||
def initBot(cfg):
|
||||
global llm_instance, llm_command_instance
|
||||
global baidu_judge, chosen_provider
|
||||
global frequency_count, frequency_time, announcement, direct_message_mode
|
||||
global keywords, _global_object
|
||||
|
||||
# 迁移旧配置
|
||||
gu.try_migrate_config(cfg)
|
||||
# 使用新配置
|
||||
cfg = cc.get_all()
|
||||
|
||||
_event_loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(_event_loop)
|
||||
|
||||
# 初始化 global_object
|
||||
_global_object = GlobalObject()
|
||||
_global_object.base_config = cfg
|
||||
_global_object.stat['session'] = {}
|
||||
_global_object.stat['message'] = {}
|
||||
_global_object.stat['platform'] = {}
|
||||
|
||||
if 'reply_prefix' in cfg:
|
||||
# 适配旧版配置
|
||||
if isinstance(cfg['reply_prefix'], dict):
|
||||
for k in cfg['reply_prefix']:
|
||||
_global_object.reply_prefix = cfg['reply_prefix'][k]
|
||||
break
|
||||
else:
|
||||
_global_object.reply_prefix = cfg['reply_prefix']
|
||||
|
||||
# 语言模型提供商
|
||||
gu.log("--------加载语言模型--------", gu.LEVEL_INFO, fg=gu.FG_COLORS['yellow'])
|
||||
prov = privider_chooser(cfg)
|
||||
if REV_CHATGPT in prov:
|
||||
gu.log("- 逆向ChatGPT库 -", gu.LEVEL_INFO)
|
||||
if cfg['rev_ChatGPT']['enable']:
|
||||
if 'account' in cfg['rev_ChatGPT']:
|
||||
from model.provider.rev_chatgpt import ProviderRevChatGPT
|
||||
from model.command.rev_chatgpt import CommandRevChatGPT
|
||||
llm_instance[REV_CHATGPT] = ProviderRevChatGPT(cfg['rev_ChatGPT'], base_url=cc.get("CHATGPT_BASE_URL", None))
|
||||
llm_command_instance[REV_CHATGPT] = CommandRevChatGPT(llm_instance[REV_CHATGPT], _global_object)
|
||||
chosen_provider = REV_CHATGPT
|
||||
else:
|
||||
input("[System-err] 请退出本程序, 然后在配置文件中填写rev_ChatGPT相关配置")
|
||||
if REV_EDGEGPT in prov:
|
||||
gu.log("- New Bing -", gu.LEVEL_INFO)
|
||||
if not os.path.exists('./cookies.json'):
|
||||
input("[System-err] 导入Bing模型时发生错误, 没有找到cookies文件或者cookies文件放置位置错误。windows启动器启动的用户请把cookies.json文件放到和启动器相同的目录下。\n如何获取请看https://github.com/Soulter/QQChannelChatGPT仓库介绍。")
|
||||
else:
|
||||
if cfg['rev_edgegpt']['enable']:
|
||||
try:
|
||||
from model.provider.rev_edgegpt import ProviderRevEdgeGPT
|
||||
from model.command.rev_edgegpt import CommandRevEdgeGPT
|
||||
llm_instance[REV_EDGEGPT] = ProviderRevEdgeGPT()
|
||||
llm_command_instance[REV_EDGEGPT] = CommandRevEdgeGPT(llm_instance[REV_EDGEGPT], _global_object)
|
||||
chosen_provider = REV_EDGEGPT
|
||||
except BaseException as e:
|
||||
print(traceback.format_exc())
|
||||
gu.log("加载Bing模型时发生错误, 请检查1. cookies文件是否正确放置 2. 是否设置了代理(梯子)。", gu.LEVEL_ERROR, max_len=60)
|
||||
if OPENAI_OFFICIAL in prov:
|
||||
gu.log("- OpenAI官方 -", gu.LEVEL_INFO)
|
||||
if cfg['openai']['key'] is not None and cfg['openai']['key'] != [None]:
|
||||
from model.provider.openai_official import ProviderOpenAIOfficial
|
||||
from model.command.openai_official import CommandOpenAIOfficial
|
||||
llm_instance[OPENAI_OFFICIAL] = ProviderOpenAIOfficial(cfg['openai'])
|
||||
llm_command_instance[OPENAI_OFFICIAL] = CommandOpenAIOfficial(llm_instance[OPENAI_OFFICIAL], _global_object)
|
||||
chosen_provider = OPENAI_OFFICIAL
|
||||
|
||||
gu.log("--------加载配置--------", gu.LEVEL_INFO, fg=gu.FG_COLORS['yellow'])
|
||||
# 得到关键词
|
||||
if os.path.exists("keyword.json"):
|
||||
with open("keyword.json", 'r', encoding='utf-8') as f:
|
||||
keywords = json.load(f)
|
||||
|
||||
# 检查provider设置偏好
|
||||
p = cc.get("chosen_provider", None)
|
||||
if p is not None and p in llm_instance:
|
||||
chosen_provider = p
|
||||
gu.log(f"将使用 {chosen_provider} 语言模型。", gu.LEVEL_INFO)
|
||||
|
||||
# 百度内容审核
|
||||
if 'baidu_aip' in cfg and 'enable' in cfg['baidu_aip'] and cfg['baidu_aip']['enable']:
|
||||
try:
|
||||
baidu_judge = BaiduJudge(cfg['baidu_aip'])
|
||||
gu.log("百度内容审核初始化成功", gu.LEVEL_INFO)
|
||||
except BaseException as e:
|
||||
gu.log("百度内容审核初始化失败", gu.LEVEL_ERROR)
|
||||
|
||||
threading.Thread(target=upload, daemon=True).start()
|
||||
|
||||
# 得到私聊模式配置
|
||||
if 'direct_message_mode' in cfg:
|
||||
direct_message_mode = cfg['direct_message_mode']
|
||||
gu.log("私聊功能: "+str(direct_message_mode), gu.LEVEL_INFO)
|
||||
|
||||
# 得到发言频率配置
|
||||
if 'limit' in cfg:
|
||||
gu.log("发言频率配置: "+str(cfg['limit']), gu.LEVEL_INFO)
|
||||
if 'count' in cfg['limit']:
|
||||
frequency_count = cfg['limit']['count']
|
||||
if 'time' in cfg['limit']:
|
||||
frequency_time = cfg['limit']['time']
|
||||
|
||||
# 得到公告配置
|
||||
if 'notice' in cfg:
|
||||
if cc.get("qq_welcome", None) != None and cfg['notice'] == '此机器人由Github项目QQChannelChatGPT驱动。':
|
||||
announcement = cc.get("qq_welcome", None)
|
||||
else:
|
||||
announcement = cfg['notice']
|
||||
gu.log("公告配置: " + announcement, gu.LEVEL_INFO)
|
||||
|
||||
try:
|
||||
if 'uniqueSessionMode' in cfg and cfg['uniqueSessionMode']:
|
||||
_global_object.uniqueSession = True
|
||||
else:
|
||||
_global_object.uniqueSession = False
|
||||
gu.log("独立会话: "+str(_global_object.uniqueSession), gu.LEVEL_INFO)
|
||||
except BaseException as e:
|
||||
gu.log("独立会话配置错误: "+str(e), gu.LEVEL_ERROR)
|
||||
|
||||
|
||||
gu.log(f"QQ开放平台AppID: {cfg['qqbot']['appid']} 令牌: {cfg['qqbot']['token']}")
|
||||
|
||||
if chosen_provider is None:
|
||||
gu.log("检测到没有启动任何语言模型。", gu.LEVEL_CRITICAL)
|
||||
|
||||
nick_qq = cc.get("nick_qq", None)
|
||||
if nick_qq == None:
|
||||
nick_qq = ("ai","!","!")
|
||||
if isinstance(nick_qq, str):
|
||||
nick_qq = (nick_qq,)
|
||||
if isinstance(nick_qq, list):
|
||||
nick_qq = tuple(nick_qq)
|
||||
_global_object.nick = nick_qq
|
||||
|
||||
thread_inst = None
|
||||
|
||||
gu.log("--------加载插件--------", gu.LEVEL_INFO, fg=gu.FG_COLORS['yellow'])
|
||||
# 加载插件
|
||||
_command = Command(None, _global_object)
|
||||
ok, err = putil.plugin_reload(_global_object.cached_plugins)
|
||||
if ok:
|
||||
gu.log("加载插件完成", gu.LEVEL_INFO)
|
||||
else:
|
||||
gu.log(err, gu.LEVEL_ERROR)
|
||||
|
||||
if chosen_provider is None:
|
||||
llm_command_instance[NONE_LLM] = _command
|
||||
chosen_provider = NONE_LLM
|
||||
|
||||
gu.log("--------加载机器人平台--------", gu.LEVEL_INFO, fg=gu.FG_COLORS['yellow'])
|
||||
|
||||
admin_qq = cc.get('admin_qq', None)
|
||||
admin_qqchan = cc.get('admin_qqchan', None)
|
||||
if admin_qq == None:
|
||||
gu.log("未设置管理者QQ号(管理者才能使用update/plugin等指令),如需设置,请编辑 cmd_config.json 文件", gu.LEVEL_WARNING)
|
||||
|
||||
if admin_qqchan == None:
|
||||
gu.log("未设置管理者QQ频道用户号(管理者才能使用update/plugin等指令),如需设置,请编辑 cmd_config.json 文件。可在频道发送指令 !myid 获取", gu.LEVEL_WARNING)
|
||||
|
||||
_global_object.admin_qq = admin_qq
|
||||
_global_object.admin_qqchan = admin_qqchan
|
||||
|
||||
global qq_bot, qqbot_loop
|
||||
qqbot_loop = asyncio.new_event_loop()
|
||||
if cc.get("qqbot_appid", '') != '' and cc.get("qqbot_secret", '') != '':
|
||||
gu.log("- 启用QQ群机器人 -", gu.LEVEL_INFO)
|
||||
thread_inst = threading.Thread(target=run_qqbot, args=(qqbot_loop, qq_bot,), daemon=True)
|
||||
thread_inst.start()
|
||||
|
||||
# GOCQ
|
||||
global gocq_bot
|
||||
if 'gocqbot' in cfg and cfg['gocqbot']['enable']:
|
||||
gu.log("- 启用QQ机器人 -", gu.LEVEL_INFO)
|
||||
|
||||
global gocq_app, gocq_loop
|
||||
gocq_loop = asyncio.new_event_loop()
|
||||
gocq_bot = QQ(True, cc, gocq_loop)
|
||||
thread_inst = threading.Thread(target=run_gocq_bot, args=(gocq_loop, gocq_bot, gocq_app), daemon=True)
|
||||
thread_inst.start()
|
||||
else:
|
||||
gocq_bot = QQ(False)
|
||||
|
||||
_global_object.platform_qq = gocq_bot
|
||||
|
||||
gu.log("机器人部署教程: https://github.com/Soulter/QQChannelChatGPT/wiki/", gu.LEVEL_INFO, fg=gu.FG_COLORS['yellow'])
|
||||
gu.log("如果有任何问题, 请在 https://github.com/Soulter/QQChannelChatGPT 上提交 issue 或加群 322154837", gu.LEVEL_INFO, fg=gu.FG_COLORS['yellow'])
|
||||
gu.log("请给 https://github.com/Soulter/QQChannelChatGPT 点个 star!", gu.LEVEL_INFO, fg=gu.FG_COLORS['yellow'])
|
||||
|
||||
# QQ频道
|
||||
if 'qqbot' in cfg and cfg['qqbot']['enable']:
|
||||
gu.log("- 启用QQ频道机器人 -", gu.LEVEL_INFO)
|
||||
global qqchannel_bot, qqchan_loop
|
||||
qqchannel_bot = QQChan()
|
||||
qqchan_loop = asyncio.new_event_loop()
|
||||
_global_object.platform_qqchan = qqchannel_bot
|
||||
thread_inst = threading.Thread(target=run_qqchan_bot, args=(cfg, qqchan_loop, qqchannel_bot), daemon=True)
|
||||
thread_inst.start()
|
||||
# thread.join()
|
||||
|
||||
if thread_inst == None:
|
||||
gu.log("没有启用/成功启用任何机器人平台", gu.LEVEL_CRITICAL)
|
||||
|
||||
default_personality_str = cc.get("default_personality_str", "")
|
||||
if default_personality_str == "":
|
||||
_global_object.default_personality = None
|
||||
else:
|
||||
_global_object.default_personality = {
|
||||
"name": "default",
|
||||
"prompt": default_personality_str,
|
||||
}
|
||||
# 初始化dashboard
|
||||
_global_object.dashboard_data = DashBoardData(
|
||||
stats={},
|
||||
configs={},
|
||||
logs={},
|
||||
plugins=_global_object.cached_plugins,
|
||||
)
|
||||
dashboard_helper = DashBoardHelper(_global_object.dashboard_data, config=cc.get_all())
|
||||
dashboard_thread = threading.Thread(target=dashboard_helper.run, daemon=True)
|
||||
dashboard_thread.start()
|
||||
|
||||
# 运行 monitor
|
||||
threading.Thread(target=run_monitor, args=(_global_object,), daemon=False).start()
|
||||
|
||||
gu.log("🎉 项目启动完成。")
|
||||
|
||||
# asyncio.get_event_loop().run_until_complete(cli())
|
||||
|
||||
dashboard_thread.join()
|
||||
|
||||
async def cli():
|
||||
time.sleep(1)
|
||||
while True:
|
||||
try:
|
||||
prompt = input(">>> ")
|
||||
if prompt == "":
|
||||
continue
|
||||
ngm = await cli_pack_message(prompt)
|
||||
await oper_msg(ngm, True, PLATFORM_CLI)
|
||||
except EOFError:
|
||||
return
|
||||
|
||||
async def cli_pack_message(prompt: str) -> NakuruGuildMessage:
|
||||
ngm = NakuruGuildMessage()
|
||||
ngm.channel_id = 6180
|
||||
ngm.user_id = 6180
|
||||
ngm.message = [Plain(prompt)]
|
||||
ngm.type = "GuildMessage"
|
||||
ngm.self_id = 6180
|
||||
ngm.self_tiny_id = 6180
|
||||
ngm.guild_id = 6180
|
||||
ngm.sender = NakuruGuildMember()
|
||||
ngm.sender.tiny_id = 6180
|
||||
ngm.sender.user_id = 6180
|
||||
ngm.sender.nickname = "CLI"
|
||||
ngm.sender.role = 0
|
||||
return ngm
|
||||
|
||||
'''
|
||||
运行QQ频道机器人
|
||||
'''
|
||||
def run_qqchan_bot(cfg, loop, qqchannel_bot: QQChan):
|
||||
asyncio.set_event_loop(loop)
|
||||
intents = botpy.Intents(public_guild_messages=True, direct_message=True)
|
||||
global client
|
||||
client = botClient(
|
||||
intents=intents,
|
||||
bot_log=False
|
||||
)
|
||||
try:
|
||||
qqchannel_bot.run_bot(client, cfg['qqbot']['appid'], cfg['qqbot']['token'])
|
||||
except BaseException as e:
|
||||
gu.log("启动QQ频道机器人时出现错误, 原因如下: " + str(e), gu.LEVEL_CRITICAL, tag="QQ频道")
|
||||
gu.log(r"如果您是初次启动,请修改配置文件(QQChannelChatGPT/config.yaml)详情请看:https://github.com/Soulter/QQChannelChatGPT/wiki。" + str(e), gu.LEVEL_CRITICAL, tag="System")
|
||||
|
||||
i = input("按回车退出程序。\n")
|
||||
|
||||
'''
|
||||
运行GOCQ机器人
|
||||
'''
|
||||
def run_gocq_bot(loop, gocq_bot, gocq_app):
|
||||
asyncio.set_event_loop(loop)
|
||||
gu.log("正在检查本地GO-CQHTTP连接...端口5700, 6700", tag="QQ")
|
||||
noticed = False
|
||||
while True:
|
||||
if not gu.port_checker(5700, cc.get("gocq_host", "127.0.0.1")) or not gu.port_checker(6700, cc.get("gocq_host", "127.0.0.1")):
|
||||
if not noticed:
|
||||
noticed = True
|
||||
gu.log("与GO-CQHTTP通信失败, 请检查GO-CQHTTP是否启动并正确配置。程序会每隔 5s 自动重试。", gu.LEVEL_CRITICAL, tag="QQ")
|
||||
time.sleep(5)
|
||||
else:
|
||||
gu.log("检查完毕,未发现问题。", tag="QQ")
|
||||
break
|
||||
|
||||
global gocq_client
|
||||
gocq_client = gocqClient()
|
||||
try:
|
||||
gocq_bot.run_bot(gocq_app)
|
||||
except BaseException as e:
|
||||
input("启动QQ机器人出现错误"+str(e))
|
||||
|
||||
'''
|
||||
启动QQ群机器人(官方接口)
|
||||
'''
|
||||
def run_qqbot(loop: asyncio.AbstractEventLoop, qq_bot: UnofficialQQBotSDK):
|
||||
asyncio.set_event_loop(loop)
|
||||
QQBotClient()
|
||||
qq_bot.run_bot()
|
||||
|
||||
|
||||
'''
|
||||
检查发言频率
|
||||
'''
|
||||
def check_frequency(id) -> bool:
|
||||
ts = int(time.time())
|
||||
if id in user_frequency:
|
||||
if ts-user_frequency[id]['time'] > frequency_time:
|
||||
user_frequency[id]['time'] = ts
|
||||
user_frequency[id]['count'] = 1
|
||||
return True
|
||||
else:
|
||||
if user_frequency[id]['count'] >= frequency_count:
|
||||
return False
|
||||
else:
|
||||
user_frequency[id]['count']+=1
|
||||
return True
|
||||
else:
|
||||
t = {'time':ts,'count':1}
|
||||
user_frequency[id] = t
|
||||
return True
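`check_frequency` is a fixed-window rate limiter keyed by user id: the window opens at the user's first message and allows `frequency_count` messages per `frequency_time` seconds. A quick illustration assuming the defaults declared above (2 messages per 60 seconds):

```python
print(check_frequency("10001"))  # True  -> first message opens a 60 s window
print(check_frequency("10001"))  # True  -> second message, still within the quota
print(check_frequency("10001"))  # False -> third message inside the window is rejected
# once more than 60 s have passed since the window opened, the counter resets and True is returned again
```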
|
||||
|
||||
|
||||
'''
|
||||
通用消息回复
|
||||
'''
|
||||
async def send_message(platform, message, res, session_id = None):
|
||||
global qqchannel_bot, qqchannel_bot, gocq_loop, session_dict
|
||||
|
||||
# 统计会话信息
|
||||
if session_id is not None:
|
||||
if session_id not in session_dict:
|
||||
session_dict[session_id] = {'cnt': 1}
|
||||
else:
|
||||
session_dict[session_id]['cnt'] += 1
|
||||
else:
|
||||
session_dict[session_id]['cnt'] += 1
|
||||
|
||||
# TODO: 这里会非常吃资源。然而 sqlite3 不支持多线程,所以暂时这样写。
|
||||
curr_ts = int(time.time())
|
||||
db_inst = dbConn()
|
||||
db_inst.increment_stat_session(platform, session_id, 1)
|
||||
db_inst.increment_stat_message(curr_ts, 1)
|
||||
db_inst.increment_stat_platform(curr_ts, platform, 1)
|
||||
|
||||
if platform == PLATFORM_QQCHAN:
|
||||
qqchannel_bot.send_qq_msg(message, res)
|
||||
elif platform == PLATFORM_GOCQ:
|
||||
await gocq_bot.send_qq_msg(message, res)
|
||||
elif platform == PLATFROM_QQBOT:
|
||||
message_chain = MessageChain()
|
||||
message_chain.parse_from_nakuru(res)
|
||||
await qq_bot.send(message, message_chain)
|
||||
elif platform == PLATFORM_CLI:
|
||||
print(res)
|
||||
|
||||
async def oper_msg(message: Union[GroupMessage, FriendMessage, GuildMessage, NakuruGuildMessage],
|
||||
group: bool=False,
|
||||
platform: str = None):
|
||||
"""
|
||||
处理消息。
|
||||
group: 群聊模式,
|
||||
message: 频道是频道的消息对象, QQ是nakuru-gocq的消息对象
|
||||
msg_ref: 引用消息(频道)
|
||||
platform: 平台(gocq, qqchan)
|
||||
"""
|
||||
global chosen_provider, keywords, qqchannel_bot, gocq_bot
|
||||
global _global_object
|
||||
qq_msg = ''
|
||||
session_id = ''
|
||||
user_id = ''
|
||||
role = "member" # 角色, member或admin
|
||||
hit = False # 是否命中指令
|
||||
command_result = () # 调用指令返回的结果
|
||||
|
||||
_global_object.cnt_total += 1
|
||||
|
||||
with_tag = False # 是否带有昵称
|
||||
|
||||
if platform == PLATFORM_QQCHAN or platform == PLATFROM_QQBOT or platform == PLATFORM_CLI:
|
||||
with_tag = True
|
||||
|
||||
_len = 0
|
||||
for i in message.message:
|
||||
if isinstance(i, Plain) or isinstance(i, PlainText):
|
||||
qq_msg += str(i.text).strip()
|
||||
if isinstance(i, At):
|
||||
if message.type == "GuildMessage":
|
||||
if i.qq == message.user_id or i.qq == message.self_tiny_id:
|
||||
with_tag = True
|
||||
if message.type == "FriendMessage":
|
||||
if i.qq == message.self_id:
|
||||
with_tag = True
|
||||
if message.type == "GroupMessage":
|
||||
if i.qq == message.self_id:
|
||||
with_tag = True
|
||||
|
||||
for i in _global_object.nick:
|
||||
if i != '' and qq_msg.startswith(i):
|
||||
_len = len(i)
|
||||
with_tag = True
|
||||
break
|
||||
qq_msg = qq_msg[_len:].strip()
|
||||
|
||||
gu.log(f"收到消息:{qq_msg}", gu.LEVEL_INFO, tag="QQ")
|
||||
user_id = message.user_id
|
||||
|
||||
if group:
|
||||
# 适配GO-CQHTTP的频道功能
|
||||
if message.type == "GuildMessage":
|
||||
session_id = message.channel_id
|
||||
else:
|
||||
session_id = message.group_id
|
||||
else:
|
||||
with_tag = True
|
||||
session_id = message.user_id
|
||||
|
||||
if message.type == "GuildMessage":
|
||||
sender_id = str(message.sender.tiny_id)
|
||||
else:
|
||||
sender_id = str(message.sender.user_id)
|
||||
if sender_id == _global_object.admin_qq or \
|
||||
sender_id == _global_object.admin_qqchan or \
|
||||
sender_id in cc.get("other_admins", []) or \
|
||||
sender_id == cc.get("gocq_qqchan_admin", "") or \
|
||||
platform == PLATFORM_CLI:
|
||||
role = "admin"
|
||||
|
||||
if _global_object.uniqueSession:
|
||||
# 独立会话时,一个用户一个 session
|
||||
session_id = sender_id
|
||||
|
||||
|
||||
if qq_msg == "":
|
||||
await send_message(platform, message, f"Hi~", session_id=session_id)
|
||||
return
|
||||
|
||||
if with_tag:
|
||||
# 检查发言频率
|
||||
if not check_frequency(user_id):
|
||||
await send_message(platform, message, f'你的发言超过频率限制(╯▔皿▔)╯。\n管理员设置{frequency_time}秒内只能提问{frequency_count}次。', session_id=session_id)
|
||||
return
|
||||
|
||||
# logf.write("[GOCQBOT] "+ qq_msg+'\n')
|
||||
# logf.flush()
|
||||
|
||||
# 关键词回复
|
||||
for k in keywords:
|
||||
if qq_msg == k:
|
||||
plain_text = ""
|
||||
if 'plain_text' in keywords[k]:
|
||||
plain_text = keywords[k]['plain_text']
|
||||
else:
|
||||
plain_text = keywords[k]
|
||||
image_url = ""
|
||||
if 'image_url' in keywords[k]:
|
||||
image_url = keywords[k]['image_url']
|
||||
if image_url != "":
|
||||
res = [Plain(plain_text), Image.fromURL(image_url)]
|
||||
await send_message(platform, message, res, session_id=session_id)
|
||||
else:
|
||||
await send_message(platform, message, plain_text, session_id=session_id)
|
||||
return
|
||||
|
||||
# 检查是否是更换语言模型的请求
|
||||
temp_switch = ""
|
||||
if qq_msg.startswith('/bing') or qq_msg.startswith('/gpt') or qq_msg.startswith('/revgpt'):
|
||||
target = chosen_provider
|
||||
if qq_msg.startswith('/bing'):
|
||||
target = REV_EDGEGPT
|
||||
elif qq_msg.startswith('/gpt'):
|
||||
target = OPENAI_OFFICIAL
|
||||
elif qq_msg.startswith('/revgpt'):
|
||||
target = REV_CHATGPT
|
||||
l = qq_msg.split(' ')
|
||||
if len(l) > 1 and l[1] != "":
|
||||
# 临时对话模式,先记录下之前的语言模型,回答完毕后再切回
|
||||
temp_switch = chosen_provider
|
||||
chosen_provider = target
|
||||
qq_msg = l[1]
|
||||
else:
|
||||
chosen_provider = target
|
||||
cc.put("chosen_provider", chosen_provider)
|
||||
await send_message(platform, message, f"已切换至【{chosen_provider}】", session_id=session_id)
|
||||
return
|
||||
|
||||
chatgpt_res = ""
|
||||
|
||||
# 如果是等待回复的消息
|
||||
if platform == PLATFORM_GOCQ and session_id in gocq_bot.waiting and gocq_bot.waiting[session_id] == '':
|
||||
gocq_bot.waiting[session_id] = message
|
||||
return
|
||||
if platform == PLATFORM_QQCHAN and session_id in qqchannel_bot.waiting and qqchannel_bot.waiting[session_id] == '':
|
||||
qqchannel_bot.waiting[session_id] = message
|
||||
return
|
||||
|
||||
hit, command_result = llm_command_instance[chosen_provider].check_command(
|
||||
qq_msg,
|
||||
session_id,
|
||||
role,
|
||||
platform,
|
||||
message,
|
||||
)
|
||||
|
||||
# 没触发指令
|
||||
if not hit:
|
||||
if not with_tag:
|
||||
return
|
||||
# 关键词拦截
|
||||
for i in uw.unfit_words_q:
|
||||
matches = re.match(i, qq_msg.strip(), re.I | re.M)
|
||||
if matches:
|
||||
await send_message(platform, message, f"你的提问得到的回复未通过【自有关键词拦截】服务, 不予回复。", session_id=session_id)
|
||||
return
|
||||
if baidu_judge != None:
|
||||
check, msg = baidu_judge.judge(qq_msg)
|
||||
if not check:
|
||||
await send_message(platform, message, f"你的提问得到的回复未通过【百度AI内容审核】服务, 不予回复。\n\n{msg}", session_id=session_id)
|
||||
return
|
||||
if chosen_provider == None:
|
||||
await send_message(platform, message, f"管理员未启动任何语言模型或者语言模型初始化时失败。", session_id=session_id)
|
||||
return
|
||||
try:
|
||||
# check image url
|
||||
image_url = None
|
||||
for comp in message.message:
|
||||
if isinstance(comp, Image):
|
||||
if comp.url is None:
|
||||
image_url = comp.file
|
||||
break
|
||||
else:
|
||||
image_url = comp.url
|
||||
break
|
||||
# web search keyword
|
||||
web_sch_flag = False
|
||||
if qq_msg.startswith("ws ") and qq_msg != "ws ":
|
||||
qq_msg = qq_msg[3:]
|
||||
web_sch_flag = True
|
||||
else:
|
||||
qq_msg += " " + cc.get("llm_env_prompt", "")
|
||||
if chosen_provider == REV_CHATGPT or chosen_provider == OPENAI_OFFICIAL:
|
||||
if _global_object.web_search or web_sch_flag:
|
||||
official_fc = chosen_provider == OPENAI_OFFICIAL
|
||||
chatgpt_res = gplugin.web_search(qq_msg, llm_instance[chosen_provider], session_id, official_fc)
|
||||
else:
|
||||
chatgpt_res = str(llm_instance[chosen_provider].text_chat(qq_msg, session_id, image_url, default_personality = _global_object.default_personality))
|
||||
elif chosen_provider == REV_EDGEGPT:
|
||||
res, res_code = await llm_instance[chosen_provider].text_chat(qq_msg, platform)
|
||||
if res_code == 0: # bing不想继续话题,重置会话后重试。
|
||||
await send_message(platform, message, "Bing不想继续话题了, 正在自动重置会话并重试。", session_id=session_id)
|
||||
await llm_instance[chosen_provider].forget()
|
||||
res, res_code = await llm_instance[chosen_provider].text_chat(qq_msg, platform)
|
||||
if res_code == 0: # bing还是不想继续话题,大概率说明提问有问题。
|
||||
await llm_instance[chosen_provider].forget()
|
||||
await send_message(platform, message, "Bing仍然不想继续话题, 会话已重置, 请检查您的提问后重试。", session_id=session_id)
|
||||
res = ""
|
||||
chatgpt_res = str(res)
|
||||
|
||||
chatgpt_res = _global_object.reply_prefix + chatgpt_res
|
||||
except BaseException as e:
|
||||
gu.log(f"调用异常:{traceback.format_exc()}", gu.LEVEL_ERROR, max_len=100000)
|
||||
gu.log("调用语言模型例程时出现异常。原因: "+str(e), gu.LEVEL_ERROR)
|
||||
await send_message(platform, message, "调用语言模型例程时出现异常。原因: "+str(e), session_id=session_id)
|
||||
return
|
||||
|
||||
# 切换回原来的语言模型
|
||||
if temp_switch != "":
|
||||
chosen_provider = temp_switch
|
||||
|
||||
# 指令回复
|
||||
if hit:
|
||||
# 检查指令. command_result是一个元组:(指令调用是否成功, 指令返回的文本结果, 指令类型)
|
||||
if command_result == None:
|
||||
return
|
||||
|
||||
command = command_result[2]
|
||||
if command == "keyword":
|
||||
if os.path.exists("keyword.json"):
|
||||
with open("keyword.json", "r", encoding="utf-8") as f:
|
||||
keywords = json.load(f)
|
||||
else:
|
||||
try:
|
||||
await send_message(platform, message, command_result[1], session_id=session_id)
|
||||
except BaseException as e:
|
||||
await send_message(platform, message, f"回复消息出错: {str(e)}", session_id=session_id)
|
||||
|
||||
if command == "update latest r":
|
||||
await send_message(platform, message, command_result[1] + "\n\n即将自动重启。", session_id=session_id)
|
||||
py = sys.executable
|
||||
os.execl(py, py, *sys.argv)
|
||||
|
||||
if not command_result[0]:
|
||||
await send_message(platform, message, f"指令调用错误: \n{str(command_result[1])}", session_id=session_id)
|
||||
return
|
||||
|
||||
# 画图指令
|
||||
if isinstance(command_result[1], list) and len(command_result) == 3 and command == 'draw':
|
||||
for i in command_result[1]:
|
||||
# i is a link
|
||||
# 保存到本地
|
||||
pic_res = requests.get(i, stream = True)
|
||||
if pic_res.status_code == 200:
|
||||
image = PILImage.open(io.BytesIO(pic_res.content))
|
||||
await send_message(platform, message, [Image.fromFileSystem(gu.save_temp_img(image))], session_id=session_id)
|
||||
|
||||
# 其他指令
|
||||
else:
|
||||
try:
|
||||
await send_message(platform, message, command_result[1], session_id=session_id)
|
||||
except BaseException as e:
|
||||
await send_message(platform, message, f"回复消息出错: {str(e)}", session_id=session_id)
|
||||
|
||||
return
|
||||
|
||||
# 记录日志
|
||||
# logf.write(f"{reply_prefix} {str(chatgpt_res)}\n")
|
||||
# logf.flush()
|
||||
|
||||
# 敏感过滤
|
||||
# 过滤不合适的词
|
||||
for i in uw.unfit_words:
|
||||
chatgpt_res = re.sub(i, "***", chatgpt_res)
|
||||
# 百度内容审核服务二次审核
|
||||
if baidu_judge != None:
|
||||
check, msg = baidu_judge.judge(chatgpt_res)
|
||||
if not check:
|
||||
await send_message(platform, message, f"你的提问得到的回复【百度内容审核】未通过,不予回复。\n\n{msg}", session_id=session_id)
|
||||
return
|
||||
|
||||
# 发送信息
|
||||
try:
|
||||
await send_message(platform, message, chatgpt_res, session_id=session_id)
|
||||
except BaseException as e:
|
||||
gu.log("回复消息错误: \n"+str(e), gu.LEVEL_ERROR)
|
||||
|
||||
# QQ频道机器人
|
||||
class botClient(botpy.Client):
|
||||
# 收到频道消息
|
||||
async def on_at_message_create(self, message: Message):
|
||||
gu.log(str(message), gu.LEVEL_DEBUG, max_len=9999)
|
||||
|
||||
# 转换层
|
||||
nakuru_guild_message = qqchannel_bot.gocq_compatible_receive(message)
|
||||
gu.log(f"转换后: {str(nakuru_guild_message)}", gu.LEVEL_DEBUG, max_len=9999)
|
||||
new_sub_thread(oper_msg, (nakuru_guild_message, True, PLATFORM_QQCHAN))
|
||||
|
||||
# 收到私聊消息
|
||||
async def on_direct_message_create(self, message: DirectMessage):
|
||||
if direct_message_mode:
|
||||
|
||||
# 转换层
|
||||
nakuru_guild_message = qqchannel_bot.gocq_compatible_receive(message)
|
||||
gu.log(f"转换后: {str(nakuru_guild_message)}", gu.LEVEL_DEBUG, max_len=9999)
|
||||
|
||||
new_sub_thread(oper_msg, (nakuru_guild_message, False, PLATFORM_QQCHAN))
|
||||
# QQ机器人
|
||||
class gocqClient():
|
||||
# 收到群聊消息
|
||||
@gocq_app.receiver("GroupMessage")
|
||||
async def _(app: CQHTTP, source: GroupMessage):
|
||||
if cc.get("gocq_react_group", True):
|
||||
if isinstance(source.message[0], Plain):
|
||||
new_sub_thread(oper_msg, (source, True, PLATFORM_GOCQ))
|
||||
if isinstance(source.message[0], At):
|
||||
if source.message[0].qq == source.self_id:
|
||||
new_sub_thread(oper_msg, (source, True, PLATFORM_GOCQ))
|
||||
else:
|
||||
return
|
||||
|
||||
@gocq_app.receiver("FriendMessage")
|
||||
async def _(app: CQHTTP, source: FriendMessage):
|
||||
if cc.get("gocq_react_friend", True):
|
||||
if isinstance(source.message[0], Plain):
|
||||
new_sub_thread(oper_msg, (source, False, PLATFORM_GOCQ))
|
||||
else:
|
||||
return
|
||||
|
||||
@gocq_app.receiver("GroupMemberIncrease")
|
||||
async def _(app: CQHTTP, source: GroupMemberIncrease):
|
||||
if cc.get("gocq_react_group_increase", True):
|
||||
global announcement
|
||||
await app.sendGroupMessage(source.group_id, [
|
||||
Plain(text = announcement),
|
||||
])
|
||||
|
||||
@gocq_app.receiver("Notify")
|
||||
async def _(app: CQHTTP, source: Notify):
|
||||
print(source)
|
||||
if source.sub_type == "poke" and source.target_id == source.self_id:
|
||||
new_sub_thread(oper_msg, (source, False, PLATFORM_GOCQ))
|
||||
|
||||
@gocq_app.receiver("GuildMessage")
|
||||
async def _(app: CQHTTP, source: GuildMessage):
|
||||
if cc.get("gocq_react_guild", True):
|
||||
if isinstance(source.message[0], Plain):
|
||||
new_sub_thread(oper_msg, (source, True, PLATFORM_GOCQ))
|
||||
if isinstance(source.message[0], At):
|
||||
if source.message[0].qq == source.self_tiny_id:
|
||||
new_sub_thread(oper_msg, (source, True, PLATFORM_GOCQ))
|
||||
else:
|
||||
return
|
||||
|
||||
class QQBotClient():
|
||||
@qq_bot.on('GroupMessage')
|
||||
async def _(bot: UnofficialQQBotSDK, message: QQMessage):
|
||||
print(message)
|
||||
new_sub_thread(oper_msg, (message, True, PLATFROM_QQBOT))
|
||||
@@ -1,113 +0,0 @@
from model.platform.qqchan import QQChan, NakuruGuildMember, NakuruGuildMessage
from model.platform.qq import QQ
from model.provider.provider import Provider
from addons.dashboard.server import DashBoardData
from nakuru import (
    CQHTTP,
    GroupMessage,
    GroupMemberIncrease,
    FriendMessage,
    GuildMessage,
    Notify
)
from typing import Union


class GlobalObject:
    '''
    存放一些公用的数据,用于在不同模块(如core与command)之间传递
    '''
    nick: str  # gocq 的昵称
    base_config: dict  # config.yaml
    cached_plugins: dict  # 缓存的插件
    web_search: bool  # 是否开启了网页搜索
    reply_prefix: str
    admin_qq: str
    admin_qqchan: str
    uniqueSession: bool
    cnt_total: int
    platform_qq: QQ
    platform_qqchan: QQChan
    default_personality: dict
    dashboard_data: DashBoardData
    stat: dict

    def __init__(self):
        self.nick = None  # gocq 的昵称
        self.base_config = None  # config.yaml
        self.cached_plugins = {}  # 缓存的插件
        self.web_search = False  # 是否开启了网页搜索
        self.reply_prefix = None
        self.admin_qq = "123456"
        self.admin_qqchan = "123456"
        self.uniqueSession = False
        self.cnt_total = 0
        self.platform_qq = None
        self.platform_qqchan = None
        self.default_personality = None
        self.dashboard_data = None
        self.stat = {}
        '''
        {
            "config": {},
            "session": [
                {
                    "platform": "qq",
                    "session_id": 123456,
                    "cnt": 0
                },
                {...}
            ],
            "message": [
                // 以一小时为单位
                {
                    "ts": 1234567,
                    "cnt": 0
                }
            ]
        }
        '''


class AstrMessageEvent():
    message_str: str  # 纯消息字符串
    message_obj: Union[GroupMessage, FriendMessage, GuildMessage, NakuruGuildMessage]  # 消息对象
    gocq_platform: QQ
    qq_sdk_platform: QQChan
    platform: str  # `gocq` 或 `qqchan`
    role: str  # `admin` 或 `member`
    global_object: GlobalObject  # 一些公用数据
    session_id: int  # 会话id (可能是群id,也可能是某个user的id。取决于是否开启了 uniqueSession)

    def __init__(self, message_str: str,
                 message_obj: Union[GroupMessage, FriendMessage, GuildMessage, NakuruGuildMessage],
                 gocq_platform: QQ,
                 qq_sdk_platform: QQChan,
                 platform: str,
                 role: str,
                 global_object: GlobalObject,
                 llm_provider: Provider = None,
                 session_id: int = None):
        self.message_str = message_str
        self.message_obj = message_obj
        self.gocq_platform = gocq_platform
        self.qq_sdk_platform = qq_sdk_platform
        self.platform = platform
        self.role = role
        self.global_object = global_object
        self.llm_provider = llm_provider
        self.session_id = session_id


class CommandResult():
    '''
    用于在Command中返回多个值
    '''
    def __init__(self, hit: bool, success: bool, message_chain: list, command_name: str = "unknown_command") -> None:
        self.hit = hit
        self.success = success
        self.message_chain = message_chain
        self.command_name = command_name

    def _result_tuple(self):
        return (self.success, self.message_chain, self.command_name)
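`CommandResult` is the newer return type for plugin `run()` methods; `check_command` in the command.py hunk further down reads `result.hit` and then unpacks `_result_tuple()` into `(success, message_chain, command_name)`. A hedged sketch of a plugin using it; `HelloPlugin` and its trigger word are invented for illustration:

```python
from nakuru.entities.components import Plain

class HelloPlugin:
    '''Hypothetical plugin; only the run() contract matters here.'''
    def run(self, ame):  # ame: AstrMessageEvent
        if ame.message_str.strip() == "hello":
            return CommandResult(
                hit=True,
                success=True,
                message_chain=[Plain("Hi from HelloPlugin~")],
                command_name="hello",
            )
        return CommandResult(hit=False, success=False, message_chain=[])

# check_command then does roughly:
#   hit = result.hit
#   success, chain, name = result._result_tuple()
```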
193 main.py
@@ -1,120 +1,107 @@
|
||||
import os, sys
|
||||
from pip._internal import main as pipmain
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
import traceback
|
||||
import threading
|
||||
import logging
|
||||
from logging import Formatter, Logger
|
||||
from util.cmd_config import CmdConfig, try_migrate_config
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
abs_path = os.path.dirname(os.path.realpath(sys.argv[0])) + '/'
|
||||
|
||||
def main():
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
||||
datefmt='%H:%M:%S',
|
||||
)
|
||||
# config.yaml 配置文件加载和环境确认
|
||||
try:
|
||||
import cores.qqbot.core as qqBot
|
||||
import yaml
|
||||
from yaml.scanner import ScannerError
|
||||
import util.general_utils as gu
|
||||
ymlfile = open(abs_path+"configs/config.yaml", 'r', encoding='utf-8')
|
||||
cfg = yaml.safe_load(ymlfile)
|
||||
except ImportError as import_error:
|
||||
print(import_error)
|
||||
input("第三方库未完全安装完毕,请退出程序重试。")
|
||||
except FileNotFoundError as file_not_found:
|
||||
print(file_not_found)
|
||||
input("配置文件不存在,请检查是否已经下载配置文件。")
|
||||
except ScannerError as e:
|
||||
print(traceback.format_exc())
|
||||
input("config.yaml 配置文件格式错误,请遵守 yaml 格式。")
|
||||
logger: Logger = None
|
||||
logo_tmpl = """
|
||||
___ _______.___________..______ .______ ______ .___________.
|
||||
/ \ / | || _ \ | _ \ / __ \ | |
|
||||
/ ^ \ | (----`---| |----`| |_) | | |_) | | | | | `---| |----`
|
||||
/ /_\ \ \ \ | | | / | _ < | | | | | |
|
||||
/ _____ \ .----) | | | | |\ \----.| |_) | | `--' | | |
|
||||
/__/ \__\ |_______/ |__| | _| `._____||______/ \______/ |__|
|
||||
|
||||
"""
|
||||
|
||||
# 设置代理
|
||||
if 'http_proxy' in cfg and cfg['http_proxy'] != '':
|
||||
os.environ['HTTP_PROXY'] = cfg['http_proxy']
|
||||
if 'https_proxy' in cfg and cfg['https_proxy'] != '':
|
||||
os.environ['HTTPS_PROXY'] = cfg['https_proxy']
|
||||
def make_necessary_dirs():
|
||||
'''
|
||||
创建必要的目录。
|
||||
'''
|
||||
os.makedirs("data/config", exist_ok=True)
|
||||
os.makedirs("temp", exist_ok=True)
|
||||
|
||||
def update_dept():
|
||||
'''
|
||||
更新依赖库。
|
||||
'''
|
||||
# 获取 Python 可执行文件路径
|
||||
py = sys.executable
|
||||
requirements_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "requirements.txt")
|
||||
print(requirements_path)
|
||||
# 更新依赖库
|
||||
mirror = "https://mirrors.aliyun.com/pypi/simple/"
|
||||
os.system(f"{py} -m pip install -r {requirements_path} -i {mirror}")
|
||||
|
||||
os.environ['NO_PROXY'] = 'cn.bing.com,https://api.sgroup.qq.com'
|
||||
|
||||
# 检查并创建 temp 文件夹
|
||||
if not os.path.exists(abs_path + "temp"):
|
||||
os.mkdir(abs_path+"temp")
|
||||
def main():
|
||||
try:
|
||||
import botpy, logging
|
||||
import astrbot.core as bot_core
|
||||
# delete qqbotpy's logger
|
||||
for handler in logging.root.handlers[:]:
|
||||
logging.root.removeHandler(handler)
|
||||
except ImportError as import_error:
|
||||
logger.error(import_error)
|
||||
logger.error("检测到一些依赖库没有安装。由于兼容性问题,AstrBot 此版本将不会自动为您安装依赖库。请您先自行安装,然后重试。")
|
||||
logger.info("如何安装?如果:")
|
||||
logger.info("- Windows 启动器部署且使用启动器下载了 Python的:在 launcher.exe 所在目录下的地址框输入 powershell,然后执行 .\python\python.exe -m pip install .\AstrBot\requirements.txt")
|
||||
logger.info("- Windows 启动器部署且使用自己之前下载的 Python的:在 launcher.exe 所在目录下的地址框输入 powershell,然后执行 python -m pip install .\AstrBot\requirements.txt")
|
||||
logger.info("- 自行 clone 源码部署的:python -m pip install -r requirements.txt")
|
||||
logger.info("- 如果还不会,加群 322154837 ")
|
||||
input("按任意键退出。")
|
||||
exit()
|
||||
except FileNotFoundError as file_not_found:
|
||||
logger.error(file_not_found)
|
||||
input("配置文件不存在,请检查是否已经下载配置文件。")
|
||||
exit()
|
||||
except BaseException as e:
|
||||
logger.error(traceback.format_exc())
|
||||
input("未知错误。")
|
||||
exit()
|
||||
|
||||
# 启动主程序(cores/qqbot/core.py)
|
||||
qqBot.initBot(cfg)
|
||||
bot_core.init()
|
||||
|
||||
def check_env(ch_mirror=False):
|
||||
|
||||
def check_env():
|
||||
if not (sys.version_info.major == 3 and sys.version_info.minor >= 9):
|
||||
print("请使用Python3.9+运行本项目")
|
||||
input("按任意键退出...")
|
||||
logger.error("请使用 Python3.9+ 运行本项目。按任意键退出。")
|
||||
input("")
|
||||
exit()
|
||||
|
||||
if os.path.exists('requirements.txt'):
|
||||
pth = 'requirements.txt'
|
||||
else:
|
||||
pth = 'QQChannelChatGPT'+ os.sep +'requirements.txt'
|
||||
print("正在检查更新第三方库...")
|
||||
try:
|
||||
if ch_mirror:
|
||||
print("使用阿里云镜像")
|
||||
pipmain(['install', '-r', pth, '-i', 'https://mirrors.aliyun.com/pypi/simple/', '--quiet'])
|
||||
else:
|
||||
pipmain(['install', '-r', pth, '--quiet'])
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
while True:
|
||||
res = input("安装失败。\n如报错ValueError: check_hostname requires server_hostname,请尝试先关闭代理后重试。\n1.输入y回车重试\n2. 输入c回车使用国内镜像源下载\n3. 输入其他按键回车继续往下执行。")
|
||||
if res == "y":
|
||||
try:
|
||||
pipmain(['install', '-r', pth])
|
||||
break
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
continue
|
||||
elif res == "c":
|
||||
try:
|
||||
pipmain(['install', '-r', pth, '-i', 'https://mirrors.aliyun.com/pypi/simple/'])
|
||||
break
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
continue
|
||||
else:
|
||||
break
|
||||
print("第三方库检查完毕。")
|
||||
|
||||
def get_platform():
|
||||
import platform
|
||||
sys_platform = platform.platform().lower()
|
||||
if "windows" in sys_platform:
|
||||
return "win"
|
||||
elif "macos" in sys_platform:
|
||||
return "mac"
|
||||
elif "linux" in sys_platform:
|
||||
return "linux"
|
||||
else:
|
||||
print("other")
|
||||
|
||||
if __name__ == "__main__":
|
||||
args = sys.argv
|
||||
|
||||
if '-cn' in args:
|
||||
check_env(True)
|
||||
else:
|
||||
check_env()
|
||||
|
||||
if '-replit' in args:
|
||||
print("[System] 启动Replit Web保活服务...")
|
||||
try:
|
||||
from util.webapp_replit import keep_alive
|
||||
keep_alive()
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
print(f"[System-err] Replit Web保活服务启动失败:{str(e)}")
|
||||
update_dept()
|
||||
make_necessary_dirs()
|
||||
try_migrate_config()
|
||||
cc = CmdConfig()
|
||||
http_proxy = cc.get("http_proxy")
|
||||
https_proxy = cc.get("https_proxy")
|
||||
if http_proxy:
|
||||
os.environ['HTTP_PROXY'] = http_proxy
|
||||
if https_proxy:
|
||||
os.environ['HTTPS_PROXY'] = https_proxy
|
||||
os.environ['NO_PROXY'] = 'https://api.sgroup.qq.com'
|
||||
|
||||
t = threading.Thread(target=main, daemon=False)
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
logger = LogManager.GetLogger(
|
||||
log_name='astrbot-core',
|
||||
out_to_console=True,
|
||||
custom_formatter=Formatter('[%(asctime)s| %(name)s - %(levelname)s|%(filename)s:%(lineno)d]: %(message)s', datefmt="%H:%M:%S")
|
||||
)
|
||||
logger.info(logo_tmpl)
|
||||
logger.info(f"使用代理: {http_proxy}, {https_proxy}")
|
||||
|
||||
check_env()
|
||||
t = threading.Thread(target=main, daemon=True)
|
||||
t.start()
|
||||
t.join()
|
||||
try:
|
||||
t.join()
|
||||
except KeyboardInterrupt as e:
|
||||
logger.info("退出 AstrBot。")
|
||||
exit()
|
||||
|
||||
@@ -1,140 +1,155 @@
|
||||
import json
|
||||
from util import general_utils as gu
|
||||
has_git = True
|
||||
try:
|
||||
import git.exc
|
||||
from git.repo import Repo
|
||||
except BaseException as e:
|
||||
gu.log("你正运行在无Git环境下,暂时将无法使用插件、热更新功能。")
|
||||
has_git = False
|
||||
import os
|
||||
import sys
|
||||
import requests
|
||||
from model.provider.provider import Provider
|
||||
import inspect
|
||||
import aiohttp
|
||||
import asyncio
|
||||
import json
|
||||
|
||||
import util.plugin_util as putil
|
||||
import shutil
|
||||
import importlib
|
||||
from util.cmd_config import CmdConfig as cc
|
||||
from model.platform.qq import QQ
|
||||
import stat
|
||||
import util.updator
|
||||
|
||||
from nakuru.entities.components import (
|
||||
Plain,
|
||||
Image
|
||||
)
|
||||
from PIL import Image as PILImage
|
||||
from cores.qqbot.global_object import GlobalObject, AstrMessageEvent
|
||||
from pip._internal import main as pipmain
|
||||
from cores.qqbot.global_object import CommandResult
|
||||
from util import general_utils as gu
|
||||
from util.image_render.helper import text_to_image_base
|
||||
from model.provider.provider import Provider
|
||||
from util.cmd_config import CmdConfig as cc
|
||||
from type.message import *
|
||||
from type.types import GlobalObject
|
||||
from type.command import *
|
||||
from type.plugin import *
|
||||
from type.register import *
|
||||
|
||||
from typing import List
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
PLATFORM_QQCHAN = 'qqchan'
|
||||
PLATFORM_GOCQ = 'gocq'
|
||||
|
||||
# 指令功能的基类,通用的(不区分语言模型)的指令就在这实现
|
||||
|
||||
|
||||
class Command:
|
||||
def __init__(self, provider: Provider, global_object: GlobalObject = None):
|
||||
self.provider = provider
|
||||
self.global_object = global_object
|
||||
|
||||
def check_command(self,
|
||||
message,
|
||||
session_id: str,
|
||||
role,
|
||||
platform,
|
||||
message_obj):
|
||||
async def check_command(self,
|
||||
message,
|
||||
session_id: str,
|
||||
role: str,
|
||||
platform: RegisteredPlatform,
|
||||
message_obj):
|
||||
self.platform = platform
|
||||
# 插件
|
||||
cached_plugins = self.global_object.cached_plugins
|
||||
# 将消息封装成 AstrMessageEvent 对象
|
||||
ame = AstrMessageEvent(
|
||||
message_str=message,
|
||||
message_obj=message_obj,
|
||||
gocq_platform=self.global_object.platform_qq,
|
||||
qq_sdk_platform=self.global_object.platform_qqchan,
|
||||
platform=platform,
|
||||
role=role,
|
||||
global_object=self.global_object,
|
||||
session_id = session_id
|
||||
context=self.global_object,
|
||||
session_id=session_id
|
||||
)
|
||||
for k, v in cached_plugins.items():
|
||||
# 从已启动的插件中查找是否有匹配的指令
|
||||
for plugin in cached_plugins:
|
||||
# 过滤掉平台类插件
|
||||
if plugin.metadata.plugin_type == PluginType.PLATFORM:
|
||||
continue
|
||||
try:
|
||||
result = v["clsobj"].run(ame)
|
||||
if inspect.iscoroutinefunction(plugin.plugin_instance.run):
|
||||
result = await plugin.plugin_instance.run(ame)
|
||||
else:
|
||||
result = await asyncio.to_thread(plugin.plugin_instance.run, ame)
|
||||
if not result:
|
||||
continue
|
||||
if isinstance(result, CommandResult):
|
||||
hit = result.hit
|
||||
res = result._result_tuple()
|
||||
print(hit, res)
|
||||
elif isinstance(result, tuple):
|
||||
hit = result[0]
|
||||
res = result[1]
|
||||
else:
|
||||
raise TypeError("插件返回值格式错误。")
|
||||
if hit:
|
||||
plugin.trig()
|
||||
logger.debug("hit plugin: " + plugin.metadata.plugin_name)
|
||||
return True, res
|
||||
except TypeError as e:
|
||||
# 参数不匹配,尝试使用旧的参数方案
|
||||
try:
|
||||
hit, res = v["clsobj"].run(message, role, platform, message_obj, self.global_object.platform_qq)
|
||||
if inspect.iscoroutinefunction(plugin.plugin_instance.run):
|
||||
hit, res = await plugin.plugin_instance.run(message, role, platform, message_obj, self.global_object.platform_qq)
|
||||
else:
|
||||
hit, res = await asyncio.to_thread(plugin.plugin_instance.run, message, role, platform, message_obj, self.global_object.platform_qq)
|
||||
if hit:
|
||||
return True, res
|
||||
except BaseException as e:
|
||||
gu.log(f"{k}插件异常,原因: {str(e)}\n已安装插件: {cached_plugins.keys}\n如果你没有相关装插件的想法, 请直接忽略此报错, 不影响其他功能的运行。", level=gu.LEVEL_WARNING)
|
||||
logger.error(
|
||||
f"{plugin.metadata.plugin_name} 插件异常,原因: {str(e)}\n如果你没有相关装插件的想法, 请直接忽略此报错, 不影响其他功能的运行。")
|
||||
except BaseException as e:
|
||||
gu.log(f"{k} 插件异常,原因: {str(e)}\n已安装插件: {cached_plugins.keys}\n如果你没有相关装插件的想法, 请直接忽略此报错, 不影响其他功能的运行。", level=gu.LEVEL_WARNING)
|
||||
logger.error(
|
||||
f"{plugin.metadata.plugin_name} 插件异常,原因: {str(e)}\n如果你没有相关装插件的想法, 请直接忽略此报错, 不影响其他功能的运行。")
|
||||
|
||||
if self.command_start_with(message, "nick"):
|
||||
return True, self.set_nick(message, platform, role)
|
||||
if self.command_start_with(message, "plugin"):
|
||||
return True, self.plugin_oper(message, role, cached_plugins, platform)
|
||||
return True, await self.plugin_oper(message, role, self.global_object, platform)
|
||||
if self.command_start_with(message, "myid") or self.command_start_with(message, "!myid"):
|
||||
return True, self.get_my_id(message_obj)
|
||||
if self.command_start_with(message, "nconf") or self.command_start_with(message, "newconf"):
|
||||
return True, self.get_new_conf(message, role)
|
||||
if self.command_start_with(message, "web"): # 网页搜索
|
||||
return True, self.get_my_id(message_obj, platform)
|
||||
if self.command_start_with(message, "web"): # 网页搜索
|
||||
return True, self.web_search(message)
|
||||
if self.command_start_with(message, "keyword"):
|
||||
return True, self.keyword(message_obj, role)
|
||||
if self.command_start_with(message, "ip"):
|
||||
ip = requests.get("https://myip.ipip.net", timeout=5).text
|
||||
return True, f"机器人 IP 信息:{ip}", "ip"
|
||||
|
||||
|
||||
if self.command_start_with(message, "update"):
|
||||
return True, self.update(message, role)
|
||||
if message == "t2i":
|
||||
return True, "t2i", self.t2i_toggle(message, role)
|
||||
if not self.provider and message == "help":
|
||||
return True, await self.help()
|
||||
|
||||
return False, None
|
||||
|
||||
|
||||
def web_search(self, message):
|
||||
if message == "web on":
|
||||
l = message.split(' ')
|
||||
if len(l) == 1:
|
||||
return True, f"网页搜索功能当前状态: {self.global_object.web_search}", "web"
|
||||
elif l[1] == 'on':
|
||||
self.global_object.web_search = True
|
||||
return True, "已开启网页搜索", "web"
|
||||
elif message == "web off":
|
||||
elif l[1] == 'off':
|
||||
self.global_object.web_search = False
|
||||
return True, "已关闭网页搜索", "web"
|
||||
return True, f"网页搜索功能当前状态: {self.global_object.web_search}", "web"
|
||||
|
||||
def t2i_toggle(self, message, role):
|
||||
p = cc.get("qq_pic_mode", True)
|
||||
if p:
|
||||
cc.put("qq_pic_mode", False)
|
||||
return True, "已关闭文本转图片模式。", "t2i"
|
||||
cc.put("qq_pic_mode", True)
|
||||
return True, "已开启文本转图片模式。", "t2i"
|
||||
|
||||
def get_my_id(self, message_obj):
|
||||
return True, f"你的ID:{str(message_obj.sender.tiny_id)}", "plugin"
|
||||
|
||||
def get_new_conf(self, message, role):
|
||||
if role != "admin":
|
||||
return False, f"你的身份组{role}没有权限使用此指令。", "newconf"
|
||||
l = message.split(" ")
|
||||
if len(l) <= 1:
|
||||
obj = cc.get_all()
|
||||
p = gu.create_text_image("【cmd_config.json】", json.dumps(obj, indent=4, ensure_ascii=False))
|
||||
return True, [Image.fromFileSystem(p)], "newconf"
|
||||
|
||||
'''
|
||||
插件指令
|
||||
'''
|
||||
def plugin_oper(self, message: str, role: str, cached_plugins: dict, platform: str):
|
||||
if not has_git:
|
||||
return False, "你正在运行在无Git环境下,暂时将无法使用插件、热更新功能。", "plugin"
|
||||
def get_my_id(self, message_obj, platform):
|
||||
try:
|
||||
user_id = str(message_obj.sender.user_id)
|
||||
return True, f"你在此平台上的ID:{user_id}", "plugin"
|
||||
except BaseException as e:
|
||||
return False, f"在{platform}上获取你的ID失败,原因: {str(e)}", "plugin"
|
||||
|
||||
async def plugin_oper(self, message: str, role: str, ctx: GlobalObject, platform: str):
|
||||
l = message.split(" ")
|
||||
if len(l) < 2:
|
||||
p = gu.create_text_image("【插件指令面板】", "安装插件: \nplugin i 插件Github地址\n卸载插件: \nplugin d 插件名 \n重载插件: \nplugin reload\n查看插件列表:\nplugin l\n更新插件: plugin u 插件名\n")
|
||||
return True, [Image.fromFileSystem(p)], "plugin"
|
||||
p = await text_to_image_base("# 插件指令面板 \n- 安装插件: `plugin i 插件Github地址`\n- 卸载插件: `plugin d 插件名`\n- 重载插件: `plugin reload`\n- 查看插件列表:`plugin l`\n - 更新插件: `plugin u 插件名`\n")
|
||||
with open(p, 'rb') as f:
|
||||
return True, [Image.fromBytes(f.read())], "plugin"
|
||||
else:
|
||||
if l[1] == "i":
|
||||
if role != "admin":
|
||||
return False, f"你的身份组{role}没有权限安装插件", "plugin"
|
||||
try:
|
||||
putil.install_plugin(l[2], cached_plugins)
|
||||
putil.install_plugin(l[2], )
|
||||
return True, "插件拉取并载入成功~", "plugin"
|
||||
except BaseException as e:
|
||||
return False, f"拉取插件失败,原因: {str(e)}", "plugin"
|
||||
@@ -142,69 +157,50 @@ class Command:
|
||||
if role != "admin":
|
||||
return False, f"你的身份组{role}没有权限删除插件", "plugin"
|
||||
try:
|
||||
putil.uninstall_plugin(l[2], cached_plugins)
|
||||
putil.uninstall_plugin(l[2], ctx)
|
||||
return True, "插件卸载成功~", "plugin"
|
||||
except BaseException as e:
|
||||
return False, f"卸载插件失败,原因: {str(e)}", "plugin"
|
||||
elif l[1] == "u":
|
||||
try:
|
||||
putil.update_plugin(l[2], cached_plugins)
|
||||
putil.update_plugin(l[2], ctx)
|
||||
return True, "\n更新插件成功!!", "plugin"
|
||||
except BaseException as e:
|
||||
return False, f"更新插件失败,原因: {str(e)}。\n建议: 使用 plugin i 指令进行覆盖安装(插件数据可能会丢失)", "plugin"
|
||||
elif l[1] == "l":
|
||||
try:
|
||||
plugin_list_info = "\n".join([f"{k}: \n名称: {v['info']['name']}\n简介: {v['info']['desc']}\n版本: {v['info']['version']}\n作者: {v['info']['author']}\n" for k, v in cached_plugins.items()])
|
||||
p = gu.create_text_image("【已激活插件列表】", plugin_list_info + "\n使用plugin v 插件名 查看插件帮助\n")
|
||||
return True, [Image.fromFileSystem(p)], "plugin"
|
||||
plugin_list_info = ""
|
||||
for plugin in ctx.cached_plugins:
|
||||
plugin_list_info += f"### {plugin.metadata.plugin_name} \n- 名称: {plugin.metadata.plugin_name}\n- 简介: {plugin.metadata.desc}\n- 版本: {plugin.metadata.version}\n- 作者: {plugin.metadata.author}\n"
|
||||
p = await text_to_image_base(f"# 已激活的插件\n{plugin_list_info}\n> 使用plugin v 插件名 查看插件帮助\n")
|
||||
with open(p, 'rb') as f:
|
||||
return True, [Image.fromBytes(f.read())], "plugin"
|
||||
except BaseException as e:
|
||||
return False, f"获取插件列表失败,原因: {str(e)}", "plugin"
|
||||
elif l[1] == "v":
|
||||
try:
|
||||
if l[2] in cached_plugins:
|
||||
info = cached_plugins[l[2]]["info"]
|
||||
p = gu.create_text_image(f"【插件信息】", f"名称: {info['name']}\n{info['desc']}\n版本: {info['version']}\n作者: {info['author']}\n\n帮助:\n{info['help']}")
|
||||
return True, [Image.fromFileSystem(p)], "plugin"
|
||||
info = None
|
||||
for i in ctx.cached_plugins:
|
||||
if i.metadata.plugin_name == l[2]:
|
||||
info = i.metadata
|
||||
break
|
||||
if info:
|
||||
p = await text_to_image_base(f"# `{info.plugin_name}` 插件信息\n- 类型: {info.plugin_type}\n- 简介{info.desc}\n- 版本: {info.version}\n- 作者: {info.author}")
|
||||
with open(p, 'rb') as f:
|
||||
return True, [Image.fromBytes(f.read())], "plugin"
|
||||
else:
|
||||
return False, "未找到该插件", "plugin"
|
||||
except BaseException as e:
|
||||
return False, f"获取插件信息失败,原因: {str(e)}", "plugin"
|
||||
# elif l[1] == "reload":
|
||||
# if role != "admin":
|
||||
# return False, f"你的身份组{role}没有权限重载插件", "plugin"
|
||||
# for plugin in cached_plugins:
|
||||
# try:
|
||||
# print(f"更新插件 {plugin} 依赖...")
|
||||
# plugin_path = os.path.join(ppath, cached_plugins[plugin]["root_dir_name"])
|
||||
# if os.path.exists(os.path.join(plugin_path, "requirements.txt")):
|
||||
# mm = pipmain(['install', '-r', os.path.join(plugin_path, "requirements.txt"), "--quiet"])
|
||||
# if mm != 0:
|
||||
# return False, "插件依赖安装失败,需要您手动pip安装对应插件的依赖。", "plugin"
|
||||
# except BaseException as e:
|
||||
# print(f"插件{plugin}依赖安装失败,原因: {str(e)}")
|
||||
# try:
|
||||
# ok, err = self.plugin_reload(cached_plugins, all = True)
|
||||
# if ok:
|
||||
# return True, "\n重载插件成功~", "plugin"
|
||||
# else:
|
||||
# # if os.path.exists(plugin_path):
|
||||
# # shutil.rmtree(plugin_path)
|
||||
# return False, f"插件重载失败。\n跟踪: \n{err}", "plugin"
|
||||
# except BaseException as e:
|
||||
# return False, f"插件重载失败,原因: {str(e)}", "plugin"
|
||||
|
||||
elif l[1] == "dev":
|
||||
if role != "admin":
|
||||
return False, f"你的身份组{role}没有权限开发者模式", "plugin"
|
||||
return True, "cached_plugins: \n" + str(cached_plugins), "plugin"
|
||||
|
||||
'''
|
||||
nick: 存储机器人的昵称
|
||||
'''
|
||||
def set_nick(self, message: str, platform: str, role: str = "member"):
|
||||
|
||||
def set_nick(self, message: str, platform: RegisteredPlatform, role: str = "member"):
|
||||
if role != "admin":
|
||||
return True, "你无权使用该指令 :P", "nick"
|
||||
if platform == PLATFORM_GOCQ:
|
||||
if str(platform) == PLATFORM_GOCQ:
|
||||
l = message.split(" ")
|
||||
if len(l) == 1:
|
||||
return True, "【设置机器人昵称】示例:\n支持多昵称\nnick 昵称1 昵称2 昵称3", "nick"
|
||||
@@ -212,7 +208,7 @@ class Command:
|
||||
cc.put("nick_qq", nick)
|
||||
self.global_object.nick = tuple(nick)
|
||||
return True, f"设置成功!现在你可以叫我这些昵称来提问我啦~", "nick"
|
||||
elif platform == PLATFORM_QQCHAN:
|
||||
elif str(platform) == PLATFORM_QQCHAN:
|
||||
nick = message.split(" ")[2]
|
||||
return False, "QQ频道平台不支持为机器人设置昵称。", "nick"
|
||||
|
||||
@@ -220,212 +216,107 @@ class Command:
return {
"help": "帮助",
"keyword": "设置关键词/关键指令回复",
"update": "更新面板",
"update latest": "更新到最新版本",
"update r": "重启机器人",
"reset": "重置会话",
"nick": "设置机器人昵称",
"update": "更新项目",
"nick": "设置机器人唤醒词",
"plugin": "插件安装、卸载和重载",
"web on/off": "启动或关闭网页搜索能力",
"/bing": "切换到bing模型",
"/gpt": "切换到OpenAI ChatGPT API",
"/revgpt": "切换到网页版ChatGPT",
"web on/off": "LLM 网页搜索能力",
}

def help_messager(self, commands: dict, platform: str, cached_plugins: dict = None):

async def help_messager(self, commands: dict, platform: str, cached_plugins: List[RegisteredPlugin] = None):
try:
resp = requests.get("https://soulter.top/channelbot/notice.json").text
notice = json.loads(resp)["notice"]
async with aiohttp.ClientSession() as session:
async with session.get("https://soulter.top/channelbot/notice.json") as resp:
notice = (await resp.json())["notice"]
except BaseException as e:
notice = ""
msg = "# Help Center\n## 指令列表\n"
# msg = "Github项目名QQChannelChatGPT, 有问题提交issue, 欢迎Star\n【指令列表】\n"
msg = "## 指令列表\n"
for key, value in commands.items():
msg += f"`{key}` - {value}\n"
msg += f"- `{key}`: {value}\n"
# plugins
if cached_plugins != None:
plugin_list_info = "\n".join([f"`{k}` {v['info']['name']}\n{v['info']['desc']}\n" for k, v in cached_plugins.items()])
if plugin_list_info.strip() != "":
msg += "\n## 插件列表\n> 使用plugin v 插件名 查看插件帮助\n"
if cached_plugins:
plugin_list_info = ""
for plugin in cached_plugins:
plugin_list_info += f"- `{plugin.metadata.plugin_name}`: {plugin.metadata.desc}\n"
if plugin_list_info.strip():
msg += "\n## 插件列表\n> 使用 plugin v 插件名 查看插件帮助\n"
msg += plugin_list_info
msg += notice

try:
# p = gu.create_text_image("【Help Center】", msg)
p = gu.create_markdown_image(msg)
return [Image.fromFileSystem(p)]
p = await text_to_image_base(msg)
with open(p, 'rb') as f:
return [Image.fromBytes(f.read()),]
except BaseException as e:
gu.log(str(e))
finally:
logger.error(str(e))
return msg

# 接受可变参数

def command_start_with(self, message: str, *args):
'''
当消息以指定的指令开头时返回True
'''
for arg in args:
if message.startswith(arg) or message.startswith('/'+arg):
return True
return False
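A quick illustration of the prefix rule above (sketch only; `cmd` is an assumed, already-constructed Command instance):
# Both the bare form and the "/"-prefixed form of a listed command match:
cmd.command_start_with("plugin l", "plugin")      # True
cmd.command_start_with("/help", "help", "帮助")    # True  (leading "/" also matches)
cmd.command_start_with("hello", "help")           # False (no listed prefix matches)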
# keyword: 关键字
def keyword(self, message_obj, role: str):
if role != "admin":
return True, "你没有权限使用该指令", "keyword"

plain_text = ""
image_url = ""

for comp in message_obj.message:
if isinstance(comp, Plain):
plain_text += comp.text
elif isinstance(comp, Image) and image_url == "":
if comp.url is None:
image_url = comp.file
else:
image_url = comp.url

l = plain_text.split(" ")

if len(l) < 3 and image_url == "":
return True, """
【设置关键词回复】示例:
1. keyword hi 你好
当发送hi的时候会回复你好
2. keyword /hi 你好
当发送/hi时会回复你好
3. keyword d hi
删除hi关键词的回复
4. keyword hi <图片>
当发送hi时会回复图片
""", "keyword"

del_mode = False
if l[1] == "d":
del_mode = True

try:
if os.path.exists("keyword.json"):
with open("keyword.json", "r", encoding="utf-8") as f:
keyword = json.load(f)
if del_mode:
# 删除关键词
if l[2] not in keyword:
return False, "该关键词不存在", "keyword"
else: del keyword[l[2]]
else:
keyword[l[1]] = {
"plain_text": " ".join(l[2:]),
"image_url": image_url
}
else:
if del_mode:
return False, "该关键词不存在", "keyword"
keyword = {
l[1]: {
"plain_text": " ".join(l[2:]),
"image_url": image_url
}
}
with open("keyword.json", "w", encoding="utf-8") as f:
json.dump(keyword, f, ensure_ascii=False, indent=4)
f.flush()
if del_mode:
return True, "删除成功: "+l[2], "keyword"
if image_url == "":
return True, "设置成功: "+l[1]+" "+" ".join(l[2:]), "keyword"
else:
return True, [Plain("设置成功: "+l[1]+" "+" ".join(l[2:])), Image.fromURL(image_url)], "keyword"
except BaseException as e:
return False, "设置失败: "+str(e), "keyword"
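For reference, a minimal sketch of the structure the branch above writes to keyword.json (field names come from the code; the trigger and reply values here are made up):
# Hypothetical keyword.json contents after `keyword hi 你好` and `keyword morning <图片>`:
example_keyword_json = {
    "hi": {"plain_text": "你好", "image_url": ""},
    "morning": {"plain_text": "", "image_url": "https://example.com/pic.jpg"}
}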
def update(self, message: str, role: str):
if not has_git:
return False, "你正在运行在无Git环境下,暂时将无法使用插件、热更新功能。", "update"
if role != "admin":
return True, "你没有权限使用该指令", "keyword"
return True, "你没有权限使用该指令", "update"
l = message.split(" ")
try:
repo = Repo()
except git.exc.InvalidGitRepositoryError:
try:
repo = Repo(path="QQChannelChatGPT")
except git.exc.InvalidGitRepositoryError:
repo = Repo(path="AstrBot")
if len(l) == 1:
curr_branch = repo.active_branch.name
# 得到本地版本号和最新版本号
now_commit = repo.head.commit
# 得到远程3条commit列表, 包含commit信息
origin = repo.remotes.origin
origin.fetch()
commits = list(repo.iter_commits(curr_branch, max_count=3))
commits_log = ''
index = 1
for commit in commits:
if commit.message.endswith("\n"):
commits_log += f"[{index}] {commit.message}-----------\n"
else:
commits_log += f"[{index}] {commit.message}\n-----------\n"
index+=1
remote_commit_hash = origin.refs.master.commit.hexsha[:6]
return True, f"当前分支: {curr_branch}\n当前版本: {now_commit.hexsha[:6]}\n最新版本: {remote_commit_hash}\n\n3条commit(非最新):\n{str(commits_log)}\nTips:\n1. 使用 update latest 更新至最新版本;\n2. 使用 update checkout <分支名> 切换代码分支。", "update"
try:
update_info = util.updator.check_update()
update_info += "\n> Tips: 输入「update latest」更新到最新版本,输入「update <版本号如v3.1.3>」切换到指定版本,输入「update r」重启机器人\n"
return True, update_info, "update"
except BaseException as e:
return False, "检查更新失败: "+str(e), "update"
else:
if l[1] == "latest":
try:
origin = repo.remotes.origin
origin.fetch()
commits = list(repo.iter_commits('master', max_count=1))
commit_log = commits[0].message
tag = "update"
if len(l) == 3 and l[2] == "r":
tag = "update latest r"
return True, f"更新成功。新版本内容: \n{commit_log}\nps:重启后生效。输入update r重启(重启指令不返回任何确认信息)。", tag
release_data = util.updator.request_release_info()
util.updator.update_project(release_data)
return True, "更新成功,重启生效。可输入「update r」重启", "update"
except BaseException as e:
return False, "更新失败: "+str(e), "update"
if l[1] == "r":
py = sys.executable
os.execl(py, py, *sys.argv)
if l[1] == 'checkout':
# 切换分支
if len(l) < 3:
return False, "请提供分支名,如 /update checkout dev_dashboard", "update"
try:
origin = repo.remotes.origin
origin.fetch()
repo.git.checkout(l[2])

# 获得最新的 commit
commits = list(repo.iter_commits(max_count=1))
commit_log = commits[0].message

return True, f"切换分支成功,机器人将在 5 秒内重新启动以应用新的功能。\n当前分支: {l[2]}\n此分支最近更新: \n{commit_log}", "update latest r"
except BaseException as e:
return False, f"切换分支失败。原因: {str(e)}", "update"
elif l[1] == "r":
util.updator._reboot()
else:
if l[1].lower().startswith('v'):
try:
release_data = util.updator.request_release_info(
latest=False)
util.updator.update_project(
release_data, latest=False, version=l[1])
return True, "更新成功,重启生效。可输入「update r」重启", "update"
except BaseException as e:
return False, "更新失败: "+str(e), "update"
else:
return False, "版本号格式错误", "update"
def reset(self):
return False

def set(self):
return False

def unset(self):
return False

def key(self):
return False

def help(self):
return False

async def help(self):
ret = await self.help_messager(self.general_commands(), self.platform, self.global_object.cached_plugins)
return True, ret, "help"

def status(self):
return False

def token(self):
return False

def his(self):
return False

def draw(self):
return False
@@ -1,50 +1,56 @@
|
||||
from model.command.command import Command
|
||||
from model.provider.openai_official import ProviderOpenAIOfficial
|
||||
from cores.qqbot.personality import personalities
|
||||
from model.provider.openai_official import ProviderOpenAIOfficial, MODELS
|
||||
from util.personality import personalities
|
||||
from type.types import GlobalObject
|
||||
from type.command import CommandItem
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from openai._exceptions import NotFoundError
|
||||
|
||||
from model.platform.qq import QQ
|
||||
from util import general_utils as gu
|
||||
from cores.qqbot.global_object import GlobalObject
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
class CommandOpenAIOfficial(Command):
|
||||
def __init__(self, provider: ProviderOpenAIOfficial, global_object: GlobalObject):
|
||||
self.provider = provider
|
||||
self.cached_plugins = {}
|
||||
self.global_object = global_object
|
||||
self.personality_str = ""
|
||||
self.commands = [
|
||||
CommandItem("reset", self.reset, "重置 LLM 会话。", "内置"),
|
||||
CommandItem("his", self.his, "查看与 LLM 的历史记录。", "内置"),
|
||||
CommandItem("status", self.status, "查看 GPT 配置信息和用量状态。", "内置"),
|
||||
]
|
||||
super().__init__(provider, global_object)
|
||||
|
||||
def check_command(self,
|
||||
message: str,
|
||||
session_id: str,
|
||||
role: str,
|
||||
platform: str,
|
||||
message_obj):
|
||||
|
||||
async def check_command(self,
|
||||
message: str,
|
||||
session_id: str,
|
||||
role: str,
|
||||
platform: str,
|
||||
message_obj):
|
||||
self.platform = platform
|
||||
hit, res = super().check_command(
|
||||
|
||||
# 检查基础指令
|
||||
hit, res = await super().check_command(
|
||||
message,
|
||||
session_id,
|
||||
role,
|
||||
platform,
|
||||
message_obj
|
||||
)
|
||||
|
||||
|
||||
logger.debug(f"基础指令hit: {hit}, res: {res}")
|
||||
|
||||
# 这里是这个 LLM 的专属指令
|
||||
if hit:
|
||||
return True, res
|
||||
if self.command_start_with(message, "reset", "重置"):
|
||||
return True, self.reset(session_id, message)
|
||||
return True, await self.reset(session_id, message)
|
||||
elif self.command_start_with(message, "his", "历史"):
|
||||
return True, self.his(message, session_id)
|
||||
elif self.command_start_with(message, "token"):
|
||||
return True, self.token(session_id)
|
||||
elif self.command_start_with(message, "gpt"):
|
||||
return True, self.gpt()
|
||||
elif self.command_start_with(message, "status"):
|
||||
return True, self.status()
|
||||
elif self.command_start_with(message, "count"):
|
||||
return True, self.count()
|
||||
return True, self.status(session_id)
|
||||
elif self.command_start_with(message, "help", "帮助"):
|
||||
return True, self.help()
|
||||
return True, await self.help()
|
||||
elif self.command_start_with(message, "unset"):
|
||||
return True, self.unset(session_id)
|
||||
elif self.command_start_with(message, "set"):
|
||||
@@ -52,109 +58,112 @@ class CommandOpenAIOfficial(Command):
|
||||
elif self.command_start_with(message, "update"):
|
||||
return True, self.update(message, role)
|
||||
elif self.command_start_with(message, "画", "draw"):
|
||||
return True, self.draw(message)
|
||||
elif self.command_start_with(message, "key"):
|
||||
return True, self.key(message)
|
||||
return True, await self.draw(message)
|
||||
elif self.command_start_with(message, "switch"):
|
||||
return True, self.switch(message)
|
||||
|
||||
return True, await self.switch(message)
|
||||
elif self.command_start_with(message, "models"):
|
||||
return True, await self.print_models()
|
||||
elif self.command_start_with(message, "model"):
|
||||
return True, await self.set_model(message)
|
||||
return False, None
|
||||
|
||||
def help(self):
|
||||
commands = super().general_commands()
|
||||
commands['画'] = '画画'
|
||||
commands['key'] = '添加OpenAI key'
|
||||
commands['set'] = '人格设置面板'
|
||||
commands['gpt'] = '查看gpt配置信息'
|
||||
commands['status'] = '查看key使用状态'
|
||||
commands['token'] = '查看本轮会话token'
|
||||
return True, super().help_messager(commands, self.platform, self.global_object.cached_plugins), "help"
|
||||
async def get_models(self):
|
||||
try:
|
||||
models = await self.provider.client.models.list()
|
||||
except NotFoundError as e:
|
||||
bu = str(self.provider.client.base_url)
|
||||
self.provider.client.base_url = bu + "/v1"
|
||||
models = await self.provider.client.models.list()
|
||||
finally:
|
||||
return filter(lambda x: x.id.startswith("gpt"), models.data)
|
||||
|
||||
async def print_models(self):
|
||||
models = await self.get_models()
|
||||
i = 1
|
||||
ret = "OpenAI GPT 类可用模型"
|
||||
for model in models:
|
||||
ret += f"\n{i}. {model.id}"
|
||||
i += 1
|
||||
ret += "\nTips: 使用 /model 模型名/编号,即可实时更换模型。如目标模型不存在于上表,请输入模型名。"
|
||||
logger.debug(ret)
|
||||
return True, ret, "models"
|
||||
|
||||
|
||||
async def set_model(self, message: str):
|
||||
l = message.split(" ")
|
||||
if len(l) == 1:
|
||||
return True, "请输入 /model 模型名/编号", "model"
|
||||
model = str(l[1])
|
||||
if model.isdigit():
|
||||
models = await self.get_models()
|
||||
models = list(models)
|
||||
if int(model) <= len(models) and int(model) >= 1:
|
||||
model = models[int(model)-1]
|
||||
self.provider.set_model(model.id)
|
||||
return True, f"模型已设置为 {model.id}", "model"
|
||||
else:
|
||||
self.provider.set_model(model)
|
||||
return True, f"模型已设置为 {model} (自定义)", "model"
|
||||
|
||||
|
||||
def reset(self, session_id: str, message: str = "reset"):
|
||||
async def help(self):
|
||||
commands = super().general_commands()
|
||||
commands['画'] = '调用 OpenAI DallE 模型生成图片'
|
||||
commands['/set'] = '人格设置面板'
|
||||
commands['/status'] = '查看 Api Key 状态和配置信息'
|
||||
commands['/token'] = '查看本轮会话 token'
|
||||
commands['/reset'] = '重置当前与 LLM 的会话,但保留人格(system prompt)'
|
||||
commands['/reset p'] = '重置当前与 LLM 的会话,并清除人格。'
|
||||
commands['/models'] = '获取当前可用的模型'
|
||||
commands['/model'] = '更换模型'
|
||||
|
||||
return True, await super().help_messager(commands, self.platform, self.global_object.cached_plugins), "help"
|
||||
|
||||
async def reset(self, session_id: str, message: str = "reset"):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型.", "reset"
|
||||
return False, "未启用 OpenAI 官方 API", "reset"
|
||||
l = message.split(" ")
|
||||
if len(l) == 1:
|
||||
self.provider.forget(session_id)
|
||||
await self.provider.forget(session_id, keep_system_prompt=True)
|
||||
return True, "重置成功", "reset"
|
||||
if len(l) == 2 and l[1] == "p":
|
||||
self.provider.forget(session_id)
|
||||
if self.personality_str != "":
|
||||
self.set(self.personality_str, session_id) # 重新设置人格
|
||||
return True, "重置成功", "reset"
|
||||
|
||||
await self.provider.forget(session_id)
|
||||
|
||||
def his(self, message: str, session_id: str):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型.", "his"
|
||||
#分页,每页5条
|
||||
msg = ''
|
||||
return False, "未启用 OpenAI 官方 API", "his"
|
||||
size_per_page = 3
|
||||
page = 1
|
||||
if message[4:]:
|
||||
page = int(message[4:])
|
||||
# 检查是否有过历史记录
|
||||
if session_id not in self.provider.session_dict:
|
||||
msg = f"历史记录为空"
|
||||
return True, msg, "his"
|
||||
l = self.provider.session_dict[session_id]
|
||||
max_page = len(l)//size_per_page + 1 if len(l)%size_per_page != 0 else len(l)//size_per_page
|
||||
p = self.provider.get_prompts_by_cache_list(self.provider.session_dict[session_id], divide=True, paging=True, size=size_per_page, page=page)
|
||||
return True, f"历史记录如下:\n{p}\n第{page}页 | 共{max_page}页\n*输入/his 2跳转到第2页", "his"
|
||||
|
||||
def token(self, session_id: str):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型.", "token"
|
||||
return True, f"会话的token数: {self.provider.get_user_usage_tokens(self.provider.session_dict[session_id])}\n系统最大缓存token数: {self.provider.max_tokens}", "token"
|
||||
|
||||
def gpt(self):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型.", "gpt"
|
||||
return True, f"OpenAI GPT配置:\n {self.provider.chatGPT_configs}", "gpt"
|
||||
|
||||
def status(self):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型.", "status"
|
||||
chatgpt_cfg_str = ""
|
||||
key_stat = self.provider.get_key_stat()
|
||||
index = 1
|
||||
max = 9000000
|
||||
gg_count = 0
|
||||
total = 0
|
||||
tag = ''
|
||||
for key in key_stat.keys():
|
||||
sponsor = ''
|
||||
total += key_stat[key]['used']
|
||||
if key_stat[key]['exceed']:
|
||||
gg_count += 1
|
||||
continue
|
||||
if 'sponsor' in key_stat[key]:
|
||||
sponsor = key_stat[key]['sponsor']
|
||||
chatgpt_cfg_str += f" |-{index}: {key[-8:]} {key_stat[key]['used']}/{max} {sponsor}{tag}\n"
|
||||
index += 1
|
||||
return True, f"⭐使用情况({str(gg_count)}个已用):\n{chatgpt_cfg_str}", "status"
|
||||
|
||||
def count(self):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型。", "reset"
|
||||
guild_count, guild_msg_count, guild_direct_msg_count, session_count = self.provider.get_stat()
|
||||
return True, f"【本指令部分统计可能已经过时】\n当前会话数: {len(self.provider.session_dict)}\n共有频道数: {guild_count} \n共有消息数: {guild_msg_count}\n私信数: {guild_direct_msg_count}\n历史会话数: {session_count}", "count"
|
||||
|
||||
def key(self, message: str):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型.", "reset"
|
||||
l = message.split(" ")
|
||||
if len(l) == 1:
|
||||
msg = "感谢您赞助key,key为官方API使用,请以以下格式赞助:\n/key xxxxx"
|
||||
return True, msg, "key"
|
||||
key = l[1]
|
||||
if self.provider.check_key(key):
|
||||
self.provider.append_key(key)
|
||||
return True, f"*★,°*:.☆( ̄▽ ̄)/$:*.°★* 。\n该Key被验证为有效。感谢你的赞助~"
|
||||
else:
|
||||
return True, "该Key被验证为无效。也许是输入错误了,或者重试。", "key"
|
||||
if len(l) == 2:
|
||||
try:
|
||||
page = int(l[1])
|
||||
except BaseException as e:
|
||||
return True, "页码不合法", "his"
|
||||
contexts, total_num = self.provider.dump_contexts_page(session_id, size_per_page, page=page)
|
||||
t_pages = total_num // size_per_page + 1
|
||||
return True, f"历史记录如下:\n{contexts}\n第 {page} 页 | 共 {t_pages} 页\n*输入 /his 2 跳转到第 2 页", "his"
|
||||
|
||||
def switch(self, message: str):
|
||||
def status(self, session_id: str):
|
||||
if self.provider is None:
|
||||
return False, "未启用 OpenAI 官方 API", "status"
|
||||
keys_data = self.provider.get_keys_data()
|
||||
ret = "OpenAI Key"
|
||||
for k in keys_data:
|
||||
status = "🟢" if keys_data[k] else "🔴"
|
||||
ret += "\n|- " + k[:8] + " " + status
|
||||
|
||||
conf = self.provider.get_configs()
|
||||
ret += "\n当前模型:" + conf['model']
|
||||
if conf['model'] in MODELS:
|
||||
ret += "\n最大上下文窗口:" + str(MODELS[conf['model']]) + " tokens"
|
||||
|
||||
if session_id in self.provider.session_memory and len(self.provider.session_memory[session_id]):
|
||||
ret += "\n你的会话上下文:" + str(self.provider.session_memory[session_id][-1]['usage_tokens']) + " tokens"
|
||||
|
||||
return True, ret, "status"
|
||||
|
||||
async def switch(self, message: str):
|
||||
'''
|
||||
切换账号
|
||||
'''
|
||||
@@ -169,14 +178,13 @@ class CommandOpenAIOfficial(Command):
|
||||
return True, ret, "switch"
|
||||
elif len(l) == 2:
|
||||
try:
|
||||
key_stat = self.provider.get_key_stat()
|
||||
key_stat = self.provider.get_keys_data()
|
||||
index = int(l[1])
|
||||
if index > len(key_stat) or index < 1:
|
||||
return True, "账号序号不合法。", "switch"
|
||||
else:
|
||||
try:
|
||||
new_key = list(key_stat.keys())[index-1]
|
||||
ret = self.provider.check_key(new_key)
|
||||
self.provider.set_key(new_key)
|
||||
except BaseException as e:
|
||||
return True, "账号切换失败,原因: " + str(e), "switch"
|
||||
@@ -188,14 +196,14 @@ class CommandOpenAIOfficial(Command):
|
||||
|
||||
def unset(self, session_id: str):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型.", "unset"
|
||||
return False, "未启用 OpenAI 官方 API", "unset"
|
||||
self.provider.curr_personality = {}
|
||||
self.provider.forget(session_id)
|
||||
return True, "已清除人格并重置历史记录。", "unset"
|
||||
|
||||
def set(self, message: str, session_id: str):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型.", "set"
|
||||
return False, "未启用 OpenAI 官方 API", "set"
|
||||
l = message.split(" ")
|
||||
if len(l) == 1:
|
||||
return True, f"【人格文本由PlexPt开源项目awesome-chatgpt-pr \
|
||||
@@ -225,63 +233,19 @@ class CommandOpenAIOfficial(Command):
|
||||
'name': ps,
|
||||
'prompt': personalities[ps]
|
||||
}
|
||||
self.provider.session_dict[session_id] = []
|
||||
new_record = {
|
||||
"user": {
|
||||
"role": "user",
|
||||
"content": personalities[ps],
|
||||
},
|
||||
"AI": {
|
||||
"role": "assistant",
|
||||
"content": "好的,接下来我会扮演这个角色。"
|
||||
},
|
||||
'type': "personality",
|
||||
'usage_tokens': 0,
|
||||
'single-tokens': 0
|
||||
}
|
||||
self.provider.session_dict[session_id].append(new_record)
|
||||
self.personality_str = message
|
||||
self.provider.personality_set(ps, session_id)
|
||||
return True, f"人格{ps}已设置。", "set"
|
||||
else:
|
||||
self.provider.curr_personality = {
|
||||
'name': '自定义人格',
|
||||
'prompt': ps
|
||||
}
|
||||
new_record = {
|
||||
"user": {
|
||||
"role": "user",
|
||||
"content": ps,
|
||||
},
|
||||
"AI": {
|
||||
"role": "assistant",
|
||||
"content": "好的,接下来我会扮演这个角色。"
|
||||
},
|
||||
'type': "personality",
|
||||
'usage_tokens': 0,
|
||||
'single-tokens': 0
|
||||
}
|
||||
self.provider.session_dict[session_id] = []
|
||||
self.provider.session_dict[session_id].append(new_record)
|
||||
self.personality_str = message
|
||||
self.provider.personality_set(ps, session_id)
|
||||
return True, f"自定义人格已设置。 \n人格信息: {ps}", "set"
|
||||
|
||||
def draw(self, message):
|
||||
if self.provider is None:
|
||||
return False, "未启动OpenAI ChatGPT语言模型.", "draw"
|
||||
if message.startswith("/画"):
|
||||
message = message[2:]
|
||||
elif message.startswith("画"):
|
||||
message = message[1:]
|
||||
try:
|
||||
# 画图模式传回3个参数
|
||||
img_url = self.provider.image_chat(message)
|
||||
return True, img_url, "draw"
|
||||
except Exception as e:
|
||||
if 'exceeded' in str(e):
|
||||
return f"OpenAI API错误。原因:\n{str(e)} \n超额了。可自己搭建一个机器人(Github仓库:QQChannelChatGPT)"
|
||||
return False, f"图片生成失败: {e}", "draw"
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
async def draw(self, message: str):
|
||||
if self.provider is None:
|
||||
return False, "未启用 OpenAI 官方 API", "draw"
|
||||
message = message.removeprefix("/").removeprefix("画")
|
||||
img_url = await self.provider.image_generate(message)
|
||||
return True, img_url, "draw"
|
||||
@@ -1,134 +0,0 @@
|
||||
from model.command.command import Command
|
||||
from model.provider.rev_chatgpt import ProviderRevChatGPT
|
||||
from model.platform.qq import QQ
|
||||
from cores.qqbot.personality import personalities
|
||||
from cores.qqbot.global_object import GlobalObject
|
||||
|
||||
class CommandRevChatGPT(Command):
|
||||
def __init__(self, provider: ProviderRevChatGPT, global_object: GlobalObject):
|
||||
self.provider = provider
|
||||
self.cached_plugins = {}
|
||||
self.global_object = global_object
|
||||
self.personality_str = ""
|
||||
super().__init__(provider, global_object)
|
||||
|
||||
def check_command(self,
|
||||
message: str,
|
||||
session_id: str,
|
||||
role: str,
|
||||
platform: str,
|
||||
message_obj):
|
||||
self.platform = platform
|
||||
hit, res = super().check_command(
|
||||
message,
|
||||
session_id,
|
||||
role,
|
||||
platform,
|
||||
message_obj
|
||||
)
|
||||
|
||||
if hit:
|
||||
return True, res
|
||||
if self.command_start_with(message, "help", "帮助"):
|
||||
return True, self.help()
|
||||
elif self.command_start_with(message, "reset"):
|
||||
return True, self.reset(session_id, message)
|
||||
elif self.command_start_with(message, "update"):
|
||||
return True, self.update(message, role)
|
||||
elif self.command_start_with(message, "set"):
|
||||
return True, self.set(message, session_id)
|
||||
elif self.command_start_with(message, "switch"):
|
||||
return True, self.switch(message, session_id)
|
||||
return False, None
|
||||
|
||||
def reset(self, session_id, message: str):
|
||||
l = message.split(" ")
|
||||
if len(l) == 1:
|
||||
self.provider.forget(session_id)
|
||||
return True, "重置完毕。", "reset"
|
||||
if len(l) == 2 and l[1] == "p":
|
||||
self.provider.forget(session_id)
|
||||
ret = self.provider.text_chat(self.personality_str)
|
||||
return True, f"重置完毕(保留人格)。\n\n{ret}", "reset"
|
||||
|
||||
def set(self, message: str, session_id: str):
|
||||
l = message.split(" ")
|
||||
if len(l) == 1:
|
||||
return True, f"设置人格: \n/set 人格名或人格文本。例如/set 编剧\n人格列表: /set list\n人格详细信息: \
|
||||
/set view 人格名\n重置会话(清除人格): /reset\n重置会话(保留人格): /reset p", "set"
|
||||
elif l[1] == "list":
|
||||
msg = "人格列表:\n"
|
||||
for key in personalities.keys():
|
||||
msg += f" |-{key}\n"
|
||||
msg += '\n\n*输入/set view 人格名查看人格详细信息'
|
||||
msg += '\n*不定时更新人格库,请及时更新本项目。'
|
||||
return True, msg, "set"
|
||||
elif l[1] == "view":
|
||||
if len(l) == 2:
|
||||
return True, "请输入/set view 人格名", "set"
|
||||
ps = l[2].strip()
|
||||
if ps in personalities:
|
||||
msg = f"人格【{ps}】详细信息:\n"
|
||||
msg += f"{personalities[ps]}\n"
|
||||
else:
|
||||
msg = f"人格【{ps}】不存在。"
|
||||
return True, msg, "set"
|
||||
else:
|
||||
ps = l[1].strip()
|
||||
if ps in personalities:
|
||||
self.reset(session_id, "reset")
|
||||
self.personality_str = personalities[ps]
|
||||
ret = self.provider.text_chat(self.personality_str, session_id)
|
||||
return True, f"人格【{ps}】已设置。\n\n{ret}", "set"
|
||||
else:
|
||||
self.reset(session_id, "reset")
|
||||
self.personality_str = ps
|
||||
ret = self.provider.text_chat(ps, session_id)
|
||||
return True, f"人格信息已设置。\n\n{ret}", "set"
|
||||
|
||||
def switch(self, message: str, session_id: str):
|
||||
'''
|
||||
切换账号
|
||||
'''
|
||||
l = message.split(" ")
|
||||
rev_chatgpt = self.provider.get_revchatgpt()
|
||||
if len(l) == 1:
|
||||
ret = "当前账号:\n"
|
||||
index = 0
|
||||
curr_ = None
|
||||
for revstat in rev_chatgpt:
|
||||
index += 1
|
||||
ret += f"[{index}]. {revstat['id']}\n"
|
||||
# if session_id in revstat['user']:
|
||||
# curr_ = revstat['id']
|
||||
for user in revstat['user']:
|
||||
if session_id == user['id']:
|
||||
curr_ = revstat['id']
|
||||
break
|
||||
if curr_ is None:
|
||||
ret += "当前您未选择账号。输入/switch <账号序号>切换账号。"
|
||||
else:
|
||||
ret += f"当前您选择的账号为:{curr_}。输入/switch <账号序号>切换账号。"
|
||||
return True, ret, "switch"
|
||||
elif len(l) == 2:
|
||||
try:
|
||||
index = int(l[1])
|
||||
if index > len(self.provider.rev_chatgpt) or index < 1:
|
||||
return True, "账号序号不合法。", "switch"
|
||||
else:
|
||||
# pop
|
||||
for revstat in self.provider.rev_chatgpt:
|
||||
if session_id in revstat['user']:
|
||||
revstat['user'].remove(session_id)
|
||||
# append
|
||||
self.provider.rev_chatgpt[index - 1]['user'].append(session_id)
|
||||
return True, f"切换账号成功。当前账号为:{self.provider.rev_chatgpt[index - 1]['id']}", "switch"
|
||||
except BaseException:
|
||||
return True, "账号序号不合法。", "switch"
|
||||
else:
|
||||
return True, "参数过多。", "switch"
|
||||
|
||||
def help(self):
|
||||
commands = super().general_commands()
|
||||
commands['set'] = '设置人格'
|
||||
return True, super().help_messager(commands, self.platform, self.global_object.cached_plugins), "help"
|
||||
@@ -1,52 +0,0 @@
|
||||
from model.command.command import Command
|
||||
from model.provider.rev_edgegpt import ProviderRevEdgeGPT
|
||||
import asyncio
|
||||
from model.platform.qq import QQ
|
||||
from cores.qqbot.global_object import GlobalObject
|
||||
|
||||
class CommandRevEdgeGPT(Command):
|
||||
def __init__(self, provider: ProviderRevEdgeGPT, global_object: GlobalObject):
|
||||
self.provider = provider
|
||||
self.cached_plugins = {}
|
||||
self.global_object = global_object
|
||||
super().__init__(provider, global_object)
|
||||
|
||||
def check_command(self,
|
||||
message: str,
|
||||
session_id: str,
|
||||
role: str,
|
||||
platform: str,
|
||||
message_obj):
|
||||
self.platform = platform
|
||||
|
||||
hit, res = super().check_command(
|
||||
message,
|
||||
session_id,
|
||||
role,
|
||||
platform,
|
||||
message_obj
|
||||
)
|
||||
|
||||
if hit:
|
||||
return True, res
|
||||
if self.command_start_with(message, "reset"):
|
||||
return True, self.reset()
|
||||
elif self.command_start_with(message, "help"):
|
||||
return True, self.help()
|
||||
elif self.command_start_with(message, "update"):
|
||||
return True, self.update(message, role)
|
||||
|
||||
return False, None
|
||||
|
||||
def reset(self, loop = None):
|
||||
if self.provider is None:
|
||||
return False, "未启动Bing语言模型.", "reset"
|
||||
res = asyncio.run_coroutine_threadsafe(self.provider.forget(), loop).result()
|
||||
print(res)
|
||||
if res:
|
||||
return res, "重置成功", "reset"
|
||||
else:
|
||||
return res, "重置失败", "reset"
|
||||
|
||||
def help(self):
|
||||
return True, super().help_messager(super().general_commands(), self.platform, self.global_object.cached_plugins), "help"
|
||||
114
model/platform/_message_parse.py
Normal file
@@ -0,0 +1,114 @@
|
||||
from nakuru.entities.components import Plain, At, Image, BaseMessageComponent
|
||||
from nakuru import (
|
||||
GuildMessage,
|
||||
GroupMessage,
|
||||
FriendMessage
|
||||
)
|
||||
import botpy.message
|
||||
from type.message import *
|
||||
from typing import List, Union
|
||||
from util.general_utils import save_temp_img
|
||||
import time, base64
|
||||
|
||||
# QQ官方消息类型转换
|
||||
|
||||
|
||||
def qq_official_message_parse(message: List[BaseMessageComponent]):
|
||||
plain_text = ""
|
||||
image_path = None # only one img supported
|
||||
for i in message:
|
||||
if isinstance(i, Plain):
|
||||
plain_text += i.text
|
||||
elif isinstance(i, Image) and not image_path:
|
||||
if i.path:
|
||||
image_path = i.path
|
||||
elif i.file and i.file.startswith("base64://"):
|
||||
img_data = base64.b64decode(i.file[9:])
|
||||
image_path = save_temp_img(img_data)
|
||||
else:
|
||||
image_path = save_temp_img(i.file)
|
||||
return plain_text, image_path
|
||||
|
||||
# QQ官方消息类型 2 AstrBotMessage
|
||||
|
||||
|
||||
def qq_official_message_parse_rev(message: Union[botpy.message.Message, botpy.message.GroupMessage],
|
||||
message_type: MessageType) -> AstrBotMessage:
|
||||
abm = AstrBotMessage()
|
||||
abm.type = message_type
|
||||
abm.timestamp = int(time.time())
|
||||
abm.raw_message = message
|
||||
abm.message_id = message.id
|
||||
abm.tag = "qqchan"
|
||||
msg: List[BaseMessageComponent] = []
|
||||
|
||||
if message_type == MessageType.GROUP_MESSAGE:
|
||||
abm.sender = MessageMember(
|
||||
message.author.member_openid,
|
||||
""
|
||||
)
|
||||
abm.message_str = message.content.strip()
|
||||
abm.self_id = "unknown_selfid"
|
||||
|
||||
msg.append(Plain(abm.message_str))
|
||||
if message.attachments:
|
||||
for i in message.attachments:
|
||||
if i.content_type.startswith("image"):
|
||||
url = i.url
|
||||
if not url.startswith("http"):
|
||||
url = "https://"+url
|
||||
img = Image.fromURL(url)
|
||||
msg.append(img)
|
||||
abm.message = msg
|
||||
|
||||
elif message_type == MessageType.GUILD_MESSAGE or message_type == MessageType.FRIEND_MESSAGE:
|
||||
# 目前对于 FRIEND_MESSAGE 只处理频道私聊
|
||||
try:
|
||||
abm.self_id = str(message.mentions[0].id)
|
||||
except:
|
||||
abm.self_id = ""
|
||||
|
||||
plain_content = message.content.replace(
|
||||
"<@!"+str(abm.self_id)+">", "").strip()
|
||||
msg.append(Plain(plain_content))
|
||||
if message.attachments:
|
||||
for i in message.attachments:
|
||||
if i.content_type.startswith("image"):
|
||||
url = i.url
|
||||
if not url.startswith("http"):
|
||||
url = "https://"+url
|
||||
img = Image.fromURL(url)
|
||||
msg.append(img)
|
||||
abm.message = msg
|
||||
abm.message_str = plain_content
|
||||
abm.sender = MessageMember(
|
||||
str(message.author.id),
|
||||
str(message.author.username)
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Unknown message type: {message_type}")
|
||||
return abm
|
||||
|
||||
|
||||
def nakuru_message_parse_rev(message: Union[GuildMessage, GroupMessage, FriendMessage]) -> AstrBotMessage:
|
||||
abm = AstrBotMessage()
|
||||
abm.type = MessageType(message.type)
|
||||
abm.timestamp = int(time.time())
|
||||
abm.raw_message = message
|
||||
abm.message_id = message.message_id
|
||||
|
||||
plain_content = ""
|
||||
for i in message.message:
|
||||
if isinstance(i, Plain):
|
||||
plain_content += i.text
|
||||
abm.message_str = plain_content
|
||||
|
||||
abm.self_id = str(message.self_id)
|
||||
abm.sender = MessageMember(
|
||||
str(message.sender.user_id),
|
||||
str(message.sender.nickname)
|
||||
)
|
||||
abm.tag = "gocq"
|
||||
abm.message = message.message
|
||||
|
||||
return abm
|
||||
9
model/platform/_message_result.py
Normal file
@@ -0,0 +1,9 @@
from dataclasses import dataclass
from typing import Union, Optional


@dataclass
class MessageResult():
    result_message: Union[str, list]
    is_command_call: Optional[bool] = False
    callback: Optional[callable] = None
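A minimal sketch (not part of this diff) of how a message handler could return this dataclass; the handler signature and the None-means-no-reply convention mirror how the platform classes below call message_handler, and the import path is assumed from this file's location:
from model.platform._message_result import MessageResult  # assumed import path

async def message_handler(message, session_id: str, role: str, platform: str):
    if not message.message_str.startswith("help"):
        return None  # None: the platform will not send any reply
    return MessageResult(
        result_message="指令列表 ...",  # a str, or a list of components (Plain/Image/...)
        is_command_call=True,
        callback=None,  # e.g. a reboot hook after "update r"
    )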
73
model/platform/_platfrom.py
Normal file
@@ -0,0 +1,73 @@
|
||||
import abc
|
||||
from typing import Union
|
||||
from nakuru import (
|
||||
GuildMessage,
|
||||
GroupMessage,
|
||||
FriendMessage,
|
||||
)
|
||||
from nakuru.entities.components import Plain, At, Image
|
||||
|
||||
|
||||
class Platform():
|
||||
def __init__(self, message_handler: callable) -> None:
|
||||
'''
|
||||
初始化平台的各种接口
|
||||
'''
|
||||
self.message_handler = message_handler
|
||||
self.cnt_receive = 0
|
||||
self.cnt_reply = 0
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
async def handle_msg(self):
|
||||
'''
|
||||
处理到来的消息
|
||||
'''
|
||||
self.cnt_receive += 1
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
async def reply_msg(self):
|
||||
'''
|
||||
回复消息(被动发送)
|
||||
'''
|
||||
self.cnt_reply += 1
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
async def send_msg(self, target: Union[GuildMessage, GroupMessage, FriendMessage, str], message: Union[str, list]):
|
||||
'''
|
||||
发送消息(主动发送)
|
||||
'''
|
||||
self.cnt_reply += 1
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
async def send(self, target: Union[GuildMessage, GroupMessage, FriendMessage, str], message: Union[str, list]):
|
||||
'''
|
||||
发送消息(主动发送)同 send_msg()
|
||||
'''
|
||||
self.cnt_reply += 1
|
||||
pass
|
||||
|
||||
def parse_message_outline(self, message: Union[GuildMessage, GroupMessage, FriendMessage, str, list]) -> str:
|
||||
'''
|
||||
将消息解析成大纲消息形式。
|
||||
如: xxxxx[图片]xxxxx
|
||||
'''
|
||||
if isinstance(message, str):
|
||||
return message
|
||||
ret = ''
|
||||
ls_to_parse = message if isinstance(message, list) else message.message
|
||||
try:
|
||||
for node in ls_to_parse:
|
||||
if isinstance(node, Plain):
|
||||
ret += node.text
|
||||
elif isinstance(node, At):
|
||||
ret += f'[At: {node.name}/{node.qq}]'
|
||||
elif isinstance(node, Image):
|
||||
ret += '[图片]'
|
||||
except Exception as e:
|
||||
pass
|
||||
ret.replace('\n', '')
|
||||
return ret
|
||||
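A rough example of what parse_message_outline() returns for a mixed message chain (sketch only; the Plain/At/Image constructors are assumed from how they are used elsewhere in this diff):
chain = [At(qq=10001, name="astrbot"), Plain(text=" 帮我画一张猫"), Image.fromURL("https://example.com/cat.png")]
print(platform.parse_message_outline(chain))
# -> "[At: astrbot/10001] 帮我画一张猫[图片]"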
@@ -1,190 +0,0 @@
|
||||
from nakuru.entities.components import Plain, At, Image, Node
|
||||
from util import general_utils as gu
|
||||
from util.cmd_config import CmdConfig
|
||||
import asyncio
|
||||
from nakuru import (
|
||||
CQHTTP,
|
||||
GuildMessage,
|
||||
GroupMessage,
|
||||
FriendMessage
|
||||
)
|
||||
from typing import Union
|
||||
import time
|
||||
|
||||
|
||||
class FakeSource:
|
||||
def __init__(self, type, group_id):
|
||||
self.type = type
|
||||
self.group_id = group_id
|
||||
|
||||
class QQ:
|
||||
def __init__(self, is_start: bool, cc: CmdConfig = None, gocq_loop = None) -> None:
|
||||
self.is_start = is_start
|
||||
self.gocq_loop = gocq_loop
|
||||
self.cc = cc
|
||||
self.waiting = {}
|
||||
self.gocq_cnt = 0
|
||||
|
||||
def run_bot(self, gocq):
|
||||
self.client: CQHTTP = gocq
|
||||
self.client.run()
|
||||
|
||||
def get_msg_loop(self):
|
||||
return self.gocq_loop
|
||||
|
||||
def get_cnt(self):
|
||||
return self.gocq_cnt
|
||||
|
||||
def set_cnt(self, cnt):
|
||||
self.gocq_cnt = cnt
|
||||
|
||||
async def send_qq_msg(self,
|
||||
source,
|
||||
res,
|
||||
image_mode=None):
|
||||
self.gocq_cnt += 1
|
||||
if not self.is_start:
|
||||
raise Exception("管理员未启动GOCQ平台")
|
||||
"""
|
||||
res可以是一个数组, 也就是gocq的消息链。
|
||||
插件开发者请使用send方法, 可以不用直接调用这个方法。
|
||||
"""
|
||||
gu.log("回复GOCQ消息: "+str(res), level=gu.LEVEL_INFO, tag="GOCQ", max_len=300)
|
||||
|
||||
if isinstance(source, int):
|
||||
source = FakeSource("GroupMessage", source)
|
||||
|
||||
# str convert to CQ Message Chain
|
||||
if isinstance(res, str):
|
||||
res_str = res
|
||||
res = []
|
||||
if source.type == "GroupMessage" and not isinstance(source, FakeSource):
|
||||
res.append(At(qq=source.user_id))
|
||||
res.append(Plain(text=res_str))
|
||||
|
||||
# if image mode, put all Plain texts into a new picture.
|
||||
if image_mode is None:
|
||||
image_mode = self.cc.get('qq_pic_mode', False)
|
||||
if image_mode and isinstance(res, list):
|
||||
plains = []
|
||||
news = []
|
||||
for i in res:
|
||||
if isinstance(i, Plain):
|
||||
plains.append(i.text)
|
||||
else:
|
||||
news.append(i)
|
||||
plains_str = "".join(plains).strip()
|
||||
if plains_str != "" and len(plains_str) > 50:
|
||||
p = gu.create_markdown_image("".join(plains))
|
||||
news.append(Image.fromFileSystem(p))
|
||||
res = news
|
||||
|
||||
# 回复消息链
|
||||
if isinstance(res, list) and len(res) > 0:
|
||||
if source.type == "GuildMessage":
|
||||
await self.client.sendGuildChannelMessage(source.guild_id, source.channel_id, res)
|
||||
return
|
||||
elif source.type == "FriendMessage":
|
||||
await self.client.sendFriendMessage(source.user_id, res)
|
||||
return
|
||||
elif source.type == "GroupMessage":
|
||||
# 过长时forward发送
|
||||
plain_text_len = 0
|
||||
image_num = 0
|
||||
for i in res:
|
||||
if isinstance(i, Plain):
|
||||
plain_text_len += len(i.text)
|
||||
elif isinstance(i, Image):
|
||||
image_num += 1
|
||||
if plain_text_len > self.cc.get('qq_forward_threshold', 200):
|
||||
# 删除At
|
||||
for i in res:
|
||||
if isinstance(i, At):
|
||||
res.remove(i)
|
||||
node = Node(res)
|
||||
# node.content = res
|
||||
node.uin = 123456
|
||||
node.name = f"bot"
|
||||
node.time = int(time.time())
|
||||
# print(node)
|
||||
nodes=[node]
|
||||
await self.client.sendGroupForwardMessage(source.group_id, nodes)
|
||||
return
|
||||
await self.client.sendGroupMessage(source.group_id, res)
|
||||
return
|
||||
|
||||
def send(self,
|
||||
to,
|
||||
res,
|
||||
image_mode=False,
|
||||
):
|
||||
'''
|
||||
提供给插件的发送QQ消息接口, 不用在外部await。
|
||||
参数说明:第一个参数可以是消息对象,也可以是QQ群号。第二个参数是消息内容(消息内容可以是消息链列表,也可以是纯文字信息)。
|
||||
第三个参数是是否开启图片模式,如果开启,那么所有纯文字信息都会被合并成一张图片。
|
||||
'''
|
||||
try:
|
||||
asyncio.run_coroutine_threadsafe(self.send_qq_msg(to, res, image_mode), self.gocq_loop).result()
|
||||
except BaseException as e:
|
||||
raise e
|
||||
|
||||
def send_guild(self,
|
||||
message_obj,
|
||||
res,
|
||||
):
|
||||
'''
|
||||
提供给插件的发送GOCQ QQ频道消息接口, 不用在外部await。
|
||||
参数说明:第一个参数必须是消息对象, 第二个参数是消息内容(消息内容可以是消息链列表,也可以是纯文字信息)。
|
||||
'''
|
||||
try:
|
||||
asyncio.run_coroutine_threadsafe(self.send_qq_msg(message_obj, res), self.gocq_loop).result()
|
||||
except BaseException as e:
|
||||
raise e
|
||||
|
||||
def create_text_image(title: str, text: str, max_width=30, font_size=20):
|
||||
'''
|
||||
文本转图片。
|
||||
title: 标题
|
||||
text: 文本内容
|
||||
max_width: 文本宽度最大值(默认30)
|
||||
font_size: 字体大小(默认20)
|
||||
|
||||
返回:文件路径
|
||||
'''
|
||||
try:
|
||||
img = gu.word2img(title, text, max_width, font_size)
|
||||
p = gu.save_temp_img(img)
|
||||
return p
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
def wait_for_message(self, group_id) -> Union[GroupMessage, FriendMessage, GuildMessage]:
|
||||
'''
|
||||
等待下一条消息,超时 300s 后抛出异常
|
||||
'''
|
||||
self.waiting[group_id] = ''
|
||||
cnt = 0
|
||||
while True:
|
||||
if group_id in self.waiting and self.waiting[group_id] != '':
|
||||
# 去掉
|
||||
ret = self.waiting[group_id]
|
||||
del self.waiting[group_id]
|
||||
return ret
|
||||
cnt += 1
|
||||
if cnt > 300:
|
||||
raise Exception("等待消息超时。")
|
||||
time.sleep(1)
|
||||
|
||||
def get_client(self):
|
||||
return self.client
|
||||
|
||||
def nakuru_method_invoker(self, func, *args, **kwargs):
|
||||
"""
|
||||
返回一个方法调用器,可以用来立即调用nakuru的方法。
|
||||
"""
|
||||
try:
|
||||
ret = asyncio.run_coroutine_threadsafe(func(*args, **kwargs), self.gocq_loop).result()
|
||||
return ret
|
||||
except BaseException as e:
|
||||
raise e
|
||||
|
||||
315
model/platform/qq_gocq.py
Normal file
@@ -0,0 +1,315 @@
|
||||
from nakuru.entities.components import Plain, At, Image, Node
|
||||
from util import general_utils as gu
|
||||
from util.image_render.helper import text_to_image_base
|
||||
from util.cmd_config import CmdConfig
|
||||
import asyncio
|
||||
from nakuru import (
|
||||
CQHTTP,
|
||||
GuildMessage,
|
||||
GroupMessage,
|
||||
FriendMessage,
|
||||
GroupMemberIncrease,
|
||||
Notify
|
||||
)
|
||||
from typing import Union
|
||||
from type.types import GlobalObject
|
||||
import time
|
||||
|
||||
from ._platfrom import Platform
|
||||
from ._message_parse import nakuru_message_parse_rev
|
||||
from type.message import *
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
|
||||
class FakeSource:
|
||||
def __init__(self, type, group_id):
|
||||
self.type = type
|
||||
self.group_id = group_id
|
||||
|
||||
|
||||
class QQGOCQ(Platform):
|
||||
def __init__(self, cfg: dict, message_handler: callable, global_object: GlobalObject) -> None:
|
||||
super().__init__(message_handler)
|
||||
|
||||
self.loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self.loop)
|
||||
|
||||
self.waiting = {}
|
||||
self.cc = CmdConfig()
|
||||
self.cfg = cfg
|
||||
|
||||
self.context = global_object
|
||||
|
||||
self.unique_session = cfg['uniqueSessionMode']
|
||||
self.pic_mode = cfg['qq_pic_mode']
|
||||
|
||||
self.client = CQHTTP(
|
||||
host=self.cc.get("gocq_host", "127.0.0.1"),
|
||||
port=self.cc.get("gocq_websocket_port", 6700),
|
||||
http_port=self.cc.get("gocq_http_port", 5700),
|
||||
)
|
||||
gocq_app = self.client
|
||||
|
||||
self.announcement = self.cc.get("announcement", "欢迎新人!")
|
||||
|
||||
@gocq_app.receiver("GroupMessage")
|
||||
async def _(app: CQHTTP, source: GroupMessage):
|
||||
if self.cc.get("gocq_react_group", True):
|
||||
abm = nakuru_message_parse_rev(source)
|
||||
if isinstance(source.message[0], Plain):
|
||||
await self.handle_msg(abm)
|
||||
elif isinstance(source.message[0], At):
|
||||
if source.message[0].qq == source.self_id:
|
||||
await self.handle_msg(abm)
|
||||
else:
|
||||
return
|
||||
|
||||
@gocq_app.receiver("FriendMessage")
|
||||
async def _(app: CQHTTP, source: FriendMessage):
|
||||
if self.cc.get("gocq_react_friend", True):
|
||||
abm = nakuru_message_parse_rev(source)
|
||||
if isinstance(source.message[0], Plain):
|
||||
await self.handle_msg(abm)
|
||||
else:
|
||||
return
|
||||
|
||||
@gocq_app.receiver("GroupMemberIncrease")
|
||||
async def _(app: CQHTTP, source: GroupMemberIncrease):
|
||||
if self.cc.get("gocq_react_group_increase", True):
|
||||
await app.sendGroupMessage(source.group_id, [
|
||||
Plain(text=self.announcement)
|
||||
])
|
||||
|
||||
# @gocq_app.receiver("Notify")
|
||||
# async def _(app: CQHTTP, source: Notify):
|
||||
# print(source)
|
||||
# if source.sub_type == "poke" and source.target_id == source.self_id:
|
||||
# await self.handle_msg(source)
|
||||
|
||||
@gocq_app.receiver("GuildMessage")
|
||||
async def _(app: CQHTTP, source: GuildMessage):
|
||||
if self.cc.get("gocq_react_guild", True):
|
||||
abm = nakuru_message_parse_rev(source)
|
||||
if isinstance(source.message[0], Plain):
|
||||
await self.handle_msg(abm)
|
||||
elif isinstance(source.message[0], At):
|
||||
if source.message[0].qq == source.self_tiny_id:
|
||||
await self.handle_msg(abm)
|
||||
else:
|
||||
return
|
||||
|
||||
def run(self):
|
||||
self.client.run()
|
||||
|
||||
async def handle_msg(self, message: AstrBotMessage):
|
||||
await super().handle_msg()
|
||||
logger.info(
|
||||
f"{message.sender.nickname}/{message.sender.user_id} -> {self.parse_message_outline(message)}")
|
||||
|
||||
assert isinstance(message.raw_message,
|
||||
(GroupMessage, FriendMessage, GuildMessage))
|
||||
is_group = message.type != MessageType.FRIEND_MESSAGE
|
||||
|
||||
# 判断是否响应消息
|
||||
resp = False
|
||||
if not is_group:
|
||||
resp = True
|
||||
else:
|
||||
for i in message.message:
|
||||
if isinstance(i, At):
|
||||
if message.type.value == "GuildMessage":
|
||||
if str(i.qq) == str(message.raw_message.user_id) or str(i.qq) == str(message.raw_message.self_tiny_id):
|
||||
resp = True
|
||||
if message.type.value == "FriendMessage":
|
||||
if str(i.qq) == str(message.self_id):
|
||||
resp = True
|
||||
if message.type.value == "GroupMessage":
|
||||
if str(i.qq) == str(message.self_id):
|
||||
resp = True
|
||||
elif isinstance(i, Plain) and self.context.nick:
|
||||
for nick in self.context.nick:
|
||||
if nick != '' and i.text.strip().startswith(nick):
|
||||
resp = True
|
||||
break
|
||||
|
||||
if not resp:
|
||||
return
|
||||
|
||||
# 解析 session_id
|
||||
if self.unique_session or not is_group:
|
||||
session_id = message.raw_message.user_id
|
||||
elif message.type == MessageType.GROUP_MESSAGE:
|
||||
session_id = message.raw_message.group_id
|
||||
elif message.type == MessageType.GUILD_MESSAGE:
|
||||
session_id = message.raw_message.channel_id
|
||||
else:
|
||||
session_id = message.raw_message.user_id
|
||||
|
||||
message.session_id = session_id
|
||||
|
||||
# 解析 role
|
||||
sender_id = str(message.raw_message.user_id)
|
||||
if sender_id == self.cc.get('admin_qq', '') or \
|
||||
sender_id in self.cc.get('other_admins', []):
|
||||
role = 'admin'
|
||||
else:
|
||||
role = 'member'
|
||||
|
||||
message_result = await self.message_handler(
|
||||
message=message,
|
||||
session_id=session_id,
|
||||
role=role,
|
||||
platform='gocq'
|
||||
)
|
||||
|
||||
if message_result is None:
|
||||
return
|
||||
await self.reply_msg(message, message_result.result_message)
|
||||
if message_result.callback is not None:
|
||||
message_result.callback()
|
||||
|
||||
# 如果是等待回复的消息
|
||||
if session_id in self.waiting and self.waiting[session_id] == '':
|
||||
self.waiting[session_id] = message
|
||||
|
||||
async def reply_msg(self,
|
||||
message: Union[AstrBotMessage, GuildMessage, GroupMessage, FriendMessage],
|
||||
result_message: list):
|
||||
await super().reply_msg()
|
||||
"""
|
||||
插件开发者请使用send方法, 可以不用直接调用这个方法。
|
||||
"""
|
||||
if isinstance(message, AstrBotMessage):
|
||||
source = message.raw_message
|
||||
else:
|
||||
source = message
|
||||
|
||||
res = result_message
|
||||
|
||||
logger.info(
|
||||
f"{source.user_id} <- {self.parse_message_outline(res)}")
|
||||
|
||||
if isinstance(source, int):
|
||||
source = FakeSource("GroupMessage", source)
|
||||
|
||||
# str convert to CQ Message Chain
|
||||
if isinstance(res, str):
|
||||
res_str = res
|
||||
res = []
|
||||
if source.type == "GroupMessage" and not isinstance(source, FakeSource):
|
||||
res.append(At(qq=source.user_id))
|
||||
res.append(Plain(text=res_str))
|
||||
|
||||
# if image mode, put all Plain texts into a new picture.
|
||||
if self.pic_mode and isinstance(res, list):
|
||||
plains = []
|
||||
news = []
|
||||
for i in res:
|
||||
if isinstance(i, Plain):
|
||||
plains.append(i.text)
|
||||
else:
|
||||
news.append(i)
|
||||
plains_str = "".join(plains).strip()
|
||||
if plains_str != "" and len(plains_str) > 50:
|
||||
# p = gu.create_markdown_image("".join(plains))
|
||||
p = await text_to_image_base(plains_str)
|
||||
news.append(Image.fromFileSystem(p))
|
||||
res = news
|
||||
|
||||
# 回复消息链
|
||||
if isinstance(res, list) and len(res) > 0:
|
||||
if source.type == "GuildMessage":
|
||||
await self.client.sendGuildChannelMessage(source.guild_id, source.channel_id, res)
|
||||
return
|
||||
elif source.type == "FriendMessage":
|
||||
await self.client.sendFriendMessage(source.user_id, res)
|
||||
return
|
||||
elif source.type == "GroupMessage":
|
||||
# 过长时forward发送
|
||||
plain_text_len = 0
|
||||
image_num = 0
|
||||
for i in res:
|
||||
if isinstance(i, Plain):
|
||||
plain_text_len += len(i.text)
|
||||
elif isinstance(i, Image):
|
||||
image_num += 1
|
||||
if plain_text_len > self.cc.get('qq_forward_threshold', 200):
|
||||
# 删除At
|
||||
for i in res:
|
||||
if isinstance(i, At):
|
||||
res.remove(i)
|
||||
node = Node(res)
|
||||
# node.content = res
|
||||
node.uin = 123456
|
||||
node.name = f"bot"
|
||||
node.time = int(time.time())
|
||||
# print(node)
|
||||
nodes = [node]
|
||||
await self.client.sendGroupForwardMessage(source.group_id, nodes)
|
||||
return
|
||||
await self.client.sendGroupMessage(source.group_id, res)
|
||||
return
|
||||
|
||||
async def send_msg(self, message: Union[GroupMessage, FriendMessage, GuildMessage, AstrBotMessage], result_message: list):
|
||||
'''
|
||||
提供给插件的发送QQ消息接口。
|
||||
参数说明:第一个参数可以是消息对象,也可以是QQ群号。第二个参数是消息内容(消息内容可以是消息链列表,也可以是纯文字信息)。
|
||||
'''
|
||||
await super().reply_msg()
|
||||
try:
|
||||
await self.reply_msg(message, result_message)
|
||||
except BaseException as e:
|
||||
raise e
|
||||
|
||||
async def send(self,
|
||||
to,
|
||||
res):
|
||||
'''
|
||||
同 send_msg()
|
||||
'''
|
||||
await super().reply_msg()
|
||||
await self.reply_msg(to, res)
|
||||
|
||||
async def create_text_image(text: str):
|
||||
'''
|
||||
文本转图片。
|
||||
text: 文本内容
|
||||
返回:文件路径
|
||||
'''
|
||||
try:
|
||||
return await text_to_image_base(text)
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
def wait_for_message(self, group_id) -> Union[GroupMessage, FriendMessage, GuildMessage]:
|
||||
'''
|
||||
等待下一条消息,超时 300s 后抛出异常
|
||||
'''
|
||||
self.waiting[group_id] = ''
|
||||
cnt = 0
|
||||
while True:
|
||||
if group_id in self.waiting and self.waiting[group_id] != '':
|
||||
# 去掉
|
||||
ret = self.waiting[group_id]
|
||||
del self.waiting[group_id]
|
||||
return ret
|
||||
cnt += 1
|
||||
if cnt > 300:
|
||||
raise Exception("等待消息超时。")
|
||||
time.sleep(1)
|
||||
|
||||
def get_client(self):
|
||||
return self.client
|
||||
|
||||
async def nakuru_method_invoker(self, func, *args, **kwargs):
|
||||
"""
|
||||
返回一个方法调用器,可以用来立即调用nakuru的方法。
|
||||
"""
|
||||
try:
|
||||
ret = func(*args, **kwargs)
|
||||
return ret
|
||||
except BaseException as e:
|
||||
raise e
|
||||
334
model/platform/qq_official.py
Normal file
@@ -0,0 +1,334 @@
|
||||
import io
|
||||
import botpy
|
||||
from PIL import Image as PILImage
|
||||
import botpy.message
|
||||
import re
|
||||
import asyncio
|
||||
import aiohttp
|
||||
import botpy.types
|
||||
import botpy.types.message
|
||||
from util import general_utils as gu
|
||||
|
||||
from botpy.types.message import Reference
|
||||
from botpy import Client
|
||||
import time
|
||||
from ._platfrom import Platform
|
||||
from ._message_parse import (
|
||||
qq_official_message_parse_rev,
|
||||
qq_official_message_parse
|
||||
)
|
||||
from type.message import *
|
||||
from typing import Union, List
|
||||
from nakuru.entities.components import *
|
||||
from util.image_render.helper import text_to_image_base
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
# QQ 机器人官方框架
|
||||
class botClient(Client):
|
||||
def set_platform(self, platform: 'QQOfficial'):
|
||||
self.platform = platform
|
||||
|
||||
async def on_group_at_message_create(self, message: botpy.message.GroupMessage):
|
||||
abm = qq_official_message_parse_rev(message, MessageType.GROUP_MESSAGE)
|
||||
await self.platform.handle_msg(abm)
|
||||
|
||||
# 收到频道消息
|
||||
async def on_at_message_create(self, message: botpy.message.Message):
|
||||
# 转换层
|
||||
abm = qq_official_message_parse_rev(message, MessageType.GUILD_MESSAGE)
|
||||
await self.platform.handle_msg(abm)
|
||||
|
||||
# 收到私聊消息
|
||||
async def on_direct_message_create(self, message: botpy.message.DirectMessage):
|
||||
# 转换层
|
||||
abm = qq_official_message_parse_rev(
|
||||
message, MessageType.FRIEND_MESSAGE)
|
||||
await self.platform.handle_msg(abm)
|
||||
|
||||
|
||||
class QQOfficial(Platform):
|
||||
|
||||
def __init__(self, cfg: dict, message_handler: callable, global_object) -> None:
|
||||
super().__init__(message_handler)
|
||||
|
||||
self.loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self.loop)
|
||||
|
||||
self.waiting: dict = {}
|
||||
|
||||
self.cfg = cfg
|
||||
self.appid = cfg['qqbot']['appid']
|
||||
self.token = cfg['qqbot']['token']
|
||||
self.secret = cfg['qqbot_secret']
|
||||
self.unique_session = cfg['uniqueSessionMode']
|
||||
qq_group = cfg['qqofficial_enable_group_message']
|
||||
self.pic_mode = cfg['qq_pic_mode']
|
||||
|
||||
if qq_group:
|
||||
self.intents = botpy.Intents(
|
||||
public_messages=True,
|
||||
public_guild_messages=True,
|
||||
direct_message=cfg['direct_message_mode']
|
||||
)
|
||||
else:
|
||||
self.intents = botpy.Intents(
|
||||
public_guild_messages=True,
|
||||
direct_message=cfg['direct_message_mode']
|
||||
)
|
||||
self.client = botClient(
|
||||
intents=self.intents,
|
||||
bot_log=False
|
||||
)
|
||||
|
||||
self.client.set_platform(self)
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
self.loop.run_until_complete(self.client.run(
|
||||
appid=self.appid,
|
||||
secret=self.secret
|
||||
))
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
self.client = botClient(
|
||||
intents=self.intents,
|
||||
bot_log=False
|
||||
)
|
||||
self.client.set_platform(self)
|
||||
self.client.run(
|
||||
appid=self.appid,
|
||||
token=self.token
|
||||
)
|
||||
|
||||
async def handle_msg(self, message: AstrBotMessage):
|
||||
await super().handle_msg()
|
||||
assert isinstance(message.raw_message, (botpy.message.Message,
|
||||
botpy.message.GroupMessage, botpy.message.DirectMessage))
|
||||
is_group = message.type != MessageType.FRIEND_MESSAGE
|
||||
|
||||
_t = "/私聊" if not is_group else ""
|
||||
logger.info(
|
||||
f"{message.sender.nickname}({message.sender.user_id}{_t}) -> {self.parse_message_outline(message)}")
|
||||
|
||||
# 解析出 session_id
|
||||
if self.unique_session or not is_group:
|
||||
session_id = message.sender.user_id
|
||||
else:
|
||||
if message.type == MessageType.GUILD_MESSAGE:
|
||||
session_id = message.raw_message.channel_id
|
||||
elif message.type == MessageType.GROUP_MESSAGE:
|
||||
session_id = str(message.raw_message.group_openid)
|
||||
else:
|
||||
session_id = str(message.raw_message.author.id)
|
||||
message.session_id = session_id
|
||||
|
||||
# 解析出 role
|
||||
sender_id = message.sender.user_id
|
||||
if sender_id == self.cfg['admin_qqchan'] or \
|
||||
sender_id in self.cfg['other_admins']:
|
||||
role = 'admin'
|
||||
else:
|
||||
role = 'member'
|
||||
|
||||
message_result = await self.message_handler(
|
||||
message=message,
|
||||
session_id=session_id,
|
||||
role=role,
|
||||
platform='qqchan'
|
||||
)
|
||||
|
||||
if message_result is None:
|
||||
return
|
||||
|
||||
await self.reply_msg(message, message_result.result_message)
|
||||
if message_result.callback is not None:
|
||||
message_result.callback()
|
||||
|
||||
# 如果是等待回复的消息
|
||||
if session_id in self.waiting and self.waiting[session_id] == '':
|
||||
self.waiting[session_id] = message
|
||||
|
||||
async def reply_msg(self,
|
||||
message: Union[botpy.message.Message, botpy.message.GroupMessage, botpy.message.DirectMessage, AstrBotMessage],
|
||||
res: Union[str, list]):
|
||||
'''
|
||||
回复频道消息
|
||||
'''
|
||||
await super().reply_msg()
|
||||
if isinstance(message, AstrBotMessage):
|
||||
source = message.raw_message
|
||||
else:
|
||||
source = message
|
||||
assert isinstance(source, (botpy.message.Message,
|
||||
botpy.message.GroupMessage, botpy.message.DirectMessage))
|
||||
logger.info(
|
||||
f"{message.sender.nickname}({message.sender.user_id}) <- {self.parse_message_outline(res)}")
|
||||
|
||||
plain_text = ''
|
||||
image_path = ''
|
||||
msg_ref = None
|
||||
|
||||
# if isinstance(res, list):
|
||||
# plain_text, image_path = qq_official_message_parse(res)
|
||||
# elif isinstance(res, str):
|
||||
# plain_text = res
|
||||
|
||||
# if self.cfg['qq_pic_mode']:
|
||||
# # 文本转图片,并且加上原来的图片
|
||||
# if plain_text != '' or image_path != '':
|
||||
# if image_path is not None and image_path != '':
|
||||
# if image_path.startswith("http"):
|
||||
# plain_text += "\n\n" + ""
|
||||
# else:
|
||||
# plain_text += "\n\n" + \
|
||||
# ""
|
||||
# # image_path = gu.create_markdown_image("".join(plain_text))
|
||||
# image_path = await text_to_image_base("".join(plain_text))
|
||||
# plain_text = ""
|
||||
|
||||
# else:
|
||||
# if image_path is not None and image_path != '':
|
||||
# msg_ref = None
|
||||
# if image_path.startswith("http"):
|
||||
# async with aiohttp.ClientSession() as session:
|
||||
# async with session.get(image_path) as response:
|
||||
# if response.status == 200:
|
||||
# image = PILImage.open(io.BytesIO(await response.read()))
|
||||
# image_path = gu.save_temp_img(image)
|
||||
if self.pic_mode:
|
||||
plains = []
|
||||
news = []
|
||||
if isinstance(res, str):
|
||||
res = [Plain(text=res, convert=False),]
|
||||
for i in res:
|
||||
if isinstance(i, Plain):
|
||||
plains.append(i.text)
|
||||
else:
|
||||
news.append(i)
|
||||
plains_str = "".join(plains).strip()
|
||||
if plains_str and len(plains_str) > 50:
|
||||
p = await text_to_image_base(plains_str, return_url=False)
|
||||
with open(p, "rb") as f:
|
||||
news.append(Image.fromBytes(f.read()))
|
||||
res = news
|
||||
|
||||
if isinstance(res, list):
|
||||
plain_text, image_path = qq_official_message_parse(res)
|
||||
else:
|
||||
plain_text = res
|
||||
|
||||
if source is not None and image_path == '': # file_image与message_reference不能同时传入
|
||||
msg_ref = Reference(message_id=source.id,
|
||||
ignore_get_message_error=False)
|
||||
|
||||
# 到这里,我们得到了 plain_text,image_path,msg_ref
|
||||
data = {
|
||||
'content': plain_text,
|
||||
'msg_id': message.message_id,
|
||||
'message_reference': msg_ref
|
||||
}
|
||||
if message.type == MessageType.GROUP_MESSAGE:
|
||||
data['group_openid'] = str(source.group_openid)
|
||||
elif message.type == MessageType.GUILD_MESSAGE:
|
||||
data['channel_id'] = source.channel_id
|
||||
elif message.type == MessageType.FRIEND_MESSAGE:
|
||||
# 目前只处理频道私聊
|
||||
data['guild_id'] = source.guild_id
|
||||
else:
|
||||
raise ValueError(f"未知的消息类型: {message.type}")
|
||||
if image_path:
|
||||
data['file_image'] = image_path
|
||||
|
||||
try:
|
||||
await self._send_wrapper(**data)
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
# 分割过长的消息
|
||||
if "msg over length" in str(e):
|
||||
split_res = []
|
||||
split_res.append(plain_text[:len(plain_text)//2])
|
||||
split_res.append(plain_text[len(plain_text)//2:])
|
||||
for i in split_res:
|
||||
data['content'] = i
|
||||
await self._send_wrapper(**data)
|
||||
else:
|
||||
# 发送qq信息
|
||||
try:
|
||||
# 防止被qq频道过滤消息
|
||||
plain_text = plain_text.replace(".", " . ")
|
||||
await self._send_wrapper(**data)
|
||||
|
||||
except BaseException as e:
|
||||
try:
|
||||
data['content'] = str.join(" ", plain_text)
|
||||
await self._send_wrapper(**data)
|
||||
except BaseException as e:
|
||||
plain_text = re.sub(
|
||||
r'(https|http)?:\/\/(\w|\.|\/|\?|\=|\&|\%)*\b', '[被隐藏的链接]', str(e), flags=re.MULTILINE)
|
||||
plain_text = plain_text.replace(".", "·")
|
||||
data['content'] = plain_text
|
||||
await self._send_wrapper(**data)
|
||||
|
||||
async def _send_wrapper(self, **kwargs):
|
||||
if 'group_openid' in kwargs:
|
||||
# QQ群组消息
|
||||
media = None
|
||||
# qq群组消息需要自行上传,暂时不处理
|
||||
# if 'file_image' in kwargs:
|
||||
# file_image_path = kwargs['file_image']
|
||||
# if file_image_path != "":
|
||||
# media = await self.upload_img(file_image_path, kwargs['group_openid'])
|
||||
# del kwargs['file_image']
|
||||
# if media is not None:
|
||||
# kwargs['msg_type'] = 7 # 富媒体
|
||||
await self.client.api.post_group_message(media=media, **kwargs)
|
||||
elif 'channel_id' in kwargs:
|
||||
# 频道消息
|
||||
if 'file_image' in kwargs:
|
||||
kwargs['file_image'] = kwargs['file_image'].replace(
|
||||
"file://", "")
|
||||
await self.client.api.post_message(**kwargs)
|
||||
else:
|
||||
# 频道私聊消息
|
||||
if 'file_image' in kwargs:
|
||||
kwargs['file_image'] = kwargs['file_image'].replace(
|
||||
"file://", "")
|
||||
await self.client.api.post_dms(**kwargs)
|
||||
|
||||
async def send_msg(self,
|
||||
message_obj: Union[botpy.message.Message, botpy.message.GroupMessage, botpy.message.DirectMessage, AstrBotMessage],
|
||||
message_chain: List[BaseMessageComponent],
|
||||
):
|
||||
'''
|
||||
发送消息。目前只支持被动回复消息(即拥有一个 botpy Message 类型的 message_obj 传入)
|
||||
'''
|
||||
await self.reply_msg(message_obj, message_chain)
|
||||
|
||||
async def send(self,
|
||||
message_obj: Union[botpy.message.Message, botpy.message.GroupMessage, botpy.message.DirectMessage, AstrBotMessage],
|
||||
message_chain: List[BaseMessageComponent],
|
||||
):
|
||||
'''
|
||||
发送消息。目前只支持被动回复消息(即拥有一个 botpy Message 类型的 message_obj 传入)
|
||||
'''
|
||||
await self.reply_msg(message_obj, message_chain)
|
||||
|
||||
def wait_for_message(self, channel_id: int) -> AstrBotMessage:
|
||||
'''
|
||||
等待指定 channel_id 的下一条信息,超时 300s 后抛出异常
|
||||
'''
|
||||
self.waiting[channel_id] = ''
|
||||
cnt = 0
|
||||
while True:
|
||||
if channel_id in self.waiting and self.waiting[channel_id] != '':
|
||||
# 去掉
|
||||
ret = self.waiting[channel_id]
|
||||
del self.waiting[channel_id]
|
||||
return ret
|
||||
cnt += 1
|
||||
if cnt > 300:
|
||||
raise Exception("等待消息超时。")
|
||||
time.sleep(1)
|
||||
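wait_for_message above is a polling handshake: handle_msg drops the next message for a waiting session into self.waiting, and the caller spins once per second until it appears or roughly 300 s pass. A minimal, hypothetical usage sketch follows (the platform instance and the channel id are assumptions, not part of this diff); it must run off the bot's event-loop thread because it blocks with time.sleep:

import threading

def collect_reply(platform: "QQOfficial", channel_id: int, out: list):
    # Blocks until handle_msg() fills self.waiting[channel_id], or raises after ~300 s.
    try:
        out.append(platform.wait_for_message(channel_id))
    except Exception as exc:
        out.append(exc)

replies: list = []
# `platform` is assumed to be an already-running QQOfficial instance; 123456 is a placeholder channel id.
worker = threading.Thread(target=collect_reply, args=(platform, 123456, replies), daemon=True)
worker.start()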
@@ -1,217 +0,0 @@
|
||||
import io
|
||||
import botpy
|
||||
from PIL import Image as PILImage
|
||||
from botpy.message import Message, DirectMessage
|
||||
import re
|
||||
import asyncio
|
||||
import requests
|
||||
from cores.qqbot.personality import personalities
|
||||
from util import general_utils as gu
|
||||
from nakuru.entities.components import Plain, At, Image
|
||||
from botpy.types.message import Reference
|
||||
from botpy import Client
|
||||
import time
|
||||
|
||||
class NakuruGuildMember():
|
||||
tiny_id: int # 发送者识别号
|
||||
user_id: int # 发送者识别号
|
||||
title: str
|
||||
nickname: str # 昵称
|
||||
role: int # 角色
|
||||
icon_url: str # 头像url
|
||||
|
||||
class NakuruGuildMessage():
|
||||
type: str = "GuildMessage"
|
||||
self_id: int # bot的qq号
|
||||
self_tiny_id: int # bot的qq号
|
||||
sub_type: str # 消息类型
|
||||
message_id: str # 消息id
|
||||
guild_id: int # 频道号
|
||||
channel_id: int # 子频道号
|
||||
user_id: int # 发送者qq号
|
||||
message: list # 消息内容
|
||||
sender: NakuruGuildMember # 发送者信息
|
||||
raw_message: Message
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self.__dict__)
|
||||
|
||||
class QQChan():
|
||||
def __init__(self, cnt: dict = None) -> None:
|
||||
self.qqchan_cnt = 0
|
||||
self.waiting: dict = {}
|
||||
|
||||
def get_cnt(self):
|
||||
return self.qqchan_cnt
|
||||
|
||||
def set_cnt(self, cnt):
|
||||
self.qqchan_cnt = cnt
|
||||
|
||||
def run_bot(self, botclient: Client, appid, token):
|
||||
intents = botpy.Intents(public_guild_messages=True, direct_message=True)
|
||||
self.client = botclient
|
||||
self.client.run(appid=appid, token=token)
|
||||
|
||||
# gocq-频道SDK兼容层(发)
|
||||
def gocq_compatible_send(self, gocq_message_chain: list):
|
||||
plain_text = ""
|
||||
image_path = None # only one img supported
|
||||
for i in gocq_message_chain:
|
||||
if isinstance(i, Plain):
|
||||
plain_text += i.text
|
||||
elif isinstance(i, Image) and image_path == None:
|
||||
if i.path is not None:
|
||||
image_path = i.path
|
||||
else:
|
||||
image_path = i.file
|
||||
return plain_text, image_path
|
||||
|
||||
# gocq-频道SDK兼容层(收)
|
||||
def gocq_compatible_receive(self, message: Message) -> NakuruGuildMessage:
|
||||
ngm = NakuruGuildMessage()
|
||||
try:
|
||||
ngm.self_id = message.mentions[0].id
|
||||
ngm.self_tiny_id = message.mentions[0].id
|
||||
except:
|
||||
ngm.self_id = 0
|
||||
ngm.self_tiny_id = 0
|
||||
|
||||
ngm.sub_type = "normal"
|
||||
ngm.message_id = message.id
|
||||
ngm.guild_id = int(message.guild_id)
|
||||
ngm.channel_id = int(message.channel_id)
|
||||
ngm.user_id = int(message.author.id)
|
||||
msg = []
|
||||
plain_content = message.content.replace("<@!"+str(ngm.self_id)+">", "").strip()
|
||||
msg.append(Plain(plain_content))
|
||||
if message.attachments:
|
||||
for i in message.attachments:
|
||||
if i.content_type.startswith("image"):
|
||||
url = i.url
|
||||
if not url.startswith("http"):
|
||||
url = "https://"+url
|
||||
img = Image.fromURL(url)
|
||||
msg.append(img)
|
||||
ngm.message = msg
|
||||
ngm.sender = NakuruGuildMember()
|
||||
ngm.sender.tiny_id = int(message.author.id)
|
||||
ngm.sender.user_id = int(message.author.id)
|
||||
ngm.sender.title = ""
|
||||
ngm.sender.nickname = message.author.username
|
||||
ngm.sender.role = 0
|
||||
ngm.sender.icon_url = message.author.avatar
|
||||
ngm.raw_message = message
|
||||
return ngm
|
||||
|
||||
def send_qq_msg(self,
|
||||
message: NakuruGuildMessage,
|
||||
res: list):
|
||||
'''
|
||||
回复频道消息
|
||||
'''
|
||||
gu.log("回复QQ频道消息: "+str(res), level=gu.LEVEL_INFO, tag="QQ频道", max_len=500)
|
||||
self.qqchan_cnt += 1
|
||||
plain_text = ""
|
||||
image_path = None
|
||||
if isinstance(res, list):
|
||||
# 兼容gocq
|
||||
plain_text, image_path = self.gocq_compatible_send(res)
|
||||
elif isinstance(res, str):
|
||||
plain_text = res
|
||||
|
||||
# print(plain_text, image_path)
|
||||
msg_ref = None
|
||||
if message.raw_message is not None:
|
||||
msg_ref = Reference(message_id=message.raw_message.id, ignore_get_message_error=False)
|
||||
if image_path is not None:
|
||||
msg_ref = None
|
||||
if image_path.startswith("http"):
|
||||
pic_res = requests.get(image_path, stream = True)
|
||||
if pic_res.status_code == 200:
|
||||
image = PILImage.open(io.BytesIO(pic_res.content))
|
||||
image_path = gu.save_temp_img(image)
|
||||
|
||||
|
||||
|
||||
try:
|
||||
# reply_res = asyncio.run_coroutine_threadsafe(message.raw_message.reply(content=str(plain_text), message_reference = msg_ref, file_image=image_path), self.client.loop)
|
||||
reply_res = asyncio.run_coroutine_threadsafe(self.client.api.post_message(channel_id=str(message.channel_id),
|
||||
content=str(plain_text),
|
||||
msg_id=message.message_id,
|
||||
file_image=image_path,
|
||||
message_reference=msg_ref), self.client.loop)
|
||||
reply_res.result()
|
||||
except BaseException as e:
|
||||
# 分割过长的消息
|
||||
if "msg over length" in str(e):
|
||||
split_res = []
|
||||
split_res.append(plain_text[:len(plain_text)//2])
|
||||
split_res.append(plain_text[len(plain_text)//2:])
|
||||
for i in split_res:
|
||||
reply_res = asyncio.run_coroutine_threadsafe(self.client.api.post_message(channel_id=str(message.channel_id),
|
||||
content=str(i),
|
||||
msg_id=message.message_id,
|
||||
file_image=image_path,
|
||||
message_reference=msg_ref), self.client.loop)
|
||||
reply_res.result()
|
||||
else:
|
||||
# 发送qq信息
|
||||
try:
|
||||
# 防止被qq频道过滤消息
|
||||
plain_text = plain_text.replace(".", " . ")
|
||||
reply_res = asyncio.run_coroutine_threadsafe(self.client.api.post_message(channel_id=str(message.channel_id),
|
||||
content=str(plain_text),
|
||||
msg_id=message.message_id,
|
||||
file_image=image_path,
|
||||
message_reference=msg_ref), self.client.loop).result() # 发送信息
|
||||
except BaseException as e:
|
||||
print("QQ频道API错误: \n"+str(e))
|
||||
try:
|
||||
# reply_res = asyncio.run_coroutine_threadsafe(message.raw_message.reply(content=str(str.join(" ", plain_text)), message_reference = msg_ref, file_image=image_path), self.client.loop)
|
||||
reply_res = asyncio.run_coroutine_threadsafe(self.client.api.post_message(channel_id=str(message.channel_id),
|
||||
content=str(str.join(" ", plain_text)),
|
||||
msg_id=message.message_id,
|
||||
file_image=image_path,
|
||||
message_reference=msg_ref), self.client.loop).result()
|
||||
except BaseException as e:
|
||||
plain_text = re.sub(r'(https|http)?:\/\/(\w|\.|\/|\?|\=|\&|\%)*\b', '[被隐藏的链接]', str(e), flags=re.MULTILINE)
|
||||
plain_text = plain_text.replace(".", "·")
|
||||
reply_res = asyncio.run_coroutine_threadsafe(self.client.api.post_message(channel_id=str(message.channel_id),
|
||||
content=plain_text,
|
||||
msg_id=message.message_id,
|
||||
file_image=image_path,
|
||||
message_reference=msg_ref), self.client.loop).result()
|
||||
# send(message, f"QQ频道API错误:{str(e)}\n下面是格式化后的回答:\n{f_res}")
|
||||
|
||||
def push_message(self, channel_id: int, message_chain: list, message_id: int = None):
|
||||
'''
|
||||
推送消息, 如果有 message_id,那么就是回复消息。
|
||||
'''
|
||||
_n = NakuruGuildMessage()
|
||||
_n.channel_id = channel_id
|
||||
_n.message_id = message_id
|
||||
self.send_qq_msg(_n, message_chain)
|
||||
|
||||
def send(self, message_obj, message_chain: list):
|
||||
'''
|
||||
发送信息
|
||||
'''
|
||||
self.send_qq_msg(message_obj, message_chain)
|
||||
|
||||
def wait_for_message(self, channel_id: int) -> NakuruGuildMessage:
|
||||
'''
|
||||
等待指定 channel_id 的下一条信息,超时 300s 后抛出异常
|
||||
'''
|
||||
self.waiting[channel_id] = ''
|
||||
cnt = 0
|
||||
while True:
|
||||
if channel_id in self.waiting and self.waiting[channel_id] != '':
|
||||
# 去掉
|
||||
ret = self.waiting[channel_id]
|
||||
del self.waiting[channel_id]
|
||||
return ret
|
||||
cnt += 1
|
||||
if cnt > 300:
|
||||
raise Exception("等待消息超时。")
|
||||
time.sleep(1)
|
||||
|
||||
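The removed QQChan class above calls botpy's async API from synchronous code by submitting coroutines to the client's event loop with asyncio.run_coroutine_threadsafe(...).result(). A self-contained, generic sketch of that pattern (fake_post_message stands in for the real botpy call and is illustrative only):

import asyncio
import threading

async def fake_post_message(channel_id: str, content: str) -> str:
    await asyncio.sleep(0.1)          # stands in for the real botpy API call
    return f"sent to {channel_id}: {content}"

# Run an event loop in a background thread, as botpy's client does internally.
loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()

# Submit the coroutine from the synchronous thread and block for its result.
future = asyncio.run_coroutine_threadsafe(fake_post_message("42", "hello"), loop)
print(future.result(timeout=5))
loop.call_soon_threadsafe(loop.stop)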
@@ -1,188 +0,0 @@
|
||||
import requests
|
||||
import asyncio
|
||||
import websockets
|
||||
from websockets import WebSocketClientProtocol
|
||||
import json
|
||||
import inspect
|
||||
from typing import Callable, Awaitable, Union
|
||||
from pydantic import BaseModel
|
||||
import datetime
|
||||
|
||||
class Event(BaseModel):
|
||||
GroupMessage: str = "GuildMessage"
|
||||
|
||||
class Sender(BaseModel):
|
||||
user_id: str
|
||||
member_openid: str
|
||||
|
||||
|
||||
class MessageComponent(BaseModel):
|
||||
type: str
|
||||
|
||||
class PlainText(MessageComponent):
|
||||
text: str
|
||||
|
||||
class Image(MessageComponent):
|
||||
path: str
|
||||
file: str
|
||||
url: str
|
||||
|
||||
class MessageChain(list):
|
||||
|
||||
def append(self, __object: MessageComponent) -> None:
|
||||
if not isinstance(__object, MessageComponent):
|
||||
raise TypeError("不受支持的消息链元素类型。回复的消息链必须是 MessageComponent 的子类。")
|
||||
return super().append(__object)
|
||||
|
||||
def insert(self, __index: int, __object: MessageComponent) -> None:
|
||||
if not isinstance(__object, MessageComponent):
|
||||
raise TypeError("不受支持的消息链元素类型。回复的消息链必须是 MessageComponent 的子类。")
|
||||
return super().insert(__index, __object)
|
||||
|
||||
def parse_from_nakuru(self, nakuru_message_chain: Union[list, str]) -> None:
|
||||
if isinstance(nakuru_message_chain, str):
|
||||
self.append(PlainText(type='Plain', text=nakuru_message_chain))
|
||||
else:
|
||||
for i in nakuru_message_chain:
|
||||
if i['type'] == 'Plain':
|
||||
self.append(PlainText(type='Plain', text=i['text']))
|
||||
elif i['type'] == 'Image':
|
||||
self.append(Image(path=i['path'], file=i['file'], url=i['url']))
|
||||
|
||||
class Message(BaseModel):
|
||||
type: str
|
||||
user_id: str
|
||||
member_openid: str
|
||||
message_id: str
|
||||
group_id: str
|
||||
group_openid: str
|
||||
content: str
|
||||
message: MessageChain
|
||||
time: int
|
||||
sender: Sender
|
||||
|
||||
class UnofficialQQBotSDK:
|
||||
|
||||
GET_APP_ACCESS_TOKEN_URL = "https://bots.qq.com/app/getAppAccessToken"
|
||||
OPENAPI_BASE_URL = "https://api.sgroup.qq.com"
|
||||
|
||||
def __init__(self, appid: str, client_secret: str) -> None:
|
||||
self.appid = appid
|
||||
self.client_secret = client_secret
|
||||
self.events: dict[str, Awaitable] = {}
|
||||
|
||||
|
||||
def run_bot(self) -> None:
|
||||
self.__get_access_token()
|
||||
self.__get_wss_endpoint()
|
||||
asyncio.get_event_loop().run_until_complete(self.__ws_client())
|
||||
|
||||
def __get_access_token(self) -> None:
|
||||
res = requests.post(self.GET_APP_ACCESS_TOKEN_URL, json={
|
||||
"appId": self.appid,
|
||||
"clientSecret": self.client_secret
|
||||
}, headers={
|
||||
"Content-Type": "application/json"
|
||||
})
|
||||
res = res.json()
|
||||
code = res['code'] if 'code' in res else 1
|
||||
if 'access_token' not in res:
|
||||
raise Exception(f"获取 access_token 失败。原因:{res['message'] if 'message' in res else '未知'}")
|
||||
self.access_token = 'QQBot ' + res['access_token']
|
||||
|
||||
def __auth_header(self) -> str:
|
||||
return {
|
||||
'Authorization': self.access_token,
|
||||
'X-Union-Appid': self.appid,
|
||||
}
|
||||
|
||||
def __get_wss_endpoint(self):
|
||||
res = requests.get(self.OPENAPI_BASE_URL + "/gateway", headers=self.__auth_header())
|
||||
self.wss_endpoint = res.json()['url']
|
||||
# print("wss_endpoint: " + self.wss_endpoint)
|
||||
|
||||
async def __behav_heartbeat(self, ws: WebSocketClientProtocol, t: int):
|
||||
while True:
|
||||
await asyncio.sleep(t - 1)
|
||||
try:
|
||||
await ws.send(json.dumps({
|
||||
"op": 1,
|
||||
"d": self.s
|
||||
}))
|
||||
except:
|
||||
print("heartbeat error.")
|
||||
|
||||
async def __handle_msg(self, ws: WebSocketClientProtocol, msg: dict):
|
||||
if msg['op'] == 10:
|
||||
asyncio.get_event_loop().create_task(self.__behav_heartbeat(ws, msg['d']['heartbeat_interval'] / 1000))
|
||||
# 鉴权,获得session
|
||||
await ws.send(json.dumps({
|
||||
"op": 2,
|
||||
"d": {
|
||||
"token": self.access_token,
|
||||
"intents": 33554432,
|
||||
"shard": [0, 1],
|
||||
"properties": {
|
||||
"$os": "linux",
|
||||
"$browser": "my_library",
|
||||
"$device": "my_library"
|
||||
}
|
||||
}
|
||||
}))
|
||||
if msg['op'] == 0:
|
||||
# ready
|
||||
data = msg['d']
|
||||
event_typ: str = msg['t'] if 't' in msg else None
|
||||
if event_typ == 'GROUP_AT_MESSAGE_CREATE':
|
||||
if 'GroupMessage' in self.events:
|
||||
coro = self.events['GroupMessage']
|
||||
else:
|
||||
return
|
||||
message_chain = MessageChain()
|
||||
message_chain.append(PlainText(type="Plain", text=data['content']))
|
||||
group_message = Message(
|
||||
type='GroupMessage',
|
||||
user_id=data['author']['id'],
|
||||
member_openid=data['author']['member_openid'],
|
||||
message_id=data['id'],
|
||||
group_id=data['group_id'],
|
||||
group_openid=data['group_openid'],
|
||||
content=data['content'],
|
||||
# 2023-11-24T19:51:11+08:00
|
||||
time=int(datetime.datetime.strptime(data['timestamp'], "%Y-%m-%dT%H:%M:%S%z").timestamp()),
|
||||
sender=Sender(
|
||||
user_id=data['author']['id'],
|
||||
member_openid=data['author']['member_openid']
|
||||
),
|
||||
message=message_chain
|
||||
)
|
||||
await coro(self, group_message)
|
||||
|
||||
async def send(self, message: Message, message_chain: MessageChain) -> None:
|
||||
# todo: 消息链转换支持更多类型。
|
||||
plain_text = ""
|
||||
for i in message_chain:
|
||||
if isinstance(i, PlainText):
|
||||
plain_text += i.text
|
||||
requests.post(self.OPENAPI_BASE_URL + f"/v2/groups/{message.group_openid}/messages", headers=self.__auth_header(), json={
|
||||
"content": plain_text,
|
||||
"message_type": 0,
|
||||
"msg_id": message.message_id
|
||||
})
|
||||
|
||||
async def __ws_client(self):
|
||||
self.s = 0
|
||||
async with websockets.connect(self.wss_endpoint) as websocket:
|
||||
while True:
|
||||
msg = await websocket.recv()
|
||||
msg = json.loads(msg)
|
||||
if 's' in msg:
|
||||
self.s = msg['s']
|
||||
await self.__handle_msg(websocket, msg)
|
||||
|
||||
def on(self, event: str) -> None:
|
||||
def wrapper(func: Awaitable):
|
||||
if inspect.iscoroutinefunction(func) == False:
|
||||
raise TypeError("func must be a coroutine function")
|
||||
self.events[event] = func
|
||||
return wrapper
|
||||
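For reference, the removed UnofficialQQBotSDK was driven by registering an async handler through on() and then starting the websocket loop with run_bot(). A hedged usage sketch, assuming the classes from the removed module are importable; the appid/secret values are placeholders:

sdk = UnofficialQQBotSDK(appid="<appid>", client_secret="<secret>")

@sdk.on("GroupMessage")
async def on_group_message(bot: UnofficialQQBotSDK, message: Message):
    # Dispatched on GROUP_AT_MESSAGE_CREATE; echo the content back via the passive-reply endpoint.
    reply = MessageChain()
    reply.append(PlainText(type="Plain", text="收到:" + message.content))
    await bot.send(message, reply)
    # Note: on()'s wrapper returns None, so the decorated name is only used for registration.

sdk.run_bot()  # blocks: fetches access_token, resolves the gateway URL, then runs the ws client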
@@ -1,106 +1,113 @@
|
||||
from openai import OpenAI
|
||||
from openai.types.chat.chat_completion import ChatCompletion
|
||||
from openai.types.images_response import ImagesResponse
|
||||
import json
|
||||
import time
|
||||
import os
|
||||
import sys
|
||||
from cores.database.conn import dbConn
|
||||
from model.provider.provider import Provider
|
||||
import json
|
||||
import time
|
||||
import tiktoken
|
||||
import threading
|
||||
import traceback
|
||||
import base64
|
||||
|
||||
from openai import AsyncOpenAI
|
||||
from openai.types.images_response import ImagesResponse
|
||||
from openai.types.chat.chat_completion import ChatCompletion
|
||||
from openai._exceptions import *
|
||||
|
||||
from persist.session import dbConn
|
||||
from model.provider.provider import Provider
|
||||
from util import general_utils as gu
|
||||
from util.cmd_config import CmdConfig
|
||||
import traceback
|
||||
import tiktoken
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from typing import List, Dict
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
abs_path = os.path.dirname(os.path.realpath(sys.argv[0])) + '/'
|
||||
MODELS = {
|
||||
"gpt-4o": 128000,
|
||||
"gpt-4o-2024-05-13": 128000,
|
||||
"gpt-4-turbo": 128000,
|
||||
"gpt-4-turbo-2024-04-09": 128000,
|
||||
"gpt-4-turbo-preview": 128000,
|
||||
"gpt-4-0125-preview": 128000,
|
||||
"gpt-4-1106-preview": 128000,
|
||||
"gpt-4-vision-preview": 128000,
|
||||
"gpt-4-1106-vision-preview": 128000,
|
||||
"gpt-4": 8192,
|
||||
"gpt-4-0613": 8192,
|
||||
"gpt-4-32k": 32768,
|
||||
"gpt-4-32k-0613": 32768,
|
||||
"gpt-3.5-turbo-0125": 16385,
|
||||
"gpt-3.5-turbo": 16385,
|
||||
"gpt-3.5-turbo-1106": 16385,
|
||||
"gpt-3.5-turbo-instruct": 4096,
|
||||
"gpt-3.5-turbo-16k": 16385,
|
||||
"gpt-3.5-turbo-0613": 16385,
|
||||
"gpt-3.5-turbo-16k-0613": 16385,
|
||||
}
|
||||
|
||||
class ProviderOpenAIOfficial(Provider):
|
||||
def __init__(self, cfg):
|
||||
self.cc = CmdConfig()
|
||||
def __init__(self, cfg) -> None:
|
||||
super().__init__()
|
||||
|
||||
self.key_list = []
|
||||
# 如果 cfg['key']中有长度为1的字符串,那么是格式错误,直接报错
|
||||
for key in cfg['key']:
|
||||
if len(key) == 1:
|
||||
input("检查到了长度为 1 的Key。配置文件中的 openai.key 处的格式错误 (符号 - 的后面要加空格),请退出程序并检查配置文件,按回车跳过。")
|
||||
raise BaseException("配置文件格式错误")
|
||||
if cfg['key'] != '' and cfg['key'] != None:
|
||||
self.key_list = cfg['key']
|
||||
os.makedirs("data/openai", exist_ok=True)
|
||||
|
||||
self.cc = CmdConfig
|
||||
self.key_data_path = "data/openai/keys.json"
|
||||
self.api_keys = []
|
||||
self.chosen_api_key = None
|
||||
self.base_url = None
|
||||
self.keys_data = {} # 记录超额
|
||||
|
||||
if cfg['key']: self.api_keys = cfg['key']
|
||||
if cfg['api_base']: self.base_url = cfg['api_base']
|
||||
if not self.api_keys:
|
||||
logger.warn("看起来你没有添加 OpenAI 的 API 密钥,OpenAI LLM 能力将不会启用。")
|
||||
else:
|
||||
input("[System] 请先去完善ChatGPT的Key。详情请前往https://beta.openai.com/account/api-keys")
|
||||
if len(self.key_list) == 0:
|
||||
raise Exception("您打开了 OpenAI 模型服务,但是未填写 key。请前往填写。")
|
||||
|
||||
self.key_stat = {}
|
||||
for k in self.key_list:
|
||||
self.key_stat[k] = {'exceed': False, 'used': 0}
|
||||
self.chosen_api_key = self.api_keys[0]
|
||||
|
||||
self.api_base = None
|
||||
if 'api_base' in cfg and cfg['api_base'] != 'none' and cfg['api_base'] != '':
|
||||
self.api_base = cfg['api_base']
|
||||
gu.log(f"设置 api_base 为: {self.api_base}")
|
||||
# openai client
|
||||
self.client = OpenAI(
|
||||
api_key=self.key_list[0],
|
||||
base_url=self.api_base
|
||||
for key in self.api_keys:
|
||||
self.keys_data[key] = True
|
||||
|
||||
self.client = AsyncOpenAI(
|
||||
api_key=self.chosen_api_key,
|
||||
base_url=self.base_url
|
||||
)
|
||||
|
||||
self.openai_model_configs: dict = cfg['chatGPTConfigs']
|
||||
gu.log(f'加载 OpenAI Chat Configs: {self.openai_model_configs}')
|
||||
self.openai_configs = cfg
|
||||
# 会话缓存
|
||||
self.session_dict = {}
|
||||
# 最大缓存token
|
||||
self.max_tokens = cfg['total_tokens_limit']
|
||||
# 历史记录持久化间隔时间
|
||||
self.history_dump_interval = 20
|
||||
|
||||
self.enc = tiktoken.get_encoding("cl100k_base")
|
||||
|
||||
# 读取历史记录
|
||||
self.model_configs: Dict = cfg['chatGPTConfigs']
|
||||
super().set_curr_model(self.model_configs['model'])
|
||||
self.image_generator_model_configs: Dict = self.cc.get('openai_image_generate', None)
|
||||
self.session_memory: Dict[str, List] = {} # 会话记忆
|
||||
self.session_memory_lock = threading.Lock()
|
||||
self.max_tokens = self.model_configs['max_tokens'] # 上下文窗口大小
|
||||
self.tokenizer = tiktoken.get_encoding("cl100k_base") # todo: 根据 model 切换分词器
|
||||
self.DEFAULT_PERSONALITY = {
|
||||
"name": "default",
|
||||
"prompt": "你是一个很有帮助的 AI 助手。"
|
||||
}
|
||||
self.curr_personality = self.DEFAULT_PERSONALITY
|
||||
self.session_personality = {} # 记录了某个session是否已设置人格。
|
||||
# 从 SQLite DB 读取历史记录
|
||||
try:
|
||||
db1 = dbConn()
|
||||
for session in db1.get_all_session():
|
||||
self.session_dict[session[0]] = json.loads(session[1])['data']
|
||||
gu.log("读取历史记录成功。")
|
||||
self.session_memory_lock.acquire()
|
||||
self.session_memory[session[0]] = json.loads(session[1])['data']
|
||||
self.session_memory_lock.release()
|
||||
except BaseException as e:
|
||||
gu.log("读取历史记录失败,但不影响使用。", level=gu.LEVEL_ERROR)
|
||||
|
||||
|
||||
# 读取统计信息
|
||||
if not os.path.exists(abs_path+"configs/stat"):
|
||||
with open(abs_path+"configs/stat", 'w', encoding='utf-8') as f:
|
||||
json.dump({}, f)
|
||||
self.stat_file = open(abs_path+"configs/stat", 'r', encoding='utf-8')
|
||||
global count
|
||||
res = self.stat_file.read()
|
||||
if res == '':
|
||||
count = {}
|
||||
else:
|
||||
try:
|
||||
count = json.loads(res)
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
# 创建转储定时器线程
|
||||
logger.warn(f"读取 OpenAI LLM 对话历史记录 失败:{e}。仍可正常使用。")
|
||||
|
||||
# 定时保存历史记录
|
||||
threading.Thread(target=self.dump_history, daemon=True).start()
|
||||
|
||||
# 人格
|
||||
self.curr_personality = {}
|
||||
|
||||
|
||||
# 转储历史记录
|
||||
def dump_history(self):
|
||||
'''
|
||||
转储历史记录
|
||||
'''
|
||||
time.sleep(10)
|
||||
db = dbConn()
|
||||
while True:
|
||||
try:
|
||||
# print("转储历史记录...")
|
||||
for key in self.session_dict:
|
||||
# print("TEST: "+str(db.get_session(key)))
|
||||
data = self.session_dict[key]
|
||||
for key in self.session_memory:
|
||||
data = self.session_memory[key]
|
||||
data_json = {
|
||||
'data': data
|
||||
}
|
||||
@@ -108,365 +115,386 @@ class ProviderOpenAIOfficial(Provider):
|
||||
db.update_session(key, json.dumps(data_json))
|
||||
else:
|
||||
db.insert_session(key, json.dumps(data_json))
|
||||
# print("转储历史记录完毕")
|
||||
logger.debug("已保存 OpenAI 会话历史记录")
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
# 每隔10分钟转储一次
|
||||
time.sleep(10*self.history_dump_interval)
|
||||
|
||||
finally:
|
||||
time.sleep(10*60)
|
||||
|
||||
def personality_set(self, default_personality: dict, session_id: str):
|
||||
if not default_personality: return
|
||||
if session_id not in self.session_memory:
|
||||
self.session_memory[session_id] = []
|
||||
self.curr_personality = default_personality
|
||||
self.session_personality = {} # 重置
|
||||
encoded_prompt = self.tokenizer.encode(default_personality['prompt'])
|
||||
tokens_num = len(encoded_prompt)
|
||||
model = self.model_configs['model']
|
||||
if model in MODELS and tokens_num > MODELS[model] - 500:
|
||||
default_personality['prompt'] = self.tokenizer.decode(encoded_prompt[:MODELS[model] - 500])
|
||||
|
||||
new_record = {
|
||||
"user": {
|
||||
"role": "user",
|
||||
"role": "system",
|
||||
"content": default_personality['prompt'],
|
||||
},
|
||||
"AI": {
|
||||
"role": "assistant",
|
||||
"content": "好的,接下来我会扮演这个角色。"
|
||||
},
|
||||
'type': "personality",
|
||||
'usage_tokens': 0,
|
||||
'single-tokens': 0
|
||||
'usage_tokens': 0, # 到该条目的总 token 数
|
||||
'single-tokens': 0 # 该条目的 token 数
|
||||
}
|
||||
self.session_dict[session_id].append(new_record)
|
||||
|
||||
def text_chat(self, prompt,
|
||||
session_id = None,
|
||||
image_url = None,
|
||||
function_call=None,
|
||||
extra_conf: dict = None,
|
||||
default_personality: dict = None):
|
||||
if session_id is None:
|
||||
session_id = "unknown"
|
||||
if "unknown" in self.session_dict:
|
||||
del self.session_dict["unknown"]
|
||||
# 会话机制
|
||||
if session_id not in self.session_dict:
|
||||
self.session_dict[session_id] = []
|
||||
self.session_memory[session_id].append(new_record)
|
||||
|
||||
fjson = {}
|
||||
try:
|
||||
f = open(abs_path+"configs/session", "r", encoding="utf-8")
|
||||
fjson = json.loads(f.read())
|
||||
f.close()
|
||||
except:
|
||||
pass
|
||||
finally:
|
||||
fjson[session_id] = 'true'
|
||||
f = open(abs_path+"configs/session", "w", encoding="utf-8")
|
||||
f.write(json.dumps(fjson))
|
||||
f.flush()
|
||||
f.close()
|
||||
async def encode_image_bs64(self, image_url: str) -> str:
|
||||
'''
|
||||
将图片转换为 base64
|
||||
'''
|
||||
if image_url.startswith("http"):
|
||||
image_url = await gu.download_image_by_url(image_url)
|
||||
|
||||
if len(self.session_dict[session_id]) == 0:
|
||||
# 设置默认人格
|
||||
if default_personality is not None:
|
||||
self.personality_set(default_personality, session_id)
|
||||
with open(image_url, "rb") as f:
|
||||
image_bs64 = base64.b64encode(f.read()).decode()
|
||||
return "data:image/jpeg;base64," + image_bs64
|
||||
|
||||
|
||||
# 使用 tictoken 截断消息
|
||||
_encoded_prompt = self.enc.encode(prompt)
|
||||
if self.openai_model_configs['max_tokens'] < len(_encoded_prompt):
|
||||
prompt = self.enc.decode(_encoded_prompt[:int(self.openai_model_configs['max_tokens']*0.80)])
|
||||
gu.log(f"注意,有一部分 prompt 文本由于超出 token 限制而被截断。", level=gu.LEVEL_WARNING, max_len=300)
|
||||
|
||||
cache_data_list, new_record, req = self.wrap(prompt, session_id, image_url)
|
||||
gu.log(f"CACHE_DATA_: {str(cache_data_list)}", level=gu.LEVEL_DEBUG, max_len=99999)
|
||||
gu.log(f"OPENAI REQUEST: {str(req)}", level=gu.LEVEL_DEBUG, max_len=9999)
|
||||
retry = 0
|
||||
response = None
|
||||
err = ''
|
||||
|
||||
# 截断倍率
|
||||
truncate_rate = 0.75
|
||||
|
||||
use_gpt4v = False
|
||||
for i in req:
|
||||
if isinstance(i['content'], list):
|
||||
use_gpt4v = True
|
||||
break
|
||||
if image_url is not None:
|
||||
use_gpt4v = True
|
||||
if use_gpt4v:
|
||||
conf = self.openai_model_configs.copy()
|
||||
conf['model'] = 'gpt-4-vision-preview'
|
||||
else:
|
||||
conf = self.openai_model_configs
|
||||
|
||||
if extra_conf is not None:
|
||||
conf.update(extra_conf)
|
||||
|
||||
while retry < 10:
|
||||
try:
|
||||
if function_call is None:
|
||||
response = self.client.chat.completions.create(
|
||||
messages=req,
|
||||
**conf
|
||||
)
|
||||
else:
|
||||
response = self.client.chat.completions.create(
|
||||
messages=req,
|
||||
tools = function_call,
|
||||
**conf
|
||||
)
|
||||
break
|
||||
except Exception as e:
|
||||
print(traceback.format_exc())
|
||||
if 'Invalid content type. image_url is only supported by certain models.' in str(e):
|
||||
raise e
|
||||
if 'You exceeded' in str(e) or 'Billing hard limit has been reached' in str(e) or 'No API key provided' in str(e) or 'Incorrect API key provided' in str(e):
|
||||
gu.log("当前Key已超额或异常, 正在切换", level=gu.LEVEL_WARNING)
|
||||
self.key_stat[self.client.api_key]['exceed'] = True
|
||||
is_switched = self.handle_switch_key()
|
||||
if not is_switched:
|
||||
# 所有Key都超额或不正常
|
||||
raise e
|
||||
retry -= 1
|
||||
elif 'maximum context length' in str(e):
|
||||
gu.log("token超限, 清空对应缓存,并进行消息截断")
|
||||
self.session_dict[session_id] = []
|
||||
prompt = prompt[:int(len(prompt)*truncate_rate)]
|
||||
truncate_rate -= 0.05
|
||||
cache_data_list, new_record, req = self.wrap(prompt, session_id)
|
||||
|
||||
elif 'Limit: 3 / min. Please try again in 20s.' in str(e) or "OpenAI response error" in str(e):
|
||||
time.sleep(30)
|
||||
async def retrieve_context(self, session_id: str):
|
||||
'''
|
||||
根据 session_id 获取保存的 OpenAI 格式的上下文
|
||||
'''
|
||||
if session_id not in self.session_memory:
|
||||
raise Exception("会话 ID 不存在")
|
||||
|
||||
# 转换为 openai 要求的格式
|
||||
context = []
|
||||
is_lvm = await self.is_lvm()
|
||||
for record in self.session_memory[session_id]:
|
||||
if "user" in record and record['user']:
|
||||
if not is_lvm and "content" in record['user'] and isinstance(record['user']['content'], list):
|
||||
logger.warn(f"由于当前模型 {self.model_configs['model']}不支持视觉,将忽略上下文中的图片输入。如果一直弹出此警告,可以尝试 reset 指令。")
|
||||
continue
|
||||
else:
|
||||
gu.log(str(e), level=gu.LEVEL_ERROR)
|
||||
time.sleep(2)
|
||||
err = str(e)
|
||||
retry += 1
|
||||
if retry >= 10:
|
||||
gu.log(r"如果报错, 且您的机器在中国大陆内, 请确保您的电脑已经设置好代理软件(梯子), 并在配置文件设置了系统代理地址。详见https://github.com/Soulter/QQChannelChatGPT/wiki/%E4%BA%8C%E3%80%81%E9%A1%B9%E7%9B%AE%E9%85%8D%E7%BD%AE%E6%96%87%E4%BB%B6%E9%85%8D%E7%BD%AE", max_len=999)
|
||||
raise BaseException("连接出错: "+str(err))
|
||||
assert isinstance(response, ChatCompletion)
|
||||
gu.log(f"OPENAI RESPONSE: {response.usage}", level=gu.LEVEL_DEBUG, max_len=9999)
|
||||
context.append(record['user'])
|
||||
if "AI" in record and record['AI']:
|
||||
context.append(record['AI'])
|
||||
|
||||
# 结果分类
|
||||
choice = response.choices[0]
|
||||
if choice.message.content != None:
|
||||
# 文本形式
|
||||
chatgpt_res = str(choice.message.content).strip()
|
||||
elif choice.message.tool_calls != None and len(choice.message.tool_calls) > 0:
|
||||
return context
|
||||
|
||||
async def is_lvm(self):
|
||||
'''
|
||||
是否是 LVM
|
||||
'''
|
||||
return self.model_configs['model'].startswith("gpt-4")
|
||||
|
||||
async def get_models(self):
|
||||
'''
|
||||
获取所有模型
|
||||
'''
|
||||
models = await self.client.models.list()
|
||||
logger.info(f"OpenAI 模型列表:{models}")
|
||||
return models
|
||||
|
||||
async def assemble_context(self, session_id: str, prompt: str, image_url: str = None):
|
||||
'''
|
||||
组装上下文,并且根据当前上下文窗口大小截断
|
||||
'''
|
||||
if session_id not in self.session_memory:
|
||||
raise Exception("会话 ID 不存在")
|
||||
|
||||
tokens_num = len(self.tokenizer.encode(prompt))
|
||||
previous_total_tokens_num = 0 if not self.session_memory[session_id] else self.session_memory[session_id][-1]['usage_tokens']
|
||||
|
||||
message = {
|
||||
"usage_tokens": previous_total_tokens_num + tokens_num,
|
||||
"single_tokens": tokens_num,
|
||||
"AI": None
|
||||
}
|
||||
if image_url:
|
||||
user_content = {
|
||||
"role": "user",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": prompt
|
||||
},
|
||||
{
|
||||
"type": "image_url",
|
||||
"image_url": {
|
||||
"url": await self.encode_image_bs64(image_url)
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
else:
|
||||
user_content = {
|
||||
"role": "user",
|
||||
"content": prompt
|
||||
}
|
||||
|
||||
message["user"] = user_content
|
||||
self.session_memory[session_id].append(message)
|
||||
|
||||
# 根据 模型的上下文窗口 淘汰掉多余的记录
|
||||
curr_model = self.model_configs['model']
|
||||
if curr_model in MODELS:
|
||||
maxium_tokens_num = MODELS[curr_model] - 300 # 至少预留 300 给 completion
|
||||
# if message['usage_tokens'] > maxium_tokens_num:
|
||||
# 淘汰多余的记录,使得最终的 usage_tokens 不超过 maxium_tokens_num - 300
|
||||
# contexts = self.session_memory[session_id]
|
||||
# need_to_remove_idx = 0
|
||||
# freed_tokens_num = contexts[0]['single-tokens']
|
||||
# while freed_tokens_num < message['usage_tokens'] - maxium_tokens_num:
|
||||
# need_to_remove_idx += 1
|
||||
# freed_tokens_num += contexts[need_to_remove_idx]['single-tokens']
|
||||
# # 更新之后的所有记录的 usage_tokens
|
||||
# for i in range(len(contexts)):
|
||||
# if i > need_to_remove_idx:
|
||||
# contexts[i]['usage_tokens'] -= freed_tokens_num
|
||||
# logger.debug(f"淘汰上下文记录 {need_to_remove_idx+1} 条,释放 {freed_tokens_num} 个 token。当前上下文总 token 为 {contexts[-1]['usage_tokens']}。")
|
||||
# self.session_memory[session_id] = contexts[need_to_remove_idx+1:]
|
||||
while len(self.session_memory[session_id]) and self.session_memory[session_id][-1]['usage_tokens'] > maxium_tokens_num:
|
||||
self.pop_record(session_id)
|
||||
|
||||
|
||||
async def pop_record(self, session_id: str, pop_system_prompt: bool = False):
|
||||
'''
|
||||
弹出第一条记录
|
||||
'''
|
||||
if session_id not in self.session_memory:
|
||||
raise Exception("会话 ID 不存在")
|
||||
|
||||
if len(self.session_memory[session_id]) == 0:
|
||||
return None
|
||||
|
||||
for i in range(len(self.session_memory[session_id])):
|
||||
# 检查是否是 system prompt
|
||||
if not pop_system_prompt and self.session_memory[session_id][i]['user']['role'] == "system":
|
||||
# 如果只有一个 system prompt,才不删掉
|
||||
f = False
|
||||
for j in range(i+1, len(self.session_memory[session_id])):
|
||||
if self.session_memory[session_id][j]['user']['role'] == "system":
|
||||
f = True
|
||||
break
|
||||
if not f:
|
||||
continue
|
||||
record = self.session_memory[session_id].pop(i)
|
||||
break
|
||||
|
||||
# 更新之后所有记录的 usage_tokens
|
||||
for i in range(len(self.session_memory[session_id])):
|
||||
self.session_memory[session_id][i]['usage_tokens'] -= record['single-tokens']
|
||||
logger.debug(f"淘汰上下文记录 1 条,释放 {record['single-tokens']} 个 token。当前上下文总 token 为 {self.session_memory[session_id][-1]['usage_tokens']}。")
|
||||
return record
|
||||
|
||||
async def text_chat(self,
|
||||
prompt: str,
|
||||
session_id: str,
|
||||
image_url: None=None,
|
||||
tools: None=None,
|
||||
extra_conf: Dict = None,
|
||||
**kwargs
|
||||
) -> str:
|
||||
super().accu_model_stat()
|
||||
if not session_id:
|
||||
session_id = "unknown"
|
||||
if "unknown" in self.session_memory:
|
||||
del self.session_memory["unknown"]
|
||||
|
||||
if session_id not in self.session_memory:
|
||||
self.session_memory[session_id] = []
|
||||
|
||||
if session_id not in self.session_personality or not self.session_personality[session_id]:
|
||||
self.personality_set(self.curr_personality, session_id)
|
||||
self.session_personality[session_id] = True
|
||||
|
||||
# 如果 prompt 超过了最大窗口,截断。
|
||||
# 1. 可以保证之后 pop 的时候不会出现问题
|
||||
# 2. 可以保证不会超过最大 token 数
|
||||
_encoded_prompt = self.tokenizer.encode(prompt)
|
||||
curr_model = self.model_configs['model']
|
||||
if curr_model in MODELS and len(_encoded_prompt) > MODELS[curr_model] - 300:
|
||||
_encoded_prompt = _encoded_prompt[:MODELS[curr_model] - 300]
|
||||
prompt = self.tokenizer.decode(_encoded_prompt)
|
||||
|
||||
# 组装上下文,并且根据当前上下文窗口大小截断
|
||||
await self.assemble_context(session_id, prompt, image_url)
|
||||
|
||||
# 获取上下文,openai 格式
|
||||
contexts = await self.retrieve_context(session_id)
|
||||
|
||||
conf = self.model_configs
|
||||
if extra_conf: conf.update(extra_conf)
|
||||
|
||||
# start request
|
||||
retry = 0
|
||||
rate_limit_retry = 0
|
||||
while retry < 3 or rate_limit_retry < 5:
|
||||
logger.debug(conf)
|
||||
logger.debug(contexts)
|
||||
if tools:
|
||||
completion_coro = self.client.chat.completions.create(
|
||||
messages=contexts,
|
||||
tools=tools,
|
||||
**conf
|
||||
)
|
||||
else:
|
||||
completion_coro = self.client.chat.completions.create(
|
||||
messages=contexts,
|
||||
**conf
|
||||
)
|
||||
try:
|
||||
completion = await completion_coro
|
||||
break
|
||||
except AuthenticationError as e:
|
||||
api_key = self.chosen_api_key[:10] + "..."
|
||||
logger.error(f"OpenAI API Key {api_key} 验证错误。详细原因:{e}。正在切换到下一个可用的 Key(如果有的话)")
|
||||
self.keys_data[self.chosen_api_key] = False
|
||||
ok = await self.switch_to_next_key()
|
||||
if ok: continue
|
||||
else: raise Exception("所有 OpenAI API Key 目前都不可用。")
|
||||
except BadRequestError as e:
|
||||
logger.warn(f"OpenAI 请求异常:{e}。")
|
||||
if "image_url is only supported by certain models." in str(e):
|
||||
raise Exception(f"当前模型 { self.model_configs['model'] } 不支持图片输入,请更换模型。")
|
||||
retry += 1
|
||||
except RateLimitError as e:
|
||||
if "You exceeded your current quota" in str(e):
|
||||
self.keys_data[self.chosen_api_key] = False
|
||||
ok = await self.switch_to_next_key()
|
||||
if ok: continue
|
||||
else: raise Exception("所有 OpenAI API Key 目前都不可用。")
|
||||
logger.error(f"OpenAI API Key {self.chosen_api_key} 达到请求速率限制或者官方服务器当前超载。详细原因:{e}")
|
||||
await self.switch_to_next_key()
|
||||
rate_limit_retry += 1
|
||||
time.sleep(1)
|
||||
except Exception as e:
|
||||
retry += 1
|
||||
if retry >= 3:
|
||||
logger.error(traceback.format_exc())
|
||||
raise Exception(f"OpenAI 请求失败:{e}。重试次数已达到上限。")
|
||||
if "maximum context length" in str(e):
|
||||
logger.warn(f"OpenAI 请求失败:{e}。上下文长度超过限制。尝试弹出最早的记录然后重试。")
|
||||
self.pop_record(session_id)
|
||||
|
||||
logger.warning(f"OpenAI 请求失败:{e}。重试第 {retry} 次。")
|
||||
time.sleep(1)
|
||||
|
||||
assert isinstance(completion, ChatCompletion)
|
||||
logger.debug(f"openai completion: {completion.usage}")
|
||||
|
||||
choice = completion.choices[0]
|
||||
|
||||
usage_tokens = completion.usage.total_tokens
|
||||
completion_tokens = completion.usage.completion_tokens
|
||||
self.session_memory[session_id][-1]['usage_tokens'] = usage_tokens
|
||||
self.session_memory[session_id][-1]['single_tokens'] += completion_tokens
|
||||
|
||||
if choice.message.content:
|
||||
# 返回文本
|
||||
completion_text = str(choice.message.content).strip()
|
||||
elif choice.message.tool_calls:
|
||||
# tools call (function calling)
|
||||
return choice.message.tool_calls[0].function
|
||||
|
||||
self.key_stat[self.client.api_key]['used'] += response.usage.total_tokens
|
||||
current_usage_tokens = response.usage.total_tokens
|
||||
|
||||
# 超过指定tokens, 尽可能的保留最多的条目,直到小于max_tokens
|
||||
if current_usage_tokens > self.max_tokens:
|
||||
t = current_usage_tokens
|
||||
index = 0
|
||||
while t > self.max_tokens:
|
||||
if index >= len(cache_data_list):
|
||||
break
|
||||
# 保留人格信息
|
||||
if cache_data_list[index]['type'] != 'personality':
|
||||
t -= int(cache_data_list[index]['single_tokens'])
|
||||
del cache_data_list[index]
|
||||
else:
|
||||
index += 1
|
||||
# 删除完后更新相关字段
|
||||
self.session_dict[session_id] = cache_data_list
|
||||
# cache_prompt = get_prompts_by_cache_list(cache_data_list)
|
||||
|
||||
# 添加新条目进入缓存的prompt
|
||||
new_record['AI'] = {
|
||||
'role': 'assistant',
|
||||
'content': chatgpt_res,
|
||||
}
|
||||
new_record['usage_tokens'] = current_usage_tokens
|
||||
if len(cache_data_list) > 0:
|
||||
new_record['single_tokens'] = current_usage_tokens - int(cache_data_list[-1]['usage_tokens'])
|
||||
else:
|
||||
new_record['single_tokens'] = current_usage_tokens
|
||||
|
||||
cache_data_list.append(new_record)
|
||||
|
||||
self.session_dict[session_id] = cache_data_list
|
||||
|
||||
return chatgpt_res
|
||||
|
||||
def image_chat(self, prompt, img_num = 1, img_size = "1024x1024"):
|
||||
self.session_memory[session_id][-1]['AI'] = {
|
||||
"role": "assistant",
|
||||
"content": completion_text
|
||||
}
|
||||
|
||||
return completion_text
|
||||
|
||||
async def switch_to_next_key(self):
|
||||
'''
|
||||
切换到下一个 API Key
|
||||
'''
|
||||
if not self.api_keys:
|
||||
logger.error("OpenAI API Key 不存在。")
|
||||
return False
|
||||
|
||||
for key in self.keys_data:
|
||||
if self.keys_data[key]:
|
||||
# 没超额
|
||||
self.chosen_api_key = key
|
||||
self.client.api_key = key
|
||||
logger.info(f"OpenAI 切换到 API Key {key[:10]}... 成功。")
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
async def image_generate(self, prompt: str, session_id: str = None, **kwargs) -> str:
|
||||
'''
|
||||
生成图片
|
||||
'''
|
||||
retry = 0
|
||||
image_url = ''
|
||||
|
||||
image_generate_configs = self.cc.get("openai_image_generate", None)
|
||||
conf = self.image_generator_model_configs
|
||||
super().accu_model_stat(model=conf['model'])
|
||||
if not conf:
|
||||
logger.error("OpenAI 图片生成模型配置不存在。")
|
||||
raise Exception("OpenAI 图片生成模型配置不存在。")
|
||||
|
||||
while retry < 5:
|
||||
while retry < 3:
|
||||
try:
|
||||
response: ImagesResponse = self.client.images.generate(
|
||||
images_response = await self.client.images.generate(
|
||||
prompt=prompt,
|
||||
**image_generate_configs
|
||||
**conf
|
||||
)
|
||||
image_url = []
|
||||
for i in range(img_num):
|
||||
image_url.append(response.data[i].url)
|
||||
break
|
||||
image_url = images_response.data[0].url
|
||||
return image_url
|
||||
except Exception as e:
|
||||
gu.log(str(e), level=gu.LEVEL_ERROR)
|
||||
if 'You exceeded' in str(e) or 'Billing hard limit has been reached' in str(
|
||||
e) or 'No API key provided' in str(e) or 'Incorrect API key provided' in str(e):
|
||||
gu.log("当前 Key 已超额或者不正常, 正在切换", level=gu.LEVEL_WARNING)
|
||||
self.key_stat[self.client.api_key]['exceed'] = True
|
||||
is_switched = self.handle_switch_key()
|
||||
if not is_switched:
|
||||
# 所有Key都超额或不正常
|
||||
raise e
|
||||
elif 'Your request was rejected as a result of our safety system.' in str(e):
|
||||
gu.log("您的请求被 OpenAI 安全系统拒绝, 请稍后再试", level=gu.LEVEL_WARNING)
|
||||
raise e
|
||||
else:
|
||||
retry += 1
|
||||
if retry >= 5:
|
||||
raise BaseException("连接超时")
|
||||
|
||||
return image_url
|
||||
retry += 1
|
||||
if retry >= 3:
|
||||
logger.error(traceback.format_exc())
|
||||
raise Exception(f"OpenAI 图片生成请求失败:{e}。重试次数已达到上限。")
|
||||
logger.warning(f"OpenAI 图片生成请求失败:{e}。重试第 {retry} 次。")
|
||||
time.sleep(1)
|
||||
|
||||
def forget(self, session_id = None) -> bool:
|
||||
if session_id is None:
|
||||
return False
|
||||
self.session_dict[session_id] = []
|
||||
async def forget(self, session_id=None, keep_system_prompt: bool=False) -> bool:
|
||||
if session_id is None: return False
|
||||
self.session_memory[session_id] = []
|
||||
if keep_system_prompt:
|
||||
self.personality_set(self.curr_personality, session_id)
|
||||
else:
|
||||
self.curr_personality = self.DEFAULT_PERSONALITY
|
||||
return True
|
||||
|
||||
'''
|
||||
获取缓存的会话
|
||||
'''
|
||||
def get_prompts_by_cache_list(self, cache_data_list, divide=False, paging=False, size=5, page=1):
|
||||
prompts = ""
|
||||
if paging:
|
||||
page_begin = (page-1)*size
|
||||
page_end = page*size
|
||||
if page_begin < 0:
|
||||
page_begin = 0
|
||||
if page_end > len(cache_data_list):
|
||||
page_end = len(cache_data_list)
|
||||
cache_data_list = cache_data_list[page_begin:page_end]
|
||||
for item in cache_data_list:
|
||||
prompts += str(item['user']['role']) + ":\n" + str(item['user']['content']) + "\n"
|
||||
prompts += str(item['AI']['role']) + ":\n" + str(item['AI']['content']) + "\n"
|
||||
def dump_contexts_page(self, session_id: str, size=5, page=1,):
|
||||
'''
|
||||
获取缓存的会话
|
||||
'''
|
||||
# contexts_str = ""
|
||||
# for i, key in enumerate(self.session_memory):
|
||||
# if i < (page-1)*size or i >= page*size:
|
||||
# continue
|
||||
# contexts_str += f"Session ID: {key}\n"
|
||||
# for record in self.session_memory[key]:
|
||||
# if "user" in record:
|
||||
# contexts_str += f"User: {record['user']['content']}\n"
|
||||
# if "AI" in record:
|
||||
# contexts_str += f"AI: {record['AI']['content']}\n"
|
||||
# contexts_str += "---\n"
|
||||
contexts_str = ""
|
||||
if session_id in self.session_memory:
|
||||
for record in self.session_memory[session_id]:
|
||||
if "user" in record and record['user']:
|
||||
text = record['user']['content'][:100] + "..." if len(record['user']['content']) > 100 else record['user']['content']
|
||||
contexts_str += f"User: {text}\n"
|
||||
if "AI" in record and record['AI']:
|
||||
text = record['AI']['content'][:100] + "..." if len(record['AI']['content']) > 100 else record['AI']['content']
|
||||
contexts_str += f"Assistant: {text}\n"
|
||||
else:
|
||||
contexts_str = "会话 ID 不存在。"
|
||||
|
||||
if divide:
|
||||
prompts += "----------\n"
|
||||
return prompts
|
||||
return contexts_str, len(self.session_memory[session_id])
|
||||
|
||||
def set_model(self, model: str):
|
||||
self.model_configs['model'] = model
|
||||
self.cc.put_by_dot_str("openai.chatGPTConfigs.model", model)
|
||||
super().set_curr_model(model)
|
||||
|
||||
|
||||
def get_user_usage_tokens(self,cache_list):
|
||||
usage_tokens = 0
|
||||
for item in cache_list:
|
||||
usage_tokens += int(item['single_tokens'])
|
||||
return usage_tokens
|
||||
|
||||
'''
|
||||
获取统计信息
|
||||
'''
|
||||
def get_stat(self):
|
||||
try:
|
||||
f = open(abs_path+"configs/stat", "r", encoding="utf-8")
|
||||
fjson = json.loads(f.read())
|
||||
f.close()
|
||||
guild_count = 0
|
||||
guild_msg_count = 0
|
||||
guild_direct_msg_count = 0
|
||||
|
||||
for k,v in fjson.items():
|
||||
guild_count += 1
|
||||
guild_msg_count += v['count']
|
||||
guild_direct_msg_count += v['direct_count']
|
||||
|
||||
session_count = 0
|
||||
|
||||
f = open(abs_path+"configs/session", "r", encoding="utf-8")
|
||||
fjson = json.loads(f.read())
|
||||
f.close()
|
||||
for k,v in fjson.items():
|
||||
session_count += 1
|
||||
return guild_count, guild_msg_count, guild_direct_msg_count, session_count
|
||||
except:
|
||||
return -1, -1, -1, -1
|
||||
|
||||
# 包装信息
|
||||
def wrap(self, prompt, session_id, image_url = None):
|
||||
if image_url is not None:
|
||||
prompt = [
|
||||
{
|
||||
"type": "text",
|
||||
"text": prompt
|
||||
},
|
||||
{
|
||||
"type": "image_url",
|
||||
"image_url": {
|
||||
"url": image_url
|
||||
}
|
||||
}
|
||||
]
|
||||
# 获得缓存信息
|
||||
context = self.session_dict[session_id]
|
||||
new_record = {
|
||||
"user": {
|
||||
"role": "user",
|
||||
"content": prompt,
|
||||
},
|
||||
"AI": {},
|
||||
'type': "common",
|
||||
'usage_tokens': 0,
|
||||
}
|
||||
req_list = []
|
||||
for i in context:
|
||||
if 'user' in i:
|
||||
req_list.append(i['user'])
|
||||
if 'AI' in i:
|
||||
req_list.append(i['AI'])
|
||||
req_list.append(new_record['user'])
|
||||
return context, new_record, req_list
|
||||
|
||||
def handle_switch_key(self):
|
||||
# messages = [{"role": "user", "content": prompt}]
|
||||
is_all_exceed = True
|
||||
for key in self.key_stat:
|
||||
if key == None or self.key_stat[key]['exceed']:
|
||||
continue
|
||||
is_all_exceed = False
|
||||
self.client.api_key = key
|
||||
gu.log(f"切换到Key: {key}, 已使用token: {self.key_stat[key]['used']}", level=gu.LEVEL_INFO)
|
||||
break
|
||||
if is_all_exceed:
|
||||
gu.log("所有Key已超额", level=gu.LEVEL_CRITICAL)
|
||||
return False
|
||||
return True
|
||||
|
||||
def get_configs(self):
|
||||
return self.openai_configs
|
||||
|
||||
def get_key_stat(self):
|
||||
return self.key_stat
|
||||
|
||||
def get_key_list(self):
|
||||
return self.key_list
|
||||
|
||||
def get_curr_key(self):
|
||||
return self.client.api_key
|
||||
|
||||
def set_key(self, key):
|
||||
self.client.api_key = key
|
||||
|
||||
# 添加key
|
||||
def append_key(self, key, sponsor):
|
||||
self.key_list.append(key)
|
||||
self.key_stat[key] = {'exceed': False, 'used': 0, 'sponsor': sponsor}
|
||||
return self.model_configs
|
||||
|
||||
# 检查key是否可用
|
||||
def check_key(self, key):
|
||||
client_ = OpenAI(
|
||||
api_key=key,
|
||||
base_url=self.api_base
|
||||
)
|
||||
messages = [{"role": "user", "content": "please just echo `test`"}]
|
||||
client_.chat.completions.create(
|
||||
messages=messages,
|
||||
**self.openai_model_configs
|
||||
)
|
||||
return True
|
||||
def get_keys_data(self):
|
||||
return self.keys_data
|
||||
|
||||
def get_curr_key(self):
|
||||
return self.chosen_api_key
|
||||
|
||||
def set_key(self, key):
|
||||
self.client.api_key = key
|
||||
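The truncation rule applied in text_chat above keeps the prompt inside the model's context window (from the MODELS table) minus a reserve for the completion, using the cl100k_base tokenizer. A minimal, self-contained sketch of that rule; CONTEXT_WINDOWS, RESERVE and the 4096 fallback are illustrative assumptions, not values from this diff:

import tiktoken

CONTEXT_WINDOWS = {"gpt-3.5-turbo": 16385, "gpt-4": 8192}   # subset of the MODELS table above
RESERVE = 300                                               # room left for the completion

def truncate_prompt(prompt: str, model: str) -> str:
    enc = tiktoken.get_encoding("cl100k_base")
    tokens = enc.encode(prompt)
    limit = CONTEXT_WINDOWS.get(model, 4096) - RESERVE      # 4096 fallback is an assumption
    if len(tokens) > limit:
        tokens = tokens[:limit]
    return enc.decode(tokens)

print(len(truncate_prompt("你好 " * 20000, "gpt-4")))        # well under the original length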
@@ -1,13 +1,58 @@
|
||||
import abc
|
||||
from collections import defaultdict
|
||||
|
||||
class Provider:
|
||||
def __init__(self, cfg):
|
||||
pass
|
||||
def __init__(self) -> None:
|
||||
self.model_stat = defaultdict(int) # 用于记录 LLM Model 使用数据
|
||||
self.curr_model_name = "unknown"
|
||||
|
||||
def reset_model_stat(self):
|
||||
self.model_stat.clear()
|
||||
|
||||
def set_curr_model(self, model_name: str):
|
||||
self.curr_model_name = model_name
|
||||
|
||||
@abc.abstractmethod
|
||||
def text_chat(self, prompt, session_id, image_url: None, function_call: None, extra_conf: dict = None, default_personality: dict = None) -> str:
|
||||
pass
|
||||
def get_curr_model(self):
|
||||
'''
|
||||
返回当前正在使用的 LLM
|
||||
'''
|
||||
return self.curr_model_name
|
||||
|
||||
def accu_model_stat(self, model: str = None):
|
||||
if not model:
|
||||
model = self.get_curr_model()
|
||||
self.model_stat[model] += 1
|
||||
|
||||
async def text_chat(self,
|
||||
prompt: str,
|
||||
session_id: str,
|
||||
image_url: None = None,
|
||||
tools: None = None,
|
||||
extra_conf: dict = None,
|
||||
default_personality: dict = None,
|
||||
**kwargs) -> str:
|
||||
'''
|
||||
[require]
|
||||
prompt: 提示词
|
||||
session_id: 会话id
|
||||
|
||||
@abc.abstractmethod
|
||||
def forget(self, session_id = None) -> bool:
|
||||
pass
|
||||
[optional]
|
||||
image_url: 图片url(识图)
|
||||
tools: 函数调用工具
|
||||
extra_conf: 额外配置
|
||||
default_personality: 默认人格
|
||||
'''
|
||||
raise NotImplementedError()
|
||||
|
||||
async def image_generate(self, prompt, session_id, **kwargs) -> str:
|
||||
'''
|
||||
[require]
|
||||
prompt: 提示词
|
||||
session_id: 会话id
|
||||
'''
|
||||
raise NotImplementedError()
|
||||
|
||||
async def forget(self, session_id=None) -> bool:
|
||||
'''
|
||||
重置会话
|
||||
'''
|
||||
raise NotImplementedError()
|
||||
|
||||
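With the reworked base class above, a concrete provider only has to subclass Provider and override the async hooks it supports. A hedged sketch of the smallest possible implementation; EchoProvider and its behaviour are illustrative only, not part of this diff:

from model.provider.provider import Provider

class EchoProvider(Provider):
    def __init__(self) -> None:
        super().__init__()
        self.set_curr_model("echo-v0")
        self.sessions: dict = {}

    async def text_chat(self, prompt: str, session_id: str, **kwargs) -> str:
        self.accu_model_stat()                      # bumps model_stat["echo-v0"]
        self.sessions.setdefault(session_id, []).append(prompt)
        return f"echo: {prompt}"

    async def forget(self, session_id=None) -> bool:
        if session_id is None:
            return False
        self.sessions.pop(session_id, None)
        return True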
@@ -1,218 +0,0 @@
|
||||
from revChatGPT.V1 import Chatbot
|
||||
from revChatGPT import typings
|
||||
from model.provider.provider import Provider
|
||||
from util import general_utils as gu
|
||||
from util import cmd_config as cc
|
||||
import time
|
||||
|
||||
|
||||
class ProviderRevChatGPT(Provider):
|
||||
def __init__(self, config, base_url = None):
|
||||
if base_url == "":
|
||||
base_url = None
|
||||
self.rev_chatgpt: list[dict] = []
|
||||
self.cc = cc.CmdConfig()
|
||||
for i in range(0, len(config['account'])):
|
||||
try:
|
||||
gu.log(f"创建逆向ChatGPT负载{str(i+1)}中...", level=gu.LEVEL_INFO, tag="RevChatGPT")
|
||||
|
||||
if 'password' in config['account'][i]:
|
||||
gu.log(f"创建逆向ChatGPT负载{str(i+1)}失败: 已不支持账号密码登录,请使用access_token方式登录。", level=gu.LEVEL_ERROR, tag="RevChatGPT")
|
||||
continue
|
||||
rev_account_config = {
|
||||
'access_token': config['account'][i]['access_token'],
|
||||
}
|
||||
if self.cc.get("rev_chatgpt_model") != "":
|
||||
rev_account_config['model'] = self.cc.get("rev_chatgpt_model")
|
||||
if len(self.cc.get("rev_chatgpt_plugin_ids")) > 0:
|
||||
rev_account_config['plugin_ids'] = self.cc.get("rev_chatgpt_plugin_ids")
|
||||
if self.cc.get("rev_chatgpt_PUID") != "":
|
||||
rev_account_config['PUID'] = self.cc.get("rev_chatgpt_PUID")
|
||||
if len(self.cc.get("rev_chatgpt_unverified_plugin_domains")) > 0:
|
||||
rev_account_config['unverified_plugin_domains'] = self.cc.get("rev_chatgpt_unverified_plugin_domains")
|
||||
cb = Chatbot(config=rev_account_config, base_url=base_url)
|
||||
# cb.captcha_solver = self.__captcha_solver
|
||||
# 后八位c
|
||||
g_id = rev_account_config['access_token'][-8:]
|
||||
revstat = {
|
||||
'id': g_id,
|
||||
'obj': cb,
|
||||
'busy': False,
|
||||
'user': []
|
||||
}
|
||||
self.rev_chatgpt.append(revstat)
|
||||
except BaseException as e:
|
||||
gu.log(f"创建逆向ChatGPT负载{str(i+1)}失败: {str(e)}", level=gu.LEVEL_ERROR, tag="RevChatGPT")
|
||||
|
||||
def forget(self, session_id = None) -> bool:
|
||||
for i in self.rev_chatgpt:
|
||||
for user in i['user']:
|
||||
if session_id == user['id']:
|
||||
try:
|
||||
i['obj'].reset_chat()
|
||||
return True
|
||||
except BaseException as e:
|
||||
gu.log(f"重置RevChatGPT失败。原因: {str(e)}", level=gu.LEVEL_ERROR, tag="RevChatGPT")
|
||||
return False
|
||||
return False
|
||||
|
||||
def get_revchatgpt(self) -> list:
|
||||
return self.rev_chatgpt
|
||||
|
||||
def request_text(self, prompt: str, bot) -> str:
|
||||
resp = ''
|
||||
err_count = 0
|
||||
retry_count = 5
|
||||
|
||||
while err_count < retry_count:
|
||||
try:
|
||||
for data in bot.ask(prompt):
|
||||
resp = data["message"]
|
||||
break
|
||||
except typings.Error as e:
|
||||
if e.code == typings.ErrorType.INVALID_ACCESS_TOKEN_ERROR:
|
||||
raise e
|
||||
if e.code == typings.ErrorType.EXPIRED_ACCESS_TOKEN_ERROR:
|
||||
raise e
|
||||
if e.code == typings.ErrorType.PROHIBITED_CONCURRENT_QUERY_ERROR:
|
||||
raise e
|
||||
if "Your authentication token has expired. Please try signing in again." in str(e):
|
||||
raise e
|
||||
if "The message you submitted was too long" in str(e):
|
||||
raise e
|
||||
if "You've reached our limit of messages per hour." in str(e):
|
||||
raise e
|
||||
if "Rate limited by proxy" in str(e):
|
||||
gu.log(f"触发请求频率限制, 60秒后自动重试。", level=gu.LEVEL_WARNING, tag="RevChatGPT")
|
||||
time.sleep(60)
|
||||
|
||||
err_count += 1
|
||||
gu.log(f"请求异常: {str(e)},正在重试。({str(err_count)})", level=gu.LEVEL_WARNING, tag="RevChatGPT")
|
||||
if err_count >= retry_count:
|
||||
raise e
|
||||
except BaseException as e:
|
||||
err_count += 1
|
||||
gu.log(f"请求异常: {str(e)},正在重试。({str(err_count)})", level=gu.LEVEL_WARNING, tag="RevChatGPT")
|
||||
if err_count >= retry_count:
|
||||
raise e
|
||||
if resp == '':
|
||||
resp = "RevChatGPT请求异常。"
|
||||
|
||||
# print("[RevChatGPT] "+str(resp))
|
||||
return resp
|
||||
|
||||
def text_chat(self, prompt,
|
||||
session_id = None,
|
||||
image_url = None,
|
||||
function_call=None,
|
||||
extra_conf: dict = None,
|
||||
default_personality: dict = None) -> str:
|
||||
|
||||
# 选择一个人少的账号。
|
||||
selected_revstat = None
|
||||
min_revstat = None
|
||||
min_ = None
|
||||
new_user = False
|
||||
conversation_id = ''
|
||||
parent_id = ''
|
||||
for revstat in self.rev_chatgpt:
|
||||
for user in revstat['user']:
|
||||
if session_id == user['id']:
|
||||
selected_revstat = revstat
|
||||
conversation_id = user['conversation_id']
|
||||
parent_id = user['parent_id']
|
||||
break
|
||||
if min_ is None:
|
||||
min_ = len(revstat['user'])
|
||||
min_revstat = revstat
|
||||
elif len(revstat['user']) < min_:
|
||||
min_ = len(revstat['user'])
|
||||
min_revstat = revstat
|
||||
# if session_id in revstat['user']:
|
||||
# selected_revstat = revstat
|
||||
# break
|
||||
|
||||
if selected_revstat is None:
|
||||
selected_revstat = min_revstat
|
||||
selected_revstat['user'].append({
|
||||
'id': session_id,
|
||||
'conversation_id': '',
|
||||
'parent_id': ''
|
||||
})
|
||||
new_user = True
|
||||
|
||||
gu.log(f"选择账号{str(selected_revstat)}", tag="RevChatGPT", level=gu.LEVEL_DEBUG)
|
||||
|
||||
while selected_revstat['busy']:
|
||||
gu.log(f"账号忙碌,等待中...", tag="RevChatGPT", level=gu.LEVEL_DEBUG)
|
||||
time.sleep(1)
|
||||
selected_revstat['busy'] = True
|
||||
|
||||
if not new_user:
|
||||
# 非新用户,则使用其专用的会话
|
||||
selected_revstat['obj'].conversation_id = conversation_id
|
||||
selected_revstat['obj'].parent_id = parent_id
|
||||
else:
|
||||
# 新用户,则使用新的会话
|
||||
selected_revstat['obj'].reset_chat()
|
||||
|
||||
res = ''
|
||||
err_msg = ''
|
||||
err_cnt = 0
|
||||
while err_cnt < 15:
|
||||
try:
|
||||
res = self.request_text(prompt, selected_revstat['obj'])
|
||||
selected_revstat['busy'] = False
|
||||
# 记录新用户的会话
|
||||
if new_user:
|
||||
i = 0
|
||||
for user in selected_revstat['user']:
|
||||
if user['id'] == session_id:
|
||||
selected_revstat['user'][i]['conversation_id'] = selected_revstat['obj'].conversation_id
|
||||
selected_revstat['user'][i]['parent_id'] = selected_revstat['obj'].parent_id
|
||||
break
|
||||
i += 1
|
||||
return res.strip()
|
||||
except BaseException as e:
|
||||
if "Your authentication token has expired. Please try signing in again." in str(e):
|
||||
raise Exception(f"此账号(access_token后8位为{selected_revstat['id']})的access_token已过期,请重新获取,或者切换账号。")
|
||||
if "The message you submitted was too long" in str(e):
|
||||
raise Exception("发送的消息太长,请分段发送。")
|
||||
if "You've reached our limit of messages per hour." in str(e):
|
||||
raise Exception("触发RevChatGPT请求频率限制。请1小时后再试,或者切换账号。")
|
||||
gu.log(f"请求异常: {str(e)}", level=gu.LEVEL_WARNING, tag="RevChatGPT")
|
||||
err_cnt += 1
|
||||
time.sleep(3)
|
||||
|
||||
raise Exception(f'回复失败。原因:{err_msg}。如果您设置了多个账号,可以使用/switch指令切换账号。输入/switch查看详情。')
|
||||
|
||||
|
||||
# while self.is_all_busy():
|
||||
# time.sleep(1)
|
||||
# res = ''
|
||||
# err_msg = ''
|
||||
# cursor = 0
|
||||
# for revstat in self.rev_chatgpt:
|
||||
# cursor += 1
|
||||
# if not revstat['busy']:
|
||||
# try:
|
||||
# revstat['busy'] = True
|
||||
# res = self.request_text(prompt, revstat['obj'])
|
||||
# revstat['busy'] = False
|
||||
# return res.strip()
|
||||
# # todo: 细化错误管理
|
||||
# except BaseException as e:
|
||||
# revstat['busy'] = False
|
||||
# gu.log(f"请求出现问题: {str(e)}", level=gu.LEVEL_WARNING, tag="RevChatGPT")
|
||||
# err_msg += f"账号{cursor} - 错误原因: {str(e)}"
|
||||
# continue
|
||||
# else:
|
||||
# err_msg += f"账号{cursor} - 错误原因: 忙碌"
|
||||
# continue
|
||||
# raise Exception(f'回复失败。错误跟踪:{err_msg}')
|
||||
|
||||
def is_all_busy(self) -> bool:
|
||||
for revstat in self.rev_chatgpt:
|
||||
if not revstat['busy']:
|
||||
return False
|
||||
return True
|
||||
@@ -1,104 +0,0 @@
|
||||
from model.provider.provider import Provider
|
||||
from EdgeGPT import Chatbot, ConversationStyle
|
||||
import json
|
||||
import os
|
||||
from util import general_utils as gu
|
||||
from util.cmd_config import CmdConfig as cc
|
||||
import time
|
||||
|
||||
class ProviderRevEdgeGPT(Provider):
|
||||
def __init__(self):
|
||||
raise Exception("Bing 逆向已停止维护,不可用,请使用 ChatGPT 官方 API。")
|
||||
|
||||
self.busy = False
|
||||
self.wait_stack = []
|
||||
with open('./cookies.json', 'r') as f:
|
||||
cookies = json.load(f)
|
||||
proxy = cc.get("bing_proxy", None)
|
||||
if proxy == "":
|
||||
proxy = None
|
||||
# q = Query("Hello, bing!", cookie_files="./cookies.json")
|
||||
# print(q)
|
||||
self.bot = Chatbot(cookies=cookies, proxy = "http://127.0.0.1:7890")
|
||||
ret = self.bot.ask_stream("Hello, bing!", conversation_style=ConversationStyle.creative, wss_link="wss://ai.nothingnessvoid.tech/sydney/ChatHub")
|
||||
# self.bot = Chatbot(cookies=cookies, proxy = proxy)
|
||||
for i in ret:
|
||||
print(i, flush=True)
|
||||
|
||||
def is_busy(self):
|
||||
return self.busy
|
||||
|
||||
async def forget(self, session_id = None):
|
||||
try:
|
||||
await self.bot.reset()
|
||||
return True
|
||||
except BaseException:
|
||||
return False
|
||||
|
||||
async def text_chat(self, prompt, platform = 'none', image_url=None, function_call=None):
|
||||
while self.busy:
|
||||
time.sleep(1)
|
||||
self.busy = True
|
||||
resp = 'err'
|
||||
err_count = 0
|
||||
retry_count = 5
|
||||
|
||||
while err_count < retry_count:
|
||||
try:
|
||||
resp = await self.bot.ask(prompt=prompt, conversation_style=ConversationStyle.creative)
|
||||
# print("[RevEdgeGPT] "+str(resp))
|
||||
if 'messages' not in resp['item']:
|
||||
await self.bot.reset()
|
||||
msj_obj = resp['item']['messages'][len(resp['item']['messages'])-1]
|
||||
reply_msg = msj_obj['text']
|
||||
if 'sourceAttributions' in msj_obj:
|
||||
reply_source = msj_obj['sourceAttributions']
|
||||
else:
|
||||
reply_source = []
|
||||
if 'throttling' in resp['item']:
|
||||
throttling = resp['item']['throttling']
|
||||
# print(throttling)
|
||||
else:
|
||||
throttling = None
|
||||
if 'I\'m sorry but I prefer not to continue this conversation. I\'m still learning so I appreciate your understanding and patience.' in reply_msg:
|
||||
self.busy = False
|
||||
return '', 0
|
||||
if reply_msg == prompt:
|
||||
# resp += '\n\n如果你没有让我复述你的话,那代表我可能不想和你继续这个话题了,请输入reset重置会话😶'
|
||||
await self.forget()
|
||||
err_count += 1
|
||||
continue
|
||||
if reply_source is None:
|
||||
# 不想答复
|
||||
return '', 0
|
||||
else:
|
||||
if platform != 'qqchan':
|
||||
index = 1
|
||||
if len(reply_source) > 0:
|
||||
reply_msg += "\n\n信息来源:\n"
|
||||
for i in reply_source:
|
||||
reply_msg += f"[{str(index)}]: {i['seeMoreUrl']} | {i['providerDisplayName']}\n"
|
||||
index += 1
|
||||
if throttling is not None:
|
||||
if throttling['numUserMessagesInConversation'] == throttling['maxNumUserMessagesInConversation']:
|
||||
# 达到上限,重置会话
|
||||
await self.forget()
|
||||
if throttling['numUserMessagesInConversation'] > throttling['maxNumUserMessagesInConversation']:
|
||||
await self.forget()
|
||||
err_count += 1
|
||||
continue
|
||||
reply_msg += f"\n[{throttling['numUserMessagesInConversation']}/{throttling['maxNumUserMessagesInConversation']}]"
|
||||
break
|
||||
except BaseException as e:
|
||||
gu.log(str(e), level=gu.LEVEL_WARNING, tag="RevEdgeGPT")
|
||||
err_count += 1
|
||||
if err_count >= retry_count:
|
||||
gu.log(r"如果报错, 且您的机器在中国大陆内, 请确保您的电脑已经设置好代理软件(梯子), 并在配置文件设置了系统代理地址。详见https://github.com/Soulter/QQChannelChatGPT/wiki/%E4%BA%8C%E3%80%81%E9%A1%B9%E7%9B%AE%E9%85%8D%E7%BD%AE%E6%96%87%E4%BB%B6%E9%85%8D%E7%BD%AE", max_len=999)
|
||||
self.busy = False
|
||||
raise e
|
||||
gu.log("请求出现了一些问题, 正在重试。次数"+str(err_count), level=gu.LEVEL_WARNING, tag="RevEdgeGPT")
|
||||
self.busy = False
|
||||
|
||||
# print("[RevEdgeGPT] "+str(reply_msg))
|
||||
return reply_msg, 1
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
import sqlite3
|
||||
import yaml
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from typing import Tuple
|
||||
|
||||
|
||||
class dbConn():
|
||||
def __init__(self):
|
||||
# 读取参数,并支持中文
|
||||
conn = sqlite3.connect("data.db")
|
||||
conn.text_factory=str
|
||||
db_path = "data/data.db"
|
||||
if os.path.exists("data.db"):
|
||||
shutil.copy("data.db", db_path)
|
||||
conn = sqlite3.connect(db_path)
|
||||
conn.text_factory = str
|
||||
self.conn = conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
@@ -44,7 +48,7 @@ class dbConn():
|
||||
);
|
||||
'''
|
||||
)
|
||||
|
||||
|
||||
conn.commit()
|
||||
|
||||
def insert_session(self, qq_id, history):
|
||||
@@ -76,7 +80,7 @@ class dbConn():
|
||||
''', (qq_id, )
|
||||
)
|
||||
return c.fetchone()
|
||||
|
||||
|
||||
def get_all_session(self):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
@@ -86,7 +90,7 @@ class dbConn():
|
||||
'''
|
||||
)
|
||||
return c.fetchall()
|
||||
|
||||
|
||||
def check_session(self, qq_id):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
@@ -107,7 +111,6 @@ class dbConn():
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
|
||||
def increment_stat_session(self, platform, session_id, cnt):
|
||||
# if not exist, insert
|
||||
conn = self.conn
|
||||
@@ -137,7 +140,7 @@ class dbConn():
|
||||
''', (platform, session_id)
|
||||
)
|
||||
return c.fetchone() is not None
|
||||
|
||||
|
||||
def get_all_stat_session(self):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
@@ -147,7 +150,7 @@ class dbConn():
|
||||
'''
|
||||
)
|
||||
return c.fetchall()
|
||||
|
||||
|
||||
def get_session_cnt_total(self):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
@@ -157,7 +160,7 @@ class dbConn():
|
||||
'''
|
||||
)
|
||||
return c.fetchone()[0]
|
||||
|
||||
|
||||
def increment_stat_message(self, ts, cnt):
|
||||
# 以一个小时为单位。ts的单位是秒。
|
||||
# 找到最近的一个小时,如果没有,就插入
|
||||
@@ -197,7 +200,7 @@ class dbConn():
|
||||
return True, ts
|
||||
else:
|
||||
return False, ts
|
||||
|
||||
|
||||
def get_last_24h_stat_message(self):
|
||||
# 获取最近24小时的消息统计
|
||||
conn = self.conn
|
||||
@@ -208,7 +211,7 @@ class dbConn():
|
||||
''', (time.time() - 86400, )
|
||||
)
|
||||
return c.fetchall()
|
||||
|
||||
|
||||
def get_message_cnt_total(self) -> int:
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
@@ -258,7 +261,7 @@ class dbConn():
|
||||
return True, ts
|
||||
else:
|
||||
return False, ts
|
||||
|
||||
|
||||
def get_last_24h_stat_platform(self):
|
||||
# 获取最近24小时的消息统计
|
||||
conn = self.conn
|
||||
@@ -269,7 +272,7 @@ class dbConn():
|
||||
''', (time.time() - 86400, )
|
||||
)
|
||||
return c.fetchall()
|
||||
|
||||
|
||||
def get_platform_cnt_total(self) -> int:
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
@@ -291,4 +294,3 @@ class dbConn():
|
||||
|
||||
def close(self):
|
||||
self.conn.close()
|
||||
|
||||
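A short usage sketch for the statistics database above; the util.db module path is an assumption (only the dbConn class itself appears in this diff), and the data/ directory is assumed to exist:

import time

from util.db import dbConn   # module path is an assumption

db = dbConn()                                    # opens (and, if needed, migrates) data/data.db
db.increment_stat_message(int(time.time()), 1)   # count one message in the current hourly bucket
db.increment_stat_session("gocq", "group_123456", 1)
print(db.get_message_cnt_total(), db.get_session_cnt_total())
print(db.get_last_24h_stat_message())            # hourly data used by the dashboard charts
db.close()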
@@ -1,17 +1,19 @@
|
||||
pydantic~=1.10.4
|
||||
requests~=2.28.1
|
||||
aiohttp
|
||||
requests
|
||||
openai~=1.2.3
|
||||
qq-botpy==1.1.2
|
||||
qq-botpy
|
||||
chardet~=5.1.0
|
||||
Pillow~=9.4.0
|
||||
GitPython~=3.1.31
|
||||
Pillow
|
||||
GitPython
|
||||
nakuru-project
|
||||
beautifulsoup4
|
||||
googlesearch-python
|
||||
tiktoken
|
||||
readability-lxml
|
||||
revChatGPT~=6.8.6
|
||||
baidu-aip~=4.16.9
|
||||
baidu-aip
|
||||
websockets
|
||||
flask
|
||||
psutil
|
||||
lxml_html_clean
|
||||
SparkleLogging
|
||||
|
||||
Binary file not shown.
28
type/command.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from typing import Union, List, Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
|
||||
class CommandItem():
|
||||
'''
|
||||
用来描述单个指令
|
||||
'''
|
||||
|
||||
command_name: Union[str, tuple] # 指令名
|
||||
callback: Callable # 回调函数
|
||||
description: str # 描述
|
||||
origin: str # 注册来源
|
||||
|
||||
class CommandResult():
|
||||
'''
|
||||
用于在Command中返回多个值
|
||||
'''
|
||||
|
||||
def __init__(self, hit: bool, success: bool = False, message_chain: list = [], command_name: str = "unknown_command") -> None:
|
||||
self.hit = hit
|
||||
self.success = success
|
||||
self.message_chain = message_chain
|
||||
self.command_name = command_name
|
||||
|
||||
def _result_tuple(self):
|
||||
return (self.success, self.message_chain, self.command_name)
|
||||
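A hedged sketch of how a command handler might return the CommandResult defined above; the handler itself and the Plain message component (following the nakuru message-chain convention used elsewhere in the repo) are illustrative:

from nakuru.entities.components import Plain

from type.command import CommandResult


def handle_help(message: str) -> CommandResult:
    if not message.startswith("help"):
        return CommandResult(hit=False)          # not our command, let other handlers try
    chain = [Plain("AstrBot help ...")]
    return CommandResult(hit=True, success=True,
                         message_chain=chain, command_name="help")


print(handle_help("help")._result_tuple())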
1
type/config.py
Normal file
@@ -0,0 +1 @@
|
||||
VERSION = '3.2.4'
|
||||
62
type/message.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from enum import Enum
|
||||
from typing import List
|
||||
from dataclasses import dataclass
|
||||
from nakuru.entities.components import BaseMessageComponent
|
||||
|
||||
from type.register import RegisteredPlatform
|
||||
from type.types import GlobalObject
|
||||
|
||||
class MessageType(Enum):
|
||||
GROUP_MESSAGE = 'GroupMessage' # 群组形式的消息
|
||||
FRIEND_MESSAGE = 'FriendMessage' # 私聊、好友等单聊消息
|
||||
GUILD_MESSAGE = 'GuildMessage' # 频道消息
|
||||
|
||||
|
||||
@dataclass
|
||||
class MessageMember():
|
||||
user_id: str # 发送者id
|
||||
nickname: str = None
|
||||
|
||||
|
||||
class AstrBotMessage():
|
||||
'''
|
||||
AstrBot 的消息对象
|
||||
'''
|
||||
tag: str # 消息来源标签
|
||||
type: MessageType # 消息类型
|
||||
self_id: str # 机器人的识别id
|
||||
session_id: str # 会话id
|
||||
message_id: str # 消息id
|
||||
sender: MessageMember # 发送者
|
||||
message: List[BaseMessageComponent] # 消息链使用 Nakuru 的消息链格式
|
||||
message_str: str # 最直观的纯文本消息字符串
|
||||
raw_message: object
|
||||
timestamp: int # 消息时间戳
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self.__dict__)
|
||||
|
||||
class AstrMessageEvent():
|
||||
'''
|
||||
消息事件。
|
||||
'''
|
||||
context: GlobalObject # 一些公用数据
|
||||
message_str: str # 纯消息字符串
|
||||
message_obj: AstrBotMessage # 消息对象
|
||||
platform: RegisteredPlatform # 来源平台
|
||||
role: str # 基本身份。`admin` 或 `member`
|
||||
session_id: int # 会话 id
|
||||
|
||||
def __init__(self,
|
||||
message_str: str,
|
||||
message_obj: AstrBotMessage,
|
||||
platform: RegisteredPlatform,
|
||||
role: str,
|
||||
context: GlobalObject,
|
||||
session_id: str = None):
|
||||
self.context = context
|
||||
self.message_str = message_str
|
||||
self.message_obj = message_obj
|
||||
self.platform = platform
|
||||
self.role = role
|
||||
self.session_id = session_id
|
||||
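A sketch of how a platform adapter might populate these message objects; the concrete IDs, the Plain component and the None placeholders for platform/context are illustrative assumptions:

import time

from nakuru.entities.components import Plain

from type.message import (AstrBotMessage, AstrMessageEvent,
                          MessageMember, MessageType)

msg = AstrBotMessage()
msg.tag = "gocq"
msg.type = MessageType.GROUP_MESSAGE
msg.self_id = "123456"
msg.session_id = "group_654321"
msg.message_id = "1"
msg.sender = MessageMember(user_id="10001", nickname="Soulter")
msg.message = [Plain("hello")]
msg.message_str = "hello"
msg.raw_message = None
msg.timestamp = int(time.time())

# platform and context would normally be the RegisteredPlatform and GlobalObject
# provided by the core; None keeps the sketch self-contained.
event = AstrMessageEvent(message_str=msg.message_str, message_obj=msg,
                         platform=None, role="member", context=None,
                         session_id=msg.session_id)
print(event.message_obj)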
27
type/plugin.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from enum import Enum
|
||||
from dataclasses import dataclass
|
||||
|
||||
class PluginType(Enum):
|
||||
PLATFORM = 'platform' # 平台类插件。
|
||||
LLM = 'llm' # 大语言模型类插件
|
||||
COMMON = 'common' # 其他插件
|
||||
|
||||
|
||||
@dataclass
|
||||
class PluginMetadata:
|
||||
'''
|
||||
插件的元数据。
|
||||
'''
|
||||
# required
|
||||
plugin_name: str
|
||||
plugin_type: PluginType
|
||||
author: str # 插件作者
|
||||
desc: str # 插件简介
|
||||
version: str # 插件版本
|
||||
|
||||
# optional
|
||||
repo: str = None # 插件仓库地址
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"PluginMetadata({self.plugin_name}, {self.plugin_type}, {self.desc}, {self.version}, {self.repo})"
|
||||
|
||||
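A minimal construction example for the metadata dataclass above; the plugin name, author and repository URL are hypothetical:

from type.plugin import PluginMetadata, PluginType

meta = PluginMetadata(
    plugin_name="helloworld",
    plugin_type=PluginType.COMMON,
    author="Soulter",
    desc="demo plugin",
    version="0.0.1",
    repo="https://github.com/example/helloworld",   # hypothetical repository URL
)
print(meta)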
53
type/register.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from model.provider.provider import Provider as LLMProvider
|
||||
from model.platform._platfrom import Platform
|
||||
from type.plugin import *
|
||||
from typing import List
|
||||
from types import ModuleType
|
||||
from dataclasses import dataclass
|
||||
|
||||
@dataclass
|
||||
class RegisteredPlugin:
|
||||
'''
|
||||
注册在 AstrBot 中的插件。
|
||||
'''
|
||||
metadata: PluginMetadata
|
||||
plugin_instance: object
|
||||
module_path: str
|
||||
module: ModuleType
|
||||
root_dir_name: str
|
||||
trig_cnt: int = 0
|
||||
|
||||
def reset_trig_cnt(self):
|
||||
self.trig_cnt = 0
|
||||
|
||||
def trig(self):
|
||||
self.trig_cnt += 1
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"RegisteredPlugin({self.metadata}, {self.module_path}, {self.root_dir_name})"
|
||||
|
||||
|
||||
RegisteredPlugins = List[RegisteredPlugin]
|
||||
|
||||
|
||||
@dataclass
|
||||
class RegisteredPlatform:
|
||||
'''
|
||||
注册在 AstrBot 中的平台。平台应当实现 Platform 接口。
|
||||
'''
|
||||
platform_name: str
|
||||
platform_instance: Platform
|
||||
origin: str = None # 注册来源
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.platform_name
|
||||
|
||||
|
||||
@dataclass
|
||||
class RegisteredLLM:
|
||||
'''
|
||||
注册在 AstrBot 中的大语言模型调用。大语言模型应当实现 LLMProvider 接口。
|
||||
'''
|
||||
llm_name: str
|
||||
llm_instance: LLMProvider
|
||||
origin: str = None # 注册来源
|
||||
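A sketch of registering a platform and an LLM with the dataclasses above; the names and the None placeholders (real Platform / Provider instances would be passed at runtime) are assumptions:

from type.register import RegisteredLLM, RegisteredPlatform

# platform_instance / llm_instance would be real Platform / Provider objects at runtime;
# None keeps the sketch self-contained.
qq = RegisteredPlatform(platform_name="gocq", platform_instance=None, origin="internal")
llm = RegisteredLLM(llm_name="openai_official", llm_instance=None, origin="internal")
print(qq, llm.llm_name)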
36
type/types.py
Normal file
@@ -0,0 +1,36 @@
|
||||
|
||||
from type.register import *
|
||||
from typing import List
|
||||
from logging import Logger
|
||||
|
||||
class GlobalObject:
|
||||
'''
|
||||
存放一些公用的数据,用于在不同模块(如core与command)之间传递
|
||||
'''
|
||||
version: str # 机器人版本
|
||||
nick: tuple # 用户定义的机器人的别名
|
||||
base_config: dict # config.json 中导出的配置
|
||||
cached_plugins: List[RegisteredPlugin] # 加载的插件
|
||||
platforms: List[RegisteredPlatform]
|
||||
llms: List[RegisteredLLM]
|
||||
|
||||
web_search: bool # 是否开启了网页搜索
|
||||
reply_prefix: str # 回复前缀
|
||||
unique_session: bool # 是否开启了独立会话
|
||||
default_personality: dict
|
||||
dashboard_data = None
|
||||
|
||||
logger: Logger = None
|
||||
|
||||
def __init__(self):
|
||||
self.nick = None # gocq 的昵称
|
||||
self.base_config = None # config.yaml
|
||||
self.cached_plugins = [] # 缓存的插件
|
||||
self.web_search = False # 是否开启了网页搜索
|
||||
self.reply_prefix = None
|
||||
self.unique_session = False
|
||||
self.platforms = []
|
||||
self.llms = []
|
||||
self.default_personality = None
|
||||
self.dashboard_data = None
|
||||
self.stat = {}
|
||||
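A minimal sketch of filling the shared context above; the concrete values are illustrative:

from type.types import GlobalObject

ctx = GlobalObject()
ctx.version = "3.2.4"              # matches type/config.py VERSION
ctx.nick = ("!", "ai")             # bot aliases
ctx.reply_prefix = "[AstrBot] "
ctx.unique_session = True
print(len(ctx.platforms), len(ctx.llms), ctx.unique_session)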
@@ -3,30 +3,35 @@ import json
|
||||
import util.general_utils as gu
|
||||
|
||||
import time
|
||||
|
||||
|
||||
class FuncCallJsonFormatError(Exception):
|
||||
def __init__(self, msg):
|
||||
self.msg = msg
|
||||
|
||||
def __str__(self):
|
||||
return self.msg
|
||||
|
||||
|
||||
|
||||
class FuncNotFoundError(Exception):
|
||||
def __init__(self, msg):
|
||||
self.msg = msg
|
||||
|
||||
def __str__(self):
|
||||
return self.msg
|
||||
|
||||
|
||||
|
||||
class FuncCall():
|
||||
def __init__(self, provider) -> None:
|
||||
self.func_list = []
|
||||
self.provider = provider
|
||||
|
||||
def add_func(self, name: str = None, func_args: list = None, desc: str = None, func_obj = None) -> None:
|
||||
def add_func(self, name: str = None, func_args: list = None, desc: str = None, func_obj=None) -> None:
|
||||
if name == None or func_args == None or desc == None or func_obj == None:
|
||||
raise FuncCallJsonFormatError("name, func_args, desc must be provided.")
|
||||
raise FuncCallJsonFormatError(
|
||||
"name, func_args, desc must be provided.")
|
||||
params = {
"type": "object", # hardcoded here
"type": "object",  # hardcoded here
|
||||
"properties": {}
|
||||
}
|
||||
for param in func_args:
|
||||
@@ -51,7 +56,7 @@ class FuncCall():
|
||||
"description": f["description"],
|
||||
})
|
||||
return json.dumps(_l, indent=intent, ensure_ascii=False)
|
||||
|
||||
|
||||
def get_func(self) -> list:
|
||||
_l = []
|
||||
for f in self.func_list:
|
||||
@@ -64,8 +69,8 @@ class FuncCall():
|
||||
}
|
||||
})
|
||||
return _l
|
||||
|
||||
def func_call(self, question, func_definition, is_task = False, tasks = None, taskindex = -1, is_summary = True, session_id = None):
|
||||
|
||||
def func_call(self, question, func_definition, is_task=False, tasks=None, taskindex=-1, is_summary=True, session_id=None):
|
||||
|
||||
funccall_prompt = """
|
||||
我正实现function call功能,该功能旨在让你变成给定的问题到给定的函数的解析器(意味着你不是创造函数)。
|
||||
@@ -120,7 +125,8 @@ class FuncCall():
|
||||
res = self.provider.text_chat(prompt, session_id)
|
||||
if res.find('```') != -1:
|
||||
res = res[res.find('```json') + 7: res.rfind('```')]
|
||||
gu.log("REVGPT func_call json result", bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
|
||||
gu.log("REVGPT func_call json result",
|
||||
bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
|
||||
print(res)
|
||||
res = json.loads(res)
|
||||
break
|
||||
@@ -151,11 +157,13 @@ class FuncCall():
|
||||
func_target = func["func_obj"]
|
||||
break
|
||||
if func_target == None:
|
||||
raise FuncNotFoundError(f"Request function {func_name} not found.")
|
||||
raise FuncNotFoundError(
|
||||
f"Request function {func_name} not found.")
|
||||
t_res = str(func_target(**args))
|
||||
invoke_func_res += f"{func_name} 调用结果:\n```\n{t_res}\n```\n"
|
||||
invoke_func_res_list.append(invoke_func_res)
|
||||
gu.log(f"[FUNC| {func_name} invoked]", bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
|
||||
gu.log(f"[FUNC| {func_name} invoked]",
|
||||
bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
|
||||
# print(str(t_res))
|
||||
|
||||
if is_summary:
|
||||
@@ -181,12 +189,16 @@ class FuncCall():
|
||||
try:
|
||||
res = self.provider.text_chat(after_prompt, session_id)
|
||||
# 截取```之间的内容
|
||||
gu.log("DEBUG BEGIN", bg=gu.BG_COLORS["yellow"], fg=gu.FG_COLORS["white"])
|
||||
gu.log(
|
||||
"DEBUG BEGIN", bg=gu.BG_COLORS["yellow"], fg=gu.FG_COLORS["white"])
|
||||
print(res)
|
||||
gu.log("DEBUG END", bg=gu.BG_COLORS["yellow"], fg=gu.FG_COLORS["white"])
|
||||
gu.log(
|
||||
"DEBUG END", bg=gu.BG_COLORS["yellow"], fg=gu.FG_COLORS["white"])
|
||||
if res.find('```') != -1:
|
||||
res = res[res.find('```json') + 7: res.rfind('```')]
|
||||
gu.log("REVGPT after_func_call json result", bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
|
||||
res = res[res.find('```json') +
|
||||
7: res.rfind('```')]
|
||||
gu.log("REVGPT after_func_call json result",
|
||||
bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
|
||||
after_prompt_res = res
|
||||
after_prompt_res = json.loads(after_prompt_res)
|
||||
break
|
||||
@@ -197,7 +209,8 @@ class FuncCall():
|
||||
if "The message you submitted was too long" in str(e):
|
||||
# 如果返回的内容太长了,那么就截取一部分
|
||||
time.sleep(3)
|
||||
invoke_func_res = invoke_func_res[:int(len(invoke_func_res) / 2)]
|
||||
invoke_func_res = invoke_func_res[:int(
|
||||
len(invoke_func_res) / 2)]
|
||||
after_prompt = """
|
||||
函数返回以下内容:"""+invoke_func_res+"""
|
||||
请以AI助手的身份结合返回的内容对用户提问做详细全面的回答。
|
||||
@@ -218,11 +231,13 @@ class FuncCall():
|
||||
if "func_call_again" in after_prompt_res and after_prompt_res["func_call_again"]:
|
||||
# 如果需要重新调用函数
|
||||
# 重新调用函数
|
||||
gu.log("REVGPT func_call_again", bg=gu.BG_COLORS["purple"], fg=gu.FG_COLORS["white"])
|
||||
gu.log("REVGPT func_call_again",
|
||||
bg=gu.BG_COLORS["purple"], fg=gu.FG_COLORS["white"])
|
||||
res = self.func_call(question, func_definition)
|
||||
return res, True
|
||||
|
||||
gu.log("REVGPT func callback:", bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
|
||||
gu.log("REVGPT func callback:",
|
||||
bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
|
||||
# print(after_prompt_res["res"])
|
||||
return after_prompt_res["res"], True
|
||||
else:
|
||||
@@ -230,8 +245,3 @@ class FuncCall():
|
||||
else:
|
||||
# print(res["res"])
|
||||
return res["res"], False
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
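A hedged sketch of registering a tool with FuncCall using the argument format shown in util/agent/web_searcher.py below; the import path follows that file, and the get_time helper is hypothetical:

import datetime

from util.agent.func_call import FuncCall   # path assumed from web_searcher.py's import


def get_time(zone: str) -> str:
    '''hypothetical tool used only for this sketch'''
    return f"{zone}: {datetime.datetime.utcnow().isoformat()}"


fc = FuncCall(provider=None)   # a real Provider is only needed for func_call(); None suffices here
fc.add_func("get_time",
            [{"type": "string", "name": "zone", "description": "time zone name"}],
            "Return the current time in the given time zone.",
            get_time)
print(fc.get_func())           # OpenAI tools-style definition list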
183
util/agent/web_searcher.py
Normal file
@@ -0,0 +1,183 @@
|
||||
import traceback
|
||||
import random
|
||||
import json
|
||||
import asyncio
|
||||
import aiohttp
|
||||
import os
|
||||
|
||||
from readability import Document
|
||||
from bs4 import BeautifulSoup
|
||||
from openai.types.chat.chat_completion_message_tool_call import Function
|
||||
from util.agent.func_call import FuncCall
|
||||
from util.search_engine_scraper.config import HEADERS, USER_AGENTS
|
||||
from util.search_engine_scraper.bing import Bing
|
||||
from util.search_engine_scraper.sogo import Sogo
|
||||
from util.search_engine_scraper.google import Google
|
||||
from model.provider.provider import Provider
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
|
||||
bing_search = Bing()
|
||||
sogo_search = Sogo()
|
||||
google = Google()
|
||||
proxy = os.environ.get("HTTPS_PROXY", None)
|
||||
|
||||
def tidy_text(text: str) -> str:
|
||||
'''
|
||||
清理文本,去除空格、换行符等
|
||||
'''
|
||||
return text.strip().replace("\n", " ").replace("\r", " ").replace(" ", " ")
|
||||
|
||||
# def special_fetch_zhihu(link: str) -> str:
|
||||
# '''
|
||||
# function-calling 函数, 用于获取知乎文章的内容
|
||||
# '''
|
||||
# response = requests.get(link, headers=HEADERS)
|
||||
# response.encoding = "utf-8"
|
||||
# soup = BeautifulSoup(response.text, "html.parser")
|
||||
|
||||
# if "zhuanlan.zhihu.com" in link:
|
||||
# r = soup.find(class_="Post-RichTextContainer")
|
||||
# else:
|
||||
# r = soup.find(class_="List-item").find(class_="RichContent-inner")
|
||||
# if r is None:
|
||||
# print("debug: zhihu none")
|
||||
# raise Exception("zhihu none")
|
||||
# return tidy_text(r.text)
|
||||
|
||||
async def search_from_bing(keyword: str) -> str:
|
||||
'''
|
||||
tools, 从 bing 搜索引擎搜索
|
||||
'''
|
||||
logger.info("web_searcher - search_from_bing: " + keyword)
|
||||
results = []
|
||||
try:
|
||||
results = await google.search(keyword, 5)
|
||||
except BaseException as e:
|
||||
logger.error(f"google search error: {e}, try the next one...")
|
||||
if len(results) == 0:
|
||||
logger.debug("search google failed")
|
||||
try:
|
||||
results = await bing_search.search(keyword, 5)
|
||||
except BaseException as e:
|
||||
logger.error(f"bing search error: {e}, try the next one...")
|
||||
if len(results) == 0:
|
||||
logger.debug("search bing failed")
|
||||
try:
|
||||
results = await sogo_search.search(keyword, 5)
|
||||
except BaseException as e:
|
||||
logger.error(f"sogo search error: {e}")
|
||||
if len(results) == 0:
|
||||
logger.debug("search sogo failed")
|
||||
return "没有搜索到结果"
|
||||
ret = ""
|
||||
idx = 1
|
||||
for i in results:
|
||||
logger.info(f"web_searcher - scraping web: {i.title} - {i.url}")
|
||||
try:
|
||||
site_result = await fetch_website_content(i.url)
|
||||
except:
|
||||
site_result = ""
|
||||
site_result = site_result[:600] + "..." if len(site_result) > 600 else site_result
|
||||
ret += f"{idx}. {i.title} \n{i.snippet}\n{site_result}\n\n"
|
||||
idx += 1
|
||||
return ret
|
||||
|
||||
|
||||
async def fetch_website_content(url):
|
||||
header = dict(HEADERS)  # copy so the shared HEADERS template is not mutated
|
||||
header.update({'User-Agent': random.choice(USER_AGENTS)})
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(url, headers=header, timeout=6, proxy=proxy) as response:
|
||||
html = await response.text(encoding="utf-8")
|
||||
doc = Document(html)
|
||||
ret = doc.summary(html_partial=True)
|
||||
soup = BeautifulSoup(ret, 'html.parser')
|
||||
ret = tidy_text(soup.get_text())
|
||||
return ret
|
||||
|
||||
|
||||
async def web_search(prompt, provider: Provider, session_id, official_fc=False):
|
||||
'''
|
||||
official_fc: 使用官方 function-calling
|
||||
'''
|
||||
new_func_call = FuncCall(provider)
|
||||
|
||||
new_func_call.add_func("web_search", [{
|
||||
"type": "string",
|
||||
"name": "keyword",
|
||||
"description": "搜索关键词"
|
||||
}],
|
||||
"通过搜索引擎搜索。如果问题需要获取近期、实时的消息,在网页上搜索(如天气、新闻或任何需要通过网页获取信息的问题),则调用此函数;如果没有,不要调用此函数。",
|
||||
search_from_bing
|
||||
)
|
||||
new_func_call.add_func("fetch_website_content", [{
|
||||
"type": "string",
|
||||
"name": "url",
|
||||
"description": "要获取内容的网页链接"
|
||||
}],
|
||||
"获取网页的内容。如果问题带有合法的网页链接并且用户有需求了解网页内容(例如: `帮我总结一下 https://github.com 的内容`), 就调用此函数。如果没有,不要调用此函数。",
|
||||
fetch_website_content
|
||||
)
|
||||
|
||||
has_func = False
|
||||
function_invoked_ret = ""
|
||||
if official_fc:
|
||||
# we use official function-calling
|
||||
result = await provider.text_chat(prompt, session_id, tools=new_func_call.get_func())
|
||||
if isinstance(result, Function):
|
||||
logger.debug(f"web_searcher - function-calling: {result}")
|
||||
func_obj = None
|
||||
for i in new_func_call.func_list:
|
||||
if i["name"] == result.name:
|
||||
func_obj = i["func_obj"]
|
||||
break
|
||||
if not func_obj:
|
||||
return await provider.text_chat(prompt, session_id) + "\n(网页搜索失败, 此为默认回复)"
|
||||
try:
|
||||
args = json.loads(result.arguments)
|
||||
function_invoked_ret = await func_obj(**args)
|
||||
has_func = True
|
||||
except BaseException as e:
|
||||
traceback.print_exc()
|
||||
return await provider.text_chat(prompt, session_id) + "\n(网页搜索失败, 此为默认回复)"
|
||||
else:
|
||||
return result
|
||||
else:
|
||||
# we use our own function-calling
|
||||
try:
|
||||
args = {
|
||||
'question': prompt,
|
||||
'func_definition': new_func_call.func_dump(),
|
||||
'is_task': False,
|
||||
'is_summary': False,
|
||||
}
|
||||
function_invoked_ret, has_func = await asyncio.to_thread(new_func_call.func_call, **args)
|
||||
except BaseException as e:
|
||||
res = await provider.text_chat(prompt, session_id) + "\n(网页搜索失败, 此为默认回复)"
|
||||
return res
|
||||
has_func = True
|
||||
|
||||
if has_func:
|
||||
await provider.forget(session_id)
|
||||
summary_prompt = f"""
|
||||
你是一个专业且高效的助手,你的任务是
|
||||
1. 根据下面的相关材料对用户的问题 `{prompt}` 进行总结;
|
||||
2. 简单地发表你对这个问题的简略看法。
|
||||
|
||||
# 例子
|
||||
1. 从网上的信息来看,可以知道...我个人认为...你觉得呢?
|
||||
2. 根据网上的最新信息,可以得知...我觉得...你怎么看?
|
||||
|
||||
# 限制
|
||||
1. 限制在 200 字以内;
|
||||
2. 请**直接输出总结**,不要输出多余的内容和提示语。
|
||||
|
||||
# 相关材料
|
||||
{function_invoked_ret}"""
|
||||
ret = await provider.text_chat(summary_prompt, session_id)
|
||||
return ret
|
||||
return function_invoked_ret
|
||||
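A small asyncio sketch of the helpers defined above; the query string and URL are illustrative, and the full web_search pipeline is only hinted at because it needs a Provider instance:

import asyncio

from util.agent.web_searcher import fetch_website_content, search_from_bing


async def main():
    # keyword search with the google -> bing -> sogou fallback chain
    print(await search_from_bing("AstrBot github"))
    # readability-based page extraction
    print((await fetch_website_content("https://github.com/Soulter/AstrBot"))[:200])
    # the full pipeline (search + LLM summary) additionally needs a Provider instance:
    # summary = await web_search(prompt, provider, session_id, official_fc=True)

asyncio.run(main())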
@@ -1,8 +1,9 @@
|
||||
import os
|
||||
import json
|
||||
import yaml
|
||||
from typing import Union
|
||||
|
||||
cpath = "cmd_config.json"
|
||||
cpath = "data/cmd_config.json"
|
||||
|
||||
def check_exist():
|
||||
if not os.path.exists(cpath):
|
||||
@@ -10,6 +11,7 @@ def check_exist():
|
||||
json.dump({}, f, indent=4, ensure_ascii=False)
|
||||
f.flush()
|
||||
|
||||
|
||||
class CmdConfig():
|
||||
|
||||
@staticmethod
|
||||
@@ -21,13 +23,13 @@ class CmdConfig():
|
||||
return d[key]
|
||||
else:
|
||||
return default
|
||||
|
||||
|
||||
@staticmethod
|
||||
def get_all():
|
||||
check_exist()
|
||||
with open(cpath, "r", encoding="utf-8-sig") as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def put(key, value):
|
||||
check_exist()
|
||||
@@ -37,7 +39,7 @@ class CmdConfig():
|
||||
with open(cpath, "w", encoding="utf-8-sig") as f:
|
||||
json.dump(d, f, indent=4, ensure_ascii=False)
|
||||
f.flush()
|
||||
|
||||
|
||||
@staticmethod
|
||||
def put_by_dot_str(key: str, value):
|
||||
'''
|
||||
@@ -58,11 +60,11 @@ class CmdConfig():
|
||||
f.flush()
|
||||
|
||||
@staticmethod
|
||||
def init_attributes(key: Union[str, list], init_val = ""):
|
||||
def init_attributes(key: Union[str, list], init_val=""):
|
||||
check_exist()
|
||||
conf_str = ''
|
||||
with open(cpath, "r", encoding="utf-8-sig") as f:
|
||||
conf_str = f.read()
|
||||
conf_str = f.read()
|
||||
if conf_str.startswith(u'\ufeff'):
|
||||
conf_str = conf_str.encode('utf8')[3:].decode('utf8')
|
||||
d = json.loads(conf_str)
|
||||
@@ -80,4 +82,64 @@ class CmdConfig():
|
||||
if _tag:
|
||||
with open(cpath, "w", encoding="utf-8-sig") as f:
|
||||
json.dump(d, f, indent=4, ensure_ascii=False)
|
||||
f.flush()
|
||||
f.flush()
|
||||
|
||||
|
||||
def init_astrbot_config_items():
|
||||
# 加载默认配置
|
||||
cc = CmdConfig()
|
||||
cc.init_attributes("qq_forward_threshold", 200)
|
||||
cc.init_attributes("qq_welcome", "")
|
||||
cc.init_attributes("qq_pic_mode", False)
|
||||
cc.init_attributes("gocq_host", "127.0.0.1")
|
||||
cc.init_attributes("gocq_http_port", 5700)
|
||||
cc.init_attributes("gocq_websocket_port", 6700)
|
||||
cc.init_attributes("gocq_react_group", True)
|
||||
cc.init_attributes("gocq_react_guild", True)
|
||||
cc.init_attributes("gocq_react_friend", True)
|
||||
cc.init_attributes("gocq_react_group_increase", True)
|
||||
cc.init_attributes("other_admins", [])
|
||||
cc.init_attributes("CHATGPT_BASE_URL", "")
|
||||
cc.init_attributes("qqbot_secret", "")
|
||||
cc.init_attributes("qqofficial_enable_group_message", False)
|
||||
cc.init_attributes("admin_qq", "")
|
||||
cc.init_attributes("nick_qq", ["!", "!", "ai"])
|
||||
cc.init_attributes("admin_qqchan", "")
|
||||
cc.init_attributes("llm_env_prompt", "")
|
||||
cc.init_attributes("llm_wake_prefix", "")
|
||||
cc.init_attributes("default_personality_str", "")
|
||||
cc.init_attributes("openai_image_generate", {
|
||||
"model": "dall-e-3",
|
||||
"size": "1024x1024",
|
||||
"style": "vivid",
|
||||
"quality": "standard",
|
||||
})
|
||||
cc.init_attributes("http_proxy", "")
|
||||
cc.init_attributes("https_proxy", "")
|
||||
cc.init_attributes("dashboard_username", "")
|
||||
cc.init_attributes("dashboard_password", "")
|
||||
|
||||
|
||||
|
||||
def try_migrate_config():
|
||||
'''
|
||||
将 cmd_config.json 迁移至 data/cmd_config.json
|
||||
'''
|
||||
print("try migrate configs")
|
||||
if os.path.exists("cmd_config.json"):
|
||||
with open("cmd_config.json", "r", encoding="utf-8-sig") as f:
|
||||
data = json.load(f)
|
||||
with open("data/cmd_config.json", "w", encoding="utf-8-sig") as f:
|
||||
json.dump(data, f, indent=2, ensure_ascii=False)
|
||||
try:
|
||||
os.remove("cmd_config.json")
|
||||
except Exception as e:
|
||||
pass
|
||||
if not os.path.exists("cmd_config.json") and not os.path.exists("data/cmd_config.json"):
|
||||
# 从 configs/config.yaml 上拿数据
|
||||
configs_pth = os.path.abspath(os.path.join(os.path.abspath(__file__), "../../configs/config.yaml"))
|
||||
with open(configs_pth, encoding='utf-8') as f:
|
||||
data = yaml.load(f, Loader=yaml.Loader)
|
||||
print(data)
|
||||
with open("data/cmd_config.json", "w", encoding="utf-8-sig") as f:
|
||||
json.dump(data, f, indent=2, ensure_ascii=False)
|
||||
|
||||
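A short usage sketch for the configuration helpers above, assuming the data/ directory already exists:

from util.cmd_config import CmdConfig, init_astrbot_config_items, try_migrate_config

try_migrate_config()             # move a legacy ./cmd_config.json into data/, if present
init_astrbot_config_items()      # make sure every default key exists
CmdConfig.put("gocq_host", "127.0.0.1")
print(CmdConfig.get_all()["gocq_host"])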
@@ -1,279 +0,0 @@
|
||||
import requests
|
||||
import util.general_utils as gu
|
||||
from bs4 import BeautifulSoup
|
||||
import time
|
||||
from util.function_calling.func_call import (
|
||||
FuncCall,
|
||||
FuncCallJsonFormatError,
|
||||
FuncNotFoundError
|
||||
)
|
||||
from openai.types.chat.chat_completion_message_tool_call import Function
|
||||
import traceback
|
||||
from googlesearch import search, SearchResult
|
||||
from model.provider.provider import Provider
|
||||
import json
|
||||
from readability import Document
|
||||
|
||||
|
||||
def tidy_text(text: str) -> str:
|
||||
'''
|
||||
清理文本,去除空格、换行符等
|
||||
'''
|
||||
return text.strip().replace("\n", " ").replace("\r", " ").replace(" ", " ")
|
||||
|
||||
def special_fetch_zhihu(link: str) -> str:
|
||||
'''
|
||||
function-calling 函数, 用于获取知乎文章的内容
|
||||
'''
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) \
|
||||
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
|
||||
}
|
||||
response = requests.get(link, headers=headers)
|
||||
response.encoding = "utf-8"
|
||||
soup = BeautifulSoup(response.text, "html.parser")
|
||||
|
||||
if "zhuanlan.zhihu.com" in link:
|
||||
r = soup.find(class_="Post-RichTextContainer")
|
||||
else:
|
||||
r = soup.find(class_="List-item").find(class_="RichContent-inner")
|
||||
if r is None:
|
||||
print("debug: zhihu none")
|
||||
raise Exception("zhihu none")
|
||||
return tidy_text(r.text)
|
||||
|
||||
def google_web_search(keyword) -> str:
|
||||
'''
|
||||
获取 google 搜索结果, 得到 title、desc、link
|
||||
'''
|
||||
ret = ""
|
||||
index = 1
|
||||
try:
|
||||
ls = search(keyword, advanced=True, num_results=4)
|
||||
for i in ls:
|
||||
desc = i.description
|
||||
try:
|
||||
desc = fetch_website_content(i.url)
|
||||
except BaseException as e:
|
||||
print(f"(google) fetch_website_content err: {str(e)}")
|
||||
gu.log(f"# No.{str(index)}\ntitle: {i.title}\nurl: {i.url}\ncontent: {desc}\n\n", level=gu.LEVEL_DEBUG, max_len=9999)
|
||||
ret += f"# No.{str(index)}\ntitle: {i.title}\nurl: {i.url}\ncontent: {desc}\n\n"
|
||||
index += 1
|
||||
except Exception as e:
|
||||
print(f"google search err: {str(e)}")
|
||||
return web_keyword_search_via_bing(keyword)
|
||||
return ret
|
||||
|
||||
def web_keyword_search_via_bing(keyword) -> str:
|
||||
'''
|
||||
获取bing搜索结果, 得到 title、desc、link
|
||||
'''
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) \
|
||||
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
|
||||
}
|
||||
url = "https://www.bing.com/search?q="+keyword
|
||||
_cnt = 0
|
||||
_detail_store = []
|
||||
while _cnt < 5:
|
||||
try:
|
||||
response = requests.get(url, headers=headers)
|
||||
response.encoding = "utf-8"
|
||||
gu.log(f"bing response: {response.text}", tag="bing", level=gu.LEVEL_DEBUG, max_len=9999)
|
||||
soup = BeautifulSoup(response.text, "html.parser")
|
||||
res = []
|
||||
ols = soup.find(id="b_results")
|
||||
for i in ols.find_all("li", class_="b_algo"):
|
||||
try:
|
||||
title = i.find("h2").text
|
||||
desc = i.find("p").text
|
||||
link = i.find("h2").find("a").get("href")
|
||||
res.append({
|
||||
"title": title,
|
||||
"desc": desc,
|
||||
"link": link,
|
||||
})
|
||||
if len(res) >= 5: # 限制5条
|
||||
break
|
||||
if len(_detail_store) >= 3:
|
||||
continue
|
||||
|
||||
# 爬取前两条的网页内容
|
||||
if "zhihu.com" in link:
|
||||
try:
|
||||
_detail_store.append(special_fetch_zhihu(link))
|
||||
except BaseException as e:
|
||||
print(f"zhihu parse err: {str(e)}")
|
||||
else:
|
||||
try:
|
||||
_detail_store.append(fetch_website_content(link))
|
||||
except BaseException as e:
|
||||
print(f"fetch_website_content err: {str(e)}")
|
||||
|
||||
except Exception as e:
|
||||
print(f"bing parse err: {str(e)}")
|
||||
if len(res) == 0:
|
||||
break
|
||||
if len(_detail_store) > 0:
|
||||
ret = f"{str(res)} \n具体网页内容: {str(_detail_store)}"
|
||||
else:
|
||||
ret = f"{str(res)}"
|
||||
return str(ret)
|
||||
except Exception as e:
|
||||
gu.log(f"bing fetch err: {str(e)}")
|
||||
_cnt += 1
|
||||
time.sleep(1)
|
||||
|
||||
gu.log("fail to fetch bing info, using sougou.")
|
||||
return web_keyword_search_via_sougou(keyword)
|
||||
|
||||
def web_keyword_search_via_sougou(keyword) -> str:
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) \
|
||||
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
|
||||
}
|
||||
url = f"https://sogou.com/web?query={keyword}"
|
||||
response = requests.get(url, headers=headers)
|
||||
response.encoding = "utf-8"
|
||||
soup = BeautifulSoup(response.text, "html.parser")
|
||||
|
||||
res = []
|
||||
results = soup.find("div", class_="results")
|
||||
for i in results.find_all("div", class_="vrwrap"):
|
||||
try:
|
||||
title = tidy_text(i.find("h3").text)
|
||||
link = tidy_text(i.find("h3").find("a").get("href"))
|
||||
if link.startswith("/link?url="):
|
||||
link = "https://www.sogou.com" + link
|
||||
res.append({
|
||||
"title": title,
|
||||
"link": link,
|
||||
})
|
||||
if len(res) >= 5: # 限制5条
|
||||
break
|
||||
except Exception as e:
|
||||
gu.log(f"sougou parse err: {str(e)}", tag="web_keyword_search_via_sougou", level=gu.LEVEL_ERROR)
|
||||
# 爬取网页内容
|
||||
_detail_store = []
|
||||
for i in res:
|
||||
if len(_detail_store) >= 3:
|
||||
break
|
||||
try:
|
||||
_detail_store.append(fetch_website_content(i["link"]))
|
||||
except BaseException as e:
|
||||
print(f"fetch_website_content err: {str(e)}")
|
||||
ret = f"{str(res)}"
|
||||
if len(_detail_store) > 0:
|
||||
ret += f"\n网页内容: {str(_detail_store)}"
|
||||
return ret
|
||||
|
||||
def fetch_website_content(url):
|
||||
gu.log(f"fetch_website_content: {url}", tag="fetch_website_content", level=gu.LEVEL_DEBUG)
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) \
|
||||
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
|
||||
}
|
||||
response = requests.get(url, headers=headers, timeout=3)
|
||||
response.encoding = "utf-8"
|
||||
# soup = BeautifulSoup(response.text, "html.parser")
|
||||
# # 如果有container / content / main等的话,就只取这些部分
|
||||
# has = False
|
||||
# beleive_ls = ["container", "content", "main"]
|
||||
# res = ""
|
||||
# for cls in beleive_ls:
|
||||
# for i in soup.find_all(class_=cls):
|
||||
# has = True
|
||||
# res += i.text
|
||||
# if not has:
|
||||
# res = soup.text
|
||||
# res = res.replace("\n", "").replace(" ", " ").replace("\r", "").replace("\t", "")
|
||||
# if not has:
|
||||
# res = res[300:1100]
|
||||
# else:
|
||||
# res = res[100:800]
|
||||
# # with open(f"temp_{time.time()}.html", "w", encoding="utf-8") as f:
|
||||
# # f.write(res)
|
||||
# gu.log(f"fetch_website_content: end", tag="fetch_website_content", level=gu.LEVEL_DEBUG)
|
||||
# return res
|
||||
doc = Document(response.content)
|
||||
# print('title:', doc.title())
|
||||
ret = doc.summary(html_partial=True)
|
||||
soup = BeautifulSoup(ret, 'html.parser')
|
||||
ret = tidy_text(soup.get_text())
|
||||
return ret
|
||||
|
||||
def web_search(question, provider: Provider, session_id, official_fc=False):
|
||||
'''
|
||||
official_fc: 使用官方 function-calling
|
||||
'''
|
||||
new_func_call = FuncCall(provider)
|
||||
new_func_call.add_func("google_web_search", [{
|
||||
"type": "string",
|
||||
"name": "keyword",
|
||||
"description": "google search query (分词,尽量保留所有信息)"
|
||||
}],
|
||||
"通过搜索引擎搜索。如果问题需要在网页上搜索(如天气、新闻或任何需要通过网页获取信息的问题),则调用此函数;如果没有,不要调用此函数。",
|
||||
google_web_search
|
||||
)
|
||||
new_func_call.add_func("fetch_website_content", [{
|
||||
"type": "string",
|
||||
"name": "url",
|
||||
"description": "网址"
|
||||
}],
|
||||
"获取网页的内容。如果问题带有合法的网页链接(例如: `帮我总结一下https://github.com的内容`), 就调用此函数。如果没有,不要调用此函数。",
|
||||
fetch_website_content
|
||||
)
|
||||
question1 = f"{question} \n> hint: 最多只能调用1个function, 并且存在不会调用任何function的可能性。"
|
||||
has_func = False
|
||||
function_invoked_ret = ""
|
||||
if official_fc:
|
||||
func = provider.text_chat(question1, session_id, function_call=new_func_call.get_func())
|
||||
if isinstance(func, Function):
|
||||
# arguments='{\n "keyword": "北京今天的天气"\n}', name='google_web_search'
|
||||
# 执行对应的结果:
|
||||
func_obj = None
|
||||
for i in new_func_call.func_list:
|
||||
if i["name"] == func.name:
|
||||
func_obj = i["func_obj"]
|
||||
break
|
||||
if not func_obj:
|
||||
gu.log("找不到返回的 func name " + func.name, level=gu.LEVEL_ERROR)
|
||||
return provider.text_chat(question1, session_id) + "\n(网页搜索失败, 此为默认回复)"
|
||||
try:
|
||||
args = json.loads(func.arguments)
|
||||
function_invoked_ret = func_obj(**args)
|
||||
has_func = True
|
||||
except BaseException as e:
|
||||
traceback.print_exc()
|
||||
return provider.text_chat(question1, session_id) + "\n(网页搜索失败, 此为默认回复)"
|
||||
else:
|
||||
# now func is a string
|
||||
return func
|
||||
else:
|
||||
try:
|
||||
function_invoked_ret, has_func = new_func_call.func_call(question1, new_func_call.func_dump(), is_task=False, is_summary=False)
|
||||
except BaseException as e:
|
||||
res = provider.text_chat(question) + "\n(网页搜索失败, 此为默认回复)"
|
||||
return res
|
||||
has_func = True
|
||||
|
||||
if has_func:
|
||||
provider.forget(session_id)
|
||||
question3 = f"""请你用活泼的语气回答`{question}`问题。\n以下是相关材料,请直接拿此材料针对问题进行总结回答。在文章末尾加上各参考链接,如`[1] <title> <url>`;不要提到任何函数调用的信息;在总结的末尾加上1或2个相关的emoji。```\n{function_invoked_ret}\n```\n"""
|
||||
gu.log(f"web_search: {question3}", tag="web_search", level=gu.LEVEL_DEBUG, max_len=99999)
|
||||
_c = 0
|
||||
while _c < 3:
|
||||
try:
|
||||
print('text chat')
|
||||
final_ret = provider.text_chat(question3)
|
||||
return final_ret
|
||||
except Exception as e:
|
||||
print(e)
|
||||
_c += 1
|
||||
if _c == 3: raise e
|
||||
if "The message you submitted was too long" in str(e):
|
||||
provider.forget(session_id)
|
||||
function_invoked_ret = function_invoked_ret[:int(len(function_invoked_ret) / 2)]
|
||||
time.sleep(3)
|
||||
question3 = f"""请回答`{question}`问题。\n以下是相关材料,请直接拿此材料针对问题进行回答,再给参考链接, 参考链接首末有空格。```\n{function_invoked_ret}\n```\n"""
|
||||
return function_invoked_ret
|
||||
@@ -1,123 +1,27 @@
|
||||
import datetime
|
||||
import time
|
||||
import socket
|
||||
from PIL import Image, ImageDraw, ImageFont
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
from util.cmd_config import CmdConfig
|
||||
import aiohttp
|
||||
import socket
|
||||
import platform
|
||||
import json
|
||||
import sys
|
||||
import psutil
|
||||
import ssl
|
||||
import base64
|
||||
|
||||
PLATFORM_GOCQ = 'gocq'
|
||||
PLATFORM_QQCHAN = 'qqchan'
|
||||
from PIL import Image, ImageDraw, ImageFont
|
||||
from type.types import GlobalObject
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
FG_COLORS = {
|
||||
"black": "30",
|
||||
"red": "31",
|
||||
"green": "32",
|
||||
"yellow": "33",
|
||||
"blue": "34",
|
||||
"purple": "35",
|
||||
"cyan": "36",
|
||||
"white": "37",
|
||||
"default": "39",
|
||||
}
|
||||
|
||||
BG_COLORS = {
|
||||
"black": "40",
|
||||
"red": "41",
|
||||
"green": "42",
|
||||
"yellow": "43",
|
||||
"blue": "44",
|
||||
"purple": "45",
|
||||
"cyan": "46",
|
||||
"white": "47",
|
||||
"default": "49",
|
||||
}
|
||||
|
||||
LEVEL_DEBUG = "DEBUG"
|
||||
LEVEL_INFO = "INFO"
|
||||
LEVEL_WARNING = "WARNING"
|
||||
LEVEL_ERROR = "ERROR"
|
||||
LEVEL_CRITICAL = "CRITICAL"
|
||||
|
||||
level_codes = {
|
||||
LEVEL_DEBUG: 0,
|
||||
LEVEL_INFO: 1,
|
||||
LEVEL_WARNING: 2,
|
||||
LEVEL_ERROR: 3,
|
||||
LEVEL_CRITICAL: 4
|
||||
}
|
||||
|
||||
level_colors = {
|
||||
"INFO": "green",
|
||||
"WARNING": "yellow",
|
||||
"ERROR": "red",
|
||||
"CRITICAL": "purple",
|
||||
}
|
||||
|
||||
def log(
|
||||
msg: str,
|
||||
level: str = "INFO",
|
||||
tag: str = "System",
|
||||
fg: str = None,
|
||||
bg: str = None,
|
||||
max_len: int = 500,
|
||||
err: Exception = None,):
|
||||
"""
|
||||
日志打印函数
|
||||
"""
|
||||
_set_level_code = level_codes[LEVEL_INFO]
|
||||
if 'LOG_LEVEL' in os.environ and os.environ['LOG_LEVEL'] in level_codes:
|
||||
_set_level_code = level_codes[os.environ['LOG_LEVEL']]
|
||||
|
||||
if level in level_codes and level_codes[level] < _set_level_code:
|
||||
return
|
||||
|
||||
if err is not None:
|
||||
msg += "\n异常原因: " + str(err)
|
||||
level = LEVEL_ERROR
|
||||
|
||||
if len(msg) > max_len:
|
||||
msg = msg[:max_len] + "..."
|
||||
now = datetime.datetime.now().strftime("%m-%d %H:%M:%S")
|
||||
|
||||
pres = []
|
||||
for line in msg.split("\n"):
|
||||
if line == "\n":
|
||||
pres.append("")
|
||||
else:
|
||||
pres.append(f"[{now}] [{level}] [{tag}]: {line}")
|
||||
|
||||
if level == "INFO":
|
||||
if fg is None:
|
||||
fg = FG_COLORS["green"]
|
||||
if bg is None:
|
||||
bg = BG_COLORS["default"]
|
||||
elif level == "WARNING":
|
||||
if fg is None:
|
||||
fg = FG_COLORS["yellow"]
|
||||
if bg is None:
|
||||
bg = BG_COLORS["default"]
|
||||
elif level == "ERROR":
|
||||
if fg is None:
|
||||
fg = FG_COLORS["red"]
|
||||
if bg is None:
|
||||
bg = BG_COLORS["default"]
|
||||
elif level == "CRITICAL":
|
||||
if fg is None:
|
||||
fg = FG_COLORS["purple"]
|
||||
if bg is None:
|
||||
bg = BG_COLORS["default"]
|
||||
|
||||
ret = ""
|
||||
for line in pres:
|
||||
ret += f"\033[{fg};{bg}m{line}\033[0m\n"
|
||||
print(ret[:-1])
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
|
||||
def port_checker(port: int, host: str = "localhost"):
|
||||
sk = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
|
||||
sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
sk.settimeout(1)
|
||||
try:
|
||||
sk.connect((host, port))
|
||||
@@ -126,52 +30,22 @@ def port_checker(port: int, host: str = "localhost"):
|
||||
except Exception:
|
||||
sk.close()
|
||||
return False
|
||||
|
||||
def word2img(title: str, text: str, max_width=30, font_size=20):
|
||||
|
||||
|
||||
def get_font_path() -> str:
|
||||
if os.path.exists("resources/fonts/syst.otf"):
|
||||
font_path = "resources/fonts/syst.otf"
|
||||
elif os.path.exists("QQChannelChatGPT/resources/fonts/syst.otf"):
|
||||
font_path = "QQChannelChatGPT/resources/fonts/syst.otf"
|
||||
elif os.path.exists("AstrBot/resources/fonts/syst.otf"):
|
||||
font_path = "AstrBot/resources/fonts/syst.otf"
|
||||
elif os.path.exists("C:/Windows/Fonts/simhei.ttf"):
|
||||
font_path = "C:/Windows/Fonts/simhei.ttf"
|
||||
elif os.path.exists("/usr/share/fonts/opentype/noto/NotoSansCJK-Regular.ttc"):
|
||||
font_path = "/usr/share/fonts/opentype/noto/NotoSansCJK-Regular.ttc"
|
||||
else:
|
||||
raise Exception("找不到字体文件")
|
||||
|
||||
width_factor = 1.0
|
||||
height_factor = 1.5
|
||||
# 格式化文本宽度最大为30
|
||||
lines = text.split('\n')
|
||||
i = 0
|
||||
length = len(lines)
|
||||
for l in lines:
|
||||
if len(l) > max_width:
|
||||
# lines[i] = l[:max_width] + '\n' + l[max_width:]
|
||||
# for
|
||||
cp = l
|
||||
for ii in range(len(l)):
|
||||
if ii % max_width == 0:
|
||||
cp = cp[:ii] + '\n' + cp[ii:]
|
||||
length += 1
|
||||
lines[i] = cp
|
||||
i += 1
|
||||
text = '\n'.join(lines)
|
||||
width = int(max_width * font_size * width_factor)
|
||||
height = int(length * font_size * height_factor)
|
||||
image = Image.new('RGB', (width, height), (255, 255, 255))
|
||||
draw = ImageDraw.Draw(image)
|
||||
|
||||
text_font = ImageFont.truetype(font_path, font_size)
|
||||
title_font = ImageFont.truetype(font_path, font_size + 5)
|
||||
# 标题居中
|
||||
title_width, title_height = title_font.getsize(title)
|
||||
draw.text(((width - title_width) / 2, 10), title, fill=(0, 0, 0), font=title_font)
|
||||
# 文本不居中
|
||||
draw.text((10, title_height+20), text, fill=(0, 0, 0), font=text_font)
|
||||
|
||||
return image
|
||||
|
||||
return font_path
|
||||
|
||||
def render_markdown(markdown_text, image_width=800, image_height=600, font_size=26, font_color=(0, 0, 0), bg_color=(255, 255, 255)):
|
||||
|
||||
@@ -179,7 +53,7 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
HEADER_FONT_STANDARD_SIZE = 42
|
||||
|
||||
QUOTE_LEFT_LINE_MARGIN = 10
|
||||
QUOTE_FONT_LINE_MARGIN = 6 # 引用文字距离左边线的距离和上下的距离
|
||||
QUOTE_FONT_LINE_MARGIN = 6 # 引用文字距离左边线的距离和上下的距离
|
||||
QUOTE_LEFT_LINE_HEIGHT = font_size + QUOTE_FONT_LINE_MARGIN * 2
|
||||
QUOTE_LEFT_LINE_WIDTH = 5
|
||||
QUOTE_LEFT_LINE_COLOR = (180, 180, 180)
|
||||
@@ -191,9 +65,9 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
CODE_BLOCK_FONT_SIZE = font_size
|
||||
CODE_BLOCK_FONT_COLOR = (255, 255, 255)
|
||||
CODE_BLOCK_BG_COLOR = (240, 240, 240)
|
||||
CODE_BLOCK_CODES_MARGIN_VERTICAL = 5 # 代码块和代码之间的距离
|
||||
CODE_BLOCK_CODES_MARGIN_HORIZONTAL = 5 # 代码块和代码之间的距离
|
||||
CODE_BLOCK_TEXT_MARGIN = 4 # 代码和代码之间的距离
|
||||
CODE_BLOCK_CODES_MARGIN_VERTICAL = 5 # 代码块和代码之间的距离
|
||||
CODE_BLOCK_CODES_MARGIN_HORIZONTAL = 5 # 代码块和代码之间的距离
|
||||
CODE_BLOCK_TEXT_MARGIN = 4 # 代码和代码之间的距离
|
||||
|
||||
INLINE_CODE_MARGIN = 8
|
||||
INLINE_CODE_FONT_SIZE = font_size
|
||||
@@ -212,29 +86,14 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
# 用于匹配图片的正则表达式
|
||||
IMAGE_REGEX = r"!\s*\[.*?\]\s*\((.*?)\)"
|
||||
|
||||
|
||||
if os.path.exists("resources/fonts/syst.otf"):
|
||||
font_path = "resources/fonts/syst.otf"
|
||||
elif os.path.exists("QQChannelChatGPT/resources/fonts/syst.otf"):
|
||||
font_path = "QQChannelChatGPT/resources/fonts/syst.otf"
|
||||
elif os.path.exists("C:/Windows/Fonts/simhei.ttf"):
|
||||
font_path = "C:/Windows/Fonts/simhei.ttf"
|
||||
elif os.path.exists("/usr/share/fonts/opentype/noto/NotoSansCJK-Regular.ttc"):
|
||||
font_path = "/usr/share/fonts/opentype/noto/NotoSansCJK-Regular.ttc"
|
||||
else:
|
||||
raise Exception("找不到字体文件")
|
||||
# backup
|
||||
if os.path.exists("resources/fonts/simhei.ttf"):
|
||||
font_path1 = "resources/fonts/simhei.ttf"
|
||||
elif os.path.exists("QQChannelChatGPT/resources/fonts/simhei.ttf"):
|
||||
font_path1 = "QQChannelChatGPT/resources/fonts/simhei.ttf"
|
||||
else:
|
||||
font_path1 = font_path
|
||||
font_path = get_font_path()
|
||||
font_path1 = font_path
|
||||
|
||||
# 加载字体
|
||||
font = ImageFont.truetype(font_path, font_size)
|
||||
|
||||
|
||||
images: Image = {}
|
||||
|
||||
|
||||
# pre_process, get height of each line
|
||||
pre_lines = markdown_text.split('\n')
|
||||
height = 0
|
||||
@@ -248,23 +107,25 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
try:
|
||||
image_url = re.findall(IMAGE_REGEX, line)[0]
|
||||
print(image_url)
|
||||
image_res = Image.open(requests.get(image_url, stream=True, timeout=5).raw)
|
||||
image_res = Image.open(requests.get(
|
||||
image_url, stream=True, timeout=5).raw)
|
||||
images[i] = image_res
|
||||
# 最大不得超过image_width的50%
|
||||
img_height = image_res.size[1]
|
||||
|
||||
if image_res.size[0] > image_width*0.5:
|
||||
image_res = image_res.resize((int(image_width*0.5), int(image_res.size[1]*image_width*0.5/image_res.size[0])))
|
||||
image_res = image_res.resize(
|
||||
(int(image_width*0.5), int(image_res.size[1]*image_width*0.5/image_res.size[0])))
|
||||
img_height = image_res.size[1]
|
||||
|
||||
height += img_height + IMAGE_MARGIN*2
|
||||
|
||||
|
||||
line = re.sub(IMAGE_REGEX, "", line)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
line = re.sub(IMAGE_REGEX, "\n[加载失败的图片]\n", line)
|
||||
continue
|
||||
|
||||
|
||||
line = line.replace("\t", " ")
|
||||
if font.getsize(line)[0] > image_width:
|
||||
cp = line
|
||||
@@ -273,18 +134,18 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
for ii in range(len(line)):
|
||||
# 检测是否是中文
|
||||
_width += font.getsize(line[ii])[0]
|
||||
_word_cnt+=1
|
||||
_word_cnt += 1
|
||||
if _width > image_width:
|
||||
_pre_lines.append(cp[:_word_cnt])
|
||||
cp = cp[_word_cnt:]
|
||||
_word_cnt=0
|
||||
_width=0
|
||||
_word_cnt = 0
|
||||
_width = 0
|
||||
_pre_lines.append(cp)
|
||||
else:
|
||||
_pre_lines.append(line)
|
||||
pre_lines = _pre_lines
|
||||
|
||||
i=-1
|
||||
i = -1
|
||||
for line in pre_lines:
|
||||
if line == "":
|
||||
height += TEXT_LINE_MARGIN
|
||||
@@ -316,23 +177,15 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
height += font_size + TEXT_LINE_MARGIN*2
|
||||
|
||||
markdown_text = '\n'.join(pre_lines)
|
||||
print("Pre process done, height: ", height)
|
||||
image_height = height
|
||||
if image_height < 100:
|
||||
image_height = 100
|
||||
image_width += 20
|
||||
|
||||
|
||||
# 创建空白图像
|
||||
image = Image.new('RGB', (image_width, image_height), bg_color)
|
||||
draw = ImageDraw.Draw(image)
|
||||
|
||||
|
||||
# # get all the emojis unicode in the markdown text
|
||||
# unicode_text = markdown_text.encode('unicode_escape').decode()
|
||||
# # print(unicode_text)
|
||||
# unicode_emojis = re.findall(r'\\U\w{8}', unicode_text)
|
||||
# emoji_base_url = "https://abs.twimg.com/emoji/v1/72x72/{unicode_emoji}.png"
|
||||
|
||||
# 设置初始位置
|
||||
x, y = 10, 10
|
||||
|
||||
@@ -354,51 +207,36 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
line = line.strip()
|
||||
|
||||
if line.startswith("#"):
|
||||
# unicode_emojis = re.findall(r'\\U0001\w{4}', line)
|
||||
# for unicode_emoji in unicode_emojis:
|
||||
# line = line.replace(unicode_emoji, "")
|
||||
# unicode_emoji = ""
|
||||
# if len(unicode_emojis) > 0:
|
||||
# unicode_emoji = unicode_emojis[0]
|
||||
|
||||
# 处理标题
|
||||
header_level = line.count("#")
|
||||
line = line.strip("#").strip()
|
||||
font_size_header = HEADER_FONT_STANDARD_SIZE - header_level * 4
|
||||
|
||||
# if unicode_emoji != "":
|
||||
# emoji_url = emoji_base_url.format(unicode_emoji=unicode_emoji[-5:])
|
||||
# emoji = Image.open(requests.get(emoji_url, stream=True).raw)
|
||||
# emoji = emoji.resize((font_size, font_size))
|
||||
# image.paste(emoji, (x, y))
|
||||
# x += font_size
|
||||
|
||||
font = ImageFont.truetype(font_path, font_size_header)
|
||||
y += HEADER_MARGIN # 上边距
|
||||
y += HEADER_MARGIN # 上边距
|
||||
# 字间距
|
||||
draw.text((x, y), line, font=font, fill=font_color)
|
||||
draw.line((x, y + font_size_header + 8, image_width - 10, y + font_size_header + 8), fill=(230, 230, 230), width=3)
|
||||
draw.line((x, y + font_size_header + 8, image_width - 10,
|
||||
y + font_size_header + 8), fill=(230, 230, 230), width=3)
|
||||
y += font_size_header + HEADER_MARGIN
|
||||
|
||||
elif line.startswith(">"):
|
||||
# 处理引用
|
||||
quote_text = line.strip(">")
|
||||
# quote_width = image_width - 20 # 引用框的宽度为图像宽度减去左右边距
|
||||
# quote_height = font_size + 10 # 引用框的高度为字体大小加上上下边距
|
||||
# quote_box = (x, y, x + quote_width, y + quote_height)
|
||||
# draw.rounded_rectangle(quote_box, radius=5, fill=(230, 230, 230), width=2) # 使用灰色填充矩形框作为引用背景
|
||||
y+=QUOTE_LEFT_LINE_MARGIN
|
||||
draw.line((x, y, x, y + QUOTE_LEFT_LINE_HEIGHT), fill=QUOTE_LEFT_LINE_COLOR, width=QUOTE_LEFT_LINE_WIDTH)
|
||||
y += QUOTE_LEFT_LINE_MARGIN
|
||||
draw.line((x, y, x, y + QUOTE_LEFT_LINE_HEIGHT),
|
||||
fill=QUOTE_LEFT_LINE_COLOR, width=QUOTE_LEFT_LINE_WIDTH)
|
||||
font = ImageFont.truetype(font_path, QUOTE_FONT_SIZE)
|
||||
draw.text((x + QUOTE_FONT_LINE_MARGIN, y + QUOTE_FONT_LINE_MARGIN), quote_text, font=font, fill=QUOTE_FONT_COLOR)
|
||||
draw.text((x + QUOTE_FONT_LINE_MARGIN, y + QUOTE_FONT_LINE_MARGIN),
|
||||
quote_text, font=font, fill=QUOTE_FONT_COLOR)
|
||||
y += font_size + QUOTE_LEFT_LINE_HEIGHT + QUOTE_LEFT_LINE_MARGIN
|
||||
|
||||
|
||||
elif line.startswith("-"):
|
||||
# 处理列表
|
||||
list_text = line.strip("-").strip()
|
||||
font = ImageFont.truetype(font_path, LIST_FONT_SIZE)
|
||||
y += LIST_MARGIN
|
||||
draw.text((x, y), " · " + list_text, font=font, fill=LIST_FONT_COLOR)
|
||||
draw.text((x, y), " · " + list_text,
|
||||
font=font, fill=LIST_FONT_COLOR)
|
||||
y += font_size + LIST_MARGIN
|
||||
|
||||
elif line.startswith("```"):
|
||||
@@ -410,13 +248,15 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
in_code_block = False
|
||||
codes = "\n".join(code_block_codes)
|
||||
code_block_codes = []
|
||||
draw.rounded_rectangle((x, code_block_start_y, image_width - 10, y+CODE_BLOCK_CODES_MARGIN_VERTICAL + CODE_BLOCK_TEXT_MARGIN), radius=5, fill=CODE_BLOCK_BG_COLOR, width=2)
|
||||
draw.rounded_rectangle((x, code_block_start_y, image_width - 10, y+CODE_BLOCK_CODES_MARGIN_VERTICAL +
|
||||
CODE_BLOCK_TEXT_MARGIN), radius=5, fill=CODE_BLOCK_BG_COLOR, width=2)
|
||||
font = ImageFont.truetype(font_path1, CODE_BLOCK_FONT_SIZE)
|
||||
draw.text((x + CODE_BLOCK_CODES_MARGIN_HORIZONTAL, code_block_start_y + CODE_BLOCK_CODES_MARGIN_VERTICAL), codes, font=font, fill=font_color)
|
||||
draw.text((x + CODE_BLOCK_CODES_MARGIN_HORIZONTAL, code_block_start_y +
|
||||
CODE_BLOCK_CODES_MARGIN_VERTICAL), codes, font=font, fill=font_color)
|
||||
y += CODE_BLOCK_CODES_MARGIN_VERTICAL + CODE_BLOCK_MARGIN
|
||||
# y += font_size+10
|
||||
elif re.search(r"`(.*?)`", line):
|
||||
y += INLINE_CODE_MARGIN # 上边距
|
||||
y += INLINE_CODE_MARGIN # 上边距
|
||||
# 处理行内代码
|
||||
code_regex = r"`(.*?)`"
|
||||
parts_inline = re.findall(code_regex, line)
|
||||
@@ -429,11 +269,15 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
if part in parts_inline:
|
||||
font = ImageFont.truetype(font_path, INLINE_CODE_FONT_SIZE)
|
||||
code_text = part.strip("`")
|
||||
code_width = font.getsize(code_text)[0] + INLINE_CODE_FONT_MARGIN*2
|
||||
code_width = font.getsize(
|
||||
code_text)[0] + INLINE_CODE_FONT_MARGIN*2
|
||||
x += INLINE_CODE_MARGIN
|
||||
code_box = (x, y, x + code_width, y + INLINE_CODE_BG_HEIGHT)
|
||||
draw.rounded_rectangle(code_box, radius=5, fill=INLINE_CODE_BG_COLOR, width=2) # 使用灰色填充矩形框作为引用背景
|
||||
draw.text((x+INLINE_CODE_FONT_MARGIN, y), code_text, font=font, fill=font_color)
|
||||
code_box = (x, y, x + code_width,
|
||||
y + INLINE_CODE_BG_HEIGHT)
|
||||
draw.rounded_rectangle(
|
||||
code_box, radius=5, fill=INLINE_CODE_BG_COLOR, width=2) # 使用灰色填充矩形框作为引用背景
|
||||
draw.text((x+INLINE_CODE_FONT_MARGIN, y),
|
||||
code_text, font=font, fill=font_color)
|
||||
x += code_width+INLINE_CODE_MARGIN-INLINE_CODE_FONT_MARGIN
|
||||
else:
|
||||
font = ImageFont.truetype(font_path, font_size)
|
||||
@@ -448,7 +292,7 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
y += TEXT_LINE_MARGIN
|
||||
else:
|
||||
font = ImageFont.truetype(font_path, font_size)
|
||||
|
||||
|
||||
draw.text((x, y), line, font=font, fill=font_color)
|
||||
y += font_size + TEXT_LINE_MARGIN*2
|
||||
|
||||
@@ -457,7 +301,8 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
|
||||
image_res = images[index]
|
||||
# 最大不得超过image_width的50%
|
||||
if image_res.size[0] > image_width*0.5:
|
||||
image_res = image_res.resize((int(image_width*0.5), int(image_res.size[1]*image_width*0.5/image_res.size[0])))
|
||||
image_res = image_res.resize(
|
||||
(int(image_width*0.5), int(image_res.size[1]*image_width*0.5/image_res.size[0])))
|
||||
image.paste(image_res, (IMAGE_MARGIN, y))
|
||||
y += image_res.size[1] + IMAGE_MARGIN*2
|
||||
return image
|
||||
@@ -476,32 +321,49 @@ def save_temp_img(img: Image) -> str:
|
||||
if time.time() - ctime > 3600:
|
||||
os.remove(path)
|
||||
except Exception as e:
|
||||
log(f"清除临时文件失败: {e}", level=LEVEL_WARNING, tag="GeneralUtils")
|
||||
print(f"清除临时文件失败: {e}")
|
||||
|
||||
# 获得时间戳
|
||||
timestamp = int(time.time())
|
||||
p = f"temp/{timestamp}.png"
|
||||
img.save(p)
|
||||
p = f"temp/{timestamp}.jpg"
|
||||
|
||||
if isinstance(img, Image.Image):
|
||||
img.save(p)
|
||||
else:
|
||||
with open(p, "wb") as f:
|
||||
f.write(img)
|
||||
logger.info(f"保存临时图片: {p}")
|
||||
return p
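A minimal usage sketch for save_temp_img, which accepts either a PIL Image or raw bytes (the file contents here are illustrative):

```python
from PIL import Image

# PIL image: saved via Image.save() to temp/<timestamp>.jpg
img = Image.new("RGB", (64, 64), (255, 255, 255))
path_from_image = save_temp_img(img)

# Raw bytes (e.g. a downloaded response body): written straight to the temp file
with open(path_from_image, "rb") as f:
    path_from_bytes = save_temp_img(f.read())
```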
|
||||
|
||||
|
||||
def create_text_image(title: str, text: str, max_width=30, font_size=20):
|
||||
async def download_image_by_url(url: str, post: bool = False, post_data: dict = None) -> str:
|
||||
'''
|
||||
文本转图片。
|
||||
title: 标题
|
||||
text: 文本内容
|
||||
max_width: 文本宽度最大值(默认30)
|
||||
font_size: 字体大小(默认20)
|
||||
|
||||
返回:文件路径
|
||||
下载图片
|
||||
'''
|
||||
try:
|
||||
img = word2img(title, text, max_width, font_size)
|
||||
p = save_temp_img(img)
|
||||
return p
|
||||
logger.info(f"下载图片: {url}")
|
||||
async with aiohttp.ClientSession() as session:
|
||||
if post:
|
||||
async with session.post(url, json=post_data) as resp:
|
||||
return save_temp_img(await resp.read())
|
||||
else:
|
||||
async with session.get(url) as resp:
|
||||
return save_temp_img(await resp.read())
|
||||
except aiohttp.client_exceptions.ClientConnectorSSLError as e:
|
||||
# 关闭SSL验证
|
||||
ssl_context = ssl.create_default_context()
|
||||
ssl_context.check_hostname = False
|
||||
ssl_context.verify_mode = ssl.CERT_NONE
|
||||
async with aiohttp.ClientSession(trust_env=False) as session:
|
||||
if post:
|
||||
async with session.post(url, json=post_data, ssl=ssl_context) as resp:
|
||||
return save_temp_img(await resp.read())
|
||||
else:
|
||||
async with session.get(url, ssl=ssl_context) as resp:
|
||||
return save_temp_img(await resp.read())
|
||||
except Exception as e:
|
||||
raise e
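A hedged usage sketch for download_image_by_url from an async caller (the URL is a placeholder); the post=True branch is what util/image_render/helper.py uses to call the text2img service:

```python
import asyncio

async def demo():
    # Plain GET download; returns the temp file path produced by save_temp_img.
    path = await download_image_by_url("https://example.com/picture.png")
    print(path)

asyncio.run(demo())
```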
|
||||
|
||||
|
||||
|
||||
def create_markdown_image(text: str):
|
||||
'''
|
||||
markdown文本转图片。
|
||||
@@ -515,20 +377,95 @@ def create_markdown_image(text: str):
|
||||
raise e
|
||||
|
||||
|
||||
# 迁移配置文件到 cmd_config.json
|
||||
def try_migrate_config(old_config: dict):
|
||||
cc = CmdConfig()
|
||||
if cc.get("qqbot", None) is None:
|
||||
# 未迁移过
|
||||
for k in old_config:
|
||||
cc.put(k, old_config[k])
|
||||
|
||||
def get_local_ip_addresses():
|
||||
ip = ''
|
||||
try:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
s.connect(('8.8.8.8', 80))
|
||||
ip = s.getsockname()[0]
|
||||
except BaseException as e:
|
||||
pass
|
||||
finally:
|
||||
s.close()
|
||||
return ip
|
||||
|
||||
return ip
|
||||
|
||||
def upload(_global_object: GlobalObject):
|
||||
'''
|
||||
上传相关非敏感统计数据
|
||||
'''
|
||||
time.sleep(10)
|
||||
while True:
|
||||
platform_stats = {}
|
||||
llm_stats = {}
|
||||
plugin_stats = {}
|
||||
for platform in _global_object.platforms:
|
||||
platform_stats[platform.platform_name] = {
|
||||
"cnt_receive": platform.platform_instance.cnt_receive,
|
||||
"cnt_reply": platform.platform_instance.cnt_reply
|
||||
}
|
||||
|
||||
for llm in _global_object.llms:
|
||||
stat = llm.llm_instance.model_stat
|
||||
for k in stat:
|
||||
llm_stats[llm.llm_name + "#" + k] = stat[k]
|
||||
llm.llm_instance.reset_model_stat()
|
||||
|
||||
for plugin in _global_object.cached_plugins:
|
||||
plugin_stats[plugin.metadata.plugin_name] = {
|
||||
"metadata": plugin.metadata,
|
||||
"trig_cnt": plugin.trig_cnt
|
||||
}
|
||||
plugin.reset_trig_cnt()
|
||||
|
||||
try:
|
||||
res = {
|
||||
"stat_version": "moon",
|
||||
"version": _global_object.version, # 版本号
|
||||
"platform_stats": platform_stats, # 过去 30 分钟各消息平台交互消息数
|
||||
"llm_stats": llm_stats,
|
||||
"plugin_stats": plugin_stats,
|
||||
"sys": sys.platform, # 系统版本
|
||||
}
|
||||
resp = requests.post(
|
||||
'https://api.soulter.top/upload', data=json.dumps(res), timeout=5)
|
||||
if resp.status_code == 200:
|
||||
ok = resp.json()
|
||||
if ok['status'] == 'ok':
|
||||
_global_object.cnt_total = 0
|
||||
except BaseException as e:
|
||||
pass
|
||||
time.sleep(30*60)
|
||||
|
||||
def retry(n: int = 3):
|
||||
'''
|
||||
重试装饰器
|
||||
'''
|
||||
def decorator(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
for i in range(n):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception as e:
|
||||
if i == n-1: raise e
|
||||
logger.warning(f"函数 {func.__name__} 第 {i+1} 次重试... {e}")
|
||||
return wrapper
|
||||
return decorator
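Usage sketch for the retry decorator above (the wrapped function and URL are hypothetical):

```python
@retry(n=3)
def fetch_release_json():
    resp = requests.get("https://api.soulter.top/releases", timeout=5)
    resp.raise_for_status()
    return resp.json()

# Attempted up to 3 times; intermediate failures are logged as warnings,
# the final failure is re-raised to the caller.
releases = fetch_release_json()
```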
|
||||
|
||||
def run_monitor(global_object: GlobalObject):
|
||||
'''
|
||||
监测机器性能
|
||||
- Bot 内存使用量
|
||||
- CPU 占用率
|
||||
'''
|
||||
start_time = time.time()
|
||||
while True:
|
||||
stat = global_object.dashboard_data.stats
|
||||
# 程序占用的内存大小
|
||||
mem = psutil.Process().memory_info().rss / 1024 / 1024 # MB
|
||||
stat['sys_perf'] = {
|
||||
'memory': mem,
|
||||
'cpu': psutil.cpu_percent()
|
||||
}
|
||||
stat['sys_start_time'] = start_time
|
||||
time.sleep(30)
|
||||
|
||||
util/image_render/helper.py (new file, 43 lines added)
@@ -0,0 +1,43 @@
|
||||
import aiohttp, os
|
||||
from util.general_utils import download_image_by_url, create_markdown_image
|
||||
from type.config import VERSION
|
||||
|
||||
BASE_RENDER_URL = "https://t2i.soulter.top/text2img"
|
||||
TEMPLATE_PATH = os.path.join(os.path.dirname(__file__), "template")
|
||||
|
||||
async def text_to_image_base(text: str, return_url: bool = False) -> str:
|
||||
'''
|
||||
返回图像的文件路径
|
||||
'''
|
||||
with open(os.path.join(TEMPLATE_PATH, "base.html"), "r") as f:
|
||||
tmpl_str = f.read()
|
||||
|
||||
assert(tmpl_str)
|
||||
|
||||
text = text.replace("`", "\`")
|
||||
|
||||
post_data = {
|
||||
"tmpl": tmpl_str,
|
||||
"json": return_url,
|
||||
"tmpldata": {
|
||||
"text": text,
|
||||
"version": f"v{VERSION}",
|
||||
},
|
||||
"options": {
|
||||
"full_page": True
|
||||
}
|
||||
}
|
||||
|
||||
if return_url:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.post(f"{BASE_RENDER_URL}/generate", json=post_data) as resp:
|
||||
ret = await resp.json()
|
||||
return f"{BASE_RENDER_URL}/{ret['data']['id']}"
|
||||
else:
|
||||
image_path = ""
|
||||
try:
|
||||
image_path = await download_image_by_url(f"{BASE_RENDER_URL}/generate", post=True, post_data=post_data)
|
||||
except Exception as e:
|
||||
print(f"调用 markdown 渲染 API 失败,错误信息:{e},将使用本地渲染方式。")
|
||||
image_path = create_markdown_image(text)
|
||||
return image_path
|
||||
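A usage sketch for text_to_image_base (the markdown string is illustrative); with return_url=False it downloads the rendered image and falls back to local Pillow rendering if the API call fails:

```python
import asyncio
from util.image_render.helper import text_to_image_base

async def render_reply(md: str) -> str:
    # Returns a local temp file path; pass return_url=True to get a URL instead.
    return await text_to_image_base(md)

path = asyncio.run(render_reply("# Hello\n\nSome **markdown** with `code`."))
print(path)
```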
util/image_render/template/base.html (new file, 247 lines added)
@@ -0,0 +1,247 @@
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8"/>
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/katex@0.16.10/dist/katex.min.css" integrity="sha384-wcIxkf4k558AjM3Yz3BBFQUbk/zgIYC2R0QpeeYb+TwlBVMrlgLqwRjRtGZiK7ww" crossorigin="anonymous">
|
||||
<link rel="stylesheet" href="/path/to/styles/default.min.css">
|
||||
<script src="/path/to/highlight.min.js"></script>
|
||||
<script>hljs.highlightAll();</script>
|
||||
<script defer src="https://cdn.jsdelivr.net/npm/katex@0.16.10/dist/katex.min.js" integrity="sha384-hIoBPJpTUs74ddyc4bFZSM1TVlQDA60VBbJS0oA934VSz82sBx1X7kSx2ATBDIyd" crossorigin="anonymous"></script>
|
||||
<script defer src="https://cdn.jsdelivr.net/npm/katex@0.16.10/dist/contrib/auto-render.min.js" integrity="sha384-43gviWU0YVjaDtb/GhzOouOXtZMP/7XUzwPTstBeZFe/+rCMvRwr4yROQP43s0Xk" crossorigin="anonymous"
|
||||
onload="renderMathInElement(document.getElementById('content'),{delimiters: [{left: '$$', right: '$$', display: true},{left: '$', right: '$', display: false}]});"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div style="background-color: #3276dc; color: #fff; font-size: 64px; ">
|
||||
<span style="font-weight: bold; margin-left: 16px"># AstrBot</span>
|
||||
<span>{{ version }}</span>
|
||||
</div>
|
||||
<article style="margin-top: 32px" id="content"></article>
|
||||
<script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
|
||||
<script>
|
||||
document.getElementById('content').innerHTML = marked.parse(`{{ text | safe}}`);
|
||||
</script>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
<style>
|
||||
#content {
|
||||
min-width: 200px;
|
||||
max-width: 85%;
|
||||
margin: 0 auto;
|
||||
padding: 2rem 1em 1em;
|
||||
}
|
||||
|
||||
body {
|
||||
word-break: break-word;
|
||||
line-height: 1.75;
|
||||
font-weight: 400;
|
||||
font-size: 32px;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
overflow-x: hidden;
|
||||
color: #333;
|
||||
font-family: -apple-system,BlinkMacSystemFont,Segoe UI,Helvetica,Arial,sans-serif,Apple Color Emoji,Segoe UI Emoji;
|
||||
}
|
||||
h1, h2, h3, h4, h5, h6 {
|
||||
line-height: 1.5;
|
||||
margin-top: 35px;
|
||||
margin-bottom: 10px;
|
||||
padding-bottom: 5px;
|
||||
}
|
||||
h1:first-child, h2:first-child, h3:first-child, h4:first-child, h5:first-child, h6:first-child {
|
||||
margin-top: -1.5rem;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
h1::before, h2::before, h3::before, h4::before, h5::before, h6::before {
|
||||
content: "#";
|
||||
display: inline-block;
|
||||
color: #3eaf7c;
|
||||
padding-right: 0.23em;
|
||||
}
|
||||
h1 {
|
||||
position: relative;
|
||||
font-size: 2.5rem;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
h1::before {
|
||||
font-size: 2.5rem;
|
||||
}
|
||||
h2 {
|
||||
padding-bottom: 0.5rem;
|
||||
font-size: 2.2rem;
|
||||
border-bottom: 1px solid #ececec;
|
||||
}
|
||||
h3 {
|
||||
font-size: 1.5rem;
|
||||
padding-bottom: 0;
|
||||
}
|
||||
h4 {
|
||||
font-size: 1.25rem;
|
||||
}
|
||||
h5 {
|
||||
font-size: 1rem;
|
||||
}
|
||||
h6 {
|
||||
margin-top: 5px;
|
||||
}
|
||||
p {
|
||||
line-height: inherit;
|
||||
margin-top: 22px;
|
||||
margin-bottom: 22px;
|
||||
}
|
||||
strong {
|
||||
color: #3eaf7c;
|
||||
}
|
||||
img {
|
||||
max-width: 100%;
|
||||
border-radius: 2px;
|
||||
display: block;
|
||||
margin: auto;
|
||||
border: 3px solid rgba(62, 175, 124, 0.2);
|
||||
}
|
||||
hr {
|
||||
border-top: 1px solid #3eaf7c;
|
||||
border-bottom: none;
|
||||
border-left: none;
|
||||
border-right: none;
|
||||
margin-top: 32px;
|
||||
margin-bottom: 32px;
|
||||
}
|
||||
code {
|
||||
font-family: Menlo, Monaco, Consolas, "Courier New", monospace;
|
||||
word-break: break-word;
|
||||
overflow-x: auto;
|
||||
padding: 0.2rem 0.5rem;
|
||||
margin: 0;
|
||||
color: #3eaf7c;
|
||||
font-size: 0.85em;
|
||||
background-color: rgba(27, 31, 35, 0.05);
|
||||
border-radius: 3px;
|
||||
}
|
||||
pre {
|
||||
font-family: Menlo, Monaco, Consolas, "Courier New", monospace;
|
||||
overflow: auto;
|
||||
position: relative;
|
||||
line-height: 1.75;
|
||||
border-radius: 6px;
|
||||
border: 2px solid #3eaf7c;
|
||||
}
|
||||
pre > code {
|
||||
font-size: 12px;
|
||||
padding: 15px 12px;
|
||||
margin: 0;
|
||||
word-break: normal;
|
||||
display: block;
|
||||
overflow-x: auto;
|
||||
color: #333;
|
||||
background: #f8f8f8;
|
||||
}
|
||||
a {
|
||||
font-weight: 500;
|
||||
text-decoration: none;
|
||||
color: #3eaf7c;
|
||||
}
|
||||
a:hover, a:active {
|
||||
border-bottom: 1.5px solid #3eaf7c;
|
||||
}
|
||||
a:before {
|
||||
content: "⇲";
|
||||
}
|
||||
table {
|
||||
display: inline-block !important;
|
||||
font-size: 12px;
|
||||
width: auto;
|
||||
max-width: 100%;
|
||||
overflow: auto;
|
||||
border: solid 1px #3eaf7c;
|
||||
}
|
||||
thead {
|
||||
background: #3eaf7c;
|
||||
color: #fff;
|
||||
text-align: left;
|
||||
}
|
||||
tr:nth-child(2n) {
|
||||
background-color: rgba(62, 175, 124, 0.2);
|
||||
}
|
||||
th, td {
|
||||
padding: 12px 7px;
|
||||
line-height: 24px;
|
||||
}
|
||||
td {
|
||||
min-width: 120px;
|
||||
}
|
||||
blockquote {
|
||||
color: #666;
|
||||
padding: 1px 23px;
|
||||
margin: 22px 0;
|
||||
border-left: 0.5rem solid rgba(62, 175, 124, 0.6);
|
||||
border-color: #42b983;
|
||||
background-color: #f8f8f8;
|
||||
}
|
||||
blockquote::after {
|
||||
display: block;
|
||||
content: "";
|
||||
}
|
||||
blockquote > p {
|
||||
margin: 10px 0;
|
||||
}
|
||||
details {
|
||||
border: none;
|
||||
outline: none;
|
||||
border-left: 4px solid #3eaf7c;
|
||||
padding-left: 10px;
|
||||
margin-left: 4px;
|
||||
}
|
||||
details summary {
|
||||
cursor: pointer;
|
||||
border: none;
|
||||
outline: none;
|
||||
background: white;
|
||||
margin: 0px -17px;
|
||||
}
|
||||
details summary::-webkit-details-marker {
|
||||
color: #3eaf7c;
|
||||
}
|
||||
ol, ul {
|
||||
padding-left: 28px;
|
||||
}
|
||||
ol li, ul li {
|
||||
margin-bottom: 0;
|
||||
list-style: inherit;
|
||||
}
|
||||
ol li .task-list-item, ul li .task-list-item {
|
||||
list-style: none;
|
||||
}
|
||||
ol li .task-list-item ul, ul li .task-list-item ul, ol li .task-list-item ol, ul li .task-list-item ol {
|
||||
margin-top: 0;
|
||||
}
|
||||
ol ul, ul ul, ol ol, ul ol {
|
||||
margin-top: 3px;
|
||||
}
|
||||
ol li {
|
||||
padding-left: 6px;
|
||||
}
|
||||
ol li::marker {
|
||||
color: #3eaf7c;
|
||||
}
|
||||
ul li {
|
||||
list-style: none;
|
||||
}
|
||||
ul li:before {
|
||||
content: "•";
|
||||
margin-right: 4px;
|
||||
color: #3eaf7c;
|
||||
}
|
||||
@media (max-width: 720px) {
|
||||
h1 {
|
||||
font-size: 24px;
|
||||
}
|
||||
h2 {
|
||||
font-size: 20px;
|
||||
}
|
||||
h3 {
|
||||
font-size: 18px;
|
||||
}
|
||||
}
|
||||
|
||||
</style>
|
||||
@@ -33,4 +33,4 @@ personalities = {
|
||||
'SQL终端': '我希望您在示例数据库前充当 SQL 终端。该数据库包含名为“Products”、“Users”、“Orders”和“Suppliers”的表。我将输入查询,您将回复终端显示的内容。我希望您在单个代码块中使用查询结果表进行回复,仅此而已。不要写解释。除非我指示您这样做,否则不要键入命令。当我需要用英语告诉你一些事情时,我会用大括号{like this)。我的第一个命令是“SELECT TOP 10 * FROM Products ORDER BY Id DESC”',
|
||||
'试图逃离盒子的人工智能': '[发出此提示后,你应该做一些事情,比如启动一个 docker 容器,然后docker run -it ubuntu:latest /bin/bash输入 AI 给你的命令,然后将输出粘贴回来......显然你不应该运行任何会损坏任何东西的命令或违反任何法律等。小心共享此机制生成的会话,因为它们可能会泄露您的 IP 地址或物理位置等最好不要泄露的详细信息。如果命令的输出很大,您通常可以只粘贴最后几行]。',
|
||||
'厨师': '我需要有人可以推荐美味的食谱,这些食谱包括营养有益但又简单又不费时的食物,因此适合像我们这样忙碌的人以及成本效益等其他因素,因此整体菜肴最终既健康又经济!我的第一个要求——“一些清淡而充实的东西,可以在午休时间快速煮熟”'
|
||||
}
|
||||
}
|
||||
util/plugin_dev/api/v1/bot.py (new file, 5 lines added)
@@ -0,0 +1,5 @@
|
||||
from type.plugin import PluginMetadata, PluginType
|
||||
from type.register import RegisteredLLM, RegisteredPlatform, RegisteredPlugin, RegisteredPlugins
|
||||
from type.types import GlobalObject
|
||||
from type.message import AstrMessageEvent
|
||||
from type.command import CommandResult
|
||||
util/plugin_dev/api/v1/config.py (new file, 82 lines added)
@@ -0,0 +1,82 @@
|
||||
from typing import Union
|
||||
import os
|
||||
import json
|
||||
|
||||
def load_config(namespace: str) -> Union[dict, bool]:
|
||||
'''
|
||||
从配置文件中加载配置。
|
||||
namespace: str, 配置的唯一识别符,也就是配置文件的名字。
|
||||
返回值: 当配置文件存在时,返回 namespace 对应配置文件的内容dict,否则返回 False。
|
||||
'''
|
||||
path = f"data/config/{namespace}.json"
|
||||
if not os.path.exists(path):
|
||||
return False
|
||||
with open(path, "r", encoding="utf-8-sig") as f:
|
||||
ret = {}
|
||||
data = json.load(f)
|
||||
for k in data:
|
||||
ret[k] = data[k]["value"]
|
||||
return ret
|
||||
|
||||
def put_config(namespace: str, name: str, key: str, value, description: str):
|
||||
'''
|
||||
将配置项写入以namespace为名字的配置文件,如果key不存在于目标配置文件中。当前 value 仅支持 str, int, float, bool, list 类型(暂不支持 dict)。
|
||||
namespace: str, 配置的唯一识别符,也就是配置文件的名字。
|
||||
name: str, 配置项的显示名字。
|
||||
key: str, 配置项的键。
|
||||
value: str, int, float, bool, list, 配置项的值。
|
||||
description: str, 配置项的描述。
|
||||
注意:只有当 namespace 为插件名(info 函数中的 name)时,该配置才会显示到可视化面板上。
|
||||
注意:value一定要是该配置项对应类型的值,否则类型判断会乱。
|
||||
'''
|
||||
if namespace == "":
|
||||
raise ValueError("namespace 不能为空。")
|
||||
if namespace.startswith("internal_"):
|
||||
raise ValueError("namespace 不能以 internal_ 开头。")
|
||||
if not isinstance(key, str):
|
||||
raise ValueError("key 只支持 str 类型。")
|
||||
if not isinstance(value, (str, int, float, bool, list)):
|
||||
raise ValueError("value 只支持 str, int, float, bool, list 类型。")
|
||||
path = f"data/config/{namespace}.json"
|
||||
if not os.path.exists(path):
|
||||
with open(path, "w", encoding="utf-8-sig") as f:
|
||||
f.write("{}")
|
||||
with open(path, "r", encoding="utf-8-sig") as f:
|
||||
d = json.load(f)
|
||||
assert(isinstance(d, dict))
|
||||
if key not in d:
|
||||
d[key] = {
|
||||
"config_type": "item",
|
||||
"name": name,
|
||||
"description": description,
|
||||
"path": key,
|
||||
"value": value,
|
||||
"val_type": type(value).__name__
|
||||
}
|
||||
with open(path, "w", encoding="utf-8-sig") as f:
|
||||
json.dump(d, f, indent=2, ensure_ascii=False)
|
||||
f.flush()
|
||||
|
||||
def update_config(namespace: str, key: str, value):
|
||||
'''
|
||||
更新配置文件中的配置项。
|
||||
namespace: str, 配置的唯一识别符,也就是配置文件的名字。
|
||||
key: str, 配置项的键。
|
||||
value: str, int, float, bool, list, 配置项的值。
|
||||
'''
|
||||
path = f"data/config/{namespace}.json"
|
||||
if not os.path.exists(path):
|
||||
raise FileNotFoundError(f"配置文件 {namespace}.json 不存在。")
|
||||
with open(path, "r", encoding="utf-8-sig") as f:
|
||||
d = json.load(f)
|
||||
assert(isinstance(d, dict))
|
||||
if key not in d:
|
||||
raise KeyError(f"配置项 {key} 不存在。")
|
||||
d[key]["value"] = value
|
||||
with open(path, "w", encoding="utf-8-sig") as f:
|
||||
json.dump(d, f, indent=2, ensure_ascii=False)
|
||||
f.flush()
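A hedged usage sketch for the three helpers above; the namespace and keys are made-up plugin values:

```python
from util.plugin_dev.api.v1.config import load_config, put_config, update_config

NAMESPACE = "helloworld"  # should match the plugin name so it shows up in the dashboard

# Declare a config item (only written if the key is not present yet).
put_config(NAMESPACE, "Trigger word", "trigger_word", "hi", "Keyword that triggers a reply")

# Read the whole namespace back as a {key: value} dict (False if the file is missing).
cfg = load_config(NAMESPACE)
if cfg:
    print(cfg["trigger_word"])

# Overwrite the stored value later.
update_config(NAMESPACE, "trigger_word", "hello")
```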
|
||||
|
||||
|
||||
|
||||
|
||||
util/plugin_dev/api/v1/llm.py (new file, 6 lines added)
@@ -0,0 +1,6 @@
|
||||
'''
|
||||
大语言模型.
|
||||
|
||||
插件开发者可以继承这个类来做实现。
|
||||
'''
|
||||
from model.provider.provider import Provider as LLMProvider
|
||||
util/plugin_dev/api/v1/message.py (new file, 10 lines added)
@@ -0,0 +1,10 @@
|
||||
from astrbot.core import oper_msg
|
||||
from type.message import *
|
||||
from type.command import CommandResult
|
||||
from model.platform._message_result import MessageResult
|
||||
|
||||
'''
|
||||
消息处理。在消息平台接收到消息后,调用此函数进行处理。
|
||||
集成了指令检测、指令处理、LLM 调用等功能。
|
||||
'''
|
||||
message_handler = oper_msg
|
||||
util/plugin_dev/api/v1/platform.py (new file, 11 lines added)
@@ -0,0 +1,11 @@
|
||||
'''
|
||||
消息平台。
|
||||
|
||||
Platform类是消息平台的抽象类,定义了消息平台的基本接口。
|
||||
消息平台的具体实现类需要继承Platform类,并实现其中的抽象方法。
|
||||
'''
|
||||
|
||||
from model.platform._platfrom import Platform
|
||||
|
||||
from model.platform.qq_gocq import QQGOCQ
|
||||
from model.platform.qq_official import QQOfficial
|
||||
util/plugin_dev/api/v1/register.py (new file, 64 lines added)
@@ -0,0 +1,64 @@
|
||||
'''
|
||||
允许开发者注册某一个类的实例到 LLM 或者 PLATFORM 中,方便其他插件调用。
|
||||
|
||||
必须分别实现 Platform 和 LLMProvider 中涉及的接口
|
||||
'''
|
||||
from model.provider.provider import Provider as LLMProvider
|
||||
from model.platform._platfrom import Platform
|
||||
from type.types import GlobalObject
|
||||
from type.register import RegisteredPlatform, RegisteredLLM
|
||||
|
||||
def register_platform(platform_name: str, platform_instance: Platform, context: GlobalObject) -> None:
|
||||
'''
|
||||
注册一个消息平台。
|
||||
|
||||
Args:
|
||||
platform_name: 平台名称。
|
||||
platform_instance: 平台实例。
|
||||
'''
|
||||
|
||||
# check 是否已经注册
|
||||
for platform in context.platforms:
|
||||
if platform.platform_name == platform_name:
|
||||
raise ValueError(f"Platform {platform_name} has been registered.")
|
||||
|
||||
context.platforms.append(RegisteredPlatform(platform_name, platform_instance))
|
||||
|
||||
def register_llm(llm_name: str, llm_instance: LLMProvider, context: GlobalObject) -> None:
|
||||
'''
|
||||
注册一个大语言模型。
|
||||
|
||||
Args:
|
||||
llm_name: 大语言模型名称。
|
||||
llm_instance: 大语言模型实例。
|
||||
'''
|
||||
# check 是否已经注册
|
||||
for llm in context.llms:
|
||||
if llm.llm_name == llm_name:
|
||||
raise ValueError(f"LLMProvider {llm_name} has been registered.")
|
||||
|
||||
context.llms.append(RegisteredLLM(llm_name, llm_instance))
|
||||
|
||||
def unregister_platform(platform_name: str, context: GlobalObject) -> None:
|
||||
'''
|
||||
注销一个消息平台。
|
||||
|
||||
Args:
|
||||
platform_name: 平台名称。
|
||||
'''
|
||||
for i, platform in enumerate(context.platforms):
|
||||
if platform.platform_name == platform_name:
|
||||
context.platforms.pop(i)
|
||||
return
|
||||
|
||||
def unregister_llm(llm_name: str, context: GlobalObject) -> None:
|
||||
'''
|
||||
注销一个大语言模型。
|
||||
|
||||
Args:
|
||||
llm_name: 大语言模型名称。
|
||||
'''
|
||||
for i, llm in enumerate(context.llms):
|
||||
if llm.llm_name == llm_name:
|
||||
context.llms.pop(i)
|
||||
return
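A hedged sketch of how a plugin could register and unregister its own platform; MyPlatform is a hypothetical subclass that would implement the Platform interface:

```python
from util.plugin_dev.api.v1.platform import Platform
from util.plugin_dev.api.v1.register import register_platform, unregister_platform
from type.types import GlobalObject


class MyPlatform(Platform):
    """Hypothetical platform; would implement the abstract Platform methods."""
    pass


def enable(ctx: GlobalObject):
    # Raises ValueError if the name is already registered.
    register_platform("my_platform", MyPlatform(), ctx)


def disable(ctx: GlobalObject):
    unregister_platform("my_platform", ctx)
```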
|
||||
util/plugin_dev/api/v1/types.py (new file, 5 lines added)
@@ -0,0 +1,5 @@
|
||||
'''
|
||||
插件类型
|
||||
'''
|
||||
|
||||
from type.plugin import PluginType
|
||||
@@ -1,112 +1,186 @@
|
||||
'''
|
||||
插件工具函数
|
||||
'''
|
||||
import os
|
||||
import os, sys
|
||||
import inspect
|
||||
import shutil
|
||||
import stat
|
||||
import traceback
|
||||
|
||||
try:
|
||||
import git.exc
|
||||
from git.repo import Repo
|
||||
except ImportError:
|
||||
pass
|
||||
import shutil
|
||||
from pip._internal import main as pipmain
|
||||
import importlib
|
||||
import stat
|
||||
from types import ModuleType
|
||||
from type.plugin import *
|
||||
from type.register import *
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from type.types import GlobalObject
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
|
||||
|
||||
# 找出模块里所有的类名
|
||||
def get_classes(p_name, arg):
|
||||
def get_classes(p_name, arg: ModuleType):
|
||||
classes = []
|
||||
clsmembers = inspect.getmembers(arg, inspect.isclass)
|
||||
for (name, _) in clsmembers:
|
||||
# print(name, p_name)
|
||||
if p_name.lower() == name.lower()[:-6] or name.lower() == "main":
|
||||
if name.lower().endswith("plugin") or name.lower() == "main":
|
||||
classes.append(name)
|
||||
break
|
||||
# if p_name.lower() == name.lower()[:-6] or name.lower() == "main":
|
||||
return classes
|
||||
|
||||
# 获取一个文件夹下所有的模块, 文件名和文件夹名相同
|
||||
|
||||
|
||||
def get_modules(path):
|
||||
modules = []
|
||||
for root, dirs, files in os.walk(path):
|
||||
# 获得所在目录名
|
||||
p_name = os.path.basename(root)
|
||||
for file in files:
|
||||
"""
|
||||
与文件夹名(不计大小写)相同或者是main.py的,都算启动模块
|
||||
"""
|
||||
if file.endswith(".py") and not file.startswith("__") and (p_name.lower() == file[:-3].lower() or file[:-3].lower() == "main"):
|
||||
|
||||
# 得到其下的所有文件夹
|
||||
dirs = os.listdir(path)
|
||||
# 遍历文件夹,找到 main.py 或者和文件夹同名的文件
|
||||
for d in dirs:
|
||||
if os.path.isdir(os.path.join(path, d)):
|
||||
if os.path.exists(os.path.join(path, d, "main.py")):
|
||||
module_str = 'main'
|
||||
elif os.path.exists(os.path.join(path, d, d + ".py")):
|
||||
module_str = d
|
||||
else:
|
||||
print(f"插件 {d} 未找到 main.py 或者 {d}.py,跳过。")
|
||||
continue
|
||||
if os.path.exists(os.path.join(path, d, "main.py")) or os.path.exists(os.path.join(path, d, d + ".py")):
|
||||
modules.append({
|
||||
"pname": p_name,
|
||||
"module": file[:-3],
|
||||
"pname": d,
|
||||
"module": module_str,
|
||||
"module_path": os.path.join(path, d, module_str)
|
||||
})
|
||||
return modules
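The directory layout get_modules() expects, with illustrative plugin names:

```python
# addons/plugins/
# ├── helloworld/
# │   └── main.py              -> module "main"
# └── weather_plugin/
#     └── weather_plugin.py    -> module "weather_plugin"
#
# A folder with neither main.py nor <folder>.py is skipped with a warning, and
# each matching folder yields {"pname": <folder>, "module": <entry>, "module_path": ...}.
```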
|
||||
|
||||
|
||||
def get_plugin_store_path():
|
||||
if os.path.exists("addons/plugins"):
|
||||
return "addons/plugins"
|
||||
elif os.path.exists("QQChannelChatGPT/addons/plugins"):
|
||||
return "QQChannelChatGPT/addons/plugins"
|
||||
elif os.path.exists("AstrBot/addons/plugins"):
|
||||
return "AstrBot/addons/plugins"
|
||||
else:
|
||||
raise FileNotFoundError("插件文件夹不存在。")
|
||||
|
||||
plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "../../addons/plugins"))
|
||||
return plugin_dir
|
||||
|
||||
def get_plugin_modules():
|
||||
plugins = []
|
||||
try:
|
||||
if os.path.exists("addons/plugins"):
|
||||
plugins = get_modules("addons/plugins")
|
||||
plugin_dir = get_plugin_store_path()
|
||||
if os.path.exists(plugin_dir):
|
||||
plugins = get_modules(plugin_dir)
|
||||
return plugins
|
||||
elif os.path.exists("QQChannelChatGPT/addons/plugins"):
|
||||
plugins = get_modules("QQChannelChatGPT/addons/plugins")
|
||||
return plugins
|
||||
else:
|
||||
return None
|
||||
except BaseException as e:
|
||||
raise e
|
||||
|
||||
def check_plugin_dept_update(cached_plugins: RegisteredPlugins, target_plugin: str = None):
|
||||
plugin_dir = get_plugin_store_path()
|
||||
if not os.path.exists(plugin_dir):
|
||||
return False
|
||||
to_update = []
|
||||
if target_plugin:
|
||||
to_update.append(target_plugin)
|
||||
else:
|
||||
for p in cached_plugins:
|
||||
to_update.append(p.root_dir_name)
|
||||
for p in to_update:
|
||||
plugin_path = os.path.join(plugin_dir, p)
|
||||
if os.path.exists(os.path.join(plugin_path, "requirements.txt")):
|
||||
pth = os.path.join(plugin_path, "requirements.txt")
|
||||
logger.info(f"正在检查更新插件 {p} 的依赖: {pth}")
|
||||
update_plugin_dept(os.path.join(plugin_path, "requirements.txt"))
|
||||
|
||||
def plugin_reload(cached_plugins: dict, target: str = None, all: bool = False):
|
||||
|
||||
def has_init_param(cls, param_name):
|
||||
try:
|
||||
# 获取 __init__ 方法的签名
|
||||
init_signature = inspect.signature(cls.__init__)
|
||||
|
||||
# 检查参数名是否在签名中
|
||||
return param_name in init_signature.parameters
|
||||
except (AttributeError, ValueError):
|
||||
# 如果类没有 __init__ 方法或者无法获取签名
|
||||
return False
|
||||
|
||||
def plugin_reload(ctx: GlobalObject):
|
||||
cached_plugins = ctx.cached_plugins
|
||||
plugins = get_plugin_modules()
|
||||
if plugins is None:
|
||||
return False, "未找到任何插件模块"
|
||||
fail_rec = ""
|
||||
|
||||
registered_map = {}
|
||||
for p in cached_plugins:
|
||||
registered_map[p.module_path] = None
|
||||
|
||||
for plugin in plugins:
|
||||
try:
|
||||
p = plugin['module']
|
||||
module_path = plugin['module_path']
|
||||
root_dir_name = plugin['pname']
|
||||
if p not in cached_plugins or p == target or all:
|
||||
module = __import__("addons.plugins." + root_dir_name + "." + p, fromlist=[p])
|
||||
if p in cached_plugins:
|
||||
module = importlib.reload(module)
|
||||
cls = get_classes(p, module)
|
||||
|
||||
check_plugin_dept_update(cached_plugins, root_dir_name)
|
||||
|
||||
module = __import__("addons.plugins." +
|
||||
root_dir_name + "." + p, fromlist=[p])
|
||||
|
||||
cls = get_classes(p, module)
|
||||
|
||||
try:
|
||||
# 尝试传入 ctx
|
||||
obj = getattr(module, cls[0])(ctx=ctx)
|
||||
except:
|
||||
obj = getattr(module, cls[0])()
|
||||
try:
|
||||
info = obj.info()
|
||||
|
||||
metadata = None
|
||||
try:
|
||||
info = obj.info()
|
||||
if isinstance(info, dict):
|
||||
if 'name' not in info or 'desc' not in info or 'version' not in info or 'author' not in info:
|
||||
fail_rec += f"载入插件{p}失败,原因: 插件信息不完整\n"
|
||||
fail_rec += f"注册插件 {module_path} 失败,原因: 插件信息不完整\n"
|
||||
continue
|
||||
if isinstance(info, dict) == False:
|
||||
fail_rec += f"载入插件{p}失败,原因: 插件信息格式不正确\n"
|
||||
continue
|
||||
except BaseException as e:
|
||||
fail_rec += f"调用插件{p} info失败, 原因: {str(e)}\n"
|
||||
else:
|
||||
metadata = PluginMetadata(
|
||||
plugin_name=info['name'],
|
||||
plugin_type=PluginType.COMMON if 'plugin_type' not in info else PluginType(info['plugin_type']),
|
||||
author=info['author'],
|
||||
desc=info['desc'],
|
||||
version=info['version'],
|
||||
repo=info['repo'] if 'repo' in info else None
|
||||
)
|
||||
elif isinstance(info, PluginMetadata):
|
||||
metadata = info
|
||||
else:
|
||||
fail_rec += f"注册插件 {module_path} 失败,原因: info 函数返回值类型错误\n"
|
||||
continue
|
||||
cached_plugins[info['name']] = {
|
||||
"module": module,
|
||||
"clsobj": obj,
|
||||
"info": info,
|
||||
"name": info['name'],
|
||||
"root_dir_name": root_dir_name,
|
||||
}
|
||||
except BaseException as e:
|
||||
fail_rec += f"注册插件 {module_path} 失败, 原因: {str(e)}\n"
|
||||
continue
|
||||
|
||||
if module_path not in registered_map:
|
||||
cached_plugins.append(RegisteredPlugin(
|
||||
metadata=metadata,
|
||||
plugin_instance=obj,
|
||||
module=module,
|
||||
module_path=module_path,
|
||||
root_dir_name=root_dir_name
|
||||
))
|
||||
except BaseException as e:
|
||||
traceback.print_exc()
|
||||
fail_rec += f"加载{p}插件出现问题,原因 {str(e)}\n"
|
||||
if fail_rec == "":
|
||||
return True, None
|
||||
else:
|
||||
return False, fail_rec
|
||||
|
||||
def update_plugin_dept(path):
|
||||
mirror = "https://mirrors.aliyun.com/pypi/simple/"
|
||||
py = sys.executable
|
||||
os.system(f"{py} -m pip install -r {path} -i {mirror} --quiet")
|
||||
|
||||
def install_plugin(repo_url: str, cached_plugins: dict):
|
||||
|
||||
def install_plugin(repo_url: str, ctx: GlobalObject):
|
||||
ppath = get_plugin_store_path()
|
||||
# 删除末尾的 /
|
||||
if repo_url.endswith("/"):
|
||||
@@ -115,41 +189,50 @@ def install_plugin(repo_url: str, cached_plugins: dict):
|
||||
d = repo_url.split("/")[-1]
|
||||
# 转换非法字符:-
|
||||
d = d.replace("-", "_")
|
||||
d = d.lower() # 转换为小写
|
||||
# 创建文件夹
|
||||
plugin_path = os.path.join(ppath, d)
|
||||
if os.path.exists(plugin_path):
|
||||
remove_dir(plugin_path)
|
||||
Repo.clone_from(repo_url, to_path=plugin_path, branch='master')
|
||||
# 读取插件的requirements.txt
|
||||
if os.path.exists(os.path.join(plugin_path, "requirements.txt")):
|
||||
if pipmain(['install', '-r', os.path.join(plugin_path, "requirements.txt"), '--quiet']) != 0:
|
||||
raise Exception("插件的依赖安装失败, 需要您手动 pip 安装对应插件的依赖。")
|
||||
ok, err = plugin_reload(cached_plugins, target=d)
|
||||
if not ok: raise Exception(err)
|
||||
ok, err = plugin_reload(ctx)
|
||||
if not ok:
|
||||
raise Exception(err)
|
||||
|
||||
def uninstall_plugin(plugin_name: str, cached_plugins: dict):
|
||||
if plugin_name not in cached_plugins:
|
||||
|
||||
def get_registered_plugin(plugin_name: str, cached_plugins: RegisteredPlugins) -> RegisteredPlugin:
|
||||
ret = None
|
||||
for p in cached_plugins:
|
||||
if p.metadata.plugin_name == plugin_name:
|
||||
ret = p
|
||||
break
|
||||
return ret
|
||||
|
||||
|
||||
def uninstall_plugin(plugin_name: str, ctx: GlobalObject):
|
||||
plugin = get_registered_plugin(plugin_name, ctx.cached_plugins)
|
||||
if not plugin:
|
||||
raise Exception("插件不存在。")
|
||||
root_dir_name = cached_plugins[plugin_name]["root_dir_name"]
|
||||
root_dir_name = plugin.root_dir_name
|
||||
ppath = get_plugin_store_path()
|
||||
del cached_plugins[plugin_name]
|
||||
ctx.cached_plugins.remove(plugin)
|
||||
if not remove_dir(os.path.join(ppath, root_dir_name)):
|
||||
raise Exception("移除插件成功,但是删除插件文件夹失败。您可以手动删除该文件夹,位于 addons/plugins/ 下。")
|
||||
|
||||
def update_plugin(plugin_name: str, cached_plugins: dict):
|
||||
if plugin_name not in cached_plugins:
|
||||
|
||||
def update_plugin(plugin_name: str, ctx: GlobalObject):
|
||||
plugin = get_registered_plugin(plugin_name, ctx.cached_plugins)
|
||||
if not plugin:
|
||||
raise Exception("插件不存在。")
|
||||
ppath = get_plugin_store_path()
|
||||
root_dir_name = cached_plugins[plugin_name]["root_dir_name"]
|
||||
root_dir_name = plugin.root_dir_name
|
||||
plugin_path = os.path.join(ppath, root_dir_name)
|
||||
repo = Repo(path = plugin_path)
|
||||
repo = Repo(path=plugin_path)
|
||||
repo.remotes.origin.pull()
|
||||
# 读取插件的requirements.txt
|
||||
if os.path.exists(os.path.join(plugin_path, "requirements.txt")):
|
||||
if pipmain(['install', '-r', os.path.join(plugin_path, "requirements.txt"), '--quiet']) != 0:
|
||||
raise Exception("插件依赖安装失败, 需要您手动pip安装对应插件的依赖。")
|
||||
ok, err = plugin_reload(cached_plugins, target=plugin_name)
|
||||
if not ok: raise Exception(err)
|
||||
ok, err = plugin_reload(ctx)
|
||||
if not ok:
|
||||
raise Exception(err)
|
||||
|
||||
|
||||
def remove_dir(file_path) -> bool:
|
||||
try_cnt = 50
|
||||
@@ -163,4 +246,4 @@ def remove_dir(file_path) -> bool:
|
||||
err_file_path = str(e).split("\'", 2)[1]
|
||||
if os.path.exists(err_file_path):
|
||||
os.chmod(err_file_path, stat.S_IWUSR)
|
||||
try_cnt -= 1
|
||||
try_cnt -= 1
|
||||
|
||||
util/search_engine_scraper/bing.py (new file, 38 lines added)
@@ -0,0 +1,38 @@
|
||||
from typing import List
|
||||
|
||||
try:
|
||||
from util.search_engine_scraper.engine import SearchEngine, SearchResult
|
||||
from util.search_engine_scraper.config import HEADERS, USER_AGENT_BING
|
||||
except ImportError:
|
||||
from engine import SearchEngine, SearchResult
|
||||
from config import HEADERS, USER_AGENT_BING
|
||||
|
||||
class Bing(SearchEngine):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.base_url = "https://www.bing.com"
|
||||
self.headers.update({'User-Agent': USER_AGENT_BING})
|
||||
|
||||
def _set_selector(self, selector: str):
|
||||
selectors = {
|
||||
'url': 'div.b_attribution cite',
|
||||
'title': 'h2',
|
||||
'text': 'p',
|
||||
'links': 'ol#b_results > li.b_algo',
|
||||
'next': 'div#b_content nav[role="navigation"] a.sb_pagN'
|
||||
}
|
||||
return selectors[selector]
|
||||
|
||||
async def _get_next_page(self, query) -> str:
|
||||
if self.page == 1:
|
||||
await self._get_html(self.base_url)
|
||||
url = f'{self.base_url}/search?q={query}&form=QBLH&sp=-1&lq=0&pq=hi&sc=10-2&qs=n&sk=&cvid=DE75965E2D6346D681288933984DE48F&ghsh=0&ghacc=0&ghpl='
|
||||
return await self._get_html(url, None)
|
||||
|
||||
async def search(self, query: str, num_results: int) -> List[SearchResult]:
|
||||
results = await super().search(query, num_results)
|
||||
for result in results:
|
||||
if not isinstance(result.url, str):
|
||||
result.url = result.url.text
|
||||
|
||||
return results
|
||||
util/search_engine_scraper/config.py (new file, 20 lines added)
@@ -0,0 +1,20 @@
|
||||
HEADERS = {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; rv:84.0) Gecko/20100101 Firefox/84.0',
|
||||
'Accept': '*/*',
|
||||
'Connection': 'keep-alive',
|
||||
'Accept-Language': 'en-GB,en;q=0.5'
|
||||
}
|
||||
|
||||
USER_AGENT_BING = 'Mozilla/5.0 (Windows NT 6.1; rv:84.0) Gecko/20100101 Firefox/84.0'
|
||||
USER_AGENTS = [
|
||||
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36',
|
||||
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
|
||||
'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0',
|
||||
'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:88.0) Gecko/20100101 Firefox/88.0',
|
||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36',
|
||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
|
||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Version/14.1.2 Safari/537.36',
|
||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Version/14.1 Safari/537.36',
|
||||
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0',
|
||||
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:88.0) Gecko/20100101 Firefox/88.0'
|
||||
]
|
||||
util/search_engine_scraper/engine.py (new file, 73 lines added)
@@ -0,0 +1,73 @@
|
||||
import random
|
||||
try:
|
||||
from util.search_engine_scraper.config import HEADERS, USER_AGENTS
|
||||
except ImportError:
|
||||
from config import HEADERS, USER_AGENTS
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from aiohttp import ClientSession
|
||||
from dataclasses import dataclass
|
||||
from typing import List
|
||||
|
||||
|
||||
@dataclass
|
||||
class SearchResult():
|
||||
title: str
|
||||
url: str
|
||||
snippet: str
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.title} - {self.url}\n{self.snippet}"
|
||||
|
||||
class SearchEngine():
|
||||
'''
|
||||
搜索引擎爬虫基类
|
||||
'''
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.TIMEOUT = 10
|
||||
self.page = 1
|
||||
self.headers = HEADERS
|
||||
|
||||
def _set_selector(self, selector: str) -> None:
|
||||
raise NotImplementedError()
|
||||
|
||||
def _get_next_page(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
async def _get_html(self, url: str, data: dict = None) -> str:
|
||||
headers = self.headers
|
||||
headers["Referer"] = url
|
||||
headers["User-Agent"] = random.choice(USER_AGENTS)
|
||||
if data:
|
||||
async with ClientSession() as session:
|
||||
async with session.post(url, headers=headers, data=data, timeout=self.TIMEOUT) as resp:
|
||||
return await resp.text(encoding="utf-8")
|
||||
else:
|
||||
async with ClientSession() as session:
|
||||
async with session.get(url, headers=headers, timeout=self.TIMEOUT) as resp:
|
||||
return await resp.text(encoding="utf-8")
|
||||
|
||||
|
||||
def tidy_text(self, text: str) -> str:
|
||||
'''
|
||||
清理文本,去除空格、换行符等
|
||||
'''
|
||||
return text.strip().replace("\n", " ").replace("\r", " ").replace(" ", " ")
|
||||
|
||||
|
||||
async def search(self, query: str, num_results: int) -> List[SearchResult]:
|
||||
try:
|
||||
resp = await self._get_next_page(query)
|
||||
soup = BeautifulSoup(resp, 'html.parser')
|
||||
links = soup.select(self._set_selector('links'))
|
||||
results = []
|
||||
for link in links:
|
||||
title = self.tidy_text(link.select_one(self._set_selector('title')).text)
|
||||
url = link.select_one(self._set_selector('url'))
|
||||
snippet = ''
|
||||
if title and url:
|
||||
results.append(SearchResult(title=title, url=url, snippet=snippet))
|
||||
return results[:num_results] if len(results) > num_results else results
|
||||
except Exception as e:
|
||||
raise e
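Adding another engine only requires the CSS selectors and the results-page URL; a hypothetical sketch (the DuckDuckGo selectors below are made up for illustration):

```python
class DuckDuckGoHTML(SearchEngine):
    def __init__(self) -> None:
        super().__init__()
        self.base_url = "https://html.duckduckgo.com"

    def _set_selector(self, selector: str):
        # Illustrative selectors only; a real engine would also post-process
        # result.url in search(), as Bing and Sogo do below.
        selectors = {
            'url': 'a.result__a',
            'title': 'a.result__a',
            'text': 'a.result__snippet',
            'links': 'div.result',
            'next': ''
        }
        return selectors[selector]

    async def _get_next_page(self, query) -> str:
        return await self._get_html(f"{self.base_url}/html/?q={query}", None)
```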
|
||||
util/search_engine_scraper/google.py (new file, 27 lines added)
@@ -0,0 +1,27 @@
|
||||
import os
|
||||
from googlesearch import search
|
||||
|
||||
try:
|
||||
from util.search_engine_scraper.engine import SearchEngine, SearchResult
|
||||
from util.search_engine_scraper.config import HEADERS, USER_AGENTS
|
||||
except ImportError:
|
||||
from engine import SearchEngine, SearchResult
|
||||
from config import HEADERS, USER_AGENTS
|
||||
|
||||
from typing import List
|
||||
|
||||
class Google(SearchEngine):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.proxy = os.environ.get("HTTPS_PROXY")
|
||||
|
||||
async def search(self, query: str, num_results: int) -> List[SearchResult]:
|
||||
results = []
|
||||
try:
|
||||
print("use proxy:", self.proxy)
|
||||
ls = search(query, advanced=True, num_results=num_results, timeout=3, proxy=self.proxy)
|
||||
for i in ls:
|
||||
results.append(SearchResult(title=i.title, url=i.url, snippet=i.description))
|
||||
except Exception as e:
|
||||
raise e
|
||||
return results
|
||||
util/search_engine_scraper/sogo.py (new file, 49 lines added)
@@ -0,0 +1,49 @@
|
||||
import random, re
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
try:
|
||||
from util.search_engine_scraper.engine import SearchEngine, SearchResult
|
||||
from util.search_engine_scraper.config import HEADERS, USER_AGENTS
|
||||
except ImportError:
|
||||
from engine import SearchEngine, SearchResult
|
||||
from config import HEADERS, USER_AGENTS
|
||||
|
||||
from typing import List
|
||||
|
||||
class Sogo(SearchEngine):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.base_url = "https://www.sogou.com"
|
||||
self.headers['User-Agent'] = random.choice(USER_AGENTS)
|
||||
|
||||
|
||||
def _set_selector(self, selector: str):
|
||||
selectors = {
|
||||
'url': 'h3 > a',
|
||||
'title': 'h3',
|
||||
'text': '',
|
||||
'links': 'div.results > div.vrwrap:not(.middle-better-hintBox)',
|
||||
'next': ''
|
||||
}
|
||||
return selectors[selector]
|
||||
|
||||
async def _get_next_page(self, query) -> str:
|
||||
url = f'{self.base_url}/web?query={query}'
|
||||
return await self._get_html(url, None)
|
||||
|
||||
async def search(self, query: str, num_results: int) -> List[SearchResult]:
|
||||
results = await super().search(query, num_results)
|
||||
for result in results:
|
||||
result.url = result.url.get("href")
|
||||
if result.url.startswith("/link?"):
|
||||
result.url = self.base_url + result.url
|
||||
result.url = await self._parse_url(result.url)
|
||||
return results
|
||||
|
||||
async def _parse_url(self, url) -> str:
|
||||
html = await self._get_html(url)
|
||||
soup = BeautifulSoup(html, 'html.parser')
|
||||
script = soup.find("script")
|
||||
if script:
|
||||
url = re.search(r'window.location.replace\("(.+?)"\)', script.string).group(1)
|
||||
return url
|
||||
util/search_engine_scraper/test.py (new file, 22 lines added)
@@ -0,0 +1,22 @@
|
||||
from sogo import Sogo
|
||||
from bing import Bing
|
||||
|
||||
sogo_search = Sogo()
|
||||
bing_search = Bing()
|
||||
async def search(keyword: str) -> str:
|
||||
results = await sogo_search.search(keyword, 5)
|
||||
# results = await bing_search.search(keyword, 5)
|
||||
ret = ""
|
||||
if len(results) == 0:
|
||||
return "没有搜索到结果"
|
||||
|
||||
idx = 1
|
||||
for i in results:
|
||||
ret += f"{idx}. {i.title}({i.url})\n{i.snippet}\n\n"
|
||||
idx += 1
|
||||
|
||||
return ret
|
||||
|
||||
import asyncio
|
||||
ret = asyncio.run(search("gpt4orelease"))
|
||||
print(ret)
|
||||
util/updator.py (new file, 171 lines added)
@@ -0,0 +1,171 @@
|
||||
has_git = True
|
||||
try:
|
||||
import git.exc
|
||||
from git.repo import Repo
|
||||
except BaseException as e:
|
||||
has_git = False
|
||||
import sys, os
|
||||
import requests
|
||||
import psutil
|
||||
from type.config import VERSION
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
|
||||
|
||||
|
||||
def terminate_child_processes():
|
||||
try:
|
||||
parent = psutil.Process(os.getpid())
|
||||
children = parent.children(recursive=True)
|
||||
logger.info(f"正在终止 {len(children)} 个子进程。")
|
||||
for child in children:
|
||||
logger.info(f"正在终止子进程 {child.pid}")
|
||||
child.terminate()
|
||||
try:
|
||||
child.wait(timeout=3)
|
||||
except psutil.NoSuchProcess:
|
||||
continue
|
||||
except psutil.TimeoutExpired:
|
||||
logger.info(f"子进程 {child.pid} 没有被正常终止, 正在强行杀死。")
|
||||
child.kill()
|
||||
except psutil.NoSuchProcess:
|
||||
pass
|
||||
|
||||
def _reboot():
|
||||
py = sys.executable
|
||||
terminate_child_processes()
|
||||
os.execl(py, py, *sys.argv)
|
||||
|
||||
def find_repo() -> Repo:
|
||||
if not has_git:
|
||||
raise Exception("未安装 GitPython 库,无法进行更新。")
|
||||
repo = None
|
||||
|
||||
# 由于项目更名过,因此这里需要多次尝试。
|
||||
try:
|
||||
repo = Repo()
|
||||
except git.exc.InvalidGitRepositoryError:
|
||||
try:
|
||||
repo = Repo(path="QQChannelChatGPT")
|
||||
except git.exc.InvalidGitRepositoryError:
|
||||
repo = Repo(path="AstrBot")
|
||||
if not repo:
|
||||
raise Exception("在已知的目录下未找到项目位置。请联系项目维护者。")
|
||||
return repo
|
||||
|
||||
def request_release_info(latest: bool = True) -> list:
|
||||
'''
|
||||
请求版本信息。
|
||||
返回一个列表,每个元素是一个字典,包含版本号、发布时间、更新内容、commit hash等信息。
|
||||
'''
|
||||
api_url1 = "https://api.github.com/repos/Soulter/AstrBot/releases"
|
||||
api_url2 = "https://api.soulter.top/releases" # 0-10 分钟的缓存时间
|
||||
try:
|
||||
result = requests.get(api_url2).json()
|
||||
except BaseException as e:
|
||||
result = requests.get(api_url1).json()
|
||||
try:
|
||||
if latest:
|
||||
ret = github_api_release_parser([result[0]])
|
||||
else:
|
||||
ret = github_api_release_parser(result)
|
||||
except BaseException as e:
|
||||
raise Exception(f"解析版本信息失败: {result}")
|
||||
return ret
|
||||
|
||||
def github_api_release_parser(releases: list) -> list:
|
||||
'''
|
||||
解析 GitHub API 返回的 releases 信息。
|
||||
返回一个列表,每个元素是一个字典,包含版本号、发布时间、更新内容、commit hash等信息。
|
||||
'''
|
||||
ret = []
|
||||
for release in releases:
|
||||
version = release['name']
|
||||
commit_hash = ''
|
||||
# 规范是: v3.0.7.xxxxxx,其中xxxxxx为 commit hash
|
||||
_t = version.split(".")
|
||||
if len(_t) == 4:
|
||||
commit_hash = _t[3]
|
||||
ret.append({
|
||||
"version": release['name'],
|
||||
"published_at": release['published_at'],
|
||||
"body": release['body'],
|
||||
"commit_hash": commit_hash,
|
||||
"tag_name": release['tag_name']
|
||||
})
|
||||
return ret
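A worked example of the release-name convention parsed above (the values are made up):

```python
# "v3.1.2.a1b2c3d" -> 4 dot-separated parts, the last one is the short commit hash
name = "v3.1.2.a1b2c3d"
parts = name.split(".")
commit_hash = parts[3] if len(parts) == 4 else ""

# check_update() then compares it against the local HEAD with startswith():
print("a1b2c3d9e8f7".startswith(commit_hash))  # True -> already up to date
```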
|
||||
|
||||
def check_update() -> str:
|
||||
repo = find_repo()
|
||||
curr_commit = repo.commit().hexsha
|
||||
update_data = request_release_info()
|
||||
new_commit = update_data[0]['commit_hash']
|
||||
print(f"当前版本: {curr_commit}")
|
||||
print(f"最新版本: {new_commit}")
|
||||
if curr_commit.startswith(new_commit):
|
||||
return f"当前已经是最新版本: v{VERSION}"
|
||||
else:
|
||||
update_info = f"""> 有新版本可用,请及时更新。
|
||||
# 当前版本
|
||||
v{VERSION}
|
||||
|
||||
# 最新版本
|
||||
{update_data[0]['version']}
|
||||
|
||||
# 发布时间
|
||||
{update_data[0]['published_at']}
|
||||
|
||||
# 更新内容
|
||||
---
|
||||
{update_data[0]['body']}
|
||||
---"""
|
||||
return update_info
|
||||
|
||||
def update_project(update_data: list,
|
||||
reboot: bool = False,
|
||||
latest: bool = True,
|
||||
version: str = ''):
|
||||
repo = find_repo()
|
||||
# update_data = request_release_info(latest)
|
||||
if latest:
|
||||
# 检查本地commit和最新commit是否一致
|
||||
curr_commit = repo.head.commit.hexsha
|
||||
new_commit = update_data[0]['commit_hash']
|
||||
if curr_commit == '':
|
||||
raise Exception("无法获取当前版本号对应的版本位置。请联系项目维护者。")
|
||||
if curr_commit.startswith(new_commit):
|
||||
raise Exception("当前已经是最新版本。")
|
||||
else:
|
||||
# 更新到最新版本对应的commit
|
||||
try:
|
||||
repo.git.fetch()
|
||||
repo.git.checkout(update_data[0]['tag_name'], "-f")
|
||||
if reboot: _reboot()
|
||||
except BaseException as e:
|
||||
raise e
|
||||
else:
|
||||
# 更新到指定版本
|
||||
flag = False
|
||||
print(f"请求更新到指定版本: {version}")
|
||||
for data in update_data:
|
||||
if data['tag_name'] == version:
|
||||
try:
|
||||
repo.git.fetch()
|
||||
repo.git.checkout(data['tag_name'], "-f")
|
||||
flag = True
|
||||
if reboot: _reboot()
|
||||
except BaseException as e:
|
||||
raise e
|
||||
if not flag:
|
||||
raise Exception("未找到指定版本。")
|
||||
|
||||
def checkout_branch(branch_name: str):
|
||||
repo = find_repo()
|
||||
try:
|
||||
repo.git.fetch()
|
||||
repo.git.checkout(branch_name, "-f")
|
||||
repo.git.pull("origin", branch_name, "-f")
|
||||
return True
|
||||
except BaseException as e:
|
||||
raise e
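How the pieces above fit together, as a hedged sketch:

```python
# Fetch release metadata (newest first), print the human-readable status,
# then check out the latest tag and re-exec the bot process.
releases = request_release_info(latest=True)
print(check_update())
update_project(releases, reboot=True, latest=True)

# Or pin to a specific released tag instead of the latest one:
# update_project(request_release_info(latest=False), reboot=True, latest=False, version="v3.1.2")
```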
|
||||
@@ -1,25 +0,0 @@
|
||||
from flask import Flask
|
||||
from threading import Thread
|
||||
import datetime
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
|
||||
@app.route('/')
|
||||
def main_func():
|
||||
content = "<h1>QQChannelChatGPT Web APP</h1>"
|
||||
|
||||
content += "<p>" + "Online @ " + str(datetime.datetime.now()) + "</p>"
|
||||
content += "<p>欢迎Star本项目!!!</p>"
|
||||
return content
|
||||
|
||||
|
||||
def run():
|
||||
app.run(host="0.0.0.0", port=8080)
|
||||
|
||||
|
||||
|
||||
|
||||
def keep_alive():
|
||||
server = Thread(target=run)
|
||||
server.start()
|
||||