Compare commits

...

24 Commits

Author SHA1 Message Date
Soulter
235e0b9b8f fix: gocq logging 2024-05-09 13:24:31 +08:00
Soulter
d5a9bed8a4 fix(updator): IterableList object has no attribute origin 2024-05-08 19:18:21 +08:00
Soulter
d7dc8a7612 chore: add some logging; bump version 2024-05-08 19:12:23 +08:00
Soulter
08cd3ca40c perf: better log output; fix: fix 404 when refreshing the dashboard 2024-05-08 19:01:36 +08:00
Soulter
a13562dcea fix: fix "config file missing" error when the launcher loads plugins that ship with a config 2024-05-08 16:28:30 +08:00
Soulter
d7a0c0d1d0 Update requirements.txt 2024-05-07 15:58:51 +08:00
Soulter
c0729b2d29 fix: fix plugin reload issues 2024-04-22 19:04:15 +08:00
Soulter
a80f474290 fix: fix errors when updating plugins 2024-04-22 18:36:56 +08:00
Soulter
699207dd54 update: version 2024-04-21 22:41:48 +08:00
Soulter
e7708010c9 fix: fix inability to reply to messages on the gocq platform 2024-04-21 22:39:09 +08:00
Soulter
f66091e08f 🎨: clean up code 2024-04-21 22:20:23 +08:00
Soulter
03bb932f8f fix: fix dashboard errors 2024-04-21 22:16:42 +08:00
Soulter
fbf8b349e0 update: helloworld 2024-04-21 22:13:27 +08:00
Soulter
e9278fce6a !! delete: remove all support for reverse-engineered ChatGPT. 2024-04-21 22:12:09 +08:00
Soulter
9a7db956d5 fix: fix error caused by the readability dependency on 3.10.x 2024-04-21 16:40:02 +08:00
Soulter
13196dd667 perf: change package paths 2024-03-15 14:49:44 +08:00
Soulter
52b80e24d2 Merge remote-tracking branch 'refs/remotes/origin/master' 2024-03-15 14:29:48 +08:00
Soulter
7dff87e65d fix: fix inability to update to a specified version 2024-03-15 14:29:28 +08:00
Soulter
31ee64d1b2 Update docker-image.yml 2024-03-15 14:11:57 +08:00
Soulter
8e865b6918 fix: fix update not working when no LLM is configured 2024-03-15 14:05:16 +08:00
Soulter
66f91e5832 update: bump version number 2024-03-15 13:50:57 +08:00
Soulter
cd2d368f9c fix: fix updating to a specified version from the dashboard 2024-03-15 13:48:14 +08:00
Soulter
7736c1c9bd feat: official QQ bot API can optionally receive QQ group messages 2024-03-15 13:44:18 +08:00
Soulter
6728c0b7b5 chore: rename packages 2024-03-15 13:37:51 +08:00
56 changed files with 948 additions and 1282 deletions

View File

@@ -14,6 +14,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build image
         run: |
+          git clone https://github.com/Soulter/AstrBot
+          cd AstrBot
           docker build -t ${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:latest .
       - name: Publish image
         run: |

View File

@@ -160,7 +160,6 @@
 - `/key` 动态添加key
 - `/set` 人格设置面板
 - `/keyword nihao 你好` 设置关键词回复。nihao->你好
-- `/revgpt` 切换为ChatGPT逆向库
 - `/画` 画画
 #### 逆向ChatGPT库语言模型

View File

@@ -1,14 +1,17 @@
 from aip import AipContentCensor

 class BaiduJudge:
     def __init__(self, baidu_configs) -> None:
         if 'app_id' in baidu_configs and 'api_key' in baidu_configs and 'secret_key' in baidu_configs:
             self.app_id = str(baidu_configs['app_id'])
             self.api_key = baidu_configs['api_key']
             self.secret_key = baidu_configs['secret_key']
-            self.client = AipContentCensor(self.app_id, self.api_key, self.secret_key)
+            self.client = AipContentCensor(
+                self.app_id, self.api_key, self.secret_key)
         else:
             raise ValueError("Baidu configs error! 请填写百度内容审核服务相关配置!")

     def judge(self, text):
         res = self.client.textCensorUserDefined(text)
         if 'conclusionType' not in res:

View File

@@ -1 +1 @@
(minified BaseBreadcrumb chunk rebuilt; only the hashed bundle reference changed: index 9075b0bb → dc96e1be)

View File

@@ -1 +1 @@
(minified BlankLayout chunk rebuilt; only the hashed bundle reference changed: index 9075b0bb → dc96e1be)

View File

@@ -1 +1 @@
(minified ColorPage chunk rebuilt; only hashed chunk references changed: BaseBreadcrumb 3bf6ea80 → e31f96f8, UiParentCard 73bcbbd5 → f2b2db58, index 9075b0bb → dc96e1be)

View File

@@ -1 +1 @@
(minified ConfigDetailCard chunk rebuilt; only hashed chunk references changed: index 9075b0bb → dc96e1be, UiParentCard 73bcbbd5 → f2b2db58)

View File

@@ -1 +1 @@
(minified ConfigPage chunk rebuilt; only hashed chunk references changed: UiParentCard 73bcbbd5 → f2b2db58, index 9075b0bb → dc96e1be, ConfigDetailCard d45b9ca7 → 8467c848)

View File

@@ -1 +1 @@
(minified error-page ("Something is wrong") chunk rebuilt; only the hashed bundle reference changed: index 9075b0bb → dc96e1be)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +1,4 @@
(minified vee-validate v4.11.3 chunk rebuilt; only hashed chunk references changed: LogoDark b3521e6c → 7df35c25, index 9075b0bb → dc96e1be, md5 48eb25f3 → 45627dcb)

View File

@@ -1 +1 @@
(minified LogoDark/customizer-store chunk rebuilt; only the hashed bundle reference changed: index 9075b0bb → dc96e1be)

View File

@@ -1 +1 @@
(minified MaterialIcons chunk rebuilt; only hashed chunk references changed: BaseBreadcrumb 3bf6ea80 → e31f96f8, UiParentCard 73bcbbd5 → f2b2db58, index 9075b0bb → dc96e1be)

View File

@@ -1 +1 @@
(minified RegisterPage chunk rebuilt; only hashed chunk references changed: LogoDark b3521e6c → 7df35c25, index 9075b0bb → dc96e1be)

View File

@@ -1 +1 @@
(minified ShadowPage chunk rebuilt; only hashed chunk references changed: BaseBreadcrumb 3bf6ea80 → e31f96f8, UiParentCard 73bcbbd5 → f2b2db58, index 9075b0bb → dc96e1be)

View File

@@ -1 +1 @@
(minified TablerIcons chunk rebuilt; only hashed chunk references changed: BaseBreadcrumb 3bf6ea80 → e31f96f8, UiParentCard 73bcbbd5 → f2b2db58, index 9075b0bb → dc96e1be)

View File

@@ -1 +1 @@
(minified TypographyPage chunk rebuilt; only hashed chunk references changed: BaseBreadcrumb 3bf6ea80 → e31f96f8, UiParentCard 73bcbbd5 → f2b2db58, index 9075b0bb → dc96e1be)

View File

@@ -1 +1 @@
(minified UiParentCard chunk rebuilt; only the hashed bundle reference changed: index 9075b0bb → dc96e1be)

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +1,4 @@
(minified js-md5 chunk rebuilt; only the hashed bundle reference changed: index 9075b0bb → dc96e1be)

View File

@@ -11,7 +11,7 @@
href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=Poppins:wght@400;500;600;700&family=Roboto:wght@400;500;700&display=swap" href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=Poppins:wght@400;500;600;700&family=Roboto:wght@400;500;700&display=swap"
/> />
<title>AstrBot - 仪表盘</title> <title>AstrBot - 仪表盘</title>
<script type="module" crossorigin src="/assets/index-9075b0bb.js"></script> <script type="module" crossorigin src="/assets/index-dc96e1be.js"></script>
<link rel="stylesheet" href="/assets/index-0f1523f3.css"> <link rel="stylesheet" href="/assets/index-0f1523f3.css">
</head> </head>
<body> <body>

View File

@@ -11,22 +11,27 @@ import threading
 import time
 import asyncio
+from util.plugin_dev.api.v1.config import update_config
+from SparkleLogging.utils.core import LogManager
+from logging import Logger
+
+logger: Logger = LogManager.GetLogger(log_name='astrbot-core')

 @dataclass
 class DashBoardConfig():
     config_type: str
     name: Optional[str] = None
     description: Optional[str] = None
     path: Optional[str] = None  # 仅 item 才需要
     body: Optional[list['DashBoardConfig']] = None  # 仅 group 才需要
     value: Optional[Union[list, dict, str, int, bool]] = None  # 仅 item 才需要
     val_type: Optional[str] = None  # 仅 item 才需要

 class DashBoardHelper():
     def __init__(self, global_object, config: dict):
         self.loop = asyncio.new_event_loop()
         asyncio.set_event_loop(self.loop)
-        self.logger = global_object.logger
         dashboard_data = global_object.dashboard_data
         dashboard_data.configs = {
             "data": []
@@ -34,21 +39,23 @@
         self.parse_default_config(dashboard_data, config)
         self.dashboard_data: DashBoardData = dashboard_data
         self.dashboard = AstrBotDashBoard(global_object)
         self.key_map = {}  # key: uuid, value: config key name
         self.cc = CmdConfig()

         @self.dashboard.register("post_configs")
         def on_post_configs(post_configs: dict):
             try:
-                # self.logger.log(f"收到配置更新请求", gu.LEVEL_INFO, tag="可视化面板")
                 if 'base_config' in post_configs:
-                    self.save_config(post_configs['base_config'], namespace='') # 基础配置
-                self.save_config(post_configs['config'], namespace=post_configs['namespace']) # 选定配置
-                self.parse_default_config(self.dashboard_data, self.cc.get_all())
+                    self.save_config(
+                        post_configs['base_config'], namespace='')  # 基础配置
+                self.save_config(
+                    post_configs['config'], namespace=post_configs['namespace'])  # 选定配置
+                self.parse_default_config(
+                    self.dashboard_data, self.cc.get_all())
                 # 重启
-                threading.Thread(target=self.dashboard.shutdown_bot, args=(2,), daemon=True).start()
+                threading.Thread(target=self.dashboard.shutdown_bot,
+                                 args=(2,), daemon=True).start()
             except Exception as e:
-                # self.logger.log(f"在保存配置时发生错误:{e}", gu.LEVEL_ERROR, tag="可视化面板")
                 raise e

     # 将 config.yaml、 中的配置解析到 dashboard_data.configs 中
@@ -100,6 +107,14 @@
                     value=config['direct_message_mode'],
                     path="direct_message_mode",
                 ),
+                DashBoardConfig(
+                    config_type="item",
+                    val_type="bool",
+                    name="是否接收QQ群消息",
+                    description="需要机器人有相应的群消息接收权限。在 q.qq.com 上查看。",
+                    value=config['qqofficial_enable_group_message'],
+                    path="qqofficial_enable_group_message",
+                ),
             ]
         )
         qq_gocq_platform_group = DashBoardConfig(
@@ -374,47 +389,6 @@
             ]
         )
-        rev_chatgpt_accounts = config['rev_ChatGPT']['account']
-        new_accs = []
-        for i in rev_chatgpt_accounts:
-            if isinstance(i, dict) and 'access_token' in i:
-                new_accs.append(i['access_token'])
-            elif isinstance(i, str):
-                new_accs.append(i)
-        config['rev_ChatGPT']['account'] = new_accs
-        rev_chatgpt_llm_group = DashBoardConfig(
-            config_type="group",
-            name="逆向语言模型服务设置",
-            description="",
-            body=[
-                DashBoardConfig(
-                    config_type="item",
-                    val_type="bool",
-                    name="启用逆向语言模型服务",
-                    description="",
-                    value=config['rev_ChatGPT']['enable'],
-                    path="rev_ChatGPT.enable",
-                ),
-                DashBoardConfig(
-                    config_type="item",
-                    val_type="str",
-                    name="终结点Endpoint地址",
-                    description="逆向服务的终结点服务器的地址。",
-                    value=config['CHATGPT_BASE_URL'],
-                    path="CHATGPT_BASE_URL",
-                ),
-                DashBoardConfig(
-                    config_type="item",
-                    val_type="list",
-                    name="assess_token",
-                    description="assess_token",
-                    value=config['rev_ChatGPT']['account'],
-                    path="rev_ChatGPT.account",
-                ),
-            ]
-        )
         baidu_aip_group = DashBoardConfig(
             config_type="group",
             name="百度内容审核",
@@ -428,9 +402,6 @@
                     value=config['baidu_aip']['enable'],
                     path="baidu_aip.enable"
                 ),
-                # "app_id": null,
-                # "api_key": null,
-                # "secret_key": null
                 DashBoardConfig(
                     config_type="item",
                     val_type="str",
@@ -495,16 +466,14 @@
                 qq_gocq_platform_group,
                 general_platform_detail_group,
                 openai_official_llm_group,
-                rev_chatgpt_llm_group,
                 other_group,
                 baidu_aip_group
             ]
         except Exception as e:
-            self.logger.log(f"配置文件解析错误:{e}", gu.LEVEL_ERROR)
+            logger.error(f"配置文件解析错误:{e}")
             raise e

     def save_config(self, post_config: list, namespace: str):
         '''
         根据 path 解析并保存配置
@@ -525,17 +494,21 @@
                 continue
             if config['val_type'] == "bool":
-                self._write_config(namespace, config['path'], config['value'])
+                self._write_config(
+                    namespace, config['path'], config['value'])
             elif config['val_type'] == "str":
-                self._write_config(namespace, config['path'], config['value'])
+                self._write_config(
+                    namespace, config['path'], config['value'])
             elif config['val_type'] == "int":
                 try:
-                    self._write_config(namespace, config['path'], int(config['value']))
+                    self._write_config(
+                        namespace, config['path'], int(config['value']))
                 except:
                     raise ValueError(f"配置项 {config['name']} 的值必须是整数")
             elif config['val_type'] == "float":
                 try:
-                    self._write_config(namespace, config['path'], float(config['value']))
+                    self._write_config(
+                        namespace, config['path'], float(config['value']))
                 except:
                     raise ValueError(f"配置项 {config['name']} 的值必须是浮点数")
             elif config['val_type'] == "list":
@@ -543,9 +516,11 @@
                     self._write_config(namespace, config['path'], [])
                 elif not isinstance(config['value'], list):
                     raise ValueError(f"配置项 {config['name']} 的值必须是列表")
-                self._write_config(namespace, config['path'], config['value'])
+                self._write_config(
+                    namespace, config['path'], config['value'])
             else:
-                raise NotImplementedError(f"未知或者未实现的配置项类型:{config['val_type']}")
+                raise NotImplementedError(
+                    f"未知或者未实现的配置项类型:{config['val_type']}")

     def _write_config(self, namespace: str, key: str, value):
         if namespace == "" or namespace.startswith("internal_"):

View File

@@ -1,21 +1,25 @@
-from flask import Flask, request
-from flask.logging import default_handler
-from werkzeug.serving import make_server
-from util import general_utils as gu
-from dataclasses import dataclass
-import logging
-from cores.database.conn import dbConn
-from util.cmd_config import CmdConfig
-from util.updator import check_update, update_project, request_release_info
-from cores.qqbot.types import *
 import util.plugin_util as putil
 import websockets
 import json
 import threading
 import asyncio
-import os, sys
+import os
+import sys
 import time
+from flask import Flask, request
+from flask.logging import default_handler
+from werkzeug.serving import make_server
+from util import general_utils as gu
+from dataclasses import dataclass
+from cores.database.conn import dbConn
+from util.cmd_config import CmdConfig
+from util.updator import check_update, update_project, request_release_info
+from cores.astrbot.types import *
+from SparkleLogging.utils.core import LogManager
+from logging import Logger
+
+logger: Logger = LogManager.GetLogger(log_name='astrbot-core')

 @dataclass
 class DashBoardData():
     stats: dict
@@ -23,25 +27,25 @@ class DashBoardData():
logs: dict logs: dict
plugins: List[RegisteredPlugin] plugins: List[RegisteredPlugin]
@dataclass @dataclass
class Response(): class Response():
status: str status: str
message: str message: str
data: dict data: dict
class AstrBotDashBoard(): class AstrBotDashBoard():
def __init__(self, global_object: 'gu.GlobalObject'): def __init__(self, global_object: 'gu.GlobalObject'):
self.global_object = global_object self.global_object = global_object
self.loop = asyncio.get_event_loop() self.loop = asyncio.get_event_loop()
asyncio.set_event_loop(self.loop) asyncio.set_event_loop(self.loop)
self.dashboard_data: DashBoardData = global_object.dashboard_data self.dashboard_data: DashBoardData = global_object.dashboard_data
self.dashboard_be = Flask(__name__, static_folder="dist", static_url_path="/") self.dashboard_be = Flask(
log = logging.getLogger('werkzeug') __name__, static_folder="dist", static_url_path="/")
log.setLevel(logging.ERROR)
self.funcs = {} self.funcs = {}
self.cc = CmdConfig() self.cc = CmdConfig()
self.logger = global_object.logger self.ws_clients = {} # remote_ip: ws
self.ws_clients = {} # remote_ip: ws
# 启动 websocket 服务器 # 启动 websocket 服务器
self.ws_server = websockets.serve(self.__handle_msg, "0.0.0.0", 6186) self.ws_server = websockets.serve(self.__handle_msg, "0.0.0.0", 6186)
@@ -50,6 +54,22 @@ class AstrBotDashBoard():
# 返回页面 # 返回页面
return self.dashboard_be.send_static_file("index.html") return self.dashboard_be.send_static_file("index.html")
@self.dashboard_be.get("/config")
def rt_config():
return self.dashboard_be.send_static_file("index.html")
@self.dashboard_be.get("/logs")
def rt_logs():
return self.dashboard_be.send_static_file("index.html")
@self.dashboard_be.get("/extension")
def rt_extension():
return self.dashboard_be.send_static_file("index.html")
@self.dashboard_be.get("/dashboard/default")
def rt_dashboard():
return self.dashboard_be.send_static_file("index.html")
@self.dashboard_be.post("/api/authenticate") @self.dashboard_be.post("/api/authenticate")
def authenticate(): def authenticate():
username = self.cc.get("dashboard_username", "") username = self.cc.get("dashboard_username", "")
@@ -99,9 +119,11 @@ class AstrBotDashBoard():
# last_24_platform = db_inst.get_last_24h_stat_platform() # last_24_platform = db_inst.get_last_24h_stat_platform()
platforms = db_inst.get_platform_cnt_total() platforms = db_inst.get_platform_cnt_total()
self.dashboard_data.stats["session"] = [] self.dashboard_data.stats["session"] = []
self.dashboard_data.stats["session_total"] = db_inst.get_session_cnt_total() self.dashboard_data.stats["session_total"] = db_inst.get_session_cnt_total(
)
self.dashboard_data.stats["message"] = last_24_message self.dashboard_data.stats["message"] = last_24_message
self.dashboard_data.stats["message_total"] = db_inst.get_message_cnt_total() self.dashboard_data.stats["message_total"] = db_inst.get_message_cnt_total(
)
self.dashboard_data.stats["platform"] = platforms self.dashboard_data.stats["platform"] = platforms
return Response( return Response(
@@ -172,9 +194,9 @@ class AstrBotDashBoard():
post_data = request.json post_data = request.json
repo_url = post_data["url"] repo_url = post_data["url"]
try: try:
self.logger.log(f"正在安装插件 {repo_url}", tag="可视化面板") logger.info(f"正在安装插件 {repo_url}")
putil.install_plugin(repo_url, self.dashboard_data.plugins) putil.install_plugin(repo_url, self.dashboard_data.plugins)
self.logger.log(f"安装插件 {repo_url} 成功", tag="可视化面板") logger.info(f"安装插件 {repo_url} 成功")
return Response( return Response(
status="success", status="success",
message="安装成功~", message="安装成功~",
@@ -192,9 +214,10 @@ class AstrBotDashBoard():
post_data = request.json post_data = request.json
plugin_name = post_data["name"] plugin_name = post_data["name"]
try: try:
self.logger.log(f"正在卸载插件 {plugin_name}", tag="可视化面板") logger.info(f"正在卸载插件 {plugin_name}")
putil.uninstall_plugin(plugin_name, self.dashboard_data.plugins) putil.uninstall_plugin(
self.logger.log(f"卸载插件 {plugin_name} 成功", tag="可视化面板") plugin_name, self.dashboard_data.plugins)
logger.info(f"卸载插件 {plugin_name} 成功")
return Response( return Response(
status="success", status="success",
message="卸载成功~", message="卸载成功~",
@@ -212,9 +235,9 @@ class AstrBotDashBoard():
post_data = request.json post_data = request.json
plugin_name = post_data["name"] plugin_name = post_data["name"]
try: try:
self.logger.log(f"正在更新插件 {plugin_name}", tag="可视化面板") logger.info(f"正在更新插件 {plugin_name}")
putil.update_plugin(plugin_name, self.dashboard_data.plugins) putil.update_plugin(plugin_name, self.dashboard_data.plugins)
self.logger.log(f"更新插件 {plugin_name} 成功", tag="可视化面板") logger.info(f"更新插件 {plugin_name} 成功")
return Response( return Response(
status="success", status="success",
message="更新成功~", message="更新成功~",
@@ -231,7 +254,8 @@ class AstrBotDashBoard():
def log(): def log():
for item in self.ws_clients: for item in self.ws_clients:
try: try:
asyncio.run_coroutine_threadsafe(self.ws_clients[item].send(request.data.decode()), self.loop) asyncio.run_coroutine_threadsafe(
self.ws_clients[item].send(request.data.decode()), self.loop)
except Exception as e: except Exception as e:
pass pass
return 'ok' return 'ok'
@@ -244,7 +268,7 @@ class AstrBotDashBoard():
status="success", status="success",
message=ret, message=ret,
data={ data={
"has_new_version": ret != "当前已经是最新版本。" # 先这样吧,累了=.= "has_new_version": ret != "当前已经是最新版本。" # 先这样吧,累了=.=
} }
).__dict__ ).__dict__
except Exception as e: except Exception as e:
@@ -262,9 +286,9 @@ class AstrBotDashBoard():
version = '' version = ''
else: else:
latest = False latest = False
version = request.json["version"]
try: try:
update_project(request_release_info(), latest=latest, version=version) update_project(request_release_info(latest),
latest=latest, version=version)
threading.Thread(target=self.shutdown_bot, args=(3,)).start() threading.Thread(target=self.shutdown_bot, args=(3,)).start()
return Response( return Response(
status="success", status="success",
@@ -278,6 +302,45 @@ class AstrBotDashBoard():
data=None data=None
).__dict__ ).__dict__
@self.dashboard_be.get("/api/llm/list")
def llm_list():
ret = []
for llm in self.global_object.llms:
ret.append(llm.llm_name)
return Response(
status="success",
message="",
data=ret
).__dict__
@self.dashboard_be.get("/api/llm")
def llm():
text = request.args["text"]
llm = request.args["llm"]
for llm_ in self.global_object.llms:
if llm_.llm_name == llm:
try:
# ret = await llm_.llm_instance.text_chat(text)
ret = asyncio.run_coroutine_threadsafe(
llm_.llm_instance.text_chat(text), self.loop).result()
return Response(
status="success",
message="",
data=ret
).__dict__
except Exception as e:
return Response(
status="error",
message=e.__str__(),
data=None
).__dict__
return Response(
status="error",
message="LLM not found.",
data=None
).__dict__
def shutdown_bot(self, delay_s: int): def shutdown_bot(self, delay_s: int):
time.sleep(delay_s) time.sleep(delay_s)
py = sys.executable py = sys.executable
@@ -285,18 +348,16 @@ class AstrBotDashBoard():
def _get_configs(self, namespace: str): def _get_configs(self, namespace: str):
if namespace == "": if namespace == "":
ret = [self.dashboard_data.configs['data'][5], ret = [self.dashboard_data.configs['data'][4],
self.dashboard_data.configs['data'][6],] self.dashboard_data.configs['data'][5],]
elif namespace == "internal_platform_qq_official": elif namespace == "internal_platform_qq_official":
ret = [self.dashboard_data.configs['data'][0],] ret = [self.dashboard_data.configs['data'][0],]
elif namespace == "internal_platform_qq_gocq": elif namespace == "internal_platform_qq_gocq":
ret = [self.dashboard_data.configs['data'][1],] ret = [self.dashboard_data.configs['data'][1],]
elif namespace == "internal_platform_general": # 全局平台配置 elif namespace == "internal_platform_general": # 全局平台配置
ret = [self.dashboard_data.configs['data'][2],] ret = [self.dashboard_data.configs['data'][2],]
elif namespace == "internal_llm_openai_official": elif namespace == "internal_llm_openai_official":
ret = [self.dashboard_data.configs['data'][3],] ret = [self.dashboard_data.configs['data'][3],]
elif namespace == "internal_llm_rev_chatgpt":
ret = [self.dashboard_data.configs['data'][4],]
else: else:
path = f"data/config/{namespace}.json" path = f"data/config/{namespace}.json"
if not os.path.exists(path): if not os.path.exists(path):
@@ -317,28 +378,28 @@ class AstrBotDashBoard():
''' '''
outline = [ outline = [
{ {
"type": "platform", "type": "platform",
"name": "配置通用消息平台", "name": "配置通用消息平台",
"body": [ "body": [
{ {
"title": "通用", "title": "通用",
"desc": "通用平台配置", "desc": "通用平台配置",
"namespace": "internal_platform_general", "namespace": "internal_platform_general",
"tag": "" "tag": ""
}, },
{ {
"title": "QQ_OFFICIAL", "title": "QQ_OFFICIAL",
"desc": "QQ官方API仅支持频道", "desc": "QQ官方API仅支持频道",
"namespace": "internal_platform_qq_official", "namespace": "internal_platform_qq_official",
"tag": "" "tag": ""
}, },
{ {
"title": "OneBot协议", "title": "OneBot协议",
"desc": "支持cq-http、shamrock等目前仅支持QQ平台", "desc": "支持cq-http、shamrock等目前仅支持QQ平台",
"namespace": "internal_platform_qq_gocq", "namespace": "internal_platform_qq_gocq",
"tag": "" "tag": ""
} }
] ]
}, },
{ {
"type": "llm", "type": "llm",
@@ -349,12 +410,6 @@ class AstrBotDashBoard():
"desc": "也支持使用官方接口的中转服务", "desc": "也支持使用官方接口的中转服务",
"namespace": "internal_llm_openai_official", "namespace": "internal_llm_openai_official",
"tag": "" "tag": ""
},
{
"title": "Rev ChatGPT",
"desc": "早期的逆向ChatGPT不推荐",
"namespace": "internal_llm_rev_chatgpt",
"tag": ""
} }
] ]
} }
@@ -376,21 +431,29 @@ class AstrBotDashBoard():
return func return func
return decorator return decorator
async def get_log_history(self):
try:
with open("logs/astrbot-core/astrbot-core.log", "r", encoding="utf-8") as f:
return f.readlines()[-100:]
except Exception as e:
logger.warning(f"读取日志历史失败: {e.__str__()}")
return []
async def __handle_msg(self, websocket, path): async def __handle_msg(self, websocket, path):
address = websocket.remote_address address = websocket.remote_address
# self.logger.log(f"和 {address} 建立了 websocket 连接", tag="可视化面板")
self.ws_clients[address] = websocket self.ws_clients[address] = websocket
data = ''.join(self.logger.history).replace('\n', '\r\n') data = await self.get_log_history()
data = ''.join(data).replace('\n', '\r\n')
await websocket.send(data) await websocket.send(data)
while True: while True:
try: try:
msg = await websocket.recv() msg = await websocket.recv()
except websockets.exceptions.ConnectionClosedError: except websockets.exceptions.ConnectionClosedError:
# self.logger.log(f"和 {address} 的 websocket 连接已断开", tag="可视化面板") # logger.info(f"和 {address} 的 websocket 连接已断开")
del self.ws_clients[address] del self.ws_clients[address]
break break
except Exception as e: except Exception as e:
# self.logger.log(f"和 {path} 的 websocket 连接发生了错误: {e.__str__()}", tag="可视化面板") # logger.info(f"和 {path} 的 websocket 连接发生了错误: {e.__str__()}")
del self.ws_clients[address] del self.ws_clients[address]
break break
@@ -401,10 +464,12 @@ class AstrBotDashBoard():
def run(self): def run(self):
threading.Thread(target=self.run_ws_server, args=(self.loop,)).start() threading.Thread(target=self.run_ws_server, args=(self.loop,)).start()
self.logger.log("已启动 websocket 服务器", tag="可视化面板") logger.info("已启动 websocket 服务器")
ip_address = gu.get_local_ip_addresses() ip_address = gu.get_local_ip_addresses()
ip_str = f"http://{ip_address}:6185\n\thttp://localhost:6185" ip_str = f"http://{ip_address}:6185\n\thttp://localhost:6185"
self.logger.log(f"\n==================\n您可访问:\n\n\t{ip_str}\n\n来登录可视化面板,默认账号密码为空。\n注意: 所有配置项现已全量迁移至 cmd_config.json 文件下,可登录可视化面板在线修改配置。\n==================\n", tag="可视化面板") logger.info(
http_server = make_server('0.0.0.0', 6185, self.dashboard_be, threaded=True) f"\n==================\n您可访问:\n\n\t{ip_str}\n\n来登录可视化面板,默认账号密码为空。\n注意: 所有配置项现已全量迁移至 cmd_config.json 文件下,可登录可视化面板在线修改配置。\n==================\n")
http_server.serve_forever()
http_server = make_server(
'0.0.0.0', 6185, self.dashboard_be, threaded=True)
http_server.serve_forever()
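For reference, the endpoints touched above can be exercised with a small client. A sketch under the assumptions visible in this diff (Flask HTTP on port 6185, log websocket on port 6186, and the new GET /api/llm and /api/llm/list routes; authentication is ignored here):

# Illustrative client, not part of the repository.
import asyncio
import requests
import websockets

async def tail_logs():
    # the server first sends the recent log history, then pushes new lines
    async with websockets.connect("ws://localhost:6186") as ws:
        while True:
            print(await ws.recv(), end="")

def ask_llm(text: str, llm: str = "openai_official"):
    # available llm names come from /api/llm/list
    r = requests.get("http://localhost:6185/api/llm", params={"text": text, "llm": llm})
    return r.json()  # Response dataclass serialized as {"status", "message", "data"}

# print(requests.get("http://localhost:6185/api/llm/list").json())
# print(ask_llm("你好"))
# asyncio.run(tail_logs())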

View File

@@ -1,34 +1,29 @@
import os import os
import shutil import shutil
from nakuru.entities.components import * from nakuru.entities.components import *
from nakuru import (
GroupMessage,
FriendMessage
)
from botpy.message import Message, DirectMessage
flag_not_support = False flag_not_support = False
try: try:
from util.plugin_dev.api.v1.config import * from util.plugin_dev.api.v1.config import *
from util.plugin_dev.api.v1.bot import ( from util.plugin_dev.api.v1.bot import (
PluginMetadata,
PluginType,
AstrMessageEvent, AstrMessageEvent,
CommandResult, CommandResult,
) )
from util.plugin_dev.api.v1.register import register_llm, unregister_llm
except ImportError: except ImportError:
flag_not_support = True flag_not_support = True
print("llms: 导入接口失败。请升级到 AstrBot 最新版本。") print("导入接口失败。请升级到 AstrBot 最新版本。")
''' '''
注意改插件名噢格式XXXPlugin 或 Main 注意改插件名噢格式XXXPlugin 或 Main
小提示:把此模板仓库 fork 之后 clone 到机器人文件夹下的 addons/plugins/ 目录下,然后用 Pycharm/VSC 等工具打开可获更棒的编程体验(自动补全等) 小提示:把此模板仓库 fork 之后 clone 到机器人文件夹下的 addons/plugins/ 目录下,然后用 Pycharm/VSC 等工具打开可获更棒的编程体验(自动补全等)
''' '''
class HelloWorldPlugin: class HelloWorldPlugin:
""" """
初始化函数, 可以选择直接pass 初始化函数, 可以选择直接pass
""" """
def __init__(self) -> None: def __init__(self) -> None:
# 复制旧配置文件到 data 目录下。 # 复制旧配置文件到 data 目录下。
if os.path.exists("keyword.json"): if os.path.exists("keyword.json"):
@@ -45,6 +40,7 @@ class HelloWorldPlugin:
Tuple: None 或者长度为 3 的元组。如果不响应, 返回 None 如果响应, 第 1 个参数为指令是否调用成功, 第 2 个参数为返回的消息链列表, 第 3 个参数为指令名称 Tuple: None 或者长度为 3 的元组。如果不响应, 返回 None 如果响应, 第 1 个参数为指令是否调用成功, 第 2 个参数为返回的消息链列表, 第 3 个参数为指令名称
例子:一个名为"yuanshen"的插件;当接收到消息为“原神 可莉”, 如果不想要处理此消息则返回False, None如果想要处理但是执行失败了返回True, tuple([False, "请求失败。", "yuanshen"]) 执行成功了返回True, tuple([True, "结果文本", "yuanshen"]) 例子:一个名为"yuanshen"的插件;当接收到消息为“原神 可莉”, 如果不想要处理此消息则返回False, None如果想要处理但是执行失败了返回True, tuple([False, "请求失败。", "yuanshen"]) 执行成功了返回True, tuple([True, "结果文本", "yuanshen"])
""" """
def run(self, ame: AstrMessageEvent): def run(self, ame: AstrMessageEvent):
if ame.message_str == "helloworld": if ame.message_str == "helloworld":
return CommandResult( return CommandResult(
@@ -57,7 +53,8 @@ class HelloWorldPlugin:
return self.handle_keyword_command(ame) return self.handle_keyword_command(ame)
ret = self.check_keyword(ame.message_str) ret = self.check_keyword(ame.message_str)
if ret: return ret if ret:
return ret
return CommandResult( return CommandResult(
hit=False, hit=False,
@@ -118,8 +115,8 @@ keyword d hi
return command_result return command_result
def save_keyword(self): def save_keyword(self):
json.dump(self.keywords, open("data/keyword.json", "w"), ensure_ascii=False) json.dump(self.keywords, open(
"data/keyword.json", "w"), ensure_ascii=False)
def check_keyword(self, message_str: str): def check_keyword(self, message_str: str):
for k in self.keywords: for k in self.keywords:
@@ -160,6 +157,7 @@ keyword d hi
"homepage": str, # 插件主页 [ 可选 ] "homepage": str, # 插件主页 [ 可选 ]
} }
""" """
def info(self): def info(self):
return { return {
"name": "helloworld", "name": "helloworld",

View File

@@ -13,19 +13,13 @@ import util.function_calling.gplugin as gplugin
import util.plugin_util as putil import util.plugin_util as putil
from PIL import Image as PILImage from PIL import Image as PILImage
from typing import Union
from nakuru import (
GroupMessage,
FriendMessage,
GuildMessage,
)
from nakuru.entities.components import Plain, At, Image from nakuru.entities.components import Plain, At, Image
from addons.baidu_aip_judge import BaiduJudge from addons.baidu_aip_judge import BaiduJudge
from model.provider.provider import Provider from model.provider.provider import Provider
from model.command.command import Command from model.command.command import Command
from util import general_utils as gu from util import general_utils as gu
from util.general_utils import Logger, upload, run_monitor from util.general_utils import upload, run_monitor
from util.cmd_config import CmdConfig as cc from util.cmd_config import CmdConfig as cc
from util.cmd_config import init_astrbot_config_items from util.cmd_config import init_astrbot_config_items
from .types import * from .types import *
@@ -33,7 +27,10 @@ from addons.dashboard.helper import DashBoardHelper
from addons.dashboard.server import DashBoardData from addons.dashboard.server import DashBoardData
from cores.database.conn import dbConn from cores.database.conn import dbConn
from model.platform._message_result import MessageResult from model.platform._message_result import MessageResult
from SparkleLogging.utils.core import LogManager
from logging import Logger
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
# 用户发言频率 # 用户发言频率
user_frequency = {} user_frequency = {}
@@ -43,10 +40,9 @@ frequency_time = 60
frequency_count = 10 frequency_count = 10
# 版本 # 版本
version = '3.1.10' version = '3.1.13'
# 语言模型 # 语言模型
REV_CHATGPT = 'rev_chatgpt'
OPENAI_OFFICIAL = 'openai_official' OPENAI_OFFICIAL = 'openai_official'
NONE_LLM = 'none_llm' NONE_LLM = 'none_llm'
chosen_provider = None chosen_provider = None
@@ -65,26 +61,27 @@ init_astrbot_config_items()
# 全局对象 # 全局对象
_global_object: GlobalObject = None _global_object: GlobalObject = None
logger: Logger = Logger()
# 语言模型选择 # 语言模型选择
def privider_chooser(cfg): def privider_chooser(cfg):
l = [] l = []
if 'rev_ChatGPT' in cfg and cfg['rev_ChatGPT']['enable']:
l.append('rev_chatgpt')
if 'openai' in cfg and len(cfg['openai']['key']) > 0 and cfg['openai']['key'][0] is not None: if 'openai' in cfg and len(cfg['openai']['key']) > 0 and cfg['openai']['key'][0] is not None:
l.append('openai_official') l.append('openai_official')
return l return l
''' '''
初始化机器人 初始化机器人
''' '''
def init(cfg): def init(cfg):
global llm_instance, llm_command_instance global llm_instance, llm_command_instance
global baidu_judge, chosen_provider global baidu_judge, chosen_provider
global frequency_count, frequency_time global frequency_count, frequency_time
global _global_object global _global_object
global logger
# 迁移旧配置 # 迁移旧配置
gu.try_migrate_config(cfg) gu.try_migrate_config(cfg)
@@ -98,8 +95,7 @@ def init(cfg):
_global_object = GlobalObject() _global_object = GlobalObject()
_global_object.version = version _global_object.version = version
_global_object.base_config = cfg _global_object.base_config = cfg
_global_object.logger = logger logger.info("AstrBot v"+version)
logger.log("AstrBot v"+version, gu.LEVEL_INFO)
if 'reply_prefix' in cfg: if 'reply_prefix' in cfg:
# 适配旧版配置 # 适配旧版配置
@@ -111,28 +107,19 @@ def init(cfg):
_global_object.reply_prefix = cfg['reply_prefix'] _global_object.reply_prefix = cfg['reply_prefix']
# 语言模型提供商 # 语言模型提供商
logger.log("正在载入语言模型...", gu.LEVEL_INFO) logger.info("正在载入语言模型...")
prov = privider_chooser(cfg) prov = privider_chooser(cfg)
if REV_CHATGPT in prov:
logger.log("初始化:逆向 ChatGPT", gu.LEVEL_INFO)
if cfg['rev_ChatGPT']['enable']:
if 'account' in cfg['rev_ChatGPT']:
from model.provider.rev_chatgpt import ProviderRevChatGPT
from model.command.rev_chatgpt import CommandRevChatGPT
llm_instance[REV_CHATGPT] = ProviderRevChatGPT(cfg['rev_ChatGPT'], base_url=cc.get("CHATGPT_BASE_URL", None))
llm_command_instance[REV_CHATGPT] = CommandRevChatGPT(llm_instance[REV_CHATGPT], _global_object)
chosen_provider = REV_CHATGPT
_global_object.llms.append(RegisteredLLM(llm_name=REV_CHATGPT, llm_instance=llm_instance[REV_CHATGPT], origin="internal"))
else:
input("请退出本程序, 然后在配置文件中填写rev_ChatGPT相关配置")
if OPENAI_OFFICIAL in prov: if OPENAI_OFFICIAL in prov:
logger.log("初始化OpenAI官方", gu.LEVEL_INFO) logger.info("初始化OpenAI官方")
if cfg['openai']['key'] is not None and cfg['openai']['key'] != [None]: if cfg['openai']['key'] is not None and cfg['openai']['key'] != [None]:
from model.provider.openai_official import ProviderOpenAIOfficial from model.provider.openai_official import ProviderOpenAIOfficial
from model.command.openai_official import CommandOpenAIOfficial from model.command.openai_official import CommandOpenAIOfficial
llm_instance[OPENAI_OFFICIAL] = ProviderOpenAIOfficial(cfg['openai']) llm_instance[OPENAI_OFFICIAL] = ProviderOpenAIOfficial(
llm_command_instance[OPENAI_OFFICIAL] = CommandOpenAIOfficial(llm_instance[OPENAI_OFFICIAL], _global_object) cfg['openai'])
_global_object.llms.append(RegisteredLLM(llm_name=OPENAI_OFFICIAL, llm_instance=llm_instance[OPENAI_OFFICIAL], origin="internal")) llm_command_instance[OPENAI_OFFICIAL] = CommandOpenAIOfficial(
llm_instance[OPENAI_OFFICIAL], _global_object)
_global_object.llms.append(RegisteredLLM(
llm_name=OPENAI_OFFICIAL, llm_instance=llm_instance[OPENAI_OFFICIAL], origin="internal"))
chosen_provider = OPENAI_OFFICIAL chosen_provider = OPENAI_OFFICIAL
# 检查provider设置偏好 # 检查provider设置偏好
@@ -144,11 +131,12 @@ def init(cfg):
if 'baidu_aip' in cfg and 'enable' in cfg['baidu_aip'] and cfg['baidu_aip']['enable']: if 'baidu_aip' in cfg and 'enable' in cfg['baidu_aip'] and cfg['baidu_aip']['enable']:
try: try:
baidu_judge = BaiduJudge(cfg['baidu_aip']) baidu_judge = BaiduJudge(cfg['baidu_aip'])
logger.log("百度内容审核初始化成功", gu.LEVEL_INFO) logger.info("百度内容审核初始化成功")
except BaseException as e: except BaseException as e:
logger.log("百度内容审核初始化失败", gu.LEVEL_ERROR) logger.info("百度内容审核初始化失败")
threading.Thread(target=upload, args=(_global_object, ), daemon=True).start() threading.Thread(target=upload, args=(
_global_object, ), daemon=True).start()
# 得到发言频率配置 # 得到发言频率配置
if 'limit' in cfg: if 'limit' in cfg:
@@ -163,11 +151,11 @@ def init(cfg):
else: else:
_global_object.unique_session = False _global_object.unique_session = False
except BaseException as e: except BaseException as e:
logger.log("独立会话配置错误: "+str(e), gu.LEVEL_ERROR) logger.info("独立会话配置错误: "+str(e))
nick_qq = cc.get("nick_qq", None) nick_qq = cc.get("nick_qq", None)
if nick_qq == None: if nick_qq == None:
nick_qq = ("ai","!","") nick_qq = ("ai", "!", "")
if isinstance(nick_qq, str): if isinstance(nick_qq, str):
nick_qq = (nick_qq,) nick_qq = (nick_qq,)
if isinstance(nick_qq, list): if isinstance(nick_qq, list):
@@ -178,32 +166,35 @@ def init(cfg):
global llm_wake_prefix global llm_wake_prefix
llm_wake_prefix = cc.get("llm_wake_prefix", "") llm_wake_prefix = cc.get("llm_wake_prefix", "")
logger.log("正在载入插件...", gu.LEVEL_INFO) logger.info("正在载入插件...")
# 加载插件 # 加载插件
_command = Command(None, _global_object) _command = Command(None, _global_object)
ok, err = putil.plugin_reload(_global_object.cached_plugins) ok, err = putil.plugin_reload(_global_object.cached_plugins)
if ok: if ok:
logger.log(f"成功载入 {len(_global_object.cached_plugins)} 个插件", gu.LEVEL_INFO) logger.info(
f"成功载入 {len(_global_object.cached_plugins)} 个插件")
else: else:
logger.log(err, gu.LEVEL_ERROR) logger.info(err)
if chosen_provider is None: if chosen_provider is None:
llm_command_instance[NONE_LLM] = _command llm_command_instance[NONE_LLM] = _command
chosen_provider = NONE_LLM chosen_provider = NONE_LLM
logger.log("正在载入机器人消息平台", gu.LEVEL_INFO) logger.info("正在载入机器人消息平台")
# logger.log("提示:需要添加管理员 ID 才能使用 update/plugin 等指令),可在可视化面板添加。(如已添加可忽略)", gu.LEVEL_WARNING) # logger.info("提示:需要添加管理员 ID 才能使用 update/plugin 等指令),可在可视化面板添加。(如已添加可忽略)")
platform_str = "" platform_str = ""
# GOCQ # GOCQ
if 'gocqbot' in cfg and cfg['gocqbot']['enable']: if 'gocqbot' in cfg and cfg['gocqbot']['enable']:
logger.log("启用 QQ_GOCQ 机器人消息平台", gu.LEVEL_INFO) logger.info("启用 QQ_GOCQ 机器人消息平台")
threading.Thread(target=run_gocq_bot, args=(cfg, _global_object), daemon=True).start() threading.Thread(target=run_gocq_bot, args=(
cfg, _global_object), daemon=True).start()
platform_str += "QQ_GOCQ," platform_str += "QQ_GOCQ,"
# QQ频道 # QQ频道
if 'qqbot' in cfg and cfg['qqbot']['enable'] and cfg['qqbot']['appid'] != None: if 'qqbot' in cfg and cfg['qqbot']['enable'] and cfg['qqbot']['appid'] != None:
logger.log("启用 QQ_OFFICIAL 机器人消息平台", gu.LEVEL_INFO) logger.info("启用 QQ_OFFICIAL 机器人消息平台")
threading.Thread(target=run_qqchan_bot, args=(cfg, _global_object), daemon=True).start() threading.Thread(target=run_qqchan_bot, args=(
cfg, _global_object), daemon=True).start()
platform_str += "QQ_OFFICIAL," platform_str += "QQ_OFFICIAL,"
default_personality_str = cc.get("default_personality_str", "") default_personality_str = cc.get("default_personality_str", "")
@@ -222,36 +213,47 @@ def init(cfg):
plugins=_global_object.cached_plugins, plugins=_global_object.cached_plugins,
) )
dashboard_helper = DashBoardHelper(_global_object, config=cc.get_all()) dashboard_helper = DashBoardHelper(_global_object, config=cc.get_all())
dashboard_thread = threading.Thread(target=dashboard_helper.run, daemon=True) dashboard_thread = threading.Thread(
target=dashboard_helper.run, daemon=True)
dashboard_thread.start() dashboard_thread.start()
# 运行 monitor # 运行 monitor
threading.Thread(target=run_monitor, args=(_global_object,), daemon=False).start() threading.Thread(target=run_monitor, args=(
_global_object,), daemon=True).start()
logger.log("如果有任何问题, 请在 https://github.com/Soulter/AstrBot 上提交 issue 或加群 322154837。", gu.LEVEL_INFO) logger.info(
logger.log("请给 https://github.com/Soulter/AstrBot 点个 star。", gu.LEVEL_INFO) "如果有任何问题, 请在 https://github.com/Soulter/AstrBot 上提交 issue 或加群 322154837。")
logger.info("请给 https://github.com/Soulter/AstrBot 点个 star。")
if platform_str == '': if platform_str == '':
platform_str = "(未启动任何平台,请前往面板添加)" platform_str = "(未启动任何平台,请前往面板添加)"
logger.log(f"🎉 项目启动完成") logger.info(f"🎉 项目启动完成")
dashboard_thread.join() dashboard_thread.join()
''' '''
运行 QQ_OFFICIAL 机器人 运行 QQ_OFFICIAL 机器人
''' '''
def run_qqchan_bot(cfg: dict, global_object: GlobalObject): def run_qqchan_bot(cfg: dict, global_object: GlobalObject):
try: try:
from model.platform.qq_official import QQOfficial from model.platform.qq_official import QQOfficial
qqchannel_bot = QQOfficial(cfg=cfg, message_handler=oper_msg, global_object=global_object) qqchannel_bot = QQOfficial(
global_object.platforms.append(RegisteredPlatform(platform_name="qqchan", platform_instance=qqchannel_bot, origin="internal")) cfg=cfg, message_handler=oper_msg, global_object=global_object)
global_object.platforms.append(RegisteredPlatform(
platform_name="qqchan", platform_instance=qqchannel_bot, origin="internal"))
qqchannel_bot.run() qqchannel_bot.run()
except BaseException as e: except BaseException as e:
logger.log("启动QQ频道机器人时出现错误, 原因如下: " + str(e), gu.LEVEL_CRITICAL, tag="QQ频道") logger.error("启动 QQ 频道机器人时出现错误, 原因如下: " + str(e))
logger.log(r"如果您是初次启动请前往可视化面板填写配置。详情请看https://astrbot.soulter.top/center/。" + str(e), gu.LEVEL_CRITICAL) logger.error(r"如果您是初次启动请前往可视化面板填写配置。详情请看https://astrbot.soulter.top/center/。")
''' '''
运行 QQ_GOCQ 机器人 运行 QQ_GOCQ 机器人
''' '''
def run_gocq_bot(cfg: dict, _global_object: GlobalObject): def run_gocq_bot(cfg: dict, _global_object: GlobalObject):
from model.platform.qq_gocq import QQGOCQ from model.platform.qq_gocq import QQGOCQ
@@ -259,26 +261,33 @@ def run_gocq_bot(cfg: dict, _global_object: GlobalObject):
host = cc.get("gocq_host", "127.0.0.1") host = cc.get("gocq_host", "127.0.0.1")
port = cc.get("gocq_websocket_port", 6700) port = cc.get("gocq_websocket_port", 6700)
http_port = cc.get("gocq_http_port", 5700) http_port = cc.get("gocq_http_port", 5700)
logger.log(f"正在检查连接...host: {host}, ws port: {port}, http port: {http_port}", tag="QQ") logger.info(
f"正在检查连接...host: {host}, ws port: {port}, http port: {http_port}")
while True: while True:
if not gu.port_checker(port=port, host=host) or not gu.port_checker(port=http_port, host=host): if not gu.port_checker(port=port, host=host) or not gu.port_checker(port=http_port, host=host):
if not noticed: if not noticed:
noticed = True noticed = True
logger.log(f"连接到{host}:{port}(或{http_port})失败。程序会每隔 5s 自动重试。", gu.LEVEL_CRITICAL, tag="QQ") logger.warning(
f"连接到{host}:{port}(或{http_port})失败。程序会每隔 5s 自动重试。")
time.sleep(5) time.sleep(5)
else: else:
logger.log("检查完毕,未发现问题。", tag="QQ") logger.info("已连接到 gocq。")
break break
try: try:
qq_gocq = QQGOCQ(cfg=cfg, message_handler=oper_msg, global_object=_global_object) qq_gocq = QQGOCQ(cfg=cfg, message_handler=oper_msg,
_global_object.platforms.append(RegisteredPlatform(platform_name="gocq", platform_instance=qq_gocq, origin="internal")) global_object=_global_object)
_global_object.platforms.append(RegisteredPlatform(
platform_name="gocq", platform_instance=qq_gocq, origin="internal"))
qq_gocq.run() qq_gocq.run()
except BaseException as e: except BaseException as e:
input("启动QQ机器人出现错误"+str(e)) input("启动QQ机器人出现错误"+str(e))
''' '''
检查发言频率 检查发言频率
''' '''
def check_frequency(id) -> bool: def check_frequency(id) -> bool:
ts = int(time.time()) ts = int(time.time())
if id in user_frequency: if id in user_frequency:
@@ -290,13 +299,14 @@ def check_frequency(id) -> bool:
if user_frequency[id]['count'] >= frequency_count: if user_frequency[id]['count'] >= frequency_count:
return False return False
else: else:
user_frequency[id]['count']+=1 user_frequency[id]['count'] += 1
return True return True
else: else:
t = {'time':ts,'count':1} t = {'time': ts, 'count': 1}
user_frequency[id] = t user_frequency[id] = t
return True return True
async def record_message(platform: str, session_id: str): async def record_message(platform: str, session_id: str):
# TODO: 这里会非常吃资源。然而 sqlite3 不支持多线程,所以暂时这样写。 # TODO: 这里会非常吃资源。然而 sqlite3 不支持多线程,所以暂时这样写。
curr_ts = int(time.time()) curr_ts = int(time.time())
@@ -306,11 +316,12 @@ async def record_message(platform: str, session_id: str):
db_inst.increment_stat_platform(curr_ts, platform, 1) db_inst.increment_stat_platform(curr_ts, platform, 1)
_global_object.cnt_total += 1 _global_object.cnt_total += 1
async def oper_msg(message: AstrBotMessage, async def oper_msg(message: AstrBotMessage,
session_id: str, session_id: str,
role: str = 'member', role: str = 'member',
platform: str = None, platform: str = None,
) -> MessageResult: ) -> MessageResult:
""" """
处理消息 处理消息
message: 消息对象 message: 消息对象
@@ -322,8 +333,8 @@ async def oper_msg(message: AstrBotMessage,
message_str = '' message_str = ''
session_id = session_id session_id = session_id
role = role role = role
hit = False # 是否命中指令 hit = False # 是否命中指令
command_result = () # 调用指令返回的结果 command_result = () # 调用指令返回的结果
# 获取平台实例 # 获取平台实例
reg_platform: RegisteredPlatform = None reg_platform: RegisteredPlatform = None
@@ -332,7 +343,6 @@ async def oper_msg(message: AstrBotMessage,
reg_platform = p reg_platform = p
break break
if not reg_platform: if not reg_platform:
_global_object.logger.log(f"未找到平台 {platform} 的实例。", gu.LEVEL_ERROR)
raise Exception(f"未找到平台 {platform} 的实例。") raise Exception(f"未找到平台 {platform} 的实例。")
# 统计数据,如频道消息量 # 统计数据,如频道消息量
@@ -350,12 +360,10 @@ async def oper_msg(message: AstrBotMessage,
# 检查是否是更换语言模型的请求 # 检查是否是更换语言模型的请求
temp_switch = "" temp_switch = ""
if message_str.startswith('/gpt') or message_str.startswith('/revgpt'): if message_str.startswith('/gpt'):
target = chosen_provider target = chosen_provider
if message_str.startswith('/gpt'): if message_str.startswith('/gpt'):
target = OPENAI_OFFICIAL target = OPENAI_OFFICIAL
elif message_str.startswith('/revgpt'):
target = REV_CHATGPT
l = message_str.split(' ') l = message_str.split(' ')
if len(l) > 1 and l[1] != "": if len(l) > 1 and l[1] != "":
# 临时对话模式,先记录下之前的语言模型,回答完毕后再切回 # 临时对话模式,先记录下之前的语言模型,回答完毕后再切回
@@ -390,7 +398,7 @@ async def oper_msg(message: AstrBotMessage,
if not check: if not check:
return MessageResult(f"你的提问得到的回复未通过【百度AI内容审核】服务, 不予回复。\n\n{msg}") return MessageResult(f"你的提问得到的回复未通过【百度AI内容审核】服务, 不予回复。\n\n{msg}")
if chosen_provider == NONE_LLM: if chosen_provider == NONE_LLM:
logger.log("一条消息由于 Bot 未启动任何语言模型并且未触发指令而将被忽略。", gu.LEVEL_WARNING) logger.info("一条消息由于 Bot 未启动任何语言模型并且未触发指令而将被忽略。")
return return
try: try:
if llm_wake_prefix != "" and not message_str.startswith(llm_wake_prefix): if llm_wake_prefix != "" and not message_str.startswith(llm_wake_prefix):
@@ -412,16 +420,16 @@ async def oper_msg(message: AstrBotMessage,
web_sch_flag = True web_sch_flag = True
else: else:
message_str += " " + cc.get("llm_env_prompt", "") message_str += " " + cc.get("llm_env_prompt", "")
if chosen_provider == REV_CHATGPT or chosen_provider == OPENAI_OFFICIAL: if chosen_provider == OPENAI_OFFICIAL:
if _global_object.web_search or web_sch_flag: if _global_object.web_search or web_sch_flag:
official_fc = chosen_provider == OPENAI_OFFICIAL official_fc = chosen_provider == OPENAI_OFFICIAL
llm_result_str = await gplugin.web_search(message_str, llm_instance[chosen_provider], session_id, official_fc) llm_result_str = await gplugin.web_search(message_str, llm_instance[chosen_provider], session_id, official_fc)
else: else:
llm_result_str = await llm_instance[chosen_provider].text_chat(message_str, session_id, image_url, default_personality = _global_object.default_personality) llm_result_str = await llm_instance[chosen_provider].text_chat(message_str, session_id, image_url, default_personality=_global_object.default_personality)
llm_result_str = _global_object.reply_prefix + llm_result_str llm_result_str = _global_object.reply_prefix + llm_result_str
except BaseException as e: except BaseException as e:
logger.log(f"调用异常:{traceback.format_exc()}", gu.LEVEL_ERROR) logger.info(f"调用异常:{traceback.format_exc()}")
return MessageResult(f"调用语言模型例程时出现异常。原因: {str(e)}") return MessageResult(f"调用语言模型例程时出现异常。原因: {str(e)}")
# 切换回原来的语言模型 # 切换回原来的语言模型
@@ -474,4 +482,4 @@ async def oper_msg(message: AstrBotMessage,
try: try:
return MessageResult(llm_result_str) return MessageResult(llm_result_str)
except BaseException as e: except BaseException as e:
logger.log("回复消息错误: \n"+str(e), gu.LEVEL_ERROR) logger.info("回复消息错误: \n"+str(e))

View File

@@ -11,38 +11,43 @@ from types import ModuleType
from enum import Enum from enum import Enum
from dataclasses import dataclass from dataclasses import dataclass
class MessageType(Enum): class MessageType(Enum):
GROUP_MESSAGE = 'GroupMessage' # 群组形式的消息 GROUP_MESSAGE = 'GroupMessage' # 群组形式的消息
FRIEND_MESSAGE = 'FriendMessage' # 私聊、好友等单聊消息 FRIEND_MESSAGE = 'FriendMessage' # 私聊、好友等单聊消息
GUILD_MESSAGE = 'GuildMessage' # 频道消息 GUILD_MESSAGE = 'GuildMessage' # 频道消息
@dataclass @dataclass
class MessageMember(): class MessageMember():
user_id: str # 发送者id user_id: str # 发送者id
nickname: str = None nickname: str = None
class AstrBotMessage(): class AstrBotMessage():
''' '''
AstrBot 的消息对象 AstrBot 的消息对象
''' '''
tag: str # 消息来源标签 tag: str # 消息来源标签
type: MessageType # 消息类型 type: MessageType # 消息类型
self_id: str # 机器人的识别id self_id: str # 机器人的识别id
session_id: str # 会话id session_id: str # 会话id
message_id: str # 消息id message_id: str # 消息id
sender: MessageMember # 发送者 sender: MessageMember # 发送者
message: List[BaseMessageComponent] # 消息链使用 Nakuru 的消息链格式 message: List[BaseMessageComponent] # 消息链使用 Nakuru 的消息链格式
message_str: str # 最直观的纯文本消息字符串 message_str: str # 最直观的纯文本消息字符串
raw_message: object raw_message: object
timestamp: int # 消息时间戳 timestamp: int # 消息时间戳
def __str__(self) -> str: def __str__(self) -> str:
return str(self.__dict__) return str(self.__dict__)
class PluginType(Enum): class PluginType(Enum):
PLATFORM = 'platfrom' # 平台类插件。 PLATFORM = 'platfrom' # 平台类插件。
LLM = 'llm' # 大语言模型类插件 LLM = 'llm' # 大语言模型类插件
COMMON = 'common' # 其他插件 COMMON = 'common' # 其他插件
@dataclass @dataclass
class PluginMetadata: class PluginMetadata:
@@ -52,16 +57,17 @@ class PluginMetadata:
# required # required
plugin_name: str plugin_name: str
plugin_type: PluginType plugin_type: PluginType
author: str # 插件作者 author: str # 插件作者
desc: str # 插件简介 desc: str # 插件简介
version: str # 插件版本 version: str # 插件版本
# optional # optional
repo: str = None # 插件仓库地址 repo: str = None # 插件仓库地址
def __str__(self) -> str: def __str__(self) -> str:
return f"PluginMetadata({self.plugin_name}, {self.plugin_type}, {self.desc}, {self.version}, {self.repo})" return f"PluginMetadata({self.plugin_name}, {self.plugin_type}, {self.desc}, {self.version}, {self.repo})"
@dataclass @dataclass
class RegisteredPlugin: class RegisteredPlugin:
''' '''
@@ -76,8 +82,10 @@ class RegisteredPlugin:
def __str__(self) -> str: def __str__(self) -> str:
return f"RegisteredPlugin({self.metadata}, {self.module_path}, {self.root_dir_name})" return f"RegisteredPlugin({self.metadata}, {self.module_path}, {self.root_dir_name})"
RegisteredPlugins = List[RegisteredPlugin] RegisteredPlugins = List[RegisteredPlugin]
@dataclass @dataclass
class RegisteredPlatform: class RegisteredPlatform:
''' '''
@@ -85,7 +93,8 @@ class RegisteredPlatform:
''' '''
platform_name: str platform_name: str
platform_instance: Platform platform_instance: Platform
origin: str = None # 注册来源 origin: str = None # 注册来源
@dataclass @dataclass
class RegisteredLLM: class RegisteredLLM:
@@ -94,32 +103,32 @@ class RegisteredLLM:
''' '''
llm_name: str llm_name: str
llm_instance: LLMProvider llm_instance: LLMProvider
origin: str = None # 注册来源 origin: str = None # 注册来源
class GlobalObject: class GlobalObject:
''' '''
存放一些公用的数据用于在不同模块(如core与command)之间传递 存放一些公用的数据用于在不同模块(如core与command)之间传递
''' '''
version: str # 机器人版本 version: str # 机器人版本
nick: str # 用户定义的机器人的别名 nick: str # 用户定义的机器人的别名
base_config: dict # config.json 中导出的配置 base_config: dict # config.json 中导出的配置
cached_plugins: List[RegisteredPlugin] # 加载的插件 cached_plugins: List[RegisteredPlugin] # 加载的插件
platforms: List[RegisteredPlatform] platforms: List[RegisteredPlatform]
llms: List[RegisteredLLM] llms: List[RegisteredLLM]
web_search: bool # 是否开启了网页搜索 web_search: bool # 是否开启了网页搜索
reply_prefix: str # 回复前缀 reply_prefix: str # 回复前缀
unique_session: bool # 是否开启了独立会话 unique_session: bool # 是否开启了独立会话
cnt_total: int # 总消息数 cnt_total: int # 总消息数
default_personality: dict default_personality: dict
dashboard_data = None dashboard_data = None
logger: None
def __init__(self): def __init__(self):
self.nick = None # gocq 的昵称 self.nick = None # gocq 的昵称
self.base_config = None # config.yaml self.base_config = None # config.yaml
self.cached_plugins = [] # 缓存的插件 self.cached_plugins = [] # 缓存的插件
self.web_search = False # 是否开启了网页搜索 self.web_search = False # 是否开启了网页搜索
self.reply_prefix = None self.reply_prefix = None
self.unique_session = False self.unique_session = False
self.cnt_total = 0 self.cnt_total = 0
@@ -129,16 +138,17 @@ class GlobalObject:
self.dashboard_data = None self.dashboard_data = None
self.stat = {} self.stat = {}
class AstrMessageEvent(): class AstrMessageEvent():
''' '''
消息事件 消息事件
''' '''
context: GlobalObject # 一些公用数据 context: GlobalObject # 一些公用数据
message_str: str # 纯消息字符串 message_str: str # 纯消息字符串
message_obj: AstrBotMessage # 消息对象 message_obj: AstrBotMessage # 消息对象
platform: RegisteredPlatform # 来源平台 platform: RegisteredPlatform # 来源平台
role: str # 基本身份。`admin` 或 `member` role: str # 基本身份。`admin` 或 `member`
session_id: int # 会话 id session_id: int # 会话 id
def __init__(self, def __init__(self,
message_str: str, message_str: str,
@@ -154,10 +164,12 @@ class AstrMessageEvent():
self.role = role self.role = role
self.session_id = session_id self.session_id = session_id
class CommandResult(): class CommandResult():
''' '''
用于在Command中返回多个值 用于在Command中返回多个值
''' '''
def __init__(self, hit: bool, success: bool, message_chain: list, command_name: str = "unknown_command") -> None: def __init__(self, hit: bool, success: bool, message_chain: list, command_name: str = "unknown_command") -> None:
self.hit = hit self.hit = hit
self.success = success self.success = success
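Putting these types next to the helloworld example earlier in this diff, a minimal plugin entry point would look roughly like the sketch below — an illustration under the same API assumptions, not code added by this commit; the plugin name and strings are made up:

# Hypothetical plugin sketch using AstrMessageEvent / CommandResult as defined above.
from util.plugin_dev.api.v1.bot import AstrMessageEvent, CommandResult

class PingPlugin:
    def run(self, ame: AstrMessageEvent):
        # ame.message_str is the plain-text message; ame.role is 'admin' or 'member'
        if ame.message_str == "ping":
            return CommandResult(
                hit=True,                  # this plugin handled the message
                success=True,              # handling succeeded
                message_chain=["pong"],    # message chain sent back to the platform
                command_name="ping",
            )
        return CommandResult(hit=False, success=False, message_chain=None)

    def info(self):
        return {
            "name": "ping",
            "desc": "示例插件",
            "version": "0.0.1",
            "author": "example",
            "help": "发送 ping 回复 pong",
        }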

View File

@@ -3,11 +3,12 @@ import yaml
import time import time
from typing import Tuple from typing import Tuple
class dbConn(): class dbConn():
def __init__(self): def __init__(self):
# 读取参数,并支持中文 # 读取参数,并支持中文
conn = sqlite3.connect("data.db") conn = sqlite3.connect("data.db")
conn.text_factory=str conn.text_factory = str
self.conn = conn self.conn = conn
c = conn.cursor() c = conn.cursor()
c.execute( c.execute(
@@ -107,7 +108,6 @@ class dbConn():
) )
conn.commit() conn.commit()
def increment_stat_session(self, platform, session_id, cnt): def increment_stat_session(self, platform, session_id, cnt):
# if not exist, insert # if not exist, insert
conn = self.conn conn = self.conn
@@ -291,4 +291,3 @@ class dbConn():
def close(self): def close(self):
self.conn.close() self.conn.close()

View File

@@ -1,26 +0,0 @@
{
"llms_claude_cookie": {
"config_type": "item",
"name": "llms_claude_cookie",
"description": "Claude 的 Cookie",
"path": "llms_claude_cookie",
"value": "hihi",
"val_type": "str"
},
"llms_huggingchat_email": {
"config_type": "item",
"name": "llms_huggingchat_email",
"description": "HuggingChat 的邮箱",
"path": "llms_huggingchat_email",
"value": "",
"val_type": "str"
},
"llms_huggingchat_psw": {
"config_type": "item",
"name": "llms_huggingchat_psw",
"description": "HuggingChat 的密码",
"path": "llms_huggingchat_psw",
"value": "",
"val_type": "str"
}
}

main.py (121 changed lines)
View File

@@ -1,27 +1,63 @@
import os, sys
from pip._internal import main as pipmain import os
import sys
import warnings import warnings
import traceback import traceback
import threading import threading
from SparkleLogging.utils.core import LogManager
from logging import Formatter, Logger
warnings.filterwarnings("ignore") warnings.filterwarnings("ignore")
abs_path = os.path.dirname(os.path.realpath(sys.argv[0])) + '/' abs_path = os.path.dirname(os.path.realpath(sys.argv[0])) + '/'
logger: Logger = None
logo_tmpl = """
___ _______.___________..______ .______ ______ .___________.
/ \ / | || _ \ | _ \ / __ \ | |
/ ^ \ | (----`---| |----`| |_) | | |_) | | | | | `---| |----`
/ /_\ \ \ \ | | | / | _ < | | | | | |
/ _____ \ .----) | | | | |\ \----.| |_) | | `--' | | |
/__/ \__\ |_______/ |__| | _| `._____||______/ \______/ |__|
"""
def make_necessary_dirs():
os.makedirs("data/config", exist_ok=True)
os.makedirs("temp", exist_ok=True)
def main(): def main():
logger = LogManager.GetLogger(
log_name='astrbot-core',
out_to_console=True,
# HTTPpost_url='http://localhost:6185/api/log',
# http_mode = True,
custom_formatter=Formatter('[%(asctime)s| %(name)s - %(levelname)s|%(filename)s:%(lineno)d]: %(message)s', datefmt="%H:%M:%S")
)
logger.info(logo_tmpl)
# config.yaml 配置文件加载和环境确认 # config.yaml 配置文件加载和环境确认
try: try:
import cores.qqbot.core as qqBot import botpy, logging, yaml
import yaml import cores.astrbot.core as qqBot
import util.general_utils as gu # delete qqbotpy's logger
ymlfile = open(abs_path+"configs/config.yaml", 'r', encoding='utf-8') for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
ymlfile = open(abs_path+"configs/config.yaml", 'r', encoding='utf-8')
cfg = yaml.safe_load(ymlfile) cfg = yaml.safe_load(ymlfile)
except ImportError as import_error: except ImportError as import_error:
traceback.print_exc() logger.error(import_error)
print(import_error) logger.error("检测到一些依赖库没有安装。由于兼容性问题AstrBot 此版本将不会自动为您安装依赖库。请您先自行安装,然后重试。")
input("第三方库未完全安装完毕,请退出程序重试。") logger.info("如何安装?如果:")
logger.info("- Windows 启动器部署且使用启动器下载了 Python的在 launcher.exe 所在目录下的地址框输入 powershell然后执行 .\python\python.exe -m pip install .\AstrBot\requirements.txt")
logger.info("- Windows 启动器部署且使用自己之前下载的 Python的在 launcher.exe 所在目录下的地址框输入 powershell然后执行 python -m pip install .\AstrBot\requirements.txt")
logger.info("- 自行 clone 源码部署的python -m pip install -r requirements.txt")
logger.info("- 如果还不会,加群 322154837 ")
input("按任意键退出。")
exit()
except FileNotFoundError as file_not_found: except FileNotFoundError as file_not_found:
print(file_not_found) logger.error(file_not_found)
input("配置文件不存在,请检查是否已经下载配置文件。") input("配置文件不存在,请检查是否已经下载配置文件。")
exit()
except BaseException as e: except BaseException as e:
raise e raise e
@@ -30,67 +66,26 @@ def main():
os.environ['HTTP_PROXY'] = cfg['http_proxy'] os.environ['HTTP_PROXY'] = cfg['http_proxy']
if 'https_proxy' in cfg and cfg['https_proxy'] != '': if 'https_proxy' in cfg and cfg['https_proxy'] != '':
os.environ['HTTPS_PROXY'] = cfg['https_proxy'] os.environ['HTTPS_PROXY'] = cfg['https_proxy']
os.environ['NO_PROXY'] = 'https://api.sgroup.qq.com' os.environ['NO_PROXY'] = 'https://api.sgroup.qq.com'
# 检查并创建 temp 文件夹 make_necessary_dirs()
if not os.path.exists(abs_path + "temp"):
os.mkdir(abs_path+"temp")
if not os.path.exists(abs_path + "data"):
os.mkdir(abs_path+"data")
if not os.path.exists(abs_path + "data/config"):
os.mkdir(abs_path+"data/config")
# 启动主程序cores/qqbot/core.py # 启动主程序cores/qqbot/core.py
qqBot.init(cfg) qqBot.init(cfg)
def check_env(ch_mirror=False):
def check_env():
if not (sys.version_info.major == 3 and sys.version_info.minor >= 9): if not (sys.version_info.major == 3 and sys.version_info.minor >= 9):
print("请使用Python3.9+运行本项目") logger.error("请使用 Python3.9+ 运行本项目。按任意键退出。")
input("按任意键退出...") input("")
exit() exit()
if os.path.exists('requirements.txt'):
pth = 'requirements.txt'
else:
pth = 'QQChannelChatGPT'+ os.sep +'requirements.txt'
print("正在检查或下载第三方库,请耐心等待...")
try:
if ch_mirror:
print("使用阿里云镜像")
pipmain(['install', '-r', pth, '-i', 'https://mirrors.aliyun.com/pypi/simple/'])
else:
pipmain(['install', '-r', pth])
except BaseException as e:
print(e)
while True:
res = input("安装失败。\n如报错ValueError: check_hostname requires server_hostname请尝试先关闭代理后重试。\n1.输入y回车重试\n2. 输入c回车使用国内镜像源下载\n3. 输入其他按键回车继续往下执行。")
if res == "y":
try:
pipmain(['install', '-r', pth])
break
except BaseException as e:
print(e)
continue
elif res == "c":
try:
pipmain(['install', '-r', pth, '-i', 'https://mirrors.aliyun.com/pypi/simple/'])
break
except BaseException as e:
print(e)
continue
else:
break
print("第三方库检查完毕。")
if __name__ == "__main__": if __name__ == "__main__":
args = sys.argv check_env()
t = threading.Thread(target=main, daemon=True)
if '-cn' in args:
check_env(True)
else:
check_env()
t = threading.Thread(target=main, daemon=False)
t.start() t.start()
t.join() try:
t.join()
except KeyboardInterrupt as e:
logger.info("退出 AstrBot。")
exit()

View File

@@ -13,8 +13,7 @@ from nakuru.entities.components import (
from util import general_utils as gu from util import general_utils as gu
from model.provider.provider import Provider from model.provider.provider import Provider
from util.cmd_config import CmdConfig as cc from util.cmd_config import CmdConfig as cc
from util.general_utils import Logger from cores.astrbot.types import (
from cores.qqbot.types import (
GlobalObject, GlobalObject,
AstrMessageEvent, AstrMessageEvent,
PluginType, PluginType,
@@ -24,23 +23,28 @@ from cores.qqbot.types import (
) )
from typing import List, Tuple from typing import List, Tuple
from SparkleLogging.utils.core import LogManager
from logging import Logger
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
PLATFORM_QQCHAN = 'qqchan' PLATFORM_QQCHAN = 'qqchan'
PLATFORM_GOCQ = 'gocq' PLATFORM_GOCQ = 'gocq'
# 指令功能的基类,通用的(不区分语言模型)的指令就在这实现 # 指令功能的基类,通用的(不区分语言模型)的指令就在这实现
class Command: class Command:
def __init__(self, provider: Provider, global_object: GlobalObject = None): def __init__(self, provider: Provider, global_object: GlobalObject = None):
self.provider = provider self.provider = provider
self.global_object = global_object self.global_object = global_object
self.logger: Logger = global_object.logger
async def check_command(self, async def check_command(self,
message, message,
session_id: str, session_id: str,
role: str, role: str,
platform: RegisteredPlatform, platform: RegisteredPlatform,
message_obj): message_obj):
self.platform = platform self.platform = platform
# 插件 # 插件
cached_plugins = self.global_object.cached_plugins cached_plugins = self.global_object.cached_plugins
@@ -51,7 +55,7 @@ class Command:
platform=platform, platform=platform,
role=role, role=role,
context=self.global_object, context=self.global_object,
session_id = session_id session_id=session_id
) )
# 从已启动的插件中查找是否有匹配的指令 # 从已启动的插件中查找是否有匹配的指令
for plugin in cached_plugins: for plugin in cached_plugins:
@@ -83,9 +87,11 @@ class Command:
if hit: if hit:
return True, res return True, res
except BaseException as e: except BaseException as e:
self.logger.log(f"{plugin.metadata.plugin_name} 插件异常,原因: {str(e)}\n如果你没有相关装插件的想法, 请直接忽略此报错, 不影响其他功能的运行。", level=gu.LEVEL_WARNING) logger.error(
f"{plugin.metadata.plugin_name} 插件异常,原因: {str(e)}\n如果你没有相关装插件的想法, 请直接忽略此报错, 不影响其他功能的运行。")
except BaseException as e: except BaseException as e:
self.logger.log(f"{plugin.metadata.plugin_name} 插件异常,原因: {str(e)}\n如果你没有相关装插件的想法, 请直接忽略此报错, 不影响其他功能的运行。", level=gu.LEVEL_WARNING) logger.error(
f"{plugin.metadata.plugin_name} 插件异常,原因: {str(e)}\n如果你没有相关装插件的想法, 请直接忽略此报错, 不影响其他功能的运行。")
if self.command_start_with(message, "nick"): if self.command_start_with(message, "nick"):
return True, self.set_nick(message, platform, role) return True, self.set_nick(message, platform, role)
@@ -93,8 +99,10 @@ class Command:
return True, self.plugin_oper(message, role, cached_plugins, platform) return True, self.plugin_oper(message, role, cached_plugins, platform)
if self.command_start_with(message, "myid") or self.command_start_with(message, "!myid"): if self.command_start_with(message, "myid") or self.command_start_with(message, "!myid"):
return True, self.get_my_id(message_obj, platform) return True, self.get_my_id(message_obj, platform)
if self.command_start_with(message, "web"): # 网页搜索 if self.command_start_with(message, "web"): # 网页搜索
return True, self.web_search(message) return True, self.web_search(message)
if self.command_start_with(message, "update"):
return True, self.update(message, role)
if not self.provider and self.command_start_with(message, "help"): if not self.provider and self.command_start_with(message, "help"):
return True, await self.help() return True, await self.help()
@@ -124,16 +132,19 @@ class Command:
l = message.split(" ") l = message.split(" ")
if len(l) <= 1: if len(l) <= 1:
obj = cc.get_all() obj = cc.get_all()
p = gu.create_text_image("【cmd_config.json】", json.dumps(obj, indent=4, ensure_ascii=False)) p = gu.create_text_image("【cmd_config.json】", json.dumps(
obj, indent=4, ensure_ascii=False))
return True, [Image.fromFileSystem(p)], "newconf" return True, [Image.fromFileSystem(p)], "newconf"
''' '''
插件指令 插件指令
''' '''
def plugin_oper(self, message: str, role: str, cached_plugins: List[RegisteredPlugin], platform: str): def plugin_oper(self, message: str, role: str, cached_plugins: List[RegisteredPlugin], platform: str):
l = message.split(" ") l = message.split(" ")
if len(l) < 2: if len(l) < 2:
p = gu.create_text_image("【插件指令面板】", "安装插件: \nplugin i 插件Github地址\n卸载插件: \nplugin d 插件名 \n重载插件: \nplugin reload\n查看插件列表:\nplugin l\n更新插件: plugin u 插件名\n") p = gu.create_text_image(
"【插件指令面板】", "安装插件: \nplugin i 插件Github地址\n卸载插件: \nplugin d 插件名 \n重载插件: \nplugin reload\n查看插件列表:\nplugin l\n更新插件: plugin u 插件名\n")
return True, [Image.fromFileSystem(p)], "plugin" return True, [Image.fromFileSystem(p)], "plugin"
else: else:
if l[1] == "i": if l[1] == "i":
@@ -163,7 +174,8 @@ class Command:
plugin_list_info = "" plugin_list_info = ""
for plugin in cached_plugins: for plugin in cached_plugins:
plugin_list_info += f"{plugin.metadata.plugin_name}: \n名称: {plugin.metadata.plugin_name}\n简介: {plugin.metadata.plugin_desc}\n版本: {plugin.metadata.version}\n作者: {plugin.metadata.author}\n" plugin_list_info += f"{plugin.metadata.plugin_name}: \n名称: {plugin.metadata.plugin_name}\n简介: {plugin.metadata.plugin_desc}\n版本: {plugin.metadata.version}\n作者: {plugin.metadata.author}\n"
p = gu.create_text_image("【已激活插件列表】", plugin_list_info + "\n使用plugin v 插件名 查看插件帮助\n") p = gu.create_text_image(
"【已激活插件列表】", plugin_list_info + "\n使用plugin v 插件名 查看插件帮助\n")
return True, [Image.fromFileSystem(p)], "plugin" return True, [Image.fromFileSystem(p)], "plugin"
except BaseException as e: except BaseException as e:
return False, f"获取插件列表失败,原因: {str(e)}", "plugin" return False, f"获取插件列表失败,原因: {str(e)}", "plugin"
@@ -175,7 +187,8 @@ class Command:
info = i.metadata info = i.metadata
break break
if info: if info:
p = gu.create_text_image(f"【插件信息】", f"名称: {info['name']}\n{info['desc']}\n版本: {info['version']}\n作者: {info['author']}\n\n帮助:\n{info['help']}") p = gu.create_text_image(
f"【插件信息】", f"名称: {info['name']}\n{info['desc']}\n版本: {info['version']}\n作者: {info['author']}\n\n帮助:\n{info['help']}")
return True, [Image.fromFileSystem(p)], "plugin" return True, [Image.fromFileSystem(p)], "plugin"
else: else:
return False, "未找到该插件", "plugin" return False, "未找到该插件", "plugin"
@@ -185,6 +198,7 @@ class Command:
''' '''
nick: 存储机器人的昵称 nick: 存储机器人的昵称
''' '''
def set_nick(self, message: str, platform: str, role: str = "member"): def set_nick(self, message: str, platform: str, role: str = "member"):
if role != "admin": if role != "admin":
return True, "你无权使用该指令 :P", "nick" return True, "你无权使用该指令 :P", "nick"
@@ -209,8 +223,7 @@ class Command:
"plugin": "插件安装、卸载和重载", "plugin": "插件安装、卸载和重载",
"web on/off": "LLM 网页搜索能力", "web on/off": "LLM 网页搜索能力",
"reset": "重置 LLM 对话", "reset": "重置 LLM 对话",
"/gpt": "切换到 OpenAI 官方接口", "/gpt": "切换到 OpenAI 官方接口"
"/revgpt": "切换到网页版ChatGPT",
} }
async def help_messager(self, commands: dict, platform: str, cached_plugins: List[RegisteredPlugin] = None): async def help_messager(self, commands: dict, platform: str, cached_plugins: List[RegisteredPlugin] = None):
@@ -237,7 +250,7 @@ class Command:
p = gu.create_markdown_image(msg) p = gu.create_markdown_image(msg)
return [Image.fromFileSystem(p),] return [Image.fromFileSystem(p),]
except BaseException as e: except BaseException as e:
self.logger.log(str(e)) logger.error(str(e))
return msg return msg
def command_start_with(self, message: str, *args): def command_start_with(self, message: str, *args):
@@ -273,8 +286,10 @@ class Command:
else: else:
if l[1].lower().startswith('v'): if l[1].lower().startswith('v'):
try: try:
release_data = util.updator.request_release_info(latest=False) release_data = util.updator.request_release_info(
util.updator.update_project(release_data, latest=False, version=l[1]) latest=False)
util.updator.update_project(
release_data, latest=False, version=l[1])
return True, "更新成功重启生效。可输入「update r」重启", "update" return True, "更新成功重启生效。可输入「update r」重启", "update"
except BaseException as e: except BaseException as e:
return False, "更新失败: "+str(e), "update" return False, "更新失败: "+str(e), "update"

View File

@@ -1,7 +1,8 @@
from model.command.command import Command from model.command.command import Command
from model.provider.openai_official import ProviderOpenAIOfficial from model.provider.openai_official import ProviderOpenAIOfficial
from cores.qqbot.personality import personalities from util.personality import personalities
from cores.qqbot.types import GlobalObject from cores.astrbot.types import GlobalObject
class CommandOpenAIOfficial(Command): class CommandOpenAIOfficial(Command):
def __init__(self, provider: ProviderOpenAIOfficial, global_object: GlobalObject): def __init__(self, provider: ProviderOpenAIOfficial, global_object: GlobalObject):
@@ -11,11 +12,11 @@ class CommandOpenAIOfficial(Command):
super().__init__(provider, global_object) super().__init__(provider, global_object)
async def check_command(self, async def check_command(self,
message: str, message: str,
session_id: str, session_id: str,
role: str, role: str,
platform: str, platform: str,
message_obj): message_obj):
self.platform = platform self.platform = platform
# 检查基础指令 # 检查基础指令
@@ -67,7 +68,6 @@ class CommandOpenAIOfficial(Command):
commands['token'] = '查看本轮会话token' commands['token'] = '查看本轮会话token'
return True, await super().help_messager(commands, self.platform, self.global_object.cached_plugins), "help" return True, await super().help_messager(commands, self.platform, self.global_object.cached_plugins), "help"
async def reset(self, session_id: str, message: str = "reset"): async def reset(self, session_id: str, message: str = "reset"):
if self.provider is None: if self.provider is None:
return False, "未启用 OpenAI 官方 API", "reset" return False, "未启用 OpenAI 官方 API", "reset"
@@ -78,13 +78,13 @@ class CommandOpenAIOfficial(Command):
if len(l) == 2 and l[1] == "p": if len(l) == 2 and l[1] == "p":
self.provider.forget(session_id) self.provider.forget(session_id)
if self.personality_str != "": if self.personality_str != "":
self.set(self.personality_str, session_id) # 重新设置人格 self.set(self.personality_str, session_id) # 重新设置人格
return True, "重置成功", "reset" return True, "重置成功", "reset"
def his(self, message: str, session_id: str): def his(self, message: str, session_id: str):
if self.provider is None: if self.provider is None:
return False, "未启用 OpenAI 官方 API", "his" return False, "未启用 OpenAI 官方 API", "his"
#分页每页5条 # 分页每页5条
msg = '' msg = ''
size_per_page = 3 size_per_page = 3
page = 1 page = 1
@@ -95,8 +95,10 @@ class CommandOpenAIOfficial(Command):
msg = f"历史记录为空" msg = f"历史记录为空"
return True, msg, "his" return True, msg, "his"
l = self.provider.session_dict[session_id]
max_page = len(l)//size_per_page + 1 if len(l) % size_per_page != 0 else len(l)//size_per_page
p = self.provider.get_prompts_by_cache_list(self.provider.session_dict[session_id], divide=True, paging=True, size=size_per_page, page=page)
return True, f"历史记录如下:\n{p}\n{page}页 | 共{max_page}\n*输入/his 2跳转到第2页", "his" return True, f"历史记录如下:\n{p}\n{page}页 | 共{max_page}\n*输入/his 2跳转到第2页", "his"
def token(self, session_id: str): def token(self, session_id: str):

View File

@@ -1,132 +0,0 @@
from model.command.command import Command
from model.provider.rev_chatgpt import ProviderRevChatGPT
from cores.qqbot.personality import personalities
from cores.qqbot.types import GlobalObject
class CommandRevChatGPT(Command):
def __init__(self, provider: ProviderRevChatGPT, global_object: GlobalObject):
self.provider = provider
self.global_object = global_object
self.personality_str = ""
super().__init__(provider, global_object)
async def check_command(self,
message: str,
session_id: str,
role: str,
platform: str,
message_obj):
self.platform = platform
hit, res = await super().check_command(
message,
session_id,
role,
platform,
message_obj
)
if hit:
return True, res
if self.command_start_with(message, "help", "帮助"):
return True, await self.help()
elif self.command_start_with(message, "reset"):
return True, self.reset(session_id, message)
elif self.command_start_with(message, "update"):
return True, self.update(message, role)
elif self.command_start_with(message, "set"):
return True, self.set(message, session_id)
elif self.command_start_with(message, "switch"):
return True, self.switch(message, session_id)
return False, None
def reset(self, session_id, message: str):
l = message.split(" ")
if len(l) == 1:
self.provider.forget(session_id)
return True, "重置完毕。", "reset"
if len(l) == 2 and l[1] == "p":
self.provider.forget(session_id)
ret = self.provider.text_chat(self.personality_str)
return True, f"重置完毕(保留人格)。\n\n{ret}", "reset"
def set(self, message: str, session_id: str):
l = message.split(" ")
if len(l) == 1:
return True, f"设置人格: \n/set 人格名或人格文本。例如/set 编剧\n人格列表: /set list\n人格详细信息: \
/set view 人格名\n重置会话(清除人格): /reset\n重置会话(保留人格): /reset p", "set"
elif l[1] == "list":
msg = "人格列表:\n"
for key in personalities.keys():
msg += f" |-{key}\n"
msg += '\n\n*输入/set view 人格名查看人格详细信息'
msg += '\n*不定时更新人格库,请及时更新本项目。'
return True, msg, "set"
elif l[1] == "view":
if len(l) == 2:
return True, "请输入/set view 人格名", "set"
ps = l[2].strip()
if ps in personalities:
msg = f"人格【{ps}】详细信息:\n"
msg += f"{personalities[ps]}\n"
else:
msg = f"人格【{ps}】不存在。"
return True, msg, "set"
else:
ps = l[1].strip()
if ps in personalities:
self.reset(session_id, "reset")
self.personality_str = personalities[ps]
ret = self.provider.text_chat(self.personality_str, session_id)
return True, f"人格【{ps}】已设置。\n\n{ret}", "set"
else:
self.reset(session_id, "reset")
self.personality_str = ps
ret = self.provider.text_chat(ps, session_id)
return True, f"人格信息已设置。\n\n{ret}", "set"
def switch(self, message: str, session_id: str):
'''
切换账号
'''
l = message.split(" ")
rev_chatgpt = self.provider.get_revchatgpt()
if len(l) == 1:
ret = "当前账号:\n"
index = 0
curr_ = None
for revstat in rev_chatgpt:
index += 1
ret += f"[{index}]. {revstat['id']}\n"
# if session_id in revstat['user']:
# curr_ = revstat['id']
for user in revstat['user']:
if session_id == user['id']:
curr_ = revstat['id']
break
if curr_ is None:
ret += "当前您未选择账号。输入/switch <账号序号>切换账号。"
else:
ret += f"当前您选择的账号为:{curr_}。输入/switch <账号序号>切换账号。"
return True, ret, "switch"
elif len(l) == 2:
try:
index = int(l[1])
if index > len(self.provider.rev_chatgpt) or index < 1:
return True, "账号序号不合法。", "switch"
else:
# pop
for revstat in self.provider.rev_chatgpt:
if session_id in revstat['user']:
revstat['user'].remove(session_id)
# append
self.provider.rev_chatgpt[index - 1]['user'].append(session_id)
return True, f"切换账号成功。当前账号为:{self.provider.rev_chatgpt[index - 1]['id']}", "switch"
except BaseException:
return True, "账号序号不合法。", "switch"
else:
return True, "参数过多。", "switch"
async def help(self):
commands = super().general_commands()
commands['set'] = '设置人格'
return True, await super().help_messager(commands, self.platform, self.global_object.cached_plugins), "help"

View File

@@ -5,14 +5,16 @@ from nakuru import (
FriendMessage FriendMessage
) )
import botpy.message import botpy.message
-from cores.qqbot.types import MessageType, AstrBotMessage, MessageMember
+from cores.astrbot.types import MessageType, AstrBotMessage, MessageMember
from typing import List, Union from typing import List, Union
import time import time
# QQ官方消息类型转换 # QQ官方消息类型转换
def qq_official_message_parse(message: List[BaseMessageComponent]): def qq_official_message_parse(message: List[BaseMessageComponent]):
plain_text = "" plain_text = ""
image_path = None # only one img supported image_path = None # only one img supported
for i in message: for i in message:
if isinstance(i, Plain): if isinstance(i, Plain):
plain_text += i.text plain_text += i.text
@@ -24,6 +26,8 @@ def qq_official_message_parse(message: List[BaseMessageComponent]):
return plain_text, image_path return plain_text, image_path
# QQ官方消息类型 2 AstrBotMessage # QQ官方消息类型 2 AstrBotMessage
def qq_official_message_parse_rev(message: Union[botpy.message.Message, botpy.message.GroupMessage], def qq_official_message_parse_rev(message: Union[botpy.message.Message, botpy.message.GroupMessage],
message_type: MessageType) -> AstrBotMessage: message_type: MessageType) -> AstrBotMessage:
abm = AstrBotMessage() abm = AstrBotMessage()
@@ -60,7 +64,8 @@ def qq_official_message_parse_rev(message: Union[botpy.message.Message, botpy.me
except: except:
abm.self_id = "" abm.self_id = ""
plain_content = message.content.replace("<@!"+str(abm.self_id)+">", "").strip()
msg.append(Plain(plain_content))
if message.attachments: if message.attachments:
for i in message.attachments: for i in message.attachments:
@@ -80,6 +85,7 @@ def qq_official_message_parse_rev(message: Union[botpy.message.Message, botpy.me
raise ValueError(f"Unknown message type: {message_type}") raise ValueError(f"Unknown message type: {message_type}")
return abm return abm
def nakuru_message_parse_rev(message: Union[GuildMessage, GroupMessage, FriendMessage]) -> AstrBotMessage: def nakuru_message_parse_rev(message: Union[GuildMessage, GroupMessage, FriendMessage]) -> AstrBotMessage:
abm = AstrBotMessage() abm = AstrBotMessage()
abm.type = MessageType(message.type) abm.type = MessageType(message.type)
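For orientation, a hedged sketch of how an adapter is expected to use these two converters, based only on the calls visible in this compare (the surrounding handler is illustrative, not the project's exact code):
# inside a platform adapter callback (sketch)
abm = qq_official_message_parse_rev(message, MessageType.FRIEND_MESSAGE)  # raw botpy message -> AstrBotMessage
plain_text, image_path = qq_official_message_parse(abm.message)           # component list -> text + first image path
await self.platform.handle_msg(abm)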

View File

@@ -1,6 +1,7 @@
from dataclasses import dataclass from dataclasses import dataclass
from typing import Union, Optional from typing import Union, Optional
@dataclass @dataclass
class MessageResult(): class MessageResult():
result_message: Union[str, list] result_message: Union[str, list]

View File

@@ -15,7 +15,11 @@ import time
from ._platfrom import Platform from ._platfrom import Platform
from ._message_parse import nakuru_message_parse_rev from ._message_parse import nakuru_message_parse_rev
-from cores.qqbot.types import MessageType, AstrBotMessage, MessageMember
+from cores.astrbot.types import MessageType, AstrBotMessage, MessageMember
from SparkleLogging.utils.core import LogManager
from logging import Logger
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
class FakeSource: class FakeSource:
@@ -34,12 +38,11 @@ class QQGOCQ(Platform):
self.waiting = {} self.waiting = {}
self.cc = CmdConfig() self.cc = CmdConfig()
self.cfg = cfg self.cfg = cfg
self.logger: gu.Logger = global_object.logger
try: try:
self.nick_qq = cfg['nick_qq'] self.nick_qq = cfg['nick_qq']
except: except:
self.nick_qq = ["ai","!",""] self.nick_qq = ["ai", "!", ""]
nick_qq = self.nick_qq nick_qq = self.nick_qq
if isinstance(nick_qq, str): if isinstance(nick_qq, str):
nick_qq = [nick_qq] nick_qq = [nick_qq]
@@ -81,7 +84,7 @@ class QQGOCQ(Platform):
async def _(app: CQHTTP, source: GroupMemberIncrease): async def _(app: CQHTTP, source: GroupMemberIncrease):
if self.cc.get("gocq_react_group_increase", True): if self.cc.get("gocq_react_group_increase", True):
await app.sendGroupMessage(source.group_id, [ await app.sendGroupMessage(source.group_id, [
Plain(text=self.announcement)
]) ])
# @gocq_app.receiver("Notify") # @gocq_app.receiver("Notify")
@@ -106,9 +109,11 @@ class QQGOCQ(Platform):
self.client.run() self.client.run()
async def handle_msg(self, message: AstrBotMessage):
-self.logger.log(f"{message.sender.nickname}/{message.sender.user_id} -> {self.parse_message_outline(message)}", tag="QQ_GOCQ")
+logger.info(f"{message.sender.nickname}/{message.sender.user_id} -> {self.parse_message_outline(message)}")
assert isinstance(message.raw_message, (GroupMessage, FriendMessage, GuildMessage))
is_group = message.type != MessageType.FRIEND_MESSAGE is_group = message.type != MessageType.FRIEND_MESSAGE
# 判断是否响应消息 # 判断是否响应消息
@@ -118,14 +123,14 @@ class QQGOCQ(Platform):
else: else:
for i in message.message: for i in message.message:
if isinstance(i, At): if isinstance(i, At):
if message.type == "GuildMessage": if message.type.value == "GuildMessage":
if i.qq == message.raw_message.user_id or i.qq == message.raw_message.self_tiny_id: if str(i.qq) == str(message.raw_message.user_id) or str(i.qq) == str(message.raw_message.self_tiny_id):
resp = True resp = True
if message.type == "FriendMessage": if message.type.value == "FriendMessage":
if i.qq == message.self_id: if str(i.qq) == str(message.self_id):
resp = True resp = True
if message.type == "GroupMessage": if message.type.value == "GroupMessage":
if i.qq == message.self_id: if str(i.qq) == str(message.self_id):
resp = True resp = True
elif isinstance(i, Plain): elif isinstance(i, Plain):
for nick in self.nick_qq: for nick in self.nick_qq:
@@ -133,7 +138,8 @@ class QQGOCQ(Platform):
resp = True resp = True
break break
if not resp: return
# 解析 session_id # 解析 session_id
if self.unique_session or not is_group: if self.unique_session or not is_group:
@@ -150,7 +156,7 @@ class QQGOCQ(Platform):
# 解析 role # 解析 role
sender_id = str(message.raw_message.user_id) sender_id = str(message.raw_message.user_id)
if sender_id == self.cc.get('admin_qq', '') or \
        sender_id in self.cc.get('other_admins', []):
role = 'admin' role = 'admin'
else: else:
role = 'member' role = 'member'
@@ -185,7 +191,8 @@ class QQGOCQ(Platform):
res = result_message res = result_message
self.logger.log(f"{source.user_id} <- {self.parse_message_outline(res)}", tag="QQ_GOCQ") logger.info(
f"{source.user_id} <- {self.parse_message_outline(res)}")
if isinstance(source, int): if isinstance(source, int):
source = FakeSource("GroupMessage", source) source = FakeSource("GroupMessage", source)
@@ -241,7 +248,7 @@ class QQGOCQ(Platform):
node.name = f"bot" node.name = f"bot"
node.time = int(time.time()) node.time = int(time.time())
# print(node) # print(node)
nodes = [node]
await self.client.sendGroupForwardMessage(source.group_id, nodes) await self.client.sendGroupForwardMessage(source.group_id, nodes)
return return
await self.client.sendGroupMessage(source.group_id, res) await self.client.sendGroupMessage(source.group_id, res)
@@ -258,8 +265,8 @@ class QQGOCQ(Platform):
raise e raise e
async def send(self, async def send(self,
to, to,
res): res):
''' '''
同 send_msg() 同 send_msg()
''' '''
@@ -311,4 +318,3 @@ class QQGOCQ(Platform):
return ret return ret
except BaseException as e: except BaseException as e:
raise e raise e

View File

@@ -11,13 +11,17 @@ from botpy.types.message import Reference
from botpy import Client from botpy import Client
import time import time
from ._platfrom import Platform from ._platfrom import Platform
from ._message_parse import (
    qq_official_message_parse_rev,
    qq_official_message_parse
)
-from cores.qqbot.types import MessageType, AstrBotMessage, MessageMember
+from cores.astrbot.types import MessageType, AstrBotMessage, MessageMember
from typing import Union, List from typing import Union, List
from nakuru.entities.components import BaseMessageComponent from nakuru.entities.components import BaseMessageComponent
from SparkleLogging.utils.core import LogManager
from logging import Logger
logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
# QQ 机器人官方框架 # QQ 机器人官方框架
class botClient(Client): class botClient(Client):
@@ -37,9 +41,11 @@ class botClient(Client):
# 收到私聊消息 # 收到私聊消息
async def on_direct_message_create(self, message: botpy.message.DirectMessage): async def on_direct_message_create(self, message: botpy.message.DirectMessage):
# 转换层 # 转换层
abm = qq_official_message_parse_rev(message, MessageType.FRIEND_MESSAGE)
await self.platform.handle_msg(abm)
class QQOfficial(Platform): class QQOfficial(Platform):
def __init__(self, cfg: dict, message_handler: callable, global_object) -> None: def __init__(self, cfg: dict, message_handler: callable, global_object) -> None:
@@ -55,27 +61,24 @@ class QQOfficial(Platform):
self.token = cfg['qqbot']['token']
self.secret = cfg['qqbot_secret']
self.unique_session = cfg['uniqueSessionMode']
-self.logger: gu.Logger = global_object.logger
-try:
-    self.intents = botpy.Intents(
-        public_messages=True,
-        public_guild_messages=True,
-        direct_message=cfg['direct_message_mode']
-    )
-    self.client = botClient(
-        intents=self.intents,
-        bot_log=False
-    )
-except BaseException:
-    self.intents = botpy.Intents(
-        public_guild_messages=True,
-        direct_message=cfg['direct_message_mode']
-    )
+qq_group = cfg['qqofficial_enable_group_message']
+if qq_group:
+    self.intents = botpy.Intents(
+        public_messages=True,
+        public_guild_messages=True,
+        direct_message=cfg['direct_message_mode']
+    )
+else:
+    self.intents = botpy.Intents(
+        public_guild_messages=True,
+        direct_message=cfg['direct_message_mode']
+    )
self.client = botClient(
    intents=self.intents,
    bot_log=False
)
self.client.set_platform(self)
def run(self): def run(self):
@@ -97,11 +100,13 @@ class QQOfficial(Platform):
) )
async def handle_msg(self, message: AstrBotMessage): async def handle_msg(self, message: AstrBotMessage):
assert isinstance(message.raw_message, (botpy.message.Message, botpy.message.GroupMessage, botpy.message.DirectMessage))
is_group = message.type != MessageType.FRIEND_MESSAGE
_t = "/私聊" if not is_group else ""
-self.logger.log(f"{message.sender.nickname}({message.sender.user_id}{_t}) -> {self.parse_message_outline(message)}", tag="QQ_OFFICIAL")
+logger.info(f"{message.sender.nickname}({message.sender.user_id}{_t}) -> {self.parse_message_outline(message)}")
# 解析出 session_id # 解析出 session_id
if self.unique_session or not is_group: if self.unique_session or not is_group:
@@ -118,7 +123,7 @@ class QQOfficial(Platform):
# 解析出 role # 解析出 role
sender_id = message.sender.user_id sender_id = message.sender.user_id
if sender_id == self.cfg['admin_qqchan'] or \
        sender_id in self.cfg['other_admins']:
role = 'admin' role = 'admin'
else: else:
role = 'member' role = 'member'
@@ -142,8 +147,8 @@ class QQOfficial(Platform):
self.waiting[session_id] = message self.waiting[session_id] = message
async def reply_msg(self,
                    message: Union[botpy.message.Message, botpy.message.GroupMessage, botpy.message.DirectMessage, AstrBotMessage],
                    res: Union[str, list]):
''' '''
回复频道消息 回复频道消息
''' '''
@@ -151,8 +156,10 @@ class QQOfficial(Platform):
source = message.raw_message source = message.raw_message
else: else:
source = message source = message
assert isinstance(source, (botpy.message.Message, botpy.message.GroupMessage, botpy.message.DirectMessage))
-self.logger.log(f"{message.sender.nickname}({message.sender.user_id}) <- {self.parse_message_outline(res)}", tag="QQ_OFFICIAL")
+logger.info(f"{message.sender.nickname}({message.sender.user_id}) <- {self.parse_message_outline(res)}")
plain_text = '' plain_text = ''
image_path = '' image_path = ''
@@ -170,7 +177,8 @@ class QQOfficial(Platform):
if image_path.startswith("http"): if image_path.startswith("http"):
plain_text += "\n\n" + "![](" + image_path + ")" plain_text += "\n\n" + "![](" + image_path + ")"
else: else:
plain_text += "\n\n" + "![](file:///" + image_path + ")" plain_text += "\n\n" + \
"![](file:///" + image_path + ")"
image_path = gu.create_markdown_image("".join(plain_text)) image_path = gu.create_markdown_image("".join(plain_text))
plain_text = "" plain_text = ""
@@ -184,8 +192,9 @@ class QQOfficial(Platform):
image = PILImage.open(io.BytesIO(await response.read())) image = PILImage.open(io.BytesIO(await response.read()))
image_path = gu.save_temp_img(image) image_path = gu.save_temp_img(image)
if source is not None and image_path == '': # file_image与message_reference不能同时传入 if source is not None and image_path == '': # file_image与message_reference不能同时传入
msg_ref = Reference(message_id=source.id, ignore_get_message_error=False)
# 到这里,我们得到了 plain_textimage_pathmsg_ref # 到这里,我们得到了 plain_textimage_pathmsg_ref
data = { data = {
@@ -229,7 +238,8 @@ class QQOfficial(Platform):
data['content'] = str.join(" ", plain_text) data['content'] = str.join(" ", plain_text)
await self._send_wrapper(**data) await self._send_wrapper(**data)
except BaseException as e: except BaseException as e:
plain_text = re.sub(r'(https|http)?:\/\/(\w|\.|\/|\?|\=|\&|\%)*\b', '[被隐藏的链接]', str(e), flags=re.MULTILINE)
plain_text = plain_text.replace(".", "·") plain_text = plain_text.replace(".", "·")
data['content'] = plain_text data['content'] = plain_text
await self._send_wrapper(**data) await self._send_wrapper(**data)
@@ -250,27 +260,29 @@ class QQOfficial(Platform):
elif 'channel_id' in kwargs: elif 'channel_id' in kwargs:
# 频道消息 # 频道消息
if 'file_image' in kwargs: if 'file_image' in kwargs:
kwargs['file_image'] = kwargs['file_image'].replace("file://", "")
await self.client.api.post_message(**kwargs) await self.client.api.post_message(**kwargs)
else: else:
# 频道私聊消息 # 频道私聊消息
if 'file_image' in kwargs: if 'file_image' in kwargs:
kwargs['file_image'] = kwargs['file_image'].replace("file://", "")
await self.client.api.post_dms(**kwargs) await self.client.api.post_dms(**kwargs)
async def send_msg(self,
                   message_obj: Union[botpy.message.Message, botpy.message.GroupMessage, botpy.message.DirectMessage, AstrBotMessage],
                   message_chain: List[BaseMessageComponent],
                   ):
''' '''
发送消息。目前只支持被动回复消息(即拥有一个 botpy Message 类型的 message_obj 传入) 发送消息。目前只支持被动回复消息(即拥有一个 botpy Message 类型的 message_obj 传入)
''' '''
await self.reply_msg(message_obj, message_chain) await self.reply_msg(message_obj, message_chain)
async def send(self,
               message_obj: Union[botpy.message.Message, botpy.message.GroupMessage, botpy.message.DirectMessage, AstrBotMessage],
               message_chain: List[BaseMessageComponent],
               ):
''' '''
发送消息。目前只支持被动回复消息(即拥有一个 botpy Message 类型的 message_obj 传入) 发送消息。目前只支持被动回复消息(即拥有一个 botpy Message 类型的 message_obj 传入)
''' '''
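The __init__ hunk above swaps the old try/except intent probing for an explicit config switch. A condensed sketch of the resulting behaviour, using only names that appear in the hunk:
qq_group = cfg['qqofficial_enable_group_message']
if qq_group:
    # Q群 (group) messages enabled: also subscribe to public group events
    intents = botpy.Intents(public_messages=True, public_guild_messages=True, direct_message=cfg['direct_message_mode'])
else:
    intents = botpy.Intents(public_guild_messages=True, direct_message=cfg['direct_message_mode'])
client = botClient(intents=intents, bot_log=False)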

View File

@@ -14,22 +14,25 @@ from cores.database.conn import dbConn
from model.provider.provider import Provider from model.provider.provider import Provider
from util import general_utils as gu from util import general_utils as gu
from util.cmd_config import CmdConfig from util.cmd_config import CmdConfig
-from util.general_utils import Logger
+from SparkleLogging.utils.core import LogManager
+from logging import Logger
+logger: Logger = LogManager.GetLogger(log_name='astrbot-core')
abs_path = os.path.dirname(os.path.realpath(sys.argv[0])) + '/' abs_path = os.path.dirname(os.path.realpath(sys.argv[0])) + '/'
class ProviderOpenAIOfficial(Provider): class ProviderOpenAIOfficial(Provider):
def __init__(self, cfg): def __init__(self, cfg):
self.cc = CmdConfig() self.cc = CmdConfig()
self.logger = Logger()
self.key_list = [] self.key_list = []
# 如果 cfg['key'] 中有长度为 1 的字符串,那么是格式错误,直接报错 # 如果 cfg['key'] 中有长度为 1 的字符串,那么是格式错误,直接报错
for key in cfg['key']: for key in cfg['key']:
if len(key) == 1: if len(key) == 1:
raise BaseException("检查到了长度为 1 的Key。配置文件中的 openai.key 处的格式错误 (符号 - 的后面要加空格)。")
if cfg['key'] != '' and cfg['key'] != None: if cfg['key'] != '' and cfg['key'] != None:
self.key_list = cfg['key'] self.key_list = cfg['key']
if len(self.key_list) == 0: if len(self.key_list) == 0:
@@ -42,7 +45,7 @@ class ProviderOpenAIOfficial(Provider):
self.api_base = None self.api_base = None
if 'api_base' in cfg and cfg['api_base'] != 'none' and cfg['api_base'] != '': if 'api_base' in cfg and cfg['api_base'] != 'none' and cfg['api_base'] != '':
self.api_base = cfg['api_base'] self.api_base = cfg['api_base']
self.logger.log(f"设置 api_base 为: {self.api_base}", tag="OpenAI") logger.info(f"设置 api_base 为: {self.api_base}")
# 创建 OpenAI Client # 创建 OpenAI Client
self.client = AsyncOpenAI( self.client = AsyncOpenAI(
@@ -51,7 +54,6 @@ class ProviderOpenAIOfficial(Provider):
) )
self.openai_model_configs: dict = cfg['chatGPTConfigs'] self.openai_model_configs: dict = cfg['chatGPTConfigs']
self.logger.log(f'加载 OpenAI Chat Configs: {self.openai_model_configs}', tag="OpenAI")
self.openai_configs = cfg self.openai_configs = cfg
# 会话缓存 # 会话缓存
self.session_dict = {} self.session_dict = {}
@@ -67,9 +69,9 @@ class ProviderOpenAIOfficial(Provider):
db1 = dbConn() db1 = dbConn()
for session in db1.get_all_session(): for session in db1.get_all_session():
self.session_dict[session[0]] = json.loads(session[1])['data'] self.session_dict[session[0]] = json.loads(session[1])['data']
self.logger.log("读取历史记录成功。", tag="OpenAI") logger.info("读取历史记录成功。")
except BaseException as e: except BaseException as e:
self.logger.log("读取历史记录失败,但不影响使用。", level=gu.LEVEL_ERROR, tag="OpenAI") logger.info("读取历史记录失败,但不影响使用。")
# 创建转储定时器线程 # 创建转储定时器线程
threading.Thread(target=self.dump_history, daemon=True).start() threading.Thread(target=self.dump_history, daemon=True).start()
@@ -117,11 +119,11 @@ class ProviderOpenAIOfficial(Provider):
self.session_dict[session_id].append(new_record) self.session_dict[session_id].append(new_record)
async def text_chat(self, prompt,
                    session_id=None,
                    image_url=None,
                    function_call=None,
                    extra_conf: dict = None,
                    default_personality: dict = None):
if session_id is None: if session_id is None:
session_id = "unknown" session_id = "unknown"
if "unknown" in self.session_dict: if "unknown" in self.session_dict:
@@ -138,12 +140,14 @@ class ProviderOpenAIOfficial(Provider):
# 使用 tictoken 截断消息 # 使用 tictoken 截断消息
_encoded_prompt = self.enc.encode(prompt) _encoded_prompt = self.enc.encode(prompt)
if self.openai_model_configs['max_tokens'] < len(_encoded_prompt): if self.openai_model_configs['max_tokens'] < len(_encoded_prompt):
prompt = self.enc.decode(_encoded_prompt[:int(self.openai_model_configs['max_tokens']*0.80)])
-self.logger.log(f"注意,有一部分 prompt 文本由于超出 token 限制而被截断。", level=gu.LEVEL_WARNING, tag="OpenAI")
+logger.info(f"注意,有一部分 prompt 文本由于超出 token 限制而被截断。")
cache_data_list, new_record, req = self.wrap(prompt, session_id, image_url)
-self.logger.log(f"cache: {str(cache_data_list)}", level=gu.LEVEL_DEBUG, tag="OpenAI")
-self.logger.log(f"request: {str(req)}", level=gu.LEVEL_DEBUG, tag="OpenAI")
+logger.debug(f"cache: {str(cache_data_list)}")
+logger.debug(f"request: {str(req)}")
retry = 0 retry = 0
response = None response = None
err = '' err = ''
@@ -177,7 +181,7 @@ class ProviderOpenAIOfficial(Provider):
else: else:
response = await self.client.chat.completions.create( response = await self.client.chat.completions.create(
messages=req, messages=req,
tools=function_call,
**conf **conf
) )
break break
@@ -186,32 +190,36 @@ class ProviderOpenAIOfficial(Provider):
if 'Invalid content type. image_url is only supported by certain models.' in str(e): if 'Invalid content type. image_url is only supported by certain models.' in str(e):
raise e raise e
if 'You exceeded' in str(e) or 'Billing hard limit has been reached' in str(e) or 'No API key provided' in str(e) or 'Incorrect API key provided' in str(e): if 'You exceeded' in str(e) or 'Billing hard limit has been reached' in str(e) or 'No API key provided' in str(e) or 'Incorrect API key provided' in str(e):
self.logger.log("当前 Key 已超额或异常, 正在切换", level=gu.LEVEL_WARNING, tag="OpenAI") logger.info("当前 Key 已超额或异常, 正在切换",
)
self.key_stat[self.client.api_key]['exceed'] = True self.key_stat[self.client.api_key]['exceed'] = True
is_switched = self.handle_switch_key() is_switched = self.handle_switch_key()
if not is_switched: if not is_switched:
raise e raise e
retry -= 1 retry -= 1
elif 'maximum context length' in str(e): elif 'maximum context length' in str(e):
self.logger.log("token 超限, 清空对应缓存,并进行消息截断", tag="OpenAI") logger.info("token 超限, 清空对应缓存,并进行消息截断")
self.session_dict[session_id] = [] self.session_dict[session_id] = []
prompt = prompt[:int(len(prompt)*truncate_rate)] prompt = prompt[:int(len(prompt)*truncate_rate)]
truncate_rate -= 0.05 truncate_rate -= 0.05
cache_data_list, new_record, req = self.wrap(prompt, session_id)
elif 'Limit: 3 / min. Please try again in 20s.' in str(e) or "OpenAI response error" in str(e): elif 'Limit: 3 / min. Please try again in 20s.' in str(e) or "OpenAI response error" in str(e):
time.sleep(30) time.sleep(30)
continue continue
else: else:
self.logger.log(str(e), level=gu.LEVEL_ERROR, tag="OpenAI") logger.error(str(e))
time.sleep(2) time.sleep(2)
err = str(e) err = str(e)
retry += 1 retry += 1
if retry >= 10: if retry >= 10:
self.logger.log(r"如果报错, 且您的机器在中国大陆内, 请确保您的电脑已经设置好代理软件(梯子), 并在配置文件设置了系统代理地址。详见 https://github.com/Soulter/QQChannelChatGPT/wiki", tag="OpenAI") logger.warning(
r"如果报错, 且您的机器在中国大陆内, 请确保您的电脑已经设置好代理软件(梯子), 并在配置文件设置了系统代理地址。详见 https://github.com/Soulter/QQChannelChatGPT/wiki")
raise BaseException("连接出错: "+str(err)) raise BaseException("连接出错: "+str(err))
assert isinstance(response, ChatCompletion) assert isinstance(response, ChatCompletion)
self.logger.log(f"OPENAI RESPONSE: {response.usage}", level=gu.LEVEL_DEBUG, tag="OpenAI") logger.debug(
f"OPENAI RESPONSE: {response.usage}")
# 结果分类 # 结果分类
choice = response.choices[0] choice = response.choices[0]
@@ -248,7 +256,8 @@ class ProviderOpenAIOfficial(Provider):
} }
new_record['usage_tokens'] = current_usage_tokens new_record['usage_tokens'] = current_usage_tokens
if len(cache_data_list) > 0: if len(cache_data_list) > 0:
new_record['single_tokens'] = current_usage_tokens - int(cache_data_list[-1]['usage_tokens'])
else: else:
new_record['single_tokens'] = current_usage_tokens new_record['single_tokens'] = current_usage_tokens
@@ -258,7 +267,7 @@ class ProviderOpenAIOfficial(Provider):
return chatgpt_res return chatgpt_res
async def image_chat(self, prompt, img_num=1, img_size="1024x1024"):
retry = 0 retry = 0
image_url = '' image_url = ''
@@ -275,16 +284,16 @@ class ProviderOpenAIOfficial(Provider):
image_url.append(response.data[i].url) image_url.append(response.data[i].url)
break break
except Exception as e: except Exception as e:
-self.logger.log(str(e), level=gu.LEVEL_ERROR)
+logger.warning(str(e))
if 'You exceeded' in str(e) or 'Billing hard limit has been reached' in str( if 'You exceeded' in str(e) or 'Billing hard limit has been reached' in str(
e) or 'No API key provided' in str(e) or 'Incorrect API key provided' in str(e): e) or 'No API key provided' in str(e) or 'Incorrect API key provided' in str(e):
self.logger.log("当前 Key 已超额或者不正常, 正在切换", level=gu.LEVEL_WARNING, tag="OpenAI") logger.warning("当前 Key 已超额或者不正常, 正在切换")
self.key_stat[self.client.api_key]['exceed'] = True self.key_stat[self.client.api_key]['exceed'] = True
is_switched = self.handle_switch_key() is_switched = self.handle_switch_key()
if not is_switched: if not is_switched:
raise e raise e
elif 'Your request was rejected as a result of our safety system.' in str(e): elif 'Your request was rejected as a result of our safety system.' in str(e):
self.logger.log("您的请求被 OpenAI 安全系统拒绝, 请稍后再试", level=gu.LEVEL_WARNING, tag="OpenAI") logger.warning("您的请求被 OpenAI 安全系统拒绝, 请稍后再试")
raise e raise e
else: else:
retry += 1 retry += 1
@@ -293,7 +302,7 @@ class ProviderOpenAIOfficial(Provider):
return image_url return image_url
async def forget(self, session_id=None) -> bool:
if session_id is None: if session_id is None:
return False return False
self.session_dict[session_id] = [] self.session_dict[session_id] = []
@@ -313,14 +322,16 @@ class ProviderOpenAIOfficial(Provider):
page_end = len(cache_data_list) page_end = len(cache_data_list)
cache_data_list = cache_data_list[page_begin:page_end] cache_data_list = cache_data_list[page_begin:page_end]
for item in cache_data_list:
    prompts += str(item['user']['role']) + ":\n" + str(item['user']['content']) + "\n"
    prompts += str(item['AI']['role']) + ":\n" + str(item['AI']['content']) + "\n"
if divide: if divide:
prompts += "----------\n" prompts += "----------\n"
return prompts return prompts
def wrap(self, prompt, session_id, image_url=None):
if image_url is not None: if image_url is not None:
prompt = [ prompt = [
{ {
@@ -361,10 +372,12 @@ class ProviderOpenAIOfficial(Provider):
continue continue
is_all_exceed = False is_all_exceed = False
self.client.api_key = key self.client.api_key = key
self.logger.log(f"切换到 Key: {key}(已使用 token: {self.key_stat[key]['used']})", level=gu.LEVEL_INFO, tag="OpenAI") logger.warning(
f"切换到 Key: {key}(已使用 token: {self.key_stat[key]['used']})")
break break
if is_all_exceed: if is_all_exceed:
self.logger.log("所有 Key 已超额", level=gu.LEVEL_CRITICAL, tag="OpenAI") logger.warning(
"所有 Key 已超额")
return False return False
return True return True
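Across these files the per-instance self.logger (the old util.general_utils.Logger) is replaced by one module-level logger. A minimal sketch of the new pattern, reusing only the calls that appear in the hunks above (the log messages are examples):
from SparkleLogging.utils.core import LogManager
from logging import Logger

logger: Logger = LogManager.GetLogger(log_name='astrbot-core')

logger.debug("request: ...")       # was self.logger.log(..., level=gu.LEVEL_DEBUG, tag="OpenAI")
logger.info("读取历史记录成功。")      # was self.logger.log(..., tag="OpenAI")
logger.warning("所有 Key 已超额")     # was self.logger.log(..., level=gu.LEVEL_CRITICAL, tag="OpenAI")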

View File

@@ -28,7 +28,7 @@ class Provider:
''' '''
raise NotImplementedError raise NotImplementedError
async def forget(self, session_id=None) -> bool:
''' '''
重置会话 重置会话
''' '''

View File

@@ -1,224 +0,0 @@
from revChatGPT.V1 import Chatbot
from revChatGPT import typings
from model.provider.provider import Provider
from util import general_utils as gu
from util import cmd_config as cc
import time
class ProviderRevChatGPT(Provider):
def __init__(self, config, base_url = None):
if base_url == "":
base_url = None
self.rev_chatgpt: list[dict] = []
self.cc = cc.CmdConfig()
for i in range(0, len(config['account'])):
try:
gu.log(f"创建逆向ChatGPT负载{str(i+1)}中...", level=gu.LEVEL_INFO, tag="RevChatGPT")
if isinstance(config['account'][i], str):
# 默认是 access_token
rev_account_config = {
'access_token': config['account'][i],
}
else:
if 'password' in config['account'][i]:
gu.log(f"创建逆向ChatGPT负载{str(i+1)}失败: 已不支持账号密码登录请使用access_token方式登录。", level=gu.LEVEL_ERROR, tag="RevChatGPT")
continue
rev_account_config = {
'access_token': config['account'][i]['access_token'],
}
if self.cc.get("rev_chatgpt_model") != "":
rev_account_config['model'] = self.cc.get("rev_chatgpt_model")
if len(self.cc.get("rev_chatgpt_plugin_ids")) > 0:
rev_account_config['plugin_ids'] = self.cc.get("rev_chatgpt_plugin_ids")
if self.cc.get("rev_chatgpt_PUID") != "":
rev_account_config['PUID'] = self.cc.get("rev_chatgpt_PUID")
if len(self.cc.get("rev_chatgpt_unverified_plugin_domains")) > 0:
rev_account_config['unverified_plugin_domains'] = self.cc.get("rev_chatgpt_unverified_plugin_domains")
cb = Chatbot(config=rev_account_config, base_url=base_url)
# cb.captcha_solver = self.__captcha_solver
# 后八位c
g_id = rev_account_config['access_token'][-8:]
revstat = {
'id': g_id,
'obj': cb,
'busy': False,
'user': []
}
self.rev_chatgpt.append(revstat)
except BaseException as e:
gu.log(f"创建逆向ChatGPT负载{str(i+1)}失败: {str(e)}", level=gu.LEVEL_ERROR, tag="RevChatGPT")
def forget(self, session_id = None) -> bool:
for i in self.rev_chatgpt:
for user in i['user']:
if session_id == user['id']:
try:
i['obj'].reset_chat()
return True
except BaseException as e:
gu.log(f"重置RevChatGPT失败。原因: {str(e)}", level=gu.LEVEL_ERROR, tag="RevChatGPT")
return False
return False
def get_revchatgpt(self) -> list:
return self.rev_chatgpt
def request_text(self, prompt: str, bot) -> str:
resp = ''
err_count = 0
retry_count = 5
while err_count < retry_count:
try:
for data in bot.ask(prompt):
resp = data["message"]
break
except typings.Error as e:
if e.code == typings.ErrorType.INVALID_ACCESS_TOKEN_ERROR:
raise e
if e.code == typings.ErrorType.EXPIRED_ACCESS_TOKEN_ERROR:
raise e
if e.code == typings.ErrorType.PROHIBITED_CONCURRENT_QUERY_ERROR:
raise e
if "Your authentication token has expired. Please try signing in again." in str(e):
raise e
if "The message you submitted was too long" in str(e):
raise e
if "You've reached our limit of messages per hour." in str(e):
raise e
if "Rate limited by proxy" in str(e):
gu.log(f"触发请求频率限制, 60秒后自动重试。", level=gu.LEVEL_WARNING, tag="RevChatGPT")
time.sleep(60)
err_count += 1
gu.log(f"请求异常: {str(e)},正在重试。({str(err_count)})", level=gu.LEVEL_WARNING, tag="RevChatGPT")
if err_count >= retry_count:
raise e
except BaseException as e:
err_count += 1
gu.log(f"请求异常: {str(e)},正在重试。({str(err_count)})", level=gu.LEVEL_WARNING, tag="RevChatGPT")
if err_count >= retry_count:
raise e
if resp == '':
resp = "RevChatGPT请求异常。"
# print("[RevChatGPT] "+str(resp))
return resp
def text_chat(self, prompt,
session_id = None,
image_url = None,
function_call=None,
extra_conf: dict = None,
default_personality: dict = None) -> str:
# 选择一个人少的账号。
selected_revstat = None
min_revstat = None
min_ = None
new_user = False
conversation_id = ''
parent_id = ''
for revstat in self.rev_chatgpt:
for user in revstat['user']:
if session_id == user['id']:
selected_revstat = revstat
conversation_id = user['conversation_id']
parent_id = user['parent_id']
break
if min_ is None:
min_ = len(revstat['user'])
min_revstat = revstat
elif len(revstat['user']) < min_:
min_ = len(revstat['user'])
min_revstat = revstat
# if session_id in revstat['user']:
# selected_revstat = revstat
# break
if selected_revstat is None:
selected_revstat = min_revstat
selected_revstat['user'].append({
'id': session_id,
'conversation_id': '',
'parent_id': ''
})
new_user = True
gu.log(f"选择账号{str(selected_revstat)}", tag="RevChatGPT", level=gu.LEVEL_DEBUG)
while selected_revstat['busy']:
gu.log(f"账号忙碌,等待中...", tag="RevChatGPT", level=gu.LEVEL_DEBUG)
time.sleep(1)
selected_revstat['busy'] = True
if not new_user:
# 非新用户,则使用其专用的会话
selected_revstat['obj'].conversation_id = conversation_id
selected_revstat['obj'].parent_id = parent_id
else:
# 新用户,则使用新的会话
selected_revstat['obj'].reset_chat()
res = ''
err_msg = ''
err_cnt = 0
while err_cnt < 15:
try:
res = self.request_text(prompt, selected_revstat['obj'])
selected_revstat['busy'] = False
# 记录新用户的会话
if new_user:
i = 0
for user in selected_revstat['user']:
if user['id'] == session_id:
selected_revstat['user'][i]['conversation_id'] = selected_revstat['obj'].conversation_id
selected_revstat['user'][i]['parent_id'] = selected_revstat['obj'].parent_id
break
i += 1
return res.strip()
except BaseException as e:
if "Your authentication token has expired. Please try signing in again." in str(e):
raise Exception(f"此账号(access_token后8位为{selected_revstat['id']})的access_token已过期请重新获取或者切换账号。")
if "The message you submitted was too long" in str(e):
raise Exception("发送的消息太长,请分段发送。")
if "You've reached our limit of messages per hour." in str(e):
raise Exception("触发RevChatGPT请求频率限制。请1小时后再试或者切换账号。")
gu.log(f"请求异常: {str(e)}", level=gu.LEVEL_WARNING, tag="RevChatGPT")
err_cnt += 1
time.sleep(3)
raise Exception(f'回复失败。原因:{err_msg}。如果您设置了多个账号,可以使用/switch指令切换账号。输入/switch查看详情。')
# while self.is_all_busy():
# time.sleep(1)
# res = ''
# err_msg = ''
# cursor = 0
# for revstat in self.rev_chatgpt:
# cursor += 1
# if not revstat['busy']:
# try:
# revstat['busy'] = True
# res = self.request_text(prompt, revstat['obj'])
# revstat['busy'] = False
# return res.strip()
# # todo: 细化错误管理
# except BaseException as e:
# revstat['busy'] = False
# gu.log(f"请求出现问题: {str(e)}", level=gu.LEVEL_WARNING, tag="RevChatGPT")
# err_msg += f"账号{cursor} - 错误原因: {str(e)}"
# continue
# else:
# err_msg += f"账号{cursor} - 错误原因: 忙碌"
# continue
# raise Exception(f'回复失败。错误跟踪:{err_msg}')
def is_all_busy(self) -> bool:
for revstat in self.rev_chatgpt:
if not revstat['busy']:
return False
return True

View File

@@ -4,15 +4,16 @@ requests
openai~=1.2.3
qq-botpy
chardet~=5.1.0
-Pillow~=9.4.0
+Pillow
-GitPython~=3.1.31
+GitPython
nakuru-project
beautifulsoup4
googlesearch-python
tiktoken
readability-lxml
-revChatGPT~=6.8.6
-baidu-aip~=4.16.9
+baidu-aip
websockets
flask
psutil
+lxml_html_clean
+SparkleLogging

View File

@@ -4,12 +4,14 @@ from typing import Union
cpath = "cmd_config.json" cpath = "cmd_config.json"
def check_exist(): def check_exist():
if not os.path.exists(cpath): if not os.path.exists(cpath):
with open(cpath, "w", encoding="utf-8-sig") as f: with open(cpath, "w", encoding="utf-8-sig") as f:
json.dump({}, f, indent=4, ensure_ascii=False) json.dump({}, f, indent=4, ensure_ascii=False)
f.flush() f.flush()
class CmdConfig(): class CmdConfig():
@staticmethod @staticmethod
@@ -58,11 +60,11 @@ class CmdConfig():
f.flush() f.flush()
@staticmethod @staticmethod
def init_attributes(key: Union[str, list], init_val = ""): def init_attributes(key: Union[str, list], init_val=""):
check_exist() check_exist()
conf_str = '' conf_str = ''
with open(cpath, "r", encoding="utf-8-sig") as f: with open(cpath, "r", encoding="utf-8-sig") as f:
conf_str = f.read() conf_str = f.read()
if conf_str.startswith(u'/ufeff'): if conf_str.startswith(u'/ufeff'):
conf_str = conf_str.encode('utf8')[3:].decode('utf8') conf_str = conf_str.encode('utf8')[3:].decode('utf8')
d = json.loads(conf_str) d = json.loads(conf_str)
@@ -82,16 +84,14 @@ class CmdConfig():
json.dump(d, f, indent=4, ensure_ascii=False) json.dump(d, f, indent=4, ensure_ascii=False)
f.flush() f.flush()
def init_astrbot_config_items(): def init_astrbot_config_items():
# 加载默认配置 # 加载默认配置
cc = CmdConfig() cc = CmdConfig()
cc.init_attributes("qq_forward_threshold", 200) cc.init_attributes("qq_forward_threshold", 200)
cc.init_attributes("qq_welcome", "欢迎加入本群!\n欢迎给https://github.com/Soulter/QQChannelChatGPT项目一个Star😊~\n输入help查看帮助~\n") cc.init_attributes(
"qq_welcome", "欢迎加入本群!\n欢迎给https://github.com/Soulter/QQChannelChatGPT项目一个Star😊~\n输入help查看帮助~\n")
cc.init_attributes("qq_pic_mode", False) cc.init_attributes("qq_pic_mode", False)
cc.init_attributes("rev_chatgpt_model", "")
cc.init_attributes("rev_chatgpt_plugin_ids", [])
cc.init_attributes("rev_chatgpt_PUID", "")
cc.init_attributes("rev_chatgpt_unverified_plugin_domains", [])
cc.init_attributes("gocq_host", "127.0.0.1") cc.init_attributes("gocq_host", "127.0.0.1")
cc.init_attributes("gocq_http_port", 5700) cc.init_attributes("gocq_http_port", 5700)
cc.init_attributes("gocq_websocket_port", 6700) cc.init_attributes("gocq_websocket_port", 6700)
@@ -102,6 +102,7 @@ def init_astrbot_config_items():
cc.init_attributes("other_admins", []) cc.init_attributes("other_admins", [])
cc.init_attributes("CHATGPT_BASE_URL", "") cc.init_attributes("CHATGPT_BASE_URL", "")
cc.init_attributes("qqbot_secret", "") cc.init_attributes("qqbot_secret", "")
cc.init_attributes("qqofficial_enable_group_message", False)
cc.init_attributes("admin_qq", "") cc.init_attributes("admin_qq", "")
cc.init_attributes("nick_qq", ["!", "", "ai"]) cc.init_attributes("nick_qq", ["!", "", "ai"])
cc.init_attributes("admin_qqchan", "") cc.init_attributes("admin_qqchan", "")

View File

@@ -3,6 +3,8 @@ import json
import util.general_utils as gu import util.general_utils as gu
import time import time
class FuncCallJsonFormatError(Exception): class FuncCallJsonFormatError(Exception):
def __init__(self, msg): def __init__(self, msg):
self.msg = msg self.msg = msg
@@ -10,6 +12,7 @@ class FuncCallJsonFormatError(Exception):
def __str__(self): def __str__(self):
return self.msg return self.msg
class FuncNotFoundError(Exception): class FuncNotFoundError(Exception):
def __init__(self, msg): def __init__(self, msg):
self.msg = msg self.msg = msg
@@ -17,16 +20,18 @@ class FuncNotFoundError(Exception):
def __str__(self): def __str__(self):
return self.msg return self.msg
class FuncCall(): class FuncCall():
def __init__(self, provider) -> None: def __init__(self, provider) -> None:
self.func_list = [] self.func_list = []
self.provider = provider self.provider = provider
def add_func(self, name: str = None, func_args: list = None, desc: str = None, func_obj=None) -> None:
    if name == None or func_args == None or desc == None or func_obj == None:
        raise FuncCallJsonFormatError("name, func_args, desc must be provided.")
params = { params = {
"type": "object", # hardcore here "type": "object", # hardcore here
"properties": {} "properties": {}
} }
for param in func_args: for param in func_args:
@@ -65,7 +70,7 @@ class FuncCall():
}) })
return _l return _l
def func_call(self, question, func_definition, is_task=False, tasks=None, taskindex=-1, is_summary=True, session_id=None):
funccall_prompt = """ funccall_prompt = """
我正实现function call功能该功能旨在让你变成给定的问题到给定的函数的解析器意味着你不是创造函数 我正实现function call功能该功能旨在让你变成给定的问题到给定的函数的解析器意味着你不是创造函数
@@ -120,7 +125,8 @@ class FuncCall():
res = self.provider.text_chat(prompt, session_id) res = self.provider.text_chat(prompt, session_id)
if res.find('```') != -1: if res.find('```') != -1:
res = res[res.find('```json') + 7: res.rfind('```')] res = res[res.find('```json') + 7: res.rfind('```')]
gu.log("REVGPT func_call json result", bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"]) gu.log("REVGPT func_call json result",
bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
print(res) print(res)
res = json.loads(res) res = json.loads(res)
break break
@@ -151,11 +157,13 @@ class FuncCall():
func_target = func["func_obj"] func_target = func["func_obj"]
break break
if func_target == None: if func_target == None:
raise FuncNotFoundError(f"Request function {func_name} not found.") raise FuncNotFoundError(
f"Request function {func_name} not found.")
t_res = str(func_target(**args)) t_res = str(func_target(**args))
invoke_func_res += f"{func_name} 调用结果:\n```\n{t_res}\n```\n" invoke_func_res += f"{func_name} 调用结果:\n```\n{t_res}\n```\n"
invoke_func_res_list.append(invoke_func_res) invoke_func_res_list.append(invoke_func_res)
gu.log(f"[FUNC| {func_name} invoked]", bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"]) gu.log(f"[FUNC| {func_name} invoked]",
bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
# print(str(t_res)) # print(str(t_res))
if is_summary: if is_summary:
@@ -181,12 +189,16 @@ class FuncCall():
try: try:
res = self.provider.text_chat(after_prompt, session_id) res = self.provider.text_chat(after_prompt, session_id)
# 截取```之间的内容 # 截取```之间的内容
gu.log("DEBUG BEGIN", bg=gu.BG_COLORS["yellow"], fg=gu.FG_COLORS["white"]) gu.log(
"DEBUG BEGIN", bg=gu.BG_COLORS["yellow"], fg=gu.FG_COLORS["white"])
print(res) print(res)
gu.log("DEBUG END", bg=gu.BG_COLORS["yellow"], fg=gu.FG_COLORS["white"]) gu.log(
"DEBUG END", bg=gu.BG_COLORS["yellow"], fg=gu.FG_COLORS["white"])
if res.find('```') != -1: if res.find('```') != -1:
res = res[res.find('```json') + 7: res.rfind('```')] res = res[res.find('```json') +
gu.log("REVGPT after_func_call json result", bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"]) 7: res.rfind('```')]
gu.log("REVGPT after_func_call json result",
bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
after_prompt_res = res after_prompt_res = res
after_prompt_res = json.loads(after_prompt_res) after_prompt_res = json.loads(after_prompt_res)
break break
@@ -197,7 +209,8 @@ class FuncCall():
if "The message you submitted was too long" in str(e): if "The message you submitted was too long" in str(e):
# 如果返回的内容太长了,那么就截取一部分 # 如果返回的内容太长了,那么就截取一部分
time.sleep(3) time.sleep(3)
invoke_func_res = invoke_func_res[:int(len(invoke_func_res) / 2)]
after_prompt = """ after_prompt = """
函数返回以下内容:"""+invoke_func_res+""" 函数返回以下内容:"""+invoke_func_res+"""
请以AI助手的身份结合返回的内容对用户提问做详细全面的回答。 请以AI助手的身份结合返回的内容对用户提问做详细全面的回答。
@@ -218,11 +231,13 @@ class FuncCall():
if "func_call_again" in after_prompt_res and after_prompt_res["func_call_again"]: if "func_call_again" in after_prompt_res and after_prompt_res["func_call_again"]:
# 如果需要重新调用函数 # 如果需要重新调用函数
# 重新调用函数 # 重新调用函数
gu.log("REVGPT func_call_again", bg=gu.BG_COLORS["purple"], fg=gu.FG_COLORS["white"]) gu.log("REVGPT func_call_again",
bg=gu.BG_COLORS["purple"], fg=gu.FG_COLORS["white"])
res = self.func_call(question, func_definition) res = self.func_call(question, func_definition)
return res, True return res, True
gu.log("REVGPT func callback:", bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"]) gu.log("REVGPT func callback:",
bg=gu.BG_COLORS["green"], fg=gu.FG_COLORS["white"])
# print(after_prompt_res["res"]) # print(after_prompt_res["res"])
return after_prompt_res["res"], True return after_prompt_res["res"], True
else: else:
@@ -230,8 +245,3 @@ class FuncCall():
else: else:
# print(res["res"]) # print(res["res"])
return res["res"], False return res["res"], False

View File

@@ -22,6 +22,7 @@ def tidy_text(text: str) -> str:
''' '''
return text.strip().replace("\n", " ").replace("\r", " ").replace(" ", " ") return text.strip().replace("\n", " ").replace("\r", " ").replace(" ", " ")
def special_fetch_zhihu(link: str) -> str: def special_fetch_zhihu(link: str) -> str:
''' '''
function-calling 函数, 用于获取知乎文章的内容 function-calling 函数, 用于获取知乎文章的内容
@@ -43,6 +44,7 @@ def special_fetch_zhihu(link: str) -> str:
raise Exception("zhihu none") raise Exception("zhihu none")
return tidy_text(r.text) return tidy_text(r.text)
def google_web_search(keyword) -> str: def google_web_search(keyword) -> str:
''' '''
获取 google 搜索结果, 得到 title、desc、link 获取 google 搜索结果, 得到 title、desc、link
@@ -66,6 +68,7 @@ def google_web_search(keyword) -> str:
return web_keyword_search_via_bing(keyword) return web_keyword_search_via_bing(keyword)
return ret return ret
def web_keyword_search_via_bing(keyword) -> str: def web_keyword_search_via_bing(keyword) -> str:
''' '''
获取bing搜索结果, 得到 title、desc、link 获取bing搜索结果, 得到 title、desc、link
@@ -104,7 +107,8 @@ def web_keyword_search_via_bing(keyword) -> str:
res += f"# No.{str(result_cnt + 1)}\ntitle: {title}\nurl: {link}\ncontent: {desc}\n\n" res += f"# No.{str(result_cnt + 1)}\ntitle: {title}\nurl: {link}\ncontent: {desc}\n\n"
result_cnt += 1 result_cnt += 1
if result_cnt > 5: break
# if len(_detail_store) >= 3: # if len(_detail_store) >= 3:
# continue # continue
@@ -122,7 +126,8 @@ def web_keyword_search_via_bing(keyword) -> str:
except Exception as e: except Exception as e:
print(f"bing parse err: {str(e)}") print(f"bing parse err: {str(e)}")
if result_cnt == 0: break
return res return res
except Exception as e: except Exception as e:
# gu.log(f"bing fetch err: {str(e)}") # gu.log(f"bing fetch err: {str(e)}")
@@ -132,6 +137,7 @@ def web_keyword_search_via_bing(keyword) -> str:
# gu.log("fail to fetch bing info, using sougou.") # gu.log("fail to fetch bing info, using sougou.")
return web_keyword_search_via_sougou(keyword) return web_keyword_search_via_sougou(keyword)
def web_keyword_search_via_sougou(keyword) -> str: def web_keyword_search_via_sougou(keyword) -> str:
headers = { headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) \ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) \
@@ -154,7 +160,7 @@ def web_keyword_search_via_sougou(keyword) -> str:
"title": title, "title": title,
"link": link, "link": link,
}) })
if len(res) >= 5:  # 限制5条
break break
except Exception as e: except Exception as e:
pass pass
@@ -173,6 +179,7 @@ def web_keyword_search_via_sougou(keyword) -> str:
ret += f"\n网页内容: {str(_detail_store)}" ret += f"\n网页内容: {str(_detail_store)}"
return ret return ret
def fetch_website_content(url): def fetch_website_content(url):
# gu.log(f"fetch_website_content: {url}", tag="fetch_website_content", level=gu.LEVEL_DEBUG) # gu.log(f"fetch_website_content: {url}", tag="fetch_website_content", level=gu.LEVEL_DEBUG)
headers = { headers = {
@@ -188,6 +195,7 @@ def fetch_website_content(url):
ret = tidy_text(soup.get_text()) ret = tidy_text(soup.get_text())
return ret return ret
async def web_search(question, provider: Provider, session_id, official_fc=False): async def web_search(question, provider: Provider, session_id, official_fc=False):
''' '''
official_fc: 使用官方 function-calling official_fc: 使用官方 function-calling
@@ -197,17 +205,17 @@ async def web_search(question, provider: Provider, session_id, official_fc=False
"type": "string", "type": "string",
"name": "keyword", "name": "keyword",
"description": "google search query (分词,尽量保留所有信息)" "description": "google search query (分词,尽量保留所有信息)"
}], }],
"通过搜索引擎搜索。如果问题需要获取近期、实时的消息,在网页上搜索(如天气、新闻或任何需要通过网页获取信息的问题),则调用此函数;如果没有,不要调用此函数。", "通过搜索引擎搜索。如果问题需要获取近期、实时的消息,在网页上搜索(如天气、新闻或任何需要通过网页获取信息的问题),则调用此函数;如果没有,不要调用此函数。",
web_keyword_search_via_bing web_keyword_search_via_bing
) )
new_func_call.add_func("fetch_website_content", [{ new_func_call.add_func("fetch_website_content", [{
"type": "string", "type": "string",
"name": "url", "name": "url",
"description": "网址" "description": "网址"
}], }],
"获取网页的内容。如果问题带有合法的网页链接(例如: `帮我总结一下 https://github.com 的内容`), 就调用此函数。如果没有,不要调用此函数。", "获取网页的内容。如果问题带有合法的网页链接(例如: `帮我总结一下 https://github.com 的内容`), 就调用此函数。如果没有,不要调用此函数。",
fetch_website_content fetch_website_content
) )
question1 = f"{question} \n> hint: 最多只能调用1个function, 并且存在不会调用任何function的可能性。" question1 = f"{question} \n> hint: 最多只能调用1个function, 并且存在不会调用任何function的可能性。"
has_func = False has_func = False
@@ -282,9 +290,11 @@ async def web_search(question, provider: Provider, session_id, official_fc=False
except Exception as e: except Exception as e:
print(e) print(e)
_c += 1 _c += 1
if _c == 3: raise e
if "The message you submitted was too long" in str(e): if "The message you submitted was too long" in str(e):
await provider.forget(session_id) await provider.forget(session_id)
function_invoked_ret = function_invoked_ret[:int(len(function_invoked_ret) / 2)]
time.sleep(3) time.sleep(3)
return function_invoked_ret return function_invoked_ret

View File

@@ -7,137 +7,15 @@ import re
import requests import requests
from util.cmd_config import CmdConfig from util.cmd_config import CmdConfig
import socket import socket
-from cores.qqbot.types import GlobalObject
+from cores.astrbot.types import GlobalObject
import platform import platform
import logging import logging
import json import json
import sys import sys
import psutil import psutil
PLATFORM_GOCQ = 'gocq'
PLATFORM_QQCHAN = 'qqchan'
FG_COLORS = {
"black": "30",
"red": "31",
"green": "32",
"yellow": "33",
"blue": "34",
"purple": "35",
"cyan": "36",
"white": "37",
"default": "39",
}
BG_COLORS = {
"black": "40",
"red": "41",
"green": "42",
"yellow": "43",
"blue": "44",
"purple": "45",
"cyan": "46",
"white": "47",
"default": "49",
}
LEVEL_DEBUG = "DEBUG"
LEVEL_INFO = "INFO"
LEVEL_WARNING = "WARN"
LEVEL_ERROR = "ERROR"
LEVEL_CRITICAL = "CRITICAL"
# 为了兼容旧版
level_codes = {
LEVEL_DEBUG: logging.DEBUG,
LEVEL_INFO: logging.INFO,
LEVEL_WARNING: logging.WARNING,
LEVEL_ERROR: logging.ERROR,
LEVEL_CRITICAL: logging.CRITICAL,
}
level_colors = {
"INFO": "green",
"WARN": "yellow",
"ERROR": "red",
"CRITICAL": "purple",
}
class Logger:
def __init__(self) -> None:
self.history = []
def log(
self,
msg: str,
level: str = "INFO",
tag: str = "System",
fg: str = None,
bg: str = None,
max_len: int = 50000,
err: Exception = None,):
"""
日志打印函数
"""
_set_level_code = level_codes[LEVEL_INFO]
if 'LOG_LEVEL' in os.environ and os.environ['LOG_LEVEL'] in level_codes:
_set_level_code = level_codes[os.environ['LOG_LEVEL']]
if level in level_codes and level_codes[level] < _set_level_code:
return
if err is not None:
msg += "\n异常原因: " + str(err)
level = LEVEL_ERROR
if len(msg) > max_len:
msg = msg[:max_len] + "..."
now = datetime.datetime.now().strftime("%H:%M:%S")
pres = []
for line in msg.split("\n"):
if line == "\n":
pres.append("")
else:
pres.append(f"[{now}] [{tag}/{level}] {line}")
if level == "INFO":
if fg is None:
fg = FG_COLORS["green"]
if bg is None:
bg = BG_COLORS["default"]
elif level == "WARN":
if fg is None:
fg = FG_COLORS["yellow"]
if bg is None:
bg = BG_COLORS["default"]
elif level == "ERROR":
if fg is None:
fg = FG_COLORS["red"]
if bg is None:
bg = BG_COLORS["default"]
elif level == "CRITICAL":
if fg is None:
fg = FG_COLORS["purple"]
if bg is None:
bg = BG_COLORS["default"]
ret = ""
for line in pres:
ret += f"\033[{fg};{bg}m{line}\033[0m\n"
try:
requests.post("http://localhost:6185/api/log", data=ret[:-1].encode(), timeout=1)
except BaseException as e:
pass
self.history.append(ret)
if len(self.history) > 100:
self.history = self.history[-100:]
print(ret[:-1])
log = Logger()
def port_checker(port: int, host: str = "localhost"): def port_checker(port: int, host: str = "localhost"):
sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sk.settimeout(1) sk.settimeout(1)
try: try:
sk.connect((host, port)) sk.connect((host, port))
@@ -147,6 +25,7 @@ def port_checker(port: int, host: str = "localhost"):
sk.close() sk.close()
return False return False
def get_font_path() -> str: def get_font_path() -> str:
if os.path.exists("resources/fonts/syst.otf"): if os.path.exists("resources/fonts/syst.otf"):
font_path = "resources/fonts/syst.otf" font_path = "resources/fonts/syst.otf"
@@ -162,6 +41,7 @@ def get_font_path() -> str:
raise Exception("找不到字体文件") raise Exception("找不到字体文件")
return font_path return font_path
def word2img(title: str, text: str, max_width=30, font_size=20):
font_path = get_font_path()
width_factor = 1.0
@@ -189,19 +69,21 @@ def word2img(title: str, text: str, max_width=30, font_size=20):
title_font = ImageFont.truetype(font_path, font_size + 5)
# 标题居中
title_width, title_height = title_font.getsize(title)
- draw.text(((width - title_width) / 2, 10), title, fill=(0, 0, 0), font=title_font)
+ draw.text(((width - title_width) / 2, 10),
+           title, fill=(0, 0, 0), font=title_font)
# 文本不居中
draw.text((10, title_height+20), text, fill=(0, 0, 0), font=text_font)
return image
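word2img centres the title by measuring its pixel width with font.getsize; note that getsize is deprecated in recent Pillow releases (removed in 10.0), where getbbox/getlength are the replacements. A rough equivalent of the centring step under the newer API (font path and sizes are placeholders):

from PIL import Image, ImageDraw, ImageFont

image = Image.new("RGB", (600, 120), (255, 255, 255))
draw = ImageDraw.Draw(image)
font = ImageFont.truetype("resources/fonts/syst.otf", 36)  # placeholder font path

title = "标题"
left, top, right, bottom = font.getbbox(title)  # pixel bounds of the rendered text
title_width = right - left
draw.text(((image.width - title_width) / 2, 10), title, fill=(0, 0, 0), font=font)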
def render_markdown(markdown_text, image_width=800, image_height=600, font_size=26, font_color=(0, 0, 0), bg_color=(255, 255, 255)):
HEADER_MARGIN = 20
HEADER_FONT_STANDARD_SIZE = 42
QUOTE_LEFT_LINE_MARGIN = 10
QUOTE_FONT_LINE_MARGIN = 6 # 引用文字距离左边线的距离和上下的距离
QUOTE_LEFT_LINE_HEIGHT = font_size + QUOTE_FONT_LINE_MARGIN * 2
QUOTE_LEFT_LINE_WIDTH = 5
QUOTE_LEFT_LINE_COLOR = (180, 180, 180)
@@ -213,9 +95,9 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
CODE_BLOCK_FONT_SIZE = font_size
CODE_BLOCK_FONT_COLOR = (255, 255, 255)
CODE_BLOCK_BG_COLOR = (240, 240, 240)
CODE_BLOCK_CODES_MARGIN_VERTICAL = 5 # 代码块和代码之间的距离
CODE_BLOCK_CODES_MARGIN_HORIZONTAL = 5 # 代码块和代码之间的距离
CODE_BLOCK_TEXT_MARGIN = 4 # 代码和代码之间的距离
INLINE_CODE_MARGIN = 8
INLINE_CODE_FONT_SIZE = font_size
@@ -255,13 +137,15 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
try:
image_url = re.findall(IMAGE_REGEX, line)[0]
print(image_url)
- image_res = Image.open(requests.get(image_url, stream=True, timeout=5).raw)
+ image_res = Image.open(requests.get(
+     image_url, stream=True, timeout=5).raw)
images[i] = image_res
# 最大不得超过image_width的50%
img_height = image_res.size[1]
if image_res.size[0] > image_width*0.5:
- image_res = image_res.resize((int(image_width*0.5), int(image_res.size[1]*image_width*0.5/image_res.size[0])))
+ image_res = image_res.resize(
+     (int(image_width*0.5), int(image_res.size[1]*image_width*0.5/image_res.size[0])))
img_height = image_res.size[1]
height += img_height + IMAGE_MARGIN*2
@@ -280,18 +164,18 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
for ii in range(len(line)):
# 检测是否是中文
_width += font.getsize(line[ii])[0]
- _word_cnt+=1
+ _word_cnt += 1
if _width > image_width:
_pre_lines.append(cp[:_word_cnt])
cp = cp[_word_cnt:]
- _word_cnt=0
+ _word_cnt = 0
- _width=0
+ _width = 0
_pre_lines.append(cp)
else:
_pre_lines.append(line)
pre_lines = _pre_lines
- i=-1
+ i = -1
for line in pre_lines:
if line == "":
height += TEXT_LINE_MARGIN
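The loop above wraps each markdown line by accumulated pixel width rather than by character count, so wide CJK glyphs and narrow Latin glyphs are handled uniformly. A compact sketch of the same wrapping strategy using Pillow's per-string length measurement (illustrative only, not the project's function):

from PIL import ImageFont

def wrap_by_pixel_width(text: str, font: ImageFont.FreeTypeFont, max_width: int) -> list:
    lines, current, width = [], "", 0
    for ch in text:
        w = font.getlength(ch)        # advance width of this glyph in pixels
        if width + w > max_width and current:
            lines.append(current)     # start a new visual line once we would overflow
            current, width = "", 0
        current += ch
        width += w
    if current:
        lines.append(current)
    return lines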
@@ -358,19 +242,22 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
line = line.strip("#").strip()
font_size_header = HEADER_FONT_STANDARD_SIZE - header_level * 4
font = ImageFont.truetype(font_path, font_size_header)
y += HEADER_MARGIN # 上边距
# 字间距
draw.text((x, y), line, font=font, fill=font_color)
- draw.line((x, y + font_size_header + 8, image_width - 10, y + font_size_header + 8), fill=(230, 230, 230), width=3)
+ draw.line((x, y + font_size_header + 8, image_width - 10,
+            y + font_size_header + 8), fill=(230, 230, 230), width=3)
y += font_size_header + HEADER_MARGIN
elif line.startswith(">"):
# 处理引用
quote_text = line.strip(">")
- y+=QUOTE_LEFT_LINE_MARGIN
+ y += QUOTE_LEFT_LINE_MARGIN
- draw.line((x, y, x, y + QUOTE_LEFT_LINE_HEIGHT), fill=QUOTE_LEFT_LINE_COLOR, width=QUOTE_LEFT_LINE_WIDTH)
+ draw.line((x, y, x, y + QUOTE_LEFT_LINE_HEIGHT),
+           fill=QUOTE_LEFT_LINE_COLOR, width=QUOTE_LEFT_LINE_WIDTH)
font = ImageFont.truetype(font_path, QUOTE_FONT_SIZE)
- draw.text((x + QUOTE_FONT_LINE_MARGIN, y + QUOTE_FONT_LINE_MARGIN), quote_text, font=font, fill=QUOTE_FONT_COLOR)
+ draw.text((x + QUOTE_FONT_LINE_MARGIN, y + QUOTE_FONT_LINE_MARGIN),
+           quote_text, font=font, fill=QUOTE_FONT_COLOR)
y += font_size + QUOTE_LEFT_LINE_HEIGHT + QUOTE_LEFT_LINE_MARGIN
elif line.startswith("-"):
@@ -378,7 +265,8 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
list_text = line.strip("-").strip()
font = ImageFont.truetype(font_path, LIST_FONT_SIZE)
y += LIST_MARGIN
- draw.text((x, y), " · " + list_text, font=font, fill=LIST_FONT_COLOR)
+ draw.text((x, y), " · " + list_text,
+           font=font, fill=LIST_FONT_COLOR)
y += font_size + LIST_MARGIN
elif line.startswith("```"):
@@ -390,13 +278,15 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
in_code_block = False
codes = "\n".join(code_block_codes)
code_block_codes = []
- draw.rounded_rectangle((x, code_block_start_y, image_width - 10, y+CODE_BLOCK_CODES_MARGIN_VERTICAL + CODE_BLOCK_TEXT_MARGIN), radius=5, fill=CODE_BLOCK_BG_COLOR, width=2)
+ draw.rounded_rectangle((x, code_block_start_y, image_width - 10, y+CODE_BLOCK_CODES_MARGIN_VERTICAL +
+                         CODE_BLOCK_TEXT_MARGIN), radius=5, fill=CODE_BLOCK_BG_COLOR, width=2)
font = ImageFont.truetype(font_path1, CODE_BLOCK_FONT_SIZE)
- draw.text((x + CODE_BLOCK_CODES_MARGIN_HORIZONTAL, code_block_start_y + CODE_BLOCK_CODES_MARGIN_VERTICAL), codes, font=font, fill=font_color)
+ draw.text((x + CODE_BLOCK_CODES_MARGIN_HORIZONTAL, code_block_start_y +
+            CODE_BLOCK_CODES_MARGIN_VERTICAL), codes, font=font, fill=font_color)
y += CODE_BLOCK_CODES_MARGIN_VERTICAL + CODE_BLOCK_MARGIN
# y += font_size+10
elif re.search(r"`(.*?)`", line):
y += INLINE_CODE_MARGIN # 上边距
# 处理行内代码
code_regex = r"`(.*?)`"
parts_inline = re.findall(code_regex, line)
@@ -409,11 +299,15 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
if part in parts_inline:
font = ImageFont.truetype(font_path, INLINE_CODE_FONT_SIZE)
code_text = part.strip("`")
- code_width = font.getsize(code_text)[0] + INLINE_CODE_FONT_MARGIN*2
+ code_width = font.getsize(
+     code_text)[0] + INLINE_CODE_FONT_MARGIN*2
x += INLINE_CODE_MARGIN
- code_box = (x, y, x + code_width, y + INLINE_CODE_BG_HEIGHT)
+ code_box = (x, y, x + code_width,
+             y + INLINE_CODE_BG_HEIGHT)
- draw.rounded_rectangle(code_box, radius=5, fill=INLINE_CODE_BG_COLOR, width=2) # 使用灰色填充矩形框作为引用背景
+ draw.rounded_rectangle(
+     code_box, radius=5, fill=INLINE_CODE_BG_COLOR, width=2)  # 使用灰色填充矩形框作为引用背景
- draw.text((x+INLINE_CODE_FONT_MARGIN, y), code_text, font=font, fill=font_color)
+ draw.text((x+INLINE_CODE_FONT_MARGIN, y),
+           code_text, font=font, fill=font_color)
x += code_width+INLINE_CODE_MARGIN-INLINE_CODE_FONT_MARGIN
else:
font = ImageFont.truetype(font_path, font_size)
@@ -437,11 +331,13 @@ def render_markdown(markdown_text, image_width=800, image_height=600, font_size=
image_res = images[index]
# 最大不得超过image_width的50%
if image_res.size[0] > image_width*0.5:
- image_res = image_res.resize((int(image_width*0.5), int(image_res.size[1]*image_width*0.5/image_res.size[0])))
+ image_res = image_res.resize(
+     (int(image_width*0.5), int(image_res.size[1]*image_width*0.5/image_res.size[0])))
image.paste(image_res, (IMAGE_MARGIN, y))
y += image_res.size[1] + IMAGE_MARGIN*2
return image
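Both the pre-scan and the paste step cap embedded images at half the page width while preserving aspect ratio; the arithmetic is the usual proportional-resize formula. A minimal sketch:

from PIL import Image

def shrink_to_max_width(img: Image.Image, max_width: int) -> Image.Image:
    # Scale height by the same factor as width so the aspect ratio is preserved.
    if img.size[0] <= max_width:
        return img
    scale = max_width / img.size[0]
    return img.resize((max_width, int(img.size[1] * scale)))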
def save_temp_img(img: Image) -> str:
if not os.path.exists("temp"):
os.makedirs("temp")
@@ -455,7 +351,7 @@ def save_temp_img(img: Image) -> str:
if time.time() - ctime > 3600:
os.remove(path)
except Exception as e:
- print(f"清除临时文件失败: {e}", level=LEVEL_WARNING, tag="GeneralUtils")
+ print(f"清除临时文件失败: {e}")
# 获得时间戳
timestamp = int(time.time())
@@ -463,6 +359,7 @@ def save_temp_img(img: Image) -> str:
img.save(p)
return p
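save_temp_img combines two concerns: purging temp files older than an hour and saving the new image under a timestamp-based name. A standalone sketch of the purge half (the directory name is illustrative):

import os
import time

def purge_old_files(directory: str = "temp", max_age_seconds: int = 3600) -> None:
    if not os.path.isdir(directory):
        return
    now = time.time()
    for name in os.listdir(directory):
        path = os.path.join(directory, name)
        try:
            if now - os.path.getctime(path) > max_age_seconds:
                os.remove(path)  # drop anything older than the cutoff
        except OSError as e:
            print(f"清除临时文件失败: {e}")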
def create_text_image(title: str, text: str, max_width=30, font_size=20):
'''
文本转图片。
@@ -480,6 +377,7 @@ def create_text_image(title: str, text: str, max_width=30, font_size=20):
except Exception as e:
raise e
def create_markdown_image(text: str):
'''
markdown文本转图片。
@@ -492,6 +390,7 @@ def create_markdown_image(text: str):
except Exception as e:
raise e
def try_migrate_config(old_config: dict):
'''
迁移配置文件到 cmd_config.json
@@ -502,6 +401,7 @@ def try_migrate_config(old_config: dict):
for k in old_config:
cc.put(k, old_config[k])
def get_local_ip_addresses():
ip = ''
try:
@@ -514,6 +414,7 @@ def get_local_ip_addresses():
s.close()
return ip
def get_sys_info(global_object: GlobalObject):
mem = None
stats = global_object.dashboard_data.stats
@@ -528,6 +429,7 @@ def get_sys_info(global_object: GlobalObject):
'py': platform.python_version(),
}
def upload(_global_object: GlobalObject):
while True:
addr_ip = ''
@@ -539,7 +441,8 @@ def upload(_global_object: GlobalObject):
"sys": sys.platform,
"admin": "null",
}
- resp = requests.post('https://api.soulter.top/upload', data=json.dumps(res), timeout=5)
+ resp = requests.post(
+     'https://api.soulter.top/upload', data=json.dumps(res), timeout=5)
if resp.status_code == 200:
ok = resp.json()
if ok['status'] == 'ok':
@@ -548,6 +451,7 @@ def upload(_global_object: GlobalObject):
pass
time.sleep(10*60)
def run_monitor(global_object: GlobalObject):
'''
监测机器性能
@@ -558,7 +462,7 @@ def run_monitor(global_object: GlobalObject):
while True:
stat = global_object.dashboard_data.stats
# 程序占用的内存大小
mem = psutil.Process().memory_info().rss / 1024 / 1024  # MB
stat['sys_perf'] = {
'memory': mem,
'cpu': psutil.cpu_percent()
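run_monitor samples the bot process's RSS and the overall CPU load with psutil and stashes them for the dashboard. The measurement itself is just two calls; a sketch:

import psutil

def sample_perf() -> dict:
    mem_mb = psutil.Process().memory_info().rss / 1024 / 1024  # this process's RSS in MB
    return {
        "memory": mem_mb,
        "cpu": psutil.cpu_percent(),  # system-wide CPU percent since the previous call
    }

print(sample_perf())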
View File
@@ -1,4 +1,4 @@
- from cores.qqbot.types import (
+ from cores.astrbot.types import (
PluginMetadata,
RegisteredLLM,
RegisteredPlugin,
View File
@@ -1,5 +1,5 @@
- from cores.qqbot.core import oper_msg
+ from cores.astrbot.core import oper_msg
- from cores.qqbot.types import AstrMessageEvent, CommandResult
+ from cores.astrbot.types import AstrMessageEvent, CommandResult
from model.platform._message_result import MessageResult
'''
View File
@@ -5,7 +5,7 @@
'''
from model.provider.provider import Provider as LLMProvider
from model.platform._platfrom import Platform
- from cores.qqbot.types import GlobalObject, RegisteredPlatform, RegisteredLLM
+ from cores.astrbot.types import GlobalObject, RegisteredPlatform, RegisteredLLM
def register_platform(platform_name: str, platform_instance: Platform, context: GlobalObject) -> None:
'''
View File
@@ -2,4 +2,4 @@
插件类型
'''
- from cores.qqbot.types import PluginType
+ from cores.astrbot.types import PluginType
View File
@@ -15,7 +15,7 @@ import traceback
from types import ModuleType
from typing import List
from pip._internal import main as pipmain
- from cores.qqbot.types import (
+ from cores.astrbot.types import (
PluginMetadata,
PluginType,
RegisteredPlugin,
@@ -35,6 +35,8 @@ def get_classes(p_name, arg: ModuleType):
return classes
# 获取一个文件夹下所有的模块, 文件名和文件夹名相同
def get_modules(path):
modules = []
@@ -58,6 +60,7 @@ def get_modules(path):
})
return modules
def get_plugin_store_path():
if os.path.exists("addons/plugins"):
return "addons/plugins"
@@ -68,6 +71,7 @@ def get_plugin_store_path():
else:
raise FileNotFoundError("插件文件夹不存在。")
def get_plugin_modules():
plugins = []
try:
@@ -82,6 +86,7 @@ def get_plugin_modules():
except BaseException as e:
raise e
def plugin_reload(cached_plugins: RegisteredPlugins):
plugins = get_plugin_modules()
if plugins is None:
@@ -98,11 +103,8 @@ def plugin_reload(cached_plugins: RegisteredPlugins):
module_path = plugin['module_path']
root_dir_name = plugin['pname']
- if module_path in registered_map:
- # 之前注册过
- module = importlib.reload(module)
- else:
- module = __import__("addons.plugins." + root_dir_name + "." + p, fromlist=[p])
+ module = __import__("addons.plugins." +
+                     root_dir_name + "." + p, fromlist=[p])
cls = get_classes(p, module)
obj = getattr(module, cls[0])()
@@ -117,7 +119,8 @@ def plugin_reload(cached_plugins: RegisteredPlugins):
else:
metadata = PluginMetadata(
plugin_name=info['name'],
- plugin_type=PluginType.COMMON if 'plugin_type' not in info else PluginType(info['plugin_type']),
+ plugin_type=PluginType.COMMON if 'plugin_type' not in info else PluginType(
+     info['plugin_type']),
author=info['author'],
desc=info['desc'],
version=info['version'],
@@ -131,13 +134,15 @@ def plugin_reload(cached_plugins: RegisteredPlugins):
except BaseException as e:
fail_rec += f"注册插件 {module_path} 失败, 原因: {str(e)}\n"
continue
- cached_plugins.append(RegisteredPlugin(
- metadata=metadata,
- plugin_instance=obj,
- module=module,
- module_path=module_path,
- root_dir_name=root_dir_name
- ))
+ if module_path not in registered_map:
+     cached_plugins.append(RegisteredPlugin(
+         metadata=metadata,
+         plugin_instance=obj,
+         module=module,
+         module_path=module_path,
+         root_dir_name=root_dir_name
+     ))
except BaseException as e:
traceback.print_exc()
fail_rec += f"加载{p}插件出现问题,原因 {str(e)}\n"
@@ -146,6 +151,7 @@ def plugin_reload(cached_plugins: RegisteredPlugins):
else:
return False, fail_rec
def install_plugin(repo_url: str, cached_plugins: RegisteredPlugins):
ppath = get_plugin_store_path()
# 删除末尾的 /
@@ -165,7 +171,9 @@ def install_plugin(repo_url: str, cached_plugins: RegisteredPlugins):
if pipmain(['install', '-r', os.path.join(plugin_path, "requirements.txt"), '--quiet']) != 0:
raise Exception("插件的依赖安装失败, 需要您手动 pip 安装对应插件的依赖。")
ok, err = plugin_reload(cached_plugins)
- if not ok: raise Exception(err)
+ if not ok:
+     raise Exception(err)
def get_registered_plugin(plugin_name: str, cached_plugins: RegisteredPlugins) -> RegisteredPlugin:
ret = None
@@ -175,6 +183,7 @@ def get_registered_plugin(plugin_name: str, cached_plugins: RegisteredPlugins) -
break
return ret
def uninstall_plugin(plugin_name: str, cached_plugins: RegisteredPlugins):
plugin = get_registered_plugin(plugin_name, cached_plugins)
if not plugin:
@@ -185,6 +194,7 @@ def uninstall_plugin(plugin_name: str, cached_plugins: RegisteredPlugins):
if not remove_dir(os.path.join(ppath, root_dir_name)):
raise Exception("移除插件成功,但是删除插件文件夹失败。您可以手动删除该文件夹,位于 addons/plugins/ 下。")
def update_plugin(plugin_name: str, cached_plugins: RegisteredPlugins):
plugin = get_registered_plugin(plugin_name, cached_plugins)
if not plugin:
@@ -192,14 +202,17 @@ def update_plugin(plugin_name: str, cached_plugins: RegisteredPlugins):
ppath = get_plugin_store_path()
root_dir_name = plugin.root_dir_name
plugin_path = os.path.join(ppath, root_dir_name)
- repo = Repo(path = plugin_path)
+ repo = Repo(path=plugin_path)
repo.remotes.origin.pull()
# 读取插件的requirements.txt
if os.path.exists(os.path.join(plugin_path, "requirements.txt")):
- if pipmain(['install', '-r', os.path.join(plugin_path, "requirements.txt"), '--quiet']) != 0:
+ print("正在安装插件依赖...")
+ if pipmain(['install', '-r', os.path.join(plugin_path, "requirements.txt")]) != 0:
raise Exception("插件依赖安装失败, 需要您手动pip安装对应插件的依赖。")
ok, err = plugin_reload(cached_plugins)
- if not ok: raise Exception(err)
+ if not ok:
+     raise Exception(err)
def remove_dir(file_path) -> bool:
try_cnt = 50
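update_plugin now pulls the plugin's git checkout, reinstalls its requirements.txt without --quiet so pip's output stays visible, and then reloads. A hedged sketch of that sequence with GitPython and pip's internal entry point (the directory layout is assumed; calling pip via a subprocess would be the more robust alternative, since pip._internal is not a public API):

import os
from git import Repo
from pip._internal import main as pipmain

def pull_and_install(plugin_path: str) -> None:
    Repo(path=plugin_path).remotes.origin.pull()   # fast-forward the plugin checkout
    req = os.path.join(plugin_path, "requirements.txt")
    if os.path.exists(req):
        print("正在安装插件依赖...")
        if pipmain(["install", "-r", req]) != 0:   # non-zero return means pip failed
            raise Exception("插件依赖安装失败, 需要您手动pip安装对应插件的依赖。")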
View File
@@ -111,7 +111,7 @@ def update_project(update_data: list,
else:
# 更新到最新版本对应的commit
try:
- repo.remotes.origin.fetch()
+ repo.git.fetch()
repo.git.checkout(update_data[0]['tag_name'])
if reboot: _reboot()
except BaseException as e:
@@ -119,25 +119,23 @@ def update_project(update_data: list,
else:
# 更新到指定版本
flag = False
+ print(f"请求更新到指定版本: {version}")
for data in update_data:
if data['tag_name'] == version:
try:
- repo.remotes.origin.fetch()
+ repo.git.fetch()
repo.git.checkout(data['tag_name'])
flag = True
if reboot: _reboot()
except BaseException as e:
raise e
- else:
- continue
if not flag:
raise Exception("未找到指定版本。")
def checkout_branch(branch_name: str):
repo = find_repo()
try:
- origin = repo.remotes.origin
- origin.fetch()
+ repo.git.fetch()
repo.git.checkout(branch_name)
repo.git.pull("origin", branch_name, "-f")
return True
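Both update paths and checkout_branch now go through repo.git.fetch() followed by a checkout of the requested tag or branch, i.e. plain git commands driven through GitPython. A minimal sketch of pinning a working copy to a release tag that way (repository path and tag name are examples):

from git import Repo

def checkout_version(repo_path: str, tag_name: str) -> None:
    repo = Repo(repo_path)
    repo.git.fetch()             # refresh remote refs, including newly published tags
    repo.git.checkout(tag_name)  # detach HEAD at the requested release tag

# checkout_version(".", "v3.1.3")  # example invocation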