Compare commits
3140 Commits
v3.0.2
...
feat/file-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1de377e749 | ||
|
|
6aa6963ab5 | ||
|
|
d3001d8148 | ||
|
|
380c4faf17 | ||
|
|
bd2a88783c | ||
|
|
17d7f822e7 | ||
|
|
0e034f0fbd | ||
|
|
2a7d03f9e1 | ||
|
|
72fac4b9f1 | ||
|
|
38281ba2cf | ||
|
|
21aa3174f4 | ||
|
|
dcda871fc0 | ||
|
|
c13c51f499 | ||
|
|
a130db5cf4 | ||
|
|
7faeb5cea8 | ||
|
|
8d3ff61e0d | ||
|
|
4c03e82570 | ||
|
|
e7e8664ab4 | ||
|
|
1dd1623e7d | ||
|
|
80d8161d58 | ||
|
|
fc80d7d681 | ||
|
|
c2f036b27c | ||
|
|
4087bbb512 | ||
|
|
e1c728582d | ||
|
|
93c69a639a | ||
|
|
a7fdc98b29 | ||
|
|
85b7f104df | ||
|
|
d76d1bd7fe | ||
|
|
df4412aa80 | ||
|
|
ab2c94e19a | ||
|
|
37cc4e2121 | ||
|
|
60dfdd0a66 | ||
|
|
bb8b2cb194 | ||
|
|
4e29684aa3 | ||
|
|
0e17e3553d | ||
|
|
0a55060e89 | ||
|
|
77859c7daa | ||
|
|
ba39c393a0 | ||
|
|
6a50d316d9 | ||
|
|
88c1d77f0b | ||
|
|
758ce40cc1 | ||
|
|
3e7bb80492 | ||
|
|
75e95aa9ca | ||
|
|
a389842e25 | ||
|
|
0f6a3c3f5a | ||
|
|
133f27422d | ||
|
|
abc6deb244 | ||
|
|
06869b4597 | ||
|
|
d32cea9870 | ||
|
|
4b68100f16 | ||
|
|
5c5515d462 | ||
|
|
3932b8f982 | ||
|
|
82488ca900 | ||
|
|
29d9b9b2d6 | ||
|
|
02215e9b7b | ||
|
|
7160b7a18b | ||
|
|
ea8dac837a | ||
|
|
e2a7a028bd | ||
|
|
70db8d264b | ||
|
|
0518e6d487 | ||
|
|
39eb367866 | ||
|
|
f1d51a22ad | ||
|
|
77fb554e8f | ||
|
|
91f8a0ae09 | ||
|
|
370cda7cf0 | ||
|
|
66b3eed273 | ||
|
|
99b061a143 | ||
|
|
5f3c7ed673 | ||
|
|
a6dc458212 | ||
|
|
520f521887 | ||
|
|
01427d9969 | ||
|
|
34c03ce983 | ||
|
|
95e9da42d6 | ||
|
|
1338cab61b | ||
|
|
7ba98c1e91 | ||
|
|
9a5f507cbe | ||
|
|
d560671d1f | ||
|
|
82c9cf4db6 | ||
|
|
910ec6c695 | ||
|
|
766d6f2bec | ||
|
|
9f39140987 | ||
|
|
89716ef4da | ||
|
|
3c4ea5a339 | ||
|
|
601846a8c1 | ||
|
|
85d66c1056 | ||
|
|
b89d3f663c | ||
|
|
0260d430d1 | ||
|
|
2e608cdc09 | ||
|
|
234ce93dc1 | ||
|
|
4e2154feb7 | ||
|
|
604958898c | ||
|
|
a093f5ad0a | ||
|
|
a7e9a7f30c | ||
|
|
5d1e9de096 | ||
|
|
89da4eb747 | ||
|
|
8899a1dee1 | ||
|
|
384a687ec3 | ||
|
|
70cfdd2f8b | ||
|
|
bdbd2f009a | ||
|
|
164e0d26e0 | ||
|
|
cb087b5ff9 | ||
|
|
1d3928d145 | ||
|
|
6dc3d161e7 | ||
|
|
e9805ba205 | ||
|
|
d5280dcd88 | ||
|
|
67a9663eff | ||
|
|
77dd89b8eb | ||
|
|
8e511bf14b | ||
|
|
164a4226ea | ||
|
|
6d6fefc435 | ||
|
|
aa59532287 | ||
|
|
2ada1deb9a | ||
|
|
788ceb9721 | ||
|
|
8488c9aeab | ||
|
|
676f9fd4ff | ||
|
|
1935ce4700 | ||
|
|
e760956353 | ||
|
|
be3e5f3f8b | ||
|
|
cdf617feac | ||
|
|
afb56cf707 | ||
|
|
cd2556ab94 | ||
|
|
cf4a5d9ea4 | ||
|
|
0747099cac | ||
|
|
323ec29b02 | ||
|
|
ae81d70685 | ||
|
|
270c89c12f | ||
|
|
c7a58252fe | ||
|
|
47ad8c86e5 | ||
|
|
937e879e5e | ||
|
|
1ecf26eead | ||
|
|
adbb84530a | ||
|
|
6cf169f4f2 | ||
|
|
5ab9ea12c0 | ||
|
|
fd9cb703db | ||
|
|
388c1ab16d | ||
|
|
f867c2a271 | ||
|
|
605bb2cb90 | ||
|
|
5ea15dde5a | ||
|
|
3ca545c4c7 | ||
|
|
e200835074 | ||
|
|
3a90348353 | ||
|
|
5a11d8f0ee | ||
|
|
824af5eeea | ||
|
|
08ec787491 | ||
|
|
b062e83d54 | ||
|
|
17422ba9c3 | ||
|
|
6849af2bad | ||
|
|
09c3da64f9 | ||
|
|
2c8470e8ac | ||
|
|
c4ea3db73d | ||
|
|
89e79863f6 | ||
|
|
d19945009f | ||
|
|
c77256ee0e | ||
|
|
7d823af627 | ||
|
|
3957861878 | ||
|
|
6ac43c600e | ||
|
|
27af9ebb6b | ||
|
|
b360c8446e | ||
|
|
6d00717655 | ||
|
|
bb5f06498e | ||
|
|
aca5743ab6 | ||
|
|
6903032f7e | ||
|
|
1ce0ff87bd | ||
|
|
e39d6bae0b | ||
|
|
8028e9e9a6 | ||
|
|
817f20ea01 | ||
|
|
ad5579a2f4 | ||
|
|
81a689a79b | ||
|
|
1893dd8336 | ||
|
|
021ca8175b | ||
|
|
39d6207fe1 | ||
|
|
23ce687229 | ||
|
|
3715312fd2 | ||
|
|
8196922cac | ||
|
|
8089ad91da | ||
|
|
2930cc3fd8 | ||
|
|
0e841a8b25 | ||
|
|
67fa1611cc | ||
|
|
91136bb9f7 | ||
|
|
7c050d1adc | ||
|
|
a0690a6afc | ||
|
|
c51609b261 | ||
|
|
72148f66eb | ||
|
|
a04993a2bb | ||
|
|
74f845b06d | ||
|
|
50144ddcae | ||
|
|
94bf3b8195 | ||
|
|
e190bbeeed | ||
|
|
92abc43c9d | ||
|
|
c8e34ff26f | ||
|
|
630df3e76e | ||
|
|
bdbf382201 | ||
|
|
00eefc82db | ||
|
|
dc97080837 | ||
|
|
0b7fc29ac4 | ||
|
|
ff998fdd8d | ||
|
|
d7461ed54c | ||
|
|
3ce577acf9 | ||
|
|
50b1dccff3 | ||
|
|
c33e7e30d4 | ||
|
|
bc7f01ba36 | ||
|
|
2ce653caad | ||
|
|
0d850d7b22 | ||
|
|
a2be155b8e | ||
|
|
68aa107689 | ||
|
|
23096ed3a5 | ||
|
|
90a65c35c1 | ||
|
|
3d88827a95 | ||
|
|
40a0a8df5a | ||
|
|
20f7129c0b | ||
|
|
0e962e95dd | ||
|
|
07ba9c772c | ||
|
|
0622d88b22 | ||
|
|
594f0fed55 | ||
|
|
04b0d9b88d | ||
|
|
1f2af8ef94 | ||
|
|
598ea2d857 | ||
|
|
6dd9bbb516 | ||
|
|
3cd0b47dc6 | ||
|
|
65c71b5f20 | ||
|
|
1152b11202 | ||
|
|
51246ea31b | ||
|
|
7e5592dd32 | ||
|
|
c6b28caebf | ||
|
|
ca002f6fff | ||
|
|
14ec392091 | ||
|
|
5e2eb91ac0 | ||
|
|
c1626613ce | ||
|
|
42042d9e73 | ||
|
|
22c3b53ab8 | ||
|
|
090c32c90e | ||
|
|
4f4a9b9e55 | ||
|
|
6c7d7c9015 | ||
|
|
562e62a8c0 | ||
|
|
0823f7aa48 | ||
|
|
eb201c0420 | ||
|
|
6cfed9a39d | ||
|
|
33618c4a6b | ||
|
|
ace0a7c219 | ||
|
|
f7d018cf94 | ||
|
|
8ae2a556e4 | ||
|
|
4188deb386 | ||
|
|
82cf4ed909 | ||
|
|
88fc437abc | ||
|
|
57f868cab1 | ||
|
|
6cb5527894 | ||
|
|
016783a1e5 | ||
|
|
594ccff9c8 | ||
|
|
30792f0584 | ||
|
|
8f021eb35a | ||
|
|
1969abc340 | ||
|
|
b1b53ab983 | ||
|
|
9b5af23982 | ||
|
|
4cedc6d3c8 | ||
|
|
4e9cce76da | ||
|
|
9b004f3d2f | ||
|
|
9430e3090d | ||
|
|
ba44f9117b | ||
|
|
eb56710a72 | ||
|
|
38e3f27899 | ||
|
|
3c58d96db5 | ||
|
|
a6be0cc135 | ||
|
|
a53510bc41 | ||
|
|
1fd482e899 | ||
|
|
2f130ba009 | ||
|
|
e6d9db9395 | ||
|
|
e0ac743cdb | ||
|
|
b0d3fc11f0 | ||
|
|
7e0a50fbf2 | ||
|
|
59df244173 | ||
|
|
deb31a02cf | ||
|
|
e3aa1315ae | ||
|
|
65bc5efa19 | ||
|
|
abc4bc24b4 | ||
|
|
5df3f06f83 | ||
|
|
0e1de82bd7 | ||
|
|
f31e41b3f1 | ||
|
|
61a68477d0 | ||
|
|
fe8d2718c4 | ||
|
|
8afefada0a | ||
|
|
745e1c37c0 | ||
|
|
fdb5988cec | ||
|
|
36ffcf3cc3 | ||
|
|
e74f626383 | ||
|
|
ef99f64291 | ||
|
|
a0f8f3ae32 | ||
|
|
130f52f315 | ||
|
|
a05868cc45 | ||
|
|
2fc77aed15 | ||
|
|
c56edb4da6 | ||
|
|
6672190760 | ||
|
|
f122b17097 | ||
|
|
2c5f68e696 | ||
|
|
e1ca645a32 | ||
|
|
333bf56ddc | ||
|
|
b240594859 | ||
|
|
beccae933f | ||
|
|
e6aa1d2c54 | ||
|
|
5e808bab65 | ||
|
|
361d78247b | ||
|
|
3550103e45 | ||
|
|
8b0d4d4de4 | ||
|
|
dc71c04b67 | ||
|
|
a0254ed817 | ||
|
|
2563ecf3c5 | ||
|
|
c04738d9fe | ||
|
|
1266b4d086 | ||
|
|
99cf0a1522 | ||
|
|
98a75e923d | ||
|
|
ad96d676e6 | ||
|
|
79333bbc35 | ||
|
|
5c5b0f4fde | ||
|
|
ed6cdfedbb | ||
|
|
23f13ef05f | ||
|
|
f9c59d9706 | ||
|
|
e1cec42227 | ||
|
|
8d79c50d53 | ||
|
|
d77830b97f | ||
|
|
394540f689 | ||
|
|
7d776e0ce2 | ||
|
|
17df1692b9 | ||
|
|
9ab652641d | ||
|
|
9119f7166f | ||
|
|
da7d9d8eb9 | ||
|
|
80fccc90b7 | ||
|
|
dcebc70f1a | ||
|
|
259e7bc322 | ||
|
|
37bdb6c6f6 | ||
|
|
dc71afdd3f | ||
|
|
44638108d0 | ||
|
|
93fcac498c | ||
|
|
79e2743aac | ||
|
|
5e9c7cdd91 | ||
|
|
6f73e5087d | ||
|
|
8c120b020e | ||
|
|
12fc6f9d38 | ||
|
|
a6e8483b4c | ||
|
|
7191d28ada | ||
|
|
e6b5e3d282 | ||
|
|
1413d6b5fe | ||
|
|
dcd8a1094c | ||
|
|
e64b31b9ba | ||
|
|
080f347511 | ||
|
|
eaaff4298d | ||
|
|
dd5a02e8ef | ||
|
|
3211ec57ee | ||
|
|
6796afdaee | ||
|
|
cc6fe57773 | ||
|
|
1dfc831938 | ||
|
|
cafeda4abf | ||
|
|
d951b99718 | ||
|
|
0ad87209e5 | ||
|
|
1b50c5404d | ||
|
|
3007f67cab | ||
|
|
ee08659f01 | ||
|
|
baf5ad0fab | ||
|
|
8bdd748aec | ||
|
|
cef0c22f52 | ||
|
|
13d3fc5cfe | ||
|
|
b91141e2be | ||
|
|
f8a4b54165 | ||
|
|
afe007ca0b | ||
|
|
8a9a044f95 | ||
|
|
5eaf03e227 | ||
|
|
a8437d9331 | ||
|
|
e0392fa98b | ||
|
|
68ff8951de | ||
|
|
9c6b31e71c | ||
|
|
50f74f5ba2 | ||
|
|
b9de2aef60 | ||
|
|
7a47598538 | ||
|
|
3c8c28ebd5 | ||
|
|
524285f767 | ||
|
|
c2a34475f1 | ||
|
|
a69195a02b | ||
|
|
19d7438499 | ||
|
|
ccb380ce06 | ||
|
|
a35c439bbd | ||
|
|
09d1f96603 | ||
|
|
26aa18d980 | ||
|
|
d10b542797 | ||
|
|
ce4e4fb8dd | ||
|
|
8f4a31cf8c | ||
|
|
23549f13d6 | ||
|
|
869d11f9a6 | ||
|
|
02e73b82ee | ||
|
|
f85f87f545 | ||
|
|
1fff5713f3 | ||
|
|
8453ec36f0 | ||
|
|
d5b3ce8424 | ||
|
|
80cbbfa5ca | ||
|
|
9177bb660f | ||
|
|
a3df39a01a | ||
|
|
25dce05cbb | ||
|
|
1542ea3e03 | ||
|
|
6084abbcfe | ||
|
|
ed19b63914 | ||
|
|
4efeb85296 | ||
|
|
fc76665615 | ||
|
|
3a044bb71a | ||
|
|
cddd606562 | ||
|
|
7a5bc51c11 | ||
|
|
9f939b4b6f | ||
|
|
80a86f5b1b | ||
|
|
a0ce1855ab | ||
|
|
a4b43b884a | ||
|
|
824c0f6667 | ||
|
|
a030fe8491 | ||
|
|
3a9429e8ef | ||
|
|
c4eb1ab748 | ||
|
|
29ed19d600 | ||
|
|
0cc65513a5 | ||
|
|
debc048659 | ||
|
|
92f5c918dd | ||
|
|
9519f1e8e2 | ||
|
|
a8f874bf05 | ||
|
|
9d9917e45b | ||
|
|
91ee0a870d | ||
|
|
6cbbffc5a9 | ||
|
|
8f26fd34d1 | ||
|
|
fda655f6d7 | ||
|
|
a663d6509b | ||
|
|
9ec8839efa | ||
|
|
a7a0350eb2 | ||
|
|
39a7a0d960 | ||
|
|
7740e1e131 | ||
|
|
9dce1ed47e | ||
|
|
e84a00d3a5 | ||
|
|
88a944cb57 | ||
|
|
20c32e72cc | ||
|
|
4788c20816 | ||
|
|
e83fc570a4 | ||
|
|
e841b6af88 | ||
|
|
ea6f209557 | ||
|
|
9bfa726107 | ||
|
|
d24902c66d | ||
|
|
72aea2d3f3 | ||
|
|
dc9612d564 | ||
|
|
1770556d56 | ||
|
|
888fb84aee | ||
|
|
d597fd056d | ||
|
|
dea0ab3974 | ||
|
|
da6facd7d7 | ||
|
|
bb8ab5f173 | ||
|
|
ac8a541059 | ||
|
|
0e66771f0e | ||
|
|
d3a295a801 | ||
|
|
f2df771771 | ||
|
|
7b72cd87a5 | ||
|
|
9431efc6d1 | ||
|
|
7c3f5431ba | ||
|
|
d98cf16a4c | ||
|
|
2c3c3ae546 | ||
|
|
905eef48e3 | ||
|
|
b31b520c7c | ||
|
|
17aee086a3 | ||
|
|
c1756e5767 | ||
|
|
2920279c64 | ||
|
|
1f0f985b01 | ||
|
|
0762c81633 | ||
|
|
28ef301ccc | ||
|
|
26c6a2950f | ||
|
|
5082876de3 | ||
|
|
e50e7ad3d5 | ||
|
|
45a4a6b6da | ||
|
|
02918b7267 | ||
|
|
6c662a36c1 | ||
|
|
b78fe3822a | ||
|
|
35eda37e83 | ||
|
|
176a8e7067 | ||
|
|
61d4f1fd4b | ||
|
|
121b68995e | ||
|
|
d11f1d8dae | ||
|
|
c0ef2b5064 | ||
|
|
2a7308363e | ||
|
|
dc0c556f96 | ||
|
|
ba2ee1c0aa | ||
|
|
0f8b550d68 | ||
|
|
ed1fc98821 | ||
|
|
fa53b468fd | ||
|
|
4e2533d320 | ||
|
|
388ae49e55 | ||
|
|
f3f347dcba | ||
|
|
655be3519c | ||
|
|
06df2940af | ||
|
|
4149549e42 | ||
|
|
da351991f8 | ||
|
|
3305152e50 | ||
|
|
bea7bae674 | ||
|
|
45773d38ed | ||
|
|
8d4c176314 | ||
|
|
9ca5c87c4c | ||
|
|
36a6f00e5f | ||
|
|
e24a5b4cb5 | ||
|
|
f88031b0c9 | ||
|
|
830151e6da | ||
|
|
1e14fba81a | ||
|
|
7b8800c4eb | ||
|
|
8f4625f53b | ||
|
|
1e5f243edb | ||
|
|
e5eab2af34 | ||
|
|
c10973e160 | ||
|
|
b1e4bff3ec | ||
|
|
c1202cda63 | ||
|
|
32d6cd7776 | ||
|
|
2f78d30e93 | ||
|
|
33407c9f0d | ||
|
|
d2d5ef1c5c | ||
|
|
98d8eaee02 | ||
|
|
10b9228060 | ||
|
|
5872f1e017 | ||
|
|
5073f21002 | ||
|
|
69aaf09ac8 | ||
|
|
6e61ee81d8 | ||
|
|
cfd05a8d17 | ||
|
|
29845fcc4c | ||
|
|
e204b180a8 | ||
|
|
563972fd29 | ||
|
|
cbe94b84fc | ||
|
|
aa6f73574d | ||
|
|
94f0419ef7 | ||
|
|
cefd2d7f49 | ||
|
|
81e1e545fb | ||
|
|
d516920e72 | ||
|
|
2171372246 | ||
|
|
d2df4d0cce | ||
|
|
6ab90fc123 | ||
|
|
1a84ebbb1e | ||
|
|
c9c0352369 | ||
|
|
9903b028a3 | ||
|
|
49def5d883 | ||
|
|
6975525b70 | ||
|
|
fbc4f8527b | ||
|
|
90cb5a1951 | ||
|
|
ac71d9f034 | ||
|
|
64bcbc9fc0 | ||
|
|
9e7d46f956 | ||
|
|
e911896cfb | ||
|
|
9c6d66093f | ||
|
|
b2e39b9701 | ||
|
|
e95ad4049b | ||
|
|
1df49d1d6f | ||
|
|
b71000e2f3 | ||
|
|
47e6ed455e | ||
|
|
92592fb9d9 | ||
|
|
02a9769b35 | ||
|
|
7640f11bfc | ||
|
|
be8a0991ed | ||
|
|
9fa44dbcfa | ||
|
|
61aac9c80c | ||
|
|
60af83cfee | ||
|
|
cf64e6c231 | ||
|
|
2cae941bae | ||
|
|
bc0784f41d | ||
|
|
b711140f26 | ||
|
|
c57d75e01a | ||
|
|
1d766001bb | ||
|
|
0759a11a85 | ||
|
|
cb749a38ab | ||
|
|
369eab18ab | ||
|
|
73edeae013 | ||
|
|
7d46314dc8 | ||
|
|
d5a53a89eb | ||
|
|
a85bc510dd | ||
|
|
2beea7d218 | ||
|
|
a93cd3dd5f | ||
|
|
6c1f540170 | ||
|
|
d026a9f009 | ||
|
|
a8e7dadd39 | ||
|
|
2f8d921adf | ||
|
|
0c6e526f94 | ||
|
|
b1e3018b6b | ||
|
|
87f05fce66 | ||
|
|
1b37530c96 | ||
|
|
db4d02c2e2 | ||
|
|
fd7811402b | ||
|
|
eb0325e627 | ||
|
|
842c3c8ea9 | ||
|
|
8b4b04ec09 | ||
|
|
9f32c9280f | ||
|
|
4fcd09cfa8 | ||
|
|
7a8d65d37d | ||
|
|
23129a9ba2 | ||
|
|
7f791e730b | ||
|
|
f7e296b349 | ||
|
|
712d4acaaa | ||
|
|
74a5c01f21 | ||
|
|
3ba8724d77 | ||
|
|
6313a7d8a9 | ||
|
|
432a3f520c | ||
|
|
191b3e42d4 | ||
|
|
a27f05fcb4 | ||
|
|
2f33e0b873 | ||
|
|
f0359467f1 | ||
|
|
d1db8cf2c8 | ||
|
|
b1985ed2ce | ||
|
|
140ddc70e6 | ||
|
|
d7fd616470 | ||
|
|
3ccbef141e | ||
|
|
e92fbb0443 | ||
|
|
bd270aed68 | ||
|
|
28d7864393 | ||
|
|
b5d8173ee3 | ||
|
|
17d62a9af7 | ||
|
|
d89fb863ed | ||
|
|
a21ad77820 | ||
|
|
f86c8e8cab | ||
|
|
cb12cbdd3d | ||
|
|
6661fa996c | ||
|
|
c19bca798b | ||
|
|
8f98b411db | ||
|
|
a8aa03847e | ||
|
|
1bfd747cc6 | ||
|
|
ae06d945a7 | ||
|
|
9f41d5f34d | ||
|
|
ef61c52908 | ||
|
|
d8842ef274 | ||
|
|
c88fdaf353 | ||
|
|
af295da871 | ||
|
|
083235a2fe | ||
|
|
2a3a5f7eb2 | ||
|
|
77c48f280f | ||
|
|
0ee1eb2f9f | ||
|
|
c2b20365bb | ||
|
|
cfdc7e4452 | ||
|
|
2363f61aa9 | ||
|
|
557ac6f9fa | ||
|
|
a49b871cf9 | ||
|
|
a0d6b3efba | ||
|
|
6cabf07bc0 | ||
|
|
a15444ee8c | ||
|
|
ceb5f5669e | ||
|
|
25b75e05e4 | ||
|
|
4d214bb5c1 | ||
|
|
7cbaed8c6c | ||
|
|
2915fdf665 | ||
|
|
a66c385b08 | ||
|
|
4dace7c5d8 | ||
|
|
8ebf087dbf | ||
|
|
2fa8bda5bb | ||
|
|
a5ae833945 | ||
|
|
d21d42b312 | ||
|
|
78575f0f0a | ||
|
|
8ccd292d16 | ||
|
|
2534f59398 | ||
|
|
5c60dbe2b1 | ||
|
|
c99ecde15f | ||
|
|
219f3403d9 | ||
|
|
00f417bad6 | ||
|
|
81649f053b | ||
|
|
e5bde50f2d | ||
|
|
0321e00b0d | ||
|
|
09528e3292 | ||
|
|
e7412a9cbf | ||
|
|
01efe5f869 | ||
|
|
28a178a55c | ||
|
|
88f130014c | ||
|
|
af258c590c | ||
|
|
b0eb5733be | ||
|
|
fe35bfba37 | ||
|
|
7cfbc4ab8f | ||
|
|
7a9d4f0abd | ||
|
|
6f6a5b565c | ||
|
|
e57deb873c | ||
|
|
0f692b1608 | ||
|
|
8c03e79f99 | ||
|
|
71290f0929 | ||
|
|
22364ef7de | ||
|
|
2cc1eb1abc | ||
|
|
90dbcbb4e2 | ||
|
|
66503d58be | ||
|
|
8e10f0ce2b | ||
|
|
f51f510f2e | ||
|
|
c44f085b47 | ||
|
|
a35f36eeaf | ||
|
|
14564c392a | ||
|
|
76e05ea749 | ||
|
|
ab599dceed | ||
|
|
4c37604445 | ||
|
|
bb74018d19 | ||
|
|
575289e5bc | ||
|
|
e89da2a7b4 | ||
|
|
bd34959f68 | ||
|
|
622dcf8fd5 | ||
|
|
9e315739b7 | ||
|
|
7b01adc5df | ||
|
|
432fc47443 | ||
|
|
d8fba44c5e | ||
|
|
e29d3d8c01 | ||
|
|
e678413214 | ||
|
|
eaa9d9d087 | ||
|
|
9e3cc076b7 | ||
|
|
3bb01fa52c | ||
|
|
008e49d144 | ||
|
|
4e275384b0 | ||
|
|
63ec99f67a | ||
|
|
14a8bb57df | ||
|
|
7512bfc710 | ||
|
|
3c3b6dadc3 | ||
|
|
cd722a0e39 | ||
|
|
a1b5d0a100 | ||
|
|
69d3ae709c | ||
|
|
67ef993d61 | ||
|
|
20f49890ad | ||
|
|
3e4917f0a1 | ||
|
|
99ee75aec6 | ||
|
|
1674653a42 | ||
|
|
d2f7e55bf5 | ||
|
|
9f31df7f3a | ||
|
|
b8c1b53d67 | ||
|
|
2495837791 | ||
|
|
b6562e3c47 | ||
|
|
c57da046ee | ||
|
|
ff63134c14 | ||
|
|
3f5210c587 | ||
|
|
3df5e7b9b9 | ||
|
|
225db66738 | ||
|
|
383ebb8f57 | ||
|
|
e1bed60f1f | ||
|
|
edbb856023 | ||
|
|
98d3ab646f | ||
|
|
81be556f1b | ||
|
|
f45a085469 | ||
|
|
210cc58cc3 | ||
|
|
1063b11ef6 | ||
|
|
a4e999c47f | ||
|
|
543e01c301 | ||
|
|
14e0aa3ec5 | ||
|
|
1a8a171f8b | ||
|
|
f1954f9a43 | ||
|
|
441b148501 | ||
|
|
bd0f30b81c | ||
|
|
ad14e9bf40 | ||
|
|
6f71301aaf | ||
|
|
5f0d601baa | ||
|
|
f234a5bcc2 | ||
|
|
ab677ea100 | ||
|
|
f3ad53e949 | ||
|
|
d324cfa84d | ||
|
|
dd4319d72a | ||
|
|
1f2de3d3d8 | ||
|
|
72702beb0b | ||
|
|
adb0cbc5dd | ||
|
|
6a503b82c3 | ||
|
|
28a87351f1 | ||
|
|
bcc97378b0 | ||
|
|
eb8a138713 | ||
|
|
dcd7dcbbdf | ||
|
|
1538759ba7 | ||
|
|
30e8ea7fd8 | ||
|
|
879b7b582c | ||
|
|
8ba4236402 | ||
|
|
5eef8fa9b9 | ||
|
|
d03d035437 | ||
|
|
68e8e1f70b | ||
|
|
7acb45b157 | ||
|
|
c36142deaf | ||
|
|
5fd6e316fa | ||
|
|
39a9d7765a | ||
|
|
7cfcba29a6 | ||
|
|
9bf8aadca9 | ||
|
|
714d4af63d | ||
|
|
8203fdb4f0 | ||
|
|
5e1e2d1a4f | ||
|
|
2f941de65b | ||
|
|
777c503002 | ||
|
|
e9b23f68fd | ||
|
|
efa45e6203 | ||
|
|
638f55f83c | ||
|
|
8b2fc29d5b | ||
|
|
b516fb0550 | ||
|
|
efef34c01e | ||
|
|
5f1dfa7599 | ||
|
|
8e9c7544cf | ||
|
|
4e3d5641c8 | ||
|
|
20b760529e | ||
|
|
a55a07c5ff | ||
|
|
94ee8ea297 | ||
|
|
ec5d71d0e1 | ||
|
|
d121d08d05 | ||
|
|
be08f4a558 | ||
|
|
010f082fbb | ||
|
|
073cdf6d51 | ||
|
|
4df8606ab6 | ||
|
|
71442d26ec | ||
|
|
4f5528869c | ||
|
|
f16feff17b | ||
|
|
71b233fe5f | ||
|
|
770dec9ed6 | ||
|
|
2ca95a988e | ||
|
|
d8aae538cd | ||
|
|
cf1e7ee08a | ||
|
|
d14513ddfd | ||
|
|
9a9017bc6c | ||
|
|
3c9b654713 | ||
|
|
80d2ad40bc | ||
|
|
31670e75e5 | ||
|
|
ed6011a2be | ||
|
|
cdded38ade | ||
|
|
f536f24833 | ||
|
|
f5bff00b1f | ||
|
|
27c9717445 | ||
|
|
863a1ba8ef | ||
|
|
cb04dd2b83 | ||
|
|
8c7cf51958 | ||
|
|
244fb1fed6 | ||
|
|
25f7a68a13 | ||
|
|
62d8cf79ef | ||
|
|
646b18d910 | ||
|
|
2f81b2e381 | ||
|
|
1f5a7e7885 | ||
|
|
80fca470f2 | ||
|
|
6e9d9ac856 | ||
|
|
8d6fada1eb | ||
|
|
3e715399a1 | ||
|
|
81cc8831f9 | ||
|
|
f7370044a7 | ||
|
|
51b015a629 | ||
|
|
392af7a553 | ||
|
|
d2dd07bad7 | ||
|
|
cebcd6925a | ||
|
|
e7b4357fc7 | ||
|
|
dc279dde4a | ||
|
|
c0810a674f | ||
|
|
0760cabbbe | ||
|
|
3b149c520b | ||
|
|
3d19fc89ff | ||
|
|
cd1b1919f4 | ||
|
|
0ed646eb27 | ||
|
|
c0c5859c99 | ||
|
|
a47121b849 | ||
|
|
d9dd20e89a | ||
|
|
ed4609ebe5 | ||
|
|
e24225c828 | ||
|
|
01ef86d658 | ||
|
|
cd4802da04 | ||
|
|
2aca65780f | ||
|
|
2c435f7387 | ||
|
|
cc1afd1a9c | ||
|
|
6f098cdba6 | ||
|
|
d03e9fb90a | ||
|
|
9f2966abe9 | ||
|
|
4e28ea1883 | ||
|
|
289214e85c | ||
|
|
a20d98bf93 | ||
|
|
7c3d98acbe | ||
|
|
7311786f48 | ||
|
|
82de9c926e | ||
|
|
7fd86d4de3 | ||
|
|
724da29e2a | ||
|
|
54113d7b94 | ||
|
|
66396e8290 | ||
|
|
72be76215f | ||
|
|
ace86703a9 | ||
|
|
7b25495463 | ||
|
|
3d4b651c1f | ||
|
|
d305ae064d | ||
|
|
ac4f3d8907 | ||
|
|
af2687771b | ||
|
|
a67b7f909a | ||
|
|
f9c3e4cdb0 | ||
|
|
dc62c1f8d4 | ||
|
|
0441b51a68 | ||
|
|
5c0c9f687e | ||
|
|
e049c54043 | ||
|
|
99e47540d5 | ||
|
|
8e1885ffeb | ||
|
|
8501a0c205 | ||
|
|
797f2a3173 | ||
|
|
1057b4bc35 | ||
|
|
efc0116595 | ||
|
|
cdc560fad0 | ||
|
|
75a2803710 | ||
|
|
fb3169faa4 | ||
|
|
d587bd837e | ||
|
|
b9fab74edc | ||
|
|
50c22bbadb | ||
|
|
d0b10b9195 | ||
|
|
50a296de20 | ||
|
|
c8fe4f4a3c | ||
|
|
a8ba0720af | ||
|
|
745a01246c | ||
|
|
bee5d3550f | ||
|
|
1789393151 | ||
|
|
345afe1338 | ||
|
|
65428aa49f | ||
|
|
b251ee9322 | ||
|
|
04f00682a0 | ||
|
|
90dcda1475 | ||
|
|
f1ee4eb89f | ||
|
|
343fc22168 | ||
|
|
00ef0d7e3d | ||
|
|
f2deaf6199 | ||
|
|
617a2c010e | ||
|
|
c79e38e044 | ||
|
|
38eae1d1ee | ||
|
|
7e4c89b0cb | ||
|
|
14c29f07bd | ||
|
|
825e3dbcf5 | ||
|
|
8275130f04 | ||
|
|
2c47abea95 | ||
|
|
85aa28d724 | ||
|
|
53a3736b04 | ||
|
|
86ba3c230e | ||
|
|
8d21126bd6 | ||
|
|
74ded91976 | ||
|
|
7c27520d57 | ||
|
|
b54bbc4c5a | ||
|
|
3e09a4ddd4 | ||
|
|
f93f04a536 | ||
|
|
b93f30b809 | ||
|
|
95bd2f26a5 | ||
|
|
7cfcf056f9 | ||
|
|
96b565e1e8 | ||
|
|
9d7ad7a18f | ||
|
|
9838c2758b | ||
|
|
1b1f5f5a5e | ||
|
|
0f95f62aa1 | ||
|
|
9405ba7871 | ||
|
|
ccb95f803c | ||
|
|
dae745d925 | ||
|
|
791db65526 | ||
|
|
60b2ff0a7a | ||
|
|
e6c8507379 | ||
|
|
420db5416e | ||
|
|
6e03218d54 | ||
|
|
5e4bd36b26 | ||
|
|
bbc039366e | ||
|
|
e1ec7dbbba | ||
|
|
075b008740 | ||
|
|
b2c382fa01 | ||
|
|
02e2e617f5 | ||
|
|
c5f9b5861f | ||
|
|
2dace4c697 | ||
|
|
c7891385ca | ||
|
|
2059ddcadf | ||
|
|
ba1b68df20 | ||
|
|
bfc8024119 | ||
|
|
f26cf6ed6f | ||
|
|
403b61836d | ||
|
|
b5af7d1eb9 | ||
|
|
f453af6e4c | ||
|
|
f2be55bd8e | ||
|
|
d241dd17ca | ||
|
|
cecafdfe6c | ||
|
|
6fecfd1a0e | ||
|
|
64245d001c | ||
|
|
7d92965cae | ||
|
|
b4fa08c4e2 | ||
|
|
d4e9566851 | ||
|
|
a26b494f7f | ||
|
|
b84e22e41f | ||
|
|
cee6efab19 | ||
|
|
30f71cb550 | ||
|
|
771e755a78 | ||
|
|
16ec462abd | ||
|
|
ca55465d3c | ||
|
|
7098c98dde | ||
|
|
f56355da89 | ||
|
|
422160debd | ||
|
|
8062cf406a | ||
|
|
0e802232ec | ||
|
|
f650a9205d | ||
|
|
c85dbb2347 | ||
|
|
a6a79128c8 | ||
|
|
42839627e8 | ||
|
|
e7f35098e4 | ||
|
|
267e68a894 | ||
|
|
b32b444438 | ||
|
|
522d0f8313 | ||
|
|
5715e5de67 | ||
|
|
cc6b05e8b3 | ||
|
|
417747d5d0 | ||
|
|
a34f439226 | ||
|
|
b7ca014fd0 | ||
|
|
fa098d585a | ||
|
|
c35a14e3ec | ||
|
|
60651736a5 | ||
|
|
581f9b7bd3 | ||
|
|
124eb04807 | ||
|
|
1d561da7fb | ||
|
|
16e3cd0784 | ||
|
|
a6d91933dc | ||
|
|
445c40f758 | ||
|
|
725a841a3b | ||
|
|
f77c453843 | ||
|
|
ba6718d5bc | ||
|
|
cdb7a1b3fa | ||
|
|
a03c79b89d | ||
|
|
98800d3426 | ||
|
|
a616adaac4 | ||
|
|
ffb5605c99 | ||
|
|
621b556856 | ||
|
|
a3ffecbb2a | ||
|
|
ea64cebe2a | ||
|
|
e79487dd5f | ||
|
|
7fe1c1ec89 | ||
|
|
ab2bbff369 | ||
|
|
ec32825309 | ||
|
|
fd0c182087 | ||
|
|
49fcff1daf | ||
|
|
33b64ddf39 | ||
|
|
4c447aa648 | ||
|
|
ccbfc3d274 | ||
|
|
f83fe43bbb | ||
|
|
19022d67f8 | ||
|
|
58a815dd6b | ||
|
|
1ce95c473d | ||
|
|
eb365e398d | ||
|
|
bc9fe82860 | ||
|
|
b3cd9bf2b9 | ||
|
|
c5c2b829ec | ||
|
|
9713f96401 | ||
|
|
11f35ebf96 | ||
|
|
7d403aa181 | ||
|
|
64af810a4a | ||
|
|
30821905af | ||
|
|
a9dbff756b | ||
|
|
a6aba10d3d | ||
|
|
9c276c37fe | ||
|
|
6ab6c0fd4c | ||
|
|
b6b0fe3fff | ||
|
|
0d5825bda9 | ||
|
|
cdfb64631a | ||
|
|
d161c281c8 | ||
|
|
8fed5bf2a1 | ||
|
|
98d2e9bd27 | ||
|
|
a03af55edd | ||
|
|
86e2fd9aee | ||
|
|
97bd0e5e58 | ||
|
|
ceaba21986 | ||
|
|
172a77d942 | ||
|
|
4f9d2d2a7d | ||
|
|
8c929f6e05 | ||
|
|
3319b71f5b | ||
|
|
46ec028a5b | ||
|
|
0ce0ef3e5c | ||
|
|
375b071cb2 | ||
|
|
29e1417ff2 | ||
|
|
75db2bd366 | ||
|
|
60ca1efbda | ||
|
|
2692e4978b | ||
|
|
91982eb002 | ||
|
|
bb1dec76fa | ||
|
|
f618b8fcdc | ||
|
|
9147cab75b | ||
|
|
5f07bcc8e6 | ||
|
|
705cf2ea1b | ||
|
|
42c4394484 | ||
|
|
221221a3c1 | ||
|
|
9564166297 | ||
|
|
f5cf3c3c8e | ||
|
|
18f919fb6b | ||
|
|
0924835253 | ||
|
|
20d2e5c578 | ||
|
|
907801605c | ||
|
|
93bc684e8c | ||
|
|
a76c98d57e | ||
|
|
d937a800d0 | ||
|
|
d16f3a227f | ||
|
|
80c9a3eeda | ||
|
|
e68173b451 | ||
|
|
40c27d87f5 | ||
|
|
3c13b5049d | ||
|
|
8288d5e51f | ||
|
|
6e1449900a | ||
|
|
4ffbb18ab4 | ||
|
|
b27271b7a3 | ||
|
|
ebb6665f64 | ||
|
|
e4e5731ffd | ||
|
|
2ab5810f13 | ||
|
|
af934c5d09 | ||
|
|
1e0cf7c112 | ||
|
|
46859c93c9 | ||
|
|
ea1f9cb3b2 | ||
|
|
1641549016 | ||
|
|
716a5dbb8a | ||
|
|
af98cb11c5 | ||
|
|
9a4c2cf341 | ||
|
|
2bc3bcd102 | ||
|
|
d6c663f79d | ||
|
|
9ed86e5f53 | ||
|
|
303e0bc037 | ||
|
|
2cc24019f9 | ||
|
|
83ce774d19 | ||
|
|
2b4ee13b5e | ||
|
|
3a964561f0 | ||
|
|
6959f86632 | ||
|
|
537d373e10 | ||
|
|
cceadf222c | ||
|
|
cf5a4af623 | ||
|
|
39aea11c22 | ||
|
|
c2f1227700 | ||
|
|
900f14d37c | ||
|
|
598249b1d6 | ||
|
|
7ed15bdf04 | ||
|
|
2fc0ec0f72 | ||
|
|
5e9c2a669b | ||
|
|
b310521884 | ||
|
|
288945bf7e | ||
|
|
4fc07cff36 | ||
|
|
b884fe0e86 | ||
|
|
855858c236 | ||
|
|
c11a2a5419 | ||
|
|
773a6572af | ||
|
|
88ad373c9b | ||
|
|
51666464b9 | ||
|
|
5af9cf2f52 | ||
|
|
12c4ae4b10 | ||
|
|
4e1bef414a | ||
|
|
e896c18644 | ||
|
|
c852685e74 | ||
|
|
1e99797df8 | ||
|
|
52a4c986a8 | ||
|
|
c501728204 | ||
|
|
6b067fa6a7 | ||
|
|
a1cd5c53a9 | ||
|
|
a46d487e03 | ||
|
|
3deb6d3ab3 | ||
|
|
af34cdd5d2 | ||
|
|
6e1393235a | ||
|
|
343e0b54b9 | ||
|
|
ecb70cb6f7 | ||
|
|
ca50618af6 | ||
|
|
29c07ba83e | ||
|
|
45fbb83a9f | ||
|
|
ae7ba2df25 | ||
|
|
c3ef57cc32 | ||
|
|
7bb4ca5a14 | ||
|
|
063783d81d | ||
|
|
42116c9b65 | ||
|
|
a36e11973d | ||
|
|
5125568ea2 | ||
|
|
0fa164e50d | ||
|
|
cf814e81ee | ||
|
|
43a45f18ce | ||
|
|
ad51381063 | ||
|
|
0b0e4ce904 | ||
|
|
6a3e04d688 | ||
|
|
4107a17370 | ||
|
|
06b4d8f169 | ||
|
|
1c0c820746 | ||
|
|
d061403a28 | ||
|
|
5c092321a6 | ||
|
|
bdd3f61c1f | ||
|
|
8023557d6e | ||
|
|
074b0ced7a | ||
|
|
3864b1ac9b | ||
|
|
6e9b43457d | ||
|
|
ca1aec8920 | ||
|
|
acac580862 | ||
|
|
673e1b2980 | ||
|
|
f62157be72 | ||
|
|
f894ecf3b6 | ||
|
|
66dd4e28ad | ||
|
|
939dc1b0fb | ||
|
|
56bf5d38a1 | ||
|
|
d09b70b295 | ||
|
|
205180387a | ||
|
|
39c8cfeda5 | ||
|
|
f38a329be5 | ||
|
|
a0cd069539 | ||
|
|
bf306a2f01 | ||
|
|
c31f93a8d1 | ||
|
|
4730ab6309 | ||
|
|
1ae78ca98c | ||
|
|
d2379da478 | ||
|
|
0f64981b20 | ||
|
|
0002e49bb5 | ||
|
|
db13a60274 | ||
|
|
db0f11a359 | ||
|
|
ac7f43520b | ||
|
|
f67b9f5f6e | ||
|
|
c75156c4ce | ||
|
|
10270b5595 | ||
|
|
f7458572ed | ||
|
|
d57b7222b2 | ||
|
|
62e70a673a | ||
|
|
5e9eba6478 | ||
|
|
cb02dfe1a4 | ||
|
|
b50739e1af | ||
|
|
8da1b0212d | ||
|
|
ca1f2acb33 | ||
|
|
c15f966669 | ||
|
|
7705b8781a | ||
|
|
b2502746f0 | ||
|
|
ab68094386 | ||
|
|
bbec701223 | ||
|
|
b29d14e600 | ||
|
|
86e51c5cd1 | ||
|
|
cb8267be3f | ||
|
|
eaed43915c | ||
|
|
bd91fd2c38 | ||
|
|
1203b214cd | ||
|
|
c3fec15f11 | ||
|
|
0545653494 | ||
|
|
db2989bdb4 | ||
|
|
587bd00a19 | ||
|
|
960ff438e8 | ||
|
|
98e7ea85d3 | ||
|
|
2549e44710 | ||
|
|
4d32b563ca | ||
|
|
3a4b732977 | ||
|
|
500909a28e | ||
|
|
07753eb25b | ||
|
|
c6eaf3d010 | ||
|
|
6723fe8271 | ||
|
|
3348b70435 | ||
|
|
35a8527c16 | ||
|
|
7afc475290 | ||
|
|
789bceaa3a | ||
|
|
abbc043969 | ||
|
|
654e5762f1 | ||
|
|
507c3e3629 | ||
|
|
991dfeb2f2 | ||
|
|
26482fc2d3 | ||
|
|
e0ce6d9688 | ||
|
|
946595216a | ||
|
|
864b6bc56d | ||
|
|
6ea5b7581f | ||
|
|
f70b8f0c10 | ||
|
|
1593bcb537 | ||
|
|
bf7fc02c8d | ||
|
|
143702b92b | ||
|
|
c5ccc1a084 | ||
|
|
2ecb52a9b2 | ||
|
|
6439917cbe | ||
|
|
d21c18f657 | ||
|
|
25ef0039e4 | ||
|
|
e6981290bc | ||
|
|
75c3d8abbd | ||
|
|
d88683f498 | ||
|
|
40b9aa3a4c | ||
|
|
b6d1515d58 | ||
|
|
e01d4264e3 | ||
|
|
2117b65487 | ||
|
|
a7823b352f | ||
|
|
c543b62a08 | ||
|
|
3923b87f08 | ||
|
|
b7ecdadb83 | ||
|
|
5ff121e1ed | ||
|
|
f486e5448f | ||
|
|
c5aae98558 | ||
|
|
6d8a3b9897 | ||
|
|
6d98780e19 | ||
|
|
3ad2c46f3f | ||
|
|
a730cee7fd | ||
|
|
77c823c100 | ||
|
|
124f21c67a | ||
|
|
e46cf20dd3 | ||
|
|
4bef5e8313 | ||
|
|
22e93b0af4 | ||
|
|
5aeca9662b | ||
|
|
b996cf1f05 | ||
|
|
878a106877 | ||
|
|
45d36f86fd | ||
|
|
b108ae403a | ||
|
|
887ed66768 | ||
|
|
dac840a887 | ||
|
|
238de4ba8c | ||
|
|
9a7bdade43 | ||
|
|
aa84556204 | ||
|
|
6b68069fcd | ||
|
|
42c7034fb2 | ||
|
|
060c7e0145 | ||
|
|
b5b085dfb1 | ||
|
|
fc06ce9d7f | ||
|
|
d8d81b05a7 | ||
|
|
a60f42b1f2 | ||
|
|
6e18be88d0 | ||
|
|
b45e439c48 | ||
|
|
b87061c18c | ||
|
|
f78aca7752 | ||
|
|
3ccca2aa10 | ||
|
|
6d7c40eb76 | ||
|
|
da4cd7fb65 | ||
|
|
c97cda6b84 | ||
|
|
7a7fd4167a | ||
|
|
dffc1a43d5 | ||
|
|
36897fea1e | ||
|
|
c7b34735f0 | ||
|
|
5b07176c88 | ||
|
|
474b40d660 | ||
|
|
a62901b948 | ||
|
|
25d8746327 | ||
|
|
aff1698223 | ||
|
|
7f8941745f | ||
|
|
b858401098 | ||
|
|
d5a158b80f | ||
|
|
f315f284aa | ||
|
|
c367f5009d | ||
|
|
6db1e63bda | ||
|
|
e22ab2ede6 | ||
|
|
b7d7e0b682 | ||
|
|
96bba15f2f | ||
|
|
fcf965a595 | ||
|
|
e1a20d3c22 | ||
|
|
2abd7d8c5d | ||
|
|
5b8f73cdd7 | ||
|
|
7fd765421f | ||
|
|
d9d94af022 | ||
|
|
790b924e57 | ||
|
|
4a62f877df | ||
|
|
ac47c57bb7 | ||
|
|
3ace4199a1 | ||
|
|
e6bd7524c1 | ||
|
|
699c86e8c1 | ||
|
|
f40fa0ecea | ||
|
|
626f94686b | ||
|
|
752d13b1b1 | ||
|
|
54c0dc1b2b | ||
|
|
c5bc709898 | ||
|
|
ccdbb01513 | ||
|
|
5206d750ac | ||
|
|
a800e3df67 | ||
|
|
ccb1f87a20 | ||
|
|
c111da4681 | ||
|
|
9cc4e97a53 | ||
|
|
dca1c0b0f3 | ||
|
|
f06be6ed21 | ||
|
|
3c8ec2f42e | ||
|
|
7e193f7f52 | ||
|
|
7069b02929 | ||
|
|
66995db927 | ||
|
|
c36054ca1b | ||
|
|
3e07fbf3dc | ||
|
|
bf3fbe3e96 | ||
|
|
0a93d22bc8 | ||
|
|
f5b3d94d16 | ||
|
|
4d1a6994aa | ||
|
|
05c686782c | ||
|
|
85609ea742 | ||
|
|
20dabc0615 | ||
|
|
356dd9bc2b | ||
|
|
cd5d7534c4 | ||
|
|
b4f12fc933 | ||
|
|
cbea387ce0 | ||
|
|
345b155374 | ||
|
|
29d216950e | ||
|
|
321b04772c | ||
|
|
5b924aee98 | ||
|
|
46d44e3405 | ||
|
|
4d5332fe25 | ||
|
|
18bd4c54f4 | ||
|
|
31c7768ca0 | ||
|
|
6ec643e9d1 | ||
|
|
2b39f6f61c | ||
|
|
bf3ca13961 | ||
|
|
82026370ec | ||
|
|
6d49bf5346 | ||
|
|
67431d87fb | ||
|
|
fdf55221e6 | ||
|
|
07f277dd3b | ||
|
|
cf8f0603ca | ||
|
|
5592408ab8 | ||
|
|
a01617b45c | ||
|
|
7abb4087b3 | ||
|
|
dff15cf27a | ||
|
|
aa858137e5 | ||
|
|
45cb143202 | ||
|
|
7a9c6ab8c4 | ||
|
|
e2c26c292d | ||
|
|
be7c3fd00e | ||
|
|
7e5461a2cf | ||
|
|
6ee9010645 | ||
|
|
a23d5be056 | ||
|
|
97a6a1fdc2 | ||
|
|
c8f567347b | ||
|
|
74c1e7f69e | ||
|
|
15a5fc0cae | ||
|
|
f07c54d47c | ||
|
|
70446be108 | ||
|
|
d6d21fca56 | ||
|
|
8d7273924f | ||
|
|
ea64afbaa7 | ||
|
|
45da9837ec | ||
|
|
8c19b7d163 | ||
|
|
ab227a08d0 | ||
|
|
40d6e77964 | ||
|
|
9326e3f1b0 | ||
|
|
0e1eb3daf6 | ||
|
|
05daac12ed | ||
|
|
c5b24b4764 | ||
|
|
cc16548e5f | ||
|
|
291d65bb3e | ||
|
|
bd3ad03da6 | ||
|
|
5fa6788357 | ||
|
|
c5c5a98ac4 | ||
|
|
a1151143cf | ||
|
|
f5024984f7 | ||
|
|
f4880fd90d | ||
|
|
0ae61d5865 | ||
|
|
d3bd775a79 | ||
|
|
da546cfe7f | ||
|
|
a211933e83 | ||
|
|
1d40b5a821 | ||
|
|
33836daeb7 | ||
|
|
d921b0f6bd | ||
|
|
0607b95df6 | ||
|
|
0de6d0e046 | ||
|
|
98427345cf | ||
|
|
9fedaa9f77 | ||
|
|
bf4c2ecd33 | ||
|
|
f8c18cc1e0 | ||
|
|
458b900412 | ||
|
|
192c776e0b | ||
|
|
5cdec18863 | ||
|
|
15f856f951 | ||
|
|
01d52cef74 | ||
|
|
95563c8659 | ||
|
|
31d8c40eca | ||
|
|
56001ed272 | ||
|
|
d916fda04c | ||
|
|
cfae655068 | ||
|
|
5596565ec4 | ||
|
|
afa1aa5d93 | ||
|
|
e98c3d8393 | ||
|
|
6687b816f0 | ||
|
|
ea8035e854 | ||
|
|
54b0171d49 | ||
|
|
676d4277b9 | ||
|
|
a4b1da3ca2 | ||
|
|
9e9c16e770 | ||
|
|
dc87006fed | ||
|
|
b9b260f26a | ||
|
|
33fd6a5016 | ||
|
|
97cbccc2ba | ||
|
|
1ee4685d5d | ||
|
|
aba18232b1 | ||
|
|
0a02441b75 | ||
|
|
1be5b4c7ff | ||
|
|
a0ce0cf18a | ||
|
|
7c54e5d093 | ||
|
|
b825e51dab | ||
|
|
589855c393 | ||
|
|
4c546f2f53 | ||
|
|
3753fce912 | ||
|
|
4c02857ec5 | ||
|
|
33f87ff7d7 | ||
|
|
784dcf2a9a | ||
|
|
43ee943acb | ||
|
|
a769fd7d13 | ||
|
|
2c4fd00b16 | ||
|
|
264771fe98 | ||
|
|
ecd92dafef | ||
|
|
c8b6e4bea3 | ||
|
|
3756cb766e | ||
|
|
068d9ca60b | ||
|
|
93f632d8b8 | ||
|
|
bb44ce7e74 | ||
|
|
6986c8d8f7 | ||
|
|
fe95506db4 | ||
|
|
310ed76b18 | ||
|
|
98830d147f | ||
|
|
19c9177d7b | ||
|
|
f41c5f97f6 | ||
|
|
648c125697 | ||
|
|
0dc2b89897 | ||
|
|
83745f83a5 | ||
|
|
2f91fe4535 | ||
|
|
739f09059e | ||
|
|
c86f9f0f5f | ||
|
|
9470ca6bc5 | ||
|
|
2a92c4d5de | ||
|
|
bb6e892657 | ||
|
|
c9079b9299 | ||
|
|
b6963c1bf9 | ||
|
|
9c29df47bb | ||
|
|
fc146d3d00 | ||
|
|
1bf5a21678 | ||
|
|
011542dc2b | ||
|
|
489784104e | ||
|
|
3860634fd2 | ||
|
|
709c324e18 | ||
|
|
b75d24d92c | ||
|
|
ed80e9424c | ||
|
|
2fe1f2060a | ||
|
|
c6df820164 | ||
|
|
d6239822db | ||
|
|
bced9ffff9 | ||
|
|
d7d1c1544a | ||
|
|
7c1e8ce48c | ||
|
|
e3b0ca8ef6 | ||
|
|
9e266eb6d5 | ||
|
|
7231403e16 | ||
|
|
344a486fd7 | ||
|
|
4fd831875d | ||
|
|
0988d067ea | ||
|
|
44dbe475af | ||
|
|
bd24cf3ea4 | ||
|
|
b493a808fe | ||
|
|
54035d108d | ||
|
|
c5e8bc7e20 | ||
|
|
3bbb4779a3 | ||
|
|
1b3963ebea | ||
|
|
3b6dd7e15a | ||
|
|
757d2a3947 | ||
|
|
61b71143f2 | ||
|
|
1b343a36c9 | ||
|
|
8e94937060 | ||
|
|
e8ffebc006 | ||
|
|
2ca95eaa9f | ||
|
|
0dc5b4cdfc | ||
|
|
cc6cd96d8e | ||
|
|
4244d37625 | ||
|
|
0b766095d4 | ||
|
|
a4f212a18f | ||
|
|
caafb73190 | ||
|
|
09482799c9 | ||
|
|
37f93d1760 | ||
|
|
725f2e5204 | ||
|
|
967198fae0 | ||
|
|
43d57f6dcb | ||
|
|
6afa4db577 | ||
|
|
3b8c3fb29a | ||
|
|
921c3b0627 | ||
|
|
c0fadb45ab | ||
|
|
a1481fb179 | ||
|
|
987cd972d3 | ||
|
|
bdf25976a3 | ||
|
|
87c3aff4ce | ||
|
|
99350a957a | ||
|
|
319068dc7e | ||
|
|
cd18806c39 | ||
|
|
95b08b2023 | ||
|
|
0e70f76c86 | ||
|
|
4d414a2994 | ||
|
|
3d22772d4e | ||
|
|
0b381e2570 | ||
|
|
f2cc4311c5 | ||
|
|
e349671fdf | ||
|
|
01c02d5efa | ||
|
|
b62b1f3870 | ||
|
|
8844830859 | ||
|
|
0c51ee4b64 | ||
|
|
11920d5e31 | ||
|
|
848ea1eb63 | ||
|
|
a216519486 | ||
|
|
b04606c38e | ||
|
|
38072beea7 | ||
|
|
b843f1fa03 | ||
|
|
560d40e571 | ||
|
|
5f0b8161b7 | ||
|
|
062d482917 | ||
|
|
39693a27e3 | ||
|
|
7cd1eeac30 | ||
|
|
bafa473c8e | ||
|
|
750cf46b2e | ||
|
|
68885a4bbc | ||
|
|
bcc99a8904 | ||
|
|
59fbd98db3 | ||
|
|
b70ed425f1 | ||
|
|
45ef5811c8 | ||
|
|
3b137ac762 | ||
|
|
1ddb0caf73 | ||
|
|
ae4c6fe2dd | ||
|
|
b03fe438d0 | ||
|
|
db257af58e | ||
|
|
735368c71b | ||
|
|
9e04e3679b | ||
|
|
43b8414727 | ||
|
|
5a00187147 | ||
|
|
cb525c7c84 | ||
|
|
d88420dd03 | ||
|
|
b9a983f8e0 | ||
|
|
42431ea7db | ||
|
|
f9459e4abb | ||
|
|
72f917d611 | ||
|
|
9fd1d19e93 | ||
|
|
062af1ac08 | ||
|
|
41bd76e091 | ||
|
|
cfd3f4b199 | ||
|
|
79d38f9597 | ||
|
|
b3866559e1 | ||
|
|
4d186baa35 | ||
|
|
8ed3d5f3db | ||
|
|
f0c8f39b6d | ||
|
|
431db8fc9b | ||
|
|
ba252c5356 | ||
|
|
a2812c39c0 | ||
|
|
0490758820 | ||
|
|
7f56824b42 | ||
|
|
627da3a2bc | ||
|
|
9b36a5c8a6 | ||
|
|
c1cf2be533 | ||
|
|
e6b69042de | ||
|
|
109650faf3 | ||
|
|
e54eaab842 | ||
|
|
43b6297b5d | ||
|
|
c20f4f5adf | ||
|
|
dc1f222cd2 | ||
|
|
c2b687212c | ||
|
|
849913276d | ||
|
|
23579c1e4a | ||
|
|
e031161fd4 | ||
|
|
4800ee6c0a | ||
|
|
d3a7fef9b0 | ||
|
|
40822fe77a | ||
|
|
837b670213 | ||
|
|
57ce69f3fb | ||
|
|
be022c4894 | ||
|
|
8a366964bb | ||
|
|
ee86b68470 | ||
|
|
60352307aa | ||
|
|
3ebd2f746f | ||
|
|
1c1a65b637 | ||
|
|
010e60d029 | ||
|
|
7a25568861 | ||
|
|
5f4f913661 | ||
|
|
ccd0e34a53 | ||
|
|
72f1ffccd3 | ||
|
|
ea7a52945f | ||
|
|
89d4d1351a | ||
|
|
b757c91d93 | ||
|
|
27203d7a4d | ||
|
|
9ad4e18ac5 | ||
|
|
fcdc8f3ce7 | ||
|
|
78b994b84a | ||
|
|
58bfc677e2 | ||
|
|
7d17285a0c | ||
|
|
e9eb00a0d4 | ||
|
|
48d07af574 | ||
|
|
2fc62efd88 | ||
|
|
be516d75bd | ||
|
|
951d5fde85 | ||
|
|
1389abc052 | ||
|
|
19ad67a77f | ||
|
|
641f308344 | ||
|
|
9f097fa4d5 | ||
|
|
5ad362c52b | ||
|
|
614f238a61 | ||
|
|
dec91950bc | ||
|
|
6cef9c23f0 | ||
|
|
3f568bf136 | ||
|
|
5484b421ce | ||
|
|
02f21e07d3 | ||
|
|
fff1f23a83 | ||
|
|
a056ec0d38 | ||
|
|
2eb9e5dde3 | ||
|
|
627d2a4701 | ||
|
|
76895fe86d | ||
|
|
64c3c85780 | ||
|
|
7288348857 | ||
|
|
62e73299b1 | ||
|
|
fe76c41ed8 | ||
|
|
1a92edf8be | ||
|
|
b63b606a4e | ||
|
|
8e2ef3d22b | ||
|
|
c6c4a32283 | ||
|
|
b70b3b158e | ||
|
|
3d59ab8108 | ||
|
|
b6c3089510 | ||
|
|
bd92aac280 | ||
|
|
5299e802e9 | ||
|
|
8e5a57d7dd | ||
|
|
beaa324fb6 | ||
|
|
79e64fe206 | ||
|
|
93f525e3fe | ||
|
|
aacb803c64 | ||
|
|
8a0665b222 | ||
|
|
20e41a7f73 | ||
|
|
93a1699a35 | ||
|
|
c33c07e4af | ||
|
|
c7484d0cc9 | ||
|
|
fb85a7bb35 | ||
|
|
42ff9a4d34 | ||
|
|
005e9eae7c | ||
|
|
3e325debcc | ||
|
|
a221de9a2b | ||
|
|
32b0cc1865 | ||
|
|
bbf85f8a12 | ||
|
|
67a0172b28 | ||
|
|
fb19d4d45b | ||
|
|
a156b1af14 | ||
|
|
a604b4943c | ||
|
|
3f0b6435d9 | ||
|
|
e0f029e2cb | ||
|
|
89d3fd5fab | ||
|
|
a38b00be6b | ||
|
|
0e8d52b591 | ||
|
|
298c77740d | ||
|
|
c681aae8ee | ||
|
|
faef98b089 | ||
|
|
84a3e0a30b | ||
|
|
69bd553ce0 | ||
|
|
fd0c0f8975 | ||
|
|
860ceb06b4 | ||
|
|
ecf501bf72 | ||
|
|
81a2ed1e25 | ||
|
|
76ab28338a | ||
|
|
9a56c9630f | ||
|
|
53b9497c18 | ||
|
|
750b16b6ee | ||
|
|
0ee3e0779a | ||
|
|
333c2d9299 | ||
|
|
ad37ff5048 | ||
|
|
33f86f3bde | ||
|
|
8acb969a49 | ||
|
|
b74b5933b8 | ||
|
|
681c556b7e | ||
|
|
1746684e52 | ||
|
|
0b93d06555 | ||
|
|
8a8b8c7c27 | ||
|
|
6b6577006d | ||
|
|
23ee5e81c9 | ||
|
|
483f55e4b1 | ||
|
|
1bb1bc2553 | ||
|
|
a4e4e36f94 | ||
|
|
6849415812 | ||
|
|
86f6cb038e | ||
|
|
7480a1d6ce | ||
|
|
3cd10117dd | ||
|
|
0caf19d390 | ||
|
|
5c14ebb049 | ||
|
|
9717a736b1 | ||
|
|
9c9ab50d1a | ||
|
|
d4bcb8174e | ||
|
|
9e7fe773bd | ||
|
|
aca18fab0f | ||
|
|
691de01b79 | ||
|
|
3383f15142 | ||
|
|
84c1593889 | ||
|
|
3c80fa1e33 | ||
|
|
06b16a1deb | ||
|
|
4c4246fb09 | ||
|
|
364be1e9f6 | ||
|
|
f959ed71aa | ||
|
|
5c4326c302 | ||
|
|
125fc3a622 | ||
|
|
6b9e785db3 | ||
|
|
25d34e9a43 | ||
|
|
457d4aa1dc | ||
|
|
ff0c0992ff | ||
|
|
d379e012c4 | ||
|
|
151fff26fd | ||
|
|
3d0d561215 | ||
|
|
22d586ed7b | ||
|
|
6dc19b29e8 | ||
|
|
50975a87d4 | ||
|
|
ce721d9f0f | ||
|
|
20510a33f7 | ||
|
|
3abd9c8763 | ||
|
|
e9eff7420b | ||
|
|
64c250c9d8 | ||
|
|
8047f82bfd | ||
|
|
af6467fb3d | ||
|
|
3ff1664aec | ||
|
|
34ea2b44b8 | ||
|
|
6c8d851109 | ||
|
|
d678299a74 | ||
|
|
7aed0db2b6 | ||
|
|
0355524345 | ||
|
|
0a43e4672e | ||
|
|
71e0ccdfec | ||
|
|
1df33ac3c8 | ||
|
|
7334090ac1 | ||
|
|
6b0f044198 | ||
|
|
ddf54c9cf8 | ||
|
|
7c64e184e2 | ||
|
|
a904db033c | ||
|
|
b234856b02 | ||
|
|
89d51d2afc | ||
|
|
37cb9678e9 | ||
|
|
0500ff333a | ||
|
|
08528510ef | ||
|
|
ddbd03dc1e | ||
|
|
ade87f378a | ||
|
|
4db14b905f | ||
|
|
b669b31451 | ||
|
|
1cb2b62f81 | ||
|
|
e5828713cf | ||
|
|
d10cb84068 | ||
|
|
4222f8516f | ||
|
|
7f998c7611 | ||
|
|
db46000337 | ||
|
|
1aac8d8041 | ||
|
|
c59c8e05f7 | ||
|
|
4942d0a629 | ||
|
|
873b7715f4 | ||
|
|
98e7ed6920 | ||
|
|
046f5e645e | ||
|
|
f5e5a7094c | ||
|
|
154125fee6 | ||
|
|
9f8e960ebe | ||
|
|
4179b0be0a | ||
|
|
28bafa38db | ||
|
|
b07552565e | ||
|
|
c4427471d2 | ||
|
|
08f81c6784 | ||
|
|
a471e98aca | ||
|
|
75a8fcc8a0 | ||
|
|
46ef76c168 | ||
|
|
66637446c9 | ||
|
|
21efeb888a | ||
|
|
a4ee8b5322 | ||
|
|
36519ac47e | ||
|
|
3f514fceca | ||
|
|
c2249fdfac | ||
|
|
c610719a44 | ||
|
|
36a6c2461a | ||
|
|
c29f22c39e | ||
|
|
30d3062944 | ||
|
|
69ba75abf4 | ||
|
|
e4d486fec5 | ||
|
|
f242144dcf | ||
|
|
02dee2d664 | ||
|
|
a3dd2c3069 | ||
|
|
a23425e8aa | ||
|
|
be79ddc9a3 | ||
|
|
7d71015e8c | ||
|
|
ad54549b51 | ||
|
|
6cf032a164 | ||
|
|
6390d796ac | ||
|
|
98b8411905 | ||
|
|
ddf1029afa | ||
|
|
1effbc5cc9 | ||
|
|
414b645e9f | ||
|
|
398c76f496 | ||
|
|
1bc456dd95 | ||
|
|
2e8421884e | ||
|
|
70d9b193ac | ||
|
|
b49c11004a | ||
|
|
34843eea90 | ||
|
|
2d6d7f31e8 | ||
|
|
7a24cbff1c | ||
|
|
1e7eb2cf1c | ||
|
|
361256e016 | ||
|
|
8838dbd003 | ||
|
|
13a95e1f2b | ||
|
|
1aaa451a3e | ||
|
|
cbba81e54d | ||
|
|
370868dfac | ||
|
|
77f692aae2 | ||
|
|
9318e205ea | ||
|
|
ebcc717c19 | ||
|
|
4c16b564ee | ||
|
|
e2283d1453 | ||
|
|
d891801c5a | ||
|
|
de75386944 | ||
|
|
82dc37de50 | ||
|
|
b6fa7f62dc | ||
|
|
f9e0a95c5e | ||
|
|
b2c6e12647 | ||
|
|
caffb83780 | ||
|
|
8882cb5479 | ||
|
|
75dace2dee | ||
|
|
ad6487d042 | ||
|
|
a91604e8ab | ||
|
|
c364f7c643 | ||
|
|
53435ba184 | ||
|
|
25f8d5519b | ||
|
|
2e4fef6c66 | ||
|
|
80b2b7dc00 | ||
|
|
8585cd8e21 | ||
|
|
9fa2a7eeea | ||
|
|
2d1f74228d | ||
|
|
3d6f7aa0e1 | ||
|
|
3dea60366a | ||
|
|
d4d9a1df4c | ||
|
|
7d6975fd31 | ||
|
|
08be52ed17 | ||
|
|
682a7700c2 | ||
|
|
9d87009216 | ||
|
|
ef86838f62 | ||
|
|
35468233f8 | ||
|
|
26e229867d | ||
|
|
3a1578b3c6 | ||
|
|
d5e3d2cbbc | ||
|
|
c095248176 | ||
|
|
44601c8954 | ||
|
|
135dbb8f07 | ||
|
|
c95682a0c7 | ||
|
|
d177b9f7fa | ||
|
|
9b57615d94 | ||
|
|
c03f3eacd1 | ||
|
|
a26e395932 | ||
|
|
0870b87c96 | ||
|
|
b52a44a7dd | ||
|
|
0a290aafef | ||
|
|
9014d4c410 | ||
|
|
60e58b4f5f | ||
|
|
620e74a6aa | ||
|
|
efa287ed35 | ||
|
|
a24eb9d9b0 | ||
|
|
bd3dab8aae | ||
|
|
4fe1ebaa5b | ||
|
|
c5e944744b | ||
|
|
0c396181f7 | ||
|
|
0034474219 | ||
|
|
8136ad8287 | ||
|
|
681940d466 | ||
|
|
16488506e8 | ||
|
|
122fccc041 | ||
|
|
9d0ad35403 | ||
|
|
f9ec97e026 | ||
|
|
95495a2647 | ||
|
|
e3310a605c | ||
|
|
b55719bf28 | ||
|
|
b957b51279 | ||
|
|
90bcfab369 | ||
|
|
f8a8e30641 | ||
|
|
25cb98e7a7 | ||
|
|
03e1bb7cf9 | ||
|
|
85dbb24f3a | ||
|
|
d817635782 | ||
|
|
2f4f237810 | ||
|
|
5ac94d810f | ||
|
|
39dc46dc25 | ||
|
|
0d9cf725f7 | ||
|
|
e55dbead5b | ||
|
|
7d046e5b30 | ||
|
|
8b4693cf66 | ||
|
|
a1172c9a82 | ||
|
|
1ed2bd33f0 | ||
|
|
4c159bd0ba | ||
|
|
050654b2a9 | ||
|
|
61b261e1b2 | ||
|
|
017b010206 | ||
|
|
00f5189f58 | ||
|
|
4a8309ed1f | ||
|
|
76cfc31a1d | ||
|
|
d9ec434699 | ||
|
|
239f3c40be | ||
|
|
09c8c6e670 | ||
|
|
7e4ad01c94 | ||
|
|
ed98e269ef | ||
|
|
b47d63334f | ||
|
|
5e2a3a5aea | ||
|
|
1a7eb21fc7 | ||
|
|
834a51cdc9 | ||
|
|
1b69d99c06 | ||
|
|
ad189933c6 | ||
|
|
9d86ff32de | ||
|
|
278bb57a58 | ||
|
|
0ba494e0ba | ||
|
|
8b247054bb | ||
|
|
7c5c8e4e0d | ||
|
|
ad106a27f3 | ||
|
|
9d6f61b49e | ||
|
|
02368954a0 | ||
|
|
b477a35a01 | ||
|
|
16622887de | ||
|
|
9059d1fb17 | ||
|
|
df2b008d82 | ||
|
|
0da871efd0 | ||
|
|
1c55349f81 | ||
|
|
9309fa1e81 | ||
|
|
5996189f91 | ||
|
|
bd2b984bfb | ||
|
|
194409a117 | ||
|
|
27978b216d | ||
|
|
c38fa77ce6 | ||
|
|
3eb49f7422 | ||
|
|
1989d615d2 | ||
|
|
239412d265 | ||
|
|
375a419a9e | ||
|
|
875c8ab424 | ||
|
|
c9bfc810ce | ||
|
|
46ecb16949 | ||
|
|
f6dc16f17b | ||
|
|
4eef42f730 | ||
|
|
8612d9a771 | ||
|
|
0caff054f5 | ||
|
|
4aa91ad599 | ||
|
|
7a0864f5c2 | ||
|
|
73dc0dfcf6 | ||
|
|
1ff9a69339 | ||
|
|
179eb5d847 | ||
|
|
52c868828c | ||
|
|
7eea4615b6 | ||
|
|
d9b351df1a | ||
|
|
d6a785b645 | ||
|
|
79db828a01 | ||
|
|
a5ffb0f8dc | ||
|
|
9492fcde74 | ||
|
|
d2456ce4cd | ||
|
|
7de27abc8d | ||
|
|
d8155bc8eb | ||
|
|
cf08e52a92 | ||
|
|
768398b991 | ||
|
|
24c20a19f1 | ||
|
|
8fbcbcd4c0 | ||
|
|
e0da5bb943 | ||
|
|
36fbc4fb82 | ||
|
|
cb11051f42 | ||
|
|
a824781d14 | ||
|
|
600a2c6748 | ||
|
|
77df64bfb5 | ||
|
|
2d6e54903c | ||
|
|
baa2b83df9 | ||
|
|
1ff02446af | ||
|
|
b58c6ba762 | ||
|
|
611a902000 | ||
|
|
c1b3f9dd29 | ||
|
|
7c5a88a6a6 | ||
|
|
be9abfef58 | ||
|
|
b549c9377e | ||
|
|
a5b00dbf74 | ||
|
|
90e2e14cd7 | ||
|
|
14bb245424 | ||
|
|
b63a0f3a45 | ||
|
|
e1f8842d7f | ||
|
|
3dda5fb268 | ||
|
|
248e0c5240 | ||
|
|
0297a43de6 | ||
|
|
2b4f66e0cf | ||
|
|
e622af2cc3 | ||
|
|
f527b1b5a6 | ||
|
|
c15b13a107 | ||
|
|
bc06acdd25 | ||
|
|
5252870733 | ||
|
|
3cac6a47a5 | ||
|
|
49bba9bf98 | ||
|
|
f4d12e4e5e | ||
|
|
d305211a36 | ||
|
|
9ec44d6f97 | ||
|
|
175bb3ee01 | ||
|
|
036c78750f | ||
|
|
a18de9de7d | ||
|
|
59fbbd5987 | ||
|
|
7e89fbc907 | ||
|
|
0956f240b3 | ||
|
|
f9db97c6b0 | ||
|
|
a2443c4ac1 | ||
|
|
095bd95044 | ||
|
|
b569209647 | ||
|
|
9057cac2b9 | ||
|
|
f9a6c685df | ||
|
|
208eb4f454 | ||
|
|
b3cb9e6714 | ||
|
|
5f9233f9b7 | ||
|
|
16447ae597 | ||
|
|
103edd5260 | ||
|
|
928089bf0f | ||
|
|
e5bd74695a | ||
|
|
f796969465 | ||
|
|
10756175b7 | ||
|
|
5637a71486 | ||
|
|
bcebd0fb62 | ||
|
|
3817d3ca87 | ||
|
|
4dd714e814 | ||
|
|
61e8bb49ec | ||
|
|
103dcd3761 | ||
|
|
54ac135fc8 | ||
|
|
86582809fc | ||
|
|
974d648f19 | ||
|
|
a79afc9597 | ||
|
|
e4883241d9 | ||
|
|
babf223745 | ||
|
|
c7d91730b6 | ||
|
|
71246b65c9 | ||
|
|
50076b647e | ||
|
|
a1a788dce8 | ||
|
|
a611b4f346 | ||
|
|
7f6ed674b4 | ||
|
|
aa3cfd887a | ||
|
|
2649d46d8d | ||
|
|
e23ffe6f02 | ||
|
|
96f3c3729a | ||
|
|
11e9d47ce2 | ||
|
|
efbc8e4383 | ||
|
|
bc7404409f | ||
|
|
8677d70baf | ||
|
|
f39253f0e1 | ||
|
|
68c1957267 | ||
|
|
a275aa2e4d | ||
|
|
cadbac9948 | ||
|
|
82673e8ddd | ||
|
|
bee51024b3 | ||
|
|
3437cb73ec | ||
|
|
d01d1a8520 | ||
|
|
5aa842cf66 | ||
|
|
03282dee0f | ||
|
|
98e8ecb8e2 | ||
|
|
9451dc3fd4 | ||
|
|
e1d3759f55 | ||
|
|
0ec382c86b | ||
|
|
756087c9f1 | ||
|
|
3e7c47e873 | ||
|
|
e3ffdbc308 | ||
|
|
645cace4d6 | ||
|
|
0959d5986b | ||
|
|
89605c29a7 | ||
|
|
e527f31213 | ||
|
|
a0dbd99928 | ||
|
|
17d39c7a4a | ||
|
|
54edaebbd9 | ||
|
|
d587a6f64c | ||
|
|
2371c32be5 | ||
|
|
c9abb8352c | ||
|
|
8995e62e73 | ||
|
|
316147a8db | ||
|
|
1fdcfc7a30 | ||
|
|
8e2c633cd4 | ||
|
|
786b0e4a54 | ||
|
|
c38c1c3c35 | ||
|
|
7d856756f4 | ||
|
|
f0d1d365e0 | ||
|
|
8e2d666ff8 | ||
|
|
38d7be1d5f | ||
|
|
431e2fad72 | ||
|
|
b3b63be8fc | ||
|
|
071fc7d6ef | ||
|
|
2a37f7edac | ||
|
|
c656ad5e2c | ||
|
|
da14a89490 | ||
|
|
cf22eae467 | ||
|
|
b199bddb0b | ||
|
|
2188ea82de | ||
|
|
1fa13d0177 | ||
|
|
ed508af424 | ||
|
|
5df26864d5 | ||
|
|
837111b17e | ||
|
|
a6b363b433 | ||
|
|
2807e1e892 | ||
|
|
0a2abd8214 | ||
|
|
8beb7acdb1 | ||
|
|
466c80b94d | ||
|
|
36c0cfc9a9 | ||
|
|
35ba1b3345 | ||
|
|
d00821d1c7 | ||
|
|
6c1b3f242b | ||
|
|
9f9da1e0c9 | ||
|
|
14fb4b70bd | ||
|
|
b1049540a4 | ||
|
|
5e2909df33 | ||
|
|
c122dad21f | ||
|
|
48ae686602 | ||
|
|
bf2c3a1a81 | ||
|
|
96e7a93886 | ||
|
|
dba1ed1e19 | ||
|
|
a24514876b | ||
|
|
466a1c1c41 | ||
|
|
a2d5e9f40f | ||
|
|
1bbff1d161 | ||
|
|
0948bae99b | ||
|
|
850db41596 | ||
|
|
7bafc87e2b | ||
|
|
1a0de02a15 | ||
|
|
6d5d278624 | ||
|
|
3b4cc48fa0 | ||
|
|
c908461088 | ||
|
|
53d1398d30 | ||
|
|
782c0367d0 | ||
|
|
4678222e9b | ||
|
|
f71dc3e4be | ||
|
|
f6233893bd | ||
|
|
6427bcf130 | ||
|
|
8fa41b706c | ||
|
|
4706c4438d | ||
|
|
0c8ebc2b06 | ||
|
|
b3b5ebc2ca | ||
|
|
b8aa23ccc5 | ||
|
|
364843db29 | ||
|
|
aa56c8f7e6 | ||
|
|
8e9fd27058 | ||
|
|
b75908cb2a | ||
|
|
af6df49ce1 | ||
|
|
bd3bdb5769 | ||
|
|
98fe193b21 | ||
|
|
26cbc9e8b1 | ||
|
|
ebb8c43fd0 | ||
|
|
8c7344f1c4 | ||
|
|
5c32a17787 | ||
|
|
aff520e69a | ||
|
|
45e627c33c | ||
|
|
7a1b158f83 | ||
|
|
6374c5d49d | ||
|
|
fd460b19d4 | ||
|
|
dff7cc4ca5 | ||
|
|
d013320bec | ||
|
|
fc6dcfaf21 | ||
|
|
a001270bd2 | ||
|
|
9e67883fbd | ||
|
|
f1a448708c | ||
|
|
a4bfa96502 | ||
|
|
595b83a256 | ||
|
|
8d34f77321 | ||
|
|
67095f97b1 | ||
|
|
50740c94ab | ||
|
|
4db4cfeda2 | ||
|
|
ad13cef89c | ||
|
|
855fc6fcd1 | ||
|
|
8f12244e51 | ||
|
|
fe0213465c | ||
|
|
f984047004 | ||
|
|
19e9e2d090 | ||
|
|
7fe3b97d00 | ||
|
|
9cd243da47 | ||
|
|
e43208c2e9 | ||
|
|
dc016fc22f | ||
|
|
c6f037cae2 | ||
|
|
f049830e28 | ||
|
|
dd1995ae0b | ||
|
|
23dc233569 | ||
|
|
0977aa7d0d | ||
|
|
24862b0672 | ||
|
|
f05a57efc3 | ||
|
|
65331a9d7c | ||
|
|
f7ae287e40 | ||
|
|
45f380b1f6 | ||
|
|
9e6b329df4 | ||
|
|
43cd34d94c | ||
|
|
9fa00aff9a | ||
|
|
9a56dcb1be | ||
|
|
fdfe7bbe59 | ||
|
|
3a99a60792 | ||
|
|
fa2b4e14df | ||
|
|
35322a6900 | ||
|
|
2ccf29d61e | ||
|
|
b068013343 | ||
|
|
d839e72998 | ||
|
|
d7c9a8ed29 | ||
|
|
6837d4d692 | ||
|
|
8aba83735b | ||
|
|
aa51187747 | ||
|
|
5f07a9ae95 | ||
|
|
a2ca767bf4 | ||
|
|
5806c74e7c | ||
|
|
0481e1d45e | ||
|
|
3177b61421 | ||
|
|
6009cf5dfa | ||
|
|
0a970e8c31 | ||
|
|
aa276ca6af | ||
|
|
9f02dd13ff | ||
|
|
609e723322 | ||
|
|
c564a1d53e | ||
|
|
a7fe31f28b | ||
|
|
a84dc599d6 | ||
|
|
8da029add9 | ||
|
|
ba45a2d270 | ||
|
|
cb56b22aea | ||
|
|
23cc5b31ba | ||
|
|
e8d99f0460 | ||
|
|
6bcd10cd5c | ||
|
|
619fb20c5f | ||
|
|
386a312e96 | ||
|
|
2759d347e6 | ||
|
|
b6ec327b49 | ||
|
|
ee02d622ba | ||
|
|
5c4a6083f5 | ||
|
|
49e63a3d3d | ||
|
|
6bae9dc9ed | ||
|
|
5fa1979a46 | ||
|
|
b40d4fa315 | ||
|
|
4d2ff7cd5b | ||
|
|
d8ec0e64d0 | ||
|
|
82e979cc07 | ||
|
|
8c132a51f5 | ||
|
|
40bd372cc1 | ||
|
|
212e114270 | ||
|
|
b0e9de6951 | ||
|
|
3489522bbb | ||
|
|
96237abc03 | ||
|
|
7155b4f0ac | ||
|
|
a8b2b09e0f | ||
|
|
6858b8c555 | ||
|
|
0e493b1a0e | ||
|
|
37d478f970 | ||
|
|
7d0d42a49f | ||
|
|
0eb1684ef1 | ||
|
|
9b0b723143 | ||
|
|
532bc6e1e6 | ||
|
|
fe3ed4c454 | ||
|
|
b5ec89e586 | ||
|
|
895e7397c2 | ||
|
|
59b767957a | ||
|
|
17d4bf8f22 | ||
|
|
836be3b097 | ||
|
|
310415bea9 | ||
|
|
aafc1276a9 | ||
|
|
2993e794cc | ||
|
|
58cb9cfb2d | ||
|
|
fbdf0901d5 | ||
|
|
af8c81b621 | ||
|
|
06b5275e48 | ||
|
|
ad95572d5f | ||
|
|
0021cfc4bc | ||
|
|
aebc7850f4 | ||
|
|
1b7efbc607 | ||
|
|
3800e96d14 | ||
|
|
461f1bb07c | ||
|
|
7d4c07e4f6 | ||
|
|
31b788f463 | ||
|
|
96ab761f73 | ||
|
|
2b3f05c039 | ||
|
|
f2e8303b66 | ||
|
|
2a614b545b | ||
|
|
5c0ab21f68 | ||
|
|
689d109438 | ||
|
|
2a6934b283 | ||
|
|
760cb94e9a | ||
|
|
2a6cff0013 | ||
|
|
ce578f0417 | ||
|
|
1745bdb9e2 | ||
|
|
3f90b89c3c | ||
|
|
f343e40d15 | ||
|
|
5cc4be9e65 | ||
|
|
da5aada002 | ||
|
|
07f2ee9ad9 | ||
|
|
12f4e1146f | ||
|
|
92c57e5476 | ||
|
|
a923baacd8 | ||
|
|
999b094d55 | ||
|
|
d4213f2352 | ||
|
|
3f65c9a066 | ||
|
|
1d427e2645 | ||
|
|
36414c4b00 | ||
|
|
47e253d76c | ||
|
|
b73cf84df0 | ||
|
|
a5b885a774 | ||
|
|
0c785413da | ||
|
|
482d7ef5f7 | ||
|
|
9f9073c0ff | ||
|
|
ef05ff4abd | ||
|
|
5848aae435 | ||
|
|
fb06f33de0 | ||
|
|
0d7ddb149e | ||
|
|
4f2d7b9c4e | ||
|
|
c02ed96f6f | ||
|
|
3b2ac891b2 | ||
|
|
ef0108881b | ||
|
|
af48975a6b | ||
|
|
6441b149ab | ||
|
|
f8892881f8 | ||
|
|
228aec5401 | ||
|
|
68ad48ff55 | ||
|
|
541ba64032 | ||
|
|
2d870b798c | ||
|
|
0f1fe1ab63 | ||
|
|
73cc86ddb1 | ||
|
|
23128f4be2 | ||
|
|
92200d0e82 | ||
|
|
d6e8655792 | ||
|
|
37076d7920 | ||
|
|
78347ec91b | ||
|
|
9ded102a0a | ||
|
|
59b7d8b8cb | ||
|
|
f5b97f6762 | ||
|
|
d47da241af | ||
|
|
4611ce15eb | ||
|
|
aa8c56a688 | ||
|
|
ef44d4471a | ||
|
|
5581eae957 | ||
|
|
ec46dfaac9 | ||
|
|
6042a047bd | ||
|
|
6ca9e2a753 | ||
|
|
618eabfe5c | ||
|
|
bb5db2e9d0 | ||
|
|
97e4d169b3 | ||
|
|
50e44b1473 | ||
|
|
38588dd3fa | ||
|
|
d183388347 | ||
|
|
1e69d59384 | ||
|
|
00f008f94d | ||
|
|
3c28001a74 | ||
|
|
76a6218be6 | ||
|
|
6c1de1bbd6 | ||
|
|
d7678081da | ||
|
|
5e4ba563cb | ||
|
|
8afbe77b0a | ||
|
|
2ef139b59a | ||
|
|
1f0d2d9b89 | ||
|
|
37a1f144ab | ||
|
|
9a7a654596 | ||
|
|
9abccd63cf | ||
|
|
93fea77182 | ||
|
|
19797243f6 | ||
|
|
c9c733d925 | ||
|
|
a7d7678c78 | ||
|
|
c0911921c7 | ||
|
|
4a4241d57a | ||
|
|
c9426bb6eb | ||
|
|
db4abd169a | ||
|
|
80b6958599 | ||
|
|
80058c781a | ||
|
|
44bd2e36f3 | ||
|
|
3589a5e5be | ||
|
|
13ef033f0e | ||
|
|
3f8c68bbca | ||
|
|
4275cea82b | ||
|
|
a0bcb5339a | ||
|
|
43deec4a4b | ||
|
|
2bc433a30b | ||
|
|
eb2b395932 | ||
|
|
2bfd1c0bf2 | ||
|
|
7228c4b13f | ||
|
|
9351d7471f | ||
|
|
1cf49998bc | ||
|
|
6ae86597e8 | ||
|
|
c578ff25bd | ||
|
|
2934a3e3be | ||
|
|
ceaa69da75 | ||
|
|
fa8e731576 | ||
|
|
685c0a106a | ||
|
|
7f539090dd | ||
|
|
2089273f95 | ||
|
|
838bb4c7ad | ||
|
|
637acd1a12 | ||
|
|
03fa9a847f | ||
|
|
d488c88e78 | ||
|
|
baae842210 | ||
|
|
ec1fb838b6 | ||
|
|
13281179df | ||
|
|
276a42c9a1 | ||
|
|
7a70a730ba | ||
|
|
d0fe59631c | ||
|
|
106892e933 | ||
|
|
19543a41b3 | ||
|
|
b172b760ab | ||
|
|
4b5d49cb41 | ||
|
|
3fd35b6058 | ||
|
|
5f86c4ab99 | ||
|
|
c94a7f6629 | ||
|
|
7d6beb4141 | ||
|
|
e2117e690a | ||
|
|
fb791290e2 | ||
|
|
5dd1488b5d | ||
|
|
529cd64d82 | ||
|
|
d2bd3e8da8 | ||
|
|
e42ce7dd86 | ||
|
|
40709462ee | ||
|
|
2ad6c01a4d | ||
|
|
70c12e788e | ||
|
|
1713791c90 | ||
|
|
9aa23fd412 | ||
|
|
e4ba09cd93 | ||
|
|
171fdf1fbc | ||
|
|
01f4e0b961 | ||
|
|
be2d5a91c7 | ||
|
|
a1d89d9478 | ||
|
|
98d1dc3b65 | ||
|
|
b80eb3acc0 | ||
|
|
05ccc1995b | ||
|
|
0de244889e | ||
|
|
e6c5c3a493 | ||
|
|
164aa2ccd2 | ||
|
|
f1599e26b3 | ||
|
|
ed64a4d32d | ||
|
|
2ee4b431d4 | ||
|
|
cd8a73ed19 | ||
|
|
e6c985ce4e | ||
|
|
a20446aeb9 | ||
|
|
7b23d76559 | ||
|
|
8315cf5818 | ||
|
|
ed16265bde | ||
|
|
dff205faf6 | ||
|
|
9aae8aee0c | ||
|
|
7c818ced2b | ||
|
|
218e887558 | ||
|
|
a68860b35a | ||
|
|
82d4d43383 | ||
|
|
94618e8feb | ||
|
|
55de7d4494 | ||
|
|
7ed639f741 | ||
|
|
41f2870c29 | ||
|
|
ba198490fa | ||
|
|
0f9ab082ab | ||
|
|
97b58965f2 | ||
|
|
f2566c68e3 | ||
|
|
a456bf5449 | ||
|
|
a09998f910 | ||
|
|
be662b913c | ||
|
|
e7ddc8448d | ||
|
|
29374f8d8a | ||
|
|
359b971103 | ||
|
|
fbdb1ae208 | ||
|
|
22c13c1eff | ||
|
|
5fc63aeaf1 | ||
|
|
d4f32673ab | ||
|
|
480dffb51b | ||
|
|
966df00124 | ||
|
|
3e2b4bc727 | ||
|
|
5929a8d42b | ||
|
|
f8ab40eb39 | ||
|
|
55e9233b93 | ||
|
|
b7277b51fd | ||
|
|
1fa9111b2b | ||
|
|
90a9e496d9 | ||
|
|
2a7dce1eb0 | ||
|
|
0c0841cc03 | ||
|
|
4c9fe016bf | ||
|
|
acc90f140c | ||
|
|
68a7bc3930 | ||
|
|
12ea64be0e | ||
|
|
7f30a673f7 | ||
|
|
897e100c32 | ||
|
|
0d4ad5cb31 | ||
|
|
b124bd0d0e | ||
|
|
6bc2f84602 | ||
|
|
d787a28c40 | ||
|
|
6b078a5731 | ||
|
|
17dddbfe21 | ||
|
|
3ff3c9e144 | ||
|
|
f5a37d82cc | ||
|
|
d3d428dc9d | ||
|
|
8dc8c5b5dc | ||
|
|
e6b06f914b | ||
|
|
4dc502a8b6 | ||
|
|
b1d1a13d5f | ||
|
|
75cc4cac5a | ||
|
|
1b7e4fbbdc | ||
|
|
9789e2f6c1 | ||
|
|
b8fb0bee24 | ||
|
|
419f77e245 | ||
|
|
59b1c3473b | ||
|
|
6db58ca375 | ||
|
|
4832b342b0 | ||
|
|
6cec542402 | ||
|
|
9644791783 | ||
|
|
5031c307d1 | ||
|
|
aa49539e3e | ||
|
|
7b4118493b | ||
|
|
d1cc9ba4ce | ||
|
|
e0e92139d7 | ||
|
|
62039392bb | ||
|
|
b72c69892e | ||
|
|
e6205e9aad | ||
|
|
b8a6fb1720 | ||
|
|
7c06d82f27 | ||
|
|
d92cb0f500 | ||
|
|
7fa72f2fe9 | ||
|
|
21d480a3b5 | ||
|
|
771c045844 | ||
|
|
e6ce484c15 | ||
|
|
102a92f62d | ||
|
|
6c7ac70701 | ||
|
|
9d8372289f | ||
|
|
766f6a1ba2 | ||
|
|
193ff24f4c | ||
|
|
c675017374 | ||
|
|
86cb852507 | ||
|
|
73494e0d7d | ||
|
|
ec61aa1b6f | ||
|
|
6df0e78b22 | ||
|
|
63c604359b | ||
|
|
08212588a0 | ||
|
|
c8518ce827 | ||
|
|
94434e3fc0 | ||
|
|
9f3af95198 | ||
|
|
acb3af8ab8 | ||
|
|
9c50889371 | ||
|
|
8c03c90708 | ||
|
|
91cc21e729 | ||
|
|
dd29199c9b | ||
|
|
9156629d72 | ||
|
|
002aa61dd9 | ||
|
|
401747a7a3 | ||
|
|
990390218c | ||
|
|
69a4d6ac83 | ||
|
|
3a67492680 | ||
|
|
d58b9edf78 | ||
|
|
5144dd09f1 | ||
|
|
6a5f3720a2 | ||
|
|
d814d3537c | ||
|
|
85380ade6a | ||
|
|
86f53deade | ||
|
|
c3357dc0e2 | ||
|
|
97e14dd294 | ||
|
|
e45c48b998 | ||
|
|
0b53eae4ad | ||
|
|
92aa3123ec | ||
|
|
e9e789da20 | ||
|
|
c6bdac8835 | ||
|
|
90df679a77 | ||
|
|
b25a422fd6 | ||
|
|
47e70bd086 | ||
|
|
f963194124 | ||
|
|
bdfc77d349 | ||
|
|
7abe90f2ac | ||
|
|
4a52779d09 | ||
|
|
a01e865042 | ||
|
|
446c50da80 | ||
|
|
750a93a1aa | ||
|
|
ba12d65792 | ||
|
|
bd40404f58 | ||
|
|
4d8d9ecfc2 | ||
|
|
f2efa022b4 | ||
|
|
fc28f34ec6 | ||
|
|
b740cc467d | ||
|
|
6ab8114eee | ||
|
|
cd3f90917f | ||
|
|
2219547a8b | ||
|
|
017426501c | ||
|
|
ca19754a30 | ||
|
|
4623f2f12a | ||
|
|
c14813c0b2 | ||
|
|
9d8308ace0 | ||
|
|
4976e81ea4 | ||
|
|
f59de87a31 | ||
|
|
53dbebb503 | ||
|
|
52df91eb60 | ||
|
|
a9a758d715 | ||
|
|
0226fa7a25 | ||
|
|
a4f47da35c | ||
|
|
29364000e2 | ||
|
|
ceecca44a4 | ||
|
|
50f62e66b0 | ||
|
|
ab39dfd254 | ||
|
|
708fad18b6 | ||
|
|
526ba34d87 | ||
|
|
5d4882dee9 | ||
|
|
48c4361d37 | ||
|
|
c1d070186e | ||
|
|
1a39fd9172 | ||
|
|
0c1ab4158e | ||
|
|
5221566335 | ||
|
|
2291c2d9ba | ||
|
|
0de14c4c8b | ||
|
|
51de0159fb | ||
|
|
37a756aeb3 | ||
|
|
353b6ed761 | ||
|
|
90815b1ac5 | ||
|
|
8a50786e61 | ||
|
|
3b77df0556 | ||
|
|
1fa11062de | ||
|
|
6883de0f1c | ||
|
|
bdde0fe094 | ||
|
|
ab22b8103e | ||
|
|
641d5cd67b | ||
|
|
9fe941e457 | ||
|
|
78060c9985 | ||
|
|
5bd6af3400 | ||
|
|
4ecd78d6a8 | ||
|
|
7e9f54ed2c | ||
|
|
7dd29c707f | ||
|
|
a1489fb1f9 | ||
|
|
5f0f5398e8 | ||
|
|
e3b2396f32 | ||
|
|
6fd70ed26a | ||
|
|
a93e6ff01a | ||
|
|
6db8c38c58 | ||
|
|
d3d3ff7970 | ||
|
|
c5b2b30f79 | ||
|
|
ac2144d65b | ||
|
|
c620b4f919 | ||
|
|
292a3a43ba | ||
|
|
5fc4693b9c | ||
|
|
6dfbaf1b88 | ||
|
|
14c6e56287 | ||
|
|
7e48514f67 | ||
|
|
d8e70c4d7f | ||
|
|
fb52989d62 | ||
|
|
5b72ebaad5 | ||
|
|
98863ab901 | ||
|
|
b5cb5eb969 | ||
|
|
7f4f96f77b | ||
|
|
3b3f75f03e | ||
|
|
a5db4d4e47 | ||
|
|
d3b0f25cfe | ||
|
|
a9c6a68c5f | ||
|
|
c27f172452 | ||
|
|
2eeb5822c1 | ||
|
|
743046d48f | ||
|
|
d3a5205bde | ||
|
|
ae6dd8929a | ||
|
|
dcf96896ef | ||
|
|
67792100bb | ||
|
|
48c1263417 | ||
|
|
12d37381fe | ||
|
|
dcec3f5f84 | ||
|
|
32e2a7830a | ||
|
|
6992249e53 | ||
|
|
107214ac53 | ||
|
|
8a58772911 | ||
|
|
e21736b470 | ||
|
|
e8679f8984 | ||
|
|
970fe02027 | ||
|
|
12216853c5 | ||
|
|
33ec92258d | ||
|
|
a578edf137 | ||
|
|
f8949ebead | ||
|
|
141c91301f | ||
|
|
8d95e67b5a | ||
|
|
0633e7f25f | ||
|
|
266da0a9d8 | ||
|
|
121c40f273 | ||
|
|
a876efb95f | ||
|
|
95a8cc9498 | ||
|
|
f02731055e | ||
|
|
1df83addfc | ||
|
|
9db43ac5e6 | ||
|
|
0f470cf96f | ||
|
|
da3fcb7b86 | ||
|
|
73dd4703b9 | ||
|
|
0c679a0151 | ||
|
|
1d6ea2dbe6 | ||
|
|
933df57654 | ||
|
|
a7c87642b4 | ||
|
|
cbe761fc33 | ||
|
|
f8aef78d25 | ||
|
|
14dbdb2d83 | ||
|
|
abda226d63 | ||
|
|
a2dc6f0a49 | ||
|
|
7a94c26333 | ||
|
|
9b1ffb384b | ||
|
|
9566bfe122 | ||
|
|
89ff103bda | ||
|
|
6c788db53a | ||
|
|
344b5fa419 | ||
|
|
c6d161b837 | ||
|
|
2065ba0c60 | ||
|
|
a481fd1a3e | ||
|
|
c50bcdbdb9 | ||
|
|
36a2a7632c | ||
|
|
e77b7014e6 | ||
|
|
d57fd0f827 | ||
|
|
6a83d2a62a | ||
|
|
2d29726c18 | ||
|
|
b241b0f954 | ||
|
|
171dd1dc02 | ||
|
|
af62d969d7 | ||
|
|
c4fd9a66c6 | ||
|
|
d191997a39 | ||
|
|
853ac4c104 | ||
|
|
ed053acad6 | ||
|
|
f147634e51 | ||
|
|
e3b2a68341 | ||
|
|
84c450aef9 | ||
|
|
f52a0eb43a | ||
|
|
6ed7559518 | ||
|
|
d977dbe9a7 | ||
|
|
17fc761c61 | ||
|
|
af878f2ed3 | ||
|
|
bb2164c324 | ||
|
|
0496becc50 | ||
|
|
618f8aa7d2 | ||
|
|
c57f711c48 | ||
|
|
4edd11f2f7 | ||
|
|
a2cf058951 | ||
|
|
d52eb10ddd | ||
|
|
4b6dae71fc | ||
|
|
ddad30c22e | ||
|
|
77067c545c | ||
|
|
465d283cad | ||
|
|
05071144fb | ||
|
|
a4e7904953 | ||
|
|
986a8c7554 | ||
|
|
9272843b77 | ||
|
|
542d4bc703 | ||
|
|
e3640fdac9 | ||
|
|
f64ab4b190 | ||
|
|
bd571e1577 | ||
|
|
e4a5cbd893 | ||
|
|
7a9fd7fd1e | ||
|
|
d9b60108db | ||
|
|
8455c8b4ed | ||
|
|
5c2e7099fc | ||
|
|
1fd1d55895 | ||
|
|
5ce4137e75 | ||
|
|
d49179541e | ||
|
|
676f258981 | ||
|
|
fa44749240 | ||
|
|
6c856f9da2 | ||
|
|
e8773cea7f | ||
|
|
4d36ffcb08 | ||
|
|
c653e492c4 | ||
|
|
f08de1f404 | ||
|
|
1218691b61 | ||
|
|
61fc27ff79 | ||
|
|
123ee24f7e | ||
|
|
52c9045a28 | ||
|
|
f00f1e8933 | ||
|
|
8da4433e57 | ||
|
|
7babb87934 | ||
|
|
f67b171385 | ||
|
|
1780d1355d | ||
|
|
5a3390e4f3 | ||
|
|
337d96b41d | ||
|
|
38a1dfea98 | ||
|
|
fbef73aeec | ||
|
|
d6214c2b7c | ||
|
|
d58c86f6fc | ||
|
|
ea34c20198 | ||
|
|
934ca94e62 | ||
|
|
1775327c2e | ||
|
|
707fcad8b4 | ||
|
|
f143c5afc6 | ||
|
|
99f94b2611 | ||
|
|
e39c1f9116 | ||
|
|
235e0b9b8f | ||
|
|
d5a9bed8a4 | ||
|
|
d7dc8a7612 | ||
|
|
08cd3ca40c | ||
|
|
a13562dcea | ||
|
|
d7a0c0d1d0 | ||
|
|
c0729b2d29 | ||
|
|
a80f474290 | ||
|
|
699207dd54 | ||
|
|
e7708010c9 | ||
|
|
f66091e08f | ||
|
|
03bb932f8f | ||
|
|
fbf8b349e0 | ||
|
|
e9278fce6a | ||
|
|
9a7db956d5 | ||
|
|
13196dd667 | ||
|
|
52b80e24d2 | ||
|
|
7dff87e65d | ||
|
|
31ee64d1b2 | ||
|
|
8e865b6918 | ||
|
|
66f91e5832 | ||
|
|
cd2d368f9c | ||
|
|
7736c1c9bd | ||
|
|
6728c0b7b5 | ||
|
|
344f92e0e7 | ||
|
|
fdabfef6a7 | ||
|
|
6c5718f134 | ||
|
|
edfde51434 | ||
|
|
3fc1347bba | ||
|
|
e643eea365 | ||
|
|
1af481f5f9 | ||
|
|
317d1c4c41 | ||
|
|
a703860512 | ||
|
|
1cd1c8ea0d | ||
|
|
53ef3bbf4f | ||
|
|
ab7b8aad7c | ||
|
|
c49213282b | ||
|
|
3c87fc5b31 | ||
|
|
9684508e1d | ||
|
|
bb0edae200 | ||
|
|
acb68a4a1e | ||
|
|
46dd6f3243 | ||
|
|
ecab072890 | ||
|
|
148534d3c2 | ||
|
|
1278f16973 | ||
|
|
7d9b3c6c5c | ||
|
|
83dcb5165c | ||
|
|
30862bb82f | ||
|
|
6c0bda8feb | ||
|
|
e14dece206 | ||
|
|
680593d636 | ||
|
|
144440214f | ||
|
|
6667b58a3f | ||
|
|
b55d9533be | ||
|
|
3484fc60e6 | ||
|
|
eac0265522 | ||
|
|
ac74431633 | ||
|
|
4c098200be | ||
|
|
2cf18972f3 | ||
|
|
d522d2a6a9 | ||
|
|
7079ce096f | ||
|
|
5e8c5067b1 | ||
|
|
570ff4e8b6 | ||
|
|
e2f1362a1f | ||
|
|
3519e38211 | ||
|
|
08734250f7 | ||
|
|
e8407f6449 | ||
|
|
04f3400f83 | ||
|
|
89c8b3e7fc | ||
|
|
66294100ec | ||
|
|
8ed8a23c8b | ||
|
|
449b0b03b5 | ||
|
|
d93754bf1d | ||
|
|
a007a61ecc | ||
|
|
e481377317 | ||
|
|
4c5831c7b4 | ||
|
|
fc54b5237f | ||
|
|
f8f42678d1 | ||
|
|
38b1f4128c | ||
|
|
04fb4f88ad | ||
|
|
4675f5df08 | ||
|
|
34ee358d40 | ||
|
|
c4cfd1a3e2 | ||
|
|
5ac4748537 | ||
|
|
2e5ec1d2dc | ||
|
|
bac4c069d7 | ||
|
|
9d4a21a10b | ||
|
|
dbeb41195d | ||
|
|
71f4998458 | ||
|
|
40af5b7574 | ||
|
|
e7a1020f82 | ||
|
|
018e49ed95 | ||
|
|
582cfe9f7c | ||
|
|
db07f740b3 | ||
|
|
bacbd351d7 | ||
|
|
7e2c61c661 | ||
|
|
3df30fd4de | ||
|
|
92789ffdc9 | ||
|
|
09b746cdec | ||
|
|
8ace7b59e3 | ||
|
|
1fc0248d8f | ||
|
|
57bde33bfe | ||
|
|
1b1e558a3b | ||
|
|
c5c7e686d0 | ||
|
|
bd28f880f6 | ||
|
|
fe2ab69773 | ||
|
|
75f9d383cb | ||
|
|
5fefba4583 | ||
|
|
780d126437 | ||
|
|
4057dd9f5b | ||
|
|
b5f8df4bb6 | ||
|
|
5ace10d39f | ||
|
|
07ecdedf0d | ||
|
|
c2ca365312 | ||
|
|
8b9ca08903 | ||
|
|
16e6b588f6 | ||
|
|
3a1d5d8904 | ||
|
|
84d1293fd0 | ||
|
|
a12be7fa77 | ||
|
|
6eee4f678f | ||
|
|
0e53c95c06 | ||
|
|
3ba97ad0dc | ||
|
|
99ff8bc1f5 | ||
|
|
63aa6ee9a5 | ||
|
|
925a42e2c4 | ||
|
|
8dc91cfed4 | ||
|
|
9c6bdeea9d | ||
|
|
f5857aaa0c | ||
|
|
9bc8ac10fa | ||
|
|
3df3879954 | ||
|
|
be1f8e7075 | ||
|
|
d602041ad0 | ||
|
|
23882bcb8e | ||
|
|
311178189f | ||
|
|
5a57526aab | ||
|
|
450dd34f4d | ||
|
|
89ed31a888 | ||
|
|
9fe031efe3 | ||
|
|
baa57266b4 | ||
|
|
3e4818d0ee | ||
|
|
b36747c728 | ||
|
|
fdbe993913 | ||
|
|
f4222e0923 | ||
|
|
f0caea9026 | ||
|
|
9c3c8ff2c4 | ||
|
|
aaefdab0aa | ||
|
|
f18a311bc2 | ||
|
|
ad9705f9c4 | ||
|
|
fb0b626813 | ||
|
|
b48fbf10e1 | ||
|
|
4aa2eab8b6 | ||
|
|
3960a19bcb | ||
|
|
b3cec4781b | ||
|
|
8f0b0bf0d0 | ||
|
|
847672d7f1 | ||
|
|
c7f2962654 | ||
|
|
752201cb46 | ||
|
|
deebf61b5f | ||
|
|
d5e5b06e86 | ||
|
|
cb5975c102 | ||
|
|
5b1aee1b4d | ||
|
|
510c8b4236 | ||
|
|
89fc7b0553 | ||
|
|
123c21fcb3 | ||
|
|
75d62d66f9 | ||
|
|
23a8e989a5 | ||
|
|
9577e637f1 | ||
|
|
e51ef2201b | ||
|
|
f4ae503abf | ||
|
|
3424b658f3 | ||
|
|
3198f73f3d | ||
|
|
aa3262a8ab | ||
|
|
6acd7be547 | ||
|
|
fb7669ddad | ||
|
|
f2c4ef126e | ||
|
|
33dcc4c152 | ||
|
|
b9e331ebd6 | ||
|
|
7832ec386e | ||
|
|
b9828428cc | ||
|
|
da11034aec | ||
|
|
578c9e0695 | ||
|
|
cc675a9b4f | ||
|
|
08e7d4d0c6 | ||
|
|
553f1b8d83 | ||
|
|
73e7e2088d | ||
|
|
e40c9de610 | ||
|
|
2f4e0bb4f2 | ||
|
|
191976e22e | ||
|
|
52656b8586 | ||
|
|
998e29ded6 | ||
|
|
5bbe3f12d6 | ||
|
|
56aea81ed7 | ||
|
|
7b8a311dde | ||
|
|
b75d20a3e8 | ||
|
|
67faa587b6 | ||
|
|
15fde686d4 | ||
|
|
741284f6e8 | ||
|
|
8352fc269b | ||
|
|
5852f36557 | ||
|
|
cc1c723c12 | ||
|
|
adf5cbfeba | ||
|
|
d6d0516c9a | ||
|
|
8aab10aaf3 | ||
|
|
4fe5616ae1 | ||
|
|
7e1c76a3f5 | ||
|
|
f74665ff71 | ||
|
|
a96d64fe88 | ||
|
|
fd2aa0cba6 | ||
|
|
a92ea3db02 | ||
|
|
d7a513b640 | ||
|
|
8a017ff693 | ||
|
|
7d08f57b32 | ||
|
|
6f4ad7890b | ||
|
|
37488118a6 | ||
|
|
b2da0778ae | ||
|
|
cc887a5037 | ||
|
|
ca86a02d30 | ||
|
|
d652dc19a6 | ||
|
|
6a56b7bff5 | ||
|
|
81e8997852 | ||
|
|
372a204ba9 | ||
|
|
15ad5aae35 | ||
|
|
fd2e9ef93f | ||
|
|
5be3bf1f46 | ||
|
|
4915c2d480 | ||
|
|
bd56a19ac5 | ||
|
|
da8fa2d905 | ||
|
|
f56fd100d7 | ||
|
|
b725a1a20c | ||
|
|
ff1b5d02d2 | ||
|
|
d4882a8240 | ||
|
|
e37f84c1ae | ||
|
|
a23bd0a63c | ||
|
|
ae00e84974 | ||
|
|
53b3250978 | ||
|
|
7f15a59a4e | ||
|
|
6a164c9961 | ||
|
|
bd779a3df3 | ||
|
|
9ebb340c00 | ||
|
|
e8edbaae2d | ||
|
|
2aab1f4c96 | ||
|
|
90ea621c65 | ||
|
|
34bdceb41b | ||
|
|
6d2ded1c6c | ||
|
|
9b926048ca | ||
|
|
9cf4f0f57d | ||
|
|
9123b9d773 | ||
|
|
f9258ae1e1 | ||
|
|
d8808de4a9 | ||
|
|
afcb152d8d | ||
|
|
ff01174a1f | ||
|
|
71f1625284 | ||
|
|
19e3390083 | ||
|
|
3015b90e12 | ||
|
|
aa419f3ef9 | ||
|
|
954236c284 | ||
|
|
72d6b3886b | ||
|
|
a95046ecaf | ||
|
|
ccdb11575b | ||
|
|
7e68b2f2be | ||
|
|
39efab1081 | ||
|
|
cc6707c8ce | ||
|
|
09080adf84 | ||
|
|
4cc72030c0 | ||
|
|
a395902184 | ||
|
|
5156f0584a | ||
|
|
be171fe0d7 | ||
|
|
ad4bf5e654 | ||
|
|
da7429ad62 | ||
|
|
b5f20ee282 | ||
|
|
a9023d6f3a | ||
|
|
628b661a18 | ||
|
|
638fe466f8 | ||
|
|
a90adcf15c | ||
|
|
7896066db6 | ||
|
|
b1314bcc31 | ||
|
|
b1ecc929f2 | ||
|
|
3aad42a886 | ||
|
|
b6e87d3d31 | ||
|
|
461eb4b9c7 | ||
|
|
a89e92d5cc | ||
|
|
6e69e88e91 | ||
|
|
ae732c1dac | ||
|
|
8e4a72c97b | ||
|
|
bf0d82fe67 | ||
|
|
987383f957 | ||
|
|
c2cacf3281 | ||
|
|
72878477dc | ||
|
|
ad0d14420a | ||
|
|
5a7c60c81e | ||
|
|
6011840d1f | ||
|
|
9a2dffe299 | ||
|
|
e6770d2b12 | ||
|
|
255db6ee57 | ||
|
|
aa9ff99557 | ||
|
|
5f024e9f30 | ||
|
|
cbdc7b7ce4 | ||
|
|
5f636ca061 | ||
|
|
9fa3651170 | ||
|
|
bba66788c3 | ||
|
|
200f3cce00 | ||
|
|
938490b739 | ||
|
|
e77e7b050a | ||
|
|
bd2dbe5b63 | ||
|
|
c684d9cb4a | ||
|
|
7a39a9d45e | ||
|
|
2a3bb068db | ||
|
|
1aa4384ca3 | ||
|
|
3b26b7b26c | ||
|
|
3b097d662b | ||
|
|
c3acb3e77f | ||
|
|
55d58d30a8 | ||
|
|
020a8ace9f | ||
|
|
15f56ffc01 | ||
|
|
3724659b32 | ||
|
|
df77152581 | ||
|
|
339ea5f12a | ||
|
|
36f96ccc97 | ||
|
|
190e0a4971 | ||
|
|
72638fac68 | ||
|
|
807d19e381 | ||
|
|
10870172b4 | ||
|
|
1f7d3eccf9 | ||
|
|
5fc58123bb | ||
|
|
c84c9f4aaa | ||
|
|
cabe66fc0a | ||
|
|
9f1315b06d | ||
|
|
6f27f59730 | ||
|
|
17815e7fe3 | ||
|
|
596ae80fea | ||
|
|
be2dc6ba70 | ||
|
|
e5aa8c8270 | ||
|
|
7c5ac41c55 | ||
|
|
c6cf1153c1 | ||
|
|
a68338b651 | ||
|
|
bab46e912e | ||
|
|
4b158a1c89 | ||
|
|
6894900e46 | ||
|
|
2e11d6e007 | ||
|
|
348381be15 | ||
|
|
9024c28e70 | ||
|
|
ae1702901b | ||
|
|
c1c0df85e6 | ||
|
|
f3c6d9c02b | ||
|
|
811a885411 | ||
|
|
b4ec28b71c | ||
|
|
cdf4a5321b | ||
|
|
d83f155f80 | ||
|
|
4c402ed5bd | ||
|
|
ec5aff8d0b | ||
|
|
eae0d6c422 | ||
|
|
9c284b84b1 | ||
|
|
9f36e5ae05 | ||
|
|
7caa380e54 | ||
|
|
41d81bb60e | ||
|
|
454a74f4e1 | ||
|
|
c5bdad02e5 | ||
|
|
f46de3d518 | ||
|
|
a3e21bea1a | ||
|
|
d7e4707d5d |
24
.dockerignore
Normal file
24
.dockerignore
Normal file
@@ -0,0 +1,24 @@
|
||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
|
||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||
# github actions
|
||||
.git
|
||||
.github/
|
||||
.*ignore
|
||||
# User-specific stuff
|
||||
.idea/
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv*/
|
||||
ENV/
|
||||
.conda/
|
||||
dashboard/
|
||||
data/
|
||||
changelogs/
|
||||
tests/
|
||||
.ruff_cache/
|
||||
.astrbot
|
||||
astrbot.lock
|
||||
15
.github/FUNDING.yml
vendored
Normal file
15
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
|
||||
patreon: # Replace with a single Patreon username
|
||||
open_collective: astrbot
|
||||
ko_fi: # Replace with a single Ko-fi username
|
||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
liberapay: # Replace with a single Liberapay username
|
||||
issuehunt: # Replace with a single IssueHunt username
|
||||
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
|
||||
polar: # Replace with a single Polar username
|
||||
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
|
||||
thanks_dev: # Replace with a single thanks.dev username
|
||||
custom: ['https://afdian.com/a/astrbot_team']
|
||||
57
.github/ISSUE_TEMPLATE/PLUGIN_PUBLISH.yml
vendored
Normal file
57
.github/ISSUE_TEMPLATE/PLUGIN_PUBLISH.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
name: 🥳 发布插件
|
||||
description: 提交插件到插件市场
|
||||
title: "[Plugin] 插件名"
|
||||
labels: ["plugin-publish"]
|
||||
assignees: []
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
欢迎发布插件到插件市场!
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
## 插件基本信息
|
||||
|
||||
请将插件信息填写到下方的 JSON 代码块中。其中 `tags`(插件标签)和 `social_link`(社交链接)选填。
|
||||
|
||||
不熟悉 JSON ?可以从 [此站](https://plugins.astrbot.app) 右下角提交。
|
||||
|
||||
- type: textarea
|
||||
id: plugin-info
|
||||
attributes:
|
||||
label: 插件信息
|
||||
description: 请在下方代码块中填写您的插件信息,确保反引号包裹了JSON
|
||||
value: |
|
||||
```json
|
||||
{
|
||||
"name": "插件名,请以 astrbot_plugin_ 开头",
|
||||
"display_name": "用于展示的插件名,方便人类阅读",
|
||||
"desc": "插件的简短介绍",
|
||||
"author": "作者名",
|
||||
"repo": "插件仓库链接",
|
||||
"tags": [],
|
||||
"social_link": "",
|
||||
}
|
||||
```
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
## 检查
|
||||
|
||||
- type: checkboxes
|
||||
id: checks
|
||||
attributes:
|
||||
label: 插件检查清单
|
||||
description: 请确认以下所有项目
|
||||
options:
|
||||
- label: 我的插件经过完整的测试
|
||||
required: true
|
||||
- label: 我的插件不包含恶意代码
|
||||
required: true
|
||||
- label: 我已阅读并同意遵守该项目的 [行为准则](https://docs.github.com/zh/site-policy/github-terms/github-community-code-of-conduct)。
|
||||
required: true
|
||||
80
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
Normal file
80
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
name: '🐛 Report Bug / 报告 Bug'
|
||||
title: '[Bug]'
|
||||
description: Submit bug report to help us improve. / 提交报告帮助我们改进。
|
||||
labels: [ 'bug' ]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for taking the time to report this issue! Please describe your problem accurately. If possible, please provide a reproducible snippet (this will help resolve the issue more quickly). Please note that issues that are not detailed or have no logs will be closed immediately. Thank you for your understanding. / 感谢您抽出时间报告问题!请准确解释您的问题。如果可能,请提供一个可复现的片段(这有助于更快地解决问题)。请注意,不详细 / 没有日志的 issue 会被直接关闭,谢谢理解。
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What happened / 发生了什么
|
||||
description: Description
|
||||
placeholder: >
|
||||
Please provide a clear and specific description of what this exception is. Please note that issues that are not detailed or have no logs will be closed immediately. Thank you for your understanding. / 一个清晰且具体的描述这个异常是什么。请注意,不详细 / 没有日志的 issue 会被直接关闭,谢谢理解。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Reproduce / 如何复现?
|
||||
description: >
|
||||
The steps to reproduce the issue. / 复现该问题的步骤
|
||||
placeholder: >
|
||||
Example: 1. Open '...'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: AstrBot version, deployment method (e.g., Windows Docker Desktop deployment), provider used, and messaging platform used. / AstrBot 版本、部署方式(如 Windows Docker Desktop 部署)、使用的提供商、使用的消息平台适配器
|
||||
placeholder: >
|
||||
Example: 4.5.7 Docker, 3.1.7 Windows Launcher
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: OS
|
||||
description: |
|
||||
On which operating system did you encounter this problem? / 你在哪个操作系统上遇到了这个问题?
|
||||
multiple: false
|
||||
options:
|
||||
- 'Windows'
|
||||
- 'macOS'
|
||||
- 'Linux'
|
||||
- 'Other'
|
||||
- 'Not sure'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Logs / 报错日志
|
||||
description: >
|
||||
Please provide complete Debug-level logs, such as error logs and screenshots. Don't worry if they're long! Please note that issues with insufficient details or no logs will be closed immediately. Thank you for your understanding. / 如报错日志、截图等。请提供完整的 Debug 级别的日志,不要介意它很长!请注意,不详细 / 没有日志的 issue 会被直接关闭,谢谢理解。
|
||||
placeholder: >
|
||||
Please provide a complete error log or screenshot. / 请提供完整的报错日志或截图。
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Are you willing to submit a PR? / 你愿意提交 PR 吗?
|
||||
description: >
|
||||
This is not required, but we would be happy to provide guidance during the contribution process, especially if you already have a good understanding of how to implement the fix. / 这不是必需的,但我们很乐意在贡献过程中为您提供指导特别是如果你已经很好地理解了如何实现修复。
|
||||
options:
|
||||
- label: Yes!
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Code of Conduct
|
||||
options:
|
||||
- label: >
|
||||
I have read and agree to abide by the project's [Code of Conduct](https://docs.github.com/zh/site-policy/github-terms/github-community-code-of-conduct)。
|
||||
required: true
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: "Thank you for filling out our form! / 感谢您填写我们的表单!"
|
||||
42
.github/ISSUE_TEMPLATE/feature-request.yml
vendored
Normal file
42
.github/ISSUE_TEMPLATE/feature-request.yml
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
|
||||
name: '🎉 功能建议'
|
||||
title: "[Feature]"
|
||||
description: 提交建议帮助我们改进。
|
||||
labels: [ "enhancement" ]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
感谢您抽出时间提出新功能建议,请准确解释您的想法。
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 描述
|
||||
description: 简短描述您的功能建议。
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 使用场景
|
||||
description: 你想要发生什么?
|
||||
placeholder: >
|
||||
一个清晰且具体的描述这个功能的使用场景。
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 你愿意提交PR吗?
|
||||
description: >
|
||||
这不是必须的,但我们欢迎您的贡献。
|
||||
options:
|
||||
- label: 是的, 我愿意提交PR!
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Code of Conduct
|
||||
options:
|
||||
- label: >
|
||||
我已阅读并同意遵守该项目的 [行为准则](https://docs.github.com/zh/site-policy/github-terms/github-community-code-of-conduct)。
|
||||
required: true
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: "感谢您填写我们的表单!"
|
||||
27
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
27
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
<!--Please describe the motivation for this change: What problem does it solve? (e.g., Fixes XX issue, adds YY feature)-->
|
||||
<!--请描述此项更改的动机:它解决了什么问题?(例如:修复了 XX issue,添加了 YY 功能)-->
|
||||
|
||||
### Modifications / 改动点
|
||||
|
||||
<!--请总结你的改动:哪些核心文件被修改了?实现了什么功能?-->
|
||||
<!--Please summarize your changes: What core files were modified? What functionality was implemented?-->
|
||||
|
||||
- [x] This is NOT a breaking change. / 这不是一个破坏性变更。
|
||||
<!-- If your changes is a breaking change, please uncheck the checkbox above -->
|
||||
|
||||
### Screenshots or Test Results / 运行截图或测试结果
|
||||
|
||||
<!--Please paste screenshots, GIFs, or test logs here as evidence of executing the "Verification Steps" to prove this change is effective.-->
|
||||
<!--请粘贴截图、GIF 或测试日志,作为执行“验证步骤”的证据,证明此改动有效。-->
|
||||
|
||||
---
|
||||
|
||||
### Checklist / 检查清单
|
||||
|
||||
<!--If merged, your code will serve tens of thousands of users! Please double-check the following items before submitting.-->
|
||||
<!--如果分支被合并,您的代码将服务于数万名用户!在提交前,请核查一下几点内容。-->
|
||||
|
||||
- [ ] 😊 如果 PR 中有新加入的功能,已经通过 Issue / 邮件等方式和作者讨论过。/ If there are new features added in the PR, I have discussed it with the authors through issues/emails, etc.
|
||||
- [ ] 👀 我的更改经过了良好的测试,**并已在上方提供了“验证步骤”和“运行截图”**。/ My changes have been well-tested, **and "Verification Steps" and "Screenshots" have been provided above**.
|
||||
- [ ] 🤓 我确保没有引入新依赖库,或者引入了新依赖库的同时将其添加到了 `requirements.txt` 和 `pyproject.toml` 文件相应位置。/ I have ensured that no new dependencies are introduced, OR if new dependencies are introduced, they have been added to the appropriate locations in `requirements.txt` and `pyproject.toml`.
|
||||
- [ ] 😮 我的更改没有引入恶意代码。/ My changes do not introduce malicious code.
|
||||
38
.github/auto_assign.yml
vendored
Normal file
38
.github/auto_assign.yml
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
# Set to true to add reviewers to pull requests
|
||||
addReviewers: true
|
||||
|
||||
# Set to true to add assignees to pull requests
|
||||
addAssignees: false
|
||||
|
||||
# A list of reviewers to be added to pull requests (GitHub user name)
|
||||
reviewers:
|
||||
- Soulter
|
||||
- Raven95676
|
||||
- Larch-C
|
||||
- anka-afk
|
||||
- advent259141
|
||||
- Fridemn
|
||||
- LIghtJUNction
|
||||
# - zouyonghe
|
||||
|
||||
# A number of reviewers added to the pull request
|
||||
# Set 0 to add all the reviewers (default: 0)
|
||||
numberOfReviewers: 2
|
||||
|
||||
# A list of assignees, overrides reviewers if set
|
||||
# assignees:
|
||||
# - assigneeA
|
||||
|
||||
# A number of assignees to add to the pull request
|
||||
# Set to 0 to add all of the assignees.
|
||||
# Uses numberOfReviewers if unset.
|
||||
# numberOfAssignees: 2
|
||||
|
||||
# A list of keywords to be skipped the process that add reviewers if pull requests include it
|
||||
skipKeywords:
|
||||
- wip
|
||||
- draft
|
||||
|
||||
# A list of users to be skipped by both the add reviewers and add assignees processes
|
||||
# skipUsers:
|
||||
# - dependabot[bot]
|
||||
63
.github/copilot-instructions.md
vendored
Normal file
63
.github/copilot-instructions.md
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
# AstrBot Development Instructions
|
||||
|
||||
AstrBot is a multi-platform LLM chatbot and development framework written in Python with a Vue.js dashboard. It supports multiple messaging platforms (QQ, Telegram, Discord, etc.) and various LLM providers (OpenAI, Anthropic, Google Gemini, etc.).
|
||||
|
||||
Always reference these instructions first and fallback to search or bash commands only when you encounter unexpected information that does not match the info here.
|
||||
|
||||
## Working Effectively
|
||||
|
||||
### Bootstrap and Install Dependencies
|
||||
- **Python 3.10+ required** - Check `.python-version` file
|
||||
- Install UV package manager: `pip install uv`
|
||||
- Install project dependencies: `uv sync` -- takes 6-7 minutes. NEVER CANCEL. Set timeout to 10+ minutes.
|
||||
- Create required directories: `mkdir -p data/plugins data/config data/temp`
|
||||
|
||||
### Running the Application
|
||||
- Run main application: `uv run main.py` -- starts in ~3 seconds
|
||||
- Application creates WebUI on http://localhost:6185 (default credentials: `astrbot`/`astrbot`)
|
||||
- Application loads plugins automatically from `packages/` and `data/plugins/` directories
|
||||
|
||||
### Dashboard Build (Vue.js/Node.js)
|
||||
- **Prerequisites**: Node.js 20+ and npm 10+ required
|
||||
- Navigate to dashboard: `cd dashboard`
|
||||
- Install dashboard dependencies: `npm install` -- takes 2-3 minutes. NEVER CANCEL. Set timeout to 5+ minutes.
|
||||
- Build dashboard: `npm run build` -- takes 25-30 seconds. NEVER CANCEL.
|
||||
- Dashboard creates optimized production build in `dashboard/dist/`
|
||||
|
||||
### Testing
|
||||
- Do not generate test files for now.
|
||||
|
||||
### Code Quality and Linting
|
||||
- Install ruff linter: `uv add --dev ruff`
|
||||
- Check code style: `uv run ruff check .` -- takes <1 second
|
||||
- Check formatting: `uv run ruff format --check .` -- takes <1 second
|
||||
- Fix formatting: `uv run ruff format .`
|
||||
- **ALWAYS** run `uv run ruff check .` and `uv run ruff format .` before committing changes
|
||||
|
||||
### Plugin Development
|
||||
- Plugins load from `packages/` (built-in) and `data/plugins/` (user-installed)
|
||||
- Plugin system supports function tools and message handlers
|
||||
- Key plugins: python_interpreter, web_searcher, astrbot, reminder, session_controller
|
||||
|
||||
### Common Issues and Workarounds
|
||||
- **Dashboard download fails**: Known issue with "division by zero" error - application still works
|
||||
- **Import errors in tests**: Ensure `uv run` is used to run tests in proper environment
|
||||
=- **Build timeouts**: Always set appropriate timeouts (10+ minutes for uv sync, 5+ minutes for npm install)
|
||||
|
||||
## CI/CD Integration
|
||||
- GitHub Actions workflows in `.github/workflows/`
|
||||
- Docker builds supported via `Dockerfile`
|
||||
- Pre-commit hooks enforce ruff formatting and linting
|
||||
|
||||
## Docker Support
|
||||
- Primary deployment method: `docker run soulter/astrbot:latest`
|
||||
- Compose file available: `compose.yml`
|
||||
- Exposes ports: 6185 (WebUI), 6195 (WeChat), 6199 (QQ), etc.
|
||||
- Volume mount required: `./data:/AstrBot/data`
|
||||
|
||||
## Multi-language Support
|
||||
- Documentation in Chinese (README.md), English (README_en.md), Japanese (README_ja.md)
|
||||
- UI supports internationalization
|
||||
- Default language is Chinese
|
||||
|
||||
Remember: This is a production chatbot framework with real users. Always test thoroughly and ensure changes don't break existing functionality.
|
||||
13
.github/dependabot.yml
vendored
Normal file
13
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
# Keep GitHub Actions up to date with GitHub's Dependabot...
|
||||
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/keeping-your-actions-up-to-date-with-dependabot
|
||||
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: github-actions
|
||||
directory: /
|
||||
groups:
|
||||
github-actions:
|
||||
patterns:
|
||||
- "*" # Group all Actions updates into a single larger pull request
|
||||
schedule:
|
||||
interval: weekly
|
||||
92
.github/workflows/auto_release.yml
vendored
Normal file
92
.github/workflows/auto_release.yml
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
workflow_dispatch:
|
||||
|
||||
name: Auto Release
|
||||
|
||||
jobs:
|
||||
build-and-publish-to-github-release:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Dashboard Build
|
||||
run: |
|
||||
cd dashboard
|
||||
npm install
|
||||
npm run build
|
||||
echo "COMMIT_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||
echo ${{ github.ref_name }} > dist/assets/version
|
||||
zip -r dist.zip dist
|
||||
|
||||
- name: Upload to Cloudflare R2
|
||||
env:
|
||||
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
|
||||
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||
R2_BUCKET_NAME: "astrbot"
|
||||
R2_OBJECT_NAME: "astrbot-webui-latest.zip"
|
||||
VERSION_TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
echo "Installing rclone..."
|
||||
curl https://rclone.org/install.sh | sudo bash
|
||||
|
||||
echo "Configuring rclone remote..."
|
||||
mkdir -p ~/.config/rclone
|
||||
cat <<EOF > ~/.config/rclone/rclone.conf
|
||||
[r2]
|
||||
type = s3
|
||||
provider = Cloudflare
|
||||
access_key_id = $R2_ACCESS_KEY_ID
|
||||
secret_access_key = $R2_SECRET_ACCESS_KEY
|
||||
endpoint = https://${R2_ACCOUNT_ID}.r2.cloudflarestorage.com
|
||||
EOF
|
||||
|
||||
echo "Uploading dist.zip to R2 bucket: $R2_BUCKET_NAME/$R2_OBJECT_NAME"
|
||||
mv dashboard/dist.zip dashboard/$R2_OBJECT_NAME
|
||||
rclone copy dashboard/$R2_OBJECT_NAME r2:$R2_BUCKET_NAME --progress
|
||||
mv dashboard/$R2_OBJECT_NAME dashboard/astrbot-webui-${VERSION_TAG}.zip
|
||||
rclone copy dashboard/astrbot-webui-${VERSION_TAG}.zip r2:$R2_BUCKET_NAME --progress
|
||||
mv dashboard/astrbot-webui-${VERSION_TAG}.zip dashboard/dist.zip
|
||||
|
||||
- name: Fetch Changelog
|
||||
run: |
|
||||
echo "changelog=changelogs/${{github.ref_name}}.md" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Create GitHub Release
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
bodyFile: ${{ env.changelog }}
|
||||
artifacts: "dashboard/dist.zip"
|
||||
|
||||
build-and-publish-to-pypi:
|
||||
# 构建并发布到 PyPI
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-and-publish-to-github-release
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Install uv
|
||||
run: |
|
||||
python -m pip install uv
|
||||
|
||||
- name: Build package
|
||||
run: |
|
||||
uv build
|
||||
|
||||
- name: Publish to PyPI
|
||||
env:
|
||||
UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
run: |
|
||||
uv publish
|
||||
34
.github/workflows/code-format.yml
vendored
Normal file
34
.github/workflows/code-format.yml
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
name: Code Format Check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [ master ]
|
||||
push:
|
||||
branches: [ master ]
|
||||
|
||||
jobs:
|
||||
format-check:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Install UV
|
||||
run: pip install uv
|
||||
|
||||
- name: Install dependencies
|
||||
run: uv sync
|
||||
|
||||
- name: Check code formatting with ruff
|
||||
run: |
|
||||
uv run ruff format --check .
|
||||
|
||||
- name: Check code style with ruff
|
||||
run: |
|
||||
uv run ruff check .
|
||||
93
.github/workflows/codeql.yml
vendored
Normal file
93
.github/workflows/codeql.yml
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "master" ]
|
||||
pull_request:
|
||||
branches: [ "master" ]
|
||||
schedule:
|
||||
- cron: '21 15 * * 5'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze (${{ matrix.language }})
|
||||
# Runner size impacts CodeQL analysis time. To learn more, please see:
|
||||
# - https://gh.io/recommended-hardware-resources-for-running-codeql
|
||||
# - https://gh.io/supported-runners-and-hardware-resources
|
||||
# - https://gh.io/using-larger-runners (GitHub.com only)
|
||||
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
|
||||
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
|
||||
timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
|
||||
permissions:
|
||||
# required for all workflows
|
||||
security-events: write
|
||||
|
||||
# required to fetch internal or private CodeQL packs
|
||||
packages: read
|
||||
|
||||
# only required for workflows in private repositories
|
||||
actions: read
|
||||
contents: read
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- language: python
|
||||
build-mode: none
|
||||
# CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
|
||||
# Use `c-cpp` to analyze code written in C, C++ or both
|
||||
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
|
||||
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
|
||||
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
|
||||
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
|
||||
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
|
||||
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v4
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
build-mode: ${{ matrix.build-mode }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
# If the analyze step fails for one of the languages you are analyzing with
|
||||
# "We were unable to automatically build your code", modify the matrix above
|
||||
# to set the build mode to "manual" for that language. Then modify this step
|
||||
# to build your code.
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
- if: matrix.build-mode == 'manual'
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'If you are using a "manual" build mode for one or more of the' \
|
||||
'languages you are analyzing, replace this with the commands to build' \
|
||||
'your code, for example:'
|
||||
echo ' make bootstrap'
|
||||
echo ' make release'
|
||||
exit 1
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v4
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
45
.github/workflows/coverage_test.yml
vendored
Normal file
45
.github/workflows/coverage_test.yml
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
name: Run tests and upload coverage
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
- 'changelogs/**'
|
||||
- 'dashboard/**'
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Run tests and collect coverage
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install pytest pytest-asyncio pytest-cov
|
||||
pip install --editable .
|
||||
|
||||
- name: Run tests
|
||||
run: |
|
||||
mkdir -p data/plugins
|
||||
mkdir -p data/config
|
||||
mkdir -p data/temp
|
||||
export TESTING=true
|
||||
export ZHIPU_API_KEY=${{ secrets.OPENAI_API_KEY }}
|
||||
pytest --cov=. -v -o log_cli=true -o log_level=DEBUG
|
||||
|
||||
- name: Upload results to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
55
.github/workflows/dashboard_ci.yml
vendored
Normal file
55
.github/workflows/dashboard_ci.yml
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
name: AstrBot Dashboard CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "master" ]
|
||||
pull_request:
|
||||
branches: [ "master" ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 'latest'
|
||||
|
||||
- name: npm install, build
|
||||
run: |
|
||||
cd dashboard
|
||||
npm install pnpm -g
|
||||
pnpm install
|
||||
pnpm i --save-dev @types/markdown-it
|
||||
pnpm run build
|
||||
|
||||
- name: Inject Commit SHA
|
||||
id: get_sha
|
||||
run: |
|
||||
echo "COMMIT_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||
mkdir -p dashboard/dist/assets
|
||||
echo $COMMIT_SHA > dashboard/dist/assets/version
|
||||
cd dashboard
|
||||
zip -r dist.zip dist
|
||||
|
||||
- name: Archive production artifacts
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: dist-without-markdown
|
||||
path: |
|
||||
dashboard/dist
|
||||
!dist/**/*.md
|
||||
|
||||
- name: Create GitHub Release
|
||||
if: github.event_name == 'push'
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
tag: release-${{ github.sha }}
|
||||
owner: AstrBotDevs
|
||||
repo: astrbot-release-harbour
|
||||
body: "Automated release from commit ${{ github.sha }}"
|
||||
token: ${{ secrets.ASTRBOT_HARBOUR_TOKEN }}
|
||||
artifacts: "dashboard/dist.zip"
|
||||
198
.github/workflows/docker-image.yml
vendored
Normal file
198
.github/workflows/docker-image.yml
vendored
Normal file
@@ -0,0 +1,198 @@
|
||||
name: Docker Image CI/CD
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
schedule:
|
||||
# Run at 00:00 UTC every day
|
||||
- cron: "0 0 * * *"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build-nightly-image:
|
||||
if: github.event_name == 'schedule'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
GHCR_OWNER: soulter
|
||||
HAS_GHCR_TOKEN: ${{ secrets.GHCR_GITHUB_TOKEN != '' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 1
|
||||
fetch-tag: true
|
||||
|
||||
- name: Check for new commits today
|
||||
if: github.event_name == 'schedule'
|
||||
id: check-commits
|
||||
run: |
|
||||
# Get commits from the last 24 hours
|
||||
commits=$(git log --since="24 hours ago" --oneline)
|
||||
if [ -z "$commits" ]; then
|
||||
echo "No commits in the last 24 hours, skipping build"
|
||||
echo "has_commits=false" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "Found commits in the last 24 hours:"
|
||||
echo "$commits"
|
||||
echo "has_commits=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Exit if no commits
|
||||
if: github.event_name == 'schedule' && steps.check-commits.outputs.has_commits == 'false'
|
||||
run: exit 0
|
||||
|
||||
- name: Build Dashboard
|
||||
run: |
|
||||
cd dashboard
|
||||
npm install
|
||||
npm run build
|
||||
mkdir -p dist/assets
|
||||
echo $(git rev-parse HEAD) > dist/assets/version
|
||||
cd ..
|
||||
mkdir -p data
|
||||
cp -r dashboard/dist data/
|
||||
|
||||
- name: Determine test image tags
|
||||
id: test-meta
|
||||
run: |
|
||||
short_sha=$(echo "${GITHUB_SHA}" | cut -c1-12)
|
||||
build_date=$(date +%Y%m%d)
|
||||
echo "short_sha=$short_sha" >> $GITHUB_OUTPUT
|
||||
echo "build_date=$build_date" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: env.HAS_GHCR_TOKEN == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ env.GHCR_OWNER }}
|
||||
password: ${{ secrets.GHCR_GITHUB_TOKEN }}
|
||||
|
||||
- name: Build nightly image tags list
|
||||
id: test-tags
|
||||
run: |
|
||||
TAGS="${{ env.DOCKER_HUB_USERNAME }}/astrbot:nightly-latest
|
||||
${{ env.DOCKER_HUB_USERNAME }}/astrbot:nightly-${{ steps.test-meta.outputs.build_date }}-${{ steps.test-meta.outputs.short_sha }}"
|
||||
if [ "${{ env.HAS_GHCR_TOKEN }}" = "true" ]; then
|
||||
TAGS="$TAGS
|
||||
ghcr.io/${{ env.GHCR_OWNER }}/astrbot:nightly-latest
|
||||
ghcr.io/${{ env.GHCR_OWNER }}/astrbot:nightly-${{ steps.test-meta.outputs.build_date }}-${{ steps.test-meta.outputs.short_sha }}"
|
||||
fi
|
||||
echo "tags<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$TAGS" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build and Push Nightly Image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.test-tags.outputs.tags }}
|
||||
|
||||
- name: Post build notifications
|
||||
run: echo "Test Docker image has been built and pushed successfully"
|
||||
|
||||
build-release-image:
|
||||
if: github.event_name == 'workflow_dispatch' || (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v'))
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
GHCR_OWNER: soulter
|
||||
HAS_GHCR_TOKEN: ${{ secrets.GHCR_GITHUB_TOKEN != '' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 1
|
||||
fetch-tag: true
|
||||
|
||||
- name: Get latest tag (only on manual trigger)
|
||||
id: get-latest-tag
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
run: |
|
||||
tag=$(git describe --tags --abbrev=0)
|
||||
echo "latest_tag=$tag" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Checkout to latest tag (only on manual trigger)
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
run: git checkout ${{ steps.get-latest-tag.outputs.latest_tag }}
|
||||
|
||||
- name: Compute release metadata
|
||||
id: release-meta
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||
version="${{ steps.get-latest-tag.outputs.latest_tag }}"
|
||||
else
|
||||
version="${GITHUB_REF#refs/tags/}"
|
||||
fi
|
||||
if [[ "$version" == *"beta"* ]] || [[ "$version" == *"alpha"* ]]; then
|
||||
echo "is_prerelease=true" >> $GITHUB_OUTPUT
|
||||
echo "Version $version marked as pre-release"
|
||||
else
|
||||
echo "is_prerelease=false" >> $GITHUB_OUTPUT
|
||||
echo "Version $version marked as stable"
|
||||
fi
|
||||
echo "version=$version" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build Dashboard
|
||||
run: |
|
||||
cd dashboard
|
||||
npm install
|
||||
npm run build
|
||||
mkdir -p dist/assets
|
||||
echo $(git rev-parse HEAD) > dist/assets/version
|
||||
cd ..
|
||||
mkdir -p data
|
||||
cp -r dashboard/dist data/
|
||||
|
||||
- name: Set QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: env.HAS_GHCR_TOKEN == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ env.GHCR_OWNER }}
|
||||
password: ${{ secrets.GHCR_GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and Push Release Image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: |
|
||||
${{ steps.release-meta.outputs.is_prerelease == 'false' && format('{0}/astrbot:latest', env.DOCKER_HUB_USERNAME) || '' }}
|
||||
${{ steps.release-meta.outputs.is_prerelease == 'false' && env.HAS_GHCR_TOKEN == 'true' && format('ghcr.io/{0}/astrbot:latest', env.GHCR_OWNER) || '' }}
|
||||
${{ format('{0}/astrbot:{1}', env.DOCKER_HUB_USERNAME, steps.release-meta.outputs.version) }}
|
||||
${{ env.HAS_GHCR_TOKEN == 'true' && format('ghcr.io/{0}/astrbot:{1}', env.GHCR_OWNER, steps.release-meta.outputs.version) || '' }}
|
||||
|
||||
- name: Post build notifications
|
||||
run: echo "Release Docker image has been built and pushed successfully"
|
||||
27
.github/workflows/stale.yml
vendored
Normal file
27
.github/workflows/stale.yml
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
|
||||
#
|
||||
# You can adjust the behavior by modifying this file.
|
||||
# For more information, see:
|
||||
# https://github.com/actions/stale
|
||||
name: Mark stale issues and pull requests
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '21 23 * * *'
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/stale@v10
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
stale-issue-message: 'Stale issue message'
|
||||
stale-pr-message: 'Stale pull request message'
|
||||
stale-issue-label: 'no-issue-activity'
|
||||
stale-pr-label: 'no-pr-activity'
|
||||
51
.gitignore
vendored
51
.gitignore
vendored
@@ -1,3 +1,52 @@
|
||||
# Python related
|
||||
__pycache__
|
||||
botpy.log
|
||||
.mypy_cache
|
||||
.venv*
|
||||
.conda/
|
||||
uv.lock
|
||||
.coverage
|
||||
|
||||
# IDE and editors
|
||||
.vscode
|
||||
.idea
|
||||
|
||||
# Logs and temporary files
|
||||
botpy.log
|
||||
logs/
|
||||
temp
|
||||
cookies.json
|
||||
|
||||
# Data files
|
||||
data_v2.db
|
||||
data_v3.db
|
||||
data
|
||||
configs/session
|
||||
configs/config.yaml
|
||||
cmd_config.json
|
||||
|
||||
# Plugins and packages
|
||||
addons/plugins
|
||||
packages/python_interpreter/workplace
|
||||
tests/astrbot_plugin_openai
|
||||
|
||||
# Dashboard
|
||||
dashboard/node_modules/
|
||||
dashboard/dist/
|
||||
package-lock.json
|
||||
package.json
|
||||
yarn.lock
|
||||
|
||||
# Operating System
|
||||
**/.DS_Store
|
||||
.DS_Store
|
||||
|
||||
# AstrBot specific
|
||||
.astrbot
|
||||
astrbot.lock
|
||||
|
||||
# Other
|
||||
chroma
|
||||
venv/*
|
||||
pytest.ini
|
||||
AGENTS.md
|
||||
IFLOW.md
|
||||
|
||||
25
.pre-commit-config.yaml
Normal file
25
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,25 @@
|
||||
default_install_hook_types: [pre-commit, prepare-commit-msg]
|
||||
ci:
|
||||
autofix_commit_msg: ":balloon: auto fixes by pre-commit hooks"
|
||||
autofix_prs: true
|
||||
autoupdate_branch: master
|
||||
autoupdate_schedule: weekly
|
||||
autoupdate_commit_msg: ":balloon: pre-commit autoupdate"
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.14.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff-check
|
||||
types_or: [ python, pyi ]
|
||||
args: [ --fix ]
|
||||
# Run the formatter.
|
||||
- id: ruff-format
|
||||
types_or: [ python, pyi ]
|
||||
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.21.0
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: [--py310-plus]
|
||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.10
|
||||
32
Dockerfile
Normal file
32
Dockerfile
Normal file
@@ -0,0 +1,32 @@
|
||||
FROM python:3.11-slim
|
||||
WORKDIR /AstrBot
|
||||
|
||||
COPY . /AstrBot/
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
gcc \
|
||||
build-essential \
|
||||
python3-dev \
|
||||
libffi-dev \
|
||||
libssl-dev \
|
||||
ca-certificates \
|
||||
bash \
|
||||
ffmpeg \
|
||||
curl \
|
||||
gnupg \
|
||||
git \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
RUN apt-get update && apt-get install -y curl gnupg \
|
||||
&& curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - \
|
||||
&& apt-get install -y nodejs
|
||||
|
||||
RUN python -m pip install uv \
|
||||
&& echo "3.11" > .python-version
|
||||
RUN uv pip install -r requirements.txt --no-cache-dir --system
|
||||
RUN uv pip install socksio uv pilk --no-cache-dir --system
|
||||
|
||||
EXPOSE 6185
|
||||
|
||||
CMD ["python", "main.py"]
|
||||
4
LICENSE
4
LICENSE
@@ -629,8 +629,8 @@ to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
AstrBot is a llm-powered chatbot and develop framework.
|
||||
Copyright (C) 2022-2099 Soulter
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published
|
||||
|
||||
522
README.md
522
README.md
@@ -1,311 +1,233 @@
|
||||

|
||||
|
||||
</p>
|
||||
|
||||
<div align="center">
|
||||
|
||||
<img src="https://socialify.git.ci/Soulter/QQChannelChatGPT/image?description=1&forks=1&issues=1&language=1&name=1&owner=1&pattern=Circuit%20Board&stargazers=1&theme=Light" alt="QQChannelChatGPT" width="600" height="300" />
|
||||
<br>
|
||||
|
||||
[](https://www.python.org/)
|
||||
[](https://github.com/Soulter/QQChannelChatGPT/blob/master/LICENSE)
|
||||

|
||||
|
||||
_✨在QQ和QQ频道上使用ChatGPT、NewBing等语言模型,稳定,一次部署,同时使用✨_
|
||||
<div>
|
||||
<a href="https://trendshift.io/repositories/12875" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12875" alt="Soulter%2FAstrBot | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
<a href="https://hellogithub.com/repository/AstrBotDevs/AstrBot" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=d127d50cd5e54c5382328acc3bb25483&claim_uid=ZO9by7qCXgSd6Lp&t=2" alt="Featured|HelloGitHub" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
</div>
|
||||
|
||||
_✨教程:https://github.com/Soulter/QQChannelChatGPT/wiki ✨_
|
||||
|
||||
_✨插件开发教程:https://github.com/Soulter/QQChannelChatGPT/wiki/%E5%9B%9B%E3%80%81%E5%BC%80%E5%8F%91%E6%8F%92%E4%BB%B6 ✨_
|
||||
<br>
|
||||
|
||||
_✨欢迎体验😊(频道名: GPT机器人 | 频道号: x42d56aki2 | QQ群号:322154837)✨_
|
||||
<div>
|
||||
<img src="https://img.shields.io/github/v/release/AstrBotDevs/AstrBot?style=for-the-badge&color=76bad9" href="https://github.com/AstrBotDevs/AstrBot/releases/latest">
|
||||
<img src="https://img.shields.io/badge/python-3.10+-blue.svg?style=for-the-badge&color=76bad9" alt="python">
|
||||
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg?style=for-the-badge&color=76bad9"/></a>
|
||||
<a href="https://qm.qq.com/cgi-bin/qm/qr?k=wtbaNx7EioxeaqS9z7RQWVXPIxg2zYr7&jump_from=webapi&authKey=vlqnv/AV2DbJEvGIcxdlNSpfxVy+8vVqijgreRdnVKOaydpc+YSw4MctmEbr0k5"><img alt="QQ_community" src="https://img.shields.io/badge/QQ群-775869627-purple?style=for-the-badge&color=76bad9"></a>
|
||||
<a href="https://t.me/+hAsD2Ebl5as3NmY1"><img alt="Telegram_community" src="https://img.shields.io/badge/Telegram-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||
<img src="https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fapi.soulter.top%2Fastrbot%2Fplugin-num&query=%24.result&suffix=%E4%B8%AA&style=for-the-badge&label=%E6%8F%92%E4%BB%B6%E5%B8%82%E5%9C%BA&cacheSeconds=3600">
|
||||
</div>
|
||||
|
||||
<!-- <img src="https://user-images.githubusercontent.com/37870767/230417115-9dd3c9d5-6b6b-4928-8fe3-82f559208aab.JPG" width="300"></img> -->
|
||||
<br>
|
||||
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/blob/master/README_en.md">English</a> |
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/blob/master/README_ja.md">日本語</a> |
|
||||
<a href="https://astrbot.app/">文档</a> |
|
||||
<a href="https://blog.astrbot.app/">Blog</a> |
|
||||
<a href="https://astrbot.featurebase.app/roadmap">路线图</a> |
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/issues">问题提交</a>
|
||||
</div>
|
||||
|
||||
AstrBot 是一个开源的一站式 Agent 聊天机器人平台,可无缝接入主流即时通讯软件,为个人、开发者和团队打造可靠、可扩展的对话式智能基础设施。无论是个人 AI 伙伴、智能客服、自动化助手,还是企业知识库,AstrBot 都能在你的即时通讯软件平台的工作流中快速构建生产可用的 AI 应用。
|
||||
|
||||
## 主要功能
|
||||
|
||||
1. **大模型对话**。支持接入多种大模型服务。支持多模态、工具调用、MCP、原生知识库、人设等功能。
|
||||
2. **多消息平台支持**。支持接入 QQ、企业微信、微信公众号、飞书、Telegram、钉钉、Discord、KOOK 等平台。支持速率限制、白名单、百度内容审核。
|
||||
3. **Agent**。完善适配的 Agentic 能力。支持多轮工具调用、内置沙盒代码执行器、网页搜索等功能。
|
||||
4. **插件扩展**。深度优化的插件机制,支持[开发插件](https://astrbot.app/dev/plugin.html)扩展功能,社区插件生态丰富。
|
||||
5. **WebUI**。可视化配置和管理机器人,功能齐全。
|
||||
|
||||
## 部署方式
|
||||
|
||||
#### Docker 部署(推荐 🥳)
|
||||
|
||||
推荐使用 Docker / Docker Compose 方式部署 AstrBot。
|
||||
|
||||
请参阅官方文档 [使用 Docker 部署 AstrBot](https://astrbot.app/deploy/astrbot/docker.html#%E4%BD%BF%E7%94%A8-docker-%E9%83%A8%E7%BD%B2-astrbot) 。
|
||||
|
||||
#### 宝塔面板部署
|
||||
|
||||
AstrBot 与宝塔面板合作,已上架至宝塔面板。
|
||||
|
||||
请参阅官方文档 [宝塔面板部署](https://astrbot.app/deploy/astrbot/btpanel.html) 。
|
||||
|
||||
#### 1Panel 部署
|
||||
|
||||
AstrBot 已由 1Panel 官方上架至 1Panel 面板。
|
||||
|
||||
请参阅官方文档 [1Panel 部署](https://astrbot.app/deploy/astrbot/1panel.html) 。
|
||||
|
||||
#### 在 雨云 上部署
|
||||
|
||||
AstrBot 已由雨云官方上架至云应用平台,可一键部署。
|
||||
|
||||
[](https://app.rainyun.com/apps/rca/store/5994?ref=NjU1ODg0)
|
||||
|
||||
#### 在 Replit 上部署
|
||||
|
||||
社区贡献的部署方式。
|
||||
|
||||
[](https://repl.it/github/AstrBotDevs/AstrBot)
|
||||
|
||||
#### Windows 一键安装器部署
|
||||
|
||||
请参阅官方文档 [使用 Windows 一键安装器部署 AstrBot](https://astrbot.app/deploy/astrbot/windows.html) 。
|
||||
|
||||
#### CasaOS 部署
|
||||
|
||||
社区贡献的部署方式。
|
||||
|
||||
请参阅官方文档 [CasaOS 部署](https://astrbot.app/deploy/astrbot/casaos.html) 。
|
||||
|
||||
#### 手动部署
|
||||
|
||||
首先安装 uv:
|
||||
|
||||
```bash
|
||||
pip install uv
|
||||
```
|
||||
|
||||
通过 Git Clone 安装 AstrBot:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/AstrBotDevs/AstrBot && cd AstrBot
|
||||
uv run main.py
|
||||
```
|
||||
|
||||
或者请参阅官方文档 [通过源码部署 AstrBot](https://astrbot.app/deploy/astrbot/cli.html) 。
|
||||
|
||||
## 🌍 社区
|
||||
|
||||
### QQ 群组
|
||||
|
||||
- 1 群:322154837
|
||||
- 3 群:630166526
|
||||
- 5 群:822130018
|
||||
- 6 群:753075035
|
||||
- 开发者群:975206796
|
||||
|
||||
### Telegram 群组
|
||||
|
||||
<a href="https://t.me/+hAsD2Ebl5as3NmY1"><img alt="Telegram_community" src="https://img.shields.io/badge/Telegram-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||
|
||||
### Discord 群组
|
||||
|
||||
<a href="https://discord.gg/hAVk6tgV36"><img alt="Discord_community" src="https://img.shields.io/badge/Discord-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||
|
||||
## 支持的消息平台
|
||||
|
||||
**官方维护**
|
||||
|
||||
- QQ (官方平台 & OneBot)
|
||||
- Telegram
|
||||
- 企微应用 & 企微智能机器人
|
||||
- 微信客服 & 微信公众号
|
||||
- 飞书
|
||||
- 钉钉
|
||||
- Slack
|
||||
- Discord
|
||||
- Satori
|
||||
- Misskey
|
||||
- Whatsapp (将支持)
|
||||
- LINE (将支持)
|
||||
|
||||
**社区维护**
|
||||
|
||||
- [KOOK](https://github.com/wuyan1003/astrbot_plugin_kook_adapter)
|
||||
- [VoceChat](https://github.com/HikariFroya/astrbot_plugin_vocechat)
|
||||
- [Bilibili 私信](https://github.com/Hina-Chat/astrbot_plugin_bilibili_adapter)
|
||||
- [wxauto](https://github.com/luosheng520qaq/wxauto-repost-onebotv11)
|
||||
|
||||
## 支持的模型服务
|
||||
|
||||
**大模型服务**
|
||||
|
||||
- OpenAI 及兼容服务
|
||||
- Anthropic
|
||||
- Google Gemini
|
||||
- Moonshot AI
|
||||
- 智谱 AI
|
||||
- DeepSeek
|
||||
- Ollama (本地部署)
|
||||
- LM Studio (本地部署)
|
||||
- [优云智算](https://www.compshare.cn/?ytag=GPU_YY-gh_astrbot&referral_code=FV7DcGowN4hB5UuXKgpE74)
|
||||
- [302.AI](https://share.302.ai/rr1M3l)
|
||||
- [小马算力](https://www.tokenpony.cn/3YPyf)
|
||||
- [硅基流动](https://docs.siliconflow.cn/cn/usercases/use-siliconcloud-in-astrbot)
|
||||
- [PPIO 派欧云](https://ppio.com/user/register?invited_by=AIOONE)
|
||||
- ModelScope
|
||||
- OneAPI
|
||||
|
||||
**LLMOps 平台**
|
||||
|
||||
- Dify
|
||||
- 阿里云百炼应用
|
||||
- Coze
|
||||
|
||||
**语音转文本服务**
|
||||
|
||||
- OpenAI Whisper
|
||||
- SenseVoice
|
||||
|
||||
**文本转语音服务**
|
||||
|
||||
- OpenAI TTS
|
||||
- Gemini TTS
|
||||
- GPT-Sovits-Inference
|
||||
- GPT-Sovits
|
||||
- FishAudio
|
||||
- Edge TTS
|
||||
- 阿里云百炼 TTS
|
||||
- Azure TTS
|
||||
- Minimax TTS
|
||||
- 火山引擎 TTS
|
||||
|
||||
## ❤️ 贡献
|
||||
|
||||
欢迎任何 Issues/Pull Requests!只需要将你的更改提交到此项目 :)
|
||||
|
||||
### 如何贡献
|
||||
|
||||
你可以通过查看问题或帮助审核 PR(拉取请求)来贡献。任何问题或 PR 都欢迎参与,以促进社区贡献。当然,这些只是建议,你可以以任何方式进行贡献。对于新功能的添加,请先通过 Issue 讨论。
|
||||
|
||||
### 开发环境
|
||||
|
||||
AstrBot 使用 `ruff` 进行代码格式化和检查。
|
||||
|
||||
```bash
|
||||
git clone https://github.com/AstrBotDevs/AstrBot
|
||||
pip install pre-commit
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
## ❤️ Special Thanks
|
||||
|
||||
特别感谢所有 Contributors 和插件开发者对 AstrBot 的贡献 ❤️
|
||||
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=AstrBotDevs/AstrBot" />
|
||||
</a>
|
||||
|
||||
此外,本项目的诞生离不开以下开源项目的帮助:
|
||||
|
||||
- [NapNeko/NapCatQQ](https://github.com/NapNeko/NapCatQQ) - 伟大的猫猫框架
|
||||
|
||||
## ⭐ Star History
|
||||
|
||||
> [!TIP]
|
||||
> 如果本项目对您的生活 / 工作产生了帮助,或者您关注本项目的未来发展,请给项目 Star,这是我们维护这个开源项目的动力 <3
|
||||
|
||||
<div align="center">
|
||||
|
||||
[](https://star-history.com/#astrbotdevs/astrbot&Date)
|
||||
|
||||
</div>
|
||||
|
||||
## ⭐功能:
|
||||
|
||||
近期新功能:
|
||||
- 支持插件!https://github.com/Soulter/QQChannelChatGPT/wiki/%E5%9B%9B%E3%80%81%E5%BC%80%E5%8F%91%E6%8F%92%E4%BB%B6
|
||||
- 支持一键切换语言模型(使用/bing /revgpt /gpt分别可以切换newbing、逆向ChatGPT、官方ChatGPT模型)
|
||||
- 热更新
|
||||
- 接入QQ,支持在QQ上和QQ频道上同时聊天!https://github.com/Soulter/QQChannelChatGPT/issues/82
|
||||
- 更强大的Windows启动器,环境配置自动搞定。链接:https://github.com/Soulter/QQChatGPTLauncher/releases/latest
|
||||
|
||||
支持的AI语言模型(请在`configs/config.yaml`下配置):
|
||||
- 逆向ChatGPT库
|
||||
- 官方ChatGPT AI
|
||||
- 文心一言(即将支持)
|
||||
- NewBing
|
||||
- Bard (即将支持)
|
||||
|
||||
部署此项目的教程链接:https://github.com/Soulter/QQChannelChatGPT/wiki
|
||||
|
||||
### 基本功能
|
||||
<details>
|
||||
<summary>✅ 回复符合上下文</summary>
|
||||
|
||||
- 程序向API发送最近多次对话内容,模型根据上下文生成回复
|
||||
|
||||
- 你可在`configs/config.yaml`中修改`total_token_limit`来近似控制缓存大小。
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>✅ 超额自动切换</summary>
|
||||
|
||||
- 超额时,程序自动切换openai的key,方便快捷
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
|
||||
<summary>✅ 支持统计频道、消息数量等信息</summary>
|
||||
|
||||
- 实现了简单的统计功能
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>✅ 多并发处理,回复速度快</summary>
|
||||
|
||||
- 使用了协程,理论最高可以支持每个子频道每秒回复5条信息
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>✅ 持久化转储历史记录,重启不丢失</summary>
|
||||
|
||||
- 使用内置的sqlite数据库存储历史记录到本地
|
||||
|
||||
- 方式为定时转储,可在`config.yaml`下修改`dump_history_interval`来修改间隔时间,单位为分钟。
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>✅ 支持多种指令控制</summary>
|
||||
|
||||
- 详见下方`指令功能`
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>✅ 官方API,稳定</summary>
|
||||
|
||||
- 不使用ChatGPT逆向接口,而使用官方API接口,稳定方便。
|
||||
|
||||
- QQ频道机器人框架为QQ官方开源的框架,稳定。
|
||||
|
||||
</details>
|
||||
|
||||
> 关于token:token就相当于是AI中的单词数(但是不等于单词数),`text-davinci-003`模型中最大可以支持`4097`个token。在发送信息时,这个机器人会将用户的历史聊天记录打包发送给ChatGPT,因此,`token`也会相应的累加,为了保证聊天的上下文的逻辑性,就有了缓存token。
|
||||
|
||||
### 🛠️ 插件支持
|
||||
|
||||
本项目支持接入插件。
|
||||
|
||||
插件开发教程:https://github.com/Soulter/QQChannelChatGPT/wiki/%E5%9B%9B%E3%80%81%E5%BC%80%E5%8F%91%E6%8F%92%E4%BB%B6
|
||||
|
||||
部分好用的插件:
|
||||
|
||||
`HuggingChat`: https://github.com/Soulter/HuggingChatForQQBot
|
||||
|
||||
|
||||
### 指令功能
|
||||
|
||||
#### OpenAI官方API
|
||||
在频道内需要先`@`机器人之后再输入指令;在QQ中暂时需要在消息前加上`ai `,不需要@
|
||||
- `/reset`重置prompt
|
||||
- `/his`查看历史记录(每个用户都有独立的会话)
|
||||
- `/his [页码数]`查看不同页码的历史记录。例如`/his 2`查看第2页
|
||||
- `/token`查看当前缓存的总token数
|
||||
- `/count` 查看统计
|
||||
- `/status` 查看chatGPT的配置
|
||||
- `/help` 查看帮助
|
||||
- `/key` 动态添加key
|
||||
- `/set` 人格设置面板
|
||||
- `/keyword nihao 你好` 设置关键词回复。nihao->你好
|
||||
- `/bing` 切换为bing
|
||||
- `/revgpt` 切换为ChatGPT逆向库
|
||||
- `/画` 画画
|
||||
|
||||
#### Bing语言模型
|
||||
- `/reset`重置prompt
|
||||
- `/gpt` 切换为OpenAI官方API
|
||||
- `/revgpt` 切换为ChatGPT逆向库
|
||||
|
||||
#### 逆向ChatGPT库语言模型
|
||||
- `/gpt` 切换为OpenAI官方API
|
||||
- `/bing` 切换为bing
|
||||
|
||||
* 切换模型指令支持临时回复。如`/bing 你好`将会临时使用一次bing模型
|
||||
|
||||
## 📰使用方法:
|
||||
|
||||
**详细部署教程链接**:https://soulter.top/posts/qpdg.html
|
||||
|
||||
**Windows用户推荐Windows一键安装,请前往Release下载最新版本(Beta)**
|
||||
有报错请先看issue,解决不了再在频道内反馈。
|
||||
|
||||
### 安装第三方库
|
||||
|
||||
```shell
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
> ⚠Python版本应>=3.9
|
||||
|
||||
|
||||
### 配置
|
||||
|
||||
**详细部署教程链接**:https://github.com/Soulter/QQChannelChatGPT/wiki
|
||||
|
||||
### 启动
|
||||
- 启动main.py
|
||||
|
||||
|
||||
## 感谢
|
||||
本项目使用了以下项目:
|
||||
|
||||
[ChatGPT by acheong08](https://github.com/acheong08/ChatGPT)
|
||||
|
||||
[EdgeGPT by acheong08](https://github.com/acheong08/EdgeGPT)
|
||||
|
||||
[go-cqhttp by Mrs4s](https://github.com/Mrs4s/go-cqhttp)
|
||||
|
||||
[nakuru-project by Lxns-Network](https://github.com/Lxns-Network/nakuru-project)
|
||||
|
||||
|
||||
## ⚙配置文件说明:
|
||||
```yaml
|
||||
# 如果你不知道怎么部署,请查看https://github.com/Soulter/QQChannelChatGPT/wiki
|
||||
# 不一定需要key了,如果你没有key但有openAI账号或者必应账号,可以考虑使用下面的逆向库
|
||||
|
||||
|
||||
###############平台设置#################
|
||||
|
||||
# QQ频道机器人
|
||||
# QQ开放平台的appid和令牌
|
||||
# q.qq.com
|
||||
# enable为true则启用,false则不启用
|
||||
qqbot:
|
||||
enable: true
|
||||
appid:
|
||||
token:
|
||||
|
||||
# QQ机器人
|
||||
# enable为true则启用,false则不启用
|
||||
# 需要安装GO-CQHTTP配合使用。
|
||||
# 文档:https://docs.go-cqhttp.org/
|
||||
# 请将go-cqhttp的配置文件的sever部分粘贴为以下内容,否则无法使用
|
||||
# 请先启动go-cqhttp再启动本程序
|
||||
#
|
||||
# servers:
|
||||
# - http:
|
||||
# host: 127.0.0.1
|
||||
# version: 0
|
||||
# port: 5700
|
||||
# timeout: 5
|
||||
# - ws:
|
||||
# address: 127.0.0.1:6700
|
||||
# middlewares:
|
||||
# <<: *default
|
||||
gocqbot:
|
||||
enable: false
|
||||
|
||||
# 设置是否一个人一个会话
|
||||
uniqueSessionMode: false
|
||||
# QChannelBot 的版本,请勿修改此字段,否则可能产生一些bug
|
||||
version: 3.0
|
||||
# [Beta] 转储历史记录时间间隔(分钟)
|
||||
dump_history_interval: 10
|
||||
# 一个用户只能在time秒内发送count条消息
|
||||
limit:
|
||||
time: 60
|
||||
count: 5
|
||||
# 公告
|
||||
notice: "此机器人由Github项目QQChannelChatGPT驱动。"
|
||||
# 是否打开私信功能
|
||||
# 设置为true则频道成员可以私聊机器人。
|
||||
# 设置为false则频道成员不能私聊机器人。
|
||||
direct_message_mode: true
|
||||
|
||||
# 系统代理
|
||||
# http_proxy: http://localhost:7890
|
||||
# https_proxy: http://localhost:7890
|
||||
|
||||
# 自定义回复前缀,如[Rev]或其他,务必加引号以防止不必要的bug。
|
||||
reply_prefix:
|
||||
openai_official: "[GPT]"
|
||||
rev_chatgpt: "[Rev]"
|
||||
rev_edgegpt: "[RevBing]"
|
||||
|
||||
# 百度内容审核服务
|
||||
# 新用户免费5万次调用。https://cloud.baidu.com/doc/ANTIPORN/index.html
|
||||
baidu_aip:
|
||||
enable: false
|
||||
app_id:
|
||||
api_key:
|
||||
secret_key:
|
||||
|
||||
|
||||
|
||||
|
||||
###############语言模型设置#################
|
||||
|
||||
|
||||
# OpenAI官方API
|
||||
# 注意:已支持多key自动切换,方法:
|
||||
# key:
|
||||
# - sk-xxxxxx
|
||||
# - sk-xxxxxx
|
||||
# 在下方非注释的地方使用以上格式
|
||||
# 关于api_base:可以使用一些云函数(如腾讯、阿里)来避免国内被墙的问题。
|
||||
# 详见:
|
||||
# https://github.com/Ice-Hazymoon/openai-scf-proxy
|
||||
# https://github.com/Soulter/QQChannelChatGPT/issues/42
|
||||
# 设置为none则表示使用官方默认api地址
|
||||
openai:
|
||||
key:
|
||||
-
|
||||
api_base: none
|
||||
# 这里是GPT配置,语言模型默认使用gpt-3.5-turbo
|
||||
chatGPTConfigs:
|
||||
model: gpt-3.5-turbo
|
||||
max_tokens: 3000
|
||||
temperature: 0.9
|
||||
top_p: 1
|
||||
frequency_penalty: 0
|
||||
presence_penalty: 0
|
||||
|
||||
total_tokens_limit: 5000
|
||||
|
||||
# 逆向文心一言【暂时不可用,请勿使用】
|
||||
rev_ernie:
|
||||
enable: false
|
||||
|
||||
# 逆向New Bing
|
||||
# 需要在项目根目录下创建cookies.json并粘贴cookies进去。
|
||||
# 详见:https://soulter.top/posts/qpdg.html
|
||||
rev_edgegpt:
|
||||
enable: false
|
||||
|
||||
# 逆向ChatGPT库
|
||||
# https://github.com/acheong08/ChatGPT
|
||||
# 优点:免费(无免费额度限制);
|
||||
# 缺点:速度相对慢。OpenAI 速率限制:免费帐户每小时 50 个请求。您可以通过多帐户循环来绕过它
|
||||
# enable设置为true后,将会停止使用上面正常的官方API调用而使用本逆向项目
|
||||
#
|
||||
# 多账户可以保证每个请求都能得到及时的回复。
|
||||
# 关于account的格式
|
||||
# account:
|
||||
# - email: 第1个账户
|
||||
# password: 第1个账户密码
|
||||
# - email: 第2个账户
|
||||
# password: 第2个账户密码
|
||||
# - ....
|
||||
# 支持使用access_token登录
|
||||
# 例:
|
||||
# - session_token: xxxxx
|
||||
# - access_token: xxxx
|
||||
# 请严格按照上面这个格式填写。
|
||||
# 逆向ChatGPT库的email-password登录方式不工作,建议使用access_token登录
|
||||
# 获取access_token的方法,详见:https://soulter.top/posts/qpdg.html
|
||||
rev_ChatGPT:
|
||||
enable: false
|
||||
account:
|
||||
- access_token:
|
||||
```
|
||||
_私は、高性能ですから!_
|
||||
|
||||
233
README_en.md
Normal file
233
README_en.md
Normal file
@@ -0,0 +1,233 @@
|
||||

|
||||
|
||||
</p>
|
||||
|
||||
<div align="center">
|
||||
|
||||
<br>
|
||||
|
||||
<div>
|
||||
<a href="https://trendshift.io/repositories/12875" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12875" alt="Soulter%2FAstrBot | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
<a href="https://hellogithub.com/repository/AstrBotDevs/AstrBot" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=d127d50cd5e54c5382328acc3bb25483&claim_uid=ZO9by7qCXgSd6Lp&t=2" alt="Featured|HelloGitHub" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
</div>
|
||||
|
||||
<br>
|
||||
|
||||
<div>
|
||||
<img src="https://img.shields.io/github/v/release/AstrBotDevs/AstrBot?style=for-the-badge&color=76bad9" href="https://github.com/AstrBotDevs/AstrBot/releases/latest">
|
||||
<img src="https://img.shields.io/badge/python-3.10+-blue.svg?style=for-the-badge&color=76bad9" alt="python">
|
||||
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg?style=for-the-badge&color=76bad9"/></a>
|
||||
<a href="https://qm.qq.com/cgi-bin/qm/qr?k=wtbaNx7EioxeaqS9z7RQWVXPIxg2zYr7&jump_from=webapi&authKey=vlqnv/AV2DbJEvGIcxdlNSpfxVy+8vVqijgreRdnVKOaydpc+YSw4MctmEbr0k5"><img alt="QQ_community" src="https://img.shields.io/badge/QQ群-775869627-purple?style=for-the-badge&color=76bad9"></a>
|
||||
<a href="https://t.me/+hAsD2Ebl5as3NmY1"><img alt="Telegram_community" src="https://img.shields.io/badge/Telegram-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||
<img src="https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fapi.soulter.top%2Fastrbot%2Fplugin-num&query=%24.result&suffix=%E4%B8%AA&style=for-the-badge&label=%E6%8F%92%E4%BB%B6%E5%B8%82%E5%9C%BA&cacheSeconds=3600">
|
||||
</div>
|
||||
|
||||
<br>
|
||||
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/blob/master/README.md">中文</a> |
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/blob/master/README_ja.md">日本語</a> |
|
||||
<a href="https://astrbot.app/">Documentation</a> |
|
||||
<a href="https://blog.astrbot.app/">Blog</a> |
|
||||
<a href="https://astrbot.featurebase.app/roadmap">Roadmap</a> |
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/issues">Issue Tracker</a>
|
||||
</div>
|
||||
|
||||
AstrBot is an open-source all-in-one Agent chatbot platform and development framework.
|
||||
|
||||
## Key Features
|
||||
|
||||
1. **LLM Conversations**. Supports integration with various large language model services. Features include multimodal capabilities, tool calling, MCP, native knowledge base, character personas, and more.
|
||||
2. **Multi-Platform Support**. Integrates with QQ, WeChat Work, WeChat Official Accounts, Feishu, Telegram, DingTalk, Discord, KOOK, and other platforms. Supports rate limiting, whitelisting, and Baidu content moderation.
|
||||
3. **Agent Capabilities**. Fully optimized agentic features including multi-turn tool calling, built-in sandboxed code executor, web search, and more.
|
||||
4. **Plugin Extensions**. Deeply optimized plugin mechanism supporting [plugin development](https://astrbot.app/dev/plugin.html) to extend functionality, with a rich community plugin ecosystem.
|
||||
5. **Web UI**. Visual configuration and management of your bot with comprehensive features.
|
||||
|
||||
## Deployment Methods
|
||||
|
||||
#### Docker Deployment (Recommended 🥳)
|
||||
|
||||
We recommend deploying AstrBot using Docker or Docker Compose.
|
||||
|
||||
Please refer to the official documentation: [Deploy AstrBot with Docker](https://astrbot.app/deploy/astrbot/docker.html#%E4%BD%BF%E7%94%A8-docker-%E9%83%A8%E7%BD%B2-astrbot).
|
||||
|
||||
#### BT-Panel Deployment
|
||||
|
||||
AstrBot has partnered with BT-Panel and is now available in their marketplace.
|
||||
|
||||
Please refer to the official documentation: [BT-Panel Deployment](https://astrbot.app/deploy/astrbot/btpanel.html).
|
||||
|
||||
#### 1Panel Deployment
|
||||
|
||||
AstrBot has been officially listed on the 1Panel marketplace.
|
||||
|
||||
Please refer to the official documentation: [1Panel Deployment](https://astrbot.app/deploy/astrbot/1panel.html).
|
||||
|
||||
#### Deploy on RainYun
|
||||
|
||||
AstrBot has been officially listed on RainYun's cloud application platform with one-click deployment.
|
||||
|
||||
[](https://app.rainyun.com/apps/rca/store/5994?ref=NjU1ODg0)
|
||||
|
||||
#### Deploy on Replit
|
||||
|
||||
Community-contributed deployment method.
|
||||
|
||||
[](https://repl.it/github/AstrBotDevs/AstrBot)
|
||||
|
||||
#### Windows One-Click Installer
|
||||
|
||||
Please refer to the official documentation: [Deploy AstrBot with Windows One-Click Installer](https://astrbot.app/deploy/astrbot/windows.html).
|
||||
|
||||
#### CasaOS Deployment
|
||||
|
||||
Community-contributed deployment method.
|
||||
|
||||
Please refer to the official documentation: [CasaOS Deployment](https://astrbot.app/deploy/astrbot/casaos.html).
|
||||
|
||||
#### Manual Deployment
|
||||
|
||||
First, install uv:
|
||||
|
||||
```bash
|
||||
pip install uv
|
||||
```
|
||||
|
||||
Install AstrBot via Git Clone:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/AstrBotDevs/AstrBot && cd AstrBot
|
||||
uv run main.py
|
||||
```
|
||||
|
||||
Or refer to the official documentation: [Deploy AstrBot from Source](https://astrbot.app/deploy/astrbot/cli.html).
|
||||
|
||||
## 🌍 Community
|
||||
|
||||
### QQ Groups
|
||||
|
||||
- Group 1: 322154837
|
||||
- Group 3: 630166526
|
||||
- Group 5: 822130018
|
||||
- Group 6: 753075035
|
||||
- Developer Group: 975206796
|
||||
|
||||
### Telegram Group
|
||||
|
||||
<a href="https://t.me/+hAsD2Ebl5as3NmY1"><img alt="Telegram_community" src="https://img.shields.io/badge/Telegram-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||
|
||||
### Discord Server
|
||||
|
||||
<a href="https://discord.gg/hAVk6tgV36"><img alt="Discord_community" src="https://img.shields.io/badge/Discord-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||
|
||||
## Supported Messaging Platforms
|
||||
|
||||
**Officially Maintained**
|
||||
|
||||
- QQ (Official Platform & OneBot)
|
||||
- Telegram
|
||||
- WeChat Work Application & WeChat Work Intelligent Bot
|
||||
- WeChat Customer Service & WeChat Official Accounts
|
||||
- Feishu (Lark)
|
||||
- DingTalk
|
||||
- Slack
|
||||
- Discord
|
||||
- Satori
|
||||
- Misskey
|
||||
- WhatsApp (Coming Soon)
|
||||
- LINE (Coming Soon)
|
||||
|
||||
**Community Maintained**
|
||||
|
||||
- [KOOK](https://github.com/wuyan1003/astrbot_plugin_kook_adapter)
|
||||
- [VoceChat](https://github.com/HikariFroya/astrbot_plugin_vocechat)
|
||||
- [Bilibili Direct Messages](https://github.com/Hina-Chat/astrbot_plugin_bilibili_adapter)
|
||||
- [wxauto](https://github.com/luosheng520qaq/wxauto-repost-onebotv11)
|
||||
|
||||
## Supported Model Services
|
||||
|
||||
**LLM Services**
|
||||
|
||||
- OpenAI and Compatible Services
|
||||
- Anthropic
|
||||
- Google Gemini
|
||||
- Moonshot AI
|
||||
- Zhipu AI
|
||||
- DeepSeek
|
||||
- Ollama (Self-hosted)
|
||||
- LM Studio (Self-hosted)
|
||||
- [CompShare](https://www.compshare.cn/?ytag=GPU_YY-gh_astrbot&referral_code=FV7DcGowN4hB5UuXKgpE74)
|
||||
- [302.AI](https://share.302.ai/rr1M3l)
|
||||
- [TokenPony](https://www.tokenpony.cn/3YPyf)
|
||||
- [SiliconFlow](https://docs.siliconflow.cn/cn/usecases/use-siliconcloud-in-astrbot)
|
||||
- [PPIO Cloud](https://ppio.com/user/register?invited_by=AIOONE)
|
||||
- ModelScope
|
||||
- OneAPI
|
||||
|
||||
**LLMOps Platforms**
|
||||
|
||||
- Dify
|
||||
- Alibaba Cloud Bailian Applications
|
||||
- Coze
|
||||
|
||||
**Speech-to-Text Services**
|
||||
|
||||
- OpenAI Whisper
|
||||
- SenseVoice
|
||||
|
||||
**Text-to-Speech Services**
|
||||
|
||||
- OpenAI TTS
|
||||
- Gemini TTS
|
||||
- GPT-Sovits-Inference
|
||||
- GPT-Sovits
|
||||
- FishAudio
|
||||
- Edge TTS
|
||||
- Alibaba Cloud Bailian TTS
|
||||
- Azure TTS
|
||||
- Minimax TTS
|
||||
- Volcano Engine TTS
|
||||
|
||||
## ❤️ Contributing
|
||||
|
||||
Issues and Pull Requests are always welcome! Feel free to submit your changes to this project :)
|
||||
|
||||
### How to Contribute
|
||||
|
||||
You can contribute by reviewing issues or helping with pull request reviews. Any issues or PRs are welcome to encourage community participation. Of course, these are just suggestions—you can contribute in any way you like. For adding new features, please discuss through an Issue first.
|
||||
|
||||
### Development Environment
|
||||
|
||||
AstrBot uses `ruff` for code formatting and linting.
|
||||
|
||||
```bash
|
||||
git clone https://github.com/AstrBotDevs/AstrBot
|
||||
pip install pre-commit
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
## ❤️ Special Thanks
|
||||
|
||||
Special thanks to all Contributors and plugin developers for their contributions to AstrBot ❤️
|
||||
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=AstrBotDevs/AstrBot" />
|
||||
</a>
|
||||
|
||||
Additionally, the birth of this project would not have been possible without the help of the following open-source projects:
|
||||
|
||||
- [NapNeko/NapCatQQ](https://github.com/NapNeko/NapCatQQ) - The amazing cat framework
|
||||
|
||||
## ⭐ Star History
|
||||
|
||||
> [!TIP]
|
||||
> If this project has helped you in your life or work, or if you're interested in its future development, please give the project a Star. It's the driving force behind maintaining this open-source project <3
|
||||
|
||||
<div align="center">
|
||||
|
||||
[](https://star-history.com/#astrbotdevs/astrbot&Date)
|
||||
|
||||
</div>
|
||||
|
||||
</details>
|
||||
|
||||
_私は、高性能ですから!_
|
||||
233
README_ja.md
Normal file
233
README_ja.md
Normal file
@@ -0,0 +1,233 @@
|
||||

|
||||
|
||||
</p>
|
||||
|
||||
<div align="center">
|
||||
|
||||
<br>
|
||||
|
||||
<div>
|
||||
<a href="https://trendshift.io/repositories/12875" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12875" alt="Soulter%2FAstrBot | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
<a href="https://hellogithub.com/repository/AstrBotDevs/AstrBot" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=d127d50cd5e54c5382328acc3bb25483&claim_uid=ZO9by7qCXgSd6Lp&t=2" alt="Featured|HelloGitHub" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
</div>
|
||||
|
||||
<br>
|
||||
|
||||
<div>
|
||||
<img src="https://img.shields.io/github/v/release/AstrBotDevs/AstrBot?style=for-the-badge&color=76bad9" href="https://github.com/AstrBotDevs/AstrBot/releases/latest">
|
||||
<img src="https://img.shields.io/badge/python-3.10+-blue.svg?style=for-the-badge&color=76bad9" alt="python">
|
||||
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg?style=for-the-badge&color=76bad9"/></a>
|
||||
<a href="https://qm.qq.com/cgi-bin/qm/qr?k=wtbaNx7EioxeaqS9z7RQWVXPIxg2zYr7&jump_from=webapi&authKey=vlqnv/AV2DbJEvGIcxdlNSpfxVy+8vVqijgreRdnVKOaydpc+YSw4MctmEbr0k5"><img alt="QQ_community" src="https://img.shields.io/badge/QQ群-775869627-purple?style=for-the-badge&color=76bad9"></a>
|
||||
<a href="https://t.me/+hAsD2Ebl5as3NmY1"><img alt="Telegram_community" src="https://img.shields.io/badge/Telegram-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||
<img src="https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fapi.soulter.top%2Fastrbot%2Fplugin-num&query=%24.result&suffix=%E4%B8%AA&style=for-the-badge&label=%E6%8F%92%E4%BB%B6%E5%B8%82%E5%9C%BA&cacheSeconds=3600">
|
||||
</div>
|
||||
|
||||
<br>
|
||||
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/blob/master/README.md">中文</a> |
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/blob/master/README_en.md">English</a> |
|
||||
<a href="https://astrbot.app/">ドキュメント</a> |
|
||||
<a href="https://blog.astrbot.app/">Blog</a> |
|
||||
<a href="https://astrbot.featurebase.app/roadmap">ロードマップ</a> |
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/issues">Issue</a>
|
||||
</div>
|
||||
|
||||
AstrBot は、オープンソースのオールインワン Agent チャットボットプラットフォーム及び開発フレームワークです。
|
||||
|
||||
## 主な機能
|
||||
|
||||
1. **大規模言語モデル対話**。多様な大規模言語モデルサービスとの統合をサポート。マルチモーダル、ツール呼び出し、MCP、ネイティブナレッジベース、キャラクター設定などの機能を搭載。
|
||||
2. **マルチメッセージプラットフォームサポート**。QQ、WeChat Work、WeChat公式アカウント、Feishu、Telegram、DingTalk、Discord、KOOK などのプラットフォームと統合可能。レート制限、ホワイトリスト、Baidu コンテンツ審査をサポート。
|
||||
3. **Agent**。完全に最適化された Agentic 機能。マルチターンツール呼び出し、内蔵サンドボックスコード実行環境、Web 検索などの機能をサポート。
|
||||
4. **プラグイン拡張**。深く最適化されたプラグインメカニズムで、[プラグイン開発](https://astrbot.app/dev/plugin.html)による機能拡張をサポート。豊富なコミュニティプラグインエコシステム。
|
||||
5. **WebUI**。ビジュアル設定とボット管理、充実した機能。
|
||||
|
||||
## デプロイ方法
|
||||
|
||||
#### Docker デプロイ(推奨 🥳)
|
||||
|
||||
Docker / Docker Compose を使用した AstrBot のデプロイを推奨します。
|
||||
|
||||
公式ドキュメント [Docker を使用した AstrBot のデプロイ](https://astrbot.app/deploy/astrbot/docker.html#%E4%BD%BF%E7%94%A8-docker-%E9%83%A8%E7%BD%B2-astrbot) をご参照ください。
|
||||
|
||||
#### 宝塔パネルデプロイ
|
||||
|
||||
AstrBot は宝塔パネルと提携し、宝塔パネルに公開されています。
|
||||
|
||||
公式ドキュメント [宝塔パネルデプロイ](https://astrbot.app/deploy/astrbot/btpanel.html) をご参照ください。
|
||||
|
||||
#### 1Panel デプロイ
|
||||
|
||||
AstrBot は 1Panel 公式により 1Panel パネルに公開されています。
|
||||
|
||||
公式ドキュメント [1Panel デプロイ](https://astrbot.app/deploy/astrbot/1panel.html) をご参照ください。
|
||||
|
||||
#### 雨云でのデプロイ
|
||||
|
||||
AstrBot は雨云公式によりクラウドアプリケーションプラットフォームに公開され、ワンクリックでデプロイ可能です。
|
||||
|
||||
[](https://app.rainyun.com/apps/rca/store/5994?ref=NjU1ODg0)
|
||||
|
||||
#### Replit でのデプロイ
|
||||
|
||||
コミュニティ貢献によるデプロイ方法。
|
||||
|
||||
[](https://repl.it/github/AstrBotDevs/AstrBot)
|
||||
|
||||
#### Windows ワンクリックインストーラーデプロイ
|
||||
|
||||
公式ドキュメント [Windows ワンクリックインストーラーを使用した AstrBot のデプロイ](https://astrbot.app/deploy/astrbot/windows.html) をご参照ください。
|
||||
|
||||
#### CasaOS デプロイ
|
||||
|
||||
コミュニティ貢献によるデプロイ方法。
|
||||
|
||||
公式ドキュメント [CasaOS デプロイ](https://astrbot.app/deploy/astrbot/casaos.html) をご参照ください。
|
||||
|
||||
#### 手動デプロイ
|
||||
|
||||
まず uv をインストールします:
|
||||
|
||||
```bash
|
||||
pip install uv
|
||||
```
|
||||
|
||||
Git Clone で AstrBot をインストール:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/AstrBotDevs/AstrBot && cd AstrBot
|
||||
uv run main.py
|
||||
```
|
||||
|
||||
または、公式ドキュメント [ソースコードから AstrBot をデプロイ](https://astrbot.app/deploy/astrbot/cli.html) をご参照ください。
|
||||
|
||||
## 🌍 コミュニティ
|
||||
|
||||
### QQ グループ
|
||||
|
||||
- 1群:322154837
|
||||
- 3群:630166526
|
||||
- 5群:822130018
|
||||
- 6群:753075035
|
||||
- 開発者群:975206796
|
||||
|
||||
### Telegram グループ
|
||||
|
||||
<a href="https://t.me/+hAsD2Ebl5as3NmY1"><img alt="Telegram_community" src="https://img.shields.io/badge/Telegram-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||
|
||||
### Discord サーバー
|
||||
|
||||
<a href="https://discord.gg/hAVk6tgV36"><img alt="Discord_community" src="https://img.shields.io/badge/Discord-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||
|
||||
## サポートされているメッセージプラットフォーム
|
||||
|
||||
**公式メンテナンス**
|
||||
|
||||
- QQ (公式プラットフォーム & OneBot)
|
||||
- Telegram
|
||||
- WeChat Work アプリケーション & WeChat Work インテリジェントボット
|
||||
- WeChat カスタマーサービス & WeChat 公式アカウント
|
||||
- Feishu (Lark)
|
||||
- DingTalk
|
||||
- Slack
|
||||
- Discord
|
||||
- Satori
|
||||
- Misskey
|
||||
- WhatsApp (近日対応予定)
|
||||
- LINE (近日対応予定)
|
||||
|
||||
**コミュニティメンテナンス**
|
||||
|
||||
- [KOOK](https://github.com/wuyan1003/astrbot_plugin_kook_adapter)
|
||||
- [VoceChat](https://github.com/HikariFroya/astrbot_plugin_vocechat)
|
||||
- [Bilibili ダイレクトメッセージ](https://github.com/Hina-Chat/astrbot_plugin_bilibili_adapter)
|
||||
- [wxauto](https://github.com/luosheng520qaq/wxauto-repost-onebotv11)
|
||||
|
||||
## サポートされているモデルサービス
|
||||
|
||||
**大規模言語モデルサービス**
|
||||
|
||||
- OpenAI および互換サービス
|
||||
- Anthropic
|
||||
- Google Gemini
|
||||
- Moonshot AI
|
||||
- 智谱 AI
|
||||
- DeepSeek
|
||||
- Ollama (セルフホスト)
|
||||
- LM Studio (セルフホスト)
|
||||
- [優云智算](https://www.compshare.cn/?ytag=GPU_YY-gh_astrbot&referral_code=FV7DcGowN4hB5UuXKgpE74)
|
||||
- [302.AI](https://share.302.ai/rr1M3l)
|
||||
- [小馬算力](https://www.tokenpony.cn/3YPyf)
|
||||
- [硅基流動](https://docs.siliconflow.cn/cn/usercases/use-siliconcloud-in-astrbot)
|
||||
- [PPIO 派欧云](https://ppio.com/user/register?invited_by=AIOONE)
|
||||
- ModelScope
|
||||
- OneAPI
|
||||
|
||||
**LLMOps プラットフォーム**
|
||||
|
||||
- Dify
|
||||
- Alibaba Cloud 百炼アプリケーション
|
||||
- Coze
|
||||
|
||||
**音声認識サービス**
|
||||
|
||||
- OpenAI Whisper
|
||||
- SenseVoice
|
||||
|
||||
**音声合成サービス**
|
||||
|
||||
- OpenAI TTS
|
||||
- Gemini TTS
|
||||
- GPT-Sovits-Inference
|
||||
- GPT-Sovits
|
||||
- FishAudio
|
||||
- Edge TTS
|
||||
- Alibaba Cloud 百炼 TTS
|
||||
- Azure TTS
|
||||
- Minimax TTS
|
||||
- Volcano Engine TTS
|
||||
|
||||
## ❤️ コントリビューション
|
||||
|
||||
Issue や Pull Request は大歓迎です!このプロジェクトに変更を送信してください :)
|
||||
|
||||
### コントリビュート方法
|
||||
|
||||
Issue を確認したり、PR(プルリクエスト)のレビューを手伝うことで貢献できます。どんな Issue や PR への参加も歓迎され、コミュニティ貢献を促進します。もちろん、これらは提案に過ぎず、どんな方法でも貢献できます。新機能の追加については、まず Issue で議論してください。
|
||||
|
||||
### 開発環境
|
||||
|
||||
AstrBot はコードのフォーマットとチェックに `ruff` を使用しています。
|
||||
|
||||
```bash
|
||||
git clone https://github.com/AstrBotDevs/AstrBot
|
||||
pip install pre-commit
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
## ❤️ Special Thanks
|
||||
|
||||
AstrBot への貢献をしていただいたすべてのコントリビューターとプラグイン開発者に特別な感謝を ❤️
|
||||
|
||||
<a href="https://github.com/AstrBotDevs/AstrBot/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=AstrBotDevs/AstrBot" />
|
||||
</a>
|
||||
|
||||
また、このプロジェクトの誕生は以下のオープンソースプロジェクトの助けなしには実現できませんでした:
|
||||
|
||||
- [NapNeko/NapCatQQ](https://github.com/NapNeko/NapCatQQ) - 素晴らしい猫猫フレームワーク
|
||||
|
||||
## ⭐ Star History
|
||||
|
||||
> [!TIP]
|
||||
> このプロジェクトがあなたの生活や仕事に役立ったり、このプロジェクトの今後の発展に関心がある場合は、プロジェクトに Star をください。これがこのオープンソースプロジェクトを維持する原動力です <3
|
||||
|
||||
<div align="center">
|
||||
|
||||
[](https://star-history.com/#astrbotdevs/astrbot&Date)
|
||||
|
||||
</div>
|
||||
|
||||
</details>
|
||||
|
||||
_私は、高性能ですから!_
|
||||
@@ -1,26 +0,0 @@
|
||||
from aip import AipContentCensor
|
||||
|
||||
class BaiduJudge:
    """Thin wrapper around Baidu's AIP content-censor client for text moderation."""

    def __init__(self, baidu_configs) -> None:
        """Build the censor client from a config mapping.

        Requires the keys ``app_id``, ``api_key`` and ``secret_key``;
        raises ValueError when any of them is missing.
        """
        required_keys = ('app_id', 'api_key', 'secret_key')
        if not all(k in baidu_configs for k in required_keys):
            raise ValueError("Baidu configs error! 请填写百度内容审核服务相关配置!")
        self.app_id = str(baidu_configs['app_id'])
        self.api_key = baidu_configs['api_key']
        self.secret_key = baidu_configs['secret_key']
        self.client = AipContentCensor(self.app_id, self.api_key, self.secret_key)

    def judge(self, text):
        """Run user-defined text censoring on *text*.

        Returns a ``(compliant, message)`` tuple: ``(True, "合规")`` when the
        service reports conclusionType 1, otherwise ``False`` with a
        human-readable violation summary.
        """
        res = self.client.textCensorUserDefined(text)
        if 'conclusionType' not in res:
            return False, "百度审核服务未知错误"
        if res['conclusionType'] == 1:
            return True, "合规"
        # Non-compliant: summarize every violation entry the service returned.
        if 'data' not in res:
            return False, "百度审核服务未知错误"
        violations = res['data']
        info = f"百度审核服务发现 {len(violations)} 处违规:\n"
        info += "".join(f"{item['msg']};\n" for item in violations)
        info += "\n判断结果:" + res['conclusion']
        return False, info
|
||||
@@ -1,5 +0,0 @@
|
||||
# helloworld
|
||||
|
||||
QQChannelChatGPT项目的测试插件
|
||||
|
||||
A test plugin for QQChannelChatGPT plugin feature
|
||||
@@ -1,65 +0,0 @@
|
||||
from nakuru.entities.components import *
|
||||
from nakuru import (
|
||||
GroupMessage,
|
||||
FriendMessage
|
||||
)
|
||||
from botpy.message import Message, DirectMessage
|
||||
|
||||
class HelloWorldPlugin:
    # Legacy-style plugin: the loader instantiates this class and dispatches
    # every incoming message to `run`. The bare triple-quoted strings between
    # methods are the original author's documentation and are runtime string
    # statements, so they are left untouched.
    """
    初始化函数, 可以选择直接pass
    """
    def __init__(self) -> None:
        print("这是HelloWorld测试插件, 发送 helloworld 即可触发此插件。")

    """
    入口函数,机器人会调用此函数。
    参数规范: message: 消息文本; role: 身份; platform: 消息平台; message_obj: 消息对象
    参数详情: role为admin或者member; platform为qqchan或者gocq; message_obj为nakuru的GroupMessage对象或者FriendMessage对象或者频道的Message, DirectMessage对象。
    返回规范: bool: 是否hit到此插件(所有的消息均会调用每一个载入的插件, 如果没有hit到, 则应返回False)
    Tuple: None或者长度为3的元组。当没有hit到时, 返回None. hit到时, 第1个参数为指令是否调用成功, 第2个参数为返回的消息文本或者gocq的消息链列表, 第3个参数为指令名称
    例子:做一个名为"yuanshen"的插件;当接收到消息为“原神 可莉”, 如果不想要处理此消息,则返回False, None;如果想要处理,但是执行失败了,返回True, tuple([False, "请求失败啦~", "yuanshen"])
    ;执行成功了,返回True, tuple([True, "结果文本", "yuanshen"])
    """
    # Entry point invoked for every message; returns (hit, result_tuple).
    # NOTE: implicitly returns None for any platform other than "gocq"/"qqchan".
    def run(self, message: str, role: str, platform: str, message_obj):

        if platform == "gocq":
            """
            QQ平台指令处理逻辑
            """
            # Reply with text plus an image on the QQ (go-cqhttp) platform.
            img_url = "https://gchat.qpic.cn/gchatpic_new/905617992/720871955-2246763964-C6EE1A52CC668EC982453065C4FA8747/0?term=2&is_origin=0"
            if message == "helloworld":
                return True, tuple([True, [Plain("Hello World!!"), Image.fromURL(url=img_url)], "helloworld"])
            else:
                return False, None
        elif platform == "qqchan":
            """
            频道处理逻辑(频道暂时只支持回复字符串类型的信息,返回的信息都会被转成字符串,如果不想处理某一个平台的信息,直接返回False, None就行)
            """
            # Guild channels only support plain-string replies.
            if message == "helloworld":
                return True, tuple([True, "Hello World!!", "helloworld"])
            else:
                return False, None

    """
    帮助函数,当用户输入 plugin v 插件名称 时,会调用此函数,返回帮助信息
    返回参数要求(必填):dict{
        "name": str, # 插件名称
        "desc": str, # 插件简短描述
        "help": str, # 插件帮助信息
        "version": str, # 插件版本
        "author": str, # 插件作者
    }
    """
    # Static metadata surfaced by the `plugin v helloworld` command.
    def info(self):
        return {
            "name": "helloworld",
            "desc": "测试插件",
            "help": "测试插件, 回复helloworld即可触发",
            "version": "v1.0.1 beta",
            "author": "Soulter"
        }
|
||||
|
||||
|
||||
# 热知识:检测消息开头指令,使用以下方法
|
||||
# if message.startswith("原神"):
|
||||
# pass
|
||||
3
astrbot/__init__.py
Normal file
3
astrbot/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .core.log import LogManager

# Package-level logger shared by all astrbot modules (imported as
# `from astrbot import logger` throughout the codebase).
logger = LogManager.GetLogger(log_name="astrbot")
|
||||
19
astrbot/api/__init__.py
Normal file
19
astrbot/api/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from astrbot import logger
|
||||
from astrbot.core import html_renderer, sp
|
||||
from astrbot.core.agent.tool import FunctionTool, ToolSet
|
||||
from astrbot.core.agent.tool_executor import BaseFunctionToolExecutor
|
||||
from astrbot.core.config.astrbot_config import AstrBotConfig
|
||||
from astrbot.core.star.register import register_agent as agent
|
||||
from astrbot.core.star.register import register_llm_tool as llm_tool
|
||||
|
||||
__all__ = [
|
||||
"AstrBotConfig",
|
||||
"BaseFunctionToolExecutor",
|
||||
"FunctionTool",
|
||||
"ToolSet",
|
||||
"agent",
|
||||
"html_renderer",
|
||||
"llm_tool",
|
||||
"logger",
|
||||
"sp",
|
||||
]
|
||||
54
astrbot/api/all.py
Normal file
54
astrbot/api/all.py
Normal file
@@ -0,0 +1,54 @@
|
||||
from astrbot.core.config.astrbot_config import AstrBotConfig
|
||||
from astrbot import logger
|
||||
from astrbot.core import html_renderer
|
||||
from astrbot.core.star.register import register_llm_tool as llm_tool
|
||||
|
||||
# event
|
||||
from astrbot.core.message.message_event_result import (
|
||||
MessageEventResult,
|
||||
MessageChain,
|
||||
CommandResult,
|
||||
EventResultType,
|
||||
)
|
||||
from astrbot.core.platform import AstrMessageEvent
|
||||
|
||||
# star register
|
||||
from astrbot.core.star.register import (
|
||||
register_command as command,
|
||||
register_command_group as command_group,
|
||||
register_event_message_type as event_message_type,
|
||||
register_regex as regex,
|
||||
register_platform_adapter_type as platform_adapter_type,
|
||||
)
|
||||
from astrbot.core.star.filter.event_message_type import (
|
||||
EventMessageTypeFilter,
|
||||
EventMessageType,
|
||||
)
|
||||
from astrbot.core.star.filter.platform_adapter_type import (
|
||||
PlatformAdapterTypeFilter,
|
||||
PlatformAdapterType,
|
||||
)
|
||||
from astrbot.core.star.register import (
|
||||
register_star as register, # 注册插件(Star)
|
||||
)
|
||||
from astrbot.core.star import Context, Star
|
||||
from astrbot.core.star.config import *
|
||||
|
||||
|
||||
# provider
|
||||
from astrbot.core.provider import Provider, ProviderMetaData
|
||||
from astrbot.core.db.po import Personality
|
||||
|
||||
# platform
|
||||
from astrbot.core.platform import (
|
||||
AstrMessageEvent,
|
||||
Platform,
|
||||
AstrBotMessage,
|
||||
MessageMember,
|
||||
MessageType,
|
||||
PlatformMetadata,
|
||||
)
|
||||
|
||||
from astrbot.core.platform.register import register_platform_adapter
|
||||
|
||||
from .message_components import *
|
||||
17
astrbot/api/event/__init__.py
Normal file
17
astrbot/api/event/__init__.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from astrbot.core.message.message_event_result import (
|
||||
CommandResult,
|
||||
EventResultType,
|
||||
MessageChain,
|
||||
MessageEventResult,
|
||||
ResultContentType,
|
||||
)
|
||||
from astrbot.core.platform import AstrMessageEvent
|
||||
|
||||
__all__ = [
|
||||
"AstrMessageEvent",
|
||||
"CommandResult",
|
||||
"EventResultType",
|
||||
"MessageChain",
|
||||
"MessageEventResult",
|
||||
"ResultContentType",
|
||||
]
|
||||
52
astrbot/api/event/filter/__init__.py
Normal file
52
astrbot/api/event/filter/__init__.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from astrbot.core.star.filter.custom_filter import CustomFilter
|
||||
from astrbot.core.star.filter.event_message_type import (
|
||||
EventMessageType,
|
||||
EventMessageTypeFilter,
|
||||
)
|
||||
from astrbot.core.star.filter.permission import PermissionType, PermissionTypeFilter
|
||||
from astrbot.core.star.filter.platform_adapter_type import (
|
||||
PlatformAdapterType,
|
||||
PlatformAdapterTypeFilter,
|
||||
)
|
||||
from astrbot.core.star.register import register_after_message_sent as after_message_sent
|
||||
from astrbot.core.star.register import register_command as command
|
||||
from astrbot.core.star.register import register_command_group as command_group
|
||||
from astrbot.core.star.register import register_custom_filter as custom_filter
|
||||
from astrbot.core.star.register import register_event_message_type as event_message_type
|
||||
from astrbot.core.star.register import register_llm_tool as llm_tool
|
||||
from astrbot.core.star.register import register_on_astrbot_loaded as on_astrbot_loaded
|
||||
from astrbot.core.star.register import (
|
||||
register_on_decorating_result as on_decorating_result,
|
||||
)
|
||||
from astrbot.core.star.register import register_on_llm_request as on_llm_request
|
||||
from astrbot.core.star.register import register_on_llm_response as on_llm_response
|
||||
from astrbot.core.star.register import register_on_platform_loaded as on_platform_loaded
|
||||
from astrbot.core.star.register import register_permission_type as permission_type
|
||||
from astrbot.core.star.register import (
|
||||
register_platform_adapter_type as platform_adapter_type,
|
||||
)
|
||||
from astrbot.core.star.register import register_regex as regex
|
||||
|
||||
__all__ = [
|
||||
"CustomFilter",
|
||||
"EventMessageType",
|
||||
"EventMessageTypeFilter",
|
||||
"PermissionType",
|
||||
"PermissionTypeFilter",
|
||||
"PlatformAdapterType",
|
||||
"PlatformAdapterTypeFilter",
|
||||
"after_message_sent",
|
||||
"command",
|
||||
"command_group",
|
||||
"custom_filter",
|
||||
"event_message_type",
|
||||
"llm_tool",
|
||||
"on_astrbot_loaded",
|
||||
"on_decorating_result",
|
||||
"on_llm_request",
|
||||
"on_llm_response",
|
||||
"on_platform_loaded",
|
||||
"permission_type",
|
||||
"platform_adapter_type",
|
||||
"regex",
|
||||
]
|
||||
1
astrbot/api/message_components.py
Normal file
1
astrbot/api/message_components.py
Normal file
@@ -0,0 +1 @@
|
||||
from astrbot.core.message.components import *
|
||||
22
astrbot/api/platform/__init__.py
Normal file
22
astrbot/api/platform/__init__.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from astrbot.core.message.components import *
|
||||
from astrbot.core.platform import (
|
||||
AstrBotMessage,
|
||||
AstrMessageEvent,
|
||||
Group,
|
||||
MessageMember,
|
||||
MessageType,
|
||||
Platform,
|
||||
PlatformMetadata,
|
||||
)
|
||||
from astrbot.core.platform.register import register_platform_adapter
|
||||
|
||||
__all__ = [
|
||||
"AstrBotMessage",
|
||||
"AstrMessageEvent",
|
||||
"Group",
|
||||
"MessageMember",
|
||||
"MessageType",
|
||||
"Platform",
|
||||
"PlatformMetadata",
|
||||
"register_platform_adapter",
|
||||
]
|
||||
18
astrbot/api/provider/__init__.py
Normal file
18
astrbot/api/provider/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from astrbot.core.db.po import Personality
|
||||
from astrbot.core.provider import Provider, STTProvider
|
||||
from astrbot.core.provider.entities import (
|
||||
LLMResponse,
|
||||
ProviderMetaData,
|
||||
ProviderRequest,
|
||||
ProviderType,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"LLMResponse",
|
||||
"Personality",
|
||||
"Provider",
|
||||
"ProviderMetaData",
|
||||
"ProviderRequest",
|
||||
"ProviderType",
|
||||
"STTProvider",
|
||||
]
|
||||
7
astrbot/api/star/__init__.py
Normal file
7
astrbot/api/star/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from astrbot.core.star import Context, Star, StarTools
|
||||
from astrbot.core.star.config import *
|
||||
from astrbot.core.star.register import (
|
||||
register_star as register, # 注册插件(Star)
|
||||
)
|
||||
|
||||
__all__ = ["Context", "Star", "StarTools", "register"]
|
||||
7
astrbot/api/util/__init__.py
Normal file
7
astrbot/api/util/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from astrbot.core.utils.session_waiter import (
|
||||
SessionController,
|
||||
SessionWaiter,
|
||||
session_waiter,
|
||||
)
|
||||
|
||||
__all__ = ["SessionController", "SessionWaiter", "session_waiter"]
|
||||
1
astrbot/cli/__init__.py
Normal file
1
astrbot/cli/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# CLI package version; surfaced by `astrbot --version` via click.version_option.
__version__ = "4.7.3"
|
||||
59
astrbot/cli/__main__.py
Normal file
59
astrbot/cli/__main__.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""AstrBot CLI入口"""
|
||||
|
||||
import sys
|
||||
|
||||
import click
|
||||
|
||||
from . import __version__
|
||||
from .commands import conf, init, plug, run
|
||||
|
||||
logo_tmpl = r"""
|
||||
___ _______.___________..______ .______ ______ .___________.
|
||||
/ \ / | || _ \ | _ \ / __ \ | |
|
||||
/ ^ \ | (----`---| |----`| |_) | | |_) | | | | | `---| |----`
|
||||
/ /_\ \ \ \ | | | / | _ < | | | | | |
|
||||
/ _____ \ .----) | | | | |\ \----.| |_) | | `--' | | |
|
||||
/__/ \__\ |_______/ |__| | _| `._____||______/ \______/ |__|
|
||||
"""
|
||||
|
||||
|
||||
@click.group()
@click.version_option(__version__, prog_name="AstrBot")
def cli() -> None:
    """The AstrBot CLI"""
    # Group callback: prints the banner and version before any subcommand runs.
    click.echo(logo_tmpl)
    click.echo("Welcome to AstrBot CLI!")
    click.echo(f"AstrBot CLI version: {__version__}")
|
||||
|
||||
|
||||
@click.command()
@click.argument("command_name", required=False, type=str)
def help(command_name: str | None) -> None:
    """显示命令的帮助信息

    如果提供了 COMMAND_NAME,则显示该命令的详细帮助信息。
    否则,显示通用帮助信息。
    """
    # Uses the current click context so help output reflects the invocation path.
    ctx = click.get_current_context()
    if command_name:
        # 查找指定命令 (look up the named subcommand on the root group)
        command = cli.get_command(ctx, command_name)
        if command:
            # 显示特定命令的帮助信息
            click.echo(command.get_help(ctx))
        else:
            # Unknown name: report and exit non-zero so scripts can detect it.
            click.echo(f"Unknown command: {command_name}")
            sys.exit(1)
    else:
        # 显示通用帮助信息 (fall back to the group's own help text)
        click.echo(cli.get_help(ctx))
|
||||
|
||||
|
||||
# Register every subcommand on the root group.
cli.add_command(init)
cli.add_command(run)
cli.add_command(help)
cli.add_command(plug)
cli.add_command(conf)

if __name__ == "__main__":
    cli()
|
||||
6
astrbot/cli/commands/__init__.py
Normal file
6
astrbot/cli/commands/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from .cmd_conf import conf
|
||||
from .cmd_init import init
|
||||
from .cmd_plug import plug
|
||||
from .cmd_run import run
|
||||
|
||||
__all__ = ["conf", "init", "plug", "run"]
|
||||
209
astrbot/cli/commands/cmd_conf.py
Normal file
209
astrbot/cli/commands/cmd_conf.py
Normal file
@@ -0,0 +1,209 @@
|
||||
import hashlib
|
||||
import json
|
||||
import zoneinfo
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
import click
|
||||
|
||||
from ..utils import check_astrbot_root, get_astrbot_root
|
||||
|
||||
|
||||
def _validate_log_level(value: str) -> str:
|
||||
"""验证日志级别"""
|
||||
value = value.upper()
|
||||
if value not in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]:
|
||||
raise click.ClickException(
|
||||
"日志级别必须是 DEBUG/INFO/WARNING/ERROR/CRITICAL 之一",
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
def _validate_dashboard_port(value: str) -> int:
|
||||
"""验证 Dashboard 端口"""
|
||||
try:
|
||||
port = int(value)
|
||||
if port < 1 or port > 65535:
|
||||
raise click.ClickException("端口必须在 1-65535 范围内")
|
||||
return port
|
||||
except ValueError:
|
||||
raise click.ClickException("端口必须是数字")
|
||||
|
||||
|
||||
def _validate_dashboard_username(value: str) -> str:
|
||||
"""验证 Dashboard 用户名"""
|
||||
if not value:
|
||||
raise click.ClickException("用户名不能为空")
|
||||
return value
|
||||
|
||||
|
||||
def _validate_dashboard_password(value: str) -> str:
|
||||
"""验证 Dashboard 密码"""
|
||||
if not value:
|
||||
raise click.ClickException("密码不能为空")
|
||||
return hashlib.md5(value.encode()).hexdigest()
|
||||
|
||||
|
||||
def _validate_timezone(value: str) -> str:
|
||||
"""验证时区"""
|
||||
try:
|
||||
zoneinfo.ZoneInfo(value)
|
||||
except Exception:
|
||||
raise click.ClickException(f"无效的时区: {value},请使用有效的IANA时区名称")
|
||||
return value
|
||||
|
||||
|
||||
def _validate_callback_api_base(value: str) -> str:
|
||||
"""验证回调接口基址"""
|
||||
if not value.startswith("http://") and not value.startswith("https://"):
|
||||
raise click.ClickException("回调接口基址必须以 http:// 或 https:// 开头")
|
||||
return value
|
||||
|
||||
|
||||
# Whitelist of configuration keys settable from the CLI, mapping each key to
# its validator. A validator takes the raw command-line string and returns the
# normalized value to store, or raises click.ClickException on bad input.
# Dotted keys (e.g. "dashboard.port") address nested config sections.
CONFIG_VALIDATORS: dict[str, Callable[[str], Any]] = {
    "timezone": _validate_timezone,
    "log_level": _validate_log_level,
    "dashboard.port": _validate_dashboard_port,
    "dashboard.username": _validate_dashboard_username,
    "dashboard.password": _validate_dashboard_password,
    "callback_api_base": _validate_callback_api_base,
}
|
||||
|
||||
|
||||
def _load_config() -> dict[str, Any]:
    """Load ``data/cmd_config.json`` from the AstrBot root.

    Creates the file from the packaged defaults when it does not exist.
    Raises click.ClickException when the root is invalid or the JSON is broken.
    """
    root = get_astrbot_root()
    if not check_astrbot_root(root):
        raise click.ClickException(
            f"{root}不是有效的 AstrBot 根目录,如需初始化请使用 astrbot init",
        )

    config_path = root / "data" / "cmd_config.json"
    if not config_path.exists():
        # Deferred import: only needed on the first-run path.
        from astrbot.core.config.default import DEFAULT_CONFIG

        config_path.write_text(
            json.dumps(DEFAULT_CONFIG, ensure_ascii=False, indent=2),
            # utf-8-sig writes a BOM; the read below uses the same codec so
            # the BOM round-trips. Keep the two in sync.
            encoding="utf-8-sig",
        )

    try:
        return json.loads(config_path.read_text(encoding="utf-8-sig"))
    except json.JSONDecodeError as e:
        raise click.ClickException(f"配置文件解析失败: {e!s}")
|
||||
|
||||
|
||||
def _save_config(config: dict[str, Any]) -> None:
    """Write *config* back to ``data/cmd_config.json`` as pretty-printed JSON."""
    config_path = get_astrbot_root() / "data" / "cmd_config.json"

    config_path.write_text(
        json.dumps(config, ensure_ascii=False, indent=2),
        # Same utf-8-sig codec as _load_config so the BOM stays consistent.
        encoding="utf-8-sig",
    )
|
||||
|
||||
|
||||
def _set_nested_item(obj: dict[str, Any], path: str, value: Any) -> None:
|
||||
"""设置嵌套字典中的值"""
|
||||
parts = path.split(".")
|
||||
for part in parts[:-1]:
|
||||
if part not in obj:
|
||||
obj[part] = {}
|
||||
elif not isinstance(obj[part], dict):
|
||||
raise click.ClickException(
|
||||
f"配置路径冲突: {'.'.join(parts[: parts.index(part) + 1])} 不是字典",
|
||||
)
|
||||
obj = obj[part]
|
||||
obj[parts[-1]] = value
|
||||
|
||||
|
||||
def _get_nested_item(obj: dict[str, Any], path: str) -> Any:
|
||||
"""获取嵌套字典中的值"""
|
||||
parts = path.split(".")
|
||||
for part in parts:
|
||||
obj = obj[part]
|
||||
return obj
|
||||
|
||||
|
||||
# Root group for `astrbot conf`. The docstring below doubles as the
# user-facing `--help` text (including the supported-keys list), so it is
# kept verbatim in the original language.
@click.group(name="conf")
def conf():
    """配置管理命令

    支持的配置项:

    - timezone: 时区设置 (例如: Asia/Shanghai)

    - log_level: 日志级别 (DEBUG/INFO/WARNING/ERROR/CRITICAL)

    - dashboard.port: Dashboard 端口

    - dashboard.username: Dashboard 用户名

    - dashboard.password: Dashboard 密码

    - callback_api_base: 回调接口基址
    """
|
||||
|
||||
|
||||
@conf.command(name="set")
@click.argument("key")
@click.argument("value")
def set_config(key: str, value: str):
    """设置配置项的值"""
    # Only whitelisted keys (CONFIG_VALIDATORS) may be set from the CLI.
    if key not in CONFIG_VALIDATORS:
        raise click.ClickException(f"不支持的配置项: {key}")

    config = _load_config()

    try:
        # Read the old value first so it can be echoed next to the new one.
        old_value = _get_nested_item(config, key)
        validated_value = CONFIG_VALIDATORS[key](value)
        _set_nested_item(config, key, validated_value)
        _save_config(config)

        click.echo(f"配置已更新: {key}")
        if key == "dashboard.password":
            # Never echo password material (old or new) to the terminal.
            click.echo(" 原值: ********")
            click.echo(" 新值: ********")
        else:
            click.echo(f" 原值: {old_value}")
            click.echo(f" 新值: {validated_value}")

    except KeyError:
        # _get_nested_item failed: key exists in the whitelist but not in the file.
        raise click.ClickException(f"未知的配置项: {key}")
    except Exception as e:
        # NOTE(review): this also converts validator ClickExceptions into a
        # generic UsageError ("设置配置失败") — confirm that is intended.
        raise click.UsageError(f"设置配置失败: {e!s}")
|
||||
|
||||
|
||||
@conf.command(name="get")
@click.argument("key", required=False)
def get_config(key: str | None = None):
    """获取配置项的值,不提供key则显示所有可配置项"""
    config = _load_config()

    if key:
        if key not in CONFIG_VALIDATORS:
            raise click.ClickException(f"不支持的配置项: {key}")

        try:
            value = _get_nested_item(config, key)
            if key == "dashboard.password":
                # Mask the stored password digest in output.
                value = "********"
            click.echo(f"{key}: {value}")
        except KeyError:
            raise click.ClickException(f"未知的配置项: {key}")
        except Exception as e:
            raise click.UsageError(f"获取配置失败: {e!s}")
    else:
        # No key given: dump every whitelisted setting.
        click.echo("当前配置:")
        # NOTE: the loop variable deliberately reuses (shadows) the `key` parameter.
        for key in CONFIG_VALIDATORS:
            try:
                value = (
                    "********"
                    if key == "dashboard.password"
                    else _get_nested_item(config, key)
                )
                click.echo(f" {key}: {value}")
            except (KeyError, TypeError):
                # Silently skip keys absent from (or malformed in) the file.
                pass
|
||||
56
astrbot/cli/commands/cmd_init.py
Normal file
56
astrbot/cli/commands/cmd_init.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
import click
|
||||
from filelock import FileLock, Timeout
|
||||
|
||||
from ..utils import check_dashboard, get_astrbot_root
|
||||
|
||||
|
||||
async def initialize_astrbot(astrbot_root: Path) -> None:
    """Perform first-run initialization of an AstrBot root directory.

    Marks the directory with a ``.astrbot`` file (after interactive
    confirmation), creates the data sub-directories, and checks/downloads
    the dashboard assets.
    """
    dot_astrbot = astrbot_root / ".astrbot"

    if not dot_astrbot.exists():
        click.echo(f"Current Directory: {astrbot_root}")
        click.echo(
            "如果你确认这是 Astrbot root directory, 你需要在当前目录下创建一个 .astrbot 文件标记该目录为 AstrBot 的数据目录。",
        )
        # abort=True turns a negative answer into click.Abort, stopping init.
        if click.confirm(
            f"请检查当前目录是否正确,确认正确请回车: {astrbot_root}",
            default=True,
            abort=True,
        ):
            dot_astrbot.touch()
            click.echo(f"Created {dot_astrbot}")

    paths = {
        "data": astrbot_root / "data",
        "config": astrbot_root / "data" / "config",
        "plugins": astrbot_root / "data" / "plugins",
        "temp": astrbot_root / "data" / "temp",
    }

    for path in paths.values():
        # Record existence BEFORE mkdir — the original checked afterwards,
        # so (with exist_ok=True) it always reported "Directory exists".
        existed = path.exists()
        path.mkdir(parents=True, exist_ok=True)
        click.echo(f"{'Directory exists' if existed else 'Created'}: {path}")

    await check_dashboard(astrbot_root / "data")
|
||||
|
||||
|
||||
@click.command()
def init() -> None:
    """初始化 AstrBot"""
    click.echo("Initializing AstrBot...")
    astrbot_root = get_astrbot_root()
    # File lock guards against two `astrbot init` runs racing on the same root.
    lock_file = astrbot_root / "astrbot.lock"
    lock = FileLock(lock_file, timeout=5)

    try:
        with lock.acquire():
            # initialize_astrbot is async; drive it to completion here.
            asyncio.run(initialize_astrbot(astrbot_root))
    except Timeout:
        raise click.ClickException("无法获取锁文件,请检查是否有其他实例正在运行")

    except Exception as e:
        raise click.ClickException(f"初始化失败: {e!s}")
|
||||
245
astrbot/cli/commands/cmd_plug.py
Normal file
245
astrbot/cli/commands/cmd_plug.py
Normal file
@@ -0,0 +1,245 @@
|
||||
import re
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
import click
|
||||
|
||||
from ..utils import (
|
||||
PluginStatus,
|
||||
build_plug_list,
|
||||
check_astrbot_root,
|
||||
get_astrbot_root,
|
||||
get_git_repo,
|
||||
manage_plugin,
|
||||
)
|
||||
|
||||
|
||||
# Root group for `astrbot plug`; subcommands (new/list/...) attach below.
# The docstring is the user-facing `--help` text, kept verbatim.
@click.group()
def plug():
    """插件管理"""
|
||||
|
||||
|
||||
def _get_data_path() -> Path:
    """Return the resolved ``data`` directory under a verified AstrBot root.

    Raises click.ClickException when the current root has not been initialized.
    """
    base = get_astrbot_root()
    if not check_astrbot_root(base):
        raise click.ClickException(
            f"{base}不是有效的 AstrBot 根目录,如需初始化请使用 astrbot init",
        )
    return (base / "data").resolve()
|
||||
|
||||
|
||||
def display_plugins(plugins, title=None, color=None):
    """Print a fixed-width table of plugin metadata to the terminal.

    Each entry in *plugins* is a mapping with name/version/status/author/desc
    keys; *title* (optionally colored) is printed above the table when given.
    """
    if title:
        click.echo(click.style(title, fg=color, bold=True))

    header = f"{'名称':<20} {'版本':<10} {'状态':<10} {'作者':<15} {'描述':<30}"
    click.echo(header)
    click.echo("-" * 85)

    for entry in plugins:
        # Truncate long descriptions to keep the table columns aligned.
        description = entry["desc"][:30]
        if len(entry["desc"]) > 30:
            description += "..."
        row = (
            f"{entry['name']:<20} {entry['version']:<10} {entry['status']:<10} "
            f"{entry['author']:<15} {description:<30}"
        )
        click.echo(row)
|
||||
|
||||
|
||||
@plug.command()
@click.argument("name")
def new(name: str):
    """创建新插件"""
    base_path = _get_data_path()
    plug_path = base_path / "plugins" / name

    if plug_path.exists():
        raise click.ClickException(f"插件 {name} 已存在")

    # Interactively collect the plugin metadata.
    author = click.prompt("请输入插件作者", type=str)
    desc = click.prompt("请输入插件描述", type=str)
    version = click.prompt("请输入插件版本", type=str)
    # Accept x.y or x.y.z, with an optional leading "v"/"V".
    if not re.match(r"^\d+\.\d+(\.\d+)?$", version.lower().lstrip("v")):
        raise click.ClickException("版本号必须为 x.y 或 x.y.z 格式")
    repo = click.prompt("请输入插件仓库:", type=str)
    if not repo.startswith("http"):
        raise click.ClickException("仓库地址必须以 http 开头")

    click.echo("下载插件模板...")
    # Scaffold from the official helloworld template repository.
    get_git_repo(
        "https://github.com/Soulter/helloworld",
        plug_path,
    )

    click.echo("重写插件信息...")
    # Rewrite metadata.yaml with the collected answers.
    with open(plug_path / "metadata.yaml", "w", encoding="utf-8") as f:
        f.write(
            f"name: {name}\n"
            f"desc: {desc}\n"
            f"version: {version}\n"
            f"author: {author}\n"
            f"repo: {repo}\n",
        )

    # Rewrite README.md with a minimal skeleton.
    with open(plug_path / "README.md", "w", encoding="utf-8") as f:
        f.write(f"# {name}\n\n{desc}\n\n# 支持\n\n[帮助文档](https://astrbot.app)\n")

    # Rewrite main.py: swap the template's @register(...) line for this
    # plugin's metadata. NOTE(review): relies on the template containing this
    # exact line — if the template changes, replace() silently becomes a no-op.
    with open(plug_path / "main.py", encoding="utf-8") as f:
        content = f.read()

    new_content = content.replace(
        '@register("helloworld", "YourName", "一个简单的 Hello World 插件", "1.0.0")',
        f'@register("{name}", "{author}", "{desc}", "{version}")',
    )

    with open(plug_path / "main.py", "w", encoding="utf-8") as f:
        f.write(new_content)

    click.echo(f"插件 {name} 创建成功")
|
||||
|
||||
|
||||
@plug.command(name="list")
@click.option("--all", "-a", "show_all", is_flag=True, help="列出未安装的插件")
def list_plugins(show_all: bool):
    """列出插件"""
    # Renamed from `list(all)` to avoid shadowing the builtins `list` and `all`;
    # the CLI surface (`astrbot plug list --all/-a`) is unchanged via command
    # name and option destination overrides.
    base_path = _get_data_path()
    plugins = build_plug_list(base_path / "plugins")

    # Plugins present locally but absent from the online registry.
    not_published_plugins = [
        p for p in plugins if p["status"] == PluginStatus.NOT_PUBLISHED
    ]
    if not_published_plugins:
        display_plugins(not_published_plugins, "未发布的插件", "red")

    # Plugins with a newer version published online.
    need_update_plugins = [
        p for p in plugins if p["status"] == PluginStatus.NEED_UPDATE
    ]
    if need_update_plugins:
        display_plugins(need_update_plugins, "需要更新的插件", "yellow")

    # Up-to-date locally installed plugins.
    installed_plugins = [p for p in plugins if p["status"] == PluginStatus.INSTALLED]
    if installed_plugins:
        display_plugins(installed_plugins, "已安装的插件", "green")

    # Online-only plugins, shown only when --all is given.
    not_installed_plugins = [
        p for p in plugins if p["status"] == PluginStatus.NOT_INSTALLED
    ]
    if not_installed_plugins and show_all:
        display_plugins(not_installed_plugins, "未安装的插件", "blue")

    if (
        not any([not_published_plugins, need_update_plugins, installed_plugins])
        and not show_all
    ):
        click.echo("未安装任何插件")
|
||||
|
||||
|
||||
@plug.command()
@click.argument("name")
@click.option("--proxy", help="代理服务器地址")
def install(name: str, proxy: str | None):
    """安装插件"""
    base_path = _get_data_path()
    plug_path = base_path / "plugins"
    plugins = build_plug_list(base_path / "plugins")

    # Locate the named plugin among the not-yet-installed entries.
    candidates = (
        p
        for p in plugins
        if p["name"] == name and p["status"] == PluginStatus.NOT_INSTALLED
    )
    plugin = next(candidates, None)

    if plugin is None:
        raise click.ClickException(f"未找到可安装的插件 {name},可能是不存在或已安装")

    manage_plugin(plugin, plug_path, is_update=False, proxy=proxy)
|
||||
|
||||
|
||||
@plug.command()
@click.argument("name")
def remove(name: str):
    """卸载插件"""
    # Uninstall a locally installed plugin by deleting its directory.
    base_path = _get_data_path()
    plugins = build_plug_list(base_path / "plugins")
    plugin = next((p for p in plugins if p["name"] == name), None)

    # Only plugins with a recorded local_path are actually on disk.
    if not plugin or not plugin.get("local_path"):
        raise click.ClickException(f"插件 {name} 不存在或未安装")

    plugin_path = plugin["local_path"]

    # abort=True makes click exit the command when the user declines.
    click.confirm(f"确定要卸载插件 {name} 吗?", default=False, abort=True)

    try:
        shutil.rmtree(plugin_path)
        click.echo(f"插件 {name} 已卸载")
    except Exception as e:
        raise click.ClickException(f"卸载插件 {name} 失败: {e}")
|
||||
|
||||
|
||||
@plug.command()
@click.argument("name", required=False)
@click.option("--proxy", help="Github代理地址")
def update(name: str, proxy: str | None):
    """更新插件"""
    # With NAME: update that single plugin; without: update every outdated one.
    base_path = _get_data_path()
    plug_path = base_path / "plugins"
    plugins = build_plug_list(base_path / "plugins")

    if name:
        # The plugin must exist locally and be flagged NEED_UPDATE.
        plugin = next(
            (
                p
                for p in plugins
                if p["name"] == name and p["status"] == PluginStatus.NEED_UPDATE
            ),
            None,
        )

        if not plugin:
            raise click.ClickException(f"插件 {name} 不需要更新或无法更新")

        manage_plugin(plugin, plug_path, is_update=True, proxy=proxy)
    else:
        need_update_plugins = [
            p for p in plugins if p["status"] == PluginStatus.NEED_UPDATE
        ]

        if not need_update_plugins:
            click.echo("没有需要更新的插件")
            return

        click.echo(f"发现 {len(need_update_plugins)} 个插件需要更新")
        for plugin in need_update_plugins:
            plugin_name = plugin["name"]
            click.echo(f"正在更新插件 {plugin_name}...")
            manage_plugin(plugin, plug_path, is_update=True, proxy=proxy)
|
||||
|
||||
|
||||
@plug.command()
@click.argument("query")
def search(query: str):
    """搜索插件"""
    base_path = _get_data_path()
    plugins = build_plug_list(base_path / "plugins")

    # Case-insensitive substring match against name, description and author.
    # The lowered query is hoisted out of the comprehension (it was recomputed
    # three times per plugin).
    q = query.lower()
    matched_plugins = [
        p
        for p in plugins
        if q in p["name"].lower()
        or q in p["desc"].lower()
        or q in p["author"].lower()
    ]

    if not matched_plugins:
        click.echo(f"未找到匹配 '{query}' 的插件")
        return

    display_plugins(matched_plugins, f"搜索结果: '{query}'", "cyan")
|
||||
62
astrbot/cli/commands/cmd_run.py
Normal file
62
astrbot/cli/commands/cmd_run.py
Normal file
@@ -0,0 +1,62 @@
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
|
||||
import click
|
||||
from filelock import FileLock, Timeout
|
||||
|
||||
from ..utils import check_astrbot_root, check_dashboard, get_astrbot_root
|
||||
|
||||
|
||||
async def run_astrbot(astrbot_root: Path):
    """Run the AstrBot core until it stops."""
    # Imports are deferred so they resolve only after the caller has set
    # ASTRBOT_ROOT and sys.path (see the `run` command below).
    from astrbot.core import LogBroker, LogManager, db_helper, logger
    from astrbot.core.initial_loader import InitialLoader

    # Ensure the dashboard assets exist (may prompt and download).
    await check_dashboard(astrbot_root / "data")

    # Route core logging through a broker queue.
    log_broker = LogBroker()
    LogManager.set_queue_handler(logger, log_broker)
    db = db_helper

    core_lifecycle = InitialLoader(db, log_broker)

    # Blocks until the core lifecycle finishes.
    await core_lifecycle.start()
|
||||
|
||||
|
||||
@click.option("--reload", "-r", is_flag=True, help="插件自动重载")
|
||||
@click.option("--port", "-p", help="Astrbot Dashboard端口", required=False, type=str)
|
||||
@click.command()
|
||||
def run(reload: bool, port: str) -> None:
|
||||
"""运行 AstrBot"""
|
||||
try:
|
||||
os.environ["ASTRBOT_CLI"] = "1"
|
||||
astrbot_root = get_astrbot_root()
|
||||
|
||||
if not check_astrbot_root(astrbot_root):
|
||||
raise click.ClickException(
|
||||
f"{astrbot_root}不是有效的 AstrBot 根目录,如需初始化请使用 astrbot init",
|
||||
)
|
||||
|
||||
os.environ["ASTRBOT_ROOT"] = str(astrbot_root)
|
||||
sys.path.insert(0, str(astrbot_root))
|
||||
|
||||
if port:
|
||||
os.environ["DASHBOARD_PORT"] = port
|
||||
|
||||
if reload:
|
||||
click.echo("启用插件自动重载")
|
||||
os.environ["ASTRBOT_RELOAD"] = "1"
|
||||
|
||||
lock_file = astrbot_root / "astrbot.lock"
|
||||
lock = FileLock(lock_file, timeout=5)
|
||||
with lock.acquire():
|
||||
asyncio.run(run_astrbot(astrbot_root))
|
||||
except KeyboardInterrupt:
|
||||
click.echo("AstrBot 已关闭...")
|
||||
except Timeout:
|
||||
raise click.ClickException("无法获取锁文件,请检查是否有其他实例正在运行")
|
||||
except Exception as e:
|
||||
raise click.ClickException(f"运行时出现错误: {e}\n{traceback.format_exc()}")
|
||||
18
astrbot/cli/utils/__init__.py
Normal file
18
astrbot/cli/utils/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from .basic import (
|
||||
check_astrbot_root,
|
||||
check_dashboard,
|
||||
get_astrbot_root,
|
||||
)
|
||||
from .plugin import PluginStatus, build_plug_list, get_git_repo, manage_plugin
|
||||
from .version_comparator import VersionComparator
|
||||
|
||||
__all__ = [
|
||||
"PluginStatus",
|
||||
"VersionComparator",
|
||||
"build_plug_list",
|
||||
"check_astrbot_root",
|
||||
"check_dashboard",
|
||||
"get_astrbot_root",
|
||||
"get_git_repo",
|
||||
"manage_plugin",
|
||||
]
|
||||
76
astrbot/cli/utils/basic.py
Normal file
76
astrbot/cli/utils/basic.py
Normal file
@@ -0,0 +1,76 @@
|
||||
from pathlib import Path
|
||||
|
||||
import click
|
||||
|
||||
|
||||
def check_astrbot_root(path: str | Path) -> bool:
|
||||
"""检查路径是否为 AstrBot 根目录"""
|
||||
if not isinstance(path, Path):
|
||||
path = Path(path)
|
||||
if not path.exists() or not path.is_dir():
|
||||
return False
|
||||
if not (path / ".astrbot").exists():
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def get_astrbot_root() -> Path:
    """Return the AstrBot root, i.e. the current working directory."""
    cwd = Path.cwd()
    return cwd
|
||||
|
||||
|
||||
async def check_dashboard(astrbot_root: Path) -> None:
    """Check whether the dashboard is installed; install or update it if needed."""
    # NOTE(review): callers pass the *data* directory as astrbot_root here
    # (see the init/run commands), and archives are extracted into it — confirm
    # the parameter name is just misleading rather than wrong.
    from astrbot.core.config.default import VERSION
    from astrbot.core.utils.io import download_dashboard, get_dashboard_version

    from .version_comparator import VersionComparator

    try:
        dashboard_version = await get_dashboard_version()
        match dashboard_version:
            case None:
                # Dashboard missing: offer installation (abort=True exits on refusal).
                click.echo("未安装管理面板")
                if click.confirm(
                    "是否安装管理面板?",
                    default=True,
                    abort=True,
                ):
                    click.echo("正在安装管理面板...")
                    await download_dashboard(
                        path="data/dashboard.zip",
                        extract_path=str(astrbot_root),
                        version=f"v{VERSION}",
                        latest=False,
                    )
                    click.echo("管理面板安装完成")

            case str():
                # Dashboard present: re-download only when it is older than core.
                if VersionComparator.compare_version(VERSION, dashboard_version) <= 0:
                    click.echo("管理面板已是最新版本")
                    return
                try:
                    # Display text after the first "v" of the reported version.
                    version = dashboard_version.split("v")[1]
                    click.echo(f"管理面板版本: {version}")
                    await download_dashboard(
                        path="data/dashboard.zip",
                        extract_path=str(astrbot_root),
                        version=f"v{VERSION}",
                        latest=False,
                    )
                except Exception as e:
                    click.echo(f"下载管理面板失败: {e}")
                    return
    except FileNotFoundError:
        # get_dashboard_version raised FileNotFoundError — presumably the
        # directory does not exist yet (TODO confirm); do a fresh install.
        click.echo("初始化管理面板目录...")
        try:
            await download_dashboard(
                path=str(astrbot_root / "dashboard.zip"),
                extract_path=str(astrbot_root),
                version=f"v{VERSION}",
                latest=False,
            )
            click.echo("管理面板初始化完成")
        except Exception as e:
            click.echo(f"下载管理面板失败: {e}")
            return
|
||||
246
astrbot/cli/utils/plugin.py
Normal file
246
astrbot/cli/utils/plugin.py
Normal file
@@ -0,0 +1,246 @@
|
||||
import shutil
|
||||
import tempfile
|
||||
from enum import Enum
|
||||
from io import BytesIO
|
||||
from pathlib import Path
|
||||
from zipfile import ZipFile
|
||||
|
||||
import click
|
||||
import httpx
|
||||
import yaml
|
||||
|
||||
from .version_comparator import VersionComparator
|
||||
|
||||
|
||||
class PluginStatus(str, Enum):
    """Plugin lifecycle status; values are the labels rendered in CLI tables."""

    INSTALLED = "已安装"  # present locally, up to date
    NEED_UPDATE = "需更新"  # present locally, newer version published online
    NOT_INSTALLED = "未安装"  # only available in the online registry
    NOT_PUBLISHED = "未发布"  # present locally but absent from the registry
|
||||
|
||||
|
||||
def get_git_repo(url: str, target_path: Path, proxy: str | None = None):
    """Download a GitHub repository archive and extract it to *target_path*.

    Prefers the newest release zipball; falls back to the default branch, then
    to the raw URL. Any existing *target_path* is replaced.
    """
    temp_dir = Path(tempfile.mkdtemp())
    try:
        # Derive "<author>/<repo>" from the last two URL segments.
        repo_namespace = url.split("/")[-2:]
        author = repo_namespace[0]
        repo = repo_namespace[1]

        # Try to fetch the newest release first.
        release_url = f"https://api.github.com/repos/{author}/{repo}/releases"
        try:
            with httpx.Client(
                proxy=proxy if proxy else None,
                follow_redirects=True,
            ) as client:
                resp = client.get(release_url)
                resp.raise_for_status()
                releases = resp.json()

                if releases:
                    # Use the most recent release archive.
                    download_url = releases[0]["zipball_url"]
                else:
                    # No releases: use the default branch archive.
                    click.echo(f"正在从默认分支下载 {author}/{repo}")
                    download_url = f"https://github.com/{author}/{repo}/archive/refs/heads/master.zip"
        except Exception as e:
            click.echo(f"获取 release 信息失败: {e},将直接使用提供的 URL")
            download_url = url

        # Mirror/proxy scheme: "<proxy>/<original URL>".
        if proxy:
            download_url = f"{proxy}/{download_url}"

        # Download and extract the archive.
        with httpx.Client(
            proxy=proxy if proxy else None,
            follow_redirects=True,
        ) as client:
            resp = client.get(download_url)
            if (
                resp.status_code == 404
                and "archive/refs/heads/master.zip" in download_url
            ):
                # Repos whose default branch is "main" 404 on master.zip.
                alt_url = download_url.replace("master.zip", "main.zip")
                click.echo("master 分支不存在,尝试下载 main 分支")
                resp = client.get(alt_url)
                resp.raise_for_status()
            else:
                resp.raise_for_status()
            zip_content = BytesIO(resp.content)
            with ZipFile(zip_content) as z:
                z.extractall(temp_dir)
                namelist = z.namelist()
                # GitHub archives wrap all content in a single root folder.
                root_dir = Path(namelist[0]).parts[0] if namelist else ""
                if target_path.exists():
                    shutil.rmtree(target_path)
                shutil.move(temp_dir / root_dir, target_path)
    finally:
        # Best-effort cleanup of the scratch directory.
        if temp_dir.exists():
            shutil.rmtree(temp_dir, ignore_errors=True)
|
||||
|
||||
|
||||
def load_yaml_metadata(plugin_dir: Path) -> dict:
    """Load plugin metadata from the directory's metadata.yaml.

    Args:
        plugin_dir: Path to the plugin directory.

    Returns:
        dict: Parsed metadata, or an empty dict when the file is missing,
        empty, or unreadable.
    """
    yaml_path = plugin_dir / "metadata.yaml"
    if not yaml_path.exists():
        return {}
    try:
        data = yaml.safe_load(yaml_path.read_text(encoding="utf-8"))
    except Exception as e:
        # Report the parse/read failure but keep the caller going.
        click.echo(f"读取 {yaml_path} 失败: {e}", err=True)
        return {}
    return data or {}
|
||||
|
||||
|
||||
def build_plug_list(plugins_dir: Path) -> list:
    """Build the plugin list, merging local and online plugin information.

    Args:
        plugins_dir (Path): Path to the local plugins directory.

    Returns:
        list: Plugin-info dicts with keys
        name/desc/version/author/repo/status/local_path.

    """
    # Gather local plugin info.
    result = []
    if plugins_dir.exists():
        for plugin_name in [d.name for d in plugins_dir.glob("*") if d.is_dir()]:
            plugin_dir = plugins_dir / plugin_name

            # Load metadata from metadata.yaml.
            metadata = load_yaml_metadata(plugin_dir)

            # Normalize the alternate "description" key to "desc".
            if "desc" not in metadata and "description" in metadata:
                metadata["desc"] = metadata["description"]

            # Only keep plugins whose metadata is complete.
            if metadata and all(
                k in metadata for k in ["name", "desc", "version", "author", "repo"]
            ):
                result.append(
                    {
                        "name": str(metadata.get("name", "")),
                        "desc": str(metadata.get("desc", "")),
                        "version": str(metadata.get("version", "")),
                        "author": str(metadata.get("author", "")),
                        "repo": str(metadata.get("repo", "")),
                        "status": PluginStatus.INSTALLED,
                        "local_path": str(plugin_dir),
                    },
                )

    # Fetch the online plugin registry (best effort; failures are reported only).
    online_plugins = []
    try:
        with httpx.Client() as client:
            resp = client.get("https://api.soulter.top/astrbot/plugins")
            resp.raise_for_status()
            data = resp.json()
            for plugin_id, plugin_info in data.items():
                online_plugins.append(
                    {
                        "name": str(plugin_id),
                        "desc": str(plugin_info.get("desc", "")),
                        "version": str(plugin_info.get("version", "")),
                        "author": str(plugin_info.get("author", "")),
                        "repo": str(plugin_info.get("repo", "")),
                        "status": PluginStatus.NOT_INSTALLED,
                        "local_path": None,
                    },
                )
    except Exception as e:
        click.echo(f"获取在线插件列表失败: {e}", err=True)

    # Reconcile local plugins against the registry and update their status.
    online_plugin_names = {plugin["name"] for plugin in online_plugins}
    for local_plugin in result:
        if local_plugin["name"] in online_plugin_names:
            # Find the matching online entry.
            online_plugin = next(
                p for p in online_plugins if p["name"] == local_plugin["name"]
            )
            if (
                VersionComparator.compare_version(
                    local_plugin["version"],
                    online_plugin["version"],
                )
                < 0
            ):
                local_plugin["status"] = PluginStatus.NEED_UPDATE
        else:
            # Installed locally but not published online.
            local_plugin["status"] = PluginStatus.NOT_PUBLISHED

    # Append registry plugins that are not installed locally.
    for online_plugin in online_plugins:
        if not any(plugin["name"] == online_plugin["name"] for plugin in result):
            result.append(online_plugin)

    return result
|
||||
|
||||
|
||||
def manage_plugin(
    plugin: dict,
    plugins_dir: Path,
    is_update: bool = False,
    proxy: str | None = None,
) -> None:
    """Install or update a plugin.

    Args:
        plugin (dict): Plugin info dict ("name", "repo", optionally "local_path").
        plugins_dir (Path): Directory that holds installed plugins.
        is_update (bool, optional): Whether this is an update. Defaults to False.
        proxy (str, optional): Proxy server address.

    Raises:
        click.ClickException: when updating a plugin that is not installed, or
        when the download fails (after rolling back to the backup).

    """
    plugin_name = plugin["name"]
    repo_url = plugin["repo"]

    # For updates, reuse the recorded install location when available.
    if is_update and plugin.get("local_path"):
        target_path = Path(plugin["local_path"])
    else:
        target_path = plugins_dir / plugin_name

    # Updates keep a backup copy so a failed download can be rolled back.
    backup_path = Path(f"{target_path}_backup") if is_update else None

    # An update requires an existing installation.
    if is_update and not target_path.exists():
        raise click.ClickException(f"插件 {plugin_name} 未安装,无法更新")

    # Replace any stale backup, then snapshot the current installation.
    if is_update and backup_path is not None and backup_path.exists():
        shutil.rmtree(backup_path)
    if is_update and backup_path is not None:
        shutil.copytree(target_path, backup_path)

    try:
        click.echo(
            f"正在从 {repo_url} {'更新' if is_update else '下载'}插件 {plugin_name}...",
        )
        get_git_repo(repo_url, target_path, proxy)

        # Success: the backup is no longer needed.
        if is_update and backup_path is not None and backup_path.exists():
            shutil.rmtree(backup_path)
        click.echo(f"插件 {plugin_name} {'更新' if is_update else '安装'}成功")
    except Exception as e:
        # Failure: remove the partial download and restore the backup.
        if target_path.exists():
            shutil.rmtree(target_path, ignore_errors=True)
        if is_update and backup_path is not None and backup_path.exists():
            shutil.move(backup_path, target_path)
        raise click.ClickException(
            f"{'更新' if is_update else '安装'}插件 {plugin_name} 时出错: {e}",
        )
|
||||
90
astrbot/cli/utils/version_comparator.py
Normal file
90
astrbot/cli/utils/version_comparator.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""拷贝自 astrbot.core.utils.version_comparator"""
|
||||
|
||||
import re
|
||||
|
||||
|
||||
class VersionComparator:
    """Semver-style version comparison utilities (see https://semver.org)."""

    @staticmethod
    def compare_version(v1: str, v2: str) -> int:
        """根据 Semver 语义版本规范来比较版本号的大小。支持不仅局限于 3 个数字的版本号,并处理预发布标签。

        参考: https://semver.org/lang/zh-CN/

        返回 1 表示 v1 > v2,返回 -1 表示 v1 < v2,返回 0 表示 v1 = v2。
        """
        # Strip only a *leading* "v"/"V". The previous replace("v", "") removed
        # every "v" in the string, corrupting pre-release tags such as
        # "1.0.0-dev" (which became "1.0.0-de").
        v1 = v1.lower().removeprefix("v")
        v2 = v2.lower().removeprefix("v")

        def split_version(version):
            # Returns ([numeric parts], prerelease list or None); build
            # metadata (after "+") is ignored per semver when comparing.
            match = re.match(
                r"^([0-9]+(?:\.[0-9]+)*)(?:-([0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*))?(?:\+(.+))?$",
                version,
            )
            if not match:
                return [], None
            major_minor_patch = match.group(1).split(".")
            prerelease = match.group(2)
            parts = [int(x) for x in major_minor_patch]
            prerelease = VersionComparator._split_prerelease(prerelease)
            return parts, prerelease

        v1_parts, v1_prerelease = split_version(v1)
        v2_parts, v2_prerelease = split_version(v2)

        # Compare the dotted numeric parts, padding the shorter with zeros.
        length = max(len(v1_parts), len(v2_parts))
        v1_parts.extend([0] * (length - len(v1_parts)))
        v2_parts.extend([0] * (length - len(v2_parts)))

        for i in range(length):
            if v1_parts[i] > v2_parts[i]:
                return 1
            if v1_parts[i] < v2_parts[i]:
                return -1

        # Numeric parts equal: a release outranks any of its pre-releases.
        if v1_prerelease is None and v2_prerelease is not None:
            return 1
        if v1_prerelease is not None and v2_prerelease is None:
            return -1
        if v1_prerelease is not None and v2_prerelease is not None:
            len_pre = max(len(v1_prerelease), len(v2_prerelease))
            for i in range(len_pre):
                p1 = v1_prerelease[i] if i < len(v1_prerelease) else None
                p2 = v2_prerelease[i] if i < len(v2_prerelease) else None

                # A shorter pre-release field list sorts lower (per semver).
                if p1 is None and p2 is not None:
                    return -1
                if p1 is not None and p2 is None:
                    return 1
                # Numeric identifiers sort below alphanumeric ones.
                if isinstance(p1, int) and isinstance(p2, str):
                    return -1
                if isinstance(p1, str) and isinstance(p2, int):
                    return 1
                if isinstance(p1, int) and isinstance(p2, int):
                    if p1 > p2:
                        return 1
                    if p1 < p2:
                        return -1
                elif isinstance(p1, str) and isinstance(p2, str):
                    if p1 > p2:
                        return 1
                    if p1 < p2:
                        return -1
            return 0  # identical pre-release tags

        return 0  # numeric parts and pre-release tags all equal

    @staticmethod
    def _split_prerelease(prerelease):
        """Split a pre-release tag on "." into ints (numeric) and strings."""
        if not prerelease:
            return None
        return [
            int(part) if part.isdigit() else part for part in prerelease.split(".")
        ]
|
||||
31
astrbot/core/__init__.py
Normal file
31
astrbot/core/__init__.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import os

from astrbot.core.config import AstrBotConfig
from astrbot.core.config.default import DB_PATH
from astrbot.core.db.sqlite import SQLiteDatabase
from astrbot.core.file_token_service import FileTokenService
from astrbot.core.utils.pip_installer import PipInstaller
from astrbot.core.utils.shared_preferences import SharedPreferences
from astrbot.core.utils.t2i.renderer import HtmlRenderer

from .log import LogBroker, LogManager  # noqa
from .utils.astrbot_path import get_astrbot_data_path

# Ensure the data directory exists before any component touches it.
os.makedirs(get_astrbot_data_path(), exist_ok=True)

# NOTE(review): os.getenv returns a *string* when the variable is set, so
# DEMO_MODE is either False or a str — truthiness checks work, `== True` does not.
DEMO_MODE = os.getenv("DEMO_MODE", False)

# Module-level singletons shared across the core.
astrbot_config = AstrBotConfig()
t2i_base_url = astrbot_config.get("t2i_endpoint", "https://t2i.soulter.top/text2img")
html_renderer = HtmlRenderer(t2i_base_url)
logger = LogManager.GetLogger(log_name="astrbot")
db_helper = SQLiteDatabase(DB_PATH)
# Simple preference store; should eventually move into the database/config.
sp = SharedPreferences(db_helper=db_helper)
# File token service.
file_token_service = FileTokenService()
pip_installer = PipInstaller(
    astrbot_config.get("pip_install_arg", ""),
    astrbot_config.get("pypi_index_url", None),
)
|
||||
14
astrbot/core/agent/agent.py
Normal file
14
astrbot/core/agent/agent.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Generic
|
||||
|
||||
from .hooks import BaseAgentRunHooks
|
||||
from .run_context import TContext
|
||||
from .tool import FunctionTool
|
||||
|
||||
|
||||
@dataclass
class Agent(Generic[TContext]):
    """Declarative description of an agent, parameterized by context type."""

    # Agent name; used in tool naming elsewhere (e.g. "transfer_to_<name>").
    name: str
    # Free-form instructions for the agent; optional.
    instructions: str | None = None
    # Tools available to the agent, by name or as FunctionTool instances.
    tools: list[str | FunctionTool] | None = None
    # Optional lifecycle hooks invoked during a run.
    run_hooks: BaseAgentRunHooks[TContext] | None = None
|
||||
38
astrbot/core/agent/handoff.py
Normal file
38
astrbot/core/agent/handoff.py
Normal file
@@ -0,0 +1,38 @@
|
||||
from typing import Generic
|
||||
|
||||
from .agent import Agent
|
||||
from .run_context import TContext
|
||||
from .tool import FunctionTool
|
||||
|
||||
|
||||
class HandoffTool(FunctionTool, Generic[TContext]):
    """Handoff tool for delegating tasks to another agent."""

    def __init__(
        self,
        agent: Agent[TContext],
        parameters: dict | None = None,
        **kwargs,
    ):
        """Build a "transfer_to_<agent>" tool wrapping *agent*.

        Args:
            agent: The agent that requests are handed off to.
            parameters: Optional JSON-schema parameters; defaults to a single
                string "input" field.
            **kwargs: Forwarded to FunctionTool.
        """
        self.agent = agent
        super().__init__(
            name=f"transfer_to_{agent.name}",
            parameters=parameters or self.default_parameters(),
            description=agent.instructions or self.default_description(agent.name),
            **kwargs,
        )

    def default_parameters(self) -> dict:
        """JSON schema used when no explicit parameters are supplied."""
        return {
            "type": "object",
            "properties": {
                "input": {
                    "type": "string",
                    "description": "The input to be handed off to another agent. This should be a clear and concise request or task.",
                },
            },
        }

    def default_description(self, agent_name: str | None) -> str:
        """Fallback tool description used when the agent has no instructions.

        Fix: previously interpolated ``self.name`` (the "transfer_to_..." tool
        name) and ignored the normalized *agent_name* it had just computed;
        now uses *agent_name* so the description names the target agent.
        """
        agent_name = agent_name or "another"
        return f"Delegate tasks to {agent_name} agent to handle the request."
|
||||
30
astrbot/core/agent/hooks.py
Normal file
30
astrbot/core/agent/hooks.py
Normal file
@@ -0,0 +1,30 @@
|
||||
from typing import Generic
|
||||
|
||||
import mcp
|
||||
|
||||
from astrbot.core.agent.tool import FunctionTool
|
||||
from astrbot.core.provider.entities import LLMResponse
|
||||
|
||||
from .run_context import ContextWrapper, TContext
|
||||
|
||||
|
||||
class BaseAgentRunHooks(Generic[TContext]):
    """Lifecycle hooks for an agent run.

    All hooks are async no-ops by default; subclasses override the ones they
    need. TContext parameterizes the user context carried by ContextWrapper.
    """

    # Hook invoked at the start of an agent run.
    async def on_agent_begin(self, run_context: ContextWrapper[TContext]): ...
    # Hook invoked before a tool call, with the tool and its parsed arguments.
    async def on_tool_start(
        self,
        run_context: ContextWrapper[TContext],
        tool: FunctionTool,
        tool_args: dict | None,
    ): ...
    # Hook invoked after a tool call; tool_result may be None.
    async def on_tool_end(
        self,
        run_context: ContextWrapper[TContext],
        tool: FunctionTool,
        tool_args: dict | None,
        tool_result: mcp.types.CallToolResult | None,
    ): ...
    # Hook invoked with the final LLM response when the agent finishes.
    async def on_agent_done(
        self,
        run_context: ContextWrapper[TContext],
        llm_response: LLMResponse,
    ): ...
|
||||
385
astrbot/core/agent/mcp_client.py
Normal file
385
astrbot/core/agent/mcp_client.py
Normal file
@@ -0,0 +1,385 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from contextlib import AsyncExitStack
|
||||
from datetime import timedelta
|
||||
from typing import Generic
|
||||
|
||||
from tenacity import (
|
||||
before_sleep_log,
|
||||
retry,
|
||||
retry_if_exception_type,
|
||||
stop_after_attempt,
|
||||
wait_exponential,
|
||||
)
|
||||
|
||||
from astrbot import logger
|
||||
from astrbot.core.agent.run_context import ContextWrapper
|
||||
from astrbot.core.utils.log_pipe import LogPipe
|
||||
|
||||
from .run_context import TContext
|
||||
from .tool import FunctionTool
|
||||
|
||||
try:
|
||||
import anyio
|
||||
import mcp
|
||||
from mcp.client.sse import sse_client
|
||||
except (ModuleNotFoundError, ImportError):
|
||||
logger.warning(
|
||||
"Warning: Missing 'mcp' dependency, MCP services will be unavailable."
|
||||
)
|
||||
|
||||
try:
|
||||
from mcp.client.streamable_http import streamablehttp_client
|
||||
except (ModuleNotFoundError, ImportError):
|
||||
logger.warning(
|
||||
"Warning: Missing 'mcp' dependency or MCP library version too old, Streamable HTTP connection unavailable.",
|
||||
)
|
||||
|
||||
|
||||
def _prepare_config(config: dict) -> dict:
|
||||
"""Prepare configuration, handle nested format"""
|
||||
if config.get("mcpServers"):
|
||||
first_key = next(iter(config["mcpServers"]))
|
||||
config = config["mcpServers"][first_key]
|
||||
config.pop("active", None)
|
||||
return config
|
||||
|
||||
|
||||
async def _quick_test_mcp_connection(config: dict) -> tuple[bool, str]:
    """Quick test MCP server connectivity.

    Returns:
        tuple[bool, str]: (ok, error). ok is True on HTTP 200; otherwise error
        holds a short description of the failure.
    """
    import aiohttp

    # Shallow copy because _prepare_config pops the "active" key in place.
    cfg = _prepare_config(config.copy())

    url = cfg["url"]
    headers = cfg.get("headers", {})
    timeout = cfg.get("timeout", 10)

    try:
        # "transport" is preferred; "type" is the alternate field name.
        if "transport" in cfg:
            transport_type = cfg["transport"]
        elif "type" in cfg:
            transport_type = cfg["type"]
        else:
            raise Exception("MCP connection config missing transport or type field")

        async with aiohttp.ClientSession() as session:
            if transport_type == "streamable_http":
                # Streamable HTTP servers are probed with a minimal JSON-RPC
                # "initialize" POST request.
                test_payload = {
                    "jsonrpc": "2.0",
                    "method": "initialize",
                    "id": 0,
                    "params": {
                        "protocolVersion": "2024-11-05",
                        "capabilities": {},
                        "clientInfo": {"name": "test-client", "version": "1.2.3"},
                    },
                }
                async with session.post(
                    url,
                    headers={
                        **headers,
                        "Content-Type": "application/json",
                        "Accept": "application/json, text/event-stream",
                    },
                    json=test_payload,
                    timeout=aiohttp.ClientTimeout(total=timeout),
                ) as response:
                    if response.status == 200:
                        return True, ""
                    return False, f"HTTP {response.status}: {response.reason}"
            else:
                # Other transports (e.g. SSE): a plain GET suffices as a probe.
                async with session.get(
                    url,
                    headers={
                        **headers,
                        "Accept": "application/json, text/event-stream",
                    },
                    timeout=aiohttp.ClientTimeout(total=timeout),
                ) as response:
                    if response.status == 200:
                        return True, ""
                    return False, f"HTTP {response.status}: {response.reason}"

    except asyncio.TimeoutError:
        return False, f"Connection timeout: {timeout} seconds"
    except Exception as e:
        return False, f"{e!s}"
|
||||
|
||||
|
||||
class MCPClient:
    """Manages one connection to an MCP server (stdio, SSE, or streamable HTTP).

    Holds the active ``mcp.ClientSession``, the tools discovered on the server,
    and the original connection configuration so the client can transparently
    reconnect when the underlying stream is closed.
    """

    def __init__(self):
        # Initialize session and client objects
        self.session: mcp.ClientSession | None = None
        self.exit_stack = AsyncExitStack()
        self._old_exit_stacks: list[AsyncExitStack] = []  # Track old stacks for cleanup

        self.name: str | None = None
        self.active: bool = True
        self.tools: list[mcp.Tool] = []  # populated by list_tools_and_save()
        self.server_errlogs: list[str] = []  # error log lines reported by the server
        self.running_event = asyncio.Event()

        # Store connection config for reconnection
        self._mcp_server_config: dict | None = None
        self._server_name: str | None = None
        self._reconnect_lock = asyncio.Lock()  # Lock for thread-safe reconnection
        self._reconnecting: bool = False  # For logging and debugging

    async def connect_to_server(self, mcp_server_config: dict, name: str):
        """Connect to MCP server.

        If a `url` key exists in the config:
        1. When transport is specified as `streamable_http`, use Streamable HTTP connection.
        2. When transport is specified as `sse` (or any value other than
           `streamable_http`), use SSE connection.
        3. When neither a `transport` nor a `type` field is present, an
           Exception is raised.
        Without a `url` key, the server is launched as a stdio subprocess.

        Args:
            mcp_server_config (dict): Configuration for the MCP server. See https://modelcontextprotocol.io/quickstart/server
            name (str): Human-readable server name, used in logs.

        Raises:
            Exception: if the quick connectivity probe fails or the config is
                missing the transport/type field.
        """
        # Store config for reconnection
        self._mcp_server_config = mcp_server_config
        self._server_name = name

        cfg = _prepare_config(mcp_server_config.copy())

        def logging_callback(msg: str):
            # Handle MCP service error logs
            print(f"MCP Server {name} Error: {msg}")
            self.server_errlogs.append(msg)

        if "url" in cfg:
            # Fail fast with a readable error before opening the real transport.
            success, error_msg = await _quick_test_mcp_connection(cfg)
            if not success:
                raise Exception(error_msg)

            if "transport" in cfg:
                transport_type = cfg["transport"]
            elif "type" in cfg:
                transport_type = cfg["type"]
            else:
                raise Exception("MCP connection config missing transport or type field")

            if transport_type != "streamable_http":
                # SSE transport method
                self._streams_context = sse_client(
                    url=cfg["url"],
                    headers=cfg.get("headers", {}),
                    timeout=cfg.get("timeout", 5),
                    sse_read_timeout=cfg.get("sse_read_timeout", 60 * 5),
                )
                streams = await self.exit_stack.enter_async_context(
                    self._streams_context,
                )

                # Create a new client session
                read_timeout = timedelta(seconds=cfg.get("session_read_timeout", 60))
                self.session = await self.exit_stack.enter_async_context(
                    mcp.ClientSession(
                        *streams,
                        read_timeout_seconds=read_timeout,
                        logging_callback=logging_callback,  # type: ignore
                    ),
                )
            else:
                # Streamable HTTP expects timedelta timeouts, unlike sse_client.
                timeout = timedelta(seconds=cfg.get("timeout", 30))
                sse_read_timeout = timedelta(
                    seconds=cfg.get("sse_read_timeout", 60 * 5),
                )
                self._streams_context = streamablehttp_client(
                    url=cfg["url"],
                    headers=cfg.get("headers", {}),
                    timeout=timeout,
                    sse_read_timeout=sse_read_timeout,
                    terminate_on_close=cfg.get("terminate_on_close", True),
                )
                # streamablehttp_client yields (read, write, get_session_id);
                # the third element is unused here.
                read_s, write_s, _ = await self.exit_stack.enter_async_context(
                    self._streams_context,
                )

                # Create a new client session
                read_timeout = timedelta(seconds=cfg.get("session_read_timeout", 60))
                self.session = await self.exit_stack.enter_async_context(
                    mcp.ClientSession(
                        read_stream=read_s,
                        write_stream=write_s,
                        read_timeout_seconds=read_timeout,
                        logging_callback=logging_callback,  # type: ignore
                    ),
                )

        else:
            # No URL: spawn the server as a local stdio subprocess.
            server_params = mcp.StdioServerParameters(
                **cfg,
            )

            def callback(msg: str):
                # Handle MCP service error logs
                self.server_errlogs.append(msg)

            stdio_transport = await self.exit_stack.enter_async_context(
                mcp.stdio_client(
                    server_params,
                    errlog=LogPipe(
                        level=logging.ERROR,
                        logger=logger,
                        identifier=f"MCPServer-{name}",
                        callback=callback,
                    ),  # type: ignore
                ),
            )

            # Create a new client session
            self.session = await self.exit_stack.enter_async_context(
                mcp.ClientSession(*stdio_transport),
            )
        await self.session.initialize()

    async def list_tools_and_save(self) -> mcp.ListToolsResult:
        """List all tools from the server and save them to self.tools"""
        if not self.session:
            raise Exception("MCP Client is not initialized")
        response = await self.session.list_tools()
        self.tools = response.tools
        return response

    async def _reconnect(self) -> None:
        """Reconnect to the MCP server using the stored configuration.

        Uses asyncio.Lock to ensure thread-safe reconnection in concurrent environments.

        Raises:
            Exception: raised when reconnection fails
        """
        async with self._reconnect_lock:
            # Check if already reconnecting (useful for logging)
            if self._reconnecting:
                logger.debug(
                    f"MCP Client {self._server_name} is already reconnecting, skipping"
                )
                return

            if not self._mcp_server_config or not self._server_name:
                raise Exception("Cannot reconnect: missing connection configuration")

            self._reconnecting = True
            try:
                logger.info(
                    f"Attempting to reconnect to MCP server {self._server_name}..."
                )

                # Save old exit_stack for later cleanup (don't close it now to avoid cancel scope issues)
                if self.exit_stack:
                    self._old_exit_stacks.append(self.exit_stack)

                # Mark old session as invalid
                self.session = None

                # Create new exit stack for new connection
                self.exit_stack = AsyncExitStack()

                # Reconnect using stored config
                await self.connect_to_server(self._mcp_server_config, self._server_name)
                await self.list_tools_and_save()

                logger.info(
                    f"Successfully reconnected to MCP server {self._server_name}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to reconnect to MCP server {self._server_name}: {e}"
                )
                raise
            finally:
                self._reconnecting = False

    async def call_tool_with_reconnect(
        self,
        tool_name: str,
        arguments: dict,
        read_timeout_seconds: timedelta,
    ) -> mcp.types.CallToolResult:
        """Call MCP tool with automatic reconnection on failure, max 2 retries.

        Args:
            tool_name: tool name
            arguments: tool arguments
            read_timeout_seconds: read timeout

        Returns:
            MCP tool call result

        Raises:
            ValueError: MCP session is not available
            anyio.ClosedResourceError: raised after reconnection failure
        """

        @retry(
            retry=retry_if_exception_type(anyio.ClosedResourceError),
            stop=stop_after_attempt(2),
            wait=wait_exponential(multiplier=1, min=1, max=3),
            before_sleep=before_sleep_log(logger, logging.WARNING),
            reraise=True,
        )
        async def _call_with_retry():
            # Re-check the session each attempt: _reconnect() replaces it.
            if not self.session:
                raise ValueError("MCP session is not available for MCP function tools.")

            try:
                return await self.session.call_tool(
                    name=tool_name,
                    arguments=arguments,
                    read_timeout_seconds=read_timeout_seconds,
                )
            except anyio.ClosedResourceError:
                logger.warning(
                    f"MCP tool {tool_name} call failed (ClosedResourceError), attempting to reconnect..."
                )
                # Attempt to reconnect
                await self._reconnect()
                # Reraise the exception to trigger tenacity retry
                raise

        return await _call_with_retry()

    async def cleanup(self):
        """Clean up resources including old exit stacks from reconnections"""
        # Close current exit stack
        try:
            await self.exit_stack.aclose()
        except Exception as e:
            logger.debug(f"Error closing current exit stack: {e}")

        # Don't close old exit stacks as they may be in different task contexts
        # They will be garbage collected naturally
        # Just clear the list to release references
        self._old_exit_stacks.clear()

        # Finally, signal running_event so any tasks waiting on it unblock.
        self.running_event.set()
|
||||
class MCPTool(FunctionTool, Generic[TContext]):
    """FunctionTool adapter that routes invocations to a tool on an MCP server."""

    def __init__(
        self, mcp_tool: mcp.Tool, mcp_client: MCPClient, mcp_server_name: str, **kwargs
    ):
        # Mirror the MCP tool's metadata onto the FunctionTool interface.
        description = mcp_tool.description
        super().__init__(
            name=mcp_tool.name,
            description="" if not description else description,
            parameters=mcp_tool.inputSchema,
        )
        # Keep the references needed to route calls back through the client.
        self.mcp_tool = mcp_tool
        self.mcp_client = mcp_client
        self.mcp_server_name = mcp_server_name

    async def call(
        self, context: ContextWrapper[TContext], **kwargs
    ) -> mcp.types.CallToolResult:
        """Invoke the underlying MCP tool, with reconnect-and-retry on closed streams."""
        timeout = timedelta(seconds=context.tool_call_timeout)
        result = await self.mcp_client.call_tool_with_reconnect(
            tool_name=self.mcp_tool.name,
            arguments=kwargs,
            read_timeout_seconds=timeout,
        )
        return result
||||
192
astrbot/core/agent/message.py
Normal file
192
astrbot/core/agent/message.py
Normal file
@@ -0,0 +1,192 @@
|
||||
# Inspired by MoonshotAI/kosong, credits to MoonshotAI/kosong authors for the original implementation.
|
||||
# License: Apache License 2.0
|
||||
|
||||
from typing import Any, ClassVar, Literal, cast
|
||||
|
||||
from pydantic import BaseModel, GetCoreSchemaHandler, model_validator
|
||||
from pydantic_core import core_schema
|
||||
|
||||
|
||||
class ContentPart(BaseModel):
    """A part of the content in a message.

    Subclasses register themselves under their ``type`` field value so the
    base class can dispatch validation of plain dicts to the right subclass.
    """

    __content_part_registry: ClassVar[dict[str, type["ContentPart"]]] = {}

    type: str

    def __init_subclass__(cls, **kwargs: Any) -> None:
        """Register each subclass by its ``type`` discriminator.

        Raises:
            ValueError: if the subclass does not declare a `str` default for `type`.
        """
        super().__init_subclass__(**kwargs)

        invalid_subclass_error_msg = f"ContentPart subclass {cls.__name__} must have a `type` field of type `str`"

        type_value = getattr(cls, "type", None)
        if type_value is None or not isinstance(type_value, str):
            raise ValueError(invalid_subclass_error_msg)

        cls.__content_part_registry[type_value] = cls

    @classmethod
    def __get_pydantic_core_schema__(
        cls, source_type: Any, handler: GetCoreSchemaHandler
    ) -> core_schema.CoreSchema:
        # If we're dealing with the base ContentPart class, use custom validation
        if cls.__name__ == "ContentPart":

            def validate_content_part(value: Any) -> Any:
                # if it's already an instance of a ContentPart subclass, return it
                if hasattr(value, "__class__") and issubclass(value.__class__, cls):
                    return value

                # if it's a dict with a type field, dispatch to the appropriate subclass
                if isinstance(value, dict) and "type" in value:
                    type_value: Any | None = cast(dict[str, Any], value).get("type")
                    if not isinstance(type_value, str):
                        raise ValueError(f"Cannot validate {value} as ContentPart")
                    # Raise ValueError (which pydantic wraps as ValidationError)
                    # for an unknown discriminator instead of leaking a KeyError.
                    target_class = cls.__content_part_registry.get(type_value)
                    if target_class is None:
                        raise ValueError(
                            f"Cannot validate {value} as ContentPart: "
                            f"unknown type {type_value!r}"
                        )
                    return target_class.model_validate(value)

                raise ValueError(f"Cannot validate {value} as ContentPart")

            return core_schema.no_info_plain_validator_function(validate_content_part)

        # for subclasses, use the default schema
        return handler(source_type)
||||
|
||||
|
||||
class TextPart(ContentPart):
    """A plain-text content part.

    >>> TextPart(text="Hello, world!").model_dump()
    {'type': 'text', 'text': 'Hello, world!'}
    """

    type: str = "text"  # discriminator used by the ContentPart registry
    text: str
||||
|
||||
|
||||
class ImageURLPart(ContentPart):
    """An image content part referencing an image by URL.

    >>> ImageURLPart(
    ...     image_url=ImageURLPart.ImageURL(url="http://example.com/image.jpg")
    ... ).model_dump()
    {'type': 'image_url', 'image_url': {'url': 'http://example.com/image.jpg', 'id': None}}
    """

    class ImageURL(BaseModel):
        url: str
        """The URL of the image, can be data URI scheme like `data:image/png;base64,...`."""
        id: str | None = None
        """The ID of the image, to allow LLMs to distinguish different images."""

    type: str = "image_url"  # discriminator used by the ContentPart registry
    image_url: ImageURL
||||
|
||||
|
||||
class AudioURLPart(ContentPart):
    """An audio content part referencing an audio clip by URL.

    >>> AudioURLPart(audio_url=AudioURLPart.AudioURL(url="https://example.com/audio.mp3")).model_dump()
    {'type': 'audio_url', 'audio_url': {'url': 'https://example.com/audio.mp3', 'id': None}}
    """

    class AudioURL(BaseModel):
        url: str
        """The URL of the audio, can be data URI scheme like `data:audio/aac;base64,...`."""
        id: str | None = None
        """The ID of the audio, to allow LLMs to distinguish different audios."""

    type: str = "audio_url"  # discriminator used by the ContentPart registry
    audio_url: AudioURL
||||
|
||||
|
||||
class ToolCall(BaseModel):
    """
    A tool call requested by the assistant.

    >>> ToolCall(
    ...     id="123",
    ...     function=ToolCall.FunctionBody(
    ...         name="function",
    ...         arguments="{}"
    ...     ),
    ... ).model_dump()
    {'type': 'function', 'id': '123', 'function': {'name': 'function', 'arguments': '{}'}}
    """

    class FunctionBody(BaseModel):
        # Function name plus its JSON-encoded arguments (None while streaming).
        name: str
        arguments: str | None

    type: Literal["function"] = "function"

    id: str
    """The ID of the tool call."""
    function: FunctionBody
    """The function body of the tool call."""
    extra_content: dict[str, Any] | None = None
    """Extra metadata for the tool call."""

    def model_dump(self, **kwargs: Any) -> dict[str, Any]:
        """Dump the model, omitting ``extra_content`` when it is unset.

        Unlike ``kwargs.setdefault("exclude", set()).add(...)``, this never
        mutates a caller-supplied ``exclude`` collection in place, and it also
        supports the dict form of ``exclude`` accepted by pydantic (the old
        code crashed with AttributeError on a dict).
        """
        if self.extra_content is None:
            exclude = kwargs.get("exclude")
            if exclude is None:
                kwargs["exclude"] = {"extra_content"}
            elif isinstance(exclude, dict):
                # Preserve nested exclusion specs; copy rather than mutate.
                kwargs["exclude"] = {**exclude, "extra_content": True}
            else:
                kwargs["exclude"] = set(exclude) | {"extra_content"}
        return super().model_dump(**kwargs)
||||
|
||||
|
||||
class ToolCallPart(BaseModel):
    """A part of the tool call."""

    # presumably an incremental chunk of the arguments emitted during
    # streaming — confirm against the producers of this type
    arguments_part: str | None = None
    """A part of the arguments of the tool call."""
||||
|
||||
|
||||
class Message(BaseModel):
    """A message in a conversation.

    `content` is required for every role except an assistant message that
    carries `tool_calls` (the model may request tools without any text).
    """

    role: Literal[
        "system",
        "user",
        "assistant",
        "tool",
    ]

    content: str | list[ContentPart] | None = None
    """The content of the message."""

    tool_calls: list[ToolCall] | list[dict] | None = None
    """The tool calls of the message."""

    tool_call_id: str | None = None
    """The ID of the tool call."""

    @model_validator(mode="after")
    def check_content_required(self):
        """Enforce the content-presence rule described in the class docstring."""
        # assistant + tool_calls is not None: allow content to be None
        if self.role == "assistant" and self.tool_calls is not None:
            return self

        # all other cases: content is required
        if self.content is None:
            raise ValueError(
                "content is required unless role='assistant' and tool_calls is not None"
            )
        return self
|
||||
|
||||
|
||||
class AssistantMessageSegment(Message):
    """A message segment from the assistant."""

    # Fixed role discriminator for assistant messages.
    role: Literal["assistant"] = "assistant"
||||
|
||||
|
||||
class ToolCallMessageSegment(Message):
    """A message segment representing a tool call."""

    # Fixed role discriminator for tool-result messages.
    role: Literal["tool"] = "tool"
||||
|
||||
|
||||
class UserMessageSegment(Message):
    """A message segment from the user."""

    # Fixed role discriminator for user messages.
    role: Literal["user"] = "user"
||||
|
||||
|
||||
class SystemMessageSegment(Message):
    """A message segment from the system."""

    # Fixed role discriminator for system messages.
    role: Literal["system"] = "system"
||||
14
astrbot/core/agent/response.py
Normal file
14
astrbot/core/agent/response.py
Normal file
@@ -0,0 +1,14 @@
|
||||
import typing as T
|
||||
from dataclasses import dataclass
|
||||
|
||||
from astrbot.core.message.message_event_result import MessageChain
|
||||
|
||||
|
||||
class AgentResponseData(T.TypedDict):
    """Payload carried by an AgentResponse."""

    chain: MessageChain  # the message chain produced at this point of the run
||||
|
||||
|
||||
@dataclass
class AgentResponse:
    """An event emitted by an agent runner during a run."""

    # Event kind, e.g. "streaming_delta", "llm_result", "err" (see runners).
    type: str
    data: AgentResponseData
|
||||
22
astrbot/core/agent/run_context.py
Normal file
22
astrbot/core/agent/run_context.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from typing import Any, Generic
|
||||
|
||||
from pydantic import Field
|
||||
from pydantic.dataclasses import dataclass
|
||||
from typing_extensions import TypeVar
|
||||
|
||||
from .message import Message
|
||||
|
||||
TContext = TypeVar("TContext", default=Any)
|
||||
|
||||
|
||||
@dataclass(config={"arbitrary_types_allowed": True})
class ContextWrapper(Generic[TContext]):
    """A context for running an agent, which can be used to pass additional data or state."""

    # User-supplied context payload; type is caller-defined via TContext.
    context: TContext
    messages: list[Message] = Field(default_factory=list)
    """This field stores the llm message context for the agent run, agent runners will maintain this field automatically."""
    tool_call_timeout: int = 60  # Default tool call timeout in seconds


# Alias for agent runs that carry no user-defined context payload.
NoContext = ContextWrapper[None]
|
||||
3
astrbot/core/agent/runners/__init__.py
Normal file
3
astrbot/core/agent/runners/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .base import BaseAgentRunner
|
||||
|
||||
__all__ = ["BaseAgentRunner"]
|
||||
65
astrbot/core/agent/runners/base.py
Normal file
65
astrbot/core/agent/runners/base.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import abc
|
||||
import typing as T
|
||||
from enum import Enum, auto
|
||||
|
||||
from astrbot import logger
|
||||
from astrbot.core.provider.entities import LLMResponse
|
||||
|
||||
from ..hooks import BaseAgentRunHooks
|
||||
from ..response import AgentResponse
|
||||
from ..run_context import ContextWrapper, TContext
|
||||
|
||||
|
||||
class AgentState(Enum):
    """Defines the state of the agent.

    Runners move IDLE -> RUNNING and finish in DONE or ERROR.
    """

    IDLE = auto()  # Initial state
    RUNNING = auto()  # Currently processing
    DONE = auto()  # Completed
    ERROR = auto()  # Error state
||||
|
||||
|
||||
class BaseAgentRunner(T.Generic[TContext]):
    """Abstract interface for agent runners.

    NOTE(review): this class does not inherit `abc.ABC`, so the
    `@abc.abstractmethod` decorators below are not enforced at instantiation
    time — confirm whether that is intentional.
    """

    @abc.abstractmethod
    async def reset(
        self,
        run_context: ContextWrapper[TContext],
        agent_hooks: BaseAgentRunHooks[TContext],
        **kwargs: T.Any,
    ) -> None:
        """Reset the agent to its initial state.
        This method should be called before starting a new run.
        """
        ...

    @abc.abstractmethod
    async def step(self) -> T.AsyncGenerator[AgentResponse, None]:
        """Process a single step of the agent."""
        ...

    @abc.abstractmethod
    async def step_until_done(
        self, max_step: int
    ) -> T.AsyncGenerator[AgentResponse, None]:
        """Process steps until the agent is done."""
        ...

    @abc.abstractmethod
    def done(self) -> bool:
        """Check if the agent has completed its task.
        Returns True if the agent is done, False otherwise.
        """
        ...

    @abc.abstractmethod
    def get_final_llm_resp(self) -> LLMResponse | None:
        """Get the final observation from the agent.
        This method should be called after the agent is done.
        """
        ...

    def _transition_state(self, new_state: AgentState) -> None:
        """Transition the agent state, logging each change.

        NOTE(review): reads `self._state`, which this base class never
        initializes — relies on the subclass (e.g. its `reset`) setting it
        before the first transition.
        """
        if self._state != new_state:
            logger.debug(f"Agent state transition: {self._state} -> {new_state}")
            self._state = new_state
||||
367
astrbot/core/agent/runners/coze/coze_agent_runner.py
Normal file
367
astrbot/core/agent/runners/coze/coze_agent_runner.py
Normal file
@@ -0,0 +1,367 @@
|
||||
import base64
|
||||
import json
|
||||
import sys
|
||||
import typing as T
|
||||
|
||||
import astrbot.core.message.components as Comp
|
||||
from astrbot import logger
|
||||
from astrbot.core import sp
|
||||
from astrbot.core.message.message_event_result import MessageChain
|
||||
from astrbot.core.provider.entities import (
|
||||
LLMResponse,
|
||||
ProviderRequest,
|
||||
)
|
||||
|
||||
from ...hooks import BaseAgentRunHooks
|
||||
from ...response import AgentResponseData
|
||||
from ...run_context import ContextWrapper, TContext
|
||||
from ..base import AgentResponse, AgentState, BaseAgentRunner
|
||||
from .coze_api_client import CozeAPIClient
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
from typing import override
|
||||
else:
|
||||
from typing_extensions import override
|
||||
|
||||
|
||||
class CozeAgentRunner(BaseAgentRunner[TContext]):
    """Agent runner backed by the Coze chat API (v3).

    Unlike tool-loop runners, a Coze run is a single streamed chat request;
    `step()` drives the whole exchange and the runner is DONE afterwards.
    """

    @override
    async def reset(
        self,
        request: ProviderRequest,
        run_context: ContextWrapper[TContext],
        agent_hooks: BaseAgentRunHooks[TContext],
        provider_config: dict,
        **kwargs: T.Any,
    ) -> None:
        # Prepare per-run state; must be called before step().
        self.req = request
        self.streaming = kwargs.get("streaming", False)
        self.final_llm_resp = None
        self._state = AgentState.IDLE
        self.agent_hooks = agent_hooks
        self.run_context = run_context

        # Validate required provider configuration up front.
        self.api_key = provider_config.get("coze_api_key", "")
        if not self.api_key:
            raise Exception("Coze API Key 不能为空。")
        self.bot_id = provider_config.get("bot_id", "")
        if not self.bot_id:
            raise Exception("Coze Bot ID 不能为空。")
        self.api_base: str = provider_config.get("coze_api_base", "https://api.coze.cn")

        if not isinstance(self.api_base, str) or not self.api_base.startswith(
            ("http://", "https://"),
        ):
            raise Exception(
                "Coze API Base URL 格式不正确,必须以 http:// 或 https:// 开头。",
            )

        # Timeout may arrive as a string from user config; normalize to int.
        self.timeout = provider_config.get("timeout", 120)
        if isinstance(self.timeout, str):
            self.timeout = int(self.timeout)
        self.auto_save_history = provider_config.get("auto_save_history", True)

        # Create the API client
        self.api_client = CozeAPIClient(api_key=self.api_key, api_base=self.api_base)

        # Per-session cache: session_id -> {image-url-hash: file_id}
        self.file_id_cache: dict[str, dict[str, str]] = {}

    @override
    async def step(self):
        """Execute one step of the Coze agent (the full chat exchange)."""
        if not self.req:
            raise ValueError("Request is not set. Please call reset() first.")

        if self._state == AgentState.IDLE:
            try:
                await self.agent_hooks.on_agent_begin(self.run_context)
            except Exception as e:
                logger.error(f"Error in on_agent_begin hook: {e}", exc_info=True)

            # Begin processing; transition to the running state.
            self._transition_state(AgentState.RUNNING)

        try:
            # Run the Coze request and forward its responses.
            async for response in self._execute_coze_request():
                yield response
        except Exception as e:
            logger.error(f"Coze 请求失败:{str(e)}")
            self._transition_state(AgentState.ERROR)
            self.final_llm_resp = LLMResponse(
                role="err", completion_text=f"Coze 请求失败:{str(e)}"
            )
            yield AgentResponse(
                type="err",
                data=AgentResponseData(
                    chain=MessageChain().message(f"Coze 请求失败:{str(e)}")
                ),
            )
        finally:
            await self.api_client.close()

    @override
    async def step_until_done(
        self, max_step: int = 30
    ) -> T.AsyncGenerator[AgentResponse, None]:
        # A Coze run completes within one step(); max_step is accepted for
        # interface compatibility but is not consulted here.
        while not self.done():
            async for resp in self.step():
                yield resp

    async def _execute_coze_request(self):
        """Core logic for executing the Coze request.

        Builds `additional_messages` (system prompt, optional history, the
        current user message), streams the chat, and yields streaming deltas
        plus the final result.
        """
        prompt = self.req.prompt or ""
        session_id = self.req.session_id or "unknown"
        image_urls = self.req.image_urls or []
        contexts = self.req.contexts or []
        system_prompt = self.req.system_prompt

        # User ID parameter (the session ID doubles as the Coze user ID here)
        user_id = session_id

        # Look up the previously stored conversation ID, if any
        conversation_id = await sp.get_async(
            scope="umo",
            scope_id=user_id,
            key="coze_conversation_id",
            default="",
        )

        # Build the message list
        additional_messages = []

        if system_prompt:
            # Only send the system prompt when Coze isn't already holding
            # server-side history for this conversation.
            if not self.auto_save_history or not conversation_id:
                additional_messages.append(
                    {
                        "role": "system",
                        "content": system_prompt,
                        "content_type": "text",
                    },
                )

        # Replay local history context when server-side history is disabled
        if not self.auto_save_history and contexts:
            for ctx in contexts:
                if isinstance(ctx, dict) and "role" in ctx and "content" in ctx:
                    # Handle images contained in the context
                    content = ctx["content"]
                    if isinstance(content, list):
                        # Multimodal content: images need to be uploaded first
                        processed_content = []
                        for item in content:
                            if isinstance(item, dict):
                                if item.get("type") == "text":
                                    processed_content.append(item)
                                elif item.get("type") == "image_url":
                                    # Upload the image and reference it by file_id
                                    try:
                                        image_data = item.get("image_url", {})
                                        url = image_data.get("url", "")
                                        if url:
                                            file_id = (
                                                await self._download_and_upload_image(
                                                    url, session_id
                                                )
                                            )
                                            processed_content.append(
                                                {
                                                    "type": "file",
                                                    "file_id": file_id,
                                                    "file_url": url,
                                                }
                                            )
                                    except Exception as e:
                                        logger.warning(f"处理上下文图片失败: {e}")
                                        continue

                        if processed_content:
                            additional_messages.append(
                                {
                                    "role": ctx["role"],
                                    "content": processed_content,
                                    "content_type": "object_string",
                                }
                            )
                    else:
                        # Plain text content
                        additional_messages.append(
                            {
                                "role": ctx["role"],
                                "content": content,
                                "content_type": "text",
                            }
                        )

        # Build the current message
        if prompt or image_urls:
            if image_urls:
                # Multimodal message
                object_string_content = []
                if prompt:
                    object_string_content.append({"type": "text", "text": prompt})

                for url in image_urls:
                    # the url is a base64 string
                    try:
                        image_data = base64.b64decode(url)
                        file_id = await self.api_client.upload_file(image_data)
                        object_string_content.append(
                            {
                                "type": "image",
                                "file_id": file_id,
                            }
                        )
                    except Exception as e:
                        logger.warning(f"处理图片失败 {url}: {e}")
                        continue

                if object_string_content:
                    content = json.dumps(object_string_content, ensure_ascii=False)
                    additional_messages.append(
                        {
                            "role": "user",
                            "content": content,
                            "content_type": "object_string",
                        }
                    )
            elif prompt:
                # Plain text message
                additional_messages.append(
                    {
                        "role": "user",
                        "content": prompt,
                        "content_type": "text",
                    },
                )

        # Execute the Coze API request and consume the event stream
        accumulated_content = ""
        message_started = False

        async for chunk in self.api_client.chat_messages(
            bot_id=self.bot_id,
            user_id=user_id,
            additional_messages=additional_messages,
            conversation_id=conversation_id,
            auto_save_history=self.auto_save_history,
            stream=True,
            timeout=self.timeout,
        ):
            event_type = chunk.get("event")
            data = chunk.get("data", {})

            if event_type == "conversation.chat.created":
                # Persist the conversation ID for subsequent requests
                if isinstance(data, dict) and "conversation_id" in data:
                    await sp.put_async(
                        scope="umo",
                        scope_id=user_id,
                        key="coze_conversation_id",
                        value=data["conversation_id"],
                    )

            if event_type == "conversation.message.delta":
                # Incremental message: content may live under several keys
                content = data.get("content", "")
                if not content and "delta" in data:
                    content = data["delta"].get("content", "")
                if not content and "text" in data:
                    content = data.get("text", "")

                if content:
                    accumulated_content += content
                    message_started = True

                    # In streaming mode, forward the delta immediately
                    if self.streaming:
                        yield AgentResponse(
                            type="streaming_delta",
                            data=AgentResponseData(
                                chain=MessageChain().message(content)
                            ),
                        )

            elif event_type == "conversation.message.completed":
                # Message finished
                logger.debug("Coze message completed")
                message_started = True

            elif event_type == "conversation.chat.completed":
                # Chat finished
                logger.debug("Coze chat completed")
                break

            elif event_type == "error":
                # Error handling
                error_msg = data.get("msg", "未知错误")
                error_code = data.get("code", "UNKNOWN")
                logger.error(f"Coze 出现错误: {error_code} - {error_msg}")
                raise Exception(f"Coze 出现错误: {error_code} - {error_msg}")

        if not message_started and not accumulated_content:
            logger.warning("Coze 未返回任何内容")
            accumulated_content = ""

        # Create the final response
        chain = MessageChain(chain=[Comp.Plain(accumulated_content)])
        self.final_llm_resp = LLMResponse(role="assistant", result_chain=chain)
        self._transition_state(AgentState.DONE)

        try:
            await self.agent_hooks.on_agent_done(self.run_context, self.final_llm_resp)
        except Exception as e:
            logger.error(f"Error in on_agent_done hook: {e}", exc_info=True)

        # Yield the final result
        yield AgentResponse(
            type="llm_result",
            data=AgentResponseData(chain=chain),
        )

    async def _download_and_upload_image(
        self,
        image_url: str,
        session_id: str | None = None,
    ) -> str:
        """Download an image and upload it to Coze, returning the file_id.

        Results are cached per session, keyed by the MD5 of the URL, so the
        same image is only uploaded once per session.
        """
        import hashlib

        # Hash the URL to form the cache key
        cache_key = hashlib.md5(image_url.encode("utf-8")).hexdigest()

        if session_id:
            if session_id not in self.file_id_cache:
                self.file_id_cache[session_id] = {}

            if cache_key in self.file_id_cache[session_id]:
                file_id = self.file_id_cache[session_id][cache_key]
                logger.debug(f"[Coze] 使用缓存的 file_id: {file_id}")
                return file_id

        try:
            image_data = await self.api_client.download_image(image_url)
            file_id = await self.api_client.upload_file(image_data)

            if session_id:
                self.file_id_cache[session_id][cache_key] = file_id
                logger.debug(f"[Coze] 图片上传成功并缓存,file_id: {file_id}")

            return file_id

        except Exception as e:
            logger.error(f"处理图片失败 {image_url}: {e!s}")
            raise Exception(f"处理图片失败: {e!s}")

    @override
    def done(self) -> bool:
        """Return True when the run has finished (successfully or with an error)."""
        return self._state in (AgentState.DONE, AgentState.ERROR)

    @override
    def get_final_llm_resp(self) -> LLMResponse | None:
        # None until the run reaches DONE or ERROR.
        return self.final_llm_resp
||||
324
astrbot/core/agent/runners/coze/coze_api_client.py
Normal file
324
astrbot/core/agent/runners/coze/coze_api_client.py
Normal file
@@ -0,0 +1,324 @@
|
||||
import asyncio
|
||||
import io
|
||||
import json
|
||||
from collections.abc import AsyncGenerator
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
|
||||
from astrbot.core import logger
|
||||
|
||||
|
||||
class CozeAPIClient:
|
||||
def __init__(self, api_key: str, api_base: str = "https://api.coze.cn"):
|
||||
self.api_key = api_key
|
||||
self.api_base = api_base
|
||||
self.session = None
|
||||
|
||||
async def _ensure_session(self):
|
||||
"""确保HTTP session存在"""
|
||||
if self.session is None:
|
||||
connector = aiohttp.TCPConnector(
|
||||
ssl=False if self.api_base.startswith("http://") else True,
|
||||
limit=100,
|
||||
limit_per_host=30,
|
||||
keepalive_timeout=30,
|
||||
enable_cleanup_closed=True,
|
||||
)
|
||||
timeout = aiohttp.ClientTimeout(
|
||||
total=120, # 默认超时时间
|
||||
connect=30,
|
||||
sock_read=120,
|
||||
)
|
||||
headers = {
|
||||
"Authorization": f"Bearer {self.api_key}",
|
||||
"Accept": "text/event-stream",
|
||||
}
|
||||
self.session = aiohttp.ClientSession(
|
||||
headers=headers,
|
||||
timeout=timeout,
|
||||
connector=connector,
|
||||
)
|
||||
return self.session
|
||||
|
||||
async def upload_file(
|
||||
self,
|
||||
file_data: bytes,
|
||||
) -> str:
|
||||
"""上传文件到 Coze 并返回 file_id
|
||||
|
||||
Args:
|
||||
file_data (bytes): 文件的二进制数据
|
||||
Returns:
|
||||
str: 上传成功后返回的 file_id
|
||||
|
||||
"""
|
||||
session = await self._ensure_session()
|
||||
url = f"{self.api_base}/v1/files/upload"
|
||||
|
||||
try:
|
||||
file_io = io.BytesIO(file_data)
|
||||
async with session.post(
|
||||
url,
|
||||
data={
|
||||
"file": file_io,
|
||||
},
|
||||
timeout=aiohttp.ClientTimeout(total=60),
|
||||
) as response:
|
||||
if response.status == 401:
|
||||
raise Exception("Coze API 认证失败,请检查 API Key 是否正确")
|
||||
|
||||
response_text = await response.text()
|
||||
logger.debug(
|
||||
f"文件上传响应状态: {response.status}, 内容: {response_text}",
|
||||
)
|
||||
|
||||
if response.status != 200:
|
||||
raise Exception(
|
||||
f"文件上传失败,状态码: {response.status}, 响应: {response_text}",
|
||||
)
|
||||
|
||||
try:
|
||||
result = await response.json()
|
||||
except json.JSONDecodeError:
|
||||
raise Exception(f"文件上传响应解析失败: {response_text}")
|
||||
|
||||
if result.get("code") != 0:
|
||||
raise Exception(f"文件上传失败: {result.get('msg', '未知错误')}")
|
||||
|
||||
file_id = result["data"]["id"]
|
||||
logger.debug(f"[Coze] 图片上传成功,file_id: {file_id}")
|
||||
return file_id
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
logger.error("文件上传超时")
|
||||
raise Exception("文件上传超时")
|
||||
except Exception as e:
|
||||
logger.error(f"文件上传失败: {e!s}")
|
||||
raise Exception(f"文件上传失败: {e!s}")
|
||||
|
||||
async def download_image(self, image_url: str) -> bytes:
|
||||
"""下载图片并返回字节数据
|
||||
|
||||
Args:
|
||||
image_url (str): 图片的URL
|
||||
Returns:
|
||||
bytes: 图片的二进制数据
|
||||
|
||||
"""
|
||||
session = await self._ensure_session()
|
||||
|
||||
try:
|
||||
async with session.get(image_url) as response:
|
||||
if response.status != 200:
|
||||
raise Exception(f"下载图片失败,状态码: {response.status}")
|
||||
|
||||
image_data = await response.read()
|
||||
return image_data
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"下载图片失败 {image_url}: {e!s}")
|
||||
raise Exception(f"下载图片失败: {e!s}")
|
||||
|
||||
async def chat_messages(
|
||||
self,
|
||||
bot_id: str,
|
||||
user_id: str,
|
||||
additional_messages: list[dict] | None = None,
|
||||
conversation_id: str | None = None,
|
||||
auto_save_history: bool = True,
|
||||
stream: bool = True,
|
||||
timeout: float = 120,
|
||||
) -> AsyncGenerator[dict[str, Any], None]:
|
||||
"""发送聊天消息并返回流式响应
|
||||
|
||||
Args:
|
||||
bot_id: Bot ID
|
||||
user_id: 用户ID
|
||||
additional_messages: 额外消息列表
|
||||
conversation_id: 会话ID
|
||||
auto_save_history: 是否自动保存历史
|
||||
stream: 是否流式响应
|
||||
timeout: 超时时间
|
||||
|
||||
"""
|
||||
session = await self._ensure_session()
|
||||
url = f"{self.api_base}/v3/chat"
|
||||
|
||||
payload = {
|
||||
"bot_id": bot_id,
|
||||
"user_id": user_id,
|
||||
"stream": stream,
|
||||
"auto_save_history": auto_save_history,
|
||||
}
|
||||
|
||||
if additional_messages:
|
||||
payload["additional_messages"] = additional_messages
|
||||
|
||||
params = {}
|
||||
if conversation_id:
|
||||
params["conversation_id"] = conversation_id
|
||||
|
||||
logger.debug(f"Coze chat_messages payload: {payload}, params: {params}")
|
||||
|
||||
try:
|
||||
async with session.post(
|
||||
url,
|
||||
json=payload,
|
||||
params=params,
|
||||
timeout=aiohttp.ClientTimeout(total=timeout),
|
||||
) as response:
|
||||
if response.status == 401:
|
||||
raise Exception("Coze API 认证失败,请检查 API Key 是否正确")
|
||||
|
||||
if response.status != 200:
|
||||
raise Exception(f"Coze API 流式请求失败,状态码: {response.status}")
|
||||
|
||||
# SSE
|
||||
buffer = ""
|
||||
event_type = None
|
||||
event_data = None
|
||||
|
||||
async for chunk in response.content:
|
||||
if chunk:
|
||||
buffer += chunk.decode("utf-8", errors="ignore")
|
||||
lines = buffer.split("\n")
|
||||
buffer = lines[-1]
|
||||
|
||||
for line in lines[:-1]:
|
||||
line = line.strip()
|
||||
|
||||
if not line:
|
||||
if event_type and event_data:
|
||||
yield {"event": event_type, "data": event_data}
|
||||
event_type = None
|
||||
event_data = None
|
||||
elif line.startswith("event:"):
|
||||
event_type = line[6:].strip()
|
||||
elif line.startswith("data:"):
|
||||
data_str = line[5:].strip()
|
||||
if data_str and data_str != "[DONE]":
|
||||
try:
|
||||
event_data = json.loads(data_str)
|
||||
except json.JSONDecodeError:
|
||||
event_data = {"content": data_str}
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
raise Exception(f"Coze API 流式请求超时 ({timeout}秒)")
|
||||
except Exception as e:
|
||||
raise Exception(f"Coze API 流式请求失败: {e!s}")
|
||||
|
||||
async def clear_context(self, conversation_id: str):
|
||||
"""清空会话上下文
|
||||
|
||||
Args:
|
||||
conversation_id: 会话ID
|
||||
Returns:
|
||||
dict: API响应结果
|
||||
|
||||
"""
|
||||
session = await self._ensure_session()
|
||||
url = f"{self.api_base}/v3/conversation/message/clear_context"
|
||||
payload = {"conversation_id": conversation_id}
|
||||
|
||||
try:
|
||||
async with session.post(url, json=payload) as response:
|
||||
response_text = await response.text()
|
||||
|
||||
if response.status == 401:
|
||||
raise Exception("Coze API 认证失败,请检查 API Key 是否正确")
|
||||
|
||||
if response.status != 200:
|
||||
raise Exception(f"Coze API 请求失败,状态码: {response.status}")
|
||||
|
||||
try:
|
||||
return json.loads(response_text)
|
||||
except json.JSONDecodeError:
|
||||
raise Exception("Coze API 返回非JSON格式")
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
raise Exception("Coze API 请求超时")
|
||||
except aiohttp.ClientError as e:
|
||||
raise Exception(f"Coze API 请求失败: {e!s}")
|
||||
|
||||
async def get_message_list(
|
||||
self,
|
||||
conversation_id: str,
|
||||
order: str = "desc",
|
||||
limit: int = 10,
|
||||
offset: int = 0,
|
||||
):
|
||||
"""获取消息列表
|
||||
|
||||
Args:
|
||||
conversation_id: 会话ID
|
||||
order: 排序方式 (asc/desc)
|
||||
limit: 限制数量
|
||||
offset: 偏移量
|
||||
Returns:
|
||||
dict: API响应结果
|
||||
|
||||
"""
|
||||
session = await self._ensure_session()
|
||||
url = f"{self.api_base}/v3/conversation/message/list"
|
||||
params = {
|
||||
"conversation_id": conversation_id,
|
||||
"order": order,
|
||||
"limit": limit,
|
||||
"offset": offset,
|
||||
}
|
||||
|
||||
try:
|
||||
async with session.get(url, params=params) as response:
|
||||
response.raise_for_status()
|
||||
return await response.json()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"获取Coze消息列表失败: {e!s}")
|
||||
raise Exception(f"获取Coze消息列表失败: {e!s}")
|
||||
|
||||
async def close(self):
|
||||
"""关闭会话"""
|
||||
if self.session:
|
||||
await self.session.close()
|
||||
self.session = None
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test: requires COZE_API_KEY / COZE_BOT_ID environment
    # variables, network access, and a local README.md to upload.
    import asyncio
    import os

    async def test_coze_api_client():
        client = CozeAPIClient(api_key=os.getenv("COZE_API_KEY", ""))
        bot_id = os.getenv("COZE_BOT_ID", "")

        try:
            with open("README.md", "rb") as readme:
                file_id = await client.upload_file(readme.read())
            print(f"Uploaded file_id: {file_id}")

            # Multimodal message: text plus the freshly uploaded file,
            # encoded as Coze's "object_string" content type.
            content = json.dumps(
                [
                    {"type": "text", "text": "这是什么"},
                    {"type": "file", "file_id": file_id},
                ],
                ensure_ascii=False,
            )
            message = {
                "role": "user",
                "content": content,
                "content_type": "object_string",
            }

            async for event in client.chat_messages(
                bot_id=bot_id,
                user_id="test_user",
                additional_messages=[message],
                stream=True,
            ):
                print(f"Event: {event}")

        finally:
            await client.close()

    asyncio.run(test_coze_api_client())
|
||||
403
astrbot/core/agent/runners/dashscope/dashscope_agent_runner.py
Normal file
403
astrbot/core/agent/runners/dashscope/dashscope_agent_runner.py
Normal file
@@ -0,0 +1,403 @@
|
||||
import asyncio
|
||||
import functools
|
||||
import queue
|
||||
import re
|
||||
import sys
|
||||
import threading
|
||||
import typing as T
|
||||
|
||||
from dashscope import Application
|
||||
from dashscope.app.application_response import ApplicationResponse
|
||||
|
||||
import astrbot.core.message.components as Comp
|
||||
from astrbot.core import logger, sp
|
||||
from astrbot.core.message.message_event_result import MessageChain
|
||||
from astrbot.core.provider.entities import (
|
||||
LLMResponse,
|
||||
ProviderRequest,
|
||||
)
|
||||
|
||||
from ...hooks import BaseAgentRunHooks
|
||||
from ...response import AgentResponseData
|
||||
from ...run_context import ContextWrapper, TContext
|
||||
from ..base import AgentResponse, AgentState, BaseAgentRunner
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
from typing import override
|
||||
else:
|
||||
from typing_extensions import override
|
||||
|
||||
|
||||
class DashscopeAgentRunner(BaseAgentRunner[TContext]):
    """Agent runner backed by Alibaba Cloud Bailian (DashScope) applications.

    The ``dashscope`` SDK is synchronous; streaming responses are consumed on
    a daemon worker thread and relayed to the async side through a queue.
    """

    @override
    async def reset(
        self,
        request: ProviderRequest,
        run_context: ContextWrapper[TContext],
        agent_hooks: BaseAgentRunHooks[TContext],
        provider_config: dict,
        **kwargs: T.Any,
    ) -> None:
        """Initialize runner state for a new request.

        Args:
            request: The provider request to execute.
            run_context: Shared run context passed to lifecycle hooks.
            agent_hooks: Hooks fired at agent begin/done.
            provider_config: Provider settings; must contain
                ``dashscope_api_key``, ``dashscope_app_id`` and
                ``dashscope_app_type``.
            **kwargs: ``streaming`` (bool) selects streaming mode.

        Raises:
            Exception: If any required config entry is missing or empty.
        """
        self.req = request
        self.streaming = kwargs.get("streaming", False)
        self.final_llm_resp = None
        self._state = AgentState.IDLE
        self.agent_hooks = agent_hooks
        self.run_context = run_context

        self.api_key = provider_config.get("dashscope_api_key", "")
        if not self.api_key:
            raise Exception("阿里云百炼 API Key 不能为空。")
        self.app_id = provider_config.get("dashscope_app_id", "")
        if not self.app_id:
            raise Exception("阿里云百炼 APP ID 不能为空。")
        self.dashscope_app_type = provider_config.get("dashscope_app_type", "")
        if not self.dashscope_app_type:
            raise Exception("阿里云百炼 APP 类型不能为空。")

        self.variables: dict = provider_config.get("variables", {}) or {}
        self.rag_options: dict = provider_config.get("rag_options", {})
        self.output_reference = self.rag_options.get("output_reference", False)
        # Work on a copy so the caller's config dict is not mutated;
        # output_reference is consumed locally and must not reach the API.
        self.rag_options = self.rag_options.copy()
        self.rag_options.pop("output_reference", None)

        self.timeout = provider_config.get("timeout", 120)
        if isinstance(self.timeout, str):
            self.timeout = int(self.timeout)

    def has_rag_options(self) -> bool:
        """Check whether RAG options are effectively enabled.

        Returns:
            bool: True when ``rag_options`` contains a non-empty
            ``pipeline_ids`` or ``file_ids`` list.
        """
        if self.rag_options and (
            len(self.rag_options.get("pipeline_ids", [])) > 0
            or len(self.rag_options.get("file_ids", [])) > 0
        ):
            return True
        return False

    @override
    async def step(self):
        """Execute one step of the DashScope agent.

        Fires ``on_agent_begin`` on the first step, then performs the
        DashScope request and relays its responses. Any failure transitions
        the runner to ERROR and yields a single error response.
        """
        if not self.req:
            raise ValueError("Request is not set. Please call reset() first.")

        if self._state == AgentState.IDLE:
            try:
                await self.agent_hooks.on_agent_begin(self.run_context)
            except Exception as e:
                logger.error(f"Error in on_agent_begin hook: {e}", exc_info=True)

            # Start processing: transition to the RUNNING state.
            self._transition_state(AgentState.RUNNING)

        try:
            # Execute the DashScope request and relay its results.
            async for response in self._execute_dashscope_request():
                yield response
        except Exception as e:
            logger.error(f"阿里云百炼请求失败:{str(e)}")
            self._transition_state(AgentState.ERROR)
            self.final_llm_resp = LLMResponse(
                role="err", completion_text=f"阿里云百炼请求失败:{str(e)}"
            )
            yield AgentResponse(
                type="err",
                data=AgentResponseData(
                    chain=MessageChain().message(f"阿里云百炼请求失败:{str(e)}")
                ),
            )

    @override
    async def step_until_done(
        self, max_step: int = 30
    ) -> T.AsyncGenerator[AgentResponse, None]:
        """Step repeatedly until the runner reports done.

        Note: ``max_step`` is accepted for interface compatibility but not
        enforced here — a single step drives the request to completion.
        """
        while not self.done():
            async for resp in self.step():
                yield resp

    def _consume_sync_generator(
        self, response: T.Any, response_queue: queue.Queue
    ) -> None:
        """Consume the synchronous SDK response on a worker thread.

        Pushes ``("data", chunk)`` items (or the whole response once in
        non-streaming mode), ``("error", exc)`` on failure, and always a
        final ``("done", None)`` sentinel.

        Args:
            response: Synchronous generator (streaming) or single response.
            response_queue: Queue used to hand items to the event loop side.
        """
        try:
            if self.streaming:
                for chunk in response:
                    response_queue.put(("data", chunk))
            else:
                response_queue.put(("data", response))
        except Exception as e:
            response_queue.put(("error", e))
        finally:
            response_queue.put(("done", None))

    async def _process_stream_chunk(
        self, chunk: ApplicationResponse, output_text: str
    ) -> tuple[str, list | None, AgentResponse | None]:
        """Process a single streamed DashScope response chunk.

        Args:
            chunk: DashScope response chunk.
            output_text: Text accumulated so far.

        Returns:
            Tuple of (updated output_text, doc_references or None,
            AgentResponse to yield or None). On a non-200 status the runner
            is moved to ERROR and an "err" response is returned.
        """
        logger.debug(f"dashscope stream chunk: {chunk}")

        if chunk.status_code != 200:
            logger.error(
                f"阿里云百炼请求失败: request_id={chunk.request_id}, code={chunk.status_code}, message={chunk.message}, 请参考文档:https://help.aliyun.com/zh/model-studio/developer-reference/error-code",
            )
            self._transition_state(AgentState.ERROR)
            error_msg = (
                f"阿里云百炼请求失败: message={chunk.message} code={chunk.status_code}"
            )
            self.final_llm_resp = LLMResponse(
                role="err",
                result_chain=MessageChain().message(error_msg),
            )
            return (
                output_text,
                None,
                AgentResponse(
                    type="err",
                    data=AgentResponseData(chain=MessageChain().message(error_msg)),
                ),
            )

        chunk_text = chunk.output.get("text", "") or ""
        # Normalize RAG citation footnotes: <ref>[n]</ref> -> [n]
        chunk_text = re.sub(r"<ref>\[(\d+)\]</ref>", r"[\1]", chunk_text)

        response = None
        if chunk_text:
            output_text += chunk_text
            response = AgentResponse(
                type="streaming_delta",
                data=AgentResponseData(chain=MessageChain().message(chunk_text)),
            )

        # Collect document references, if the chunk carries any.
        doc_references = chunk.output.get("doc_references", None)

        return output_text, doc_references, response

    def _format_doc_references(self, doc_references: list) -> str:
        """Format RAG document references as an appendable text section.

        Args:
            doc_references: Reference dicts; each is expected to carry
                ``index_id`` and a ``title`` or ``doc_name``.

        Returns:
            str: An "answer sources" section listing each reference.
        """
        ref_parts = []
        for ref in doc_references:
            # Prefer the explicit title; fall back to the document name.
            ref_title = (
                ref.get("title", "") if ref.get("title") else ref.get("doc_name", "")
            )
            ref_parts.append(f"{ref['index_id']}. {ref_title}\n")
        ref_str = "".join(ref_parts)
        return f"\n\n回答来源:\n{ref_str}"

    async def _build_request_payload(
        self, prompt: str, session_id: str, contexts: list, system_prompt: str
    ) -> dict:
        """Build the keyword payload for ``Application.call``.

        Args:
            prompt: User input text.
            session_id: Session identifier (scopes persisted state).
            contexts: Context message list.
            system_prompt: System prompt text.

        Returns:
            dict: Keyword arguments for the DashScope SDK call.

        NOTE(review): ``contexts`` and ``system_prompt`` are accepted but not
        referenced below — DashScope apps appear to manage history via
        ``session_id``; confirm this is intentional.
        """
        conversation_id = await sp.get_async(
            scope="umo",
            scope_id=session_id,
            key="dashscope_conversation_id",
            default="",
        )
        # Merge static provider variables with per-session variables.
        payload_vars = self.variables.copy()
        session_var = await sp.get_async(
            scope="umo",
            scope_id=session_id,
            key="session_variables",
            default={},
        )
        payload_vars.update(session_var)

        if (
            self.dashscope_app_type in ["agent", "dialog-workflow"]
            and not self.has_rag_options()
        ):
            # App types that support multi-turn conversations.
            p = {
                "app_id": self.app_id,
                "api_key": self.api_key,
                "prompt": prompt,
                "biz_params": payload_vars or None,
                "stream": self.streaming,
                "incremental_output": True,
            }
            if conversation_id:
                p["session_id"] = conversation_id
            return p
        else:
            # App types without multi-turn conversation support.
            payload = {
                "app_id": self.app_id,
                "prompt": prompt,
                "api_key": self.api_key,
                "biz_params": payload_vars or None,
                "stream": self.streaming,
                "incremental_output": True,
            }
            if self.rag_options:
                payload["rag_options"] = self.rag_options
            return payload

    async def _handle_streaming_response(
        self, response: T.Any, session_id: str
    ) -> T.AsyncGenerator[AgentResponse, None]:
        """Relay a (possibly streaming) DashScope response to the agent side.

        A daemon thread drains the synchronous response into a queue; this
        coroutine polls the queue (1s timeout per poll) so the event loop is
        never blocked. In non-streaming mode the whole response arrives as a
        single "data" item and is still emitted as one streaming_delta before
        the final llm_result.

        Args:
            response: DashScope streaming generator or single response.
            session_id: Session identifier used to persist the conversation ID.

        Yields:
            AgentResponse objects (deltas, optional RAG reference delta, and
            the final llm_result).
        """
        response_queue = queue.Queue()
        consumer_thread = threading.Thread(
            target=self._consume_sync_generator,
            args=(response, response_queue),
            daemon=True,
        )
        consumer_thread.start()

        output_text = ""
        doc_references = None

        while True:
            try:
                # NOTE(review): asyncio.get_event_loop() is deprecated inside
                # coroutines — asyncio.get_running_loop() is preferred.
                item_type, item_data = await asyncio.get_event_loop().run_in_executor(
                    None, response_queue.get, True, 1
                )
            except queue.Empty:
                # Poll timed out; keep waiting for the worker thread.
                continue

            if item_type == "done":
                break
            elif item_type == "error":
                raise item_data
            elif item_type == "data":
                chunk = item_data
                assert isinstance(chunk, ApplicationResponse)

                (
                    output_text,
                    chunk_doc_refs,
                    response,
                ) = await self._process_stream_chunk(chunk, output_text)

                if response:
                    if response.type == "err":
                        # Abort the stream on an API-level error.
                        yield response
                        return
                    yield response

                if chunk_doc_refs:
                    doc_references = chunk_doc_refs

                # Persist the DashScope session ID for multi-turn follow-ups.
                if chunk.output.session_id:
                    await sp.put_async(
                        scope="umo",
                        scope_id=session_id,
                        key="dashscope_conversation_id",
                        value=chunk.output.session_id,
                    )

        # Append RAG references when enabled and present.
        if self.output_reference and doc_references:
            ref_text = self._format_doc_references(doc_references)
            output_text += ref_text

            if self.streaming:
                yield AgentResponse(
                    type="streaming_delta",
                    data=AgentResponseData(chain=MessageChain().message(ref_text)),
                )

        # Build the final response and mark the run as done.
        chain = MessageChain(chain=[Comp.Plain(output_text)])
        self.final_llm_resp = LLMResponse(role="assistant", result_chain=chain)
        self._transition_state(AgentState.DONE)

        try:
            await self.agent_hooks.on_agent_done(self.run_context, self.final_llm_resp)
        except Exception as e:
            logger.error(f"Error in on_agent_done hook: {e}", exc_info=True)

        # Emit the final aggregated result.
        yield AgentResponse(
            type="llm_result",
            data=AgentResponseData(chain=chain),
        )

    async def _execute_dashscope_request(self):
        """Core logic for executing a DashScope request."""
        prompt = self.req.prompt or ""
        session_id = self.req.session_id or "unknown"
        image_urls = self.req.image_urls or []
        contexts = self.req.contexts or []
        system_prompt = self.req.system_prompt

        # Image input is not supported by Bailian apps; warn and drop it.
        if image_urls:
            logger.warning("阿里云百炼暂不支持图片输入,将自动忽略图片内容。")

        # Build the request payload.
        payload = await self._build_request_payload(
            prompt, session_id, contexts, system_prompt
        )

        if not self.streaming:
            payload["incremental_output"] = False

        # Issue the blocking SDK call on a thread-pool executor.
        partial = functools.partial(Application.call, **payload)
        response = await asyncio.get_event_loop().run_in_executor(None, partial)

        async for resp in self._handle_streaming_response(response, session_id):
            yield resp

    @override
    def done(self) -> bool:
        """Return True once the agent has finished (successfully or not)."""
        return self._state in (AgentState.DONE, AgentState.ERROR)

    @override
    def get_final_llm_resp(self) -> LLMResponse | None:
        # None until a step has completed or failed.
        return self.final_llm_resp
|
||||
336
astrbot/core/agent/runners/dify/dify_agent_runner.py
Normal file
336
astrbot/core/agent/runners/dify/dify_agent_runner.py
Normal file
@@ -0,0 +1,336 @@
|
||||
import base64
|
||||
import os
|
||||
import sys
|
||||
import typing as T
|
||||
|
||||
import astrbot.core.message.components as Comp
|
||||
from astrbot.core import logger, sp
|
||||
from astrbot.core.message.message_event_result import MessageChain
|
||||
from astrbot.core.provider.entities import (
|
||||
LLMResponse,
|
||||
ProviderRequest,
|
||||
)
|
||||
from astrbot.core.utils.astrbot_path import get_astrbot_data_path
|
||||
from astrbot.core.utils.io import download_file
|
||||
|
||||
from ...hooks import BaseAgentRunHooks
|
||||
from ...response import AgentResponseData
|
||||
from ...run_context import ContextWrapper, TContext
|
||||
from ..base import AgentResponse, AgentState, BaseAgentRunner
|
||||
from .dify_api_client import DifyAPIClient
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
from typing import override
|
||||
else:
|
||||
from typing_extensions import override
|
||||
|
||||
|
||||
class DifyAgentRunner(BaseAgentRunner[TContext]):
    """Agent runner backed by the Dify platform (chat, agent, chatflow, workflow)."""

    @override
    async def reset(
        self,
        request: ProviderRequest,
        run_context: ContextWrapper[TContext],
        agent_hooks: BaseAgentRunHooks[TContext],
        provider_config: dict,
        **kwargs: T.Any,
    ) -> None:
        """Initialize runner state and create a fresh Dify API client.

        Args:
            request: The provider request to execute.
            run_context: Shared run context passed to lifecycle hooks.
            agent_hooks: Hooks fired at agent begin/done.
            provider_config: Provider settings (``dify_api_key``,
                ``dify_api_base``, ``dify_api_type``, output/input key names,
                ``variables``, ``timeout``).
            **kwargs: ``streaming`` (bool) selects streaming mode.
        """
        self.req = request
        self.streaming = kwargs.get("streaming", False)
        self.final_llm_resp = None
        self._state = AgentState.IDLE
        self.agent_hooks = agent_hooks
        self.run_context = run_context

        self.api_key = provider_config.get("dify_api_key", "")
        self.api_base = provider_config.get("dify_api_base", "https://api.dify.ai/v1")
        self.api_type = provider_config.get("dify_api_type", "chat")
        # Key under which workflow outputs carry the final answer.
        self.workflow_output_key = provider_config.get(
            "dify_workflow_output_key",
            "astrbot_wf_output",
        )
        # Input key used to pass the user's text query into a workflow.
        self.dify_query_input_key = provider_config.get(
            "dify_query_input_key",
            "astrbot_text_query",
        )
        self.variables: dict = provider_config.get("variables", {}) or {}
        self.timeout = provider_config.get("timeout", 60)
        if isinstance(self.timeout, str):
            self.timeout = int(self.timeout)

        self.api_client = DifyAPIClient(self.api_key, self.api_base)

    @override
    async def step(self):
        """Execute one step of the Dify agent.

        Fires ``on_agent_begin`` on the first step, runs the Dify request and
        relays its responses, and always closes the API client afterwards.
        """
        if not self.req:
            raise ValueError("Request is not set. Please call reset() first.")

        if self._state == AgentState.IDLE:
            try:
                await self.agent_hooks.on_agent_begin(self.run_context)
            except Exception as e:
                logger.error(f"Error in on_agent_begin hook: {e}", exc_info=True)

            # Start processing: transition to the RUNNING state.
            self._transition_state(AgentState.RUNNING)

        try:
            # Execute the Dify request and relay its results.
            async for response in self._execute_dify_request():
                yield response
        except Exception as e:
            logger.error(f"Dify 请求失败:{str(e)}")
            self._transition_state(AgentState.ERROR)
            self.final_llm_resp = LLMResponse(
                role="err", completion_text=f"Dify 请求失败:{str(e)}"
            )
            yield AgentResponse(
                type="err",
                data=AgentResponseData(
                    chain=MessageChain().message(f"Dify 请求失败:{str(e)}")
                ),
            )
        finally:
            # The client session is closed after the step, success or error.
            await self.api_client.close()

    @override
    async def step_until_done(
        self, max_step: int = 30
    ) -> T.AsyncGenerator[AgentResponse, None]:
        """Step repeatedly until the runner reports done.

        Note: ``max_step`` is accepted for interface compatibility but not
        enforced here — a single step drives the request to completion.
        """
        while not self.done():
            async for resp in self.step():
                yield resp

    async def _execute_dify_request(self):
        """Core logic for executing a Dify request (all API types)."""
        prompt = self.req.prompt or ""
        session_id = self.req.session_id or "unknown"
        image_urls = self.req.image_urls or []
        system_prompt = self.req.system_prompt

        conversation_id = await sp.get_async(
            scope="umo",
            scope_id=session_id,
            key="dify_conversation_id",
            default="",
        )
        result = ""

        # Upload any attached images; failures are logged and skipped.
        files_payload = []
        for image_url in image_urls:
            # image_url is a base64 string
            try:
                image_data = base64.b64decode(image_url)
                file_response = await self.api_client.file_upload(
                    file_data=image_data,
                    user=session_id,
                    mime_type="image/png",
                    file_name="image.png",
                )
                logger.debug(f"Dify 上传图片响应:{file_response}")
                if "id" not in file_response:
                    logger.warning(
                        f"上传图片后得到未知的 Dify 响应:{file_response},图片将忽略。"
                    )
                    continue
                files_payload.append(
                    {
                        "type": "image",
                        "transfer_method": "local_file",
                        "upload_file_id": file_response["id"],
                    }
                )
            except Exception as e:
                logger.warning(f"上传图片失败:{e}")
                continue

        # Merge static provider variables with per-session variables.
        payload_vars = self.variables.copy()
        session_var = await sp.get_async(
            scope="umo",
            scope_id=session_id,
            key="session_variables",
            default={},
        )
        payload_vars.update(session_var)
        payload_vars["system_prompt"] = system_prompt

        # Dispatch on the configured Dify API type.
        match self.api_type:
            case "chat" | "agent" | "chatflow":
                if not prompt:
                    prompt = "请描述这张图片。"

                async for chunk in self.api_client.chat_messages(
                    inputs={
                        **payload_vars,
                    },
                    query=prompt,
                    user=session_id,
                    conversation_id=conversation_id,
                    files=files_payload,
                    timeout=self.timeout,
                ):
                    logger.debug(f"dify resp chunk: {chunk}")
                    if chunk["event"] == "message" or chunk["event"] == "agent_message":
                        result += chunk["answer"]
                        # Persist the conversation ID the first time we see it.
                        if not conversation_id:
                            await sp.put_async(
                                scope="umo",
                                scope_id=session_id,
                                key="dify_conversation_id",
                                value=chunk["conversation_id"],
                            )
                            conversation_id = chunk["conversation_id"]

                        # In streaming mode, forward the incremental answer.
                        if self.streaming and chunk["answer"]:
                            yield AgentResponse(
                                type="streaming_delta",
                                data=AgentResponseData(
                                    chain=MessageChain().message(chunk["answer"])
                                ),
                            )
                    elif chunk["event"] == "message_end":
                        logger.debug("Dify message end")
                        break
                    elif chunk["event"] == "error":
                        logger.error(f"Dify 出现错误:{chunk}")
                        raise Exception(
                            f"Dify 出现错误 status: {chunk['status']} message: {chunk['message']}"
                        )

            case "workflow":
                async for chunk in self.api_client.workflow_run(
                    inputs={
                        self.dify_query_input_key: prompt,
                        "astrbot_session_id": session_id,
                        **payload_vars,
                    },
                    user=session_id,
                    files=files_payload,
                    timeout=self.timeout,
                ):
                    logger.debug(f"dify workflow resp chunk: {chunk}")
                    match chunk["event"]:
                        case "workflow_started":
                            logger.info(
                                f"Dify 工作流(ID: {chunk['workflow_run_id']})开始运行。"
                            )
                        case "node_finished":
                            logger.debug(
                                f"Dify 工作流节点(ID: {chunk['data']['node_id']} Title: {chunk['data'].get('title', '')})运行结束。"
                            )
                        case "text_chunk":
                            # In streaming mode, forward incremental text.
                            if self.streaming and chunk["data"]["text"]:
                                yield AgentResponse(
                                    type="streaming_delta",
                                    data=AgentResponseData(
                                        chain=MessageChain().message(
                                            chunk["data"]["text"]
                                        )
                                    ),
                                )
                        case "workflow_finished":
                            logger.info(
                                f"Dify 工作流(ID: {chunk['workflow_run_id']})运行结束"
                            )
                            logger.debug(f"Dify 工作流结果:{chunk}")
                            if chunk["data"]["error"]:
                                logger.error(
                                    f"Dify 工作流出现错误:{chunk['data']['error']}"
                                )
                                raise Exception(
                                    f"Dify 工作流出现错误:{chunk['data']['error']}"
                                )
                            if self.workflow_output_key not in chunk["data"]["outputs"]:
                                raise Exception(
                                    f"Dify 工作流的输出不包含指定的键名:{self.workflow_output_key}"
                                )
                            # Keep the final chunk; parsed in parse_dify_result.
                            result = chunk
            case _:
                raise Exception(f"未知的 Dify API 类型:{self.api_type}")

        if not result:
            logger.warning("Dify 请求结果为空,请查看 Debug 日志。")

        # Parse the accumulated result (str for chat, dict for workflow).
        chain = await self.parse_dify_result(result)

        # Build the final response and mark the run as done.
        self.final_llm_resp = LLMResponse(role="assistant", result_chain=chain)
        self._transition_state(AgentState.DONE)

        try:
            await self.agent_hooks.on_agent_done(self.run_context, self.final_llm_resp)
        except Exception as e:
            logger.error(f"Error in on_agent_done hook: {e}", exc_info=True)

        # Emit the final aggregated result.
        yield AgentResponse(
            type="llm_result",
            data=AgentResponseData(chain=chain),
        )

    async def parse_dify_result(self, chunk: dict | str) -> MessageChain:
        """Parse a Dify result into a MessageChain.

        Args:
            chunk: The accumulated answer string (chat modes) or the final
                ``workflow_finished`` event dict (workflow mode).

        Returns:
            MessageChain: The parsed message components.
        """
        if isinstance(chunk, str):
            # Chat-style result: plain text only.
            return MessageChain(chain=[Comp.Plain(chunk)])

        async def parse_file(item: dict):
            # Convert one Dify file descriptor into a message component.
            match item["type"]:
                case "image":
                    return Comp.Image(file=item["url"], url=item["url"])
                case "audio":
                    # Only wav is supported; download to the temp directory.
                    temp_dir = os.path.join(get_astrbot_data_path(), "temp")
                    path = os.path.join(temp_dir, f"{item['filename']}.wav")
                    await download_file(item["url"], path)
                    # NOTE(review): this returns an Image component and ignores
                    # the downloaded `path` — likely should be an audio/record
                    # component built from `path`; confirm intended behavior.
                    return Comp.Image(file=item["url"], url=item["url"])
                case "video":
                    return Comp.Video(file=item["url"])
                case _:
                    return Comp.File(name=item["filename"], file=item["url"])

        output = chunk["data"]["outputs"][self.workflow_output_key]
        chains = []
        if isinstance(output, str):
            # Plain-text output.
            chains.append(Comp.Plain(output))
        elif isinstance(output, list):
            # Mainly adapts the multimodal output of Dify HTTP-request nodes.
            for item in output:
                # handle Array[File]
                # NOTE(review): items that ARE Dify file objects are skipped
                # here — files are expected to arrive via
                # chunk["data"]["files"] below; confirm.
                if (
                    not isinstance(item, dict)
                    or item.get("dify_model_identity", "") != "__dify__file__"
                ):
                    chains.append(Comp.Plain(str(output)))
                    break
        else:
            # Any other type is stringified.
            chains.append(Comp.Plain(str(output)))

        # Attach any files reported alongside the outputs.
        files = chunk["data"].get("files", [])
        for item in files:
            comp = await parse_file(item)
            chains.append(comp)

        return MessageChain(chain=chains)

    @override
    def done(self) -> bool:
        """Return True once the agent has finished (successfully or not)."""
        return self._state in (AgentState.DONE, AgentState.ERROR)

    @override
    def get_final_llm_resp(self) -> LLMResponse | None:
        # None until a step has completed or failed.
        return self.final_llm_resp
|
||||
195
astrbot/core/agent/runners/dify/dify_api_client.py
Normal file
195
astrbot/core/agent/runners/dify/dify_api_client.py
Normal file
@@ -0,0 +1,195 @@
|
||||
import codecs
|
||||
import json
|
||||
from collections.abc import AsyncGenerator
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientResponse, ClientSession, FormData
|
||||
|
||||
from astrbot.core import logger
|
||||
|
||||
|
||||
async def _stream_sse(resp: ClientResponse) -> AsyncGenerator[dict, None]:
|
||||
decoder = codecs.getincrementaldecoder("utf-8")()
|
||||
buffer = ""
|
||||
async for chunk in resp.content.iter_chunked(8192):
|
||||
buffer += decoder.decode(chunk)
|
||||
while "\n\n" in buffer:
|
||||
block, buffer = buffer.split("\n\n", 1)
|
||||
if block.strip().startswith("data:"):
|
||||
try:
|
||||
yield json.loads(block[5:])
|
||||
except json.JSONDecodeError:
|
||||
logger.warning(f"Drop invalid dify json data: {block[5:]}")
|
||||
continue
|
||||
# flush any remaining text
|
||||
buffer += decoder.decode(b"", final=True)
|
||||
if buffer.strip().startswith("data:"):
|
||||
try:
|
||||
yield json.loads(buffer[5:])
|
||||
except json.JSONDecodeError:
|
||||
logger.warning(f"Drop invalid dify json data: {buffer[5:]}")
|
||||
|
||||
|
||||
class DifyAPIClient:
    """Async client for the Dify REST API.

    Covers chat messages, workflow runs, file upload, and conversation
    management. Streaming endpoints yield parsed SSE events as dicts.
    """

    def __init__(self, api_key: str, api_base: str = "https://api.dify.ai/v1"):
        """Create a client.

        Args:
            api_key: Dify API key, sent as a Bearer token on every request.
            api_base: Base URL of the Dify API.
        """
        self.api_key = api_key
        self.api_base = api_base
        # NOTE(review): ClientSession should be created inside a running
        # event loop — assumes callers construct this client from async code.
        self.session = ClientSession(trust_env=True)
        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
        }

    async def chat_messages(
        self,
        inputs: dict,
        query: str,
        user: str,
        response_mode: str = "streaming",
        conversation_id: str = "",
        files: list[dict[str, Any]] | None = None,
        timeout: float = 60,
    ) -> AsyncGenerator[dict[str, Any], None]:
        """POST /chat-messages and yield SSE events as dicts.

        Raises:
            Exception: if the HTTP status is not 200.
        """
        if files is None:
            files = []
        url = f"{self.api_base}/chat-messages"
        # Build the payload explicitly. The previous implementation used
        # locals(), which silently leaked the local `url` variable into the
        # request body (and would leak any future local as well).
        payload = {
            "inputs": inputs,
            "query": query,
            "user": user,
            "response_mode": response_mode,
            "conversation_id": conversation_id,
            "files": files,
        }
        logger.info(f"chat_messages payload: {payload}")
        async with self.session.post(
            url,
            json=payload,
            headers=self.headers,
            timeout=timeout,
        ) as resp:
            if resp.status != 200:
                text = await resp.text()
                raise Exception(
                    f"Dify /chat-messages 接口请求失败:{resp.status}. {text}",
                )
            async for event in _stream_sse(resp):
                yield event

    async def workflow_run(
        self,
        inputs: dict,
        user: str,
        response_mode: str = "streaming",
        files: list[dict[str, Any]] | None = None,
        timeout: float = 60,
    ):
        """POST /workflows/run and yield SSE events as dicts.

        Raises:
            Exception: if the HTTP status is not 200.
        """
        if files is None:
            files = []
        url = f"{self.api_base}/workflows/run"
        # Explicit payload for the same reason as chat_messages: locals()
        # would include `url`.
        payload = {
            "inputs": inputs,
            "user": user,
            "response_mode": response_mode,
            "files": files,
        }
        logger.info(f"workflow_run payload: {payload}")
        async with self.session.post(
            url,
            json=payload,
            headers=self.headers,
            timeout=timeout,
        ) as resp:
            if resp.status != 200:
                text = await resp.text()
                raise Exception(
                    f"Dify /workflows/run 接口请求失败:{resp.status}. {text}",
                )
            async for event in _stream_sse(resp):
                yield event

    async def file_upload(
        self,
        user: str,
        file_path: str | None = None,
        file_data: bytes | None = None,
        file_name: str | None = None,
        mime_type: str | None = None,
    ) -> dict[str, Any]:
        """Upload a file to Dify. Must provide either file_path or file_data.

        Args:
            user: The user ID.
            file_path: The path to the file to upload.
            file_data: The file data in bytes. Takes priority over file_path.
            file_name: Optional file name when using file_data.
            mime_type: Optional MIME type; defaults to application/octet-stream.

        Returns:
            A dictionary containing the uploaded file information.

        Raises:
            ValueError: if neither file_path nor file_data is given.
            Exception: if the HTTP status is not 200/201.
        """
        url = f"{self.api_base}/files/upload"

        form = FormData()
        form.add_field("user", user)

        if file_data is not None:
            # Upload from in-memory bytes.
            form.add_field(
                "file",
                file_data,
                filename=file_name or "uploaded_file",
                content_type=mime_type or "application/octet-stream",
            )
        elif file_path is not None:
            # Upload from a file on disk.
            import os

            with open(file_path, "rb") as f:
                file_content = f.read()
            form.add_field(
                "file",
                file_content,
                filename=os.path.basename(file_path),
                content_type=mime_type or "application/octet-stream",
            )
        else:
            raise ValueError("file_path 和 file_data 不能同时为 None")

        async with self.session.post(
            url,
            data=form,
            # No Content-Type here: aiohttp sets the multipart boundary itself.
            headers=self.headers,
        ) as resp:
            if resp.status != 200 and resp.status != 201:
                text = await resp.text()
                raise Exception(f"Dify 文件上传失败:{resp.status}. {text}")
            return await resp.json()  # {"id": "xxx", ...}

    async def close(self):
        """Close the underlying HTTP session."""
        await self.session.close()

    async def get_chat_convs(self, user: str, limit: int = 20):
        """GET /conversations — list conversations for a user."""
        url = f"{self.api_base}/conversations"
        payload = {
            "user": user,
            "limit": limit,
        }
        async with self.session.get(url, params=payload, headers=self.headers) as resp:
            return await resp.json()

    async def delete_chat_conv(self, user: str, conversation_id: str):
        """DELETE /conversations/:id — delete one conversation."""
        url = f"{self.api_base}/conversations/{conversation_id}"
        # NOTE(review): user is sent in the JSON body on DELETE, per Dify docs.
        payload = {
            "user": user,
        }
        async with self.session.delete(url, json=payload, headers=self.headers) as resp:
            return await resp.json()

    async def rename(
        self,
        conversation_id: str,
        name: str,
        user: str,
        auto_generate: bool = False,
    ):
        """POST /conversations/:conversation_id/name — rename a conversation.

        When auto_generate is True the server generates the title itself.
        """
        url = f"{self.api_base}/conversations/{conversation_id}/name"
        payload = {
            "user": user,
            "name": name,
            "auto_generate": auto_generate,
        }
        async with self.session.post(url, json=payload, headers=self.headers) as resp:
            return await resp.json()
|
||||
400
astrbot/core/agent/runners/tool_loop_agent_runner.py
Normal file
400
astrbot/core/agent/runners/tool_loop_agent_runner.py
Normal file
@@ -0,0 +1,400 @@
|
||||
import sys
|
||||
import traceback
|
||||
import typing as T
|
||||
|
||||
from mcp.types import (
|
||||
BlobResourceContents,
|
||||
CallToolResult,
|
||||
EmbeddedResource,
|
||||
ImageContent,
|
||||
TextContent,
|
||||
TextResourceContents,
|
||||
)
|
||||
|
||||
from astrbot import logger
|
||||
from astrbot.core.message.message_event_result import (
|
||||
MessageChain,
|
||||
)
|
||||
from astrbot.core.provider.entities import (
|
||||
LLMResponse,
|
||||
ProviderRequest,
|
||||
ToolCallsResult,
|
||||
)
|
||||
from astrbot.core.provider.provider import Provider
|
||||
|
||||
from ..hooks import BaseAgentRunHooks
|
||||
from ..message import AssistantMessageSegment, Message, ToolCallMessageSegment
|
||||
from ..response import AgentResponseData
|
||||
from ..run_context import ContextWrapper, TContext
|
||||
from ..tool_executor import BaseFunctionToolExecutor
|
||||
from .base import AgentResponse, AgentState, BaseAgentRunner
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
from typing import override
|
||||
else:
|
||||
from typing_extensions import override
|
||||
|
||||
|
||||
class ToolLoopAgentRunner(BaseAgentRunner[TContext]):
    """Agent runner implementing a tool-call loop.

    Each step sends the accumulated request to the provider; if the LLM
    response contains tool calls they are executed and their results appended
    to the request, so the next step continues the conversation — until a
    response without tool calls arrives (DONE) or an error occurs (ERROR).
    """

    @override
    async def reset(
        self,
        provider: Provider,
        request: ProviderRequest,
        run_context: ContextWrapper[TContext],
        tool_executor: BaseFunctionToolExecutor[TContext],
        agent_hooks: BaseAgentRunHooks[TContext],
        **kwargs: T.Any,
    ) -> None:
        # Reset all per-run state. Recognized kwargs: streaming (bool).
        self.req = request
        self.streaming = kwargs.get("streaming", False)
        self.provider = provider
        self.final_llm_resp = None
        self._state = AgentState.IDLE
        self.tool_executor = tool_executor
        self.agent_hooks = agent_hooks
        self.run_context = run_context

        messages = []
        # append existing messages in the run context
        for msg in request.contexts:
            messages.append(Message.model_validate(msg))
        if request.prompt is not None:
            m = await request.assemble_context()
            messages.append(Message.model_validate(m))
        if request.system_prompt:
            # The system prompt always goes first in the message list.
            messages.insert(
                0,
                Message(role="system", content=request.system_prompt),
            )
        self.run_context.messages = messages

    async def _iter_llm_responses(self) -> T.AsyncGenerator[LLMResponse, None]:
        """Yields chunks *and* a final LLMResponse."""
        if self.streaming:
            stream = self.provider.text_chat_stream(**self.req.__dict__)
            async for resp in stream:  # type: ignore
                yield resp
        else:
            # Non-streaming: a single final response.
            yield await self.provider.text_chat(**self.req.__dict__)

    @override
    async def step(self):
        """Process a single step of the agent.
        This method should return the result of the step.
        """
        if not self.req:
            raise ValueError("Request is not set. Please call reset() first.")

        if self._state == AgentState.IDLE:
            try:
                await self.agent_hooks.on_agent_begin(self.run_context)
            except Exception as e:
                logger.error(f"Error in on_agent_begin hook: {e}", exc_info=True)

            # Start processing: transition to the running state.
            self._transition_state(AgentState.RUNNING)
        llm_resp_result = None

        async for llm_response in self._iter_llm_responses():
            assert isinstance(llm_response, LLMResponse)
            if llm_response.is_chunk:
                # Streaming chunk: forward as a delta, then keep reading.
                if llm_response.result_chain:
                    yield AgentResponse(
                        type="streaming_delta",
                        data=AgentResponseData(chain=llm_response.result_chain),
                    )
                elif llm_response.completion_text:
                    yield AgentResponse(
                        type="streaming_delta",
                        data=AgentResponseData(
                            chain=MessageChain().message(llm_response.completion_text),
                        ),
                    )
                elif llm_response.reasoning_content:
                    # Reasoning deltas are tagged so the caller can filter them.
                    yield AgentResponse(
                        type="streaming_delta",
                        data=AgentResponseData(
                            chain=MessageChain(type="reasoning").message(
                                llm_response.reasoning_content,
                            ),
                        ),
                    )
                continue
            llm_resp_result = llm_response
            break  # got final response

        if not llm_resp_result:
            return

        # Process the final LLM response.
        llm_resp = llm_resp_result

        if llm_resp.role == "err":
            # The LLM reported an error: transition to the ERROR state.
            # NOTE(review): execution falls through after this branch (no
            # return), so the no-tool-calls path below still runs — confirm
            # this is intentional.
            self.final_llm_resp = llm_resp
            self._transition_state(AgentState.ERROR)
            yield AgentResponse(
                type="err",
                data=AgentResponseData(
                    chain=MessageChain().message(
                        f"LLM 响应错误: {llm_resp.completion_text or '未知错误'}",
                    ),
                ),
            )

        if not llm_resp.tools_call_name:
            # No tool calls requested: transition to DONE.
            self.final_llm_resp = llm_resp
            self._transition_state(AgentState.DONE)
            # record the final assistant message
            self.run_context.messages.append(
                Message(
                    role="assistant",
                    content=llm_resp.completion_text or "",
                ),
            )
            try:
                await self.agent_hooks.on_agent_done(self.run_context, llm_resp)
            except Exception as e:
                logger.error(f"Error in on_agent_done hook: {e}", exc_info=True)

            # Emit the LLM result to the caller.
            if llm_resp.result_chain:
                yield AgentResponse(
                    type="llm_result",
                    data=AgentResponseData(chain=llm_resp.result_chain),
                )
            elif llm_resp.completion_text:
                yield AgentResponse(
                    type="llm_result",
                    data=AgentResponseData(
                        chain=MessageChain().message(llm_resp.completion_text),
                    ),
                )

        # If there are tool calls, execute them and record their results.
        if llm_resp.tools_call_name:
            tool_call_result_blocks = []
            # Announce each tool call before executing any of them.
            for tool_call_name in llm_resp.tools_call_name:
                yield AgentResponse(
                    type="tool_call",
                    data=AgentResponseData(
                        chain=MessageChain(type="tool_call").message(
                            f"🔨 调用工具: {tool_call_name}"
                        ),
                    ),
                )
            async for result in self._handle_function_tools(self.req, llm_resp):
                if isinstance(result, list):
                    # Final list of tool-result segments for the context.
                    tool_call_result_blocks = result
                elif isinstance(result, MessageChain):
                    result.type = "tool_call_result"
                    yield AgentResponse(
                        type="tool_call_result",
                        data=AgentResponseData(chain=result),
                    )
            # Add the tool results to the conversation context.
            tool_calls_result = ToolCallsResult(
                tool_calls_info=AssistantMessageSegment(
                    tool_calls=llm_resp.to_openai_to_calls_model(),
                    content=llm_resp.completion_text,
                ),
                tool_calls_result=tool_call_result_blocks,
            )
            # record the assistant message with tool calls
            self.run_context.messages.extend(
                tool_calls_result.to_openai_messages_model()
            )

            self.req.append_tool_calls_result(tool_calls_result)

    async def step_until_done(
        self, max_step: int
    ) -> T.AsyncGenerator[AgentResponse, None]:
        """Process steps until the agent is done."""
        step_count = 0
        while not self.done() and step_count < max_step:
            step_count += 1
            async for resp in self.step():
                yield resp

    async def _handle_function_tools(
        self,
        req: ProviderRequest,
        llm_response: LLMResponse,
    ) -> T.AsyncGenerator[MessageChain | list[ToolCallMessageSegment], None]:
        """Execute the function tool calls requested by the LLM.

        Yields MessageChain objects for user-visible tool output, and finally
        a list[ToolCallMessageSegment] with all tool results for the context.
        """
        tool_call_result_blocks: list[ToolCallMessageSegment] = []
        logger.info(f"Agent 使用工具: {llm_response.tools_call_name}")

        # Execute each requested tool call in order.
        for func_tool_name, func_tool_args, func_tool_id in zip(
            llm_response.tools_call_name,
            llm_response.tools_call_args,
            llm_response.tools_call_ids,
        ):
            try:
                if not req.func_tool:
                    return
                func_tool = req.func_tool.get_func(func_tool_name)
                logger.info(f"使用工具:{func_tool_name},参数:{func_tool_args}")

                if not func_tool:
                    logger.warning(f"未找到指定的工具: {func_tool_name},将跳过。")
                    tool_call_result_blocks.append(
                        ToolCallMessageSegment(
                            role="tool",
                            tool_call_id=func_tool_id,
                            content=f"error: 未找到工具 {func_tool_name}",
                        ),
                    )
                    continue

                valid_params = {}  # argument filtering: pass only the params the tool declares

                # Tools with a local handler declare their parameter schema.
                if func_tool.handler:
                    logger.debug(
                        f"工具 {func_tool_name} 期望的参数: {func_tool.parameters}",
                    )
                    if func_tool.parameters and func_tool.parameters.get("properties"):
                        expected_params = set(func_tool.parameters["properties"].keys())

                        valid_params = {
                            k: v
                            for k, v in func_tool_args.items()
                            if k in expected_params
                        }

                        # Log any arguments the LLM supplied but the tool does not expect.
                        ignored_params = set(func_tool_args.keys()) - set(
                            valid_params.keys(),
                        )
                        if ignored_params:
                            logger.warning(
                                f"工具 {func_tool_name} 忽略非期望参数: {ignored_params}",
                            )
                else:
                    # No handler (e.g. an MCP tool): pass all arguments through.
                    valid_params = func_tool_args

                try:
                    await self.agent_hooks.on_tool_start(
                        self.run_context,
                        func_tool,
                        valid_params,
                    )
                except Exception as e:
                    logger.error(f"Error in on_tool_start hook: {e}", exc_info=True)

                executor = self.tool_executor.execute(
                    tool=func_tool,
                    run_context=self.run_context,
                    **valid_params,  # only the validated arguments
                )

                _final_resp: CallToolResult | None = None
                async for resp in executor:  # type: ignore
                    if isinstance(resp, CallToolResult):
                        res = resp
                        _final_resp = resp
                        # NOTE(review): res.content[0] assumes a non-empty
                        # content list — confirm executors guarantee this.
                        if isinstance(res.content[0], TextContent):
                            tool_call_result_blocks.append(
                                ToolCallMessageSegment(
                                    role="tool",
                                    tool_call_id=func_tool_id,
                                    content=res.content[0].text,
                                ),
                            )
                            yield MessageChain().message(res.content[0].text)
                        elif isinstance(res.content[0], ImageContent):
                            tool_call_result_blocks.append(
                                ToolCallMessageSegment(
                                    role="tool",
                                    tool_call_id=func_tool_id,
                                    content="返回了图片(已直接发送给用户)",
                                ),
                            )
                            yield MessageChain(type="tool_direct_result").base64_image(
                                res.content[0].data,
                            )
                        elif isinstance(res.content[0], EmbeddedResource):
                            resource = res.content[0].resource
                            if isinstance(resource, TextResourceContents):
                                tool_call_result_blocks.append(
                                    ToolCallMessageSegment(
                                        role="tool",
                                        tool_call_id=func_tool_id,
                                        content=resource.text,
                                    ),
                                )
                                yield MessageChain().message(resource.text)
                            elif (
                                isinstance(resource, BlobResourceContents)
                                and resource.mimeType
                                and resource.mimeType.startswith("image/")
                            ):
                                tool_call_result_blocks.append(
                                    ToolCallMessageSegment(
                                        role="tool",
                                        tool_call_id=func_tool_id,
                                        content="返回了图片(已直接发送给用户)",
                                    ),
                                )
                                yield MessageChain(
                                    type="tool_direct_result",
                                ).base64_image(resource.blob)
                            else:
                                tool_call_result_blocks.append(
                                    ToolCallMessageSegment(
                                        role="tool",
                                        tool_call_id=func_tool_id,
                                        content="返回的数据类型不受支持",
                                    ),
                                )
                                yield MessageChain().message("返回的数据类型不受支持。")

                    elif resp is None:
                        # The tool asked to send its output directly to the
                        # user, so we end the agent loop here; the actual
                        # sending is handled inside the ToolExecutor.
                        logger.warning(
                            f"{func_tool_name} 没有没有返回值或者将结果直接发送给用户,此工具调用不会被记录到历史中。"
                        )
                        self._transition_state(AgentState.DONE)
                    else:
                        # No other yield types are expected from executors.
                        logger.warning(
                            f"Tool 返回了不支持的类型: {type(resp)},将忽略。",
                        )

                try:
                    await self.agent_hooks.on_tool_end(
                        self.run_context,
                        func_tool,
                        func_tool_args,
                        _final_resp,
                    )
                except Exception as e:
                    logger.error(f"Error in on_tool_end hook: {e}", exc_info=True)
            except Exception as e:
                # A failing tool is recorded as an error result, not raised,
                # so the LLM can react to the failure.
                logger.warning(traceback.format_exc())
                tool_call_result_blocks.append(
                    ToolCallMessageSegment(
                        role="tool",
                        tool_call_id=func_tool_id,
                        content=f"error: {e!s}",
                    ),
                )

        # Finally hand back all collected tool results as one list.
        if tool_call_result_blocks:
            yield tool_call_result_blocks

    def done(self) -> bool:
        """Return True once the agent has finished (DONE or ERROR state)."""
        return self._state in (AgentState.DONE, AgentState.ERROR)

    def get_final_llm_resp(self) -> LLMResponse | None:
        # The last non-chunk LLM response, or None if none was produced yet.
        return self.final_llm_resp
|
||||
285
astrbot/core/agent/tool.py
Normal file
285
astrbot/core/agent/tool.py
Normal file
@@ -0,0 +1,285 @@
|
||||
from collections.abc import Awaitable, Callable
|
||||
from typing import Any, Generic
|
||||
|
||||
import jsonschema
|
||||
import mcp
|
||||
from deprecated import deprecated
|
||||
from pydantic import Field, model_validator
|
||||
from pydantic.dataclasses import dataclass
|
||||
|
||||
from .run_context import ContextWrapper, TContext
|
||||
|
||||
ParametersType = dict[str, Any]
|
||||
ToolExecResult = str | mcp.types.CallToolResult
|
||||
|
||||
|
||||
@dataclass
class ToolSchema:
    """Declarative description of a tool exposed for function calling."""

    name: str
    """The tool's name."""

    description: str
    """Human-readable description of what the tool does."""

    parameters: ParametersType
    """Tool parameters expressed as a JSON Schema object."""

    @model_validator(mode="after")
    def validate_parameters(self) -> "ToolSchema":
        # Reject construction when `parameters` is not itself a valid JSON
        # Schema (checked against the Draft 2020-12 meta-schema).
        meta_schema = jsonschema.Draft202012Validator.META_SCHEMA
        jsonschema.validate(self.parameters, meta_schema)
        return self
|
||||
|
||||
|
||||
@dataclass
class FunctionTool(ToolSchema, Generic[TContext]):
    """A tool that can actually be invoked during function calling."""

    handler: Callable[..., Awaitable[Any]] | None = None
    """Async callable implementing the tool's behavior, if any."""

    handler_module_path: str | None = None
    """
    Module path of the handler function (empty for MCP-origin tools).
    Kept explicitly because the handler gets wrapped in functools.partial at
    initialization time, which makes the handler's __module__ read 'functools'.
    """
    active: bool = True
    """
    AstrBot-specific flag marking whether the tool is enabled.
    Safe to ignore when integrating with other frameworks.
    """

    def __repr__(self):
        # Keep the exact legacy repr format for log/debug compatibility.
        return (
            f"FuncTool(name={self.name}, "
            f"parameters={self.parameters}, "
            f"description={self.description})"
        )

    async def call(self, context: ContextWrapper[TContext], **kwargs) -> ToolExecResult:
        """Run the tool with the given arguments. The handler field has priority."""
        # Base class provides no implementation: either subclass and override
        # call(), or supply a handler.
        raise NotImplementedError(
            "FunctionTool.call() must be implemented by subclasses or set a handler."
        )
|
||||
|
||||
|
||||
@dataclass
class ToolSet:
    """A set of function tools that can be used in function calling.

    This class provides methods to add, remove, and retrieve tools, as well as
    convert the tools to different API formats (OpenAI, Anthropic, Google GenAI).
    """

    tools: list[FunctionTool] = Field(default_factory=list)

    def empty(self) -> bool:
        """Check if the tool set is empty."""
        return len(self.tools) == 0

    def add_tool(self, tool: FunctionTool):
        """Add a tool to the set."""
        # If a tool with the same name already exists, replace it in place.
        for i, existing_tool in enumerate(self.tools):
            if existing_tool.name == tool.name:
                self.tools[i] = tool
                return
        self.tools.append(tool)

    def remove_tool(self, name: str):
        """Remove a tool by its name."""
        self.tools = [tool for tool in self.tools if tool.name != name]

    def get_tool(self, name: str) -> FunctionTool | None:
        """Get a tool by its name."""
        for tool in self.tools:
            if tool.name == name:
                return tool
        return None

    @deprecated(reason="Use add_tool() instead", version="4.0.0")
    def add_func(
        self,
        name: str,
        func_args: list,
        desc: str,
        handler: Callable[..., Awaitable[Any]],
    ):
        """Add a function tool to the set.

        Legacy API: func_args is a list of dicts with 'name', 'type' and
        'description' keys, converted here into a JSON Schema object.
        """
        params = {
            "type": "object",  # hard-coded here
            "properties": {},
        }
        for param in func_args:
            params["properties"][param["name"]] = {
                "type": param["type"],
                "description": param["description"],
            }
        _func = FunctionTool(
            name=name,
            parameters=params,
            description=desc,
            handler=handler,
        )
        self.add_tool(_func)

    @deprecated(reason="Use remove_tool() instead", version="4.0.0")
    def remove_func(self, name: str):
        """Remove a function tool by its name."""
        self.remove_tool(name)

    @deprecated(reason="Use get_tool() instead", version="4.0.0")
    def get_func(self, name: str) -> FunctionTool | None:
        """Get a function tool by its name (deprecated alias of get_tool)."""
        return self.get_tool(name)

    @property
    def func_list(self) -> list[FunctionTool]:
        """Get the list of function tools."""
        return self.tools

    def openai_schema(self, omit_empty_parameter_field: bool = False) -> list[dict]:
        """Convert tools to OpenAI API function calling schema format.

        Args:
            omit_empty_parameter_field: when True, tools without declared
                properties get no "parameters" key at all.
        """
        result = []
        for tool in self.tools:
            func_def = {
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description,
                },
            }

            # Attach parameters unless they are empty and omission was requested.
            if (
                tool.parameters and tool.parameters.get("properties")
            ) or not omit_empty_parameter_field:
                func_def["function"]["parameters"] = tool.parameters

            result.append(func_def)
        return result

    def anthropic_schema(self) -> list[dict]:
        """Convert tools to Anthropic API format."""
        result = []
        for tool in self.tools:
            # Anthropic expects an "input_schema" object per tool.
            input_schema = {"type": "object"}
            if tool.parameters:
                input_schema["properties"] = tool.parameters.get("properties", {})
                input_schema["required"] = tool.parameters.get("required", [])
            tool_def = {
                "name": tool.name,
                "description": tool.description,
                "input_schema": input_schema,
            }
            result.append(tool_def)
        return result

    def google_schema(self) -> dict:
        """Convert tools to Google GenAI API format."""

        def convert_schema(schema: dict) -> dict:
            """Convert schema to Gemini API format.

            Recursively rewrites a JSON Schema fragment, keeping only the
            types, formats and fields the Gemini API accepts.
            """
            supported_types = {
                "string",
                "number",
                "integer",
                "boolean",
                "array",
                "object",
                "null",
            }
            # Formats allowed per type by the Gemini API.
            supported_formats = {
                "string": {"enum", "date-time"},
                "integer": {"int32", "int64"},
                "number": {"float", "double"},
            }

            # Union types recurse into each alternative.
            if "anyOf" in schema:
                return {"anyOf": [convert_schema(s) for s in schema["anyOf"]]}

            result = {}

            if "type" in schema and schema["type"] in supported_types:
                result["type"] = schema["type"]
                # Keep "format" only when valid for the resolved type.
                if "format" in schema and schema["format"] in supported_formats.get(
                    result["type"],
                    set(),
                ):
                    result["format"] = schema["format"]
            else:
                # Unsupported or missing type falls back to "null".
                result["type"] = "null"

            # Pass through the subset of generic fields Gemini understands.
            support_fields = {
                "title",
                "description",
                "enum",
                "minimum",
                "maximum",
                "maxItems",
                "minItems",
                "nullable",
                "required",
            }
            result.update({k: schema[k] for k in support_fields if k in schema})

            if "properties" in schema:
                properties = {}
                for key, value in schema["properties"].items():
                    prop_value = convert_schema(value)
                    # Gemini rejects "default" in property schemas.
                    if "default" in prop_value:
                        del prop_value["default"]
                    properties[key] = prop_value

                if properties:
                    result["properties"] = properties

            if "items" in schema:
                result["items"] = convert_schema(schema["items"])

            return result

        tools = []
        for tool in self.tools:
            d: dict[str, Any] = {
                "name": tool.name,
                "description": tool.description,
            }
            if tool.parameters:
                d["parameters"] = convert_schema(tool.parameters)
            tools.append(d)

        declarations = {}
        if tools:
            declarations["function_declarations"] = tools
        return declarations

    @deprecated(reason="Use openai_schema() instead", version="4.0.0")
    def get_func_desc_openai_style(self, omit_empty_parameter_field: bool = False):
        return self.openai_schema(omit_empty_parameter_field)

    @deprecated(reason="Use anthropic_schema() instead", version="4.0.0")
    def get_func_desc_anthropic_style(self):
        return self.anthropic_schema()

    @deprecated(reason="Use google_schema() instead", version="4.0.0")
    def get_func_desc_google_genai_style(self):
        return self.google_schema()

    def names(self) -> list[str]:
        """Return the names of all tools in the set."""
        return [tool.name for tool in self.tools]

    def __len__(self):
        return len(self.tools)

    def __bool__(self):
        # A ToolSet is truthy iff it contains at least one tool.
        return len(self.tools) > 0

    def __iter__(self):
        return iter(self.tools)

    def __repr__(self):
        return f"ToolSet(tools={self.tools})"

    def __str__(self):
        return f"ToolSet(tools={self.tools})"
|
||||
17
astrbot/core/agent/tool_executor.py
Normal file
17
astrbot/core/agent/tool_executor.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from collections.abc import AsyncGenerator
|
||||
from typing import Any, Generic
|
||||
|
||||
import mcp
|
||||
|
||||
from .run_context import ContextWrapper, TContext
|
||||
from .tool import FunctionTool
|
||||
|
||||
|
||||
# Abstract interface for tool executors. Implementations run a FunctionTool
# within a run context and yield intermediate values and/or a final
# mcp.types.CallToolResult.
class BaseFunctionToolExecutor(Generic[TContext]):
    @classmethod
    async def execute(
        cls,
        tool: FunctionTool,
        run_context: ContextWrapper[TContext],
        **tool_args,
    ) -> AsyncGenerator[Any | mcp.types.CallToolResult, None]: ...
|
||||
19
astrbot/core/astr_agent_context.py
Normal file
19
astrbot/core/astr_agent_context.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from pydantic import Field
|
||||
from pydantic.dataclasses import dataclass
|
||||
|
||||
from astrbot.core.agent.run_context import ContextWrapper
|
||||
from astrbot.core.platform.astr_message_event import AstrMessageEvent
|
||||
from astrbot.core.star.context import Context
|
||||
|
||||
|
||||
# arbitrary_types_allowed: Context and AstrMessageEvent are not pydantic models.
@dataclass(config={"arbitrary_types_allowed": True})
class AstrAgentContext:
    """Per-run context handed to the agent runner and its hooks."""

    context: Context
    """The star context instance"""
    event: AstrMessageEvent
    """The message event associated with the agent context."""
    extra: dict[str, str] = Field(default_factory=dict)
    """Customized extra data."""


# Convenience alias: the run-context wrapper specialized for AstrBot agents.
AgentContextWrapper = ContextWrapper[AstrAgentContext]
|
||||
36
astrbot/core/astr_agent_hooks.py
Normal file
36
astrbot/core/astr_agent_hooks.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from typing import Any
|
||||
|
||||
from mcp.types import CallToolResult
|
||||
|
||||
from astrbot.core.agent.hooks import BaseAgentRunHooks
|
||||
from astrbot.core.agent.run_context import ContextWrapper
|
||||
from astrbot.core.agent.tool import FunctionTool
|
||||
from astrbot.core.astr_agent_context import AstrAgentContext
|
||||
from astrbot.core.pipeline.context_utils import call_event_hook
|
||||
from astrbot.core.star.star_handler import EventType
|
||||
|
||||
|
||||
class MainAgentHooks(BaseAgentRunHooks[AstrAgentContext]):
    """Hooks wired into AstrBot's main agent pipeline."""

    async def on_agent_done(self, run_context, llm_response):
        # Fire the OnLLMResponseEvent hook so plugins can react to the
        # final LLM response.
        await call_event_hook(
            run_context.context.event,
            EventType.OnLLMResponseEvent,
            llm_response,
        )

    async def on_tool_end(
        self,
        run_context: ContextWrapper[AstrAgentContext],
        tool: FunctionTool[Any],
        tool_args: dict | None,
        tool_result: CallToolResult | None,
    ):
        # Clear any event result a tool handler may have set, so it does not
        # leak into the next pipeline stage.
        run_context.context.event.clear_result()
|
||||
|
||||
|
||||
class EmptyAgentHooks(BaseAgentRunHooks[AstrAgentContext]):
    """No-op hooks: inherits all default (empty) hook behavior."""

    pass


# Shared singleton used by the main pipeline.
MAIN_AGENT_HOOKS = MainAgentHooks()
||||
80
astrbot/core/astr_agent_run_util.py
Normal file
80
astrbot/core/astr_agent_run_util.py
Normal file
@@ -0,0 +1,80 @@
|
||||
import traceback
|
||||
from collections.abc import AsyncGenerator
|
||||
|
||||
from astrbot.core import logger
|
||||
from astrbot.core.agent.runners.tool_loop_agent_runner import ToolLoopAgentRunner
|
||||
from astrbot.core.astr_agent_context import AstrAgentContext
|
||||
from astrbot.core.message.message_event_result import (
|
||||
MessageChain,
|
||||
MessageEventResult,
|
||||
ResultContentType,
|
||||
)
|
||||
|
||||
# The concrete runner type used by AstrBot's main pipeline.
AgentRunner = ToolLoopAgentRunner[AstrAgentContext]


async def run_agent(
    agent_runner: AgentRunner,
    max_step: int = 30,
    show_tool_use: bool = True,
    stream_to_general: bool = False,
    show_reasoning: bool = False,
) -> AsyncGenerator[MessageChain | None, None]:
    """Drive an agent runner to completion, translating its responses into
    platform sends / event results.

    Yields MessageChain deltas in streaming mode; yields None as a signal
    that a complete (non-streaming) result has been set on the event and
    should be flushed by the caller.

    Args:
        agent_runner: A runner already prepared via reset().
        max_step: Upper bound on agent steps (tool-call rounds).
        show_tool_use: Send a "calling tool X" notice to the user.
        stream_to_general: Collapse streaming deltas into whole results.
        show_reasoning: Forward reasoning-content deltas to the caller.
    """
    step_idx = 0
    astr_event = agent_runner.run_context.context.event
    while step_idx < max_step:
        step_idx += 1
        try:
            async for resp in agent_runner.step():
                if astr_event.is_stopped():
                    return
                if resp.type == "tool_call_result":
                    msg_chain = resp.data["chain"]
                    if msg_chain.type == "tool_direct_result":
                        # tool_direct_result marks LLM-tool content that must
                        # be sent straight to the user.
                        await astr_event.send(resp.data["chain"])
                        continue
                    # Other tool results are not surfaced here (for now).
                    continue
                elif resp.type == "tool_call":
                    if agent_runner.streaming:
                        # Marks a section break in the streaming response.
                        yield MessageChain(chain=[], type="break")
                    if show_tool_use:
                        await astr_event.send(resp.data["chain"])
                    continue

                if stream_to_general and resp.type == "streaming_delta":
                    # Deltas are dropped; only whole results are emitted below.
                    continue

                if stream_to_general or not agent_runner.streaming:
                    content_typ = (
                        ResultContentType.LLM_RESULT
                        if resp.type == "llm_result"
                        else ResultContentType.GENERAL_RESULT
                    )
                    astr_event.set_result(
                        MessageEventResult(
                            chain=resp.data["chain"].chain,
                            result_content_type=content_typ,
                        ),
                    )
                    # Bare yield (None) tells the caller to flush the result
                    # just set on the event.
                    yield
                    astr_event.clear_result()
                elif resp.type == "streaming_delta":
                    chain = resp.data["chain"]
                    if chain.type == "reasoning" and not show_reasoning:
                        # display the reasoning content only when configured
                        continue
                    yield resp.data["chain"]  # MessageChain
            if agent_runner.done():
                break

        except Exception as e:
            # Surface the failure to the user instead of crashing the pipeline.
            logger.error(traceback.format_exc())
            err_msg = f"\n\nAstrBot 请求失败。\n错误类型: {type(e).__name__}\n错误信息: {e!s}\n\n请在控制台查看和分享错误详情。\n"
            if agent_runner.streaming:
                yield MessageChain().message(err_msg)
            else:
                astr_event.set_result(MessageEventResult().message(err_msg))
            return
|
||||
246
astrbot/core/astr_agent_tool_exec.py
Normal file
246
astrbot/core/astr_agent_tool_exec.py
Normal file
@@ -0,0 +1,246 @@
|
||||
import asyncio
|
||||
import inspect
|
||||
import traceback
|
||||
import typing as T
|
||||
|
||||
import mcp
|
||||
|
||||
from astrbot import logger
|
||||
from astrbot.core.agent.handoff import HandoffTool
|
||||
from astrbot.core.agent.mcp_client import MCPTool
|
||||
from astrbot.core.agent.run_context import ContextWrapper
|
||||
from astrbot.core.agent.tool import FunctionTool, ToolSet
|
||||
from astrbot.core.agent.tool_executor import BaseFunctionToolExecutor
|
||||
from astrbot.core.astr_agent_context import AstrAgentContext
|
||||
from astrbot.core.message.message_event_result import (
|
||||
CommandResult,
|
||||
MessageChain,
|
||||
MessageEventResult,
|
||||
)
|
||||
from astrbot.core.provider.register import llm_tools
|
||||
|
||||
|
||||
class FunctionToolExecutor(BaseFunctionToolExecutor[AstrAgentContext]):
|
||||
    @classmethod
    async def execute(cls, tool, run_context, **tool_args):
        """Execute a function tool call, dispatching on the tool's kind.

        Args:
            tool: The FunctionTool to run (HandoffTool, MCPTool, or a local
                tool with a handler / overridden call()).
            run_context: The agent run context wrapper.
            **tool_args: Arguments for the tool call.

        Yields:
            None | mcp.types.CallToolResult

        """
        if isinstance(tool, HandoffTool):
            # Handoff: delegate the request to a sub-agent.
            async for r in cls._execute_handoff(tool, run_context, **tool_args):
                yield r
            return

        elif isinstance(tool, MCPTool):
            # MCP tool: call through the MCP client.
            async for r in cls._execute_mcp(tool, run_context, **tool_args):
                yield r
            return

        else:
            # Plain local tool.
            async for r in cls._execute_local(tool, run_context, **tool_args):
                yield r
            return
|
||||
|
||||
    @classmethod
    async def _execute_handoff(
        cls,
        tool: HandoffTool,
        run_context: ContextWrapper[AstrAgentContext],
        **tool_args,
    ):
        """Run a handoff: spin up a sub-agent with the tool's agent config and
        yield its final answer as a CallToolResult."""
        input_ = tool_args.get("input")

        # make toolset for the agent
        tools = tool.agent.tools
        if tools:
            toolset = ToolSet()
            for t in tools:
                if isinstance(t, str):
                    # Tool referenced by name: resolve from the global registry;
                    # unknown names are silently skipped.
                    _t = llm_tools.get_func(t)
                    if _t:
                        toolset.add_tool(_t)
                elif isinstance(t, FunctionTool):
                    toolset.add_tool(t)
        else:
            toolset = None

        ctx = run_context.context.context
        event = run_context.context.event
        umo = event.unified_msg_origin
        # The sub-agent runs on the same chat provider as the current session.
        prov_id = await ctx.get_current_chat_provider_id(umo)
        llm_resp = await ctx.tool_loop_agent(
            event=event,
            chat_provider_id=prov_id,
            prompt=input_,
            system_prompt=tool.agent.instructions,
            tools=toolset,
            max_steps=30,
            run_hooks=tool.agent.run_hooks,
        )
        # NOTE(review): assumes llm_resp.completion_text is not None — confirm
        # tool_loop_agent always returns a textual completion.
        yield mcp.types.CallToolResult(
            content=[mcp.types.TextContent(type="text", text=llm_resp.completion_text)]
        )
|
||||
|
||||
@classmethod
|
||||
async def _execute_local(
|
||||
cls,
|
||||
tool: FunctionTool,
|
||||
run_context: ContextWrapper[AstrAgentContext],
|
||||
**tool_args,
|
||||
):
|
||||
event = run_context.context.event
|
||||
if not event:
|
||||
raise ValueError("Event must be provided for local function tools.")
|
||||
|
||||
is_override_call = False
|
||||
for ty in type(tool).mro():
|
||||
if "call" in ty.__dict__ and ty.__dict__["call"] is not FunctionTool.call:
|
||||
is_override_call = True
|
||||
break
|
||||
|
||||
# 检查 tool 下有没有 run 方法
|
||||
if not tool.handler and not hasattr(tool, "run") and not is_override_call:
|
||||
raise ValueError("Tool must have a valid handler or override 'run' method.")
|
||||
|
||||
awaitable = None
|
||||
method_name = ""
|
||||
if tool.handler:
|
||||
awaitable = tool.handler
|
||||
method_name = "decorator_handler"
|
||||
elif is_override_call:
|
||||
awaitable = tool.call
|
||||
method_name = "call"
|
||||
elif hasattr(tool, "run"):
|
||||
awaitable = getattr(tool, "run")
|
||||
method_name = "run"
|
||||
if awaitable is None:
|
||||
raise ValueError("Tool must have a valid handler or override 'run' method.")
|
||||
|
||||
wrapper = call_local_llm_tool(
|
||||
context=run_context,
|
||||
handler=awaitable,
|
||||
method_name=method_name,
|
||||
**tool_args,
|
||||
)
|
||||
while True:
|
||||
try:
|
||||
resp = await asyncio.wait_for(
|
||||
anext(wrapper),
|
||||
timeout=run_context.tool_call_timeout,
|
||||
)
|
||||
if resp is not None:
|
||||
if isinstance(resp, mcp.types.CallToolResult):
|
||||
yield resp
|
||||
else:
|
||||
text_content = mcp.types.TextContent(
|
||||
type="text",
|
||||
text=str(resp),
|
||||
)
|
||||
yield mcp.types.CallToolResult(content=[text_content])
|
||||
else:
|
||||
# NOTE: Tool 在这里直接请求发送消息给用户
|
||||
# TODO: 是否需要判断 event.get_result() 是否为空?
|
||||
# 如果为空,则说明没有发送消息给用户,并且返回值为空,将返回一个特殊的 TextContent,其内容如"工具没有返回内容"
|
||||
if res := run_context.context.event.get_result():
|
||||
if res.chain:
|
||||
try:
|
||||
await event.send(
|
||||
MessageChain(
|
||||
chain=res.chain,
|
||||
type="tool_direct_result",
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Tool 直接发送消息失败: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
yield None
|
||||
except asyncio.TimeoutError:
|
||||
raise Exception(
|
||||
f"tool {tool.name} execution timeout after {run_context.tool_call_timeout} seconds.",
|
||||
)
|
||||
except StopAsyncIteration:
|
||||
break
|
||||
|
||||
@classmethod
|
||||
async def _execute_mcp(
|
||||
cls,
|
||||
tool: FunctionTool,
|
||||
run_context: ContextWrapper[AstrAgentContext],
|
||||
**tool_args,
|
||||
):
|
||||
res = await tool.call(run_context, **tool_args)
|
||||
if not res:
|
||||
return
|
||||
yield res
|
||||
|
||||
|
||||
async def call_local_llm_tool(
    context: ContextWrapper[AstrAgentContext],
    handler: T.Callable[..., T.Awaitable[T.Any]],
    method_name: str,
    *args,
    **kwargs,
) -> T.AsyncGenerator[T.Any, None]:
    """Invoke a local LLM tool handler and adapt its result to a stream.

    Args:
        context: Agent context wrapper; its wrapped context carries the event.
        handler: Coroutine function or async-generator function to invoke.
        method_name: How the handler was resolved ("decorator_handler",
            "run" or "call"); determines the first positional argument.
        *args: Extra positional arguments for the handler.
        **kwargs: Tool-call arguments forwarded to the handler.

    Yields:
        Values produced by the handler; a bare ``yield`` (None) signals that
        the result was stored on the event via ``set_result`` instead.
    """
    ready_to_call = None  # a coroutine or an async generator

    trace_ = None  # traceback text captured if invoking the handler fails

    event = context.context.event

    try:
        # "run"/"decorator_handler" style handlers take the event first;
        # "call" style handlers take the context wrapper first.
        if method_name == "run" or method_name == "decorator_handler":
            ready_to_call = handler(event, *args, **kwargs)
        elif method_name == "call":
            ready_to_call = handler(context, *args, **kwargs)
        else:
            raise ValueError(f"未知的方法名: {method_name}")
    except ValueError as e:
        logger.error(f"调用本地 LLM 工具时出错: {e}", exc_info=True)
    except TypeError:
        logger.error("处理函数参数不匹配,请检查 handler 的定义。", exc_info=True)
    except Exception as e:
        trace_ = traceback.format_exc()
        logger.error(f"调用本地 LLM 工具时出错: {e}\n{trace_}")

    if not ready_to_call:
        # Invocation failed above; errors were already logged.
        return

    if inspect.isasyncgen(ready_to_call):
        _has_yielded = False
        try:
            async for ret in ready_to_call:
                # Step the async generator; each yielded ret is handled here.
                # The value may only be a MessageEventResult or None.
                _has_yielded = True
                if isinstance(ret, (MessageEventResult, CommandResult)):
                    # Store the result on the event and keep going.
                    event.set_result(ret)
                    yield
                else:
                    # No event result to set; pass the value through so
                    # later stages can continue.
                    yield ret
            if not _has_yielded:
                # The generator finished without ever reaching a yield.
                yield
        except Exception as e:
            logger.error(f"Previous Error: {trace_}")
            raise e
    elif inspect.iscoroutine(ready_to_call):
        # A plain coroutine: await it once and adapt the single result.
        ret = await ready_to_call
        if isinstance(ret, (MessageEventResult, CommandResult)):
            event.set_result(ret)
            yield
        else:
            yield ret
|
||||
275
astrbot/core/astrbot_config_mgr.py
Normal file
275
astrbot/core/astrbot_config_mgr.py
Normal file
@@ -0,0 +1,275 @@
|
||||
import os
|
||||
import uuid
|
||||
from typing import TypedDict, TypeVar
|
||||
|
||||
from astrbot.core import AstrBotConfig, logger
|
||||
from astrbot.core.config.astrbot_config import ASTRBOT_CONFIG_PATH
|
||||
from astrbot.core.config.default import DEFAULT_CONFIG
|
||||
from astrbot.core.platform.message_session import MessageSession
|
||||
from astrbot.core.umop_config_router import UmopConfigRouter
|
||||
from astrbot.core.utils.astrbot_path import get_astrbot_config_path
|
||||
from astrbot.core.utils.shared_preferences import SharedPreferences
|
||||
|
||||
_VT = TypeVar("_VT")  # value type returned by AstrBotConfigManager.g()


class ConfInfo(TypedDict):
    """Configuration information for a specific session or platform."""

    id: str  # UUID of the configuration or "default"
    name: str  # human-readable display name of the configuration
    path: str  # File name to the configuration file


# Metadata entry describing the built-in default configuration.
# NOTE(review): for mapped configs "path" holds only a file name, while here
# it is the absolute ASTRBOT_CONFIG_PATH — confirm consumers handle both.
DEFAULT_CONFIG_CONF_INFO = ConfInfo(
    id="default",
    name="default",
    path=ASTRBOT_CONFIG_PATH,
)
|
||||
|
||||
|
||||
class AstrBotConfigManager:
    """A class to manage the system configuration of AstrBot, aka ACM.

    Keeps an in-memory map of config UUID -> AstrBotConfig and persists the
    UUID -> {path, name} mapping under the "abconf_mapping" key of the
    shared preferences. The umo -> config binding itself is resolved via
    the UmopConfigRouter (ucr).
    """

    def __init__(
        self,
        default_config: AstrBotConfig,
        ucr: UmopConfigRouter,
        sp: SharedPreferences,
    ):
        """Register the default config and load every mapped config file."""
        self.sp = sp
        self.ucr = ucr
        # uuid / "default" -> AstrBotConfig
        self.confs: dict[str, AstrBotConfig] = {}
        self.confs["default"] = default_config
        # Lazily loaded cache of the persisted uuid -> {path, name} mapping.
        self.abconf_data = None
        self._load_all_configs()

    def _get_abconf_data(self) -> dict:
        """Return (and lazily cache) the persisted config mapping."""
        if self.abconf_data is None:
            self.abconf_data = self.sp.get(
                "abconf_mapping",
                {},
                scope="global",
                scope_id="global",
            )
        return self.abconf_data

    def _load_all_configs(self):
        """Load all configurations from the shared preferences."""
        abconf_data = self._get_abconf_data()
        for uuid_, meta in abconf_data.items():
            filename = meta["path"]
            conf_path = os.path.join(get_astrbot_config_path(), filename)
            if os.path.exists(conf_path):
                self.confs[uuid_] = AstrBotConfig(config_path=conf_path)
            else:
                # Stale mapping entry: the file was removed externally.
                logger.warning(
                    f"Config file {conf_path} for UUID {uuid_} does not exist, skipping.",
                )

    def _load_conf_mapping(self, umo: str | MessageSession) -> ConfInfo:
        """Resolve the config bound to *umo*; falls back to the default
        config info (id "default") when unbound or malformed.

        Returns:
            ConfInfo: uuid, file path and name of the config (a dict).
        """
        # uuid -> { "path": str, "name": str }
        abconf_data = self._get_abconf_data()

        if isinstance(umo, MessageSession):
            umo = str(umo)
        else:
            try:
                umo = str(MessageSession.from_str(umo))  # validate
            except Exception:
                # Malformed umo strings always map to the default config.
                return DEFAULT_CONFIG_CONF_INFO

        conf_id = self.ucr.get_conf_id_for_umop(umo)
        if conf_id:
            meta = abconf_data.get(conf_id)
            if meta and isinstance(meta, dict):
                # the bind relation between umo and conf is defined in ucr
                # now, so we remove "umop" here
                meta.pop("umop", None)
                return ConfInfo(**meta, id=conf_id)

        return DEFAULT_CONFIG_CONF_INFO

    def _save_conf_mapping(
        self,
        abconf_path: str,
        abconf_id: str,
        abconf_name: str | None = None,
    ) -> None:
        """Persist one uuid -> {path, name} entry into the mapping."""
        abconf_data = self.sp.get(
            "abconf_mapping",
            {},
            scope="global",
            scope_id="global",
        )
        # When no name is given, use a short random identifier.
        random_word = abconf_name or uuid.uuid4().hex[:8]
        abconf_data[abconf_id] = {
            "path": abconf_path,
            "name": random_word,
        }
        self.sp.put("abconf_mapping", abconf_data, scope="global", scope_id="global")
        self.abconf_data = abconf_data

    def get_conf(self, umo: str | MessageSession | None) -> AstrBotConfig:
        """Return the config for *umo*; falls back to the default config."""
        if not umo:
            return self.confs["default"]
        if isinstance(umo, MessageSession):
            umo = f"{umo.platform_id}:{umo.message_type}:{umo.session_id}"

        uuid_ = self._load_conf_mapping(umo)["id"]

        conf = self.confs.get(uuid_)
        if not conf:
            conf = self.confs["default"]  # default MUST exists

        return conf

    @property
    def default_conf(self) -> AstrBotConfig:
        """The default configuration object."""
        return self.confs["default"]

    def get_conf_info(self, umo: str | MessageSession) -> ConfInfo:
        """Return the config metadata bound to *umo*."""
        if isinstance(umo, MessageSession):
            umo = f"{umo.platform_id}:{umo.message_type}:{umo.session_id}"

        return self._load_conf_mapping(umo)

    def get_conf_list(self) -> list[ConfInfo]:
        """Return metadata for every known config, default included."""
        conf_list = []
        abconf_mapping = self._get_abconf_data()
        for uuid_, meta in abconf_mapping.items():
            if not isinstance(meta, dict):
                continue
            meta.pop("umop", None)  # legacy field, superseded by ucr
            conf_list.append(ConfInfo(**meta, id=uuid_))
        conf_list.append(DEFAULT_CONFIG_CONF_INFO)
        return conf_list

    def create_conf(
        self,
        config: dict | None = None,
        name: str | None = None,
    ) -> str:
        """Create a new config file and register it in the mapping.

        Args:
            config: Initial config dict; DEFAULT_CONFIG when omitted.
            name: Optional display name; a random one is generated otherwise.

        Returns:
            str: UUID of the newly created configuration.
        """
        # Avoid sharing a mutable default argument; resolve the fallback
        # here instead of in the signature.
        if config is None:
            config = DEFAULT_CONFIG
        conf_uuid = str(uuid.uuid4())
        conf_file_name = f"abconf_{conf_uuid}.json"
        conf_path = os.path.join(get_astrbot_config_path(), conf_file_name)
        conf = AstrBotConfig(config_path=conf_path, default_config=config)
        conf.save_config()
        self._save_conf_mapping(conf_file_name, conf_uuid, abconf_name=name)
        self.confs[conf_uuid] = conf
        return conf_uuid

    def delete_conf(self, conf_id: str) -> bool:
        """Delete a configuration file and all references to it.

        Args:
            conf_id: UUID of the configuration.

        Returns:
            bool: Whether the deletion succeeded.

        Raises:
            ValueError: When trying to delete the default configuration.

        """
        if conf_id == "default":
            raise ValueError("不能删除默认配置文件")

        # Fresh read of the mapping (not the in-memory cache).
        abconf_data = self.sp.get(
            "abconf_mapping",
            {},
            scope="global",
            scope_id="global",
        )
        if conf_id not in abconf_data:
            logger.warning(f"配置文件 {conf_id} 不存在于映射中")
            return False

        # Resolve the config file path.
        conf_path = os.path.join(
            get_astrbot_config_path(),
            abconf_data[conf_id]["path"],
        )

        # Remove the file itself.
        try:
            if os.path.exists(conf_path):
                os.remove(conf_path)
                logger.info(f"已删除配置文件: {conf_path}")
        except Exception as e:
            logger.error(f"删除配置文件 {conf_path} 失败: {e}")
            return False

        # Drop from memory.
        if conf_id in self.confs:
            del self.confs[conf_id]

        # Drop from the mapping and persist.
        del abconf_data[conf_id]
        self.sp.put("abconf_mapping", abconf_data, scope="global", scope_id="global")
        self.abconf_data = abconf_data

        logger.info(f"成功删除配置文件 {conf_id}")
        return True

    def update_conf_info(self, conf_id: str, name: str | None = None) -> bool:
        """Update a configuration's metadata (currently only its name).

        Args:
            conf_id: UUID of the configuration.
            name: New display name (optional).

        Returns:
            bool: Whether the update succeeded.

        Raises:
            ValueError: When trying to update the default configuration.

        """
        if conf_id == "default":
            raise ValueError("不能更新默认配置文件的信息")

        abconf_data = self.sp.get(
            "abconf_mapping",
            {},
            scope="global",
            scope_id="global",
        )
        if conf_id not in abconf_data:
            logger.warning(f"配置文件 {conf_id} 不存在于映射中")
            return False

        # Update the display name.
        if name is not None:
            abconf_data[conf_id]["name"] = name

        # Persist the change and refresh the cache.
        self.sp.put("abconf_mapping", abconf_data, scope="global", scope_id="global")
        self.abconf_data = abconf_data
        logger.info(f"成功更新配置文件 {conf_id} 的信息")
        return True

    def g(
        self,
        umo: str | None = None,
        key: str | None = None,
        default: _VT = None,
    ) -> _VT:
        """Shorthand getter; with umo=None reads from the default config."""
        if umo is None:
            return self.confs["default"].get(key, default)
        conf = self.get_conf(umo)
        return conf.get(key, default)
|
||||
9
astrbot/core/config/__init__.py
Normal file
9
astrbot/core/config/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
# Re-export the configuration API at package level.
from .astrbot_config import *
from .default import DB_PATH, DEFAULT_CONFIG, VERSION

# Public API of astrbot.core.config.
__all__ = [
    "DB_PATH",
    "DEFAULT_CONFIG",
    "VERSION",
    "AstrBotConfig",
]
|
||||
172
astrbot/core/config/astrbot_config.py
Normal file
172
astrbot/core/config/astrbot_config.py
Normal file
@@ -0,0 +1,172 @@
|
||||
import enum
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from astrbot.core.utils.astrbot_path import get_astrbot_data_path
|
||||
|
||||
from .default import DEFAULT_CONFIG, DEFAULT_VALUE_MAP
|
||||
|
||||
ASTRBOT_CONFIG_PATH = os.path.join(get_astrbot_data_path(), "cmd_config.json")
|
||||
logger = logging.getLogger("astrbot")
|
||||
|
||||
|
||||
class RateLimitStrategy(enum.Enum):
    """How messages that exceed the configured rate limit are treated."""

    # Hold the message and process it once the window frees up.
    STALL = "stall"
    # Drop the message outright.
    DISCARD = "discard"
|
||||
|
||||
|
||||
class AstrBotConfig(dict):
    """Configuration loaded from a JSON file; root keys are also reachable
    through attribute (dot) access.

    - On init the default_config is compared (recursively) against the file;
      missing keys are inserted with their defaults and the file is written
      back once if anything changed.
    - If the file at config_path does not exist, it is created from the
      defaults.
    - If a *schema* is passed, default_config is derived from it and the
      passed-in default_config is ignored.
    """

    def __init__(
        self,
        config_path: str = ASTRBOT_CONFIG_PATH,
        default_config: dict = DEFAULT_CONFIG,
        schema: dict | None = None,
    ):
        super().__init__()

        # Use object.__setattr__ so these bookkeeping attributes do not
        # become dict entries (which would be written to the config file).
        object.__setattr__(self, "config_path", config_path)
        object.__setattr__(self, "default_config", default_config)
        object.__setattr__(self, "schema", schema)

        if schema:
            # A schema supersedes the passed-in defaults.
            default_config = self._config_schema_to_default_config(schema)

        if not self.check_exist():
            # First run: write the defaults to disk.
            with open(config_path, "w", encoding="utf-8-sig") as f:
                json.dump(default_config, f, indent=4, ensure_ascii=False)
            object.__setattr__(self, "first_deploy", True)  # mark first deploy

        with open(config_path, encoding="utf-8-sig") as f:
            conf_str = f.read()
        conf = json.loads(conf_str)

        # Fill in missing keys / fix ordering; persist once if changed.
        # (Fix: the original called self.update(conf) a second time after
        # save_config(), which was redundant.)
        has_new = self.check_config_integrity(default_config, conf)
        self.update(conf)
        if has_new:
            self.save_config()

    def _config_schema_to_default_config(self, schema: dict) -> dict:
        """Build a default-config dict from a schema description."""
        conf = {}

        def _parse_schema(schema: dict, conf: dict):
            for k, v in schema.items():
                if v["type"] not in DEFAULT_VALUE_MAP:
                    raise TypeError(
                        f"不受支持的配置类型 {v['type']}。支持的类型有:{DEFAULT_VALUE_MAP.keys()}",
                    )
                if "default" in v:
                    default = v["default"]
                else:
                    default = DEFAULT_VALUE_MAP[v["type"]]

                if v["type"] == "object":
                    # Nested object: recurse into its item schemas.
                    # NOTE(review): an explicit "default" on an object node
                    # is ignored here — confirm that is intended.
                    conf[k] = {}
                    _parse_schema(v["items"], conf[k])
                else:
                    conf[k] = default

        _parse_schema(schema, conf)

        return conf

    def check_config_integrity(self, refer_conf: dict, conf: dict, path=""):
        """Synchronize *conf* with *refer_conf* in place.

        Inserts missing keys with their defaults, replaces None/mistyped
        values, drops keys absent from the reference, and reorders keys to
        match the reference order.

        Returns:
            bool: True when anything (including key order) changed.
        """
        has_new = False

        # Rebuild the mapping in the reference configuration's order.
        new_conf = {}

        for key, value in refer_conf.items():
            if key not in conf:
                # Missing key: insert the default value.
                path_ = path + "." + key if path else key
                logger.info(f"检查到配置项 {path_} 不存在,已插入默认值 {value}")
                new_conf[key] = value
                has_new = True
            elif conf[key] is None:
                # Null value: fall back to the default.
                new_conf[key] = value
                has_new = True
            elif isinstance(value, dict):
                # Recurse into sub-configurations.
                if not isinstance(conf[key], dict):
                    # Type mismatch: replace with the default subtree.
                    new_conf[key] = value
                    has_new = True
                else:
                    # Recurse and synchronize the child ordering too.
                    child_has_new = self.check_config_integrity(
                        value,
                        conf[key],
                        path + "." + key if path else key,
                    )
                    new_conf[key] = conf[key]
                    has_new |= child_has_new
            else:
                # Keep the existing value.
                new_conf[key] = conf[key]

        # Keys no longer present in the reference are dropped.
        for key in list(conf.keys()):
            if key not in refer_conf:
                path_ = path + "." + key if path else key
                logger.info(f"检查到配置项 {path_} 不存在,将从当前配置中删除")
                has_new = True

        # A differing key order also counts as a change.
        if list(conf.keys()) != list(new_conf.keys()):
            if path:
                logger.info(f"检查到配置项 {path} 的子项顺序不一致,已重新排序")
            else:
                logger.info("检查到配置项顺序不一致,已重新排序")
            has_new = True

        # Write the rebuilt mapping back into the caller's dict.
        conf.clear()
        conf.update(new_conf)

        return has_new

    def save_config(self, replace_config: dict | None = None):
        """Write the configuration to its file.

        If *replace_config* is given, its entries are merged in first.
        """
        if replace_config:
            self.update(replace_config)
        with open(self.config_path, "w", encoding="utf-8-sig") as f:
            json.dump(self, f, indent=2, ensure_ascii=False)

    def __getattr__(self, item):
        # Missing keys read as None instead of raising AttributeError.
        try:
            return self[item]
        except KeyError:
            return None

    def __delattr__(self, key):
        # Deleting an attribute removes the key and persists immediately.
        try:
            del self[key]
            self.save_config()
        except KeyError:
            raise AttributeError(f"没有找到 Key: '{key}'")

    def __setattr__(self, key, value):
        # Attribute writes become dict entries (persisted on next save).
        self[key] = value

    def check_exist(self) -> bool:
        """Whether the config file exists on disk."""
        return os.path.exists(self.config_path)
|
||||
2981
astrbot/core/config/default.py
Normal file
2981
astrbot/core/config/default.py
Normal file
File diff suppressed because it is too large
Load Diff
110
astrbot/core/config/i18n_utils.py
Normal file
110
astrbot/core/config/i18n_utils.py
Normal file
@@ -0,0 +1,110 @@
|
||||
"""
|
||||
配置元数据国际化工具
|
||||
|
||||
提供配置元数据的国际化键转换功能
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
|
||||
|
||||
class ConfigMetadataI18n:
    """Converts configuration metadata into i18n key references."""

    @staticmethod
    def _get_i18n_key(group: str, section: str, field: str, attr: str) -> str:
        """Build a dotted i18n key.

        Args:
            group: Config group, e.g. 'ai_group', 'platform_group'.
            section: Config section, e.g. 'agent_runner', 'general'.
            field: Field name, e.g. 'enable'; may be empty for
                section-level attributes.
            attr: Attribute kind, e.g. 'description', 'hint', 'labels'.

        Returns:
            Key such as 'ai_group.agent_runner.enable.description'.
        """
        parts = [group, section, field, attr] if field else [group, section, attr]
        return ".".join(parts)

    @staticmethod
    def convert_to_i18n_keys(metadata: dict[str, Any]) -> dict[str, Any]:
        """Return a copy of *metadata* whose display texts are i18n keys.

        Structural attributes (type, condition, options, ...) are carried
        over unchanged; description/hint/labels are replaced by dotted keys.
        """
        converted: dict[str, Any] = {}

        for group, group_body in metadata.items():
            sections: dict[str, Any] = {}

            for section, body in group_body.get("metadata", {}).items():
                out: dict[str, Any] = {
                    "description": f"{group}.{section}.description",
                    "type": body.get("type"),
                }

                # Structural attributes pass through untouched.
                for passthrough in ("items", "condition", "_special", "invisible"):
                    if passthrough in body:
                        out[passthrough] = body[passthrough]

                # Section-level hint becomes a key reference.
                if "hint" in body:
                    out["hint"] = f"{group}.{section}.hint"

                # Per-field conversion (dotted field names kept verbatim).
                raw_items = body.get("items")
                if isinstance(raw_items, dict):
                    fields: dict[str, Any] = {}
                    for fname, fbody in raw_items.items():
                        f_out: dict[str, Any] = {
                            a: fbody[a]
                            for a in (
                                "type",
                                "condition",
                                "_special",
                                "invisible",
                                "options",
                            )
                            if a in fbody
                        }
                        for text_attr in ("description", "hint", "labels"):
                            if text_attr in fbody:
                                f_out[text_attr] = (
                                    f"{group}.{section}.{fname}.{text_attr}"
                                )
                        fields[fname] = f_out
                    out["items"] = fields

                sections[section] = out

            converted[group] = {"name": f"{group}.name", "metadata": sections}

        return converted
|
||||
409
astrbot/core/conversation_mgr.py
Normal file
409
astrbot/core/conversation_mgr.py
Normal file
@@ -0,0 +1,409 @@
|
||||
"""AstrBot 会话-对话管理器, 维护两个本地存储, 其中一个是 json 格式的shared_preferences, 另外一个是数据库.
|
||||
|
||||
在 AstrBot 中, 会话和对话是独立的, 会话用于标记对话窗口, 例如群聊"123456789"可以建立一个会话,
|
||||
在一个会话中可以建立多个对话, 并且支持对话的切换和删除
|
||||
"""
|
||||
|
||||
import json
|
||||
from collections.abc import Awaitable, Callable
|
||||
|
||||
from astrbot.core import sp
|
||||
from astrbot.core.agent.message import AssistantMessageSegment, UserMessageSegment
|
||||
from astrbot.core.db import BaseDatabase
|
||||
from astrbot.core.db.po import Conversation, ConversationV2
|
||||
|
||||
|
||||
class ConversationManager:
|
||||
"""负责管理会话与 LLM 的对话,某个会话当前正在用哪个对话。"""
|
||||
|
||||
def __init__(self, db_helper: BaseDatabase):
    """Bind the manager to its database backend."""
    self.db = db_helper
    # Cache: unified_msg_origin -> currently selected conversation id.
    self.session_conversations: dict[str, str] = {}
    # Persist selections every 60 seconds.
    self.save_interval = 60
    # Callbacks fired when a session is deleted (cascade cleanup, e.g.
    # knowledge-base configuration).
    self._on_session_deleted_callbacks: list[Callable[[str], Awaitable[None]]] = []
|
||||
|
||||
def register_on_session_deleted(
    self,
    callback: Callable[[str], Awaitable[None]],
) -> None:
    """Register a callback invoked after a session is deleted.

    Other modules can hook session deletion to perform cascading cleanup —
    e.g. the knowledge-base module removing per-session configuration.

    Args:
        callback: Coroutine function receiving the session id
            (unified_msg_origin) as its only argument.

    """
    self._on_session_deleted_callbacks.append(callback)
|
||||
|
||||
async def _trigger_session_deleted(self, unified_msg_origin: str) -> None:
    """Fire every registered session-deleted callback.

    Failures are logged and swallowed so one broken hook cannot block the
    cleanup performed by the others.

    Args:
        unified_msg_origin: Session id.

    """
    for hook in self._on_session_deleted_callbacks:
        try:
            await hook(unified_msg_origin)
        except Exception as e:
            # Imported lazily to avoid an import cycle with astrbot.core.
            from astrbot.core import logger

            logger.error(
                f"会话删除回调执行失败 (session: {unified_msg_origin}): {e}",
            )
|
||||
|
||||
def _convert_conv_from_v2_to_v1(self, conv_v2: ConversationV2) -> Conversation:
    """Down-convert a ConversationV2 row into the legacy Conversation shape."""
    # Legacy records store history as a JSON string and timestamps as
    # unix seconds.
    return Conversation(
        platform_id=conv_v2.platform_id,
        user_id=conv_v2.user_id,
        cid=conv_v2.conversation_id,
        history=json.dumps(conv_v2.content or []),
        title=conv_v2.title,
        persona_id=conv_v2.persona_id,
        created_at=int(conv_v2.created_at.timestamp()),
        updated_at=int(conv_v2.updated_at.timestamp()),
    )
|
||||
|
||||
async def new_conversation(
    self,
    unified_msg_origin: str,
    platform_id: str | None = None,
    content: list[dict] | None = None,
    title: str | None = None,
    persona_id: str | None = None,
) -> str:
    """Create a new conversation and make it the session's current one.

    Args:
        unified_msg_origin (str): Unified message-origin string, formatted
            as ``platform_name:message_type:session_id``.
        platform_id: Platform id; derived from *unified_msg_origin* when
            omitted, falling back to "unknown".
        content: Initial history as a list of role/content dicts.
        title: Optional conversation title.
        persona_id: Optional persona bound to the conversation.

    Returns:
        conversation_id (str): Conversation id, a UUID-format string.

    """
    if not platform_id:
        # Derive the platform from a well-formed umo; malformed strings
        # (fewer than three segments) fall through to "unknown".
        parts = unified_msg_origin.split(":")
        if len(parts) >= 3:
            platform_id = parts[0]
    platform_id = platform_id or "unknown"
    conv = await self.db.create_conversation(
        user_id=unified_msg_origin,
        platform_id=platform_id,
        content=content,
        title=title,
        persona_id=persona_id,
    )
    new_cid = conv.conversation_id
    self.session_conversations[unified_msg_origin] = new_cid
    await sp.session_put(unified_msg_origin, "sel_conv_id", new_cid)
    return new_cid
|
||||
|
||||
async def switch_conversation(self, unified_msg_origin: str, conversation_id: str):
    """Point the session at *conversation_id*, in memory and on disk.

    Args:
        unified_msg_origin (str): Unified message-origin string, formatted
            as ``platform_name:message_type:session_id``.
        conversation_id (str): Conversation id (UUID string) to select.

    """
    self.session_conversations[unified_msg_origin] = conversation_id
    await sp.session_put(unified_msg_origin, "sel_conv_id", conversation_id)
|
||||
|
||||
async def delete_conversation(
    self,
    unified_msg_origin: str,
    conversation_id: str | None = None,
):
    """Delete a conversation; defaults to the session's current one.

    Args:
        unified_msg_origin (str): Unified message-origin string, formatted
            as ``platform_name:message_type:session_id``.
        conversation_id (str): Conversation id (UUID string); when None,
            the session's currently selected conversation is deleted.

    """
    target = conversation_id or self.session_conversations.get(unified_msg_origin)
    if not target:
        return
    await self.db.delete_conversation(cid=target)
    # Clear the selection only when the deleted conversation was the
    # currently selected one (the selection may also live only in sp).
    if await self.get_curr_conversation_id(unified_msg_origin) == target:
        self.session_conversations.pop(unified_msg_origin, None)
        await sp.session_remove(unified_msg_origin, "sel_conv_id")
|
||||
|
||||
async def delete_conversations_by_user_id(self, unified_msg_origin: str):
    """Delete every conversation belonging to the session.

    Args:
        unified_msg_origin (str): Unified message-origin string, formatted
            as ``platform_name:message_type:session_id``.

    """
    await self.db.delete_conversations_by_user_id(user_id=unified_msg_origin)
    self.session_conversations.pop(unified_msg_origin, None)
    await sp.session_remove(unified_msg_origin, "sel_conv_id")

    # Cascade: notify listeners (e.g. knowledge-base cleanup).
    await self._trigger_session_deleted(unified_msg_origin)
|
||||
|
||||
async def get_curr_conversation_id(self, unified_msg_origin: str) -> str | None:
    """Return the session's currently selected conversation id, if any.

    Checks the in-memory cache first, then falls back to the persisted
    selection and re-populates the cache on a hit.

    Args:
        unified_msg_origin (str): Unified message-origin string, formatted
            as ``platform_name:message_type:session_id``.

    Returns:
        str | None: Conversation id (UUID string), or None when unset.

    """
    cached = self.session_conversations.get(unified_msg_origin)
    if cached:
        return cached
    persisted = await sp.session_get(unified_msg_origin, "sel_conv_id", None)
    if persisted:
        self.session_conversations[unified_msg_origin] = persisted
    return persisted
|
||||
|
||||
async def get_conversation(
    self,
    unified_msg_origin: str,
    conversation_id: str,
    create_if_not_exists: bool = False,
) -> Conversation | None:
    """Fetch a conversation, optionally creating one when it is missing.

    Args:
        unified_msg_origin (str): Unified message-origin string, formatted
            as ``platform_name:message_type:session_id``.
        conversation_id (str): Conversation id (UUID string).
        create_if_not_exists (bool): When True and the id is unknown,
            create a fresh conversation for the session and return it.

    Returns:
        Conversation | None: The legacy-format conversation, or None.

    """
    conv = await self.db.get_conversation_by_id(cid=conversation_id)
    if not conv and create_if_not_exists:
        # Note: new_conversation also switches the session's selection.
        conversation_id = await self.new_conversation(unified_msg_origin)
        conv = await self.db.get_conversation_by_id(cid=conversation_id)
    return self._convert_conv_from_v2_to_v1(conv) if conv else None
|
||||
|
||||
async def get_conversations(
    self,
    unified_msg_origin: str | None = None,
    platform_id: str | None = None,
) -> list[Conversation]:
    """List conversations, optionally filtered by session and/or platform.

    Args:
        unified_msg_origin (str): Unified message-origin string
            (``platform_name:message_type:session_id``); optional filter.
        platform_id (str): Platform id; optional filter.

    Returns:
        list[Conversation]: Conversations in the legacy format.

    """
    convs = await self.db.get_conversations(
        user_id=unified_msg_origin,
        platform_id=platform_id,
    )
    # Comprehension instead of a manual append loop (idiomatic, faster).
    return [self._convert_conv_from_v2_to_v1(conv) for conv in convs]
|
||||
|
||||
async def get_filtered_conversations(
    self,
    page: int = 1,
    page_size: int = 20,
    platform_ids: list[str] | None = None,
    search_query: str = "",
    **kwargs,
) -> tuple[list[Conversation], int]:
    """Page through conversations with optional platform/search filters.

    Args:
        page (int): 1-based page number, defaults to 1.
        page_size (int): Items per page, defaults to 20.
        platform_ids (list[str]): Optional platform-id filter.
        search_query (str): Optional search string.
        **kwargs: Extra filters forwarded to the database layer.

    Returns:
        tuple[list[Conversation], int]: The page of conversations in the
        legacy format and the total match count.

    """
    convs, cnt = await self.db.get_filtered_conversations(
        page=page,
        page_size=page_size,
        platform_ids=platform_ids,
        search_query=search_query,
        **kwargs,
    )
    # Comprehension instead of a manual append loop (idiomatic, faster).
    return [self._convert_conv_from_v2_to_v1(conv) for conv in convs], cnt
|
||||
|
||||
async def update_conversation(
|
||||
self,
|
||||
unified_msg_origin: str,
|
||||
conversation_id: str | None = None,
|
||||
history: list[dict] | None = None,
|
||||
title: str | None = None,
|
||||
persona_id: str | None = None,
|
||||
) -> None:
|
||||
"""更新会话的对话.
|
||||
|
||||
Args:
|
||||
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||
history (List[Dict]): 对话历史记录, 是一个字典列表, 每个字典包含 role 和 content 字段
|
||||
|
||||
"""
|
||||
if not conversation_id:
|
||||
# 如果没有提供 conversation_id,则获取当前的
|
||||
conversation_id = await self.get_curr_conversation_id(unified_msg_origin)
|
||||
if conversation_id:
|
||||
await self.db.update_conversation(
|
||||
cid=conversation_id,
|
||||
title=title,
|
||||
persona_id=persona_id,
|
||||
content=history,
|
||||
)
|
||||
|
||||
async def update_conversation_title(
    self,
    unified_msg_origin: str,
    title: str,
    conversation_id: str | None = None,
) -> None:
    """Update the title of a session's conversation.

    Args:
        unified_msg_origin (str): Unified message-origin string in the form
            ``platform_name:message_type:session_id``.
        title (str): New conversation title.
        conversation_id (str | None): Conversation UUID; defaults to the
            session's current conversation.

    Deprecated:
        Use `update_conversation` with the `title` parameter instead.
    """
    # Thin compatibility shim kept for pre-existing callers.
    await self.update_conversation(
        unified_msg_origin=unified_msg_origin,
        title=title,
        conversation_id=conversation_id,
    )
|
||||
|
||||
async def update_conversation_persona_id(
    self,
    unified_msg_origin: str,
    persona_id: str,
    conversation_id: str | None = None,
) -> None:
    """Update the persona ID of a session's conversation.

    Args:
        unified_msg_origin (str): Unified message-origin string in the form
            ``platform_name:message_type:session_id``.
        persona_id (str): Persona ID to attach to the conversation.
        conversation_id (str | None): Conversation UUID; defaults to the
            session's current conversation.

    Deprecated:
        Use `update_conversation` with the `persona_id` parameter instead.
    """
    # Thin compatibility shim kept for pre-existing callers.
    await self.update_conversation(
        unified_msg_origin=unified_msg_origin,
        persona_id=persona_id,
        conversation_id=conversation_id,
    )
|
||||
|
||||
async def add_message_pair(
    self,
    cid: str,
    user_message: UserMessageSegment | dict,
    assistant_message: AssistantMessageSegment | dict,
) -> None:
    """Append a user/assistant message pair to a conversation's history.

    Args:
        cid (str): Conversation ID.
        user_message (UserMessageSegment | dict): OpenAI-format user message
            object or plain dict.
        assistant_message (AssistantMessageSegment | dict): OpenAI-format
            assistant message object or plain dict.

    Raises:
        Exception: If no conversation with the given ID exists.
    """
    conv = await self.db.get_conversation_by_id(cid=cid)
    if not conv:
        raise Exception(f"Conversation with id {cid} not found")

    # Normalize pydantic models to plain dicts; dicts pass through untouched.
    user_payload = (
        user_message.model_dump()
        if isinstance(user_message, UserMessageSegment)
        else user_message
    )
    assistant_payload = (
        assistant_message.model_dump()
        if isinstance(assistant_message, AssistantMessageSegment)
        else assistant_message
    )

    history = conv.content or []
    history.extend((user_payload, assistant_payload))
    await self.db.update_conversation(
        cid=cid,
        content=history,
    )
|
||||
|
||||
async def get_human_readable_context(
    self,
    unified_msg_origin: str,
    conversation_id: str,
    page: int = 1,
    page_size: int = 10,
) -> tuple[list[str], int]:
    """Render a conversation's history as human-readable lines, paginated.

    Args:
        unified_msg_origin (str): Unified message-origin string in the form
            ``platform_name:message_type:session_id``.
        conversation_id (str): Conversation UUID.
        page (int): 1-based page number.
        page_size (int): Number of lines per page.

    Returns:
        tuple[list[str], int]: Lines for the requested page and the total
        number of pages.
    """
    conversation = await self.get_conversation(unified_msg_origin, conversation_id)
    if not conversation:
        return [], 0
    records = json.loads(conversation.history)

    # Each assistant turn closes one group of lines; groups are prepended so
    # the most recent exchange comes first in the flattened output.
    grouped: list[list[str]] = []
    pending: list[str] = []
    for record in records:
        role = record["role"]
        if role == "user":
            pending.append(f"User: {record['content']}")
        elif role == "assistant":
            if record.get("content"):
                pending.append(f"Assistant: {record['content']}")
            elif "tool_calls" in record:
                dumped = json.dumps(
                    record["tool_calls"],
                    ensure_ascii=False,
                )
                pending.append(f"Assistant: [函数调用] {dumped}")
            else:
                pending.append("Assistant: [未知的内容]")
            grouped.insert(0, pending)
            pending = []

    # Flatten the grouped lines into a single list.
    flat = [line for group in grouped for line in group]

    # Compute the requested slice and the (ceiling) page count.
    start = (page - 1) * page_size
    page_lines = flat[start : start + page_size]
    total_pages, remainder = divmod(len(flat), page_size)
    if remainder:
        total_pages += 1

    return page_lines, total_pages
|
||||
336
astrbot/core/core_lifecycle.py
Normal file
336
astrbot/core/core_lifecycle.py
Normal file
@@ -0,0 +1,336 @@
|
||||
"""Astrbot 核心生命周期管理类, 负责管理 AstrBot 的启动、停止、重启等操作.
|
||||
|
||||
该类负责初始化各个组件, 包括 ProviderManager、PlatformManager、ConversationManager、PluginManager、PipelineScheduler、EventBus等。
|
||||
该类还负责加载和执行插件, 以及处理事件总线的分发。
|
||||
|
||||
工作流程:
|
||||
1. 初始化所有组件
|
||||
2. 启动事件总线和任务, 所有任务都在这里运行
|
||||
3. 执行启动完成事件钩子
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
from asyncio import Queue
|
||||
|
||||
from astrbot.api import logger, sp
|
||||
from astrbot.core import LogBroker
|
||||
from astrbot.core.astrbot_config_mgr import AstrBotConfigManager
|
||||
from astrbot.core.config.default import VERSION
|
||||
from astrbot.core.conversation_mgr import ConversationManager
|
||||
from astrbot.core.db import BaseDatabase
|
||||
from astrbot.core.knowledge_base.kb_mgr import KnowledgeBaseManager
|
||||
from astrbot.core.persona_mgr import PersonaManager
|
||||
from astrbot.core.pipeline.scheduler import PipelineContext, PipelineScheduler
|
||||
from astrbot.core.platform.manager import PlatformManager
|
||||
from astrbot.core.platform_message_history_mgr import PlatformMessageHistoryManager
|
||||
from astrbot.core.provider.manager import ProviderManager
|
||||
from astrbot.core.star import PluginManager
|
||||
from astrbot.core.star.context import Context
|
||||
from astrbot.core.star.star_handler import EventType, star_handlers_registry, star_map
|
||||
from astrbot.core.umop_config_router import UmopConfigRouter
|
||||
from astrbot.core.updator import AstrBotUpdator
|
||||
from astrbot.core.utils.migra_helper import migra
|
||||
|
||||
from . import astrbot_config, html_renderer
|
||||
from .event_bus import EventBus
|
||||
|
||||
|
||||
class AstrBotCoreLifecycle:
    """Core lifecycle manager for AstrBot: start, stop and restart.

    Initializes the core components (ProviderManager, PlatformManager,
    ConversationManager, PluginManager, PipelineScheduler, EventBus, ...),
    loads and executes plugins, and drives event-bus dispatching.
    """

    def __init__(self, log_broker: LogBroker, db: BaseDatabase) -> None:
        self.log_broker = log_broker  # broker that fans out log records
        self.astrbot_config = astrbot_config  # module-level default config
        self.db = db  # database backend

        # Configure HTTP(S) proxy environment variables from config.
        proxy_config = self.astrbot_config.get("http_proxy", "")
        if proxy_config != "":
            os.environ["https_proxy"] = proxy_config
            os.environ["http_proxy"] = proxy_config
            logger.debug(f"Using proxy: {proxy_config}")
            # Hosts that must bypass the proxy.
            no_proxy_list = self.astrbot_config.get("no_proxy", [])
            os.environ["no_proxy"] = ",".join(no_proxy_list)
        else:
            # No proxy configured: clear any inherited proxy env vars.
            if "https_proxy" in os.environ:
                del os.environ["https_proxy"]
            if "http_proxy" in os.environ:
                del os.environ["http_proxy"]
            if "no_proxy" in os.environ:
                del os.environ["no_proxy"]
            logger.debug("HTTP proxy cleared")

    async def initialize(self) -> None:
        """Initialize the core lifecycle.

        Sets up every component: ProviderManager, PlatformManager,
        ConversationManager, PluginManager, PipelineScheduler, EventBus,
        AstrBotUpdator, and so on. Ordering matters: config + migration first,
        then managers, then plugins, then pipelines and the event bus.
        """
        # Configure logging.
        logger.info("AstrBot v" + VERSION)
        if os.environ.get("TESTING", ""):
            logger.setLevel("DEBUG")  # force DEBUG level in test mode
        else:
            logger.setLevel(self.astrbot_config["log_level"])  # configured level

        await self.db.initialize()

        await html_renderer.initialize()

        # UMOP config router.
        self.umop_config_router = UmopConfigRouter(sp=sp)

        # AstrBot config manager.
        self.astrbot_config_mgr = AstrBotConfigManager(
            default_config=self.astrbot_config,
            ucr=self.umop_config_router,
            sp=sp,
        )

        # Apply data migrations; failures are logged but do not abort startup.
        try:
            await migra(
                self.db,
                self.astrbot_config_mgr,
                self.umop_config_router,
                # NOTE(review): astrbot_config_mgr is passed twice here —
                # confirm against migra()'s signature that this is intended.
                self.astrbot_config_mgr,
            )
        except Exception as e:
            logger.error(f"AstrBot migration failed: {e!s}")
            logger.error(traceback.format_exc())

        # Event queue feeding the event bus.
        self.event_queue = Queue()

        # Persona manager.
        self.persona_mgr = PersonaManager(self.db, self.astrbot_config_mgr)
        await self.persona_mgr.initialize()

        # Provider manager.
        self.provider_manager = ProviderManager(
            self.astrbot_config_mgr,
            self.db,
            self.persona_mgr,
        )

        # Platform manager.
        self.platform_manager = PlatformManager(self.astrbot_config, self.event_queue)

        # Conversation manager.
        self.conversation_manager = ConversationManager(self.db)

        # Platform message-history manager.
        self.platform_message_history_manager = PlatformMessageHistoryManager(self.db)

        # Knowledge-base manager.
        self.kb_manager = KnowledgeBaseManager(self.provider_manager)

        # Context object handed to plugins.
        self.star_context = Context(
            self.event_queue,
            self.astrbot_config,
            self.db,
            self.provider_manager,
            self.platform_manager,
            self.conversation_manager,
            self.platform_message_history_manager,
            self.persona_mgr,
            self.astrbot_config_mgr,
            self.kb_manager,
        )

        # Plugin manager.
        self.plugin_manager = PluginManager(self.star_context, self.astrbot_config)

        # Scan, register and instantiate plugins.
        await self.plugin_manager.reload()

        # Instantiate providers from config.
        await self.provider_manager.initialize()

        await self.kb_manager.initialize()

        # Message-event pipeline schedulers (one per config).
        self.pipeline_scheduler_mapping = await self.load_pipeline_scheduler()

        # Updater.
        self.astrbot_updator = AstrBotUpdator()

        # Event bus.
        self.event_bus = EventBus(
            self.event_queue,
            self.pipeline_scheduler_mapping,
            self.astrbot_config_mgr,
        )

        # Record startup time.
        self.start_time = int(time.time())

        # Currently running tasks.
        self.curr_tasks: list[asyncio.Task] = []

        # Instantiate platform adapters from config.
        await self.platform_manager.initialize()

        # Event used to shut down the dashboard.
        self.dashboard_shutdown_event = asyncio.Event()

    def _load(self) -> None:
        """Create the event-bus dispatch task and plugin-registered tasks."""
        # dispatch() is an endless coroutine pulling events off the queue.
        event_bus_task = asyncio.create_task(
            self.event_bus.dispatch(),
            name="event_bus",
        )

        # Schedule every coroutine the plugins registered.
        extra_tasks = []
        for task in self.star_context._register_tasks:
            extra_tasks.append(asyncio.create_task(task, name=task.__name__))

        # Wrap every task so failures are logged instead of lost.
        tasks_ = [event_bus_task, *extra_tasks]
        for task in tasks_:
            self.curr_tasks.append(
                asyncio.create_task(self._task_wrapper(task), name=task.get_name()),
            )

        self.start_time = int(time.time())

    async def _task_wrapper(self, task: asyncio.Task) -> None:
        """Await *task*, logging any exception it raises.

        Args:
            task (asyncio.Task): The task to await.
        """
        try:
            await task
        except asyncio.CancelledError:
            pass  # cancellation is expected during shutdown; stay silent
        except Exception as e:
            # Log the full traceback line by line.
            logger.error(f"------- 任务 {task.get_name()} 发生错误: {e}")
            for line in traceback.format_exc().split("\n"):
                logger.error(f"| {line}")
            logger.error("-------")

    async def start(self) -> None:
        """Start AstrBot.

        Loads the event bus and tasks via _load(), fires the
        on_astrbot_loaded hooks, then runs all tasks concurrently.
        """
        self._load()
        logger.info("AstrBot 启动完成。")

        # Fire the "loaded" event hooks.
        handlers = star_handlers_registry.get_handlers_by_event_type(
            EventType.OnAstrBotLoadedEvent,
        )
        for handler in handlers:
            try:
                logger.info(
                    f"hook(on_astrbot_loaded) -> {star_map[handler.handler_module_path].name} - {handler.handler_name}",
                )
                await handler.handler()
            except BaseException:
                logger.error(traceback.format_exc())

        # Run every task in curr_tasks concurrently.
        await asyncio.gather(*self.curr_tasks, return_exceptions=True)

    async def stop(self) -> None:
        """Stop AstrBot: cancel running tasks and terminate all managers."""
        # Request cancellation of every running task.
        for task in self.curr_tasks:
            task.cancel()

        for plugin in self.plugin_manager.context.get_all_stars():
            try:
                await self.plugin_manager._terminate_plugin(plugin)
            except Exception as e:
                logger.warning(traceback.format_exc())
                logger.warning(
                    f"插件 {plugin.name} 未被正常终止 {e!s}, 可能会导致资源泄露等问题。",
                )

        await self.provider_manager.terminate()
        await self.platform_manager.terminate()
        await self.kb_manager.terminate()
        self.dashboard_shutdown_event.set()

        # Wait for each task to actually finish.
        for task in self.curr_tasks:
            try:
                await task
            except asyncio.CancelledError:
                pass
            except Exception as e:
                logger.error(f"任务 {task.get_name()} 发生错误: {e}")

    async def restart(self) -> None:
        """Restart AstrBot: terminate managers, then reboot via the updater."""
        await self.provider_manager.terminate()
        await self.platform_manager.terminate()
        await self.kb_manager.terminate()
        self.dashboard_shutdown_event.set()
        # _reboot replaces the process; run it off the event-loop thread.
        threading.Thread(
            target=self.astrbot_updator._reboot,
            name="restart",
            daemon=True,
        ).start()

    def load_platform(self) -> list[asyncio.Task]:
        """Create and return one run() task per platform adapter instance."""
        tasks = []
        platform_insts = self.platform_manager.get_insts()
        for platform_inst in platform_insts:
            tasks.append(
                asyncio.create_task(
                    platform_inst.run(),
                    name=f"{platform_inst.meta().id}({platform_inst.meta().name})",
                ),
            )
        return tasks

    async def load_pipeline_scheduler(self) -> dict[str, PipelineScheduler]:
        """Build one message-pipeline scheduler per registered config.

        Returns:
            dict[str, PipelineScheduler]: Mapping from config ID to its
            initialized scheduler.
        """
        mapping = {}
        for conf_id, ab_config in self.astrbot_config_mgr.confs.items():
            scheduler = PipelineScheduler(
                PipelineContext(ab_config, self.plugin_manager, conf_id),
            )
            await scheduler.initialize()
            mapping[conf_id] = scheduler
        return mapping

    async def reload_pipeline_scheduler(self, conf_id: str) -> None:
        """Rebuild the pipeline scheduler for a single config, in place.

        Args:
            conf_id (str): ID of the config whose scheduler is rebuilt.

        Raises:
            ValueError: If no config with *conf_id* exists.
        """
        ab_config = self.astrbot_config_mgr.confs.get(conf_id)
        if not ab_config:
            raise ValueError(f"配置文件 {conf_id} 不存在")
        scheduler = PipelineScheduler(
            PipelineContext(ab_config, self.plugin_manager, conf_id),
        )
        await scheduler.initialize()
        self.pipeline_scheduler_mapping[conf_id] = scheduler
|
||||
364
astrbot/core/db/__init__.py
Normal file
364
astrbot/core/db/__init__.py
Normal file
@@ -0,0 +1,364 @@
|
||||
import abc
|
||||
import datetime
|
||||
import typing as T
|
||||
from contextlib import asynccontextmanager
|
||||
from dataclasses import dataclass
|
||||
|
||||
from deprecated import deprecated
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from astrbot.core.db.po import (
|
||||
Attachment,
|
||||
ConversationV2,
|
||||
Persona,
|
||||
PlatformMessageHistory,
|
||||
PlatformSession,
|
||||
PlatformStat,
|
||||
Preference,
|
||||
Stats,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
class BaseDatabase(abc.ABC):
    """Abstract base class for AstrBot database backends.

    Owns the async SQLAlchemy engine/session factory and declares the
    abstract persistence API that concrete backends must implement.
    """

    # Subclasses must override this with a SQLAlchemy async connection URL.
    DATABASE_URL = ""

    def __init__(self) -> None:
        self.engine = create_async_engine(
            self.DATABASE_URL,
            echo=False,
            future=True,
        )
        self.AsyncSessionLocal = sessionmaker(
            self.engine,
            class_=AsyncSession,
            expire_on_commit=False,
        )
        # BUG FIX: get_db() reads self.inited, but it was never assigned
        # here, so the very first get_db() call raised AttributeError.
        self.inited = False

    async def initialize(self):
        """Initialize the database connection (no-op by default)."""

    @asynccontextmanager
    async def get_db(self) -> T.AsyncGenerator[AsyncSession, None]:
        """Yield a database session, lazily initializing on first use."""
        if not self.inited:
            await self.initialize()
            self.inited = True
        async with self.AsyncSessionLocal() as session:
            yield session

    @deprecated(version="4.0.0", reason="Use get_platform_stats instead")
    @abc.abstractmethod
    def get_base_stats(self, offset_sec: int = 86400) -> Stats:
        """Get base statistics (legacy v3 API)."""
        raise NotImplementedError

    @deprecated(version="4.0.0", reason="Use get_platform_stats instead")
    @abc.abstractmethod
    def get_total_message_count(self) -> int:
        """Get the total message count (legacy v3 API)."""
        raise NotImplementedError

    @deprecated(version="4.0.0", reason="Use get_platform_stats instead")
    @abc.abstractmethod
    def get_grouped_base_stats(self, offset_sec: int = 86400) -> Stats:
        """Get grouped base statistics (legacy v3 API)."""
        raise NotImplementedError

    # New methods in v4.0.0

    @abc.abstractmethod
    async def insert_platform_stats(
        self,
        platform_id: str,
        platform_type: str,
        count: int = 1,
        timestamp: datetime.datetime | None = None,
    ) -> None:
        """Insert a new platform statistic record."""
        ...

    @abc.abstractmethod
    async def count_platform_stats(self) -> int:
        """Count the number of platform statistics records."""
        ...

    @abc.abstractmethod
    async def get_platform_stats(self, offset_sec: int = 86400) -> list[PlatformStat]:
        """Get platform statistics within the specified offset in seconds and group by platform_id."""
        ...

    @abc.abstractmethod
    async def get_conversations(
        self,
        user_id: str | None = None,
        platform_id: str | None = None,
    ) -> list[ConversationV2]:
        """Get all conversations for a specific user and platform_id(optional).

        content is not included in the result.
        """
        ...

    @abc.abstractmethod
    async def get_conversation_by_id(self, cid: str) -> ConversationV2:
        """Get a specific conversation by its ID."""
        ...

    @abc.abstractmethod
    async def get_all_conversations(
        self,
        page: int = 1,
        page_size: int = 20,
    ) -> list[ConversationV2]:
        """Get all conversations with pagination."""
        ...

    @abc.abstractmethod
    async def get_filtered_conversations(
        self,
        page: int = 1,
        page_size: int = 20,
        platform_ids: list[str] | None = None,
        search_query: str = "",
        **kwargs,
    ) -> tuple[list[ConversationV2], int]:
        """Get conversations filtered by platform IDs and search query."""
        ...

    @abc.abstractmethod
    async def create_conversation(
        self,
        user_id: str,
        platform_id: str,
        content: list[dict] | None = None,
        title: str | None = None,
        persona_id: str | None = None,
        cid: str | None = None,
        created_at: datetime.datetime | None = None,
        updated_at: datetime.datetime | None = None,
    ) -> ConversationV2:
        """Create a new conversation."""
        ...

    @abc.abstractmethod
    async def update_conversation(
        self,
        cid: str,
        title: str | None = None,
        persona_id: str | None = None,
        content: list[dict] | None = None,
    ) -> None:
        """Update a conversation's history."""
        ...

    @abc.abstractmethod
    async def delete_conversation(self, cid: str) -> None:
        """Delete a conversation by its ID."""
        ...

    @abc.abstractmethod
    async def delete_conversations_by_user_id(self, user_id: str) -> None:
        """Delete all conversations for a specific user."""
        ...

    @abc.abstractmethod
    async def insert_platform_message_history(
        self,
        platform_id: str,
        user_id: str,
        content: dict,
        sender_id: str | None = None,
        sender_name: str | None = None,
    ) -> None:
        """Insert a new platform message history record."""
        ...

    @abc.abstractmethod
    async def delete_platform_message_offset(
        self,
        platform_id: str,
        user_id: str,
        offset_sec: int = 86400,
    ) -> None:
        """Delete platform message history records newer than the specified offset."""
        ...

    @abc.abstractmethod
    async def get_platform_message_history(
        self,
        platform_id: str,
        user_id: str,
        page: int = 1,
        page_size: int = 20,
    ) -> list[PlatformMessageHistory]:
        """Get platform message history for a specific user."""
        ...

    @abc.abstractmethod
    async def insert_attachment(
        self,
        path: str,
        type: str,
        mime_type: str,
    ):
        """Insert a new attachment record."""
        ...

    @abc.abstractmethod
    async def get_attachment_by_id(self, attachment_id: str) -> Attachment:
        """Get an attachment by its ID."""
        ...

    @abc.abstractmethod
    async def insert_persona(
        self,
        persona_id: str,
        system_prompt: str,
        begin_dialogs: list[str] | None = None,
        tools: list[str] | None = None,
    ) -> Persona:
        """Insert a new persona record."""
        ...

    @abc.abstractmethod
    async def get_persona_by_id(self, persona_id: str) -> Persona:
        """Get a persona by its ID."""
        ...

    @abc.abstractmethod
    async def get_personas(self) -> list[Persona]:
        """Get all personas for a specific bot."""
        ...

    @abc.abstractmethod
    async def update_persona(
        self,
        persona_id: str,
        system_prompt: str | None = None,
        begin_dialogs: list[str] | None = None,
        tools: list[str] | None = None,
    ) -> Persona | None:
        """Update a persona's system prompt or begin dialogs."""
        ...

    @abc.abstractmethod
    async def delete_persona(self, persona_id: str) -> None:
        """Delete a persona by its ID."""
        ...

    @abc.abstractmethod
    async def insert_preference_or_update(
        self,
        scope: str,
        scope_id: str,
        key: str,
        value: dict,
    ) -> Preference:
        """Insert a new preference record."""
        ...

    @abc.abstractmethod
    async def get_preference(self, scope: str, scope_id: str, key: str) -> Preference:
        """Get a preference by scope ID and key."""
        ...

    @abc.abstractmethod
    async def get_preferences(
        self,
        scope: str,
        scope_id: str | None = None,
        key: str | None = None,
    ) -> list[Preference]:
        """Get all preferences for a specific scope ID or key."""
        ...

    @abc.abstractmethod
    async def remove_preference(self, scope: str, scope_id: str, key: str) -> None:
        """Remove a preference by scope ID and key."""
        ...

    @abc.abstractmethod
    async def clear_preferences(self, scope: str, scope_id: str) -> None:
        """Clear all preferences for a specific scope ID."""
        ...

    # Planned LLM-message API, intentionally kept for reference:
    # @abc.abstractmethod
    # async def insert_llm_message(
    #     self,
    #     cid: str,
    #     role: str,
    #     content: list,
    #     tool_calls: list = None,
    #     tool_call_id: str = None,
    #     parent_id: str = None,
    # ) -> LLMMessage:
    #     """Insert a new LLM message into the conversation."""
    #     ...

    # @abc.abstractmethod
    # async def get_llm_messages(self, cid: str) -> list[LLMMessage]:
    #     """Get all LLM messages for a specific conversation."""
    #     ...

    @abc.abstractmethod
    async def get_session_conversations(
        self,
        page: int = 1,
        page_size: int = 20,
        search_query: str | None = None,
        platform: str | None = None,
    ) -> tuple[list[dict], int]:
        """Get paginated session conversations with joined conversation and persona details, support search and platform filter."""
        ...

    # ====
    # Platform Session Management
    # ====

    @abc.abstractmethod
    async def create_platform_session(
        self,
        creator: str,
        platform_id: str = "webchat",
        session_id: str | None = None,
        display_name: str | None = None,
        is_group: int = 0,
    ) -> PlatformSession:
        """Create a new Platform session."""
        ...

    @abc.abstractmethod
    async def get_platform_session_by_id(
        self, session_id: str
    ) -> PlatformSession | None:
        """Get a Platform session by its ID."""
        ...

    @abc.abstractmethod
    async def get_platform_sessions_by_creator(
        self,
        creator: str,
        platform_id: str | None = None,
        page: int = 1,
        page_size: int = 20,
    ) -> list[PlatformSession]:
        """Get all Platform sessions for a specific creator (username) and optionally platform."""
        ...

    @abc.abstractmethod
    async def update_platform_session(
        self,
        session_id: str,
        display_name: str | None = None,
    ) -> None:
        """Update a Platform session's updated_at timestamp and optionally display_name."""
        ...

    @abc.abstractmethod
    async def delete_platform_session(self, session_id: str) -> None:
        """Delete a Platform session by its ID."""
        ...
|
||||
69
astrbot/core/db/migration/helper.py
Normal file
69
astrbot/core/db/migration/helper.py
Normal file
@@ -0,0 +1,69 @@
|
||||
import os
|
||||
|
||||
from astrbot.api import logger, sp
|
||||
from astrbot.core.config import AstrBotConfig
|
||||
from astrbot.core.db import BaseDatabase
|
||||
from astrbot.core.utils.astrbot_path import get_astrbot_data_path
|
||||
|
||||
from .migra_3_to_4 import (
|
||||
migration_conversation_table,
|
||||
migration_persona_data,
|
||||
migration_platform_table,
|
||||
migration_preferences,
|
||||
migration_webchat_data,
|
||||
)
|
||||
|
||||
|
||||
async def check_migration_needed_v4(db_helper: BaseDatabase) -> bool:
    """Decide whether the v3 -> v4 database migration must run.

    Migration is needed only when a legacy ``data_v3.db`` file exists in the
    data directory AND the ``migration_done_v4`` preference is not yet set.
    """
    # Only consider migrating when legacy v3 data is actually on disk.
    legacy_db_path = os.path.join(get_astrbot_data_path(), "data_v3.db")
    if not os.path.exists(legacy_db_path):
        return False

    already_done = await db_helper.get_preference(
        "global",
        "global",
        "migration_done_v4",
    )
    return not already_done
|
||||
|
||||
|
||||
async def do_migration_v4(
    db_helper: BaseDatabase,
    platform_id_map: dict[str, dict[str, str]],
    astrbot_config: AstrBotConfig,
) -> None:
    """Run the v3 -> v4 database migration, then mark it done.

    Migrates the legacy webchat_conversation table into the new conversation
    table and the legacy platform records into the new platform_stats table.
    No-op when check_migration_needed_v4() says migration is not required.

    Args:
        db_helper (BaseDatabase): Target (v4) database backend.
        platform_id_map (dict[str, dict[str, str]]): Map from old platform
            names to their new ``platform_id``/``platform_type``.
        astrbot_config (AstrBotConfig): Current AstrBot configuration.
    """
    if not await check_migration_needed_v4(db_helper):
        return

    logger.info("开始执行数据库迁移...")

    # Migrate the conversation table.
    await migration_conversation_table(db_helper, platform_id_map)

    # Migrate persona data.
    await migration_persona_data(db_helper, astrbot_config)

    # Migrate WebChat data.
    await migration_webchat_data(db_helper, platform_id_map)

    # Migrate preferences.
    await migration_preferences(db_helper, platform_id_map)

    # Migrate the platform statistics table.
    await migration_platform_table(db_helper, platform_id_map)

    # Mark the migration as completed so it never runs again.
    await sp.put_async("global", "global", "migration_done_v4", True)

    logger.info("数据库迁移完成。")
|
||||
357
astrbot/core/db/migration/migra_3_to_4.py
Normal file
357
astrbot/core/db/migration/migra_3_to_4.py
Normal file
@@ -0,0 +1,357 @@
|
||||
import datetime
|
||||
import json
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from astrbot.api import logger, sp
|
||||
from astrbot.core.config import AstrBotConfig
|
||||
from astrbot.core.config.default import DB_PATH
|
||||
from astrbot.core.db.po import ConversationV2, PlatformMessageHistory
|
||||
from astrbot.core.platform.astr_message_event import MessageSesion
|
||||
|
||||
from .. import BaseDatabase
|
||||
from .shared_preferences_v3 import sp as sp_v3
|
||||
from .sqlite_v3 import SQLiteDatabase as SQLiteV3DatabaseV3
|
||||
|
||||
"""
|
||||
1. 迁移旧的 webchat_conversation 表到新的 conversation 表。
|
||||
2. 迁移旧的 platform 到新的 platform_stats 表。
|
||||
"""
|
||||
|
||||
|
||||
def get_platform_id(
    platform_id_map: dict[str, dict[str, str]],
    old_platform_name: str,
) -> str:
    """Map a v3 platform name to its v4 platform ID.

    Falls back to the old name itself when the map has no entry for it, or
    when the entry lacks a ``platform_id`` key.
    """
    return platform_id_map.get(old_platform_name, {}).get(
        "platform_id",
        old_platform_name,
    )
|
||||
|
||||
|
||||
def get_platform_type(
    platform_id_map: dict[str, dict[str, str]],
    old_platform_name: str,
) -> str:
    """Map a v3 platform name to its v4 platform type.

    Falls back to the old name itself when the map has no entry for it, or
    when the entry lacks a ``platform_type`` key.
    """
    return platform_id_map.get(old_platform_name, {}).get(
        "platform_type",
        old_platform_name,
    )
|
||||
|
||||
|
||||
async def migration_conversation_table(
    db_helper: BaseDatabase,
    platform_id_map: dict[str, dict[str, str]],
):
    """Migrate legacy v3 conversation rows into the v4 ConversationV2 table.

    Reads every conversation out of the v3 SQLite database, remaps its
    platform name to the new platform ID, and inserts it into the v4 store
    inside a single transaction. Per-row failures are logged and skipped.

    Args:
        db_helper (BaseDatabase): Target (v4) database backend.
        platform_id_map (dict[str, dict[str, str]]): Map from old platform
            names to their new ``platform_id``/``platform_type``.
    """
    db_helper_v3 = SQLiteV3DatabaseV3(
        db_path=DB_PATH.replace("data_v4.db", "data_v3.db"),
    )
    conversations, total_cnt = db_helper_v3.get_all_conversations(
        page=1,
        page_size=10000000,
    )
    logger.info(f"迁移 {total_cnt} 条旧的会话数据到新的表中...")

    async with db_helper.get_db() as dbsession:
        dbsession: AsyncSession
        async with dbsession.begin():
            for idx, conversation in enumerate(conversations):
                # Emit a progress line roughly every 10% of rows.
                if total_cnt > 0 and (idx + 1) % max(1, total_cnt // 10) == 0:
                    progress = int((idx + 1) / total_cnt * 100)
                    if progress % 10 == 0:
                        logger.info(f"进度: {progress}% ({idx + 1}/{total_cnt})")
                try:
                    conv = db_helper_v3.get_conversation_by_user_id(
                        user_id=conversation.get("user_id", "unknown"),
                        cid=conversation.get("cid", "unknown"),
                    )
                    if not conv:
                        logger.info(
                            f"未找到该条旧会话对应的具体数据: {conversation}, 跳过。",
                        )
                        # BUG FIX: `continue` was missing here, so a missing
                        # row fell through and dereferenced None below.
                        continue
                    if ":" not in conv.user_id:
                        # Not a unified-message-origin style ID; skip it.
                        continue
                    session = MessageSesion.from_str(session_str=conv.user_id)
                    platform_id = get_platform_id(
                        platform_id_map,
                        session.platform_name,
                    )
                    session.platform_id = platform_id  # remap to the new platform ID
                    conv_v2 = ConversationV2(
                        user_id=str(session),
                        content=json.loads(conv.history) if conv.history else [],
                        platform_id=platform_id,
                        title=conv.title,
                        persona_id=conv.persona_id,
                        conversation_id=conv.cid,
                        created_at=datetime.datetime.fromtimestamp(conv.created_at),
                        updated_at=datetime.datetime.fromtimestamp(conv.updated_at),
                    )
                    dbsession.add(conv_v2)
                except Exception as e:
                    # Keep migrating the remaining rows on per-row failure.
                    logger.error(
                        f"迁移旧会话 {conversation.get('cid', 'unknown')} 失败: {e}",
                        exc_info=True,
                    )
    logger.info(f"成功迁移 {total_cnt} 条旧的会话数据到新表。")
|
||||
|
||||
|
||||
async def migration_platform_table(
    db_helper: BaseDatabase,
    platform_id_map: dict[str, dict[str, str]],
):
    """Migrate v3 per-message platform stats into hourly ``platform_stats`` rows.

    v3 stored one row per message; this aggregates them into hour-aligned
    buckets and upserts the counts into the v4 ``platform_stats`` table.

    Args:
        db_helper: v4 database helper used for the inserts.
        platform_id_map: mapping of legacy platform names to new IDs/types.
    """
    db_helper_v3 = SQLiteV3DatabaseV3(
        db_path=DB_PATH.replace("data_v4.db", "data_v3.db"),
    )
    # Fetch everything since 2023-04-10 (project epoch) by computing the
    # elapsed seconds as the lookback offset.
    secs_from_2023_4_10_to_now = (
        datetime.datetime.now(datetime.timezone.utc)
        - datetime.datetime(2023, 4, 10, tzinfo=datetime.timezone.utc)
    ).total_seconds()
    offset_sec = int(secs_from_2023_4_10_to_now)
    logger.info(f"迁移旧平台数据,offset_sec: {offset_sec} 秒。")
    stats = db_helper_v3.get_base_stats(offset_sec=offset_sec)
    logger.info(f"迁移 {len(stats.platform)} 条旧的平台数据到新的表中...")
    platform_stats_v3 = stats.platform

    if not platform_stats_v3:
        logger.info("没有找到旧平台数据,跳过迁移。")
        return

    first_time_stamp = platform_stats_v3[0].timestamp
    end_time_stamp = platform_stats_v3[-1].timestamp
    start_time = first_time_stamp - (first_time_stamp % 3600)  # round down to the hour
    end_time = end_time_stamp + (3600 - (end_time_stamp % 3600))  # round up to the hour

    idx = 0  # cursor into platform_stats_v3; rows are consumed in order

    async with db_helper.get_db() as dbsession:
        dbsession: AsyncSession
        async with dbsession.begin():
            total_buckets = (end_time - start_time) // 3600
            for bucket_idx, bucket_end in enumerate(range(start_time, end_time, 3600)):
                if bucket_idx % 500 == 0:
                    progress = int((bucket_idx + 1) / total_buckets * 100)
                    logger.info(f"进度: {progress}% ({bucket_idx + 1}/{total_buckets})")
                cnt = 0
                bucket_name = None
                # Consume all v3 rows that fall before this bucket boundary,
                # remembering the platform name of the rows actually consumed.
                while (
                    idx < len(platform_stats_v3)
                    and platform_stats_v3[idx].timestamp < bucket_end
                ):
                    cnt += platform_stats_v3[idx].count
                    bucket_name = platform_stats_v3[idx].name
                    idx += 1
                if cnt == 0 or bucket_name is None:
                    continue
                # BUGFIX: previously read platform_stats_v3[idx].name *after*
                # the loop, i.e. the first *unconsumed* row — wrong platform
                # for the bucket, and an IndexError once idx reached the end.
                platform_id = get_platform_id(
                    platform_id_map,
                    bucket_name,
                )
                platform_type = get_platform_type(
                    platform_id_map,
                    bucket_name,
                )
                try:
                    # Upsert: accumulate counts for an existing bucket.
                    await dbsession.execute(
                        text("""
                        INSERT INTO platform_stats (timestamp, platform_id, platform_type, count)
                        VALUES (:timestamp, :platform_id, :platform_type, :count)
                        ON CONFLICT(timestamp, platform_id, platform_type) DO UPDATE SET
                        count = platform_stats.count + EXCLUDED.count
                        """),
                        {
                            "timestamp": datetime.datetime.fromtimestamp(
                                bucket_end,
                                tz=datetime.timezone.utc,
                            ),
                            "platform_id": platform_id,
                            "platform_type": platform_type,
                            "count": cnt,
                        },
                    )
                except Exception:
                    logger.error(
                        f"迁移平台统计数据失败: {platform_id}, {platform_type}, 时间戳: {bucket_end}",
                        exc_info=True,
                    )
    logger.info(f"成功迁移 {len(platform_stats_v3)} 条旧的平台数据到新表。")
|
||||
|
||||
|
||||
async def migration_webchat_data(
    db_helper: BaseDatabase,
    platform_id_map: dict[str, dict[str, str]],
):
    """Migrate WebChat history into the new PlatformMessageHistory table.

    v3 WebChat conversations are identified by a ``user_id`` with no platform
    prefix (no ``:``). Each stored message becomes one PlatformMessageHistory
    row keyed by the old conversation id.

    Args:
        db_helper: v4 database helper used to write the new rows.
        platform_id_map: unused here (WebChat is always ``"webchat"``), kept
            for signature parity with the sibling migration functions.
    """
    db_helper_v3 = SQLiteV3DatabaseV3(
        db_path=DB_PATH.replace("data_v4.db", "data_v3.db"),
    )
    conversations, total_cnt = db_helper_v3.get_all_conversations(
        page=1,
        page_size=10000000,
    )
    logger.info(f"迁移 {total_cnt} 条旧的 WebChat 会话数据到新的表中...")

    async with db_helper.get_db() as dbsession:
        dbsession: AsyncSession
        async with dbsession.begin():
            for idx, conversation in enumerate(conversations):
                # Emit a progress line roughly every 10%.
                if total_cnt > 0 and (idx + 1) % max(1, total_cnt // 10) == 0:
                    progress = int((idx + 1) / total_cnt * 100)
                    if progress % 10 == 0:
                        logger.info(f"进度: {progress}% ({idx + 1}/{total_cnt})")
                try:
                    conv = db_helper_v3.get_conversation_by_user_id(
                        user_id=conversation.get("user_id", "unknown"),
                        cid=conversation.get("cid", "unknown"),
                    )
                    if not conv:
                        logger.info(
                            f"未找到该条旧会话对应的具体数据: {conversation}, 跳过。",
                        )
                        # BUGFIX: previously fell through and raised an
                        # AttributeError on ``conv.user_id`` below.
                        continue
                    if ":" in conv.user_id:
                        # Has a platform prefix => not a WebChat conversation;
                        # handled by migration_conversation_table().
                        continue
                    platform_id = "webchat"
                    history = json.loads(conv.history) if conv.history else []
                    for msg in history:
                        type_ = msg.get("type")  # user type, "bot" or "user"
                        new_history = PlatformMessageHistory(
                            platform_id=platform_id,
                            user_id=conv.cid,  # we use conv.cid as user_id for webchat
                            content=msg,
                            sender_id=type_,
                            sender_name=type_,
                        )
                        dbsession.add(new_history)

                except Exception:
                    # Best-effort migration: log and keep going on bad rows.
                    logger.error(
                        f"迁移旧 WebChat 会话 {conversation.get('cid', 'unknown')} 失败",
                        exc_info=True,
                    )

    logger.info(f"成功迁移 {total_cnt} 条旧的 WebChat 会话数据到新表。")
|
||||
|
||||
|
||||
async def migration_persona_data(
    db_helper: BaseDatabase,
    astrbot_config: AstrBotConfig,
):
    """Migrate Persona data into the new table.

    Legacy personas live in the ``persona`` section of the config file. Each
    entry is converted into a row of the new persona table, folding the
    mood-imitation few-shot dialogs into the system prompt.
    """
    v3_persona_config: list[dict] = astrbot_config.get("persona", [])
    total_personas = len(v3_persona_config)
    logger.info(f"迁移 {total_personas} 个 Persona 配置到新表中...")

    for idx, persona in enumerate(v3_persona_config):
        # Emit a progress line roughly every 10%.
        if total_personas > 0 and (idx + 1) % max(1, total_personas // 10) == 0:
            progress = int((idx + 1) / total_personas * 100)
            if progress % 10 == 0:
                logger.info(f"进度: {progress}% ({idx + 1}/{total_personas})")
        try:
            begin_dialogs = persona.get("begin_dialogs", [])
            mood_imitation_dialogs = persona.get("mood_imitation_dialogs", [])
            # Alternate speakers: even indices are "A" (user side), odd
            # indices are "B" (the tone the model should imitate).
            mood_prompt = "".join(
                f"{'A' if turn % 2 == 0 else 'B'}: {dialog}\n"
                for turn, dialog in enumerate(mood_imitation_dialogs)
            )
            system_prompt = persona.get("prompt", "")
            if mood_prompt:
                system_prompt += f"Here are few shots of dialogs, you need to imitate the tone of 'B' in the following dialogs to respond:\n {mood_prompt}"
            persona_new = await db_helper.insert_persona(
                persona_id=persona["name"],
                system_prompt=system_prompt,
                begin_dialogs=begin_dialogs,
            )
            logger.info(
                f"迁移 Persona {persona['name']}({persona_new.system_prompt[:30]}...) 到新表成功。",
            )
        except Exception as e:
            logger.error(f"解析 Persona 配置失败:{e}")
|
||||
|
||||
|
||||
async def migration_preferences(
    db_helper: BaseDatabase,
    platform_id_map: dict[str, dict[str, str]],
):
    """Migrate v3 shared preferences into the new scoped preference store.

    Copies a fixed set of keys into the "global" scope, then migrates the
    per-session (umo) preferences — selected conversation, service config,
    variables and provider choices — into the "umo" scope, remapping the
    platform name embedded in each umo to the new platform ID.
    """

    def _remapped_session(umo: str):
        """Parse a v3 umo and swap its platform name for the new platform ID."""
        parsed = MessageSesion.from_str(session_str=umo)
        parsed.platform_id = get_platform_id(platform_id_map, parsed.platform_name)
        return parsed

    # 1. global scope migration
    global_keys = (
        "inactivated_llm_tools",
        "inactivated_plugins",
        "curr_provider",
        "curr_provider_tts",
        "curr_provider_stt",
        "alter_cmd",
    )
    for key in global_keys:
        value = sp_v3.get(key)
        if value is None:
            continue
        await sp.put_async("global", "global", key, value)
        logger.info(f"迁移全局偏好设置 {key} 成功,值: {value}")

    # 2. umo scope migration
    session_conversation = sp_v3.get("session_conversation", default={})
    for umo, conversation_id in session_conversation.items():
        if not umo or not conversation_id:
            continue
        try:
            session = _remapped_session(umo)
            platform_id = session.platform_id
            await sp.put_async("umo", str(session), "sel_conv_id", conversation_id)
            logger.info(f"迁移会话 {umo} 的对话数据到新表成功,平台 ID: {platform_id}")
        except Exception as e:
            logger.error(f"迁移会话 {umo} 的对话数据失败: {e}", exc_info=True)

    session_service_config = sp_v3.get("session_service_config", default={})
    for umo, config in session_service_config.items():
        if not umo or not config:
            continue
        try:
            session = _remapped_session(umo)
            platform_id = session.platform_id
            await sp.put_async("umo", str(session), "session_service_config", config)
            logger.info(f"迁移会话 {umo} 的服务配置到新表成功,平台 ID: {platform_id}")
        except Exception as e:
            logger.error(f"迁移会话 {umo} 的服务配置失败: {e}", exc_info=True)

    session_variables = sp_v3.get("session_variables", default={})
    for umo, variables in session_variables.items():
        if not umo or not variables:
            continue
        try:
            session = _remapped_session(umo)
            await sp.put_async("umo", str(session), "session_variables", variables)
        except Exception as e:
            logger.error(f"迁移会话 {umo} 的变量失败: {e}", exc_info=True)

    session_provider_perf = sp_v3.get("session_provider_perf", default={})
    for umo, perf in session_provider_perf.items():
        if not umo or not perf:
            continue
        try:
            session = _remapped_session(umo)
            platform_id = session.platform_id
            # One preference key per provider type (chat / tts / stt ...).
            for provider_type, provider_id in perf.items():
                await sp.put_async(
                    "umo",
                    str(session),
                    f"provider_perf_{provider_type}",
                    provider_id,
                )
            logger.info(
                f"迁移会话 {umo} 的提供商偏好到新表成功,平台 ID: {platform_id}",
            )
        except Exception as e:
            logger.error(f"迁移会话 {umo} 的提供商偏好失败: {e}", exc_info=True)
|
||||
44
astrbot/core/db/migration/migra_45_to_46.py
Normal file
44
astrbot/core/db/migration/migra_45_to_46.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from astrbot.api import logger, sp
|
||||
from astrbot.core.astrbot_config_mgr import AstrBotConfigManager
|
||||
from astrbot.core.umop_config_router import UmopConfigRouter
|
||||
|
||||
|
||||
async def migrate_45_to_46(acm: AstrBotConfigManager, ucr: UmopConfigRouter):
    """Migrate config data from AstrBot 4.5 to 4.6.

    In 4.5 each config entry could carry an inline ``umop`` list (the umo
    patterns routed to that config). 4.6 moves this routing information into
    the UmopConfigRouter, so this migration extracts the umo->conf_id
    mappings, strips the legacy ``umop`` fields, and persists both sides.

    Args:
        acm: config manager whose ``abconf_data`` holds the config entries.
        ucr: router that receives the extracted umo->conf_id mapping.
    """
    abconf_data = acm.abconf_data

    if not isinstance(abconf_data, dict):
        # should be unreachable
        logger.warning(
            f"migrate_45_to_46: abconf_data is not a dict (type={type(abconf_data)}). Value: {abconf_data!r}",
        )
        return

    # Migration is needed iff any entry still carries a legacy "umop" field.
    need_migration = any(
        isinstance(conf_info, dict) and "umop" in conf_info
        for conf_info in abconf_data.values()
    )
    if not need_migration:
        return

    logger.info("Starting migration from version 4.5 to 4.6")

    # extract umo->conf_id mapping (first config claiming a umo wins)
    umo_to_conf_id = {}
    for conf_id, conf_info in abconf_data.items():
        if isinstance(conf_info, dict) and "umop" in conf_info:
            # pop() strips the legacy field from the config in place.
            umop_ls = conf_info.pop("umop")
            if not isinstance(umop_ls, list):
                continue
            for umo in umop_ls:
                if isinstance(umo, str) and umo not in umo_to_conf_id:
                    umo_to_conf_id[umo] = conf_id

    # update the abconf data (now stripped of the legacy "umop" fields)
    await sp.global_put("abconf_mapping", abconf_data)
    # update the umop config router
    await ucr.update_routing_data(umo_to_conf_id)

    # BUGFIX: the completion log previously said "version 45 to 46",
    # inconsistent with the start log's "4.5 to 4.6".
    logger.info("Migration from version 4.5 to 4.6 completed successfully")
|
||||
131
astrbot/core/db/migration/migra_webchat_session.py
Normal file
131
astrbot/core/db/migration/migra_webchat_session.py
Normal file
@@ -0,0 +1,131 @@
|
||||
"""Migration script for WebChat sessions.
|
||||
|
||||
This migration creates PlatformSession from existing platform_message_history records.
|
||||
|
||||
Changes:
|
||||
- Creates platform_sessions table
|
||||
- Adds platform_id field (default: 'webchat')
|
||||
- Adds display_name field
|
||||
- Session_id format: {platform_id}_{uuid}
|
||||
"""
|
||||
|
||||
from sqlalchemy import func, select
|
||||
from sqlmodel import col
|
||||
|
||||
from astrbot.api import logger, sp
|
||||
from astrbot.core.db import BaseDatabase
|
||||
from astrbot.core.db.po import ConversationV2, PlatformMessageHistory, PlatformSession
|
||||
|
||||
|
||||
async def migrate_webchat_session(db_helper: BaseDatabase):
    """Create PlatformSession records from platform_message_history.

    This migration extracts all unique user_ids from platform_message_history
    where platform_id='webchat' and creates corresponding PlatformSession records.
    """
    # Skip if this migration already ran (flag stored as a global preference).
    migration_done = await db_helper.get_preference(
        "global", "global", "migration_done_webchat_session_1"
    )
    if migration_done:
        return

    logger.info("开始执行数据库迁移(WebChat 会话迁移)...")

    try:
        async with db_helper.get_db() as session:
            # Build PlatformSession rows from platform_message_history:
            # one group per user_id with earliest/latest activity timestamps.
            # NOTE(review): sender_name is selected without an aggregate while
            # grouping by user_id only — relies on SQLite's permissive GROUP BY;
            # confirm the backend is SQLite.
            query = (
                select(
                    col(PlatformMessageHistory.user_id),
                    col(PlatformMessageHistory.sender_name),
                    func.min(PlatformMessageHistory.created_at).label("earliest"),
                    func.max(PlatformMessageHistory.updated_at).label("latest"),
                )
                .where(col(PlatformMessageHistory.platform_id) == "webchat")
                .where(col(PlatformMessageHistory.sender_id) != "bot")
                .group_by(col(PlatformMessageHistory.user_id))
            )

            result = await session.execute(query)
            webchat_users = result.all()

            if not webchat_users:
                logger.info("没有找到需要迁移的 WebChat 数据")
                # Nothing to do — still mark the migration as done.
                await sp.put_async(
                    "global", "global", "migration_done_webchat_session_1", True
                )
                return

            logger.info(f"找到 {len(webchat_users)} 个 WebChat 会话需要迁移")

            # Collect already-existing session ids so reruns don't duplicate.
            existing_query = select(col(PlatformSession.session_id))
            existing_result = await session.execute(existing_query)
            existing_session_ids = {row[0] for row in existing_result.fetchall()}

            # Look up conversation titles to use as display_name.
            # For each user_id the matching conversation user_id has the form:
            # webchat:FriendMessage:webchat!astrbot!{user_id}
            user_ids_to_query = [
                f"webchat:FriendMessage:webchat!astrbot!{user_id}"
                for user_id, _, _, _ in webchat_users
            ]
            conv_query = select(
                col(ConversationV2.user_id), col(ConversationV2.title)
            ).where(col(ConversationV2.user_id).in_(user_ids_to_query))
            conv_result = await session.execute(conv_query)
            # Map bare user_id -> conversation title.
            title_map = {
                user_id.replace("webchat:FriendMessage:webchat!astrbot!", ""): title
                for user_id, title in conv_result.fetchall()
            }

            # Build the PlatformSession rows to insert in one batch.
            sessions_to_add = []
            skipped_count = 0

            for user_id, sender_name, created_at, updated_at in webchat_users:
                # The history user_id doubles as the webchat session_id.
                session_id = user_id

                # sender_name is usually the username but may be None.
                creator = sender_name if sender_name else "guest"

                # Skip sessions that already exist.
                if session_id in existing_session_ids:
                    logger.debug(f"会话 {session_id} 已存在,跳过")
                    skipped_count += 1
                    continue

                # display_name comes from the conversation title, if any.
                display_name = title_map.get(user_id)

                # Create the new PlatformSession, preserving the original
                # activity timestamps from the message history.
                new_session = PlatformSession(
                    session_id=session_id,
                    platform_id="webchat",
                    creator=creator,
                    is_group=0,
                    created_at=created_at,
                    updated_at=updated_at,
                    display_name=display_name,
                )
                sessions_to_add.append(new_session)

            # Batch insert.
            if sessions_to_add:
                session.add_all(sessions_to_add)
                await session.commit()

                logger.info(
                    f"WebChat 会话迁移完成!成功迁移: {len(sessions_to_add)}, 跳过: {skipped_count}",
                )
            else:
                logger.info("没有新会话需要迁移")

            # Mark the migration as done.
            await sp.put_async("global", "global", "migration_done_webchat_session_1", True)

    except Exception as e:
        # Re-raise: a failed migration must not be silently marked done.
        logger.error(f"迁移过程中发生错误: {e}", exc_info=True)
        raise
|
||||
48
astrbot/core/db/migration/shared_preferences_v3.py
Normal file
48
astrbot/core/db/migration/shared_preferences_v3.py
Normal file
@@ -0,0 +1,48 @@
|
||||
import json
|
||||
import os
|
||||
from typing import TypeVar
|
||||
|
||||
from astrbot.core.utils.astrbot_path import get_astrbot_data_path
|
||||
|
||||
_VT = TypeVar("_VT")


class SharedPreferences:
    """A tiny JSON-file-backed key/value store (legacy v3 format).

    Every mutation rewrites the whole backing file; not safe for concurrent
    writers. Used only to read/migrate old ``shared_preferences.json`` data.
    """

    def __init__(self, path=None):
        """Open (or create) the store at *path*.

        Args:
            path: backing JSON file; defaults to ``shared_preferences.json``
                inside AstrBot's data directory.
        """
        if path is None:
            path = os.path.join(get_astrbot_data_path(), "shared_preferences.json")
        self.path = path
        self._data = self._load_preferences()

    def _load_preferences(self):
        """Load the JSON file; a corrupt file is deleted and treated as empty."""
        if os.path.exists(self.path):
            try:
                # BUGFIX: explicit UTF-8 — the file is written with
                # ensure_ascii=False, so the platform's locale default
                # encoding (e.g. on Windows) could fail to read it back.
                with open(self.path, encoding="utf-8") as f:
                    return json.load(f)
            except json.JSONDecodeError:
                # Corrupt store: drop it rather than crash on every start.
                os.remove(self.path)
        return {}

    def _save_preferences(self):
        """Write the in-memory data back to disk."""
        with open(self.path, "w", encoding="utf-8") as f:
            json.dump(self._data, f, indent=4, ensure_ascii=False)
            f.flush()

    def get(self, key, default: _VT = None) -> _VT:
        """Return the stored value for *key*, or *default* when absent."""
        return self._data.get(key, default)

    def put(self, key, value):
        """Store *value* under *key* and persist immediately."""
        self._data[key] = value
        self._save_preferences()

    def remove(self, key):
        """Delete *key* (if present) and persist immediately."""
        if key in self._data:
            del self._data[key]
            self._save_preferences()

    def clear(self):
        """Drop all stored keys and persist immediately."""
        self._data.clear()
        self._save_preferences()
|
||||
|
||||
|
||||
sp = SharedPreferences()
|
||||
497
astrbot/core/db/migration/sqlite_v3.py
Normal file
497
astrbot/core/db/migration/sqlite_v3.py
Normal file
@@ -0,0 +1,497 @@
|
||||
import sqlite3
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from astrbot.core.db.po import Platform, Stats
|
||||
|
||||
|
||||
@dataclass
class Conversation:
    """LLM conversation storage (legacy v3 schema).

    For web chat, ``history`` stores every message, including commands,
    replies and images. For other platforms, non-LLM replies are not stored
    (they are assumed to already live on the platform itself).
    """

    user_id: str  # v3 session id; may embed a "platform:type:..." prefix
    cid: str  # conversation id
    history: str = ""  # JSON-encoded list of messages
    """字符串格式的列表。"""
    created_at: int = 0  # unix timestamp, seconds
    updated_at: int = 0  # unix timestamp, seconds
    title: str = ""
    persona_id: str = ""
|
||||
|
||||
|
||||
INIT_SQL = """
|
||||
CREATE TABLE IF NOT EXISTS platform(
|
||||
name VARCHAR(32),
|
||||
count INTEGER,
|
||||
timestamp INTEGER
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS llm(
|
||||
name VARCHAR(32),
|
||||
count INTEGER,
|
||||
timestamp INTEGER
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS plugin(
|
||||
name VARCHAR(32),
|
||||
count INTEGER,
|
||||
timestamp INTEGER
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS command(
|
||||
name VARCHAR(32),
|
||||
count INTEGER,
|
||||
timestamp INTEGER
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS llm_history(
|
||||
provider_type VARCHAR(32),
|
||||
session_id VARCHAR(32),
|
||||
content TEXT
|
||||
);
|
||||
|
||||
-- ATRI
|
||||
CREATE TABLE IF NOT EXISTS atri_vision(
|
||||
id TEXT,
|
||||
url_or_path TEXT,
|
||||
caption TEXT,
|
||||
is_meme BOOLEAN,
|
||||
keywords TEXT,
|
||||
platform_name VARCHAR(32),
|
||||
session_id VARCHAR(32),
|
||||
sender_nickname VARCHAR(32),
|
||||
timestamp INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS webchat_conversation(
|
||||
user_id TEXT, -- 会话 id
|
||||
cid TEXT, -- 对话 id
|
||||
history TEXT,
|
||||
created_at INTEGER,
|
||||
updated_at INTEGER,
|
||||
title TEXT,
|
||||
persona_id TEXT
|
||||
);
|
||||
|
||||
PRAGMA encoding = 'UTF-8';
|
||||
"""
|
||||
|
||||
|
||||
class SQLiteDatabase:
    """Read/write helper for the legacy AstrBot v3 SQLite database.

    Creates the v3 schema on first use and exposes the accessors the v3->v4
    migration relies on (platform stats and ``webchat_conversation`` CRUD).
    """

    def __init__(self, db_path: str) -> None:
        super().__init__()
        self.db_path = db_path

        sql = INIT_SQL

        # Initialize the database schema (idempotent).
        self.conn = self._get_conn(self.db_path)
        c = self.conn.cursor()
        c.executescript(sql)
        self.conn.commit()

        # Older v3 databases may lack the title / persona_id columns on
        # webchat_conversation; add them in place if missing.
        c.execute(
            """
            PRAGMA table_info(webchat_conversation)
            """,
        )
        res = c.fetchall()
        has_title = False
        has_persona_id = False
        for row in res:
            if row[1] == "title":
                has_title = True
            if row[1] == "persona_id":
                has_persona_id = True
        if not has_title:
            c.execute(
                """
                ALTER TABLE webchat_conversation ADD COLUMN title TEXT;
                """,
            )
            self.conn.commit()
        if not has_persona_id:
            c.execute(
                """
                ALTER TABLE webchat_conversation ADD COLUMN persona_id TEXT;
                """,
            )
            self.conn.commit()

        c.close()

    def _get_conn(self, db_path: str) -> sqlite3.Connection:
        """Open a new connection to *db_path*.

        BUGFIX: previously connected to ``self.db_path`` regardless of the
        argument, silently ignoring the caller-supplied path. (All current
        call sites pass ``self.db_path``, so behavior is unchanged for them.)
        """
        conn = sqlite3.connect(db_path)
        conn.text_factory = str
        return conn

    def _exec_sql(self, sql: str, params: tuple | None = None):
        """Execute one statement and commit, reconnecting if the cached
        connection was closed (e.g. used from another thread)."""
        conn = self.conn
        try:
            c = self.conn.cursor()
        except sqlite3.ProgrammingError:
            conn = self._get_conn(self.db_path)
            c = conn.cursor()

        if params:
            c.execute(sql, params)
            c.close()
        else:
            c.execute(sql)
            c.close()

        conn.commit()

    def insert_platform_metrics(self, metrics: dict):
        """Insert one timestamped count row per platform name."""
        for k, v in metrics.items():
            self._exec_sql(
                """
                INSERT INTO platform(name, count, timestamp) VALUES (?, ?, ?)
                """,
                (k, v, int(time.time())),
            )

    def insert_llm_metrics(self, metrics: dict):
        """Insert one timestamped count row per LLM name."""
        for k, v in metrics.items():
            self._exec_sql(
                """
                INSERT INTO llm(name, count, timestamp) VALUES (?, ?, ?)
                """,
                (k, v, int(time.time())),
            )

    def get_base_stats(self, offset_sec: int = 86400) -> Stats:
        """Return raw platform stats from *offset_sec* seconds ago to now."""
        where_clause = f" WHERE timestamp >= {int(time.time()) - offset_sec}"

        try:
            c = self.conn.cursor()
        except sqlite3.ProgrammingError:
            c = self._get_conn(self.db_path).cursor()

        c.execute(
            """
            SELECT * FROM platform
            """
            + where_clause,
        )

        platform = []
        for row in c.fetchall():
            platform.append(Platform(*row))

        c.close()

        return Stats(platform=platform)

    def get_total_message_count(self) -> int:
        """Return the total message count across all platforms.

        BUGFIX: ``SUM()`` over an empty table yields NULL (None); coerce to 0
        so callers always get an int.
        """
        try:
            c = self.conn.cursor()
        except sqlite3.ProgrammingError:
            c = self._get_conn(self.db_path).cursor()

        c.execute(
            """
            SELECT SUM(count) FROM platform
            """,
        )
        res = c.fetchone()
        c.close()
        return res[0] or 0

    def get_grouped_base_stats(self, offset_sec: int = 86400) -> Stats:
        """Return platform stats from *offset_sec* seconds ago to now,
        with counts summed per platform name."""
        where_clause = f" WHERE timestamp >= {int(time.time()) - offset_sec}"

        try:
            c = self.conn.cursor()
        except sqlite3.ProgrammingError:
            c = self._get_conn(self.db_path).cursor()

        c.execute(
            """
            SELECT name, SUM(count), timestamp FROM platform
            """
            + where_clause
            + " GROUP BY name",
        )

        platform = []
        for row in c.fetchall():
            platform.append(Platform(*row))

        c.close()

        return Stats(platform, [], [])

    def get_conversation_by_user_id(self, user_id: str, cid: str) -> Conversation | None:
        """Return the conversation for (user_id, cid), or None if absent."""
        try:
            c = self.conn.cursor()
        except sqlite3.ProgrammingError:
            c = self._get_conn(self.db_path).cursor()

        c.execute(
            """
            SELECT * FROM webchat_conversation WHERE user_id = ? AND cid = ?
            """,
            (user_id, cid),
        )

        res = c.fetchone()
        c.close()

        if not res:
            return None

        return Conversation(*res)

    def new_conversation(self, user_id: str, cid: str):
        """Insert an empty conversation with current timestamps."""
        history = "[]"
        updated_at = int(time.time())
        created_at = updated_at
        self._exec_sql(
            """
            INSERT INTO webchat_conversation(user_id, cid, history, updated_at, created_at) VALUES (?, ?, ?, ?, ?)
            """,
            (user_id, cid, history, updated_at, created_at),
        )

    def get_conversations(self, user_id: str) -> tuple:
        """Return the user's conversations (histories omitted), newest first."""
        try:
            c = self.conn.cursor()
        except sqlite3.ProgrammingError:
            c = self._get_conn(self.db_path).cursor()

        c.execute(
            """
            SELECT cid, created_at, updated_at, title, persona_id FROM webchat_conversation WHERE user_id = ? ORDER BY updated_at DESC
            """,
            (user_id,),
        )

        res = c.fetchall()
        c.close()
        conversations = []
        for row in res:
            cid = row[0]
            created_at = row[1]
            updated_at = row[2]
            title = row[3]
            persona_id = row[4]
            conversations.append(
                Conversation("", cid, "[]", created_at, updated_at, title, persona_id),
            )
        return conversations

    def update_conversation(self, user_id: str, cid: str, history: str):
        """Update a conversation's history and bump its updated_at."""
        updated_at = int(time.time())
        self._exec_sql(
            """
            UPDATE webchat_conversation SET history = ?, updated_at = ? WHERE user_id = ? AND cid = ?
            """,
            (history, updated_at, user_id, cid),
        )

    def update_conversation_title(self, user_id: str, cid: str, title: str):
        """Set a conversation's title (updated_at untouched)."""
        self._exec_sql(
            """
            UPDATE webchat_conversation SET title = ? WHERE user_id = ? AND cid = ?
            """,
            (title, user_id, cid),
        )

    def update_conversation_persona_id(self, user_id: str, cid: str, persona_id: str):
        """Set a conversation's persona (updated_at untouched)."""
        self._exec_sql(
            """
            UPDATE webchat_conversation SET persona_id = ? WHERE user_id = ? AND cid = ?
            """,
            (persona_id, user_id, cid),
        )

    def delete_conversation(self, user_id: str, cid: str):
        """Delete the conversation for (user_id, cid)."""
        self._exec_sql(
            """
            DELETE FROM webchat_conversation WHERE user_id = ? AND cid = ?
            """,
            (user_id, cid),
        )

    def get_all_conversations(
        self,
        page: int = 1,
        page_size: int = 20,
    ) -> tuple[list[dict[str, Any]], int]:
        """Return (conversation summaries, total count), paginated and
        ordered by updated_at descending. Histories are not included."""
        try:
            c = self.conn.cursor()
        except sqlite3.ProgrammingError:
            c = self._get_conn(self.db_path).cursor()

        try:
            # Total row count for pagination.
            c.execute("""
                SELECT COUNT(*) FROM webchat_conversation
            """)
            total_count = c.fetchone()[0]

            # Page offset (pages are 1-based).
            offset = (page - 1) * page_size

            # Fetch the page, newest first.
            c.execute(
                """
                SELECT user_id, cid, created_at, updated_at, title, persona_id
                FROM webchat_conversation
                ORDER BY updated_at DESC
                LIMIT ? OFFSET ?
                """,
                (page_size, offset),
            )

            rows = c.fetchall()

            conversations = []

            for row in rows:
                user_id, cid, created_at, updated_at, title, persona_id = row
                # Coerce cid to a safe string and derive a short display id
                # used in the default title.
                safe_cid = str(cid) if cid else "unknown"
                display_cid = safe_cid[:8] if len(safe_cid) >= 8 else safe_cid

                conversations.append(
                    {
                        "user_id": user_id or "",
                        "cid": safe_cid,
                        "title": title or f"对话 {display_cid}",
                        "persona_id": persona_id or "",
                        "created_at": created_at or 0,
                        "updated_at": updated_at or 0,
                    },
                )

            return conversations, total_count

        except Exception:
            # Deliberate best-effort: return an empty, well-formed result
            # instead of propagating DB errors to the caller.
            return [], 0
        finally:
            c.close()

    def get_filtered_conversations(
        self,
        page: int = 1,
        page_size: int = 20,
        platforms: list[str] | None = None,
        message_types: list[str] | None = None,
        search_query: str | None = None,
        exclude_ids: list[str] | None = None,
        exclude_platforms: list[str] | None = None,
    ) -> tuple[list[dict[str, Any]], int]:
        """Return (conversation summaries, total count) matching the filters.

        Filters are ANDed together; platform / message-type lists are ORed
        internally. All matching is done via LIKE patterns on the v3
        ``platform:type:...`` user_id format.
        """
        try:
            c = self.conn.cursor()
        except sqlite3.ProgrammingError:
            c = self._get_conn(self.db_path).cursor()

        try:
            # Build the WHERE clauses and bound parameters together.
            where_clauses = []
            params = []

            # Platform filter: user_id starts with "<platform>:".
            if platforms and len(platforms) > 0:
                platform_conditions = []
                for platform in platforms:
                    platform_conditions.append("user_id LIKE ?")
                    params.append(f"{platform}:%")

                if platform_conditions:
                    where_clauses.append(f"({' OR '.join(platform_conditions)})")

            # Message-type filter: user_id contains ":<type>:".
            if message_types and len(message_types) > 0:
                message_type_conditions = []
                for msg_type in message_types:
                    message_type_conditions.append("user_id LIKE ?")
                    params.append(f"%:{msg_type}:%")

                if message_type_conditions:
                    where_clauses.append(f"({' OR '.join(message_type_conditions)})")

            # Free-text search over title / ids / history.
            if search_query:
                # unicode_escape matches how non-ASCII text is stored.
                search_query = search_query.encode("unicode_escape").decode("utf-8")
                where_clauses.append(
                    "(title LIKE ? OR user_id LIKE ? OR cid LIKE ? OR history LIKE ?)",
                )
                search_param = f"%{search_query}%"
                params.extend([search_param, search_param, search_param, search_param])

            # Exclude specific user-id prefixes.
            if exclude_ids and len(exclude_ids) > 0:
                for exclude_id in exclude_ids:
                    where_clauses.append("user_id NOT LIKE ?")
                    params.append(f"{exclude_id}%")

            # Exclude specific platforms.
            if exclude_platforms and len(exclude_platforms) > 0:
                for exclude_platform in exclude_platforms:
                    where_clauses.append("user_id NOT LIKE ?")
                    params.append(f"{exclude_platform}:%")

            # Assemble the full WHERE clause.
            where_sql = " WHERE " + " AND ".join(where_clauses) if where_clauses else ""

            # Count query for pagination.
            count_sql = f"SELECT COUNT(*) FROM webchat_conversation{where_sql}"

            c.execute(count_sql, params)
            total_count = c.fetchone()[0]

            # Page offset (pages are 1-based).
            offset = (page - 1) * page_size

            # Page query, newest first.
            data_sql = f"""
                SELECT user_id, cid, created_at, updated_at, title, persona_id
                FROM webchat_conversation
                {where_sql}
                ORDER BY updated_at DESC
                LIMIT ? OFFSET ?
            """
            query_params = params + [page_size, offset]

            c.execute(data_sql, query_params)
            rows = c.fetchall()

            conversations = []

            for row in rows:
                user_id, cid, created_at, updated_at, title, persona_id = row
                # Coerce cid to a safe string for the default title.
                safe_cid = str(cid) if cid else "unknown"
                display_cid = safe_cid[:8] if len(safe_cid) >= 8 else safe_cid

                conversations.append(
                    {
                        "user_id": user_id or "",
                        "cid": safe_cid,
                        "title": title or f"对话 {display_cid}",
                        "persona_id": persona_id or "",
                        "created_at": created_at or 0,
                        "updated_at": updated_at or 0,
                    },
                )

            return conversations, total_count

        except Exception:
            # Deliberate best-effort: return an empty, well-formed result
            # instead of propagating DB errors to the caller.
            return [], 0
        finally:
            c.close()
|
||||
293
astrbot/core/db/po.py
Normal file
293
astrbot/core/db/po.py
Normal file
@@ -0,0 +1,293 @@
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
from typing import TypedDict
|
||||
|
||||
from sqlmodel import JSON, Field, SQLModel, Text, UniqueConstraint
|
||||
|
||||
|
||||
class PlatformStat(SQLModel, table=True):
    """This class represents the statistics of bot usage across different platforms.

    Note: In astrbot v4, we moved `platform` table to here.
    """

    __tablename__ = "platform_stats"  # type: ignore

    # Surrogate auto-increment primary key.
    id: int = Field(primary_key=True, sa_column_kwargs={"autoincrement": True})
    # Timestamp of the aggregation bucket this row counts towards
    # (hourly buckets in the v3 migration path).
    timestamp: datetime = Field(nullable=False)
    # Configured platform instance ID.
    platform_id: str = Field(nullable=False)
    platform_type: str = Field(nullable=False)  # such as "aiocqhttp", "slack", etc.
    # Message count accumulated for this bucket.
    count: int = Field(default=0, nullable=False)

    # One row per (timestamp, platform_id, platform_type); writers upsert
    # against this constraint to accumulate counts.
    __table_args__ = (
        UniqueConstraint(
            "timestamp",
            "platform_id",
            "platform_type",
            name="uix_platform_stats",
        ),
    )
|
||||
|
||||
|
||||
class ConversationV2(SQLModel, table=True):
    """A conversation between a user and the bot (v2 schema).

    Identified externally by the UUID `conversation_id`; the integer
    `inner_conversation_id` is only a surrogate key.
    """

    __tablename__ = "conversations"  # type: ignore

    inner_conversation_id: int = Field(
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
    )
    # Public UUID identifier of the conversation.
    conversation_id: str = Field(
        max_length=36,
        nullable=False,
        unique=True,
        default_factory=lambda: str(uuid.uuid4()),
    )
    platform_id: str = Field(nullable=False)
    user_id: str = Field(nullable=False)
    # Message history stored as a JSON array; None until populated.
    content: list | None = Field(default=None, sa_type=JSON)
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    # Fix: `onupdate` must be given a callable. Previously it received
    # `datetime.now(timezone.utc)` evaluated once at import time, so every
    # UPDATE stamped the module-load time instead of the update time.
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column_kwargs={"onupdate": lambda: datetime.now(timezone.utc)},
    )
    title: str | None = Field(default=None, max_length=255)
    persona_id: str | None = Field(default=None)

    __table_args__ = (
        UniqueConstraint(
            "conversation_id",
            name="uix_conversation_id",
        ),
    )
|
||||
|
||||
|
||||
class Persona(SQLModel, table=True):
    """Persona is a set of instructions for LLMs to follow.

    It can be used to customize the behavior of LLMs.
    """

    __tablename__ = "personas"  # type: ignore

    id: int | None = Field(
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
        default=None,
    )
    persona_id: str = Field(max_length=255, nullable=False)
    system_prompt: str = Field(sa_type=Text, nullable=False)
    begin_dialogs: list | None = Field(default=None, sa_type=JSON)
    """a list of strings, each representing a dialog to start with"""
    tools: list | None = Field(default=None, sa_type=JSON)
    """None means use ALL tools for default, empty list means no tools, otherwise a list of tool names."""
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    # Fix: `onupdate` must be a callable; it was previously evaluated once
    # at import time, so updates stamped the module-load time.
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column_kwargs={"onupdate": lambda: datetime.now(timezone.utc)},
    )

    __table_args__ = (
        UniqueConstraint(
            "persona_id",
            name="uix_persona_id",
        ),
    )
|
||||
|
||||
|
||||
class Preference(SQLModel, table=True):
    """This class represents preferences for bots."""

    __tablename__ = "preferences"  # type: ignore

    id: int | None = Field(
        default=None,
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
    )
    scope: str = Field(nullable=False)
    """Scope of the preference, such as 'global', 'umo', 'plugin'."""
    scope_id: str = Field(nullable=False)
    """ID of the scope, such as 'global', 'umo', 'plugin_name'."""
    key: str = Field(nullable=False)
    value: dict = Field(sa_type=JSON, nullable=False)
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    # Fix: `onupdate` must be a callable; it was previously evaluated once
    # at import time, so updates stamped the module-load time.
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column_kwargs={"onupdate": lambda: datetime.now(timezone.utc)},
    )

    # A preference is uniquely addressed by (scope, scope_id, key).
    __table_args__ = (
        UniqueConstraint(
            "scope",
            "scope_id",
            "key",
            name="uix_preference_scope_scope_id_key",
        ),
    )
|
||||
|
||||
|
||||
class PlatformMessageHistory(SQLModel, table=True):
    """This class represents the message history for a specific platform.

    It is used to store messages that are not LLM-generated, such as user messages
    or platform-specific messages.
    """

    __tablename__ = "platform_message_history"  # type: ignore

    id: int | None = Field(
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
        default=None,
    )
    platform_id: str = Field(nullable=False)
    user_id: str = Field(nullable=False)  # An id of group, user in platform
    sender_id: str | None = Field(default=None)  # ID of the sender in the platform
    sender_name: str | None = Field(
        default=None,
    )  # Name of the sender in the platform
    content: dict = Field(sa_type=JSON, nullable=False)  # a message chain list
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    # Fix: `onupdate` must be a callable; it was previously evaluated once
    # at import time, so updates stamped the module-load time.
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column_kwargs={"onupdate": lambda: datetime.now(timezone.utc)},
    )
|
||||
|
||||
|
||||
class PlatformSession(SQLModel, table=True):
    """Platform session table for managing user sessions across different platforms.

    A session represents a chat window for a specific user on a specific platform.
    Each session can have multiple conversations (对话) associated with it.
    """

    __tablename__ = "platform_sessions"  # type: ignore

    inner_id: int | None = Field(
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
        default=None,
    )
    # Public UUID identifier of the session.
    session_id: str = Field(
        max_length=100,
        nullable=False,
        unique=True,
        default_factory=lambda: str(uuid.uuid4()),
    )
    platform_id: str = Field(default="webchat", nullable=False)
    """Platform identifier (e.g., 'webchat', 'qq', 'discord')"""
    creator: str = Field(nullable=False)
    """Username of the session creator"""
    display_name: str | None = Field(default=None, max_length=255)
    """Display name for the session"""
    is_group: int = Field(default=0, nullable=False)
    """0 for private chat, 1 for group chat (not implemented yet)"""
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    # Fix: `onupdate` must be a callable; it was previously evaluated once
    # at import time, so updates stamped the module-load time.
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column_kwargs={"onupdate": lambda: datetime.now(timezone.utc)},
    )

    __table_args__ = (
        UniqueConstraint(
            "session_id",
            name="uix_platform_session_id",
        ),
    )
|
||||
|
||||
|
||||
class Attachment(SQLModel, table=True):
    """This class represents attachments for messages in AstrBot.

    Attachments can be images, files, or other media types.
    """

    __tablename__ = "attachments"  # type: ignore

    inner_attachment_id: int | None = Field(
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
        default=None,
    )
    # Public UUID identifier of the attachment.
    attachment_id: str = Field(
        max_length=36,
        nullable=False,
        unique=True,
        default_factory=lambda: str(uuid.uuid4()),
    )
    path: str = Field(nullable=False)  # Path to the file on disk
    type: str = Field(nullable=False)  # Type of the file (e.g., 'image', 'file')
    mime_type: str = Field(nullable=False)  # MIME type of the file
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    # Fix: `onupdate` must be a callable; it was previously evaluated once
    # at import time, so updates stamped the module-load time.
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        sa_column_kwargs={"onupdate": lambda: datetime.now(timezone.utc)},
    )

    __table_args__ = (
        UniqueConstraint(
            "attachment_id",
            name="uix_attachment_id",
        ),
    )
|
||||
|
||||
|
||||
@dataclass
class Conversation:
    """LLM conversation (legacy shape).

    For WebChat, `history` stores every message, including commands,
    replies and images. For chats on other platforms, non-LLM replies are
    not stored (they are assumed to already live on the platform itself).

    In v4.0.0 and later, WebChat history has been migrated to the
    `PlatformMessageHistory` table.
    """

    platform_id: str
    user_id: str
    cid: str
    """Conversation ID, a UUID-formatted string."""
    history: str = ""
    """The dialog list serialized as a string."""
    title: str | None = ""
    persona_id: str | None = ""
    created_at: int = 0
    updated_at: int = 0
|
||||
|
||||
|
||||
class Personality(TypedDict):
    """LLM persona type (legacy).

    In v4.0.0 and later, prefer the `Persona` model above; the
    `mood_imitation_dialogs` field has been deprecated.
    """

    # NOTE(review): assigning default values inside a TypedDict body is
    # not supported by the typing spec (type checkers flag it); the
    # values are left untouched here — confirm this is intentional.
    prompt: str = ""
    name: str = ""
    begin_dialogs: list[str] = []
    mood_imitation_dialogs: list[str] = []
    """Mood-imitation dialog presets. Deprecated in v4.0.0 and later."""
    tools: list[str] | None = None
    """Tool list. None means use all tools; an empty list means no tools."""

    # cache
    _begin_dialogs_processed: list[dict] = []
    _mood_imitation_dialogs_processed: str = ""
|
||||
|
||||
|
||||
# ====
|
||||
# Deprecated, and will be removed in future versions.
|
||||
# ====
|
||||
|
||||
|
||||
@dataclass
class Platform:
    """Per-platform usage statistics (deprecated)."""

    name: str
    count: int
    timestamp: int
|
||||
|
||||
|
||||
@dataclass
class Stats:
    """Deprecated aggregate statistics container returned by the legacy APIs."""

    platform: list[Platform] = field(default_factory=list)
|
||||
810
astrbot/core/db/sqlite.py
Normal file
810
astrbot/core/db/sqlite.py
Normal file
@@ -0,0 +1,810 @@
|
||||
import asyncio
|
||||
import threading
|
||||
import typing as T
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlmodel import col, delete, desc, func, or_, select, text, update
|
||||
|
||||
from astrbot.core.db import BaseDatabase
|
||||
from astrbot.core.db.po import (
|
||||
Attachment,
|
||||
ConversationV2,
|
||||
Persona,
|
||||
PlatformMessageHistory,
|
||||
PlatformSession,
|
||||
PlatformStat,
|
||||
Preference,
|
||||
SQLModel,
|
||||
)
|
||||
from astrbot.core.db.po import (
|
||||
Platform as DeprecatedPlatformStat,
|
||||
)
|
||||
from astrbot.core.db.po import (
|
||||
Stats as DeprecatedStats,
|
||||
)
|
||||
|
||||
# Sentinel distinguishing "argument not supplied" from an explicit None
# (see `update_persona`, which tests `tools is not NOT_GIVEN`). A unique
# plain object is the idiomatic sentinel; the previous
# `T.TypeVar("NOT_GIVEN")` only worked by identity and misused the
# generics machinery.
NOT_GIVEN = object()
|
||||
|
||||
|
||||
class SQLiteDatabase(BaseDatabase):
|
||||
def __init__(self, db_path: str) -> None:
    """Bind this database instance to the SQLite file at *db_path*.

    Attributes are set before calling ``super().__init__()`` —
    presumably ``BaseDatabase.__init__`` consumes ``self.DATABASE_URL``
    to build the async engine; TODO confirm against BaseDatabase.
    """
    self.db_path = db_path
    # aiosqlite driver so the engine works with AsyncSession.
    self.DATABASE_URL = f"sqlite+aiosqlite:///{db_path}"
    self.inited = False
    super().__init__()
|
||||
|
||||
async def initialize(self) -> None:
    """Initialize the database by creating tables if they do not exist."""
    async with self.engine.begin() as conn:
        # Create all SQLModel-declared tables that are missing.
        await conn.run_sync(SQLModel.metadata.create_all)
        # SQLite tuning: WAL for concurrent readers, relaxed fsync,
        # bigger page cache, in-memory temp tables, mmap'd I/O.
        await conn.execute(text("PRAGMA journal_mode=WAL"))
        await conn.execute(text("PRAGMA synchronous=NORMAL"))
        await conn.execute(text("PRAGMA cache_size=20000"))
        await conn.execute(text("PRAGMA temp_store=MEMORY"))
        await conn.execute(text("PRAGMA mmap_size=134217728"))
        await conn.execute(text("PRAGMA optimize"))
        # NOTE(review): `engine.begin()` commits on exit, so this commit
        # looks redundant — confirm before removing.
        await conn.commit()
|
||||
|
||||
# ====
|
||||
# Platform Statistics
|
||||
# ====
|
||||
|
||||
async def insert_platform_stats(
    self,
    platform_id,
    platform_type,
    count=1,
    timestamp=None,
) -> None:
    """Insert a new platform statistic record.

    When *timestamp* is omitted, the count is bucketed into the current
    hour (minutes/seconds zeroed). An existing row for the same
    (timestamp, platform_id, platform_type) gets its counter incremented
    via SQLite's upsert (``ON CONFLICT ... DO UPDATE``).
    """
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            if timestamp is None:
                # Bucket to the start of the current hour.
                timestamp = datetime.now().replace(
                    minute=0,
                    second=0,
                    microsecond=0,
                )
            current_hour = timestamp
            # Raw SQL because SQLAlchemy core has no portable upsert here;
            # relies on the uix_platform_stats unique constraint.
            await session.execute(
                text("""
                INSERT INTO platform_stats (timestamp, platform_id, platform_type, count)
                VALUES (:timestamp, :platform_id, :platform_type, :count)
                ON CONFLICT(timestamp, platform_id, platform_type) DO UPDATE SET
                count = platform_stats.count + EXCLUDED.count
                """),
                {
                    "timestamp": current_hour,
                    "platform_id": platform_id,
                    "platform_type": platform_type,
                    "count": count,
                },
            )
|
||||
|
||||
async def count_platform_stats(self) -> int:
    """Return the total number of rows in the ``platform_stats`` table."""
    async with self.get_db() as session:
        session: AsyncSession
        stmt = select(func.count(col(PlatformStat.platform_id))).select_from(
            PlatformStat,
        )
        total = (await session.execute(stmt)).scalar_one_or_none()
        # A None scalar (no rows at all) is reported as zero.
        return 0 if total is None else total
|
||||
|
||||
async def get_platform_stats(self, offset_sec: int = 86400) -> list[PlatformStat]:
    """Get platform statistics within the specified offset in seconds and group by platform_id.

    Bug fix: the previous raw SQL placed ``GROUP BY`` after ``ORDER BY``,
    which is a SQLite syntax error, and read rows via ``scalars()`` on a
    textual statement, which yields only the first column rather than
    ``PlatformStat`` objects. An ORM ``select`` fixes both: SQLAlchemy
    emits the clauses in the correct order and ``scalars()`` returns
    mapped ``PlatformStat`` instances.
    """
    async with self.get_db() as session:
        session: AsyncSession
        start_time = datetime.now() - timedelta(seconds=offset_sec)
        result = await session.execute(
            select(PlatformStat)
            .where(PlatformStat.timestamp >= start_time)
            .group_by(PlatformStat.platform_id)
            .order_by(desc(PlatformStat.timestamp)),
        )
        return list(result.scalars().all())
|
||||
|
||||
# ====
|
||||
# Conversation Management
|
||||
# ====
|
||||
|
||||
async def get_conversations(self, user_id=None, platform_id=None):
    """Return conversations, optionally filtered by user and/or platform, newest first."""
    async with self.get_db() as session:
        session: AsyncSession
        stmt = select(ConversationV2)
        # Apply only the filters the caller actually supplied.
        if user_id:
            stmt = stmt.where(ConversationV2.user_id == user_id)
        if platform_id:
            stmt = stmt.where(ConversationV2.platform_id == platform_id)
        stmt = stmt.order_by(desc(ConversationV2.created_at))
        return (await session.execute(stmt)).scalars().all()
|
||||
|
||||
async def get_conversation_by_id(self, cid):
    """Fetch a single conversation by its public conversation id, or None if absent."""
    async with self.get_db() as session:
        session: AsyncSession
        stmt = select(ConversationV2).where(ConversationV2.conversation_id == cid)
        return (await session.execute(stmt)).scalar_one_or_none()
|
||||
|
||||
async def get_all_conversations(self, page=1, page_size=20):
    """Return one page of all conversations, newest first."""
    async with self.get_db() as session:
        session: AsyncSession
        stmt = (
            select(ConversationV2)
            .order_by(desc(ConversationV2.created_at))
            .offset((page - 1) * page_size)
            .limit(page_size)
        )
        return (await session.execute(stmt)).scalars().all()
|
||||
|
||||
async def get_filtered_conversations(
    self,
    page=1,
    page_size=20,
    platform_ids=None,
    search_query="",
    **kwargs,
):
    """Return (conversations, total) for one page of filtered conversations.

    Filters: *platform_ids* restricts platforms; *search_query* is a
    case-insensitive substring match on title/content/user_id/cid;
    kwargs may carry ``message_types`` and ``platforms`` lists.
    """
    async with self.get_db() as session:
        session: AsyncSession
        # Build the base query with filters
        base_query = select(ConversationV2)

        if platform_ids:
            base_query = base_query.where(
                col(ConversationV2.platform_id).in_(platform_ids),
            )
        if search_query:
            # Escape non-ASCII text — presumably so it matches the
            # escaped form stored in the JSON content column; TODO confirm.
            search_query = search_query.encode("unicode_escape").decode("utf-8")
            base_query = base_query.where(
                or_(
                    col(ConversationV2.title).ilike(f"%{search_query}%"),
                    col(ConversationV2.content).ilike(f"%{search_query}%"),
                    col(ConversationV2.user_id).ilike(f"%{search_query}%"),
                    col(ConversationV2.conversation_id).ilike(f"%{search_query}%"),
                ),
            )
        # Message-type filter matches ":{type}:" embedded in user_id.
        if "message_types" in kwargs and len(kwargs["message_types"]) > 0:
            for msg_type in kwargs["message_types"]:
                base_query = base_query.where(
                    col(ConversationV2.user_id).ilike(f"%:{msg_type}:%"),
                )
        if "platforms" in kwargs and len(kwargs["platforms"]) > 0:
            base_query = base_query.where(
                col(ConversationV2.platform_id).in_(kwargs["platforms"]),
            )

        # Get total count matching the filters
        count_query = select(func.count()).select_from(base_query.subquery())
        total_count = await session.execute(count_query)
        total = total_count.scalar_one()

        # Get paginated results
        offset = (page - 1) * page_size
        result_query = (
            base_query.order_by(desc(ConversationV2.created_at))
            .offset(offset)
            .limit(page_size)
        )
        result = await session.execute(result_query)
        conversations = result.scalars().all()

        return conversations, total
|
||||
|
||||
async def create_conversation(
    self,
    user_id,
    platform_id,
    content=None,
    title=None,
    persona_id=None,
    cid=None,
    created_at=None,
    updated_at=None,
):
    """Insert a new conversation row and return the model instance.

    Omitted cid/created_at/updated_at fall back to the model defaults
    (fresh UUID and current UTC time).
    """
    overrides = {}
    if cid:
        overrides["conversation_id"] = cid
    if created_at:
        overrides["created_at"] = created_at
    if updated_at:
        overrides["updated_at"] = updated_at
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            conversation = ConversationV2(
                user_id=user_id,
                content=content or [],
                platform_id=platform_id,
                title=title,
                persona_id=persona_id,
                **overrides,
            )
            session.add(conversation)
            return conversation
|
||||
|
||||
async def update_conversation(self, cid, title=None, persona_id=None, content=None):
    """Update the given fields of a conversation.

    Returns the refreshed row, or None when every field is None (nothing
    to change).
    """
    updates = {
        name: value
        for name, value in (
            ("title", title),
            ("persona_id", persona_id),
            ("content", content),
        )
        if value is not None
    }
    if not updates:
        return None
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            await session.execute(
                update(ConversationV2)
                .where(col(ConversationV2.conversation_id) == cid)
                .values(**updates),
            )
    # Re-read after commit so the caller sees the stored state.
    return await self.get_conversation_by_id(cid)
|
||||
|
||||
async def delete_conversation(self, cid):
    """Remove the conversation row identified by its public conversation id."""
    stmt = delete(ConversationV2).where(col(ConversationV2.conversation_id) == cid)
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            await session.execute(stmt)
|
||||
|
||||
async def delete_conversations_by_user_id(self, user_id: str) -> None:
    """Remove every conversation belonging to *user_id*."""
    stmt = delete(ConversationV2).where(col(ConversationV2.user_id) == user_id)
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            await session.execute(stmt)
|
||||
|
||||
async def get_session_conversations(
    self,
    page=1,
    page_size=20,
    search_query=None,
    platform=None,
) -> tuple[list[dict], int]:
    """Get paginated session conversations with joined conversation and persona details.

    Joins the preference rows recording each session's selected
    conversation (scope "umo", key "sel_conv_id") against the
    conversation and persona tables, applies optional search/platform
    filters, and returns ``(rows_as_dicts, total_matching_count)``.
    """
    async with self.get_db() as session:
        session: AsyncSession
        offset = (page - 1) * page_size

        # The selected conversation id lives inside the JSON preference
        # value under "$.val".
        base_query = (
            select(
                col(Preference.scope_id).label("session_id"),
                func.json_extract(Preference.value, "$.val").label(
                    "conversation_id",
                ),  # type: ignore
                col(ConversationV2.persona_id).label("persona_id"),
                col(ConversationV2.title).label("title"),
                col(Persona.persona_id).label("persona_name"),
            )
            .select_from(Preference)
            .outerjoin(
                ConversationV2,
                func.json_extract(Preference.value, "$.val")
                == ConversationV2.conversation_id,
            )
            .outerjoin(
                Persona,
                col(ConversationV2.persona_id) == Persona.persona_id,
            )
            .where(Preference.scope == "umo", Preference.key == "sel_conv_id")
        )

        # Search filter: session id, conversation title, persona id.
        if search_query:
            search_pattern = f"%{search_query}%"
            base_query = base_query.where(
                or_(
                    col(Preference.scope_id).ilike(search_pattern),
                    col(ConversationV2.title).ilike(search_pattern),
                    col(Persona.persona_id).ilike(search_pattern),
                ),
            )

        # Platform filter: session ids carry a "platform:" prefix.
        if platform:
            platform_pattern = f"{platform}:%"
            base_query = base_query.where(
                col(Preference.scope_id).like(platform_pattern),
            )

        # Stable ordering.
        base_query = base_query.order_by(Preference.scope_id)

        # One page of rows.
        result_query = base_query.offset(offset).limit(page_size)
        result = await session.execute(result_query)
        rows = result.fetchall()

        # Total count with the same joins (separate statement because the
        # select-list differs).
        count_base_query = (
            select(func.count(col(Preference.scope_id)))
            .select_from(Preference)
            .outerjoin(
                ConversationV2,
                func.json_extract(Preference.value, "$.val")
                == ConversationV2.conversation_id,
            )
            .outerjoin(
                Persona,
                col(ConversationV2.persona_id) == Persona.persona_id,
            )
            .where(Preference.scope == "umo", Preference.key == "sel_conv_id")
        )

        # Re-apply the same search and platform filters to the count query.
        if search_query:
            search_pattern = f"%{search_query}%"
            count_base_query = count_base_query.where(
                or_(
                    col(Preference.scope_id).ilike(search_pattern),
                    col(ConversationV2.title).ilike(search_pattern),
                    col(Persona.persona_id).ilike(search_pattern),
                ),
            )

        if platform:
            platform_pattern = f"{platform}:%"
            count_base_query = count_base_query.where(
                col(Preference.scope_id).like(platform_pattern),
            )

        total_result = await session.execute(count_base_query)
        total = total_result.scalar() or 0

        sessions_data = [
            {
                "session_id": row.session_id,
                "conversation_id": row.conversation_id,
                "persona_id": row.persona_id,
                "title": row.title,
                "persona_name": row.persona_name,
            }
            for row in rows
        ]
        return sessions_data, total
|
||||
|
||||
async def insert_platform_message_history(
    self,
    platform_id,
    user_id,
    content,
    sender_id=None,
    sender_name=None,
):
    """Persist one platform message-history record and return it."""
    record = PlatformMessageHistory(
        platform_id=platform_id,
        user_id=user_id,
        content=content,
        sender_id=sender_id,
        sender_name=sender_name,
    )
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            session.add(record)
            return record
|
||||
|
||||
async def delete_platform_message_offset(
    self,
    platform_id,
    user_id,
    offset_sec=86400,
):
    """Delete platform message history records newer than the specified offset.

    Removes rows with ``created_at >= now - offset_sec``, i.e. the most
    recent messages for the given platform/user pair.
    """
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            now = datetime.now()
            cutoff_time = now - timedelta(seconds=offset_sec)
            await session.execute(
                delete(PlatformMessageHistory).where(
                    col(PlatformMessageHistory.platform_id) == platform_id,
                    col(PlatformMessageHistory.user_id) == user_id,
                    col(PlatformMessageHistory.created_at) >= cutoff_time,
                ),
            )
|
||||
|
||||
async def get_platform_message_history(
    self,
    platform_id,
    user_id,
    page=1,
    page_size=20,
):
    """Return one page of message history for a platform/user pair, newest first."""
    async with self.get_db() as session:
        session: AsyncSession
        stmt = (
            select(PlatformMessageHistory)
            .where(
                PlatformMessageHistory.platform_id == platform_id,
                PlatformMessageHistory.user_id == user_id,
            )
            .order_by(desc(PlatformMessageHistory.created_at))
            .offset((page - 1) * page_size)
            .limit(page_size)
        )
        return (await session.execute(stmt)).scalars().all()
|
||||
|
||||
async def insert_attachment(self, path, type, mime_type):
    """Insert a new attachment record and return it.

    The parameter name ``type`` shadows the builtin but is kept for
    caller compatibility.
    """
    attachment = Attachment(
        path=path,
        type=type,
        mime_type=mime_type,
    )
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            session.add(attachment)
            return attachment
|
||||
|
||||
async def get_attachment_by_id(self, attachment_id):
    """Fetch an attachment by its public UUID, or None if absent."""
    async with self.get_db() as session:
        session: AsyncSession
        stmt = select(Attachment).where(Attachment.attachment_id == attachment_id)
        return (await session.execute(stmt)).scalar_one_or_none()
|
||||
|
||||
async def insert_persona(
    self,
    persona_id,
    system_prompt,
    begin_dialogs=None,
    tools=None,
):
    """Insert a new persona record and return it.

    ``tools=None`` is stored as-is (meaning "all tools"); a missing
    ``begin_dialogs`` is normalized to an empty list.
    """
    persona = Persona(
        persona_id=persona_id,
        system_prompt=system_prompt,
        begin_dialogs=begin_dialogs or [],
        tools=tools,
    )
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            session.add(persona)
            return persona
|
||||
|
||||
async def get_persona_by_id(self, persona_id):
    """Fetch a persona by its identifier, or None if absent."""
    async with self.get_db() as session:
        session: AsyncSession
        stmt = select(Persona).where(Persona.persona_id == persona_id)
        return (await session.execute(stmt)).scalar_one_or_none()
|
||||
|
||||
async def get_personas(self):
    """Return every persona stored in the database."""
    async with self.get_db() as session:
        session: AsyncSession
        return (await session.execute(select(Persona))).scalars().all()
|
||||
|
||||
async def update_persona(
    self,
    persona_id,
    system_prompt=None,
    begin_dialogs=None,
    tools=NOT_GIVEN,
):
    """Update a persona's system prompt or begin dialogs.

    ``tools`` uses the NOT_GIVEN sentinel so callers can explicitly set
    it to None ("use all tools") — None is a meaningful value here, so
    an ordinary None default would be ambiguous. Returns the refreshed
    persona, or None when no field was supplied.
    """
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            query = update(Persona).where(col(Persona.persona_id) == persona_id)
            values = {}
            if system_prompt is not None:
                values["system_prompt"] = system_prompt
            if begin_dialogs is not None:
                values["begin_dialogs"] = begin_dialogs
            # Identity check against the sentinel, not equality.
            if tools is not NOT_GIVEN:
                values["tools"] = tools
            if not values:
                return None
            query = query.values(**values)
            await session.execute(query)
    # Re-read after commit so the caller sees the stored state.
    return await self.get_persona_by_id(persona_id)
|
||||
|
||||
async def delete_persona(self, persona_id):
    """Delete a persona by its ID."""
    stmt = delete(Persona).where(col(Persona.persona_id) == persona_id)
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            await session.execute(stmt)
|
||||
|
||||
async def insert_preference_or_update(self, scope, scope_id, key, value):
    """Upsert the preference row addressed by (scope, scope_id, key) and return it."""
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            lookup = select(Preference).where(
                Preference.scope == scope,
                Preference.scope_id == scope_id,
                Preference.key == key,
            )
            preference = (await session.execute(lookup)).scalar_one_or_none()
            if preference is None:
                # No existing row: create one.
                preference = Preference(
                    scope=scope,
                    scope_id=scope_id,
                    key=key,
                    value=value,
                )
                session.add(preference)
            else:
                # Row exists: overwrite its value in place.
                preference.value = value
            return preference
|
||||
|
||||
async def get_preference(self, scope, scope_id, key):
    """Fetch the preference row for (scope, scope_id, key), or None."""
    async with self.get_db() as session:
        session: AsyncSession
        stmt = (
            select(Preference)
            .where(Preference.scope == scope)
            .where(Preference.scope_id == scope_id)
            .where(Preference.key == key)
        )
        return (await session.execute(stmt)).scalar_one_or_none()
|
||||
|
||||
async def get_preferences(self, scope, scope_id=None, key=None):
    """Return all preferences in *scope*, optionally narrowed by scope_id and/or key."""
    async with self.get_db() as session:
        session: AsyncSession
        stmt = select(Preference).where(Preference.scope == scope)
        if scope_id is not None:
            stmt = stmt.where(Preference.scope_id == scope_id)
        if key is not None:
            stmt = stmt.where(Preference.key == key)
        return (await session.execute(stmt)).scalars().all()
|
||||
|
||||
async def remove_preference(self, scope, scope_id, key):
    """Remove a preference by scope ID and key.

    Fix: dropped the trailing ``session.commit()`` — the
    ``session.begin()`` context manager already commits on successful
    exit, so the extra call only opened and committed an empty
    transaction.
    """
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            await session.execute(
                delete(Preference).where(
                    col(Preference.scope) == scope,
                    col(Preference.scope_id) == scope_id,
                    col(Preference.key) == key,
                ),
            )
|
||||
|
||||
async def clear_preferences(self, scope, scope_id):
    """Clear all preferences for a specific scope ID.

    Fix: dropped the trailing ``session.commit()`` — the
    ``session.begin()`` context manager already commits on successful
    exit, so the extra call only opened and committed an empty
    transaction.
    """
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            await session.execute(
                delete(Preference).where(
                    col(Preference.scope) == scope,
                    col(Preference.scope_id) == scope_id,
                ),
            )
|
||||
|
||||
# ====
|
||||
# Deprecated Methods
|
||||
# ====
|
||||
|
||||
def get_base_stats(self, offset_sec=86400):
    """Get base statistics within the specified offset in seconds.

    Deprecated synchronous API. The coroutine is executed with
    ``asyncio.run`` on a dedicated thread — presumably so this blocking
    call also works when the caller's thread already runs an event loop
    (``asyncio.run`` would fail there); TODO confirm.
    """

    async def _inner():
        async with self.get_db() as session:
            session: AsyncSession
            now = datetime.now()
            start_time = now - timedelta(seconds=offset_sec)
            result = await session.execute(
                select(PlatformStat).where(PlatformStat.timestamp >= start_time),
            )
            all_datas = result.scalars().all()
            # Repackage rows into the legacy Stats/Platform dataclasses.
            deprecated_stats = DeprecatedStats()
            for data in all_datas:
                deprecated_stats.platform.append(
                    DeprecatedPlatformStat(
                        name=data.platform_id,
                        count=data.count,
                        timestamp=int(data.timestamp.timestamp()),
                    ),
                )
            return deprecated_stats

    result = None

    def runner():
        # Capture the coroutine's result across the thread boundary.
        nonlocal result
        result = asyncio.run(_inner())

    t = threading.Thread(target=runner)
    t.start()
    t.join()
    return result
|
||||
|
||||
def get_total_message_count(self):
    """Get the total message count from platform statistics.

    Deprecated synchronous API; runs the coroutine on its own thread via
    ``asyncio.run`` (see ``get_base_stats`` for the rationale).
    """

    async def _inner():
        async with self.get_db() as session:
            session: AsyncSession
            result = await session.execute(
                select(func.sum(PlatformStat.count)).select_from(PlatformStat),
            )
            total_count = result.scalar_one_or_none()
            # SUM over zero rows yields NULL; report that as 0.
            return total_count if total_count is not None else 0

    result = None

    def runner():
        # Capture the coroutine's result across the thread boundary.
        nonlocal result
        result = asyncio.run(_inner())

    t = threading.Thread(target=runner)
    t.start()
    t.join()
    return result
|
||||
|
||||
def get_grouped_base_stats(self, offset_sec=86400):
    """Return message counts within *offset_sec*, summed per platform_id.

    Deprecated synchronous API; runs the coroutine on its own thread via
    ``asyncio.run`` (see ``get_base_stats`` for the rationale).
    """

    # group by platform_id
    async def _inner():
        async with self.get_db() as session:
            session: AsyncSession
            now = datetime.now()
            start_time = now - timedelta(seconds=offset_sec)
            result = await session.execute(
                select(PlatformStat.platform_id, func.sum(PlatformStat.count))
                .where(PlatformStat.timestamp >= start_time)
                .group_by(PlatformStat.platform_id),
            )
            grouped_stats = result.all()
            # Repackage into the legacy Stats/Platform dataclasses; the
            # window start is used as each group's timestamp.
            deprecated_stats = DeprecatedStats()
            for platform_id, count in grouped_stats:
                deprecated_stats.platform.append(
                    DeprecatedPlatformStat(
                        name=platform_id,
                        count=count,
                        timestamp=int(start_time.timestamp()),
                    ),
                )
            return deprecated_stats

    result = None

    def runner():
        # Capture the coroutine's result across the thread boundary.
        nonlocal result
        result = asyncio.run(_inner())

    t = threading.Thread(target=runner)
    t.start()
    t.join()
    return result
|
||||
|
||||
# ====
|
||||
# Platform Session Management
|
||||
# ====
|
||||
|
||||
async def create_platform_session(
    self,
    creator: str,
    platform_id: str = "webchat",
    session_id: str | None = None,
    display_name: str | None = None,
    is_group: int = 0,
) -> PlatformSession:
    """Create and persist a new Platform session for *creator*.

    When ``session_id`` is omitted (or falsy), the model/database supplies
    its own identifier.
    """
    extra: dict = {}
    if session_id:
        extra["session_id"] = session_id

    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            record = PlatformSession(
                creator=creator,
                platform_id=platform_id,
                display_name=display_name,
                is_group=is_group,
                **extra,
            )
            session.add(record)
            # Flush sends the INSERT; refresh pulls DB-generated values
            # (ids / timestamps) back onto the returned object.
            await session.flush()
            await session.refresh(record)
            return record
|
||||
|
||||
async def get_platform_session_by_id(
    self, session_id: str
) -> PlatformSession | None:
    """Look up a single Platform session by its ``session_id``.

    Returns:
        The matching ``PlatformSession``, or ``None`` when no row matches.
    """
    async with self.get_db() as session:
        session: AsyncSession
        stmt = select(PlatformSession).where(
            PlatformSession.session_id == session_id,
        )
        return (await session.execute(stmt)).scalar_one_or_none()
|
||||
|
||||
async def get_platform_sessions_by_creator(
    self,
    creator: str,
    platform_id: str | None = None,
    page: int = 1,
    page_size: int = 20,
) -> list[PlatformSession]:
    """Get all Platform sessions for a specific creator (username) and optionally platform.

    Results are ordered by ``updated_at`` descending and paginated.

    Args:
        creator: Username that owns the sessions.
        platform_id: Optional platform filter; skipped when falsy.
        page: 1-based page number. Values below 1 are treated as page 1.
        page_size: Maximum number of rows per page.

    Returns:
        list[PlatformSession]: Sessions on the requested page (may be empty).
    """
    async with self.get_db() as session:
        session: AsyncSession
        # Guard against page < 1: a negative OFFSET is invalid SQL on most
        # backends; clamp to the first page instead of raising downstream.
        offset = max(page - 1, 0) * page_size
        query = select(PlatformSession).where(PlatformSession.creator == creator)

        if platform_id:
            query = query.where(PlatformSession.platform_id == platform_id)

        query = (
            query.order_by(desc(PlatformSession.updated_at))
            .offset(offset)
            .limit(page_size)
        )
        result = await session.execute(query)
        return list(result.scalars().all())
|
||||
|
||||
async def update_platform_session(
    self,
    session_id: str,
    display_name: str | None = None,
) -> None:
    """Touch a Platform session's ``updated_at`` (UTC) and optionally set its display name.

    A ``display_name`` of ``None`` means "leave unchanged"; an empty string
    is written through.
    """
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            new_values: dict[str, T.Any] = {
                "updated_at": datetime.now(timezone.utc),
            }
            if display_name is not None:
                new_values["display_name"] = display_name

            stmt = (
                update(PlatformSession)
                .where(col(PlatformSession.session_id) == session_id)
                .values(**new_values)
            )
            await session.execute(stmt)
|
||||
|
||||
async def delete_platform_session(self, session_id: str) -> None:
    """Delete the Platform session identified by ``session_id``.

    Deleting a non-existent session is a no-op.
    """
    async with self.get_db() as session:
        session: AsyncSession
        async with session.begin():
            stmt = delete(PlatformSession).where(
                col(PlatformSession.session_id) == session_id,
            )
            await session.execute(stmt)
|
||||
73
astrbot/core/db/vec_db/base.py
Normal file
73
astrbot/core/db/vec_db/base.py
Normal file
@@ -0,0 +1,73 @@
|
||||
import abc
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Result:
|
||||
similarity: float
|
||||
data: dict
|
||||
|
||||
|
||||
class BaseVecDB:
|
||||
async def initialize(self):
|
||||
"""初始化向量数据库"""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def insert(
|
||||
self,
|
||||
content: str,
|
||||
metadata: dict | None = None,
|
||||
id: str | None = None,
|
||||
) -> int:
|
||||
"""插入一条文本和其对应向量,自动生成 ID 并保持一致性。"""
|
||||
...
|
||||
|
||||
@abc.abstractmethod
|
||||
async def insert_batch(
|
||||
self,
|
||||
contents: list[str],
|
||||
metadatas: list[dict] | None = None,
|
||||
ids: list[str] | None = None,
|
||||
batch_size: int = 32,
|
||||
tasks_limit: int = 3,
|
||||
max_retries: int = 3,
|
||||
progress_callback=None,
|
||||
) -> int:
|
||||
"""批量插入文本和其对应向量,自动生成 ID 并保持一致性。
|
||||
|
||||
Args:
|
||||
progress_callback: 进度回调函数,接收参数 (current, total)
|
||||
|
||||
"""
|
||||
...
|
||||
|
||||
@abc.abstractmethod
|
||||
async def retrieve(
|
||||
self,
|
||||
query: str,
|
||||
top_k: int = 5,
|
||||
fetch_k: int = 20,
|
||||
rerank: bool = False,
|
||||
metadata_filters: dict | None = None,
|
||||
) -> list[Result]:
|
||||
"""搜索最相似的文档。
|
||||
Args:
|
||||
query (str): 查询文本
|
||||
top_k (int): 返回的最相似文档的数量
|
||||
Returns:
|
||||
List[Result]: 查询结果
|
||||
"""
|
||||
...
|
||||
|
||||
@abc.abstractmethod
|
||||
async def delete(self, doc_id: str) -> bool:
|
||||
"""删除指定文档。
|
||||
Args:
|
||||
doc_id (str): 要删除的文档 ID
|
||||
Returns:
|
||||
bool: 删除是否成功
|
||||
"""
|
||||
...
|
||||
|
||||
@abc.abstractmethod
|
||||
async def close(self): ...
|
||||
3
astrbot/core/db/vec_db/faiss_impl/__init__.py
Normal file
3
astrbot/core/db/vec_db/faiss_impl/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .vec_db import FaissVecDB
|
||||
|
||||
__all__ = ["FaissVecDB"]
|
||||
392
astrbot/core/db/vec_db/faiss_impl/document_storage.py
Normal file
392
astrbot/core/db/vec_db/faiss_impl/document_storage.py
Normal file
@@ -0,0 +1,392 @@
|
||||
import json
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Column, Text
|
||||
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlmodel import Field, MetaData, SQLModel, col, func, select, text
|
||||
|
||||
from astrbot.core import logger
|
||||
|
||||
|
||||
class BaseDocModel(SQLModel, table=False):
    # Dedicated MetaData instance so the vec-db document tables are kept
    # separate from any other SQLModel metadata in the application.
    metadata = MetaData()
|
||||
|
||||
|
||||
class Document(BaseDocModel, table=True):
    """SQLModel for documents table."""

    __tablename__ = "documents"  # type: ignore

    # Auto-increment surrogate key (also used as the vector ID downstream —
    # see how int IDs are passed to the embedding storage by the caller).
    id: int | None = Field(
        default=None,
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
    )
    # External UUID string identifying the document chunk.
    doc_id: str = Field(nullable=False)
    # Raw chunk text.
    text: str = Field(nullable=False)
    # JSON-encoded metadata. The attribute is named `metadata_` to avoid
    # clashing with SQLModel's own `metadata`, but the column itself is
    # literally named "metadata".
    metadata_: str | None = Field(default=None, sa_column=Column("metadata", Text))
    created_at: datetime | None = Field(default=None)
    updated_at: datetime | None = Field(default=None)
|
||||
|
||||
|
||||
class DocumentStorage:
    """Async SQLite-backed storage for document chunks.

    Each row (see ``Document``) carries an auto-increment integer ``id``,
    an external UUID ``doc_id``, the chunk text, and a JSON-encoded
    ``metadata`` string queried via SQLite's ``json_extract``.
    """

    def __init__(self, db_path: str):
        self.db_path = db_path
        self.DATABASE_URL = f"sqlite+aiosqlite:///{db_path}"
        self.engine: AsyncEngine | None = None
        self.async_session_maker: sessionmaker | None = None
        # Bootstrap SQL script shipped next to this module (tables are
        # actually created from SQLModel metadata in initialize()).
        self.sqlite_init_path = os.path.join(
            os.path.dirname(__file__),
            "sqlite_init.sql",
        )

    async def initialize(self):
        """Initialize the SQLite database and create the documents table if it doesn't exist."""
        await self.connect()
        async with self.engine.begin() as conn:  # type: ignore
            # Create tables using SQLModel
            await conn.run_sync(BaseDocModel.metadata.create_all)

            try:
                # Add generated columns mirroring JSON metadata fields, then
                # index them. On every run after the first the ALTER TABLE
                # fails (column already exists), which also skips the
                # idempotent index creation below — acceptable because the
                # indexes were created on the first successful run.
                await conn.execute(
                    text(
                        "ALTER TABLE documents ADD COLUMN kb_doc_id TEXT "
                        "GENERATED ALWAYS AS (json_extract(metadata, '$.kb_doc_id')) STORED",
                    ),
                )
                await conn.execute(
                    text(
                        "ALTER TABLE documents ADD COLUMN user_id TEXT "
                        "GENERATED ALWAYS AS (json_extract(metadata, '$.user_id')) STORED",
                    ),
                )

                # Create indexes
                await conn.execute(
                    text(
                        "CREATE INDEX IF NOT EXISTS idx_documents_kb_doc_id ON documents(kb_doc_id)",
                    ),
                )
                await conn.execute(
                    text(
                        "CREATE INDEX IF NOT EXISTS idx_documents_user_id ON documents(user_id)",
                    ),
                )
            except Exception:
                # Fix: was `except BaseException: pass`, which also swallowed
                # KeyboardInterrupt / SystemExit / CancelledError. The failure
                # itself is expected after the first run (duplicate column).
                pass

            await conn.commit()

    async def connect(self):
        """Create the async engine and session factory (idempotent)."""
        if self.engine is None:
            self.engine = create_async_engine(
                self.DATABASE_URL,
                echo=False,
                future=True,
            )
            self.async_session_maker = sessionmaker(
                self.engine,  # type: ignore
                class_=AsyncSession,
                expire_on_commit=False,
            )  # type: ignore

    @asynccontextmanager
    async def get_session(self):
        """Context manager for database sessions."""
        async with self.async_session_maker() as session:  # type: ignore
            yield session

    async def get_documents(
        self,
        metadata_filters: dict,
        ids: list | None = None,
        offset: int | None = 0,
        limit: int | None = 100,
    ) -> list[dict]:
        """Retrieve documents by metadata filters and ids.

        Args:
            metadata_filters (dict): The metadata filters to apply.
            ids (list | None): Optional list of document IDs to filter.
            offset (int | None): Offset for pagination (None disables it).
            limit (int | None): Limit for pagination (None disables it).

        Returns:
            list: The list of documents that match the filters.

        """
        if self.engine is None:
            logger.warning(
                "Database connection is not initialized, returning empty result",
            )
            return []

        async with self.get_session() as session:
            query = select(Document)

            # Values are bound parameters, but filter KEYS are interpolated
            # into the SQL string. NOTE(review): this is safe only if keys
            # never come from untrusted input — confirm against callers.
            for key, val in metadata_filters.items():
                query = query.where(
                    text(f"json_extract(metadata, '$.{key}') = :filter_{key}"),
                ).params(**{f"filter_{key}": val})

            if ids is not None and len(ids) > 0:
                # -1 entries are padding from the vector search; drop them.
                valid_ids = [int(i) for i in ids if i != -1]
                if valid_ids:
                    query = query.where(col(Document.id).in_(valid_ids))

            if limit is not None:
                query = query.limit(limit)
            if offset is not None:
                query = query.offset(offset)

            result = await session.execute(query)
            documents = result.scalars().all()

            return [self._document_to_dict(doc) for doc in documents]

    async def insert_document(self, doc_id: str, text: str, metadata: dict) -> int:
        """Insert a single document and return its integer ID.

        Args:
            doc_id (str): The document ID (UUID string).
            text (str): The document text.
            metadata (dict): The document metadata.

        Returns:
            int: The integer ID of the inserted document.

        """
        assert self.engine is not None, "Database connection is not initialized."

        async with self.get_session() as session, session.begin():
            document = Document(
                doc_id=doc_id,
                text=text,
                metadata_=json.dumps(metadata),
                created_at=datetime.now(),
                updated_at=datetime.now(),
            )
            session.add(document)
            await session.flush()  # Flush to get the ID
            return document.id  # type: ignore

    async def insert_documents_batch(
        self,
        doc_ids: list[str],
        texts: list[str],
        metadatas: list[dict],
    ) -> list[int]:
        """Batch insert documents and return their integer IDs.

        Args:
            doc_ids (list[str]): List of document IDs (UUID strings).
            texts (list[str]): List of document texts.
            metadatas (list[dict]): List of document metadata.

        Returns:
            list[int]: List of integer IDs of the inserted documents.

        """
        assert self.engine is not None, "Database connection is not initialized."

        async with self.get_session() as session, session.begin():
            # Fix: removed a redundant local `import json` (already imported
            # at module level). Loop variable renamed from `text`, which
            # shadowed the imported sqlalchemy `text` helper.
            documents = []
            for doc_id, doc_text, metadata in zip(doc_ids, texts, metadatas):
                document = Document(
                    doc_id=doc_id,
                    text=doc_text,
                    metadata_=json.dumps(metadata),
                    created_at=datetime.now(),
                    updated_at=datetime.now(),
                )
                documents.append(document)
                session.add(document)

            await session.flush()  # Flush to get all IDs
            return [doc.id for doc in documents]  # type: ignore

    async def delete_document_by_doc_id(self, doc_id: str):
        """Delete a document by its doc_id.

        Args:
            doc_id (str): The doc_id of the document to delete.

        """
        assert self.engine is not None, "Database connection is not initialized."

        async with self.get_session() as session, session.begin():
            query = select(Document).where(col(Document.doc_id) == doc_id)
            result = await session.execute(query)
            document = result.scalar_one_or_none()

            if document:
                await session.delete(document)

    async def get_document_by_doc_id(self, doc_id: str):
        """Retrieve a document by its doc_id.

        Args:
            doc_id (str): The doc_id of the document to retrieve.

        Returns:
            dict: The document data or None if not found.

        """
        assert self.engine is not None, "Database connection is not initialized."

        async with self.get_session() as session:
            query = select(Document).where(col(Document.doc_id) == doc_id)
            result = await session.execute(query)
            document = result.scalar_one_or_none()

            if document:
                return self._document_to_dict(document)
            return None

    async def update_document_by_doc_id(self, doc_id: str, new_text: str):
        """Update a document's text by its doc_id (no-op when not found).

        Args:
            doc_id (str): The doc_id.
            new_text (str): The new text to update the document with.

        """
        assert self.engine is not None, "Database connection is not initialized."

        async with self.get_session() as session, session.begin():
            query = select(Document).where(col(Document.doc_id) == doc_id)
            result = await session.execute(query)
            document = result.scalar_one_or_none()

            if document:
                document.text = new_text
                document.updated_at = datetime.now()
                session.add(document)

    async def delete_documents(self, metadata_filters: dict):
        """Delete documents by their metadata filters.

        Args:
            metadata_filters (dict): The metadata filters to apply.

        """
        if self.engine is None:
            logger.warning(
                "Database connection is not initialized, skipping delete operation",
            )
            return

        async with self.get_session() as session, session.begin():
            query = select(Document)

            # NOTE(review): filter keys are interpolated into the SQL text —
            # see get_documents(); confirm keys are internal-only.
            for key, val in metadata_filters.items():
                query = query.where(
                    text(f"json_extract(metadata, '$.{key}') = :filter_{key}"),
                ).params(**{f"filter_{key}": val})

            result = await session.execute(query)
            documents = result.scalars().all()

            for doc in documents:
                await session.delete(doc)

    async def count_documents(self, metadata_filters: dict | None = None) -> int:
        """Count documents in the database.

        Args:
            metadata_filters (dict | None): Metadata filters to apply.

        Returns:
            int: The count of documents.

        """
        if self.engine is None:
            logger.warning("Database connection is not initialized, returning 0")
            return 0

        async with self.get_session() as session:
            query = select(func.count(col(Document.id)))

            if metadata_filters:
                for key, val in metadata_filters.items():
                    query = query.where(
                        text(f"json_extract(metadata, '$.{key}') = :filter_{key}"),
                    ).params(**{f"filter_{key}": val})

            result = await session.execute(query)
            count = result.scalar_one_or_none()
            return count if count is not None else 0

    async def get_user_ids(self) -> list[str]:
        """Retrieve all distinct user IDs from the documents table.

        Reads the `user_id` generated column added in initialize().

        Returns:
            list: A list of user IDs.

        """
        assert self.engine is not None, "Database connection is not initialized."

        async with self.get_session() as session:
            query = text(
                "SELECT DISTINCT user_id FROM documents WHERE user_id IS NOT NULL",
            )
            result = await session.execute(query)
            rows = result.fetchall()
            return [row[0] for row in rows]

    def _document_to_dict(self, document: Document) -> dict:
        """Convert a Document model to a dictionary.

        Note that "metadata" stays a raw JSON string, exactly as stored.

        Args:
            document (Document): The document to convert.

        Returns:
            dict: The converted dictionary.

        """
        return {
            "id": document.id,
            "doc_id": document.doc_id,
            "text": document.text,
            "metadata": document.metadata_,
            "created_at": document.created_at.isoformat()
            if isinstance(document.created_at, datetime)
            else document.created_at,
            "updated_at": document.updated_at.isoformat()
            if isinstance(document.updated_at, datetime)
            else document.updated_at,
        }

    async def tuple_to_dict(self, row):
        """Convert a tuple to a dictionary.

        Args:
            row (tuple): The row to convert.

        Returns:
            dict: The converted dictionary.

        Note: This method is kept for backward compatibility but is no longer used internally.

        """
        return {
            "id": row[0],
            "doc_id": row[1],
            "text": row[2],
            "metadata": row[3],
            "created_at": row[4],
            "updated_at": row[5],
        }

    async def close(self):
        """Close the connection to the SQLite database."""
        if self.engine:
            await self.engine.dispose()
            self.engine = None
            self.async_session_maker = None
|
||||
93
astrbot/core/db/vec_db/faiss_impl/embedding_storage.py
Normal file
93
astrbot/core/db/vec_db/faiss_impl/embedding_storage.py
Normal file
@@ -0,0 +1,93 @@
|
||||
try:
|
||||
import faiss
|
||||
except ModuleNotFoundError:
|
||||
raise ImportError(
|
||||
"faiss 未安装。请使用 'pip install faiss-cpu' 或 'pip install faiss-gpu' 安装。",
|
||||
)
|
||||
import os
|
||||
|
||||
import numpy as np
|
||||
|
||||
|
||||
class EmbeddingStorage:
    """FAISS-backed storage for fixed-dimension embedding vectors.

    Vectors are addressed by caller-supplied integer IDs via an
    ``IndexIDMap`` over an ``IndexFlatL2`` index.
    """

    def __init__(self, dimension: int, path: str | None = None):
        self.dimension = dimension
        # On-disk index location; None keeps the index in memory only.
        self.path = path
        self.index = None
        if path and os.path.exists(path):
            self.index = faiss.read_index(path)
        else:
            # IDMap lets us address vectors by our own integer document IDs.
            base_index = faiss.IndexFlatL2(dimension)
            self.index = faiss.IndexIDMap(base_index)

    async def insert(self, vector: np.ndarray, id: int):
        """Insert one vector under the given integer ID and persist the index.

        Args:
            vector (np.ndarray): 1-D vector to insert.
            id (int): ID to store the vector under.

        Raises:
            ValueError: if the vector's dimension does not match the storage.
        """
        assert self.index is not None, "FAISS index is not initialized."
        if vector.shape[0] != self.dimension:
            raise ValueError(
                f"向量维度不匹配, 期望: {self.dimension}, 实际: {vector.shape[0]}",
            )
        self.index.add_with_ids(vector.reshape(1, -1), np.array([id]))
        await self.save_index()

    async def insert_batch(self, vectors: np.ndarray, ids: list[int]):
        """Insert a batch of vectors and persist the index once.

        Args:
            vectors (np.ndarray): 2-D array of vectors, one per row.
            ids (list[int]): IDs, one per row of ``vectors``.

        Raises:
            ValueError: if the vectors' dimension does not match the storage.
        """
        assert self.index is not None, "FAISS index is not initialized."
        if vectors.shape[1] != self.dimension:
            raise ValueError(
                f"向量维度不匹配, 期望: {self.dimension}, 实际: {vectors.shape[1]}",
            )
        self.index.add_with_ids(vectors, np.array(ids))
        await self.save_index()

    async def search(self, vector: np.ndarray, k: int) -> tuple:
        """Search for the most similar vectors.

        NOTE(review): the query vector is L2-normalized in place here, but
        insert()/insert_batch() do not normalize stored vectors — confirm
        that callers either pre-normalize on insert or account for this in
        the distance-to-similarity mapping downstream.

        Args:
            vector (np.ndarray): query vector, shape (1, dimension).
            k (int): number of nearest vectors to return.

        Returns:
            tuple: (distances, indices); indices are padded with -1 when
            fewer than ``k`` vectors exist.
        """
        assert self.index is not None, "FAISS index is not initialized."
        faiss.normalize_L2(vector)
        distances, indices = self.index.search(vector, k)
        return distances, indices

    async def delete(self, ids: list[int]):
        """Remove vectors by ID and persist the index.

        Args:
            ids (list[int]): IDs of the vectors to remove.
        """
        assert self.index is not None, "FAISS index is not initialized."
        id_array = np.array(ids, dtype=np.int64)
        self.index.remove_ids(id_array)
        await self.save_index()

    async def save_index(self):
        """Persist the index to ``self.path``, if a path was configured.

        Fix: previously ``faiss.write_index(self.index, self.path)`` was
        called unconditionally and crashed for in-memory storages created
        with ``path=None``.
        """
        if self.path:
            faiss.write_index(self.index, self.path)
|
||||
17
astrbot/core/db/vec_db/faiss_impl/sqlite_init.sql
Normal file
17
astrbot/core/db/vec_db/faiss_impl/sqlite_init.sql
Normal file
@@ -0,0 +1,17 @@
|
||||
-- Document storage table: holds the integer id used by the FAISS index,
-- the external doc_id, the chunk text, JSON metadata, and
-- created_at / updated_at timestamps.
CREATE TABLE documents (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    doc_id TEXT NOT NULL,
    text TEXT NOT NULL,
    metadata TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

-- Generated columns mirroring JSON metadata fields so they can be indexed.
ALTER TABLE documents
ADD COLUMN group_id TEXT GENERATED ALWAYS AS (json_extract(metadata, '$.group_id')) STORED;
ALTER TABLE documents
ADD COLUMN user_id TEXT GENERATED ALWAYS AS (json_extract(metadata, '$.user_id')) STORED;

-- Indexes on the generated columns for fast per-user / per-group lookups.
CREATE INDEX idx_documents_user_id ON documents(user_id);
CREATE INDEX idx_documents_group_id ON documents(group_id);
|
||||
204
astrbot/core/db/vec_db/faiss_impl/vec_db.py
Normal file
204
astrbot/core/db/vec_db/faiss_impl/vec_db.py
Normal file
@@ -0,0 +1,204 @@
|
||||
import time
|
||||
import uuid
|
||||
|
||||
import numpy as np
|
||||
|
||||
from astrbot import logger
|
||||
from astrbot.core.provider.provider import EmbeddingProvider, RerankProvider
|
||||
|
||||
from ..base import BaseVecDB, Result
|
||||
from .document_storage import DocumentStorage
|
||||
from .embedding_storage import EmbeddingStorage
|
||||
|
||||
|
||||
class FaissVecDB(BaseVecDB):
    """A vector database backed by FAISS plus a SQLite document store.

    Documents (text + metadata) live in ``DocumentStorage``; their
    embeddings live in ``EmbeddingStorage`` keyed by the documents'
    integer row IDs.
    """

    def __init__(
        self,
        doc_store_path: str,
        index_store_path: str,
        embedding_provider: EmbeddingProvider,
        rerank_provider: RerankProvider | None = None,
    ):
        self.doc_store_path = doc_store_path
        self.index_store_path = index_store_path
        self.embedding_provider = embedding_provider
        self.document_storage = DocumentStorage(doc_store_path)
        self.embedding_storage = EmbeddingStorage(
            embedding_provider.get_dim(),
            index_store_path,
        )
        # Fix: removed a duplicate `self.embedding_provider = embedding_provider`
        # assignment that appeared here.
        self.rerank_provider = rerank_provider

    async def initialize(self):
        """Initialize the underlying document storage (creates tables/indexes)."""
        await self.document_storage.initialize()

    async def insert(
        self,
        content: str,
        metadata: dict | None = None,
        id: str | None = None,
    ) -> int:
        """Insert one text and its embedding; an ID is generated and kept consistent.

        Args:
            content: text to embed and store.
            metadata: optional metadata dict stored alongside the text.
            id: optional external UUID; generated when omitted.

        Returns:
            int: the integer row ID (shared between document and vector).
        """
        metadata = metadata or {}
        str_id = id or str(uuid.uuid4())  # UUID as the external document ID

        vector = await self.embedding_provider.get_embedding(content)
        vector = np.array(vector, dtype=np.float32)

        # Insert the document first: its integer row ID is what FAISS uses.
        int_id = await self.document_storage.insert_document(str_id, content, metadata)

        # Insert the vector into FAISS under the same integer ID.
        await self.embedding_storage.insert(vector, int_id)
        return int_id

    async def insert_batch(
        self,
        contents: list[str],
        metadatas: list[dict] | None = None,
        ids: list[str] | None = None,
        batch_size: int = 32,
        tasks_limit: int = 3,
        max_retries: int = 3,
        progress_callback=None,
    ) -> list[int]:
        """Batch-insert texts and their embeddings; IDs are generated and kept consistent.

        Args:
            progress_callback: progress callback receiving ``(current, total)``.

        Returns:
            list[int]: integer row IDs of the inserted documents.
        """
        metadatas = metadatas or [{} for _ in contents]
        ids = ids or [str(uuid.uuid4()) for _ in contents]

        start = time.time()
        logger.debug(f"Generating embeddings for {len(contents)} contents...")
        vectors = await self.embedding_provider.get_embeddings_batch(
            contents,
            batch_size=batch_size,
            tasks_limit=tasks_limit,
            max_retries=max_retries,
            progress_callback=progress_callback,
        )
        end = time.time()
        logger.debug(
            f"Generated embeddings for {len(contents)} contents in {end - start:.2f} seconds.",
        )

        # Batch-insert documents first to obtain the shared integer IDs.
        int_ids = await self.document_storage.insert_documents_batch(
            ids,
            contents,
            metadatas,
        )

        # Then batch-insert the vectors into FAISS under those IDs.
        vectors_array = np.array(vectors).astype("float32")
        await self.embedding_storage.insert_batch(vectors_array, int_ids)
        return int_ids

    async def retrieve(
        self,
        query: str,
        k: int = 5,
        fetch_k: int = 20,
        rerank: bool = False,
        metadata_filters: dict | None = None,
    ) -> list[Result]:
        """Search for the most similar documents.

        NOTE(review): the base class declares this parameter as ``top_k``,
        so callers passing ``top_k=`` by keyword fail against this
        implementation — confirm and align the signatures (renaming here
        would break existing ``k=`` callers, hence flagged, not changed).

        Args:
            query (str): query text
            k (int): number of most similar documents to return
            fetch_k (int): number fetched from FAISS before metadata filtering
            rerank (bool): whether to rerank; requires ``rerank_provider``
                at construction time — silently skipped when absent.
            metadata_filters (dict): metadata equality filters

        Returns:
            List[Result]: search results, most similar first.
        """
        embedding = await self.embedding_provider.get_embedding(query)
        scores, indices = await self.embedding_storage.search(
            vector=np.array([embedding]).astype("float32"),
            k=fetch_k if metadata_filters else k,
        )
        # FAISS pads missing results with -1; an empty/all-padding first
        # slot means no hits at all.
        if len(indices[0]) == 0 or indices[0][0] == -1:
            return []
        # Map squared L2 distance to a similarity in [0, 1].
        # NOTE(review): exact only for unit vectors (d^2 = 2 - 2*cos), and
        # stored vectors are not normalized at insert time — confirm.
        scores[0] = 1.0 - (scores[0] / 2.0)
        # NOTE: maybe the size is less than k.
        fetched_docs = await self.document_storage.get_documents(
            metadata_filters=metadata_filters or {},
            ids=indices[0],
        )
        if not fetched_docs:
            return []
        result_docs: list[Result] = []

        # Re-associate fetched rows with their FAISS ranks/scores.
        idx_pos = {fetch_doc["id"]: idx for idx, fetch_doc in enumerate(fetched_docs)}
        for i, indice_idx in enumerate(indices[0]):
            pos = idx_pos.get(indice_idx)
            if pos is None:
                continue  # filtered out by metadata, or -1 padding
            fetch_doc = fetched_docs[pos]
            score = scores[0][i]
            result_docs.append(Result(similarity=float(score), data=fetch_doc))

        top_k_results = result_docs[:k]

        if rerank and self.rerank_provider:
            documents = [doc.data["text"] for doc in top_k_results]
            reranked_results = await self.rerank_provider.rerank(query, documents)
            reranked_results = sorted(
                reranked_results,
                key=lambda x: x.relevance_score,
                reverse=True,
            )
            # Reorder our results by the reranker's preference.
            top_k_results = [
                top_k_results[reranked_result.index]
                for reranked_result in reranked_results
            ]

        return top_k_results

    async def delete(self, doc_id: str):
        """Delete one document chunk (and its vector) by external doc_id."""
        # Resolve the external UUID to the shared integer row ID.
        result = await self.document_storage.get_document_by_doc_id(doc_id)
        int_id = result["id"] if result else None
        if int_id is None:
            return

        # Remove the document row, then its FAISS vector.
        await self.document_storage.delete_document_by_doc_id(doc_id)
        await self.embedding_storage.delete([int_id])

    async def close(self):
        """Close the document storage (the FAISS index is saved on every write)."""
        await self.document_storage.close()

    async def count_documents(self, metadata_filter: dict | None = None) -> int:
        """Count stored documents, optionally restricted by metadata filters.

        Args:
            metadata_filter (dict | None): metadata equality filters.
        """
        count = await self.document_storage.count_documents(
            metadata_filters=metadata_filter or {},
        )
        return count

    async def delete_documents(self, metadata_filters: dict):
        """Delete all documents (and their vectors) matching the metadata filters."""
        docs = await self.document_storage.get_documents(
            metadata_filters=metadata_filters,
            offset=None,
            limit=None,
        )
        doc_ids: list[int] = [doc["id"] for doc in docs]
        # Remove vectors first, then the document rows.
        await self.embedding_storage.delete(doc_ids)
        await self.document_storage.delete_documents(metadata_filters=metadata_filters)
|
||||
61
astrbot/core/event_bus.py
Normal file
61
astrbot/core/event_bus.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""事件总线, 用于处理事件的分发和处理
|
||||
事件总线是一个异步队列, 用于接收各种消息事件, 并将其发送到Scheduler调度器进行处理
|
||||
其中包含了一个无限循环的调度函数, 用于从事件队列中获取新的事件, 并创建一个新的异步任务来执行管道调度器的处理逻辑
|
||||
|
||||
class:
|
||||
EventBus: 事件总线, 用于处理事件的分发和处理
|
||||
|
||||
工作流程:
|
||||
1. 维护一个异步队列, 来接受各种消息事件
|
||||
2. 无限循环的调度函数, 从事件队列中获取新的事件, 打印日志并创建一个新的异步任务来执行管道调度器的处理逻辑
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from asyncio import Queue
|
||||
|
||||
from astrbot.core import logger
|
||||
from astrbot.core.astrbot_config_mgr import AstrBotConfigManager
|
||||
from astrbot.core.pipeline.scheduler import PipelineScheduler
|
||||
|
||||
from .platform import AstrMessageEvent
|
||||
|
||||
|
||||
class EventBus:
    """Dispatches incoming message events to their pipeline schedulers."""

    def __init__(
        self,
        event_queue: Queue,
        pipeline_scheduler_mapping: dict[str, PipelineScheduler],
        astrbot_config_mgr: AstrBotConfigManager | None = None,
    ):
        self.event_queue = event_queue  # incoming event queue
        # abconf uuid -> scheduler
        self.pipeline_scheduler_mapping = pipeline_scheduler_mapping
        # Fix: annotation was non-optional despite the None default.
        # NOTE(review): dispatch() dereferences this unconditionally, so in
        # practice callers must always pass it — confirm call sites.
        self.astrbot_config_mgr = astrbot_config_mgr
        # Fix: asyncio only holds a weak reference to tasks created with
        # create_task(); fire-and-forget tasks could be garbage-collected
        # mid-flight. Keep strong references until each task completes.
        self._running_tasks: set[asyncio.Task] = set()

    async def dispatch(self):
        """Run forever: pull events off the queue, log them, and hand each
        to the scheduler matching its configuration."""
        while True:
            event: AstrMessageEvent = await self.event_queue.get()
            conf_info = self.astrbot_config_mgr.get_conf_info(event.unified_msg_origin)
            self._print_event(event, conf_info["name"])
            scheduler = self.pipeline_scheduler_mapping.get(conf_info["id"])
            task = asyncio.create_task(scheduler.execute(event))
            self._running_tasks.add(task)
            task.add_done_callback(self._running_tasks.discard)

    def _print_event(self, event: AstrMessageEvent, conf_name: str):
        """Log a one-line summary of an incoming event.

        Args:
            event (AstrMessageEvent): the event object
            conf_name (str): name of the configuration handling the event

        """
        # With a sender name: [conf] [platform] sender_name/sender_id: outline
        if event.get_sender_name():
            logger.info(
                f"[{conf_name}] [{event.get_platform_id()}({event.get_platform_name()})] {event.get_sender_name()}/{event.get_sender_id()}: {event.get_message_outline()}",
            )
        # Without a sender name: [conf] [platform] sender_id: outline
        else:
            logger.info(
                f"[{conf_name}] [{event.get_platform_id()}({event.get_platform_name()})] {event.get_sender_id()}: {event.get_message_outline()}",
            )
|
||||
9
astrbot/core/exceptions.py
Normal file
9
astrbot/core/exceptions.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
class AstrBotError(Exception):
    """Base exception for all AstrBot errors.

    Catch this to handle any AstrBot-raised failure in one place.
    """


class ProviderNotFoundError(AstrBotError):
    """Raised when a specified provider is not found."""
|
||||
98
astrbot/core/file_token_service.py
Normal file
98
astrbot/core/file_token_service.py
Normal file
@@ -0,0 +1,98 @@
|
||||
import asyncio
|
||||
import os
|
||||
import platform
|
||||
import time
|
||||
import uuid
|
||||
from urllib.parse import unquote, urlparse
|
||||
|
||||
|
||||
class FileTokenService:
|
||||
"""维护一个简单的基于令牌的文件下载服务,支持超时和懒清除。"""
|
||||
|
||||
def __init__(self, default_timeout: float = 300):
|
||||
self.lock = asyncio.Lock()
|
||||
self.staged_files = {} # token: (file_path, expire_time)
|
||||
self.default_timeout = default_timeout
|
||||
|
||||
async def _cleanup_expired_tokens(self):
|
||||
"""清理过期的令牌"""
|
||||
now = time.time()
|
||||
expired_tokens = [
|
||||
token for token, (_, expire) in self.staged_files.items() if expire < now
|
||||
]
|
||||
for token in expired_tokens:
|
||||
self.staged_files.pop(token, None)
|
||||
|
||||
async def check_token_expired(self, file_token: str) -> bool:
|
||||
async with self.lock:
|
||||
await self._cleanup_expired_tokens()
|
||||
return file_token not in self.staged_files
|
||||
|
||||
async def register_file(self, file_path: str, timeout: float | None = None) -> str:
|
||||
"""向令牌服务注册一个文件。
|
||||
|
||||
Args:
|
||||
file_path(str): 文件路径
|
||||
timeout(float): 超时时间,单位秒(可选)
|
||||
|
||||
Returns:
|
||||
str: 一个单次令牌
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: 当路径不存在时抛出
|
||||
|
||||
"""
|
||||
# 处理 file:///
|
||||
try:
|
||||
parsed_uri = urlparse(file_path)
|
||||
if parsed_uri.scheme == "file":
|
||||
local_path = unquote(parsed_uri.path)
|
||||
if platform.system() == "Windows" and local_path.startswith("/"):
|
||||
local_path = local_path[1:]
|
||||
else:
|
||||
# 如果没有 file:/// 前缀,则认为是普通路径
|
||||
local_path = file_path
|
||||
except Exception:
|
||||
# 解析失败时,按原路径处理
|
||||
local_path = file_path
|
||||
|
||||
async with self.lock:
|
||||
await self._cleanup_expired_tokens()
|
||||
|
||||
if not os.path.exists(local_path):
|
||||
raise FileNotFoundError(
|
||||
f"文件不存在: {local_path} (原始输入: {file_path})",
|
||||
)
|
||||
|
||||
file_token = str(uuid.uuid4())
|
||||
expire_time = time.time() + (
|
||||
timeout if timeout is not None else self.default_timeout
|
||||
)
|
||||
# 存储转换后的真实路径
|
||||
self.staged_files[file_token] = (local_path, expire_time)
|
||||
return file_token
|
||||
|
||||
async def handle_file(self, file_token: str) -> str:
|
||||
"""根据令牌获取文件路径,使用后令牌失效。
|
||||
|
||||
Args:
|
||||
file_token(str): 注册时返回的令牌
|
||||
|
||||
Returns:
|
||||
str: 文件路径
|
||||
|
||||
Raises:
|
||||
KeyError: 当令牌不存在或已过期时抛出
|
||||
FileNotFoundError: 当文件本身已被删除时抛出
|
||||
|
||||
"""
|
||||
async with self.lock:
|
||||
await self._cleanup_expired_tokens()
|
||||
|
||||
if file_token not in self.staged_files:
|
||||
raise KeyError(f"无效或过期的文件 token: {file_token}")
|
||||
|
||||
file_path, _ = self.staged_files.pop(file_token)
|
||||
if not os.path.exists(file_path):
|
||||
raise FileNotFoundError(f"文件不存在: {file_path}")
|
||||
return file_path
|
||||
57
astrbot/core/initial_loader.py
Normal file
57
astrbot/core/initial_loader.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""AstrBot 启动器,负责初始化和启动核心组件和仪表板服务器。
|
||||
|
||||
工作流程:
|
||||
1. 初始化核心生命周期, 传递数据库和日志代理实例到核心生命周期
|
||||
2. 运行核心生命周期任务和仪表板服务器
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import traceback
|
||||
|
||||
from astrbot.core import LogBroker, logger
|
||||
from astrbot.core.core_lifecycle import AstrBotCoreLifecycle
|
||||
from astrbot.core.db import BaseDatabase
|
||||
from astrbot.dashboard.server import AstrBotDashboard
|
||||
|
||||
|
||||
class InitialLoader:
    """AstrBot launcher: wires up the core lifecycle and the dashboard server."""

    def __init__(self, db: BaseDatabase, log_broker: LogBroker):
        self.db = db
        self.logger = logger
        self.log_broker = log_broker
        # Optional override for the dashboard's static-files directory.
        self.webui_dir: str | None = None

    async def start(self):
        """Initialize the core lifecycle, then run it alongside the dashboard server."""
        lifecycle = AstrBotCoreLifecycle(self.log_broker, self.db)

        try:
            await lifecycle.initialize()
        except Exception as e:
            logger.critical(traceback.format_exc())
            logger.critical(f"😭 初始化 AstrBot 失败:{e} !!!")
            return

        core_task = lifecycle.start()

        self.dashboard_server = AstrBotDashboard(
            lifecycle,
            self.db,
            lifecycle.dashboard_shutdown_event,
            self.webui_dir,
        )

        dashboard_coro = self.dashboard_server.run()
        # Run the dashboard next to the core only when a coroutine was produced.
        runnable = (
            asyncio.gather(core_task, dashboard_coro) if dashboard_coro else core_task
        )
        try:
            await runnable  # The whole of AstrBot runs here.
        except asyncio.CancelledError:
            logger.info("🌈 正在关闭 AstrBot...")
            await lifecycle.stop()
|
||||
9
astrbot/core/knowledge_base/chunking/__init__.py
Normal file
9
astrbot/core/knowledge_base/chunking/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
"""文档分块模块"""
|
||||
|
||||
from .base import BaseChunker
|
||||
from .fixed_size import FixedSizeChunker
|
||||
|
||||
__all__ = [
|
||||
"BaseChunker",
|
||||
"FixedSizeChunker",
|
||||
]
|
||||
25
astrbot/core/knowledge_base/chunking/base.py
Normal file
25
astrbot/core/knowledge_base/chunking/base.py
Normal file
@@ -0,0 +1,25 @@
|
||||
"""文档分块器基类
|
||||
|
||||
定义了文档分块处理的抽象接口。
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
|
||||
class BaseChunker(ABC):
    """Abstract contract for text chunkers.

    Concrete chunkers subclass this and provide an async ``chunk``
    implementation.
    """

    @abstractmethod
    async def chunk(self, text: str, **kwargs) -> list[str]:
        """Split ``text`` into a list of chunks.

        Args:
            text: The input text to split.

        Returns:
            list[str]: The resulting chunks, in order.

        """
|
||||
59
astrbot/core/knowledge_base/chunking/fixed_size.py
Normal file
59
astrbot/core/knowledge_base/chunking/fixed_size.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""固定大小分块器
|
||||
|
||||
按照固定的字符数将文本分块,支持重叠区域。
|
||||
"""
|
||||
|
||||
from .base import BaseChunker
|
||||
|
||||
|
||||
class FixedSizeChunker(BaseChunker):
    """Chunker that cuts text into fixed-width windows with optional overlap."""

    def __init__(self, chunk_size: int = 512, chunk_overlap: int = 50):
        """Configure the window geometry.

        Args:
            chunk_size: Maximum characters per chunk.
            chunk_overlap: Characters shared between consecutive chunks.

        """
        self.chunk_size = chunk_size
        self.chunk_overlap = chunk_overlap

    async def chunk(self, text: str, **kwargs) -> list[str]:
        """Split ``text`` into fixed-size pieces.

        Args:
            text: The input text.
            chunk_size: Optional per-call override for the window size.
            chunk_overlap: Optional per-call override for the overlap.

        Returns:
            list[str]: The chunks, in document order.

        """
        size = kwargs.get("chunk_size", self.chunk_size)
        overlap = kwargs.get("chunk_overlap", self.chunk_overlap)

        pieces: list[str] = []
        cursor = 0
        total = len(text)

        while cursor < total:
            window_end = cursor + size
            piece = text[cursor:window_end]

            if piece:
                pieces.append(piece)

            # Slide the window back by the overlap for the next piece.
            cursor = window_end - overlap

            # A degenerate overlap would stall or rewind the cursor;
            # jump straight to the window end instead.
            if cursor >= window_end or overlap >= size:
                cursor = window_end

        return pieces
|
||||
161
astrbot/core/knowledge_base/chunking/recursive.py
Normal file
161
astrbot/core/knowledge_base/chunking/recursive.py
Normal file
@@ -0,0 +1,161 @@
|
||||
from collections.abc import Callable
|
||||
|
||||
from .base import BaseChunker
|
||||
|
||||
|
||||
class RecursiveCharacterChunker(BaseChunker):
|
||||
def __init__(
|
||||
self,
|
||||
chunk_size: int = 500,
|
||||
chunk_overlap: int = 100,
|
||||
length_function: Callable[[str], int] = len,
|
||||
is_separator_regex: bool = False,
|
||||
separators: list[str] | None = None,
|
||||
):
|
||||
"""初始化递归字符文本分割器
|
||||
|
||||
Args:
|
||||
chunk_size: 每个文本块的最大大小
|
||||
chunk_overlap: 每个文本块之间的重叠部分大小
|
||||
length_function: 计算文本长度的函数
|
||||
is_separator_regex: 分隔符是否为正则表达式
|
||||
separators: 用于分割文本的分隔符列表,按优先级排序
|
||||
|
||||
"""
|
||||
self.chunk_size = chunk_size
|
||||
self.chunk_overlap = chunk_overlap
|
||||
self.length_function = length_function
|
||||
self.is_separator_regex = is_separator_regex
|
||||
|
||||
# 默认分隔符列表,按优先级从高到低
|
||||
self.separators = separators or [
|
||||
"\n\n", # 段落
|
||||
"\n", # 换行
|
||||
"。", # 中文句子
|
||||
",", # 中文逗号
|
||||
". ", # 句子
|
||||
", ", # 逗号分隔
|
||||
" ", # 单词
|
||||
"", # 字符
|
||||
]
|
||||
|
||||
async def chunk(self, text: str, **kwargs) -> list[str]:
|
||||
"""递归地将文本分割成块
|
||||
|
||||
Args:
|
||||
text: 要分割的文本
|
||||
chunk_size: 每个文本块的最大大小
|
||||
chunk_overlap: 每个文本块之间的重叠部分大小
|
||||
|
||||
Returns:
|
||||
分割后的文本块列表
|
||||
|
||||
"""
|
||||
if not text:
|
||||
return []
|
||||
|
||||
overlap = kwargs.get("chunk_overlap", self.chunk_overlap)
|
||||
chunk_size = kwargs.get("chunk_size", self.chunk_size)
|
||||
|
||||
text_length = self.length_function(text)
|
||||
if text_length <= chunk_size:
|
||||
return [text]
|
||||
|
||||
for separator in self.separators:
|
||||
if separator == "":
|
||||
return self._split_by_character(text, chunk_size, overlap)
|
||||
|
||||
if separator in text:
|
||||
splits = text.split(separator)
|
||||
# 重新添加分隔符(除了最后一个片段)
|
||||
splits = [s + separator for s in splits[:-1]] + [splits[-1]]
|
||||
splits = [s for s in splits if s]
|
||||
if len(splits) == 1:
|
||||
continue
|
||||
|
||||
# 递归合并分割后的文本块
|
||||
final_chunks = []
|
||||
current_chunk = []
|
||||
current_chunk_length = 0
|
||||
|
||||
for split in splits:
|
||||
split_length = self.length_function(split)
|
||||
|
||||
# 如果单个分割部分已经超过了chunk_size,需要递归分割
|
||||
if split_length > chunk_size:
|
||||
# 先处理当前积累的块
|
||||
if current_chunk:
|
||||
combined_text = "".join(current_chunk)
|
||||
final_chunks.extend(
|
||||
await self.chunk(
|
||||
combined_text,
|
||||
chunk_size=chunk_size,
|
||||
chunk_overlap=overlap,
|
||||
),
|
||||
)
|
||||
current_chunk = []
|
||||
current_chunk_length = 0
|
||||
|
||||
# 递归分割过大的部分
|
||||
final_chunks.extend(
|
||||
await self.chunk(
|
||||
split,
|
||||
chunk_size=chunk_size,
|
||||
chunk_overlap=overlap,
|
||||
),
|
||||
)
|
||||
# 如果添加这部分会使当前块超过chunk_size
|
||||
elif current_chunk_length + split_length > chunk_size:
|
||||
# 合并当前块并添加到结果中
|
||||
combined_text = "".join(current_chunk)
|
||||
final_chunks.append(combined_text)
|
||||
|
||||
# 处理重叠部分
|
||||
overlap_start = max(0, len(combined_text) - overlap)
|
||||
if overlap_start > 0:
|
||||
overlap_text = combined_text[overlap_start:]
|
||||
current_chunk = [overlap_text, split]
|
||||
current_chunk_length = (
|
||||
self.length_function(overlap_text) + split_length
|
||||
)
|
||||
else:
|
||||
current_chunk = [split]
|
||||
current_chunk_length = split_length
|
||||
else:
|
||||
# 添加到当前块
|
||||
current_chunk.append(split)
|
||||
current_chunk_length += split_length
|
||||
|
||||
# 处理剩余的块
|
||||
if current_chunk:
|
||||
final_chunks.append("".join(current_chunk))
|
||||
|
||||
return final_chunks
|
||||
|
||||
return [text]
|
||||
|
||||
def _split_by_character(
|
||||
self,
|
||||
text: str,
|
||||
chunk_size: int | None = None,
|
||||
overlap: int | None = None,
|
||||
) -> list[str]:
|
||||
"""按字符级别分割文本
|
||||
|
||||
Args:
|
||||
text: 要分割的文本
|
||||
|
||||
Returns:
|
||||
分割后的文本块列表
|
||||
|
||||
"""
|
||||
chunk_size = chunk_size or self.chunk_size
|
||||
overlap = overlap or self.chunk_overlap
|
||||
result = []
|
||||
for i in range(0, len(text), chunk_size - overlap):
|
||||
end = min(i + chunk_size, len(text))
|
||||
result.append(text[i:end])
|
||||
if end == len(text):
|
||||
break
|
||||
|
||||
return result
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user