Compare commits
1860 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2930cc3fd8 | ||
|
|
0e841a8b25 | ||
|
|
67fa1611cc | ||
|
|
91136bb9f7 | ||
|
|
7c050d1adc | ||
|
|
a0690a6afc | ||
|
|
c51609b261 | ||
|
|
72148f66eb | ||
|
|
a04993a2bb | ||
|
|
74f845b06d | ||
|
|
50144ddcae | ||
|
|
94bf3b8195 | ||
|
|
e190bbeeed | ||
|
|
92abc43c9d | ||
|
|
c8e34ff26f | ||
|
|
630df3e76e | ||
|
|
bdbf382201 | ||
|
|
00eefc82db | ||
|
|
dc97080837 | ||
|
|
0b7fc29ac4 | ||
|
|
ff998fdd8d | ||
|
|
d7461ed54c | ||
|
|
3ce577acf9 | ||
|
|
50b1dccff3 | ||
|
|
c33e7e30d4 | ||
|
|
bc7f01ba36 | ||
|
|
2ce653caad | ||
|
|
0d850d7b22 | ||
|
|
a2be155b8e | ||
|
|
68aa107689 | ||
|
|
23096ed3a5 | ||
|
|
90a65c35c1 | ||
|
|
3d88827a95 | ||
|
|
40a0a8df5a | ||
|
|
20f7129c0b | ||
|
|
0e962e95dd | ||
|
|
07ba9c772c | ||
|
|
0622d88b22 | ||
|
|
594f0fed55 | ||
|
|
04b0d9b88d | ||
|
|
1f2af8ef94 | ||
|
|
598ea2d857 | ||
|
|
6dd9bbb516 | ||
|
|
3cd0b47dc6 | ||
|
|
65c71b5f20 | ||
|
|
1152b11202 | ||
|
|
51246ea31b | ||
|
|
7e5592dd32 | ||
|
|
c6b28caebf | ||
|
|
ca002f6fff | ||
|
|
14ec392091 | ||
|
|
5e2eb91ac0 | ||
|
|
c1626613ce | ||
|
|
42042d9e73 | ||
|
|
22c3b53ab8 | ||
|
|
090c32c90e | ||
|
|
4f4a9b9e55 | ||
|
|
6c7d7c9015 | ||
|
|
562e62a8c0 | ||
|
|
0823f7aa48 | ||
|
|
eb201c0420 | ||
|
|
6cfed9a39d | ||
|
|
33618c4a6b | ||
|
|
ace0a7c219 | ||
|
|
f7d018cf94 | ||
|
|
8ae2a556e4 | ||
|
|
4188deb386 | ||
|
|
82cf4ed909 | ||
|
|
88fc437abc | ||
|
|
57f868cab1 | ||
|
|
6cb5527894 | ||
|
|
016783a1e5 | ||
|
|
594ccff9c8 | ||
|
|
30792f0584 | ||
|
|
8f021eb35a | ||
|
|
1969abc340 | ||
|
|
b1b53ab983 | ||
|
|
9b5af23982 | ||
|
|
4cedc6d3c8 | ||
|
|
4e9cce76da | ||
|
|
9b004f3d2f | ||
|
|
9430e3090d | ||
|
|
ba44f9117b | ||
|
|
eb56710a72 | ||
|
|
38e3f27899 | ||
|
|
3c58d96db5 | ||
|
|
a6be0cc135 | ||
|
|
a53510bc41 | ||
|
|
1fd482e899 | ||
|
|
2f130ba009 | ||
|
|
e6d9db9395 | ||
|
|
e0ac743cdb | ||
|
|
b0d3fc11f0 | ||
|
|
7e0a50fbf2 | ||
|
|
59df244173 | ||
|
|
deb31a02cf | ||
|
|
e3aa1315ae | ||
|
|
65bc5efa19 | ||
|
|
abc4bc24b4 | ||
|
|
5df3f06f83 | ||
|
|
0e1de82bd7 | ||
|
|
f31e41b3f1 | ||
|
|
fe8d2718c4 | ||
|
|
8afefada0a | ||
|
|
745e1c37c0 | ||
|
|
fdb5988cec | ||
|
|
36ffcf3cc3 | ||
|
|
a0f8f3ae32 | ||
|
|
130f52f315 | ||
|
|
a05868cc45 | ||
|
|
2fc77aed15 | ||
|
|
c56edb4da6 | ||
|
|
6672190760 | ||
|
|
f122b17097 | ||
|
|
2c5f68e696 | ||
|
|
e1ca645a32 | ||
|
|
333bf56ddc | ||
|
|
b240594859 | ||
|
|
beccae933f | ||
|
|
e6aa1d2c54 | ||
|
|
5e808bab65 | ||
|
|
361d78247b | ||
|
|
3550103e45 | ||
|
|
8b0d4d4de4 | ||
|
|
dc71c04b67 | ||
|
|
a0254ed817 | ||
|
|
2563ecf3c5 | ||
|
|
c04738d9fe | ||
|
|
1266b4d086 | ||
|
|
99cf0a1522 | ||
|
|
98a75e923d | ||
|
|
ad96d676e6 | ||
|
|
79333bbc35 | ||
|
|
5c5b0f4fde | ||
|
|
ed6cdfedbb | ||
|
|
23f13ef05f | ||
|
|
f9c59d9706 | ||
|
|
e1cec42227 | ||
|
|
8d79c50d53 | ||
|
|
d77830b97f | ||
|
|
394540f689 | ||
|
|
7d776e0ce2 | ||
|
|
17df1692b9 | ||
|
|
9ab652641d | ||
|
|
9119f7166f | ||
|
|
da7d9d8eb9 | ||
|
|
80fccc90b7 | ||
|
|
dcebc70f1a | ||
|
|
259e7bc322 | ||
|
|
37bdb6c6f6 | ||
|
|
dc71afdd3f | ||
|
|
44638108d0 | ||
|
|
93fcac498c | ||
|
|
79e2743aac | ||
|
|
5e9c7cdd91 | ||
|
|
6f73e5087d | ||
|
|
8c120b020e | ||
|
|
12fc6f9d38 | ||
|
|
a6e8483b4c | ||
|
|
7191d28ada | ||
|
|
e6b5e3d282 | ||
|
|
1413d6b5fe | ||
|
|
dcd8a1094c | ||
|
|
e64b31b9ba | ||
|
|
080f347511 | ||
|
|
eaaff4298d | ||
|
|
dd5a02e8ef | ||
|
|
3211ec57ee | ||
|
|
6796afdaee | ||
|
|
cc6fe57773 | ||
|
|
1dfc831938 | ||
|
|
cafeda4abf | ||
|
|
d951b99718 | ||
|
|
0ad87209e5 | ||
|
|
1b50c5404d | ||
|
|
3007f67cab | ||
|
|
ee08659f01 | ||
|
|
baf5ad0fab | ||
|
|
8bdd748aec | ||
|
|
cef0c22f52 | ||
|
|
13d3fc5cfe | ||
|
|
b91141e2be | ||
|
|
f8a4b54165 | ||
|
|
afe007ca0b | ||
|
|
8a9a044f95 | ||
|
|
5eaf03e227 | ||
|
|
a8437d9331 | ||
|
|
e0392fa98b | ||
|
|
68ff8951de | ||
|
|
9c6b31e71c | ||
|
|
50f74f5ba2 | ||
|
|
b9de2aef60 | ||
|
|
7a47598538 | ||
|
|
3c8c28ebd5 | ||
|
|
524285f767 | ||
|
|
c2a34475f1 | ||
|
|
a69195a02b | ||
|
|
19d7438499 | ||
|
|
ccb380ce06 | ||
|
|
a35c439bbd | ||
|
|
09d1f96603 | ||
|
|
26aa18d980 | ||
|
|
d10b542797 | ||
|
|
ce4e4fb8dd | ||
|
|
8f4a31cf8c | ||
|
|
23549f13d6 | ||
|
|
869d11f9a6 | ||
|
|
02e73b82ee | ||
|
|
f85f87f545 | ||
|
|
1fff5713f3 | ||
|
|
8453ec36f0 | ||
|
|
d5b3ce8424 | ||
|
|
80cbbfa5ca | ||
|
|
9177bb660f | ||
|
|
a3df39a01a | ||
|
|
25dce05cbb | ||
|
|
1542ea3e03 | ||
|
|
6084abbcfe | ||
|
|
ed19b63914 | ||
|
|
4efeb85296 | ||
|
|
fc76665615 | ||
|
|
3a044bb71a | ||
|
|
cddd606562 | ||
|
|
7a5bc51c11 | ||
|
|
9f939b4b6f | ||
|
|
80a86f5b1b | ||
|
|
a0ce1855ab | ||
|
|
a4b43b884a | ||
|
|
824c0f6667 | ||
|
|
a030fe8491 | ||
|
|
3a9429e8ef | ||
|
|
c4eb1ab748 | ||
|
|
29ed19d600 | ||
|
|
0cc65513a5 | ||
|
|
debc048659 | ||
|
|
92f5c918dd | ||
|
|
9519f1e8e2 | ||
|
|
a8f874bf05 | ||
|
|
9d9917e45b | ||
|
|
91ee0a870d | ||
|
|
6cbbffc5a9 | ||
|
|
8f26fd34d1 | ||
|
|
fda655f6d7 | ||
|
|
a663d6509b | ||
|
|
9ec8839efa | ||
|
|
a7a0350eb2 | ||
|
|
39a7a0d960 | ||
|
|
7740e1e131 | ||
|
|
9dce1ed47e | ||
|
|
e84a00d3a5 | ||
|
|
88a944cb57 | ||
|
|
20c32e72cc | ||
|
|
4788c20816 | ||
|
|
e83fc570a4 | ||
|
|
e841b6af88 | ||
|
|
ea6f209557 | ||
|
|
9bfa726107 | ||
|
|
d24902c66d | ||
|
|
72aea2d3f3 | ||
|
|
dc9612d564 | ||
|
|
1770556d56 | ||
|
|
888fb84aee | ||
|
|
d597fd056d | ||
|
|
dea0ab3974 | ||
|
|
da6facd7d7 | ||
|
|
bb8ab5f173 | ||
|
|
ac8a541059 | ||
|
|
0e66771f0e | ||
|
|
d3a295a801 | ||
|
|
f2df771771 | ||
|
|
7b72cd87a5 | ||
|
|
9431efc6d1 | ||
|
|
7c3f5431ba | ||
|
|
d98cf16a4c | ||
|
|
2c3c3ae546 | ||
|
|
905eef48e3 | ||
|
|
b31b520c7c | ||
|
|
17aee086a3 | ||
|
|
c1756e5767 | ||
|
|
2920279c64 | ||
|
|
1f0f985b01 | ||
|
|
0762c81633 | ||
|
|
28ef301ccc | ||
|
|
26c6a2950f | ||
|
|
5082876de3 | ||
|
|
e50e7ad3d5 | ||
|
|
45a4a6b6da | ||
|
|
02918b7267 | ||
|
|
6c662a36c1 | ||
|
|
b78fe3822a | ||
|
|
35eda37e83 | ||
|
|
176a8e7067 | ||
|
|
61d4f1fd4b | ||
|
|
121b68995e | ||
|
|
d11f1d8dae | ||
|
|
c0ef2b5064 | ||
|
|
2a7308363e | ||
|
|
dc0c556f96 | ||
|
|
ba2ee1c0aa | ||
|
|
0f8b550d68 | ||
|
|
ed1fc98821 | ||
|
|
fa53b468fd | ||
|
|
4e2533d320 | ||
|
|
388ae49e55 | ||
|
|
f3f347dcba | ||
|
|
655be3519c | ||
|
|
06df2940af | ||
|
|
4149549e42 | ||
|
|
da351991f8 | ||
|
|
3305152e50 | ||
|
|
bea7bae674 | ||
|
|
45773d38ed | ||
|
|
8d4c176314 | ||
|
|
9ca5c87c4c | ||
|
|
36a6f00e5f | ||
|
|
e24a5b4cb5 | ||
|
|
f88031b0c9 | ||
|
|
830151e6da | ||
|
|
1e14fba81a | ||
|
|
7b8800c4eb | ||
|
|
8f4625f53b | ||
|
|
1e5f243edb | ||
|
|
e5eab2af34 | ||
|
|
c10973e160 | ||
|
|
b1e4bff3ec | ||
|
|
c1202cda63 | ||
|
|
32d6cd7776 | ||
|
|
2f78d30e93 | ||
|
|
33407c9f0d | ||
|
|
d2d5ef1c5c | ||
|
|
98d8eaee02 | ||
|
|
10b9228060 | ||
|
|
5872f1e017 | ||
|
|
5073f21002 | ||
|
|
69aaf09ac8 | ||
|
|
6e61ee81d8 | ||
|
|
cfd05a8d17 | ||
|
|
29845fcc4c | ||
|
|
e204b180a8 | ||
|
|
563972fd29 | ||
|
|
cbe94b84fc | ||
|
|
aa6f73574d | ||
|
|
94f0419ef7 | ||
|
|
cefd2d7f49 | ||
|
|
81e1e545fb | ||
|
|
d516920e72 | ||
|
|
2171372246 | ||
|
|
d2df4d0cce | ||
|
|
6ab90fc123 | ||
|
|
1a84ebbb1e | ||
|
|
c9c0352369 | ||
|
|
9903b028a3 | ||
|
|
49def5d883 | ||
|
|
6975525b70 | ||
|
|
fbc4f8527b | ||
|
|
90cb5a1951 | ||
|
|
ac71d9f034 | ||
|
|
64bcbc9fc0 | ||
|
|
9e7d46f956 | ||
|
|
e911896cfb | ||
|
|
9c6d66093f | ||
|
|
b2e39b9701 | ||
|
|
e95ad4049b | ||
|
|
1df49d1d6f | ||
|
|
b71000e2f3 | ||
|
|
47e6ed455e | ||
|
|
92592fb9d9 | ||
|
|
02a9769b35 | ||
|
|
7640f11bfc | ||
|
|
be8a0991ed | ||
|
|
9fa44dbcfa | ||
|
|
61aac9c80c | ||
|
|
60af83cfee | ||
|
|
cf64e6c231 | ||
|
|
2cae941bae | ||
|
|
bc0784f41d | ||
|
|
b711140f26 | ||
|
|
c57d75e01a | ||
|
|
1d766001bb | ||
|
|
0759a11a85 | ||
|
|
cb749a38ab | ||
|
|
369eab18ab | ||
|
|
73edeae013 | ||
|
|
7d46314dc8 | ||
|
|
d5a53a89eb | ||
|
|
a85bc510dd | ||
|
|
2beea7d218 | ||
|
|
a93cd3dd5f | ||
|
|
6c1f540170 | ||
|
|
d026a9f009 | ||
|
|
a8e7dadd39 | ||
|
|
2f8d921adf | ||
|
|
0c6e526f94 | ||
|
|
b1e3018b6b | ||
|
|
87f05fce66 | ||
|
|
1b37530c96 | ||
|
|
db4d02c2e2 | ||
|
|
fd7811402b | ||
|
|
eb0325e627 | ||
|
|
842c3c8ea9 | ||
|
|
8b4b04ec09 | ||
|
|
9f32c9280f | ||
|
|
4fcd09cfa8 | ||
|
|
7a8d65d37d | ||
|
|
23129a9ba2 | ||
|
|
7f791e730b | ||
|
|
f7e296b349 | ||
|
|
712d4acaaa | ||
|
|
74a5c01f21 | ||
|
|
3ba8724d77 | ||
|
|
6313a7d8a9 | ||
|
|
432a3f520c | ||
|
|
191b3e42d4 | ||
|
|
a27f05fcb4 | ||
|
|
2f33e0b873 | ||
|
|
f0359467f1 | ||
|
|
d1db8cf2c8 | ||
|
|
b1985ed2ce | ||
|
|
140ddc70e6 | ||
|
|
d7fd616470 | ||
|
|
3ccbef141e | ||
|
|
e92fbb0443 | ||
|
|
bd270aed68 | ||
|
|
28d7864393 | ||
|
|
b5d8173ee3 | ||
|
|
17d62a9af7 | ||
|
|
d89fb863ed | ||
|
|
a21ad77820 | ||
|
|
f86c8e8cab | ||
|
|
cb12cbdd3d | ||
|
|
6661fa996c | ||
|
|
c19bca798b | ||
|
|
8f98b411db | ||
|
|
a8aa03847e | ||
|
|
1bfd747cc6 | ||
|
|
ae06d945a7 | ||
|
|
9f41d5f34d | ||
|
|
ef61c52908 | ||
|
|
d8842ef274 | ||
|
|
c88fdaf353 | ||
|
|
af295da871 | ||
|
|
083235a2fe | ||
|
|
2a3a5f7eb2 | ||
|
|
77c48f280f | ||
|
|
0ee1eb2f9f | ||
|
|
c2b20365bb | ||
|
|
cfdc7e4452 | ||
|
|
2363f61aa9 | ||
|
|
557ac6f9fa | ||
|
|
a49b871cf9 | ||
|
|
a0d6b3efba | ||
|
|
6cabf07bc0 | ||
|
|
a15444ee8c | ||
|
|
ceb5f5669e | ||
|
|
25b75e05e4 | ||
|
|
4d214bb5c1 | ||
|
|
7cbaed8c6c | ||
|
|
2915fdf665 | ||
|
|
a66c385b08 | ||
|
|
4dace7c5d8 | ||
|
|
8ebf087dbf | ||
|
|
2fa8bda5bb | ||
|
|
a5ae833945 | ||
|
|
d21d42b312 | ||
|
|
78575f0f0a | ||
|
|
8ccd292d16 | ||
|
|
2534f59398 | ||
|
|
5c60dbe2b1 | ||
|
|
c99ecde15f | ||
|
|
219f3403d9 | ||
|
|
00f417bad6 | ||
|
|
81649f053b | ||
|
|
e5bde50f2d | ||
|
|
0321e00b0d | ||
|
|
09528e3292 | ||
|
|
e7412a9cbf | ||
|
|
01efe5f869 | ||
|
|
28a178a55c | ||
|
|
88f130014c | ||
|
|
af258c590c | ||
|
|
b0eb5733be | ||
|
|
fe35bfba37 | ||
|
|
7cfbc4ab8f | ||
|
|
7a9d4f0abd | ||
|
|
6f6a5b565c | ||
|
|
e57deb873c | ||
|
|
0f692b1608 | ||
|
|
8c03e79f99 | ||
|
|
71290f0929 | ||
|
|
22364ef7de | ||
|
|
2cc1eb1abc | ||
|
|
90dbcbb4e2 | ||
|
|
66503d58be | ||
|
|
8e10f0ce2b | ||
|
|
f51f510f2e | ||
|
|
c44f085b47 | ||
|
|
a35f36eeaf | ||
|
|
14564c392a | ||
|
|
76e05ea749 | ||
|
|
ab599dceed | ||
|
|
4c37604445 | ||
|
|
bb74018d19 | ||
|
|
575289e5bc | ||
|
|
e89da2a7b4 | ||
|
|
bd34959f68 | ||
|
|
622dcf8fd5 | ||
|
|
9e315739b7 | ||
|
|
7b01adc5df | ||
|
|
432fc47443 | ||
|
|
d8fba44c5e | ||
|
|
e29d3d8c01 | ||
|
|
e678413214 | ||
|
|
eaa9d9d087 | ||
|
|
9e3cc076b7 | ||
|
|
3bb01fa52c | ||
|
|
008e49d144 | ||
|
|
4e275384b0 | ||
|
|
63ec99f67a | ||
|
|
14a8bb57df | ||
|
|
7512bfc710 | ||
|
|
3c3b6dadc3 | ||
|
|
cd722a0e39 | ||
|
|
a1b5d0a100 | ||
|
|
69d3ae709c | ||
|
|
67ef993d61 | ||
|
|
20f49890ad | ||
|
|
3e4917f0a1 | ||
|
|
99ee75aec6 | ||
|
|
1674653a42 | ||
|
|
d2f7e55bf5 | ||
|
|
9f31df7f3a | ||
|
|
b8c1b53d67 | ||
|
|
2495837791 | ||
|
|
b6562e3c47 | ||
|
|
c57da046ee | ||
|
|
ff63134c14 | ||
|
|
3f5210c587 | ||
|
|
3df5e7b9b9 | ||
|
|
225db66738 | ||
|
|
383ebb8f57 | ||
|
|
e1bed60f1f | ||
|
|
edbb856023 | ||
|
|
98d3ab646f | ||
|
|
81be556f1b | ||
|
|
f45a085469 | ||
|
|
210cc58cc3 | ||
|
|
1063b11ef6 | ||
|
|
a4e999c47f | ||
|
|
543e01c301 | ||
|
|
14e0aa3ec5 | ||
|
|
1a8a171f8b | ||
|
|
f1954f9a43 | ||
|
|
441b148501 | ||
|
|
bd0f30b81c | ||
|
|
ad14e9bf40 | ||
|
|
6f71301aaf | ||
|
|
5f0d601baa | ||
|
|
f234a5bcc2 | ||
|
|
ab677ea100 | ||
|
|
f3ad53e949 | ||
|
|
d324cfa84d | ||
|
|
dd4319d72a | ||
|
|
1f2de3d3d8 | ||
|
|
72702beb0b | ||
|
|
adb0cbc5dd | ||
|
|
6a503b82c3 | ||
|
|
28a87351f1 | ||
|
|
bcc97378b0 | ||
|
|
eb8a138713 | ||
|
|
dcd7dcbbdf | ||
|
|
1538759ba7 | ||
|
|
30e8ea7fd8 | ||
|
|
879b7b582c | ||
|
|
8ba4236402 | ||
|
|
5eef8fa9b9 | ||
|
|
d03d035437 | ||
|
|
68e8e1f70b | ||
|
|
7acb45b157 | ||
|
|
c36142deaf | ||
|
|
5fd6e316fa | ||
|
|
39a9d7765a | ||
|
|
7cfcba29a6 | ||
|
|
9bf8aadca9 | ||
|
|
714d4af63d | ||
|
|
8203fdb4f0 | ||
|
|
5e1e2d1a4f | ||
|
|
2f941de65b | ||
|
|
777c503002 | ||
|
|
e9b23f68fd | ||
|
|
efa45e6203 | ||
|
|
638f55f83c | ||
|
|
8b2fc29d5b | ||
|
|
b516fb0550 | ||
|
|
efef34c01e | ||
|
|
5f1dfa7599 | ||
|
|
8e9c7544cf | ||
|
|
4e3d5641c8 | ||
|
|
20b760529e | ||
|
|
a55a07c5ff | ||
|
|
94ee8ea297 | ||
|
|
ec5d71d0e1 | ||
|
|
d121d08d05 | ||
|
|
be08f4a558 | ||
|
|
010f082fbb | ||
|
|
073cdf6d51 | ||
|
|
4df8606ab6 | ||
|
|
71442d26ec | ||
|
|
4f5528869c | ||
|
|
f16feff17b | ||
|
|
71b233fe5f | ||
|
|
770dec9ed6 | ||
|
|
2ca95a988e | ||
|
|
d8aae538cd | ||
|
|
cf1e7ee08a | ||
|
|
d14513ddfd | ||
|
|
9a9017bc6c | ||
|
|
3c9b654713 | ||
|
|
80d2ad40bc | ||
|
|
31670e75e5 | ||
|
|
ed6011a2be | ||
|
|
cdded38ade | ||
|
|
f536f24833 | ||
|
|
f5bff00b1f | ||
|
|
27c9717445 | ||
|
|
863a1ba8ef | ||
|
|
cb04dd2b83 | ||
|
|
8c7cf51958 | ||
|
|
244fb1fed6 | ||
|
|
25f7a68a13 | ||
|
|
62d8cf79ef | ||
|
|
646b18d910 | ||
|
|
2f81b2e381 | ||
|
|
1f5a7e7885 | ||
|
|
80fca470f2 | ||
|
|
6e9d9ac856 | ||
|
|
8d6fada1eb | ||
|
|
3e715399a1 | ||
|
|
81cc8831f9 | ||
|
|
f7370044a7 | ||
|
|
51b015a629 | ||
|
|
392af7a553 | ||
|
|
d2dd07bad7 | ||
|
|
cebcd6925a | ||
|
|
e7b4357fc7 | ||
|
|
dc279dde4a | ||
|
|
c0810a674f | ||
|
|
0760cabbbe | ||
|
|
3b149c520b | ||
|
|
3d19fc89ff | ||
|
|
cd1b1919f4 | ||
|
|
0ed646eb27 | ||
|
|
c0c5859c99 | ||
|
|
a47121b849 | ||
|
|
d9dd20e89a | ||
|
|
ed4609ebe5 | ||
|
|
e24225c828 | ||
|
|
01ef86d658 | ||
|
|
cd4802da04 | ||
|
|
2aca65780f | ||
|
|
2c435f7387 | ||
|
|
cc1afd1a9c | ||
|
|
6f098cdba6 | ||
|
|
d03e9fb90a | ||
|
|
9f2966abe9 | ||
|
|
4e28ea1883 | ||
|
|
289214e85c | ||
|
|
a20d98bf93 | ||
|
|
7c3d98acbe | ||
|
|
7311786f48 | ||
|
|
82de9c926e | ||
|
|
7fd86d4de3 | ||
|
|
724da29e2a | ||
|
|
54113d7b94 | ||
|
|
66396e8290 | ||
|
|
72be76215f | ||
|
|
ace86703a9 | ||
|
|
7b25495463 | ||
|
|
3d4b651c1f | ||
|
|
d305ae064d | ||
|
|
ac4f3d8907 | ||
|
|
af2687771b | ||
|
|
a67b7f909a | ||
|
|
f9c3e4cdb0 | ||
|
|
dc62c1f8d4 | ||
|
|
0441b51a68 | ||
|
|
5c0c9f687e | ||
|
|
e049c54043 | ||
|
|
99e47540d5 | ||
|
|
8e1885ffeb | ||
|
|
8501a0c205 | ||
|
|
797f2a3173 | ||
|
|
1057b4bc35 | ||
|
|
efc0116595 | ||
|
|
cdc560fad0 | ||
|
|
75a2803710 | ||
|
|
fb3169faa4 | ||
|
|
d587bd837e | ||
|
|
b9fab74edc | ||
|
|
50c22bbadb | ||
|
|
d0b10b9195 | ||
|
|
50a296de20 | ||
|
|
c8fe4f4a3c | ||
|
|
a8ba0720af | ||
|
|
745a01246c | ||
|
|
bee5d3550f | ||
|
|
1789393151 | ||
|
|
345afe1338 | ||
|
|
65428aa49f | ||
|
|
b251ee9322 | ||
|
|
04f00682a0 | ||
|
|
90dcda1475 | ||
|
|
f1ee4eb89f | ||
|
|
343fc22168 | ||
|
|
00ef0d7e3d | ||
|
|
f2deaf6199 | ||
|
|
617a2c010e | ||
|
|
c79e38e044 | ||
|
|
38eae1d1ee | ||
|
|
7e4c89b0cb | ||
|
|
14c29f07bd | ||
|
|
825e3dbcf5 | ||
|
|
8275130f04 | ||
|
|
2c47abea95 | ||
|
|
85aa28d724 | ||
|
|
53a3736b04 | ||
|
|
86ba3c230e | ||
|
|
8d21126bd6 | ||
|
|
74ded91976 | ||
|
|
7c27520d57 | ||
|
|
b54bbc4c5a | ||
|
|
3e09a4ddd4 | ||
|
|
f93f04a536 | ||
|
|
b93f30b809 | ||
|
|
95bd2f26a5 | ||
|
|
7cfcf056f9 | ||
|
|
96b565e1e8 | ||
|
|
9d7ad7a18f | ||
|
|
9838c2758b | ||
|
|
1b1f5f5a5e | ||
|
|
0f95f62aa1 | ||
|
|
9405ba7871 | ||
|
|
ccb95f803c | ||
|
|
dae745d925 | ||
|
|
791db65526 | ||
|
|
60b2ff0a7a | ||
|
|
e6c8507379 | ||
|
|
420db5416e | ||
|
|
6e03218d54 | ||
|
|
5e4bd36b26 | ||
|
|
bbc039366e | ||
|
|
e1ec7dbbba | ||
|
|
075b008740 | ||
|
|
b2c382fa01 | ||
|
|
02e2e617f5 | ||
|
|
c5f9b5861f | ||
|
|
2dace4c697 | ||
|
|
c7891385ca | ||
|
|
2059ddcadf | ||
|
|
ba1b68df20 | ||
|
|
bfc8024119 | ||
|
|
f26cf6ed6f | ||
|
|
403b61836d | ||
|
|
b5af7d1eb9 | ||
|
|
f453af6e4c | ||
|
|
f2be55bd8e | ||
|
|
d241dd17ca | ||
|
|
cecafdfe6c | ||
|
|
6fecfd1a0e | ||
|
|
64245d001c | ||
|
|
7d92965cae | ||
|
|
b4fa08c4e2 | ||
|
|
d4e9566851 | ||
|
|
a26b494f7f | ||
|
|
b84e22e41f | ||
|
|
cee6efab19 | ||
|
|
30f71cb550 | ||
|
|
771e755a78 | ||
|
|
16ec462abd | ||
|
|
ca55465d3c | ||
|
|
7098c98dde | ||
|
|
f56355da89 | ||
|
|
422160debd | ||
|
|
8062cf406a | ||
|
|
0e802232ec | ||
|
|
f650a9205d | ||
|
|
c85dbb2347 | ||
|
|
a6a79128c8 | ||
|
|
42839627e8 | ||
|
|
e7f35098e4 | ||
|
|
267e68a894 | ||
|
|
b32b444438 | ||
|
|
522d0f8313 | ||
|
|
5715e5de67 | ||
|
|
cc6b05e8b3 | ||
|
|
417747d5d0 | ||
|
|
a34f439226 | ||
|
|
b7ca014fd0 | ||
|
|
fa098d585a | ||
|
|
c35a14e3ec | ||
|
|
60651736a5 | ||
|
|
581f9b7bd3 | ||
|
|
124eb04807 | ||
|
|
1d561da7fb | ||
|
|
16e3cd0784 | ||
|
|
a6d91933dc | ||
|
|
445c40f758 | ||
|
|
725a841a3b | ||
|
|
f77c453843 | ||
|
|
ba6718d5bc | ||
|
|
cdb7a1b3fa | ||
|
|
a03c79b89d | ||
|
|
98800d3426 | ||
|
|
a616adaac4 | ||
|
|
ffb5605c99 | ||
|
|
621b556856 | ||
|
|
a3ffecbb2a | ||
|
|
ea64cebe2a | ||
|
|
e79487dd5f | ||
|
|
7fe1c1ec89 | ||
|
|
ab2bbff369 | ||
|
|
ec32825309 | ||
|
|
fd0c182087 | ||
|
|
49fcff1daf | ||
|
|
33b64ddf39 | ||
|
|
4c447aa648 | ||
|
|
ccbfc3d274 | ||
|
|
f83fe43bbb | ||
|
|
19022d67f8 | ||
|
|
58a815dd6b | ||
|
|
1ce95c473d | ||
|
|
eb365e398d | ||
|
|
bc9fe82860 | ||
|
|
b3cd9bf2b9 | ||
|
|
c5c2b829ec | ||
|
|
9713f96401 | ||
|
|
11f35ebf96 | ||
|
|
7d403aa181 | ||
|
|
64af810a4a | ||
|
|
30821905af | ||
|
|
a9dbff756b | ||
|
|
a6aba10d3d | ||
|
|
9c276c37fe | ||
|
|
6ab6c0fd4c | ||
|
|
b6b0fe3fff | ||
|
|
0d5825bda9 | ||
|
|
cdfb64631a | ||
|
|
d161c281c8 | ||
|
|
8fed5bf2a1 | ||
|
|
98d2e9bd27 | ||
|
|
a03af55edd | ||
|
|
86e2fd9aee | ||
|
|
97bd0e5e58 | ||
|
|
ceaba21986 | ||
|
|
172a77d942 | ||
|
|
4f9d2d2a7d | ||
|
|
8c929f6e05 | ||
|
|
3319b71f5b | ||
|
|
46ec028a5b | ||
|
|
0ce0ef3e5c | ||
|
|
375b071cb2 | ||
|
|
29e1417ff2 | ||
|
|
75db2bd366 | ||
|
|
60ca1efbda | ||
|
|
2692e4978b | ||
|
|
91982eb002 | ||
|
|
bb1dec76fa | ||
|
|
f618b8fcdc | ||
|
|
9147cab75b | ||
|
|
5f07bcc8e6 | ||
|
|
705cf2ea1b | ||
|
|
42c4394484 | ||
|
|
221221a3c1 | ||
|
|
9564166297 | ||
|
|
f5cf3c3c8e | ||
|
|
18f919fb6b | ||
|
|
0924835253 | ||
|
|
20d2e5c578 | ||
|
|
907801605c | ||
|
|
93bc684e8c | ||
|
|
a76c98d57e | ||
|
|
d937a800d0 | ||
|
|
d16f3a227f | ||
|
|
80c9a3eeda | ||
|
|
e68173b451 | ||
|
|
40c27d87f5 | ||
|
|
3c13b5049d | ||
|
|
8288d5e51f | ||
|
|
6e1449900a | ||
|
|
4ffbb18ab4 | ||
|
|
b27271b7a3 | ||
|
|
ebb6665f64 | ||
|
|
e4e5731ffd | ||
|
|
2ab5810f13 | ||
|
|
af934c5d09 | ||
|
|
1e0cf7c112 | ||
|
|
46859c93c9 | ||
|
|
ea1f9cb3b2 | ||
|
|
1641549016 | ||
|
|
716a5dbb8a | ||
|
|
af98cb11c5 | ||
|
|
9a4c2cf341 | ||
|
|
2bc3bcd102 | ||
|
|
d6c663f79d | ||
|
|
9ed86e5f53 | ||
|
|
303e0bc037 | ||
|
|
2cc24019f9 | ||
|
|
83ce774d19 | ||
|
|
2b4ee13b5e | ||
|
|
3a964561f0 | ||
|
|
6959f86632 | ||
|
|
537d373e10 | ||
|
|
cceadf222c | ||
|
|
cf5a4af623 | ||
|
|
39aea11c22 | ||
|
|
c2f1227700 | ||
|
|
900f14d37c | ||
|
|
598249b1d6 | ||
|
|
7ed15bdf04 | ||
|
|
2fc0ec0f72 | ||
|
|
5e9c2a669b | ||
|
|
b310521884 | ||
|
|
288945bf7e | ||
|
|
4fc07cff36 | ||
|
|
b884fe0e86 | ||
|
|
855858c236 | ||
|
|
c11a2a5419 | ||
|
|
773a6572af | ||
|
|
88ad373c9b | ||
|
|
51666464b9 | ||
|
|
5af9cf2f52 | ||
|
|
12c4ae4b10 | ||
|
|
4e1bef414a | ||
|
|
e896c18644 | ||
|
|
c852685e74 | ||
|
|
1e99797df8 | ||
|
|
52a4c986a8 | ||
|
|
c501728204 | ||
|
|
6b067fa6a7 | ||
|
|
a1cd5c53a9 | ||
|
|
a46d487e03 | ||
|
|
3deb6d3ab3 | ||
|
|
af34cdd5d2 | ||
|
|
6e1393235a | ||
|
|
343e0b54b9 | ||
|
|
ecb70cb6f7 | ||
|
|
ca50618af6 | ||
|
|
29c07ba83e | ||
|
|
45fbb83a9f | ||
|
|
ae7ba2df25 | ||
|
|
c3ef57cc32 | ||
|
|
7bb4ca5a14 | ||
|
|
063783d81d | ||
|
|
42116c9b65 | ||
|
|
a36e11973d | ||
|
|
5125568ea2 | ||
|
|
0fa164e50d | ||
|
|
cf814e81ee | ||
|
|
43a45f18ce | ||
|
|
ad51381063 | ||
|
|
0b0e4ce904 | ||
|
|
6a3e04d688 | ||
|
|
4107a17370 | ||
|
|
06b4d8f169 | ||
|
|
1c0c820746 | ||
|
|
d061403a28 | ||
|
|
5c092321a6 | ||
|
|
bdd3f61c1f | ||
|
|
8023557d6e | ||
|
|
074b0ced7a | ||
|
|
3864b1ac9b | ||
|
|
6e9b43457d | ||
|
|
ca1aec8920 | ||
|
|
acac580862 | ||
|
|
673e1b2980 | ||
|
|
f62157be72 | ||
|
|
f894ecf3b6 | ||
|
|
66dd4e28ad | ||
|
|
939dc1b0fb | ||
|
|
56bf5d38a1 | ||
|
|
d09b70b295 | ||
|
|
205180387a | ||
|
|
39c8cfeda5 | ||
|
|
f38a329be5 | ||
|
|
a0cd069539 | ||
|
|
bf306a2f01 | ||
|
|
c31f93a8d1 | ||
|
|
4730ab6309 | ||
|
|
1ae78ca98c | ||
|
|
d2379da478 | ||
|
|
0f64981b20 | ||
|
|
0002e49bb5 | ||
|
|
db13a60274 | ||
|
|
db0f11a359 | ||
|
|
ac7f43520b | ||
|
|
f67b9f5f6e | ||
|
|
c75156c4ce | ||
|
|
10270b5595 | ||
|
|
f7458572ed | ||
|
|
d57b7222b2 | ||
|
|
62e70a673a | ||
|
|
5e9eba6478 | ||
|
|
cb02dfe1a4 | ||
|
|
b50739e1af | ||
|
|
8da1b0212d | ||
|
|
ca1f2acb33 | ||
|
|
c15f966669 | ||
|
|
7705b8781a | ||
|
|
b2502746f0 | ||
|
|
ab68094386 | ||
|
|
bbec701223 | ||
|
|
b29d14e600 | ||
|
|
86e51c5cd1 | ||
|
|
cb8267be3f | ||
|
|
eaed43915c | ||
|
|
bd91fd2c38 | ||
|
|
1203b214cd | ||
|
|
c3fec15f11 | ||
|
|
0545653494 | ||
|
|
db2989bdb4 | ||
|
|
587bd00a19 | ||
|
|
960ff438e8 | ||
|
|
98e7ea85d3 | ||
|
|
2549e44710 | ||
|
|
4d32b563ca | ||
|
|
3a4b732977 | ||
|
|
500909a28e | ||
|
|
07753eb25b | ||
|
|
c6eaf3d010 | ||
|
|
6723fe8271 | ||
|
|
3348b70435 | ||
|
|
35a8527c16 | ||
|
|
7afc475290 | ||
|
|
789bceaa3a | ||
|
|
abbc043969 | ||
|
|
654e5762f1 | ||
|
|
507c3e3629 | ||
|
|
991dfeb2f2 | ||
|
|
26482fc2d3 | ||
|
|
e0ce6d9688 | ||
|
|
946595216a | ||
|
|
864b6bc56d | ||
|
|
6ea5b7581f | ||
|
|
f70b8f0c10 | ||
|
|
1593bcb537 | ||
|
|
bf7fc02c8d | ||
|
|
143702b92b | ||
|
|
c5ccc1a084 | ||
|
|
2ecb52a9b2 | ||
|
|
6439917cbe | ||
|
|
d21c18f657 | ||
|
|
25ef0039e4 | ||
|
|
e6981290bc | ||
|
|
75c3d8abbd | ||
|
|
d88683f498 | ||
|
|
40b9aa3a4c | ||
|
|
b6d1515d58 | ||
|
|
e01d4264e3 | ||
|
|
2117b65487 | ||
|
|
a7823b352f | ||
|
|
c543b62a08 | ||
|
|
3923b87f08 | ||
|
|
b7ecdadb83 | ||
|
|
5ff121e1ed | ||
|
|
f486e5448f | ||
|
|
c5aae98558 | ||
|
|
6d8a3b9897 | ||
|
|
6d98780e19 | ||
|
|
3ad2c46f3f | ||
|
|
a730cee7fd | ||
|
|
77c823c100 | ||
|
|
124f21c67a | ||
|
|
e46cf20dd3 | ||
|
|
4bef5e8313 | ||
|
|
22e93b0af4 | ||
|
|
5aeca9662b | ||
|
|
b996cf1f05 | ||
|
|
878a106877 | ||
|
|
45d36f86fd | ||
|
|
b108ae403a | ||
|
|
887ed66768 | ||
|
|
dac840a887 | ||
|
|
238de4ba8c | ||
|
|
9a7bdade43 | ||
|
|
aa84556204 | ||
|
|
6b68069fcd | ||
|
|
42c7034fb2 | ||
|
|
060c7e0145 | ||
|
|
b5b085dfb1 | ||
|
|
fc06ce9d7f | ||
|
|
d8d81b05a7 | ||
|
|
a60f42b1f2 | ||
|
|
6e18be88d0 | ||
|
|
b45e439c48 | ||
|
|
b87061c18c | ||
|
|
f78aca7752 | ||
|
|
3ccca2aa10 | ||
|
|
6d7c40eb76 | ||
|
|
da4cd7fb65 | ||
|
|
c97cda6b84 | ||
|
|
7a7fd4167a | ||
|
|
dffc1a43d5 | ||
|
|
36897fea1e | ||
|
|
c7b34735f0 | ||
|
|
5b07176c88 | ||
|
|
474b40d660 | ||
|
|
a62901b948 | ||
|
|
25d8746327 | ||
|
|
aff1698223 | ||
|
|
7f8941745f | ||
|
|
b858401098 | ||
|
|
d5a158b80f | ||
|
|
f315f284aa | ||
|
|
c367f5009d | ||
|
|
6db1e63bda | ||
|
|
e22ab2ede6 | ||
|
|
b7d7e0b682 | ||
|
|
96bba15f2f | ||
|
|
fcf965a595 | ||
|
|
e1a20d3c22 | ||
|
|
2abd7d8c5d | ||
|
|
5b8f73cdd7 | ||
|
|
7fd765421f | ||
|
|
d9d94af022 | ||
|
|
790b924e57 | ||
|
|
4a62f877df | ||
|
|
ac47c57bb7 | ||
|
|
3ace4199a1 | ||
|
|
e6bd7524c1 | ||
|
|
699c86e8c1 | ||
|
|
f40fa0ecea | ||
|
|
626f94686b | ||
|
|
752d13b1b1 | ||
|
|
54c0dc1b2b | ||
|
|
c5bc709898 | ||
|
|
ccdbb01513 | ||
|
|
5206d750ac | ||
|
|
a800e3df67 | ||
|
|
ccb1f87a20 | ||
|
|
c111da4681 | ||
|
|
9cc4e97a53 | ||
|
|
dca1c0b0f3 | ||
|
|
f06be6ed21 | ||
|
|
3c8ec2f42e | ||
|
|
7e193f7f52 | ||
|
|
7069b02929 | ||
|
|
66995db927 | ||
|
|
c36054ca1b | ||
|
|
3e07fbf3dc | ||
|
|
bf3fbe3e96 | ||
|
|
0a93d22bc8 | ||
|
|
f5b3d94d16 | ||
|
|
4d1a6994aa | ||
|
|
05c686782c | ||
|
|
85609ea742 | ||
|
|
20dabc0615 | ||
|
|
356dd9bc2b | ||
|
|
cd5d7534c4 | ||
|
|
b4f12fc933 | ||
|
|
cbea387ce0 | ||
|
|
345b155374 | ||
|
|
29d216950e | ||
|
|
321b04772c | ||
|
|
5b924aee98 | ||
|
|
46d44e3405 | ||
|
|
4d5332fe25 | ||
|
|
18bd4c54f4 | ||
|
|
31c7768ca0 | ||
|
|
6ec643e9d1 | ||
|
|
2b39f6f61c | ||
|
|
bf3ca13961 | ||
|
|
82026370ec | ||
|
|
6d49bf5346 | ||
|
|
67431d87fb | ||
|
|
fdf55221e6 | ||
|
|
07f277dd3b | ||
|
|
cf8f0603ca | ||
|
|
5592408ab8 | ||
|
|
a01617b45c | ||
|
|
7abb4087b3 | ||
|
|
dff15cf27a | ||
|
|
aa858137e5 | ||
|
|
45cb143202 | ||
|
|
7a9c6ab8c4 | ||
|
|
e2c26c292d | ||
|
|
be7c3fd00e | ||
|
|
7e5461a2cf | ||
|
|
6ee9010645 | ||
|
|
a23d5be056 | ||
|
|
97a6a1fdc2 | ||
|
|
c8f567347b | ||
|
|
74c1e7f69e | ||
|
|
15a5fc0cae | ||
|
|
f07c54d47c | ||
|
|
70446be108 | ||
|
|
d6d21fca56 | ||
|
|
8d7273924f | ||
|
|
ea64afbaa7 | ||
|
|
45da9837ec | ||
|
|
8c19b7d163 | ||
|
|
ab227a08d0 | ||
|
|
40d6e77964 | ||
|
|
9326e3f1b0 | ||
|
|
0e1eb3daf6 | ||
|
|
05daac12ed | ||
|
|
c5b24b4764 | ||
|
|
cc16548e5f | ||
|
|
291d65bb3e | ||
|
|
bd3ad03da6 | ||
|
|
5fa6788357 | ||
|
|
c5c5a98ac4 | ||
|
|
a1151143cf | ||
|
|
f5024984f7 | ||
|
|
f4880fd90d | ||
|
|
0ae61d5865 | ||
|
|
d3bd775a79 | ||
|
|
da546cfe7f | ||
|
|
a211933e83 | ||
|
|
1d40b5a821 | ||
|
|
33836daeb7 | ||
|
|
d921b0f6bd | ||
|
|
0607b95df6 | ||
|
|
0de6d0e046 | ||
|
|
98427345cf | ||
|
|
9fedaa9f77 | ||
|
|
bf4c2ecd33 | ||
|
|
f8c18cc1e0 | ||
|
|
458b900412 | ||
|
|
192c776e0b | ||
|
|
5cdec18863 | ||
|
|
15f856f951 | ||
|
|
01d52cef74 | ||
|
|
95563c8659 | ||
|
|
31d8c40eca | ||
|
|
56001ed272 | ||
|
|
d916fda04c | ||
|
|
cfae655068 | ||
|
|
5596565ec4 | ||
|
|
afa1aa5d93 | ||
|
|
e98c3d8393 | ||
|
|
6687b816f0 | ||
|
|
ea8035e854 | ||
|
|
54b0171d49 | ||
|
|
676d4277b9 | ||
|
|
a4b1da3ca2 | ||
|
|
9e9c16e770 | ||
|
|
dc87006fed | ||
|
|
b9b260f26a | ||
|
|
33fd6a5016 | ||
|
|
97cbccc2ba | ||
|
|
1ee4685d5d | ||
|
|
aba18232b1 | ||
|
|
0a02441b75 | ||
|
|
1be5b4c7ff | ||
|
|
a0ce0cf18a | ||
|
|
7c54e5d093 | ||
|
|
b825e51dab | ||
|
|
589855c393 | ||
|
|
4c546f2f53 | ||
|
|
3753fce912 | ||
|
|
4c02857ec5 | ||
|
|
33f87ff7d7 | ||
|
|
784dcf2a9a | ||
|
|
43ee943acb | ||
|
|
a769fd7d13 | ||
|
|
2c4fd00b16 | ||
|
|
264771fe98 | ||
|
|
ecd92dafef | ||
|
|
c8b6e4bea3 | ||
|
|
3756cb766e | ||
|
|
068d9ca60b | ||
|
|
93f632d8b8 | ||
|
|
bb44ce7e74 | ||
|
|
6986c8d8f7 | ||
|
|
fe95506db4 | ||
|
|
310ed76b18 | ||
|
|
98830d147f | ||
|
|
19c9177d7b | ||
|
|
f41c5f97f6 | ||
|
|
648c125697 | ||
|
|
0dc2b89897 | ||
|
|
83745f83a5 | ||
|
|
2f91fe4535 | ||
|
|
739f09059e | ||
|
|
c86f9f0f5f | ||
|
|
9470ca6bc5 | ||
|
|
2a92c4d5de | ||
|
|
bb6e892657 | ||
|
|
c9079b9299 | ||
|
|
b6963c1bf9 | ||
|
|
9c29df47bb | ||
|
|
fc146d3d00 | ||
|
|
1bf5a21678 | ||
|
|
011542dc2b | ||
|
|
489784104e | ||
|
|
3860634fd2 | ||
|
|
709c324e18 | ||
|
|
b75d24d92c | ||
|
|
ed80e9424c | ||
|
|
2fe1f2060a | ||
|
|
c6df820164 | ||
|
|
d6239822db | ||
|
|
bced9ffff9 | ||
|
|
d7d1c1544a | ||
|
|
7c1e8ce48c | ||
|
|
e3b0ca8ef6 | ||
|
|
9e266eb6d5 | ||
|
|
7231403e16 | ||
|
|
344a486fd7 | ||
|
|
4fd831875d | ||
|
|
0988d067ea | ||
|
|
44dbe475af | ||
|
|
bd24cf3ea4 | ||
|
|
b493a808fe | ||
|
|
54035d108d | ||
|
|
c5e8bc7e20 | ||
|
|
3bbb4779a3 | ||
|
|
1b3963ebea | ||
|
|
3b6dd7e15a | ||
|
|
757d2a3947 | ||
|
|
61b71143f2 | ||
|
|
1b343a36c9 | ||
|
|
8e94937060 | ||
|
|
e8ffebc006 | ||
|
|
2ca95eaa9f | ||
|
|
0dc5b4cdfc | ||
|
|
cc6cd96d8e | ||
|
|
4244d37625 | ||
|
|
0b766095d4 | ||
|
|
a4f212a18f | ||
|
|
caafb73190 | ||
|
|
09482799c9 | ||
|
|
37f93d1760 | ||
|
|
725f2e5204 | ||
|
|
967198fae0 | ||
|
|
43d57f6dcb | ||
|
|
6afa4db577 | ||
|
|
3b8c3fb29a | ||
|
|
921c3b0627 | ||
|
|
c0fadb45ab | ||
|
|
a1481fb179 | ||
|
|
987cd972d3 | ||
|
|
bdf25976a3 | ||
|
|
87c3aff4ce | ||
|
|
99350a957a | ||
|
|
319068dc7e | ||
|
|
cd18806c39 | ||
|
|
95b08b2023 | ||
|
|
0e70f76c86 | ||
|
|
4d414a2994 | ||
|
|
3d22772d4e | ||
|
|
0b381e2570 | ||
|
|
f2cc4311c5 | ||
|
|
e349671fdf | ||
|
|
01c02d5efa | ||
|
|
b62b1f3870 | ||
|
|
8844830859 | ||
|
|
0c51ee4b64 | ||
|
|
11920d5e31 | ||
|
|
848ea1eb63 | ||
|
|
a216519486 | ||
|
|
b04606c38e | ||
|
|
38072beea7 | ||
|
|
b843f1fa03 | ||
|
|
560d40e571 | ||
|
|
5f0b8161b7 | ||
|
|
062d482917 | ||
|
|
39693a27e3 | ||
|
|
7cd1eeac30 | ||
|
|
bafa473c8e | ||
|
|
750cf46b2e | ||
|
|
68885a4bbc | ||
|
|
bcc99a8904 | ||
|
|
59fbd98db3 | ||
|
|
b70ed425f1 | ||
|
|
45ef5811c8 | ||
|
|
3b137ac762 | ||
|
|
1ddb0caf73 | ||
|
|
ae4c6fe2dd | ||
|
|
b03fe438d0 | ||
|
|
db257af58e | ||
|
|
735368c71b | ||
|
|
9e04e3679b | ||
|
|
43b8414727 | ||
|
|
5a00187147 | ||
|
|
cb525c7c84 | ||
|
|
d88420dd03 | ||
|
|
b9a983f8e0 | ||
|
|
42431ea7db | ||
|
|
f9459e4abb | ||
|
|
72f917d611 | ||
|
|
9fd1d19e93 | ||
|
|
062af1ac08 | ||
|
|
41bd76e091 | ||
|
|
cfd3f4b199 | ||
|
|
79d38f9597 | ||
|
|
b3866559e1 | ||
|
|
4d186baa35 | ||
|
|
8ed3d5f3db | ||
|
|
f0c8f39b6d | ||
|
|
431db8fc9b | ||
|
|
ba252c5356 | ||
|
|
a2812c39c0 | ||
|
|
0490758820 | ||
|
|
7f56824b42 | ||
|
|
627da3a2bc | ||
|
|
9b36a5c8a6 | ||
|
|
c1cf2be533 | ||
|
|
e6b69042de | ||
|
|
109650faf3 | ||
|
|
e54eaab842 | ||
|
|
43b6297b5d | ||
|
|
c20f4f5adf | ||
|
|
dc1f222cd2 | ||
|
|
c2b687212c | ||
|
|
849913276d | ||
|
|
23579c1e4a | ||
|
|
e031161fd4 | ||
|
|
4800ee6c0a | ||
|
|
d3a7fef9b0 | ||
|
|
40822fe77a | ||
|
|
837b670213 | ||
|
|
57ce69f3fb | ||
|
|
be022c4894 | ||
|
|
8a366964bb | ||
|
|
ee86b68470 | ||
|
|
60352307aa | ||
|
|
3ebd2f746f | ||
|
|
1c1a65b637 | ||
|
|
010e60d029 | ||
|
|
7a25568861 | ||
|
|
5f4f913661 | ||
|
|
ccd0e34a53 | ||
|
|
72f1ffccd3 | ||
|
|
ea7a52945f | ||
|
|
89d4d1351a | ||
|
|
b757c91d93 | ||
|
|
27203d7a4d | ||
|
|
9ad4e18ac5 | ||
|
|
fcdc8f3ce7 | ||
|
|
78b994b84a | ||
|
|
58bfc677e2 | ||
|
|
7d17285a0c | ||
|
|
e9eb00a0d4 | ||
|
|
48d07af574 | ||
|
|
2fc62efd88 | ||
|
|
be516d75bd | ||
|
|
951d5fde85 | ||
|
|
1389abc052 | ||
|
|
19ad67a77f | ||
|
|
641f308344 | ||
|
|
9f097fa4d5 | ||
|
|
5ad362c52b | ||
|
|
614f238a61 | ||
|
|
dec91950bc | ||
|
|
6cef9c23f0 | ||
|
|
3f568bf136 | ||
|
|
5484b421ce | ||
|
|
02f21e07d3 | ||
|
|
fff1f23a83 | ||
|
|
a056ec0d38 | ||
|
|
2eb9e5dde3 | ||
|
|
627d2a4701 | ||
|
|
76895fe86d | ||
|
|
64c3c85780 | ||
|
|
7288348857 | ||
|
|
62e73299b1 | ||
|
|
fe76c41ed8 | ||
|
|
1a92edf8be | ||
|
|
b63b606a4e | ||
|
|
8e2ef3d22b | ||
|
|
c6c4a32283 | ||
|
|
b70b3b158e | ||
|
|
3d59ab8108 | ||
|
|
b6c3089510 | ||
|
|
bd92aac280 | ||
|
|
5299e802e9 | ||
|
|
8e5a57d7dd | ||
|
|
beaa324fb6 | ||
|
|
79e64fe206 | ||
|
|
93f525e3fe | ||
|
|
aacb803c64 | ||
|
|
8a0665b222 | ||
|
|
20e41a7f73 | ||
|
|
93a1699a35 | ||
|
|
c33c07e4af | ||
|
|
c7484d0cc9 | ||
|
|
fb85a7bb35 | ||
|
|
42ff9a4d34 | ||
|
|
005e9eae7c | ||
|
|
3e325debcc | ||
|
|
a221de9a2b | ||
|
|
32b0cc1865 | ||
|
|
bbf85f8a12 | ||
|
|
67a0172b28 | ||
|
|
fb19d4d45b | ||
|
|
a156b1af14 | ||
|
|
a604b4943c | ||
|
|
3f0b6435d9 | ||
|
|
e0f029e2cb | ||
|
|
89d3fd5fab | ||
|
|
a38b00be6b | ||
|
|
0e8d52b591 | ||
|
|
298c77740d | ||
|
|
c681aae8ee | ||
|
|
faef98b089 | ||
|
|
84a3e0a30b | ||
|
|
69bd553ce0 | ||
|
|
fd0c0f8975 | ||
|
|
860ceb06b4 | ||
|
|
ecf501bf72 | ||
|
|
81a2ed1e25 | ||
|
|
76ab28338a | ||
|
|
9a56c9630f | ||
|
|
53b9497c18 | ||
|
|
750b16b6ee | ||
|
|
0ee3e0779a | ||
|
|
333c2d9299 | ||
|
|
ad37ff5048 | ||
|
|
33f86f3bde | ||
|
|
8acb969a49 | ||
|
|
b74b5933b8 | ||
|
|
681c556b7e | ||
|
|
1746684e52 | ||
|
|
0b93d06555 | ||
|
|
8a8b8c7c27 | ||
|
|
6b6577006d | ||
|
|
23ee5e81c9 | ||
|
|
483f55e4b1 | ||
|
|
1bb1bc2553 | ||
|
|
a4e4e36f94 | ||
|
|
6849415812 | ||
|
|
86f6cb038e | ||
|
|
7480a1d6ce | ||
|
|
3cd10117dd | ||
|
|
0caf19d390 | ||
|
|
5c14ebb049 | ||
|
|
9717a736b1 | ||
|
|
9c9ab50d1a | ||
|
|
d4bcb8174e | ||
|
|
9e7fe773bd | ||
|
|
aca18fab0f | ||
|
|
691de01b79 | ||
|
|
3383f15142 | ||
|
|
84c1593889 | ||
|
|
3c80fa1e33 | ||
|
|
06b16a1deb | ||
|
|
4c4246fb09 | ||
|
|
364be1e9f6 | ||
|
|
f959ed71aa | ||
|
|
5c4326c302 | ||
|
|
125fc3a622 | ||
|
|
6b9e785db3 | ||
|
|
25d34e9a43 | ||
|
|
457d4aa1dc | ||
|
|
ff0c0992ff | ||
|
|
d379e012c4 | ||
|
|
151fff26fd | ||
|
|
3d0d561215 | ||
|
|
22d586ed7b | ||
|
|
6dc19b29e8 | ||
|
|
50975a87d4 | ||
|
|
ce721d9f0f | ||
|
|
20510a33f7 | ||
|
|
3abd9c8763 | ||
|
|
e9eff7420b | ||
|
|
64c250c9d8 | ||
|
|
8047f82bfd | ||
|
|
af6467fb3d | ||
|
|
3ff1664aec | ||
|
|
34ea2b44b8 | ||
|
|
6c8d851109 | ||
|
|
d678299a74 | ||
|
|
7aed0db2b6 | ||
|
|
0355524345 | ||
|
|
0a43e4672e | ||
|
|
71e0ccdfec | ||
|
|
1df33ac3c8 | ||
|
|
7334090ac1 | ||
|
|
6b0f044198 | ||
|
|
ddf54c9cf8 | ||
|
|
7c64e184e2 | ||
|
|
a904db033c | ||
|
|
b234856b02 | ||
|
|
89d51d2afc | ||
|
|
37cb9678e9 | ||
|
|
0500ff333a | ||
|
|
08528510ef | ||
|
|
ddbd03dc1e | ||
|
|
ade87f378a | ||
|
|
4db14b905f | ||
|
|
b669b31451 | ||
|
|
1cb2b62f81 | ||
|
|
e5828713cf | ||
|
|
d10cb84068 | ||
|
|
4222f8516f | ||
|
|
7f998c7611 | ||
|
|
db46000337 | ||
|
|
1aac8d8041 | ||
|
|
c59c8e05f7 | ||
|
|
4942d0a629 | ||
|
|
873b7715f4 | ||
|
|
98e7ed6920 | ||
|
|
046f5e645e | ||
|
|
f5e5a7094c | ||
|
|
154125fee6 | ||
|
|
9f8e960ebe | ||
|
|
4179b0be0a | ||
|
|
28bafa38db | ||
|
|
b07552565e | ||
|
|
c4427471d2 | ||
|
|
08f81c6784 | ||
|
|
a471e98aca | ||
|
|
75a8fcc8a0 | ||
|
|
46ef76c168 | ||
|
|
66637446c9 | ||
|
|
21efeb888a | ||
|
|
a4ee8b5322 | ||
|
|
36519ac47e | ||
|
|
3f514fceca | ||
|
|
c2249fdfac | ||
|
|
c610719a44 | ||
|
|
36a6c2461a | ||
|
|
c29f22c39e | ||
|
|
30d3062944 | ||
|
|
69ba75abf4 | ||
|
|
e4d486fec5 | ||
|
|
f242144dcf | ||
|
|
02dee2d664 | ||
|
|
a3dd2c3069 | ||
|
|
a23425e8aa | ||
|
|
be79ddc9a3 | ||
|
|
7d71015e8c | ||
|
|
ad54549b51 | ||
|
|
6cf032a164 | ||
|
|
6390d796ac | ||
|
|
98b8411905 | ||
|
|
ddf1029afa | ||
|
|
1effbc5cc9 | ||
|
|
414b645e9f | ||
|
|
398c76f496 | ||
|
|
1bc456dd95 | ||
|
|
2e8421884e | ||
|
|
70d9b193ac | ||
|
|
b49c11004a | ||
|
|
34843eea90 | ||
|
|
2d6d7f31e8 | ||
|
|
7a24cbff1c | ||
|
|
1e7eb2cf1c | ||
|
|
361256e016 | ||
|
|
8838dbd003 | ||
|
|
13a95e1f2b | ||
|
|
1aaa451a3e | ||
|
|
cbba81e54d | ||
|
|
370868dfac | ||
|
|
77f692aae2 | ||
|
|
9318e205ea | ||
|
|
ebcc717c19 | ||
|
|
4c16b564ee | ||
|
|
e2283d1453 | ||
|
|
d891801c5a | ||
|
|
de75386944 | ||
|
|
82dc37de50 | ||
|
|
b6fa7f62dc | ||
|
|
f9e0a95c5e | ||
|
|
b2c6e12647 | ||
|
|
caffb83780 | ||
|
|
8882cb5479 | ||
|
|
75dace2dee | ||
|
|
ad6487d042 | ||
|
|
a91604e8ab | ||
|
|
c364f7c643 | ||
|
|
53435ba184 | ||
|
|
25f8d5519b | ||
|
|
2e4fef6c66 | ||
|
|
80b2b7dc00 | ||
|
|
8585cd8e21 | ||
|
|
9fa2a7eeea | ||
|
|
2d1f74228d | ||
|
|
3d6f7aa0e1 | ||
|
|
3dea60366a | ||
|
|
d4d9a1df4c | ||
|
|
7d6975fd31 | ||
|
|
08be52ed17 | ||
|
|
682a7700c2 | ||
|
|
9d87009216 | ||
|
|
ef86838f62 | ||
|
|
35468233f8 | ||
|
|
26e229867d | ||
|
|
3a1578b3c6 | ||
|
|
d5e3d2cbbc | ||
|
|
c095248176 | ||
|
|
44601c8954 | ||
|
|
135dbb8f07 | ||
|
|
c95682a0c7 | ||
|
|
d177b9f7fa | ||
|
|
9b57615d94 | ||
|
|
c03f3eacd1 | ||
|
|
a26e395932 | ||
|
|
0870b87c96 | ||
|
|
b52a44a7dd | ||
|
|
0a290aafef | ||
|
|
9014d4c410 | ||
|
|
60e58b4f5f | ||
|
|
620e74a6aa | ||
|
|
efa287ed35 | ||
|
|
a24eb9d9b0 | ||
|
|
bd3dab8aae | ||
|
|
4fe1ebaa5b | ||
|
|
c5e944744b | ||
|
|
0c396181f7 | ||
|
|
0034474219 | ||
|
|
8136ad8287 | ||
|
|
681940d466 | ||
|
|
16488506e8 | ||
|
|
122fccc041 | ||
|
|
9d0ad35403 | ||
|
|
f9ec97e026 | ||
|
|
95495a2647 | ||
|
|
e3310a605c | ||
|
|
b55719bf28 | ||
|
|
b957b51279 | ||
|
|
90bcfab369 | ||
|
|
f8a8e30641 | ||
|
|
25cb98e7a7 | ||
|
|
03e1bb7cf9 | ||
|
|
85dbb24f3a | ||
|
|
d817635782 | ||
|
|
2f4f237810 | ||
|
|
5ac94d810f | ||
|
|
39dc46dc25 | ||
|
|
0d9cf725f7 | ||
|
|
e55dbead5b | ||
|
|
7d046e5b30 | ||
|
|
8b4693cf66 | ||
|
|
a1172c9a82 | ||
|
|
1ed2bd33f0 | ||
|
|
4c159bd0ba | ||
|
|
050654b2a9 | ||
|
|
61b261e1b2 | ||
|
|
017b010206 | ||
|
|
00f5189f58 | ||
|
|
4a8309ed1f | ||
|
|
76cfc31a1d | ||
|
|
d9ec434699 | ||
|
|
239f3c40be | ||
|
|
09c8c6e670 | ||
|
|
7e4ad01c94 | ||
|
|
ed98e269ef | ||
|
|
b47d63334f | ||
|
|
5e2a3a5aea | ||
|
|
1a7eb21fc7 | ||
|
|
834a51cdc9 | ||
|
|
1b69d99c06 | ||
|
|
ad189933c6 | ||
|
|
9d86ff32de | ||
|
|
278bb57a58 | ||
|
|
0ba494e0ba | ||
|
|
8b247054bb | ||
|
|
7c5c8e4e0d | ||
|
|
ad106a27f3 | ||
|
|
9d6f61b49e | ||
|
|
02368954a0 | ||
|
|
b477a35a01 | ||
|
|
16622887de | ||
|
|
9059d1fb17 | ||
|
|
df2b008d82 | ||
|
|
0da871efd0 | ||
|
|
1c55349f81 | ||
|
|
9309fa1e81 | ||
|
|
5996189f91 | ||
|
|
bd2b984bfb | ||
|
|
194409a117 | ||
|
|
27978b216d | ||
|
|
c38fa77ce6 | ||
|
|
3eb49f7422 | ||
|
|
1989d615d2 | ||
|
|
239412d265 | ||
|
|
375a419a9e | ||
|
|
875c8ab424 | ||
|
|
c9bfc810ce | ||
|
|
46ecb16949 | ||
|
|
f6dc16f17b | ||
|
|
4eef42f730 | ||
|
|
8612d9a771 | ||
|
|
0caff054f5 | ||
|
|
4aa91ad599 | ||
|
|
7a0864f5c2 | ||
|
|
73dc0dfcf6 | ||
|
|
1ff9a69339 | ||
|
|
179eb5d847 | ||
|
|
52c868828c | ||
|
|
7eea4615b6 | ||
|
|
d9b351df1a | ||
|
|
d6a785b645 | ||
|
|
79db828a01 | ||
|
|
a5ffb0f8dc | ||
|
|
9492fcde74 | ||
|
|
d2456ce4cd | ||
|
|
7de27abc8d | ||
|
|
d8155bc8eb | ||
|
|
cf08e52a92 | ||
|
|
768398b991 | ||
|
|
24c20a19f1 | ||
|
|
8fbcbcd4c0 | ||
|
|
e0da5bb943 | ||
|
|
36fbc4fb82 | ||
|
|
cb11051f42 | ||
|
|
a824781d14 | ||
|
|
600a2c6748 | ||
|
|
77df64bfb5 | ||
|
|
2d6e54903c | ||
|
|
baa2b83df9 | ||
|
|
1ff02446af | ||
|
|
b58c6ba762 | ||
|
|
611a902000 | ||
|
|
c1b3f9dd29 | ||
|
|
7c5a88a6a6 | ||
|
|
be9abfef58 | ||
|
|
b549c9377e | ||
|
|
a5b00dbf74 | ||
|
|
90e2e14cd7 | ||
|
|
14bb245424 | ||
|
|
b63a0f3a45 | ||
|
|
e1f8842d7f | ||
|
|
3dda5fb268 | ||
|
|
248e0c5240 | ||
|
|
0297a43de6 | ||
|
|
2b4f66e0cf | ||
|
|
e622af2cc3 | ||
|
|
f527b1b5a6 | ||
|
|
c15b13a107 | ||
|
|
bc06acdd25 | ||
|
|
5252870733 | ||
|
|
3cac6a47a5 | ||
|
|
49bba9bf98 | ||
|
|
f4d12e4e5e | ||
|
|
d305211a36 | ||
|
|
9ec44d6f97 | ||
|
|
175bb3ee01 | ||
|
|
036c78750f | ||
|
|
a18de9de7d | ||
|
|
59fbbd5987 | ||
|
|
7e89fbc907 | ||
|
|
0956f240b3 | ||
|
|
f9db97c6b0 | ||
|
|
a2443c4ac1 | ||
|
|
095bd95044 | ||
|
|
b569209647 | ||
|
|
9057cac2b9 | ||
|
|
f9a6c685df | ||
|
|
208eb4f454 | ||
|
|
b3cb9e6714 | ||
|
|
5f9233f9b7 | ||
|
|
16447ae597 | ||
|
|
103edd5260 | ||
|
|
928089bf0f | ||
|
|
e5bd74695a | ||
|
|
f796969465 | ||
|
|
10756175b7 | ||
|
|
5637a71486 |
@@ -1,3 +0,0 @@
|
|||||||
comment:
|
|
||||||
layout: "condensed_header, condensed_files, condensed_footer"
|
|
||||||
hide_project_coverage: TRUE
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
[run]
|
|
||||||
omit =
|
|
||||||
*/site-packages/*
|
|
||||||
*/dist-packages/*
|
|
||||||
your_package_name/tests/*
|
|
||||||
@@ -1,9 +1,8 @@
|
|||||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
|
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
|
||||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||||
# github acions
|
# github actions
|
||||||
.github/
|
.github/
|
||||||
.*ignore
|
.*ignore
|
||||||
.git/
|
|
||||||
# User-specific stuff
|
# User-specific stuff
|
||||||
.idea/
|
.idea/
|
||||||
# Byte-compiled / optimized / DLL files
|
# Byte-compiled / optimized / DLL files
|
||||||
@@ -15,6 +14,9 @@ env/
|
|||||||
venv*/
|
venv*/
|
||||||
ENV/
|
ENV/
|
||||||
.conda/
|
.conda/
|
||||||
README*.md
|
|
||||||
dashboard/
|
dashboard/
|
||||||
data/
|
data/
|
||||||
|
changelogs/
|
||||||
|
tests/
|
||||||
|
.ruff_cache/
|
||||||
|
.astrbot
|
||||||
15
.github/FUNDING.yml
vendored
Normal file
15
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
# These are supported funding model platforms
|
||||||
|
|
||||||
|
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
|
||||||
|
patreon: # Replace with a single Patreon username
|
||||||
|
open_collective: astrbot
|
||||||
|
ko_fi: # Replace with a single Ko-fi username
|
||||||
|
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||||
|
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||||
|
liberapay: # Replace with a single Liberapay username
|
||||||
|
issuehunt: # Replace with a single IssueHunt username
|
||||||
|
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
|
||||||
|
polar: # Replace with a single Polar username
|
||||||
|
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
|
||||||
|
thanks_dev: # Replace with a single thanks.dev username
|
||||||
|
custom: ['https://afdian.com/a/astrbot_team']
|
||||||
57
.github/ISSUE_TEMPLATE/PLUGIN_PUBLISH.yml
vendored
Normal file
57
.github/ISSUE_TEMPLATE/PLUGIN_PUBLISH.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
name: 🥳 发布插件
|
||||||
|
description: 提交插件到插件市场
|
||||||
|
title: "[Plugin] 插件名"
|
||||||
|
labels: ["plugin-publish"]
|
||||||
|
assignees: []
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
欢迎发布插件到插件市场!
|
||||||
|
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
## 插件基本信息
|
||||||
|
|
||||||
|
请将插件信息填写到下方的 JSON 代码块中。其中 `tags`(插件标签)和 `social_link`(社交链接)选填。
|
||||||
|
|
||||||
|
不熟悉 JSON ?可以从 [此处](https://plugins.astrbot.app/submit) 生成 JSON ,生成后记得复制粘贴过来.
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
id: plugin-info
|
||||||
|
attributes:
|
||||||
|
label: 插件信息
|
||||||
|
description: 请在下方代码块中填写您的插件信息,确保反引号包裹了JSON
|
||||||
|
value: |
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "插件名,请以 astrbot_plugin_ 开头",
|
||||||
|
"display_name": "用于展示的插件名,方便人类阅读",
|
||||||
|
"desc": "插件的简短介绍",
|
||||||
|
"author": "作者名",
|
||||||
|
"repo": "插件仓库链接",
|
||||||
|
"tags": [],
|
||||||
|
"social_link": "",
|
||||||
|
}
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
## 检查
|
||||||
|
|
||||||
|
- type: checkboxes
|
||||||
|
id: checks
|
||||||
|
attributes:
|
||||||
|
label: 插件检查清单
|
||||||
|
description: 请确认以下所有项目
|
||||||
|
options:
|
||||||
|
- label: 我的插件经过完整的测试
|
||||||
|
required: true
|
||||||
|
- label: 我的插件不包含恶意代码
|
||||||
|
required: true
|
||||||
|
- label: 我已阅读并同意遵守该项目的 [行为准则](https://docs.github.com/zh/site-policy/github-terms/github-community-code-of-conduct)。
|
||||||
|
required: true
|
||||||
14
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
14
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
@@ -6,13 +6,13 @@ body:
|
|||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: |
|
||||||
感谢您抽出时间报告问题!请准确解释您的问题。如果可能,请提供一个可复现的片段(这有助于更快地解决问题)。
|
感谢您抽出时间报告问题!请准确解释您的问题。如果可能,请提供一个可复现的片段(这有助于更快地解决问题)。请注意,不详细 / 没有日志的 issue 会被直接关闭,谢谢理解。
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: 发生了什么
|
label: 发生了什么
|
||||||
description: 描述你遇到的异常
|
description: 描述你遇到的异常
|
||||||
placeholder: >
|
placeholder: >
|
||||||
一个清晰且具体的描述这个异常是什么。
|
一个清晰且具体的描述这个异常是什么。请注意,不详细 / 没有日志的 issue 会被直接关闭,谢谢理解。
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
@@ -28,7 +28,7 @@ body:
|
|||||||
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: AstrBot 版本与部署方式
|
label: AstrBot 版本、部署方式(如 Windows Docker Desktop 部署)、使用的提供商、使用的消息平台适配器
|
||||||
description: >
|
description: >
|
||||||
请提供您的 AstrBot 版本和部署方式。
|
请提供您的 AstrBot 版本和部署方式。
|
||||||
placeholder: >
|
placeholder: >
|
||||||
@@ -53,9 +53,9 @@ body:
|
|||||||
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: 额外信息
|
label: 报错日志
|
||||||
description: >
|
description: >
|
||||||
任何额外信息,如报错日志、截图等。
|
如报错日志、截图等。请提供完整的 Debug 级别的日志,不要介意它很长!请注意,不详细 / 没有日志的 issue 会被直接关闭,谢谢理解。
|
||||||
placeholder: >
|
placeholder: >
|
||||||
请提供完整的报错日志或截图。
|
请提供完整的报错日志或截图。
|
||||||
validations:
|
validations:
|
||||||
@@ -65,7 +65,7 @@ body:
|
|||||||
attributes:
|
attributes:
|
||||||
label: 你愿意提交 PR 吗?
|
label: 你愿意提交 PR 吗?
|
||||||
description: >
|
description: >
|
||||||
这绝对不是必需的,但我们很乐意在贡献过程中为您提供指导特别是如果你已经很好地理解了如何实现修复。
|
这不是必需的,但我们很乐意在贡献过程中为您提供指导特别是如果你已经很好地理解了如何实现修复。
|
||||||
options:
|
options:
|
||||||
- label: 是的,我愿意提交 PR!
|
- label: 是的,我愿意提交 PR!
|
||||||
|
|
||||||
@@ -79,4 +79,4 @@ body:
|
|||||||
|
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
value: "感谢您填写我们的表单!"
|
value: "感谢您填写我们的表单!"
|
||||||
|
|||||||
48
.github/PULL_REQUEST_TEMPLATE.md
vendored
48
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,10 +1,46 @@
|
|||||||
<!-- 如果有的话,指定这个 PR 要解决的 ISSUE -->
|
<!-- 如果有的话,请指定此 PR 旨在解决的 ISSUE 编号。 -->
|
||||||
修复了 #XYZ
|
<!-- If applicable, please specify the ISSUE number this PR aims to resolve. -->
|
||||||
|
|
||||||
### Motivation
|
fixes #XYZ
|
||||||
|
|
||||||
<!--解释为什么要改动-->
|
---
|
||||||
|
|
||||||
### Modifications
|
### Motivation / 动机
|
||||||
|
|
||||||
<!--简单解释你的改动-->
|
<!--请描述此项更改的动机:它解决了什么问题?(例如:修复了 XX 错误,添加了 YY 功能)-->
|
||||||
|
<!--Please describe the motivation for this change: What problem does it solve? (e.g., Fixes XX bug, adds YY feature)-->
|
||||||
|
|
||||||
|
### Modifications / 改动点
|
||||||
|
|
||||||
|
<!--请总结你的改动:哪些核心文件被修改了?实现了什么功能?-->
|
||||||
|
<!--Please summarize your changes: What core files were modified? What functionality was implemented?-->
|
||||||
|
|
||||||
|
### Verification Steps / 验证步骤
|
||||||
|
|
||||||
|
<!--请为审查者 (Reviewer) 提供清晰、可复现的验证步骤(例如:1. 导航到... 2. 点击...)。-->
|
||||||
|
<!--Please provide clear and reproducible verification steps for the Reviewer (e.g., 1. Navigate to... 2. Click...).-->
|
||||||
|
|
||||||
|
### Screenshots or Test Results / 运行截图或测试结果
|
||||||
|
|
||||||
|
<!--请粘贴截图、GIF 或测试日志,作为执行“验证步骤”的证据,证明此改动有效。-->
|
||||||
|
<!--Please paste screenshots, GIFs, or test logs here as evidence of executing the "Verification Steps" to prove this change is effective.-->
|
||||||
|
|
||||||
|
### Compatibility & Breaking Changes / 兼容性与破坏性变更
|
||||||
|
|
||||||
|
<!--请说明此变更的兼容性:哪些是破坏性变更?哪些地方做了向后兼容处理?是否提供了数据迁移方法?-->
|
||||||
|
<!--Please explain the compatibility of this change: What are the breaking changes? What backward-compatible measures were taken? Are data migration paths provided?-->
|
||||||
|
|
||||||
|
- [ ] 这是一个破坏性变更 (Breaking Change)。/ This is a breaking change.
|
||||||
|
- [ ] 这不是一个破坏性变更。/ This is NOT a breaking change.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Checklist / 检查清单
|
||||||
|
|
||||||
|
<!--如果分支被合并,您的代码将服务于数万名用户!在提交前,请核查一下几点内容。-->
|
||||||
|
<!--If merged, your code will serve tens of thousands of users! Please double-check the following items before submitting.-->
|
||||||
|
|
||||||
|
- [ ] 😊 如果 PR 中有新加入的功能,已经通过 Issue / 邮件等方式和作者讨论过。/ If there are new features added in the PR, I have discussed it with the authors through issues/emails, etc.
|
||||||
|
- [ ] 👀 我的更改经过了良好的测试,**并已在上方提供了“验证步骤”和“运行截图”**。/ My changes have been well-tested, **and "Verification Steps" and "Screenshots" have been provided above**.
|
||||||
|
- [ ] 🤓 我确保没有引入新依赖库,或者引入了新依赖库的同时将其添加到了 `requirements.txt` 和 `pyproject.toml` 文件相应位置。/ I have ensured that no new dependencies are introduced, OR if new dependencies are introduced, they have been added to the appropriate locations in `requirements.txt` and `pyproject.toml`.
|
||||||
|
- [ ] 😮 我的更改没有引入恶意代码。/ My changes do not introduce malicious code.
|
||||||
|
|||||||
38
.github/auto_assign.yml
vendored
Normal file
38
.github/auto_assign.yml
vendored
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# Set to true to add reviewers to pull requests
|
||||||
|
addReviewers: true
|
||||||
|
|
||||||
|
# Set to true to add assignees to pull requests
|
||||||
|
addAssignees: false
|
||||||
|
|
||||||
|
# A list of reviewers to be added to pull requests (GitHub user name)
|
||||||
|
reviewers:
|
||||||
|
- Soulter
|
||||||
|
- Raven95676
|
||||||
|
- Larch-C
|
||||||
|
- anka-afk
|
||||||
|
- advent259141
|
||||||
|
- Fridemn
|
||||||
|
- LIghtJUNction
|
||||||
|
# - zouyonghe
|
||||||
|
|
||||||
|
# A number of reviewers added to the pull request
|
||||||
|
# Set 0 to add all the reviewers (default: 0)
|
||||||
|
numberOfReviewers: 2
|
||||||
|
|
||||||
|
# A list of assignees, overrides reviewers if set
|
||||||
|
# assignees:
|
||||||
|
# - assigneeA
|
||||||
|
|
||||||
|
# A number of assignees to add to the pull request
|
||||||
|
# Set to 0 to add all of the assignees.
|
||||||
|
# Uses numberOfReviewers if unset.
|
||||||
|
# numberOfAssignees: 2
|
||||||
|
|
||||||
|
# A list of keywords to be skipped the process that add reviewers if pull requests include it
|
||||||
|
skipKeywords:
|
||||||
|
- wip
|
||||||
|
- draft
|
||||||
|
|
||||||
|
# A list of users to be skipped by both the add reviewers and add assignees processes
|
||||||
|
# skipUsers:
|
||||||
|
# - dependabot[bot]
|
||||||
63
.github/copilot-instructions.md
vendored
Normal file
63
.github/copilot-instructions.md
vendored
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
# AstrBot Development Instructions
|
||||||
|
|
||||||
|
AstrBot is a multi-platform LLM chatbot and development framework written in Python with a Vue.js dashboard. It supports multiple messaging platforms (QQ, Telegram, Discord, etc.) and various LLM providers (OpenAI, Anthropic, Google Gemini, etc.).
|
||||||
|
|
||||||
|
Always reference these instructions first and fallback to search or bash commands only when you encounter unexpected information that does not match the info here.
|
||||||
|
|
||||||
|
## Working Effectively
|
||||||
|
|
||||||
|
### Bootstrap and Install Dependencies
|
||||||
|
- **Python 3.10+ required** - Check `.python-version` file
|
||||||
|
- Install UV package manager: `pip install uv`
|
||||||
|
- Install project dependencies: `uv sync` -- takes 6-7 minutes. NEVER CANCEL. Set timeout to 10+ minutes.
|
||||||
|
- Create required directories: `mkdir -p data/plugins data/config data/temp`
|
||||||
|
|
||||||
|
### Running the Application
|
||||||
|
- Run main application: `uv run main.py` -- starts in ~3 seconds
|
||||||
|
- Application creates WebUI on http://localhost:6185 (default credentials: `astrbot`/`astrbot`)
|
||||||
|
- Application loads plugins automatically from `packages/` and `data/plugins/` directories
|
||||||
|
|
||||||
|
### Dashboard Build (Vue.js/Node.js)
|
||||||
|
- **Prerequisites**: Node.js 20+ and npm 10+ required
|
||||||
|
- Navigate to dashboard: `cd dashboard`
|
||||||
|
- Install dashboard dependencies: `npm install` -- takes 2-3 minutes. NEVER CANCEL. Set timeout to 5+ minutes.
|
||||||
|
- Build dashboard: `npm run build` -- takes 25-30 seconds. NEVER CANCEL.
|
||||||
|
- Dashboard creates optimized production build in `dashboard/dist/`
|
||||||
|
|
||||||
|
### Testing
|
||||||
|
- Do not generate test files for now.
|
||||||
|
|
||||||
|
### Code Quality and Linting
|
||||||
|
- Install ruff linter: `uv add --dev ruff`
|
||||||
|
- Check code style: `uv run ruff check .` -- takes <1 second
|
||||||
|
- Check formatting: `uv run ruff format --check .` -- takes <1 second
|
||||||
|
- Fix formatting: `uv run ruff format .`
|
||||||
|
- **ALWAYS** run `uv run ruff check .` and `uv run ruff format .` before committing changes
|
||||||
|
|
||||||
|
### Plugin Development
|
||||||
|
- Plugins load from `packages/` (built-in) and `data/plugins/` (user-installed)
|
||||||
|
- Plugin system supports function tools and message handlers
|
||||||
|
- Key plugins: python_interpreter, web_searcher, astrbot, reminder, session_controller
|
||||||
|
|
||||||
|
### Common Issues and Workarounds
|
||||||
|
- **Dashboard download fails**: Known issue with "division by zero" error - application still works
|
||||||
|
- **Import errors in tests**: Ensure `uv run` is used to run tests in proper environment
|
||||||
|
=- **Build timeouts**: Always set appropriate timeouts (10+ minutes for uv sync, 5+ minutes for npm install)
|
||||||
|
|
||||||
|
## CI/CD Integration
|
||||||
|
- GitHub Actions workflows in `.github/workflows/`
|
||||||
|
- Docker builds supported via `Dockerfile`
|
||||||
|
- Pre-commit hooks enforce ruff formatting and linting
|
||||||
|
|
||||||
|
## Docker Support
|
||||||
|
- Primary deployment method: `docker run soulter/astrbot:latest`
|
||||||
|
- Compose file available: `compose.yml`
|
||||||
|
- Exposes ports: 6185 (WebUI), 6195 (WeChat), 6199 (QQ), etc.
|
||||||
|
- Volume mount required: `./data:/AstrBot/data`
|
||||||
|
|
||||||
|
## Multi-language Support
|
||||||
|
- Documentation in Chinese (README.md), English (README_en.md), Japanese (README_ja.md)
|
||||||
|
- UI supports internationalization
|
||||||
|
- Default language is Chinese
|
||||||
|
|
||||||
|
Remember: This is a production chatbot framework with real users. Always test thoroughly and ensure changes don't break existing functionality.
|
||||||
13
.github/dependabot.yml
vendored
Normal file
13
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Keep GitHub Actions up to date with GitHub's Dependabot...
|
||||||
|
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/keeping-your-actions-up-to-date-with-dependabot
|
||||||
|
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
|
||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: github-actions
|
||||||
|
directory: /
|
||||||
|
groups:
|
||||||
|
github-actions:
|
||||||
|
patterns:
|
||||||
|
- "*" # Group all Actions updates into a single larger pull request
|
||||||
|
schedule:
|
||||||
|
interval: weekly
|
||||||
65
.github/workflows/auto_release.yml
vendored
65
.github/workflows/auto_release.yml
vendored
@@ -7,13 +7,13 @@ on:
|
|||||||
name: Auto Release
|
name: Auto Release
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build-and-publish-to-github-release:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v5
|
||||||
|
|
||||||
- name: Dashboard Build
|
- name: Dashboard Build
|
||||||
run: |
|
run: |
|
||||||
@@ -23,13 +23,70 @@ jobs:
|
|||||||
echo "COMMIT_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
echo "COMMIT_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||||
echo ${{ github.ref_name }} > dist/assets/version
|
echo ${{ github.ref_name }} > dist/assets/version
|
||||||
zip -r dist.zip dist
|
zip -r dist.zip dist
|
||||||
|
|
||||||
|
- name: Upload to Cloudflare R2
|
||||||
|
env:
|
||||||
|
R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }}
|
||||||
|
R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||||
|
R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||||
|
R2_BUCKET_NAME: "astrbot"
|
||||||
|
R2_OBJECT_NAME: "astrbot-webui-latest.zip"
|
||||||
|
VERSION_TAG: ${{ github.ref_name }}
|
||||||
|
run: |
|
||||||
|
echo "Installing rclone..."
|
||||||
|
curl https://rclone.org/install.sh | sudo bash
|
||||||
|
|
||||||
|
echo "Configuring rclone remote..."
|
||||||
|
mkdir -p ~/.config/rclone
|
||||||
|
cat <<EOF > ~/.config/rclone/rclone.conf
|
||||||
|
[r2]
|
||||||
|
type = s3
|
||||||
|
provider = Cloudflare
|
||||||
|
access_key_id = $R2_ACCESS_KEY_ID
|
||||||
|
secret_access_key = $R2_SECRET_ACCESS_KEY
|
||||||
|
endpoint = https://${R2_ACCOUNT_ID}.r2.cloudflarestorage.com
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo "Uploading dist.zip to R2 bucket: $R2_BUCKET_NAME/$R2_OBJECT_NAME"
|
||||||
|
mv dashboard/dist.zip dashboard/$R2_OBJECT_NAME
|
||||||
|
rclone copy dashboard/$R2_OBJECT_NAME r2:$R2_BUCKET_NAME --progress
|
||||||
|
mv dashboard/$R2_OBJECT_NAME dashboard/astrbot-webui-${VERSION_TAG}.zip
|
||||||
|
rclone copy dashboard/astrbot-webui-${VERSION_TAG}.zip r2:$R2_BUCKET_NAME --progress
|
||||||
|
mv dashboard/astrbot-webui-${VERSION_TAG}.zip dashboard/dist.zip
|
||||||
|
|
||||||
- name: Fetch Changelog
|
- name: Fetch Changelog
|
||||||
run: |
|
run: |
|
||||||
echo "changelog=changelogs/${{github.ref_name}}.md" >> "$GITHUB_ENV"
|
echo "changelog=changelogs/${{github.ref_name}}.md" >> "$GITHUB_ENV"
|
||||||
|
|
||||||
- name: Create Release
|
- name: Create GitHub Release
|
||||||
uses: ncipollo/release-action@v1
|
uses: ncipollo/release-action@v1
|
||||||
with:
|
with:
|
||||||
bodyFile: ${{ env.changelog }}
|
bodyFile: ${{ env.changelog }}
|
||||||
artifacts: "dashboard/dist.zip"
|
artifacts: "dashboard/dist.zip"
|
||||||
|
|
||||||
|
build-and-publish-to-pypi:
|
||||||
|
# 构建并发布到 PyPI
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build-and-publish-to-github-release
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v5
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: '3.10'
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
run: |
|
||||||
|
python -m pip install uv
|
||||||
|
|
||||||
|
- name: Build package
|
||||||
|
run: |
|
||||||
|
uv build
|
||||||
|
|
||||||
|
- name: Publish to PyPI
|
||||||
|
env:
|
||||||
|
UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||||
|
run: |
|
||||||
|
uv publish
|
||||||
|
|||||||
34
.github/workflows/code-format.yml
vendored
Normal file
34
.github/workflows/code-format.yml
vendored
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
name: Code Format Check
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches: [ master ]
|
||||||
|
push:
|
||||||
|
branches: [ master ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
format-check:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v5
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: '3.10'
|
||||||
|
|
||||||
|
- name: Install UV
|
||||||
|
run: pip install uv
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: uv sync
|
||||||
|
|
||||||
|
- name: Check code formatting with ruff
|
||||||
|
run: |
|
||||||
|
uv run ruff format --check .
|
||||||
|
|
||||||
|
- name: Check code style with ruff
|
||||||
|
run: |
|
||||||
|
uv run ruff check .
|
||||||
6
.github/workflows/codeql.yml
vendored
6
.github/workflows/codeql.yml
vendored
@@ -56,11 +56,11 @@ jobs:
|
|||||||
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
|
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v5
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v3
|
uses: github/codeql-action/init@v4
|
||||||
with:
|
with:
|
||||||
languages: ${{ matrix.language }}
|
languages: ${{ matrix.language }}
|
||||||
build-mode: ${{ matrix.build-mode }}
|
build-mode: ${{ matrix.build-mode }}
|
||||||
@@ -88,6 +88,6 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v3
|
uses: github/codeql-action/analyze@v4
|
||||||
with:
|
with:
|
||||||
category: "/language:${{matrix.language}}"
|
category: "/language:${{matrix.language}}"
|
||||||
|
|||||||
24
.github/workflows/coverage_test.yml
vendored
24
.github/workflows/coverage_test.yml
vendored
@@ -1,6 +1,6 @@
|
|||||||
name: Run tests and upload coverage
|
name: Run tests and upload coverage
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- master
|
- master
|
||||||
@@ -8,6 +8,7 @@ on:
|
|||||||
- 'README.md'
|
- 'README.md'
|
||||||
- 'changelogs/**'
|
- 'changelogs/**'
|
||||||
- 'dashboard/**'
|
- 'dashboard/**'
|
||||||
|
pull_request:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
@@ -16,30 +17,29 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v5
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v4
|
uses: actions/setup-python@v6
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --upgrade pip
|
python -m pip install --upgrade pip
|
||||||
pip install -r requirements.txt
|
pip install pytest pytest-asyncio pytest-cov
|
||||||
pip install pytest pytest-cov pytest-asyncio
|
pip install --editable .
|
||||||
|
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: |
|
run: |
|
||||||
mkdir data
|
mkdir -p data/plugins
|
||||||
mkdir data/plugins
|
mkdir -p data/config
|
||||||
mkdir data/config
|
mkdir -p data/temp
|
||||||
mkdir data/temp
|
|
||||||
export TESTING=true
|
export TESTING=true
|
||||||
export ZHIPU_API_KEY=${{ secrets.OPENAI_API_KEY }}
|
export ZHIPU_API_KEY=${{ secrets.OPENAI_API_KEY }}
|
||||||
PYTHONPATH=./ pytest --cov=. tests/ -v -o log_cli=true -o log_level=DEBUG
|
pytest --cov=. -v -o log_cli=true -o log_level=DEBUG
|
||||||
|
|
||||||
- name: Upload results to Codecov
|
- name: Upload results to Codecov
|
||||||
uses: codecov/codecov-action@v4
|
uses: codecov/codecov-action@v5
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
|||||||
55
.github/workflows/dashboard_ci.yml
vendored
Normal file
55
.github/workflows/dashboard_ci.yml
vendored
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
name: AstrBot Dashboard CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ "master" ]
|
||||||
|
pull_request:
|
||||||
|
branches: [ "master" ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v5
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v6
|
||||||
|
with:
|
||||||
|
node-version: 'latest'
|
||||||
|
|
||||||
|
- name: npm install, build
|
||||||
|
run: |
|
||||||
|
cd dashboard
|
||||||
|
npm install pnpm -g
|
||||||
|
pnpm install
|
||||||
|
pnpm i --save-dev @types/markdown-it
|
||||||
|
pnpm run build
|
||||||
|
|
||||||
|
- name: Inject Commit SHA
|
||||||
|
id: get_sha
|
||||||
|
run: |
|
||||||
|
echo "COMMIT_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||||
|
mkdir -p dashboard/dist/assets
|
||||||
|
echo $COMMIT_SHA > dashboard/dist/assets/version
|
||||||
|
cd dashboard
|
||||||
|
zip -r dist.zip dist
|
||||||
|
|
||||||
|
- name: Archive production artifacts
|
||||||
|
uses: actions/upload-artifact@v5
|
||||||
|
with:
|
||||||
|
name: dist-without-markdown
|
||||||
|
path: |
|
||||||
|
dashboard/dist
|
||||||
|
!dist/**/*.md
|
||||||
|
|
||||||
|
- name: Create GitHub Release
|
||||||
|
if: github.event_name == 'push'
|
||||||
|
uses: ncipollo/release-action@v1
|
||||||
|
with:
|
||||||
|
tag: release-${{ github.sha }}
|
||||||
|
owner: AstrBotDevs
|
||||||
|
repo: astrbot-release-harbour
|
||||||
|
body: "Automated release from commit ${{ github.sha }}"
|
||||||
|
token: ${{ secrets.ASTRBOT_HARBOUR_TOKEN }}
|
||||||
|
artifacts: "dashboard/dist.zip"
|
||||||
66
.github/workflows/docker-image.yml
vendored
66
.github/workflows/docker-image.yml
vendored
@@ -11,33 +11,79 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: 拉取源码
|
- name: Pull The Codes
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v5
|
||||||
with:
|
with:
|
||||||
fetch-depth: 1
|
fetch-depth: 0 # Must be 0 so we can fetch tags
|
||||||
|
|
||||||
- name: 设置 QEMU
|
- name: Get latest tag (only on manual trigger)
|
||||||
|
id: get-latest-tag
|
||||||
|
if: github.event_name == 'workflow_dispatch'
|
||||||
|
run: |
|
||||||
|
tag=$(git describe --tags --abbrev=0)
|
||||||
|
echo "latest_tag=$tag" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Checkout to latest tag (only on manual trigger)
|
||||||
|
if: github.event_name == 'workflow_dispatch'
|
||||||
|
run: git checkout ${{ steps.get-latest-tag.outputs.latest_tag }}
|
||||||
|
|
||||||
|
- name: Check if version is pre-release
|
||||||
|
id: check-prerelease
|
||||||
|
run: |
|
||||||
|
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
|
||||||
|
version="${{ steps.get-latest-tag.outputs.latest_tag }}"
|
||||||
|
else
|
||||||
|
version="${{ github.ref_name }}"
|
||||||
|
fi
|
||||||
|
if [[ "$version" == *"beta"* ]] || [[ "$version" == *"alpha"* ]]; then
|
||||||
|
echo "is_prerelease=true" >> $GITHUB_OUTPUT
|
||||||
|
echo "Version $version is a pre-release, will not push latest tag"
|
||||||
|
else
|
||||||
|
echo "is_prerelease=false" >> $GITHUB_OUTPUT
|
||||||
|
echo "Version $version is a stable release, will push latest tag"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Build Dashboard
|
||||||
|
run: |
|
||||||
|
cd dashboard
|
||||||
|
npm install
|
||||||
|
npm run build
|
||||||
|
mkdir -p dist/assets
|
||||||
|
echo $(git rev-parse HEAD) > dist/assets/version
|
||||||
|
cd ..
|
||||||
|
mkdir -p data
|
||||||
|
cp -r dashboard/dist data/
|
||||||
|
|
||||||
|
- name: Set QEMU
|
||||||
uses: docker/setup-qemu-action@v3
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
- name: 设置 Docker Buildx
|
- name: Set Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: 登录到 DockerHub
|
- name: Log in to DockerHub
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||||
|
|
||||||
- name: 构建和推送 Docker hub
|
- name: Login to GitHub Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: Soulter
|
||||||
|
password: ${{ secrets.GHCR_GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Build and Push Docker to DockerHub and Github GHCR
|
||||||
uses: docker/build-push-action@v6
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
platforms: linux/amd64,linux/arm64
|
platforms: linux/amd64,linux/arm64
|
||||||
push: true
|
push: true
|
||||||
tags: |
|
tags: |
|
||||||
${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:latest
|
${{ steps.check-prerelease.outputs.is_prerelease == 'false' && format('{0}/astrbot:latest', secrets.DOCKER_HUB_USERNAME) || '' }}
|
||||||
${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:${{ github.ref_name }}
|
${{ secrets.DOCKER_HUB_USERNAME }}/astrbot:${{ github.event_name == 'workflow_dispatch' && steps.get-latest-tag.outputs.latest_tag || github.ref_name }}
|
||||||
|
${{ steps.check-prerelease.outputs.is_prerelease == 'false' && 'ghcr.io/soulter/astrbot:latest' || '' }}
|
||||||
|
ghcr.io/soulter/astrbot:${{ github.event_name == 'workflow_dispatch' && steps.get-latest-tag.outputs.latest_tag || github.ref_name }}
|
||||||
|
|
||||||
- name: Post build notifications
|
- name: Post build notifications
|
||||||
run: echo "Docker image has been built and pushed successfully"
|
run: echo "Docker image has been built and pushed successfully"
|
||||||
|
|
||||||
|
|||||||
2
.github/workflows/stale.yml
vendored
2
.github/workflows/stale.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
|||||||
pull-requests: write
|
pull-requests: write
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/stale@v5
|
- uses: actions/stale@v10
|
||||||
with:
|
with:
|
||||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
stale-issue-message: 'Stale issue message'
|
stale-issue-message: 'Stale issue message'
|
||||||
|
|||||||
57
.gitignore
vendored
57
.gitignore
vendored
@@ -1,28 +1,49 @@
|
|||||||
|
# Python related
|
||||||
__pycache__
|
__pycache__
|
||||||
botpy.log
|
.mypy_cache
|
||||||
.vscode
|
.venv*
|
||||||
data_v2.db
|
.conda/
|
||||||
data_v3.db
|
uv.lock
|
||||||
configs/session
|
|
||||||
configs/config.yaml
|
|
||||||
**/.DS_Store
|
|
||||||
temp
|
|
||||||
cmd_config.json
|
|
||||||
data
|
|
||||||
cookies.json
|
|
||||||
logs/
|
|
||||||
addons/plugins
|
|
||||||
.coverage
|
.coverage
|
||||||
|
|
||||||
|
# IDE and editors
|
||||||
|
.vscode
|
||||||
|
.idea
|
||||||
|
|
||||||
|
# Logs and temporary files
|
||||||
|
botpy.log
|
||||||
|
logs/
|
||||||
|
temp
|
||||||
|
cookies.json
|
||||||
|
|
||||||
|
# Data files
|
||||||
|
data_v2.db
|
||||||
|
data_v3.db
|
||||||
|
data
|
||||||
|
configs/session
|
||||||
|
configs/config.yaml
|
||||||
|
cmd_config.json
|
||||||
|
|
||||||
|
# Plugins and packages
|
||||||
|
addons/plugins
|
||||||
|
packages/python_interpreter/workplace
|
||||||
tests/astrbot_plugin_openai
|
tests/astrbot_plugin_openai
|
||||||
chroma
|
|
||||||
|
# Dashboard
|
||||||
dashboard/node_modules/
|
dashboard/node_modules/
|
||||||
dashboard/dist/
|
dashboard/dist/
|
||||||
.DS_Store
|
|
||||||
package-lock.json
|
package-lock.json
|
||||||
package.json
|
package.json
|
||||||
|
|
||||||
|
# Operating System
|
||||||
|
**/.DS_Store
|
||||||
|
.DS_Store
|
||||||
|
|
||||||
|
# AstrBot specific
|
||||||
|
.astrbot
|
||||||
|
astrbot.lock
|
||||||
|
|
||||||
|
# Other
|
||||||
|
chroma
|
||||||
venv/*
|
venv/*
|
||||||
packages/python_interpreter/workplace
|
pytest.ini
|
||||||
.venv/*
|
|
||||||
.conda/
|
|
||||||
|
|||||||
25
.pre-commit-config.yaml
Normal file
25
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
default_install_hook_types: [pre-commit, prepare-commit-msg]
|
||||||
|
ci:
|
||||||
|
autofix_commit_msg: ":balloon: auto fixes by pre-commit hooks"
|
||||||
|
autofix_prs: true
|
||||||
|
autoupdate_branch: master
|
||||||
|
autoupdate_schedule: weekly
|
||||||
|
autoupdate_commit_msg: ":balloon: pre-commit autoupdate"
|
||||||
|
repos:
|
||||||
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
|
# Ruff version.
|
||||||
|
rev: v0.14.1
|
||||||
|
hooks:
|
||||||
|
# Run the linter.
|
||||||
|
- id: ruff-check
|
||||||
|
types_or: [ python, pyi ]
|
||||||
|
args: [ --fix ]
|
||||||
|
# Run the formatter.
|
||||||
|
- id: ruff-format
|
||||||
|
types_or: [ python, pyi ]
|
||||||
|
|
||||||
|
- repo: https://github.com/asottile/pyupgrade
|
||||||
|
rev: v3.21.0
|
||||||
|
hooks:
|
||||||
|
- id: pyupgrade
|
||||||
|
args: [--py310-plus]
|
||||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
|||||||
|
3.10
|
||||||
22
Dockerfile
22
Dockerfile
@@ -1,4 +1,4 @@
|
|||||||
FROM python:3.10-slim
|
FROM python:3.11-slim
|
||||||
WORKDIR /AstrBot
|
WORKDIR /AstrBot
|
||||||
|
|
||||||
COPY . /AstrBot/
|
COPY . /AstrBot/
|
||||||
@@ -9,14 +9,24 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
|||||||
python3-dev \
|
python3-dev \
|
||||||
libffi-dev \
|
libffi-dev \
|
||||||
libssl-dev \
|
libssl-dev \
|
||||||
|
ca-certificates \
|
||||||
|
bash \
|
||||||
|
ffmpeg \
|
||||||
|
curl \
|
||||||
|
gnupg \
|
||||||
|
git \
|
||||||
&& apt-get clean \
|
&& apt-get clean \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||||
|
|
||||||
RUN python -m pip install -r requirements.txt --no-cache-dir
|
RUN curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
|
||||||
|
apt-get install -y --no-install-recommends nodejs && \
|
||||||
|
echo "3.11" > .python-version && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
RUN python -m pip install socksio wechatpy cryptography --no-cache-dir
|
RUN python -m pip install --no-cache-dir uv && \
|
||||||
|
uv pip install socksio pilk --no-cache-dir --system
|
||||||
|
|
||||||
EXPOSE 6185
|
EXPOSE 6185
|
||||||
EXPOSE 6186
|
EXPOSE 6186
|
||||||
|
|
||||||
CMD [ "python", "main.py" ]
|
CMD ["uv", "run", "main.py"]
|
||||||
|
|||||||
40
Dockerfile_with_node
Normal file
40
Dockerfile_with_node
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
FROM python:3.11-slim
|
||||||
|
|
||||||
|
WORKDIR /AstrBot
|
||||||
|
|
||||||
|
COPY . /AstrBot/
|
||||||
|
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
gcc \
|
||||||
|
build-essential \
|
||||||
|
python3-dev \
|
||||||
|
libffi-dev \
|
||||||
|
libssl-dev \
|
||||||
|
curl \
|
||||||
|
unzip \
|
||||||
|
ca-certificates \
|
||||||
|
bash \
|
||||||
|
git \
|
||||||
|
&& apt-get clean \
|
||||||
|
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||||
|
|
||||||
|
ENV NVM_DIR="/root/.nvm" \
|
||||||
|
NODE_VERSION=22
|
||||||
|
RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.2/install.sh | bash && \
|
||||||
|
. "$NVM_DIR/nvm.sh" && \
|
||||||
|
nvm install $NODE_VERSION && \
|
||||||
|
nvm use $NODE_VERSION && \
|
||||||
|
nvm alias default $NODE_VERSION && \
|
||||||
|
node -v && npm -v && \
|
||||||
|
echo "3.11" > .python-version
|
||||||
|
ENV PATH="$NVM_DIR/versions/node/v$(node -v | cut -d 'v' -f 2)/bin:$PATH"
|
||||||
|
|
||||||
|
RUN python -m pip install --no-cache-dir uv
|
||||||
|
|
||||||
|
# 安装项目依赖(根据指南,使用 uv sync)
|
||||||
|
RUN uv sync --no-cache
|
||||||
|
|
||||||
|
EXPOSE 6185
|
||||||
|
EXPOSE 6186
|
||||||
|
|
||||||
|
CMD ["uv", "run", "main.py"]
|
||||||
293
README.md
293
README.md
@@ -1,170 +1,243 @@
|
|||||||
<p align="center">
|

|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<div align="center">
|
<div align="center">
|
||||||
|
|
||||||
_✨ 易上手的多平台 LLM 聊天机器人及开发框架 ✨_
|
<br>
|
||||||
|
|
||||||
|
<div>
|
||||||
<a href="https://trendshift.io/repositories/12875" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12875" alt="Soulter%2FAstrBot | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
<a href="https://trendshift.io/repositories/12875" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12875" alt="Soulter%2FAstrBot | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||||
|
<a href="https://hellogithub.com/repository/AstrBotDevs/AstrBot" target="_blank"><img src="https://api.hellogithub.com/v1/widgets/recommend.svg?rid=d127d50cd5e54c5382328acc3bb25483&claim_uid=ZO9by7qCXgSd6Lp&t=1" alt="Featured|HelloGitHub" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||||
[](https://github.com/Soulter/AstrBot/releases/latest)
|
|
||||||
<img src="https://img.shields.io/badge/python-3.10+-blue.svg" alt="python">
|
|
||||||
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg"/></a>
|
|
||||||
<img alt="Static Badge" src="https://img.shields.io/badge/QQ群-630166526-purple">
|
|
||||||
[](https://wakatime.com/badge/user/915e5316-99c6-4563-a483-ef186cf000c9/project/018e705a-a1a7-409a-a849-3013485e6c8e)
|
|
||||||

|
|
||||||
[](https://codecov.io/gh/Soulter/AstrBot)
|
|
||||||
|
|
||||||
<a href="https://github.com/Soulter/AstrBot/blob/master/README_en.md">English</a> |
|
|
||||||
<a href="https://github.com/Soulter/AstrBot/blob/master/README_ja.md">日本語</a> |
|
|
||||||
<a href="https://astrbot.app/">查看文档</a> |
|
|
||||||
<a href="https://github.com/Soulter/AstrBot/issues">问题提交</a>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
AstrBot 是一个松耦合、异步、支持多消息平台部署、具有易用的插件系统和完善的大语言模型(LLM)接入功能的聊天机器人及开发框架。
|
<br>
|
||||||
|
|
||||||
## ✨ 主要功能
|
<div>
|
||||||
|
<img src="https://img.shields.io/github/v/release/AstrBotDevs/AstrBot?style=for-the-badge&color=76bad9" href="https://github.com/AstrBotDevs/AstrBot/releases/latest">
|
||||||
|
<img src="https://img.shields.io/badge/python-3.10+-blue.svg?style=for-the-badge&color=76bad9" alt="python">
|
||||||
|
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg?style=for-the-badge&color=76bad9"/></a>
|
||||||
|
<a href="https://qm.qq.com/cgi-bin/qm/qr?k=wtbaNx7EioxeaqS9z7RQWVXPIxg2zYr7&jump_from=webapi&authKey=vlqnv/AV2DbJEvGIcxdlNSpfxVy+8vVqijgreRdnVKOaydpc+YSw4MctmEbr0k5"><img alt="QQ_community" src="https://img.shields.io/badge/QQ群-775869627-purple?style=for-the-badge&color=76bad9"></a>
|
||||||
|
<a href="https://t.me/+hAsD2Ebl5as3NmY1"><img alt="Telegram_community" src="https://img.shields.io/badge/Telegram-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||||
|
<img src="https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fapi.soulter.top%2Fastrbot%2Fplugin-num&query=%24.result&suffix=%E4%B8%AA&style=for-the-badge&label=%E6%8F%92%E4%BB%B6%E5%B8%82%E5%9C%BA&cacheSeconds=3600">
|
||||||
|
</div>
|
||||||
|
|
||||||
1. **大语言模型对话**。支持各种大语言模型,包括 OpenAI API、Google Gemini、Llama、Deepseek、ChatGLM 等,支持接入本地部署的大模型,通过 Ollama、LLMTuner。具有多轮对话、人格情境、多模态能力,支持图片理解、语音转文字(Whisper)。
|
<br>
|
||||||
2. **多消息平台接入**。支持接入 QQ(OneBot)、QQ 频道、微信(Gewechat)、飞书、Telegram。后续将支持钉钉、Discord、WhatsApp、小爱音响。支持速率限制、白名单、关键词过滤、百度内容审核。
|
|
||||||
3. **Agent**。原生支持部分 Agent 能力,如代码执行器、自然语言待办、网页搜索。对接 [Dify 平台](https://astrbot.app/others/dify.html),便捷接入 Dify 智能助手、知识库和 Dify 工作流。
|
|
||||||
4. **插件扩展**。深度优化的插件机制,支持[开发插件](https://astrbot.app/dev/plugin.html)扩展功能,极简开发。已支持安装多个插件。
|
|
||||||
5. **可视化管理面板**。支持可视化修改配置、插件管理、日志查看等功能,降低配置难度。集成 WebChat,可在面板上与大模型对话。
|
|
||||||
6. **高稳定性、高模块化**。基于事件总线和流水线的架构设计,高度模块化,低耦合。
|
|
||||||
|
|
||||||
> [!TIP]
|
<a href="https://github.com/AstrBotDevs/AstrBot/blob/master/README_en.md">English</a> |
|
||||||
> 管理面板在线体验 Demo: [https://demo.astrbot.app/](https://demo.astrbot.app/)
|
<a href="https://github.com/AstrBotDevs/AstrBot/blob/master/README_ja.md">日本語</a> |
|
||||||
>
|
<a href="https://astrbot.app/">文档</a> |
|
||||||
> 用户名: `astrbot`, 密码: `astrbot`。未配置 LLM,无法在聊天页使用大模型。(不要再修改 demo 的登录密码了 😭)
|
<a href="https://blog.astrbot.app/">Blog</a> |
|
||||||
|
<a href="https://astrbot.featurebase.app/roadmap">路线图</a> |
|
||||||
|
<a href="https://github.com/AstrBotDevs/AstrBot/issues">问题提交</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
## ✨ 使用方式
|
AstrBot 是一个开源的一站式 Agent 聊天机器人平台及开发框架。
|
||||||
|
|
||||||
#### Docker 部署
|
## 主要功能
|
||||||
|
|
||||||
|
1. **大模型对话**。支持接入多种大模型服务。支持多模态、工具调用、MCP、原生知识库、人设等功能。
|
||||||
|
2. **多消息平台支持**。支持接入 QQ、企业微信、微信公众号、飞书、Telegram、钉钉、Discord、KOOK 等平台。支持速率限制、白名单、百度内容审核。
|
||||||
|
3. **Agent**。完善适配的 Agentic 能力。支持多轮工具调用、内置沙盒代码执行器、网页搜索等功能。
|
||||||
|
4. **插件扩展**。深度优化的插件机制,支持[开发插件](https://astrbot.app/dev/plugin.html)扩展功能,社区插件生态丰富。
|
||||||
|
5. **WebUI**。可视化配置和管理机器人,功能齐全。
|
||||||
|
|
||||||
|
## 部署方式
|
||||||
|
|
||||||
|
#### Docker 部署(推荐 🥳)
|
||||||
|
|
||||||
|
推荐使用 Docker / Docker Compose 方式部署 AstrBot。
|
||||||
|
|
||||||
请参阅官方文档 [使用 Docker 部署 AstrBot](https://astrbot.app/deploy/astrbot/docker.html#%E4%BD%BF%E7%94%A8-docker-%E9%83%A8%E7%BD%B2-astrbot) 。
|
请参阅官方文档 [使用 Docker 部署 AstrBot](https://astrbot.app/deploy/astrbot/docker.html#%E4%BD%BF%E7%94%A8-docker-%E9%83%A8%E7%BD%B2-astrbot) 。
|
||||||
|
|
||||||
|
#### 宝塔面板部署
|
||||||
|
|
||||||
|
AstrBot 与宝塔面板合作,已上架至宝塔面板。
|
||||||
|
|
||||||
|
请参阅官方文档 [宝塔面板部署](https://astrbot.app/deploy/astrbot/btpanel.html) 。
|
||||||
|
|
||||||
|
#### 1Panel 部署
|
||||||
|
|
||||||
|
AstrBot 已由 1Panel 官方上架至 1Panel 面板。
|
||||||
|
|
||||||
|
请参阅官方文档 [1Panel 部署](https://astrbot.app/deploy/astrbot/1panel.html) 。
|
||||||
|
|
||||||
|
#### 在 雨云 上部署
|
||||||
|
|
||||||
|
AstrBot 已由雨云官方上架至云应用平台,可一键部署。
|
||||||
|
|
||||||
|
[](https://app.rainyun.com/apps/rca/store/5994?ref=NjU1ODg0)
|
||||||
|
|
||||||
|
#### 在 Replit 上部署
|
||||||
|
|
||||||
|
社区贡献的部署方式。
|
||||||
|
|
||||||
|
[](https://repl.it/github/AstrBotDevs/AstrBot)
|
||||||
|
|
||||||
#### Windows 一键安装器部署
|
#### Windows 一键安装器部署
|
||||||
|
|
||||||
需要电脑上安装有 Python(>3.10)。请参阅官方文档 [使用 Windows 一键安装器部署 AstrBot](https://astrbot.app/deploy/astrbot/windows.html) 。
|
请参阅官方文档 [使用 Windows 一键安装器部署 AstrBot](https://astrbot.app/deploy/astrbot/windows.html) 。
|
||||||
|
|
||||||
#### Replit 部署
|
|
||||||
|
|
||||||
[](https://repl.it/github/Soulter/AstrBot)
|
|
||||||
|
|
||||||
#### CasaOS 部署
|
#### CasaOS 部署
|
||||||
|
|
||||||
社区贡献的部署方式。
|
社区贡献的部署方式。
|
||||||
|
|
||||||
请参阅官方文档 [通过源码部署 AstrBot](https://astrbot.app/deploy/astrbot/casaos.html) 。
|
请参阅官方文档 [CasaOS 部署](https://astrbot.app/deploy/astrbot/casaos.html) 。
|
||||||
|
|
||||||
#### 手动部署
|
#### 手动部署
|
||||||
|
|
||||||
请参阅官方文档 [通过源码部署 AstrBot](https://astrbot.app/deploy/astrbot/cli.html) 。
|
首先安装 uv:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install uv
|
||||||
|
```
|
||||||
|
|
||||||
|
通过 Git Clone 安装 AstrBot:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/AstrBotDevs/AstrBot && cd AstrBot
|
||||||
|
uv run main.py
|
||||||
|
```
|
||||||
|
|
||||||
|
或者请参阅官方文档 [通过源码部署 AstrBot](https://astrbot.app/deploy/astrbot/cli.html) 。
|
||||||
|
|
||||||
|
## 🌍 社区
|
||||||
|
|
||||||
|
### QQ 群组
|
||||||
|
|
||||||
|
- 1 群:322154837
|
||||||
|
- 3 群:630166526
|
||||||
|
- 5 群:822130018
|
||||||
|
- 6 群:753075035
|
||||||
|
- 开发者群:975206796
|
||||||
|
|
||||||
|
### Telegram 群组
|
||||||
|
|
||||||
|
<a href="https://t.me/+hAsD2Ebl5as3NmY1"><img alt="Telegram_community" src="https://img.shields.io/badge/Telegram-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||||
|
|
||||||
|
### Discord 群组
|
||||||
|
|
||||||
|
<a href="https://discord.gg/hAVk6tgV36"><img alt="Discord_community" src="https://img.shields.io/badge/Discord-AstrBot-purple?style=for-the-badge&color=76bad9"></a>
|
||||||
|
|
||||||
## ⚡ 消息平台支持情况
|
## ⚡ 消息平台支持情况
|
||||||
|
|
||||||
| 平台 | 支持性 | 详情 | 消息类型 |
|
**官方维护**
|
||||||
| -------- | ------- | ------- | ------ |
|
|
||||||
| QQ(官方机器人接口) | ✔ | 私聊、群聊,QQ 频道私聊、群聊 | 文字、图片 |
|
|
||||||
| QQ(OneBot) | ✔ | 私聊、群聊 | 文字、图片、语音 |
|
|
||||||
| 微信(个人号) | ✔ | 微信个人号私聊、群聊 | 文字、图片、语音 |
|
|
||||||
| [Telegram](https://github.com/Soulter/astrbot_plugin_telegram) | ✔ | 私聊、群聊 | 文字、图片 |
|
|
||||||
| [微信(企业微信)](https://github.com/Soulter/astrbot_plugin_wecom) | ✔ | 私聊 | 文字、图片、语音 |
|
|
||||||
| 飞书 | ✔ | 群聊 | 文字、图片 |
|
|
||||||
| 微信对话开放平台 | 🚧 | 计划内 | - |
|
|
||||||
| Discord | 🚧 | 计划内 | - |
|
|
||||||
| WhatsApp | 🚧 | 计划内 | - |
|
|
||||||
| 小爱音响 | 🚧 | 计划内 | - |
|
|
||||||
|
|
||||||
# 🦌 接下来的路线图
|
| 平台 | 支持性 |
|
||||||
|
| -------- | ------- |
|
||||||
|
| QQ(官方平台) | ✔ |
|
||||||
|
| QQ(OneBot) | ✔ |
|
||||||
|
| Telegram | ✔ |
|
||||||
|
| 企微应用 | ✔ |
|
||||||
|
| 企微智能机器人 | ✔ |
|
||||||
|
| 微信客服 | ✔ |
|
||||||
|
| 微信公众号 | ✔ |
|
||||||
|
| 飞书 | ✔ |
|
||||||
|
| 钉钉 | ✔ |
|
||||||
|
| Slack | ✔ |
|
||||||
|
| Discord | ✔ |
|
||||||
|
| Satori | ✔ |
|
||||||
|
| Misskey | ✔ |
|
||||||
|
| Whatsapp | 将支持 |
|
||||||
|
| LINE | 将支持 |
|
||||||
|
|
||||||
> [!TIP]
|
**社区维护**
|
||||||
> 欢迎在 Issue 提出更多建议 <3
|
|
||||||
|
|
||||||
- [ ] 完善并保证目前所有平台适配器的功能一致性
|
| 平台 | 支持性 |
|
||||||
- [ ] 优化插件接口
|
| -------- | ------- |
|
||||||
- [ ] 默认支持更多 TTS 服务,如 GPT-Sovits
|
| [KOOK](https://github.com/wuyan1003/astrbot_plugin_kook_adapter) | ✔ |
|
||||||
- [ ] 完善“聊天增强”部分,支持持久化记忆
|
| [VoceChat](https://github.com/HikariFroya/astrbot_plugin_vocechat) | ✔ |
|
||||||
- [ ] 规划 i18n
|
| [Bilibili 私信](https://github.com/Hina-Chat/astrbot_plugin_bilibili_adapter) | ✔ |
|
||||||
|
| [wxauto](https://github.com/luosheng520qaq/wxauto-repost-onebotv11) | ✔ |
|
||||||
|
|
||||||
|
## ⚡ 提供商支持情况
|
||||||
|
|
||||||
|
**大模型服务**
|
||||||
|
|
||||||
|
| 名称 | 支持性 | 备注 |
|
||||||
|
| -------- | ------- | ------- |
|
||||||
|
| OpenAI | ✔ | 支持任何兼容 OpenAI API 的服务 |
|
||||||
|
| Anthropic | ✔ | |
|
||||||
|
| Google Gemini | ✔ | |
|
||||||
|
| Moonshot AI | ✔ | |
|
||||||
|
| 智谱 AI | ✔ | |
|
||||||
|
| DeepSeek | ✔ | |
|
||||||
|
| Ollama | ✔ | 本地部署 DeepSeek 等开源语言模型 |
|
||||||
|
| LM Studio | ✔ | 本地部署 DeepSeek 等开源语言模型 |
|
||||||
|
| [优云智算](https://www.compshare.cn/?ytag=GPU_YY-gh_astrbot&referral_code=FV7DcGowN4hB5UuXKgpE74) | ✔ | |
|
||||||
|
| [302.AI](https://share.302.ai/rr1M3l) | ✔ | |
|
||||||
|
| [小马算力](https://www.tokenpony.cn/3YPyf) | ✔ | |
|
||||||
|
| 硅基流动 | ✔ | |
|
||||||
|
| PPIO 派欧云 | ✔ | |
|
||||||
|
| ModelScope | ✔ | |
|
||||||
|
| OneAPI | ✔ | |
|
||||||
|
| Dify | ✔ | |
|
||||||
|
| 阿里云百炼应用 | ✔ | |
|
||||||
|
| Coze | ✔ | |
|
||||||
|
|
||||||
|
**语音转文本服务**
|
||||||
|
|
||||||
|
| 名称 | 支持性 | 备注 |
|
||||||
|
| -------- | ------- | ------- |
|
||||||
|
| Whisper | ✔ | 支持 API、本地部署 |
|
||||||
|
| SenseVoice | ✔ | 本地部署 |
|
||||||
|
|
||||||
|
**文本转语音服务**
|
||||||
|
|
||||||
|
| 名称 | 支持性 | 备注 |
|
||||||
|
| -------- | ------- | ------- |
|
||||||
|
| OpenAI TTS | ✔ | |
|
||||||
|
| Gemini TTS | ✔ | |
|
||||||
|
| GSVI | ✔ | GPT-Sovits-Inference |
|
||||||
|
| GPT-SoVITs | ✔ | GPT-Sovits |
|
||||||
|
| FishAudio | ✔ | |
|
||||||
|
| Edge TTS | ✔ | Edge 浏览器的免费 TTS |
|
||||||
|
| 阿里云百炼 TTS | ✔ | |
|
||||||
|
| Azure TTS | ✔ | |
|
||||||
|
| Minimax TTS | ✔ | |
|
||||||
|
| 火山引擎 TTS | ✔ | |
|
||||||
|
|
||||||
## ❤️ 贡献
|
## ❤️ 贡献
|
||||||
|
|
||||||
欢迎任何 Issues/Pull Requests!只需要将你的更改提交到此项目 :)
|
欢迎任何 Issues/Pull Requests!只需要将你的更改提交到此项目 :)
|
||||||
|
|
||||||
对于新功能的添加,请先通过 Issue 讨论。
|
### 如何贡献
|
||||||
|
|
||||||
## 🌟 支持
|
你可以通过查看问题或帮助审核 PR(拉取请求)来贡献。任何问题或 PR 都欢迎参与,以促进社区贡献。当然,这些只是建议,你可以以任何方式进行贡献。对于新功能的添加,请先通过 Issue 讨论。
|
||||||
|
|
||||||
- Star 这个项目!
|
### 开发环境
|
||||||
- 在[爱发电](https://afdian.com/a/soulter)支持我!
|
|
||||||
- 在[微信](https://drive.soulter.top/f/pYfA/d903f4fa49a496fda3f16d2be9e023b5.png)支持我~
|
|
||||||
|
|
||||||
## ✨ Demo
|
AstrBot 使用 `ruff` 进行代码格式化和检查。
|
||||||
|
|
||||||
> [!NOTE]
|
```bash
|
||||||
> 代码执行器的文件输入/输出目前仅测试了 Napcat(QQ), Lagrange(QQ)
|
git clone https://github.com/AstrBotDevs/AstrBot
|
||||||
|
pip install pre-commit
|
||||||
|
pre-commit install
|
||||||
|
```
|
||||||
|
|
||||||
<div align='center'>
|
## ❤️ Special Thanks
|
||||||
|
|
||||||
<img src="https://github.com/user-attachments/assets/4ee688d9-467d-45c8-99d6-368f9a8a92d8" width="600">
|
特别感谢所有 Contributors 和插件开发者对 AstrBot 的贡献 ❤️
|
||||||
|
|
||||||
_✨基于 Docker 的沙箱化代码执行器(Beta 测试中)✨_
|
<a href="https://github.com/AstrBotDevs/AstrBot/graphs/contributors">
|
||||||
|
<img src="https://contrib.rocks/image?repo=AstrBotDevs/AstrBot" />
|
||||||
|
</a>
|
||||||
|
|
||||||
<img src="https://github.com/user-attachments/assets/0378f407-6079-4f64-ae4c-e97ab20611d2" height=500>
|
此外,本项目的诞生离不开以下开源项目的帮助:
|
||||||
|
|
||||||
_✨ 多模态、网页搜索、长文本转图片(可配置) ✨_
|
- [NapNeko/NapCatQQ](https://github.com/NapNeko/NapCatQQ) - 伟大的猫猫框架
|
||||||
|
|
||||||
<img src="https://github.com/user-attachments/assets/8ec12797-e70f-460a-959e-48eca39ca2bb" height=100>
|
|
||||||
|
|
||||||
_✨ 自然语言待办事项 ✨_
|
|
||||||
|
|
||||||
<img src="https://github.com/user-attachments/assets/e137a9e1-340a-4bf2-bb2b-771132780735" height=150>
|
|
||||||
<img src="https://github.com/user-attachments/assets/480f5e82-cf6a-4955-a869-0d73137aa6e1" height=150>
|
|
||||||
|
|
||||||
_✨ 插件系统——部分插件展示 ✨_
|
|
||||||
|
|
||||||
<img src="https://github.com/user-attachments/assets/592a8630-14c7-4e06-b496-9c0386e4f36c" width=600>
|
|
||||||
|
|
||||||
_✨ 管理面板 ✨_
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
_✨ 内置 Web Chat,在线与机器人交互 ✨_
|
|
||||||
|
|
||||||
</div>
|
|
||||||
|
|
||||||
## ⭐ Star History
|
## ⭐ Star History
|
||||||
|
|
||||||
> [!TIP]
|
> [!TIP]
|
||||||
> 如果本项目对您的生活 / 工作产生了帮助,或者您关注本项目的未来发展,请给项目 Star,这是我维护这个开源项目的动力 <3
|
> 如果本项目对您的生活 / 工作产生了帮助,或者您关注本项目的未来发展,请给项目 Star,这是我们维护这个开源项目的动力 <3
|
||||||
|
|
||||||
<div align="center">
|
<div align="center">
|
||||||
|
|
||||||
[](https://star-history.com/#soulter/astrbot&Date)
|
[](https://star-history.com/#astrbotdevs/astrbot&Date)
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
## Disclaimer
|
</details>
|
||||||
|
|
||||||
1. The project is protected under the `AGPL-v3` opensource license.
|
|
||||||
2. The deployment of WeChat (personal account) utilizes [Gewechat](https://github.com/Devo919/Gewechat) service. AstrBot only guarantees connectivity with Gewechat and recommends using a WeChat account that is not frequently used. In the event of account risk control, the author of this project shall not bear any responsibility.
|
|
||||||
3. Please ensure compliance with local laws and regulations when using this project.
|
|
||||||
|
|
||||||
<!-- ## ✨ ATRI [Beta 测试]
|
|
||||||
|
|
||||||
该功能作为插件载入。插件仓库地址:[astrbot_plugin_atri](https://github.com/Soulter/astrbot_plugin_atri)
|
|
||||||
|
|
||||||
1. 基于《ATRI ~ My Dear Moments》主角 ATRI 角色台词作为微调数据集的 `Qwen1.5-7B-Chat Lora` 微调模型。
|
|
||||||
2. 长期记忆
|
|
||||||
3. 表情包理解与回复
|
|
||||||
4. TTS
|
|
||||||
-->
|
|
||||||
|
|
||||||
|
|
||||||
_私は、高性能ですから!_
|
_私は、高性能ですから!_
|
||||||
|
|
||||||
|
|||||||
40
README_en.md
40
README_en.md
@@ -10,16 +10,16 @@ _✨ Easy-to-use Multi-platform LLM Chatbot & Development Framework ✨_
|
|||||||
|
|
||||||
<a href="https://trendshift.io/repositories/12875" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12875" alt="Soulter%2FAstrBot | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
<a href="https://trendshift.io/repositories/12875" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12875" alt="Soulter%2FAstrBot | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||||
|
|
||||||
[](https://github.com/Soulter/AstrBot/releases/latest)
|
[](https://github.com/AstrBotDevs/AstrBot/releases/latest)
|
||||||
<img src="https://img.shields.io/badge/python-3.10+-blue.svg" alt="python">
|
<img src="https://img.shields.io/badge/python-3.10+-blue.svg" alt="python">
|
||||||
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg"/></a>
|
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot"/></a>
|
||||||
<img alt="Static Badge" src="https://img.shields.io/badge/QQ群-630166526-purple">
|
<a href="https://qm.qq.com/cgi-bin/qm/qr?k=wtbaNx7EioxeaqS9z7RQWVXPIxg2zYr7&jump_from=webapi&authKey=vlqnv/AV2DbJEvGIcxdlNSpfxVy+8vVqijgreRdnVKOaydpc+YSw4MctmEbr0k5"><img alt="Static Badge" src="https://img.shields.io/badge/QQ群-630166526-purple"></a>
|
||||||
[](https://wakatime.com/badge/user/915e5316-99c6-4563-a483-ef186cf000c9/project/018e705a-a1a7-409a-a849-3013485e6c8e)
|
[](https://wakatime.com/badge/user/915e5316-99c6-4563-a483-ef186cf000c9/project/018e705a-a1a7-409a-a849-3013485e6c8e)
|
||||||

|

|
||||||
[](https://codecov.io/gh/Soulter/AstrBot)
|
[](https://codecov.io/gh/AstrBotDevs/AstrBot)
|
||||||
|
|
||||||
<a href="https://astrbot.app/">Documentation</a> |
|
<a href="https://astrbot.app/">Documentation</a> |
|
||||||
<a href="https://github.com/Soulter/AstrBot/issues">Issue Tracking</a>
|
<a href="https://github.com/AstrBotDevs/AstrBot/issues">Issue Tracking</a>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
AstrBot is a loosely coupled, asynchronous chatbot and development framework that supports multi-platform deployment, featuring an easy-to-use plugin system and comprehensive Large Language Model (LLM) integration capabilities.
|
AstrBot is a loosely coupled, asynchronous chatbot and development framework that supports multi-platform deployment, featuring an easy-to-use plugin system and comprehensive Large Language Model (LLM) integration capabilities.
|
||||||
@@ -28,7 +28,7 @@ AstrBot is a loosely coupled, asynchronous chatbot and development framework tha
|
|||||||
|
|
||||||
1. **LLM Conversations** - Supports various LLMs including OpenAI API, Google Gemini, Llama, Deepseek, ChatGLM, etc. Enables local model deployment via Ollama/LLMTuner. Features multi-turn dialogues, personality contexts, multimodal capabilities (image understanding), and speech-to-text (Whisper).
|
1. **LLM Conversations** - Supports various LLMs including OpenAI API, Google Gemini, Llama, Deepseek, ChatGLM, etc. Enables local model deployment via Ollama/LLMTuner. Features multi-turn dialogues, personality contexts, multimodal capabilities (image understanding), and speech-to-text (Whisper).
|
||||||
2. **Multi-platform Integration** - Supports QQ (OneBot), QQ Channels, WeChat (Gewechat), Feishu, and Telegram. Planned support for DingTalk, Discord, WhatsApp, and Xiaomi Smart Speakers. Includes rate limiting, whitelisting, keyword filtering, and Baidu content moderation.
|
2. **Multi-platform Integration** - Supports QQ (OneBot), QQ Channels, WeChat (Gewechat), Feishu, and Telegram. Planned support for DingTalk, Discord, WhatsApp, and Xiaomi Smart Speakers. Includes rate limiting, whitelisting, keyword filtering, and Baidu content moderation.
|
||||||
3. **Agent Capabilities** - Native support for code execution, natural language TODO lists, web search. Integrates with [Dify Platform](https://astrbot.app/others/dify.html) for easy access to Dify assistants/knowledge bases/workflows.
|
3. **Agent Capabilities** - Native support for code execution, natural language TODO lists, web search. Integrates with [Dify Platform](https://dify.ai/) for easy access to Dify assistants/knowledge bases/workflows.
|
||||||
4. **Plugin System** - Optimized plugin mechanism with minimal development effort. Supports multiple installed plugins.
|
4. **Plugin System** - Optimized plugin mechanism with minimal development effort. Supports multiple installed plugins.
|
||||||
5. **Web Dashboard** - Visual configuration management, plugin controls, logging, and WebChat interface for direct LLM interaction.
|
5. **Web Dashboard** - Visual configuration management, plugin controls, logging, and WebChat interface for direct LLM interaction.
|
||||||
6. **High Stability & Modularity** - Event bus and pipeline architecture ensures high modularization and loose coupling.
|
6. **High Stability & Modularity** - Event bus and pipeline architecture ensures high modularization and loose coupling.
|
||||||
@@ -49,7 +49,7 @@ Requires Python (>3.10). See docs: [Windows Installer Guide](https://astrbot.app
|
|||||||
|
|
||||||
#### Replit Deployment
|
#### Replit Deployment
|
||||||
|
|
||||||
[](https://repl.it/github/Soulter/AstrBot)
|
[](https://repl.it/github/AstrBotDevs/AstrBot)
|
||||||
|
|
||||||
#### CasaOS Deployment
|
#### CasaOS Deployment
|
||||||
|
|
||||||
@@ -67,14 +67,32 @@ See docs: [Source Code Deployment](https://astrbot.app/deploy/astrbot/cli.html)
|
|||||||
| QQ (Official Bot) | ✔ | Private/Group chats | Text, Images |
|
| QQ (Official Bot) | ✔ | Private/Group chats | Text, Images |
|
||||||
| QQ (OneBot) | ✔ | Private/Group chats | Text, Images, Voice |
|
| QQ (OneBot) | ✔ | Private/Group chats | Text, Images, Voice |
|
||||||
| WeChat (Personal) | ✔ | Private/Group chats | Text, Images, Voice |
|
| WeChat (Personal) | ✔ | Private/Group chats | Text, Images, Voice |
|
||||||
| [Telegram](https://github.com/Soulter/astrbot_plugin_telegram) | ✔ | Private/Group chats | Text, Images |
|
| [Telegram](https://github.com/AstrBotDevs/AstrBot_plugin_telegram) | ✔ | Private/Group chats | Text, Images |
|
||||||
| [WeChat Work](https://github.com/Soulter/astrbot_plugin_wecom) | ✔ | Private chats | Text, Images, Voice |
|
| [WeChat Work](https://github.com/AstrBotDevs/AstrBot_plugin_wecom) | ✔ | Private chats | Text, Images, Voice |
|
||||||
| Feishu | ✔ | Group chats | Text, Images |
|
| Feishu | ✔ | Group chats | Text, Images |
|
||||||
| WeChat Open Platform | 🚧 | Planned | - |
|
| WeChat Open Platform | 🚧 | Planned | - |
|
||||||
| Discord | 🚧 | Planned | - |
|
| Discord | 🚧 | Planned | - |
|
||||||
| WhatsApp | 🚧 | Planned | - |
|
| WhatsApp | 🚧 | Planned | - |
|
||||||
| Xiaomi Speakers | 🚧 | Planned | - |
|
| Xiaomi Speakers | 🚧 | Planned | - |
|
||||||
|
|
||||||
|
## Provider Support Status
|
||||||
|
|
||||||
|
| Name | Support | Type | Notes |
|
||||||
|
|---------------------------|---------|------------------------|-----------------------------------------------------------------------|
|
||||||
|
| OpenAI API | ✔ | Text Generation | Supports all OpenAI API-compatible services including DeepSeek, Google Gemini, GLM, Moonshot, Alibaba Cloud Bailian, Silicon Flow, xAI, etc. |
|
||||||
|
| Claude API | ✔ | Text Generation | |
|
||||||
|
| Google Gemini API | ✔ | Text Generation | |
|
||||||
|
| Dify | ✔ | LLMOps | |
|
||||||
|
| DashScope (Alibaba Cloud) | ✔ | LLMOps | |
|
||||||
|
| Ollama | ✔ | Model Loader | Local deployment for open-source LLMs (DeepSeek, Llama, etc.) |
|
||||||
|
| LM Studio | ✔ | Model Loader | Local deployment for open-source LLMs (DeepSeek, Llama, etc.) |
|
||||||
|
| LLMTuner | ✔ | Model Loader | Local loading of fine-tuned models (e.g. LoRA) |
|
||||||
|
| OneAPI | ✔ | LLM Distribution | |
|
||||||
|
| Whisper | ✔ | Speech-to-Text | Supports API and local deployment |
|
||||||
|
| SenseVoice | ✔ | Speech-to-Text | Local deployment |
|
||||||
|
| OpenAI TTS API | ✔ | Text-to-Speech | |
|
||||||
|
| Fishaudio | ✔ | Text-to-Speech | Project involving GPT-Sovits author |
|
||||||
|
|
||||||
# 🦌 Roadmap
|
# 🦌 Roadmap
|
||||||
|
|
||||||
> [!TIP]
|
> [!TIP]
|
||||||
@@ -139,7 +157,7 @@ _✨ Built-in Web Chat Interface ✨_
|
|||||||
|
|
||||||
<div align="center">
|
<div align="center">
|
||||||
|
|
||||||
[](https://star-history.com/#soulter/astrbot&Date)
|
[](https://star-history.com/#AstrBotDevs/AstrBot&Date)
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -151,7 +169,7 @@ _✨ Built-in Web Chat Interface ✨_
|
|||||||
|
|
||||||
<!-- ## ✨ ATRI [Beta]
|
<!-- ## ✨ ATRI [Beta]
|
||||||
|
|
||||||
Available as plugin: [astrbot_plugin_atri](https://github.com/Soulter/astrbot_plugin_atri)
|
Available as plugin: [astrbot_plugin_atri](https://github.com/AstrBotDevs/AstrBot_plugin_atri)
|
||||||
|
|
||||||
1. Qwen1.5-7B-Chat Lora model fine-tuned with ATRI character data
|
1. Qwen1.5-7B-Chat Lora model fine-tuned with ATRI character data
|
||||||
2. Long-term memory
|
2. Long-term memory
|
||||||
|
|||||||
25
README_ja.md
25
README_ja.md
@@ -1,5 +1,5 @@
|
|||||||
<p align="center">
|
<p align="center">
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
</p>
|
</p>
|
||||||
@@ -10,16 +10,16 @@ _✨ 簡単に使えるマルチプラットフォーム LLM チャットボッ
|
|||||||
|
|
||||||
<a href="https://trendshift.io/repositories/12875" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12875" alt="Soulter%2FAstrBot | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
<a href="https://trendshift.io/repositories/12875" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12875" alt="Soulter%2FAstrBot | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||||
|
|
||||||
[](https://github.com/Soulter/AstrBot/releases/latest)
|
[](https://github.com/AstrBotDevs/AstrBot/releases/latest)
|
||||||
<img src="https://img.shields.io/badge/python-3.10+-blue.svg" alt="python">
|
<img src="https://img.shields.io/badge/python-3.10+-blue.svg" alt="python">
|
||||||
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg"/></a>
|
<a href="https://hub.docker.com/r/soulter/astrbot"><img alt="Docker pull" src="https://img.shields.io/docker/pulls/soulter/astrbot.svg"/></a>
|
||||||
<img alt="Static Badge" src="https://img.shields.io/badge/QQ群-630166526-purple">
|
<img alt="Static Badge" src="https://img.shields.io/badge/QQ群-630166526-purple">
|
||||||
[](https://wakatime.com/badge/user/915e5316-99c6-4563-a483-ef186cf000c9/project/018e705a-a1a7-409a-a849-3013485e6c8e)
|
[](https://wakatime.com/badge/user/915e5316-99c6-4563-a483-ef186cf000c9/project/018e705a-a1a7-409a-a849-3013485e6c8e)
|
||||||

|

|
||||||
[](https://codecov.io/gh/Soulter/AstrBot)
|
[](https://codecov.io/gh/AstrBotDevs/AstrBot)
|
||||||
|
|
||||||
<a href="https://astrbot.app/">ドキュメントを見る</a> |
|
<a href="https://astrbot.app/">ドキュメントを見る</a> |
|
||||||
<a href="https://github.com/Soulter/AstrBot/issues">問題を報告する</a>
|
<a href="https://github.com/AstrBotDevs/AstrBot/issues">問題を報告する</a>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
AstrBot は、疎結合、非同期、複数のメッセージプラットフォームに対応したデプロイ、使いやすいプラグインシステム、および包括的な大規模言語モデル(LLM)接続機能を備えたチャットボットおよび開発フレームワークです。
|
AstrBot は、疎結合、非同期、複数のメッセージプラットフォームに対応したデプロイ、使いやすいプラグインシステム、および包括的な大規模言語モデル(LLM)接続機能を備えたチャットボットおよび開発フレームワークです。
|
||||||
@@ -27,15 +27,15 @@ AstrBot は、疎結合、非同期、複数のメッセージプラットフォ
|
|||||||
## ✨ 主な機能
|
## ✨ 主な機能
|
||||||
|
|
||||||
1. **大規模言語モデルの対話**。OpenAI API、Google Gemini、Llama、Deepseek、ChatGLM など、さまざまな大規模言語モデルをサポートし、Ollama、LLMTuner を介してローカルにデプロイされた大規模モデルをサポートします。多輪対話、人格シナリオ、多モーダル機能を備え、画像理解、音声からテキストへの変換(Whisper)をサポートします。
|
1. **大規模言語モデルの対話**。OpenAI API、Google Gemini、Llama、Deepseek、ChatGLM など、さまざまな大規模言語モデルをサポートし、Ollama、LLMTuner を介してローカルにデプロイされた大規模モデルをサポートします。多輪対話、人格シナリオ、多モーダル機能を備え、画像理解、音声からテキストへの変換(Whisper)をサポートします。
|
||||||
2. **複数のメッセージプラットフォームの接続**。QQ(OneBot)、QQ チャンネル、WeChat(Gewechat)、Feishu、Telegram への接続をサポートします。今後、DingTalk、Discord、WhatsApp、Xiaoai 音響をサポートする予定です。レート制限、ホワイトリスト、キーワードフィルタリング、Baidu コンテンツ監査をサポートします。
|
2. **複数のメッセージプラットフォームの接続**。QQ(OneBot)、QQ チャンネル、Feishu、Telegram への接続をサポートします。今後、DingTalk、Discord、WhatsApp、Xiaoai 音響をサポートする予定です。レート制限、ホワイトリスト、キーワードフィルタリング、Baidu コンテンツ監査をサポートします。
|
||||||
3. **エージェント**。一部のエージェント機能をネイティブにサポートし、コードエグゼキューター、自然言語タスク、ウェブ検索などを提供します。[Dify プラットフォーム](https://astrbot.app/others/dify.html)と連携し、Dify スマートアシスタント、ナレッジベース、Dify ワークフローを簡単に接続できます。
|
3. **エージェント**。一部のエージェント機能をネイティブにサポートし、コードエグゼキューター、自然言語タスク、ウェブ検索などを提供します。[Dify プラットフォーム](https://dify.ai/)と連携し、Dify スマートアシスタント、ナレッジベース、Dify ワークフローを簡単に接続できます。
|
||||||
4. **プラグインの拡張**。深く最適化されたプラグインメカニズムを備え、[プラグインの開発](https://astrbot.app/dev/plugin.html)をサポートし、機能を拡張できます。複数のプラグインのインストールをサポートします。
|
4. **プラグインの拡張**。深く最適化されたプラグインメカニズムを備え、[プラグインの開発](https://astrbot.app/dev/plugin.html)をサポートし、機能を拡張できます。複数のプラグインのインストールをサポートします。
|
||||||
5. **ビジュアル管理パネル**。設定の視覚的な変更、プラグイン管理、ログの表示などをサポートし、設定の難易度を低減します。WebChat を統合し、パネル上で大規模モデルと対話できます。
|
5. **ビジュアル管理パネル**。設定の視覚的な変更、プラグイン管理、ログの表示などをサポートし、設定の難易度を低減します。WebChat を統合し、パネル上で大規模モデルと対話できます。
|
||||||
6. **高い安定性と高いモジュール性**。イベントバスとパイプラインに基づくアーキテクチャ設計により、高度にモジュール化され、低結合です。
|
6. **高い安定性と高いモジュール性**。イベントバスとパイプラインに基づくアーキテクチャ設計により、高度にモジュール化され、低結合です。
|
||||||
|
|
||||||
> [!TIP]
|
> [!TIP]
|
||||||
> 管理パネルのオンラインデモを体験する: [https://demo.astrbot.app/](https://demo.astrbot.app/)
|
> 管理パネルのオンラインデモを体験する: [https://demo.astrbot.app/](https://demo.astrbot.app/)
|
||||||
>
|
>
|
||||||
> ユーザー名: `astrbot`, パスワード: `astrbot`。LLM が設定されていないため、チャットページで大規模モデルを使用することはできません。(デモのログインパスワードを変更しないでください 😭)
|
> ユーザー名: `astrbot`, パスワード: `astrbot`。LLM が設定されていないため、チャットページで大規模モデルを使用することはできません。(デモのログインパスワードを変更しないでください 😭)
|
||||||
|
|
||||||
## ✨ 使用方法
|
## ✨ 使用方法
|
||||||
@@ -50,7 +50,7 @@ AstrBot は、疎結合、非同期、複数のメッセージプラットフォ
|
|||||||
|
|
||||||
#### Replit デプロイ
|
#### Replit デプロイ
|
||||||
|
|
||||||
[](https://repl.it/github/Soulter/AstrBot)
|
[](https://repl.it/github/AstrBotDevs/AstrBot)
|
||||||
|
|
||||||
#### CasaOS デプロイ
|
#### CasaOS デプロイ
|
||||||
|
|
||||||
@@ -136,11 +136,11 @@ _✨ 内蔵 Web Chat、オンラインでボットと対話 ✨_
|
|||||||
|
|
||||||
## ⭐ Star History
|
## ⭐ Star History
|
||||||
|
|
||||||
> [!TIP]
|
> [!TIP]
|
||||||
> このプロジェクトがあなたの生活や仕事に役立った場合、またはこのプロジェクトの将来の発展に関心がある場合は、プロジェクトに Star を付けてください。これはこのオープンソースプロジェクトを維持するためのモチベーションです <3
|
> このプロジェクトがあなたの生活や仕事に役立った場合、またはこのプロジェクトの将来の発展に関心がある場合は、プロジェクトに Star を付けてください。これはこのオープンソースプロジェクトを維持するためのモチベーションです <3
|
||||||
|
|
||||||
<div align="center">
|
<div align="center">
|
||||||
|
|
||||||
[](https://star-history.com/#soulter/astrbot&Date)
|
[](https://star-history.com/#soulter/astrbot&Date)
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
@@ -152,8 +152,7 @@ _✨ 内蔵 Web Chat、オンラインでボットと対話 ✨_
|
|||||||
## 免責事項
|
## 免責事項
|
||||||
|
|
||||||
1. このプロジェクトは `AGPL-v3` オープンソースライセンスの下で保護されています。
|
1. このプロジェクトは `AGPL-v3` オープンソースライセンスの下で保護されています。
|
||||||
2. WeChat(個人アカウント)のデプロイメントには [Gewechat](https://github.com/Devo919/Gewechat) サービスを利用しています。AstrBot は Gewechat との接続を保証するだけであり、アカウントのリスク管理に関しては、このプロジェクトの著者は一切の責任を負いません。
|
2. このプロジェクトを使用する際は、現地の法律および規制を遵守してください。
|
||||||
3. このプロジェクトを使用する際は、現地の法律および規制を遵守してください。
|
|
||||||
|
|
||||||
<!-- ## ✨ ATRI [ベータテスト]
|
<!-- ## ✨ ATRI [ベータテスト]
|
||||||
|
|
||||||
@@ -165,6 +164,4 @@ _✨ 内蔵 Web Chat、オンラインでボットと対話 ✨_
|
|||||||
4. TTS
|
4. TTS
|
||||||
-->
|
-->
|
||||||
|
|
||||||
|
|
||||||
_私は、高性能ですから!_
|
_私は、高性能ですから!_
|
||||||
|
|
||||||
|
|||||||
@@ -1,2 +1,3 @@
|
|||||||
from .core.log import LogManager
|
from .core.log import LogManager
|
||||||
logger = LogManager.GetLogger(log_name='astrbot')
|
|
||||||
|
logger = LogManager.GetLogger(log_name="astrbot")
|
||||||
|
|||||||
@@ -1,13 +1,19 @@
|
|||||||
from astrbot.core.config.astrbot_config import AstrBotConfig
|
|
||||||
from astrbot import logger
|
from astrbot import logger
|
||||||
from astrbot.core import html_renderer
|
from astrbot.core import html_renderer, sp
|
||||||
from astrbot.core import sp
|
from astrbot.core.agent.tool import FunctionTool, ToolSet
|
||||||
|
from astrbot.core.agent.tool_executor import BaseFunctionToolExecutor
|
||||||
|
from astrbot.core.config.astrbot_config import AstrBotConfig
|
||||||
|
from astrbot.core.star.register import register_agent as agent
|
||||||
from astrbot.core.star.register import register_llm_tool as llm_tool
|
from astrbot.core.star.register import register_llm_tool as llm_tool
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"AstrBotConfig",
|
"AstrBotConfig",
|
||||||
"logger",
|
"BaseFunctionToolExecutor",
|
||||||
|
"FunctionTool",
|
||||||
|
"ToolSet",
|
||||||
|
"agent",
|
||||||
"html_renderer",
|
"html_renderer",
|
||||||
"llm_tool",
|
"llm_tool",
|
||||||
"sp"
|
"logger",
|
||||||
]
|
"sp",
|
||||||
|
]
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
|
|
||||||
from astrbot.core.config.astrbot_config import AstrBotConfig
|
from astrbot.core.config.astrbot_config import AstrBotConfig
|
||||||
from astrbot import logger
|
from astrbot import logger
|
||||||
from astrbot.core import html_renderer
|
from astrbot.core import html_renderer
|
||||||
@@ -6,8 +5,11 @@ from astrbot.core.star.register import register_llm_tool as llm_tool
|
|||||||
|
|
||||||
# event
|
# event
|
||||||
from astrbot.core.message.message_event_result import (
|
from astrbot.core.message.message_event_result import (
|
||||||
MessageEventResult, MessageChain, CommandResult, EventResultType
|
MessageEventResult,
|
||||||
)
|
MessageChain,
|
||||||
|
CommandResult,
|
||||||
|
EventResultType,
|
||||||
|
)
|
||||||
from astrbot.core.platform import AstrMessageEvent
|
from astrbot.core.platform import AstrMessageEvent
|
||||||
|
|
||||||
# star register
|
# star register
|
||||||
@@ -18,10 +20,16 @@ from astrbot.core.star.register import (
|
|||||||
register_regex as regex,
|
register_regex as regex,
|
||||||
register_platform_adapter_type as platform_adapter_type,
|
register_platform_adapter_type as platform_adapter_type,
|
||||||
)
|
)
|
||||||
from astrbot.core.star.filter.event_message_type import EventMessageTypeFilter, EventMessageType
|
from astrbot.core.star.filter.event_message_type import (
|
||||||
from astrbot.core.star.filter.platform_adapter_type import PlatformAdapterTypeFilter, PlatformAdapterType
|
EventMessageTypeFilter,
|
||||||
|
EventMessageType,
|
||||||
|
)
|
||||||
|
from astrbot.core.star.filter.platform_adapter_type import (
|
||||||
|
PlatformAdapterTypeFilter,
|
||||||
|
PlatformAdapterType,
|
||||||
|
)
|
||||||
from astrbot.core.star.register import (
|
from astrbot.core.star.register import (
|
||||||
register_star as register # 注册插件(Star)
|
register_star as register, # 注册插件(Star)
|
||||||
)
|
)
|
||||||
from astrbot.core.star import Context, Star
|
from astrbot.core.star import Context, Star
|
||||||
from astrbot.core.star.config import *
|
from astrbot.core.star.config import *
|
||||||
@@ -32,9 +40,14 @@ from astrbot.core.provider import Provider, Personality, ProviderMetaData
|
|||||||
|
|
||||||
# platform
|
# platform
|
||||||
from astrbot.core.platform import (
|
from astrbot.core.platform import (
|
||||||
AstrMessageEvent, Platform, AstrBotMessage, MessageMember, MessageType, PlatformMetadata
|
AstrMessageEvent,
|
||||||
|
Platform,
|
||||||
|
AstrBotMessage,
|
||||||
|
MessageMember,
|
||||||
|
MessageType,
|
||||||
|
PlatformMetadata,
|
||||||
)
|
)
|
||||||
|
|
||||||
from astrbot.core.platform.register import register_platform_adapter
|
from astrbot.core.platform.register import register_platform_adapter
|
||||||
|
|
||||||
from .message_components import *
|
from .message_components import *
|
||||||
@@ -1,18 +1,17 @@
|
|||||||
from astrbot.core.message.message_event_result import (
|
from astrbot.core.message.message_event_result import (
|
||||||
MessageEventResult,
|
|
||||||
MessageChain,
|
|
||||||
CommandResult,
|
CommandResult,
|
||||||
EventResultType,
|
EventResultType,
|
||||||
|
MessageChain,
|
||||||
|
MessageEventResult,
|
||||||
ResultContentType,
|
ResultContentType,
|
||||||
)
|
)
|
||||||
|
|
||||||
from astrbot.core.platform import AstrMessageEvent
|
from astrbot.core.platform import AstrMessageEvent
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"MessageEventResult",
|
"AstrMessageEvent",
|
||||||
"MessageChain",
|
|
||||||
"CommandResult",
|
"CommandResult",
|
||||||
"EventResultType",
|
"EventResultType",
|
||||||
"AstrMessageEvent",
|
"MessageChain",
|
||||||
|
"MessageEventResult",
|
||||||
"ResultContentType",
|
"ResultContentType",
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,41 +1,52 @@
|
|||||||
from astrbot.core.star.register import (
|
|
||||||
register_command as command,
|
|
||||||
register_command_group as command_group,
|
|
||||||
register_event_message_type as event_message_type,
|
|
||||||
register_regex as regex,
|
|
||||||
register_platform_adapter_type as platform_adapter_type,
|
|
||||||
register_permission_type as permission_type,
|
|
||||||
register_custom_filter as custom_filter,
|
|
||||||
register_on_llm_request as on_llm_request,
|
|
||||||
register_on_llm_response as on_llm_response,
|
|
||||||
register_llm_tool as llm_tool,
|
|
||||||
register_on_decorating_result as on_decorating_result,
|
|
||||||
register_after_message_sent as after_message_sent
|
|
||||||
)
|
|
||||||
|
|
||||||
from astrbot.core.star.filter.event_message_type import EventMessageTypeFilter, EventMessageType
|
|
||||||
from astrbot.core.star.filter.platform_adapter_type import PlatformAdapterTypeFilter, PlatformAdapterType
|
|
||||||
from astrbot.core.star.filter.permission import PermissionTypeFilter, PermissionType
|
|
||||||
from astrbot.core.star.filter.custom_filter import CustomFilter
|
from astrbot.core.star.filter.custom_filter import CustomFilter
|
||||||
|
from astrbot.core.star.filter.event_message_type import (
|
||||||
|
EventMessageType,
|
||||||
|
EventMessageTypeFilter,
|
||||||
|
)
|
||||||
|
from astrbot.core.star.filter.permission import PermissionType, PermissionTypeFilter
|
||||||
|
from astrbot.core.star.filter.platform_adapter_type import (
|
||||||
|
PlatformAdapterType,
|
||||||
|
PlatformAdapterTypeFilter,
|
||||||
|
)
|
||||||
|
from astrbot.core.star.register import register_after_message_sent as after_message_sent
|
||||||
|
from astrbot.core.star.register import register_command as command
|
||||||
|
from astrbot.core.star.register import register_command_group as command_group
|
||||||
|
from astrbot.core.star.register import register_custom_filter as custom_filter
|
||||||
|
from astrbot.core.star.register import register_event_message_type as event_message_type
|
||||||
|
from astrbot.core.star.register import register_llm_tool as llm_tool
|
||||||
|
from astrbot.core.star.register import register_on_astrbot_loaded as on_astrbot_loaded
|
||||||
|
from astrbot.core.star.register import (
|
||||||
|
register_on_decorating_result as on_decorating_result,
|
||||||
|
)
|
||||||
|
from astrbot.core.star.register import register_on_llm_request as on_llm_request
|
||||||
|
from astrbot.core.star.register import register_on_llm_response as on_llm_response
|
||||||
|
from astrbot.core.star.register import register_on_platform_loaded as on_platform_loaded
|
||||||
|
from astrbot.core.star.register import register_permission_type as permission_type
|
||||||
|
from astrbot.core.star.register import (
|
||||||
|
register_platform_adapter_type as platform_adapter_type,
|
||||||
|
)
|
||||||
|
from astrbot.core.star.register import register_regex as regex
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
'command',
|
"CustomFilter",
|
||||||
'command_group',
|
"EventMessageType",
|
||||||
'event_message_type',
|
"EventMessageTypeFilter",
|
||||||
'regex',
|
"PermissionType",
|
||||||
'platform_adapter_type',
|
"PermissionTypeFilter",
|
||||||
'permission_type',
|
"PlatformAdapterType",
|
||||||
'EventMessageTypeFilter',
|
"PlatformAdapterTypeFilter",
|
||||||
'EventMessageType',
|
"after_message_sent",
|
||||||
'PlatformAdapterTypeFilter',
|
"command",
|
||||||
'PlatformAdapterType',
|
"command_group",
|
||||||
'PermissionTypeFilter',
|
"custom_filter",
|
||||||
'CustomFilter',
|
"event_message_type",
|
||||||
'custom_filter',
|
"llm_tool",
|
||||||
'PermissionType',
|
"on_astrbot_loaded",
|
||||||
'on_llm_request',
|
"on_decorating_result",
|
||||||
'llm_tool',
|
"on_llm_request",
|
||||||
'on_decorating_result',
|
"on_llm_response",
|
||||||
'after_message_sent',
|
"on_platform_loaded",
|
||||||
'on_llm_response'
|
"permission_type",
|
||||||
]
|
"platform_adapter_type",
|
||||||
|
"regex",
|
||||||
|
]
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
from astrbot.core.message.components import *
|
from astrbot.core.message.components import *
|
||||||
|
|||||||
@@ -1,6 +1,22 @@
|
|||||||
|
from astrbot.core.message.components import *
|
||||||
from astrbot.core.platform import (
|
from astrbot.core.platform import (
|
||||||
AstrMessageEvent, Platform, AstrBotMessage, MessageMember, MessageType, PlatformMetadata
|
AstrBotMessage,
|
||||||
|
AstrMessageEvent,
|
||||||
|
Group,
|
||||||
|
MessageMember,
|
||||||
|
MessageType,
|
||||||
|
Platform,
|
||||||
|
PlatformMetadata,
|
||||||
)
|
)
|
||||||
|
|
||||||
from astrbot.core.platform.register import register_platform_adapter
|
from astrbot.core.platform.register import register_platform_adapter
|
||||||
from astrbot.core.message.components import *
|
|
||||||
|
__all__ = [
|
||||||
|
"AstrBotMessage",
|
||||||
|
"AstrMessageEvent",
|
||||||
|
"Group",
|
||||||
|
"MessageMember",
|
||||||
|
"MessageType",
|
||||||
|
"Platform",
|
||||||
|
"PlatformMetadata",
|
||||||
|
"register_platform_adapter",
|
||||||
|
]
|
||||||
|
|||||||
@@ -1,2 +1,17 @@
|
|||||||
from astrbot.core.provider import Provider, STTProvider, Personality
|
from astrbot.core.provider import Personality, Provider, STTProvider
|
||||||
from astrbot.core.provider.entites import ProviderRequest, ProviderType, ProviderMetaData, LLMResponse
|
from astrbot.core.provider.entities import (
|
||||||
|
LLMResponse,
|
||||||
|
ProviderMetaData,
|
||||||
|
ProviderRequest,
|
||||||
|
ProviderType,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"LLMResponse",
|
||||||
|
"Personality",
|
||||||
|
"Provider",
|
||||||
|
"ProviderMetaData",
|
||||||
|
"ProviderRequest",
|
||||||
|
"ProviderType",
|
||||||
|
"STTProvider",
|
||||||
|
]
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
|
from astrbot.core.star import Context, Star, StarTools
|
||||||
|
from astrbot.core.star.config import *
|
||||||
from astrbot.core.star.register import (
|
from astrbot.core.star.register import (
|
||||||
register_star as register # 注册插件(Star)
|
register_star as register, # 注册插件(Star)
|
||||||
)
|
)
|
||||||
|
|
||||||
from astrbot.core.star import Context, Star
|
__all__ = ["Context", "Star", "StarTools", "register"]
|
||||||
from astrbot.core.star.config import *
|
|
||||||
|
|||||||
7
astrbot/api/util/__init__.py
Normal file
7
astrbot/api/util/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from astrbot.core.utils.session_waiter import (
|
||||||
|
SessionController,
|
||||||
|
SessionWaiter,
|
||||||
|
session_waiter,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = ["SessionController", "SessionWaiter", "session_waiter"]
|
||||||
1
astrbot/cli/__init__.py
Normal file
1
astrbot/cli/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
__version__ = "3.5.23"
|
||||||
59
astrbot/cli/__main__.py
Normal file
59
astrbot/cli/__main__.py
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
"""AstrBot CLI入口"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from . import __version__
|
||||||
|
from .commands import conf, init, plug, run
|
||||||
|
|
||||||
|
logo_tmpl = r"""
|
||||||
|
___ _______.___________..______ .______ ______ .___________.
|
||||||
|
/ \ / | || _ \ | _ \ / __ \ | |
|
||||||
|
/ ^ \ | (----`---| |----`| |_) | | |_) | | | | | `---| |----`
|
||||||
|
/ /_\ \ \ \ | | | / | _ < | | | | | |
|
||||||
|
/ _____ \ .----) | | | | |\ \----.| |_) | | `--' | | |
|
||||||
|
/__/ \__\ |_______/ |__| | _| `._____||______/ \______/ |__|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
@click.version_option(__version__, prog_name="AstrBot")
|
||||||
|
def cli() -> None:
|
||||||
|
"""The AstrBot CLI"""
|
||||||
|
click.echo(logo_tmpl)
|
||||||
|
click.echo("Welcome to AstrBot CLI!")
|
||||||
|
click.echo(f"AstrBot CLI version: {__version__}")
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@click.argument("command_name", required=False, type=str)
|
||||||
|
def help(command_name: str | None) -> None:
|
||||||
|
"""显示命令的帮助信息
|
||||||
|
|
||||||
|
如果提供了 COMMAND_NAME,则显示该命令的详细帮助信息。
|
||||||
|
否则,显示通用帮助信息。
|
||||||
|
"""
|
||||||
|
ctx = click.get_current_context()
|
||||||
|
if command_name:
|
||||||
|
# 查找指定命令
|
||||||
|
command = cli.get_command(ctx, command_name)
|
||||||
|
if command:
|
||||||
|
# 显示特定命令的帮助信息
|
||||||
|
click.echo(command.get_help(ctx))
|
||||||
|
else:
|
||||||
|
click.echo(f"Unknown command: {command_name}")
|
||||||
|
sys.exit(1)
|
||||||
|
else:
|
||||||
|
# 显示通用帮助信息
|
||||||
|
click.echo(cli.get_help(ctx))
|
||||||
|
|
||||||
|
|
||||||
|
cli.add_command(init)
|
||||||
|
cli.add_command(run)
|
||||||
|
cli.add_command(help)
|
||||||
|
cli.add_command(plug)
|
||||||
|
cli.add_command(conf)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
cli()
|
||||||
6
astrbot/cli/commands/__init__.py
Normal file
6
astrbot/cli/commands/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
from .cmd_conf import conf
|
||||||
|
from .cmd_init import init
|
||||||
|
from .cmd_plug import plug
|
||||||
|
from .cmd_run import run
|
||||||
|
|
||||||
|
__all__ = ["conf", "init", "plug", "run"]
|
||||||
209
astrbot/cli/commands/cmd_conf.py
Normal file
209
astrbot/cli/commands/cmd_conf.py
Normal file
@@ -0,0 +1,209 @@
|
|||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import zoneinfo
|
||||||
|
from collections.abc import Callable
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from ..utils import check_astrbot_root, get_astrbot_root
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_log_level(value: str) -> str:
|
||||||
|
"""验证日志级别"""
|
||||||
|
value = value.upper()
|
||||||
|
if value not in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]:
|
||||||
|
raise click.ClickException(
|
||||||
|
"日志级别必须是 DEBUG/INFO/WARNING/ERROR/CRITICAL 之一",
|
||||||
|
)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_dashboard_port(value: str) -> int:
|
||||||
|
"""验证 Dashboard 端口"""
|
||||||
|
try:
|
||||||
|
port = int(value)
|
||||||
|
if port < 1 or port > 65535:
|
||||||
|
raise click.ClickException("端口必须在 1-65535 范围内")
|
||||||
|
return port
|
||||||
|
except ValueError:
|
||||||
|
raise click.ClickException("端口必须是数字")
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_dashboard_username(value: str) -> str:
|
||||||
|
"""验证 Dashboard 用户名"""
|
||||||
|
if not value:
|
||||||
|
raise click.ClickException("用户名不能为空")
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_dashboard_password(value: str) -> str:
|
||||||
|
"""验证 Dashboard 密码"""
|
||||||
|
if not value:
|
||||||
|
raise click.ClickException("密码不能为空")
|
||||||
|
return hashlib.md5(value.encode()).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_timezone(value: str) -> str:
|
||||||
|
"""验证时区"""
|
||||||
|
try:
|
||||||
|
zoneinfo.ZoneInfo(value)
|
||||||
|
except Exception:
|
||||||
|
raise click.ClickException(f"无效的时区: {value},请使用有效的IANA时区名称")
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_callback_api_base(value: str) -> str:
|
||||||
|
"""验证回调接口基址"""
|
||||||
|
if not value.startswith("http://") and not value.startswith("https://"):
|
||||||
|
raise click.ClickException("回调接口基址必须以 http:// 或 https:// 开头")
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
# 可通过CLI设置的配置项,配置键到验证器函数的映射
|
||||||
|
CONFIG_VALIDATORS: dict[str, Callable[[str], Any]] = {
|
||||||
|
"timezone": _validate_timezone,
|
||||||
|
"log_level": _validate_log_level,
|
||||||
|
"dashboard.port": _validate_dashboard_port,
|
||||||
|
"dashboard.username": _validate_dashboard_username,
|
||||||
|
"dashboard.password": _validate_dashboard_password,
|
||||||
|
"callback_api_base": _validate_callback_api_base,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _load_config() -> dict[str, Any]:
|
||||||
|
"""加载或初始化配置文件"""
|
||||||
|
root = get_astrbot_root()
|
||||||
|
if not check_astrbot_root(root):
|
||||||
|
raise click.ClickException(
|
||||||
|
f"{root}不是有效的 AstrBot 根目录,如需初始化请使用 astrbot init",
|
||||||
|
)
|
||||||
|
|
||||||
|
config_path = root / "data" / "cmd_config.json"
|
||||||
|
if not config_path.exists():
|
||||||
|
from astrbot.core.config.default import DEFAULT_CONFIG
|
||||||
|
|
||||||
|
config_path.write_text(
|
||||||
|
json.dumps(DEFAULT_CONFIG, ensure_ascii=False, indent=2),
|
||||||
|
encoding="utf-8-sig",
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return json.loads(config_path.read_text(encoding="utf-8-sig"))
|
||||||
|
except json.JSONDecodeError as e:
|
||||||
|
raise click.ClickException(f"配置文件解析失败: {e!s}")
|
||||||
|
|
||||||
|
|
||||||
|
def _save_config(config: dict[str, Any]) -> None:
|
||||||
|
"""保存配置文件"""
|
||||||
|
config_path = get_astrbot_root() / "data" / "cmd_config.json"
|
||||||
|
|
||||||
|
config_path.write_text(
|
||||||
|
json.dumps(config, ensure_ascii=False, indent=2),
|
||||||
|
encoding="utf-8-sig",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _set_nested_item(obj: dict[str, Any], path: str, value: Any) -> None:
|
||||||
|
"""设置嵌套字典中的值"""
|
||||||
|
parts = path.split(".")
|
||||||
|
for part in parts[:-1]:
|
||||||
|
if part not in obj:
|
||||||
|
obj[part] = {}
|
||||||
|
elif not isinstance(obj[part], dict):
|
||||||
|
raise click.ClickException(
|
||||||
|
f"配置路径冲突: {'.'.join(parts[: parts.index(part) + 1])} 不是字典",
|
||||||
|
)
|
||||||
|
obj = obj[part]
|
||||||
|
obj[parts[-1]] = value
|
||||||
|
|
||||||
|
|
||||||
|
def _get_nested_item(obj: dict[str, Any], path: str) -> Any:
|
||||||
|
"""获取嵌套字典中的值"""
|
||||||
|
parts = path.split(".")
|
||||||
|
for part in parts:
|
||||||
|
obj = obj[part]
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
@click.group(name="conf")
|
||||||
|
def conf():
|
||||||
|
"""配置管理命令
|
||||||
|
|
||||||
|
支持的配置项:
|
||||||
|
|
||||||
|
- timezone: 时区设置 (例如: Asia/Shanghai)
|
||||||
|
|
||||||
|
- log_level: 日志级别 (DEBUG/INFO/WARNING/ERROR/CRITICAL)
|
||||||
|
|
||||||
|
- dashboard.port: Dashboard 端口
|
||||||
|
|
||||||
|
- dashboard.username: Dashboard 用户名
|
||||||
|
|
||||||
|
- dashboard.password: Dashboard 密码
|
||||||
|
|
||||||
|
- callback_api_base: 回调接口基址
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@conf.command(name="set")
|
||||||
|
@click.argument("key")
|
||||||
|
@click.argument("value")
|
||||||
|
def set_config(key: str, value: str):
|
||||||
|
"""设置配置项的值"""
|
||||||
|
if key not in CONFIG_VALIDATORS:
|
||||||
|
raise click.ClickException(f"不支持的配置项: {key}")
|
||||||
|
|
||||||
|
config = _load_config()
|
||||||
|
|
||||||
|
try:
|
||||||
|
old_value = _get_nested_item(config, key)
|
||||||
|
validated_value = CONFIG_VALIDATORS[key](value)
|
||||||
|
_set_nested_item(config, key, validated_value)
|
||||||
|
_save_config(config)
|
||||||
|
|
||||||
|
click.echo(f"配置已更新: {key}")
|
||||||
|
if key == "dashboard.password":
|
||||||
|
click.echo(" 原值: ********")
|
||||||
|
click.echo(" 新值: ********")
|
||||||
|
else:
|
||||||
|
click.echo(f" 原值: {old_value}")
|
||||||
|
click.echo(f" 新值: {validated_value}")
|
||||||
|
|
||||||
|
except KeyError:
|
||||||
|
raise click.ClickException(f"未知的配置项: {key}")
|
||||||
|
except Exception as e:
|
||||||
|
raise click.UsageError(f"设置配置失败: {e!s}")
|
||||||
|
|
||||||
|
|
||||||
|
@conf.command(name="get")
|
||||||
|
@click.argument("key", required=False)
|
||||||
|
def get_config(key: str | None = None):
|
||||||
|
"""获取配置项的值,不提供key则显示所有可配置项"""
|
||||||
|
config = _load_config()
|
||||||
|
|
||||||
|
if key:
|
||||||
|
if key not in CONFIG_VALIDATORS:
|
||||||
|
raise click.ClickException(f"不支持的配置项: {key}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
value = _get_nested_item(config, key)
|
||||||
|
if key == "dashboard.password":
|
||||||
|
value = "********"
|
||||||
|
click.echo(f"{key}: {value}")
|
||||||
|
except KeyError:
|
||||||
|
raise click.ClickException(f"未知的配置项: {key}")
|
||||||
|
except Exception as e:
|
||||||
|
raise click.UsageError(f"获取配置失败: {e!s}")
|
||||||
|
else:
|
||||||
|
click.echo("当前配置:")
|
||||||
|
for key in CONFIG_VALIDATORS:
|
||||||
|
try:
|
||||||
|
value = (
|
||||||
|
"********"
|
||||||
|
if key == "dashboard.password"
|
||||||
|
else _get_nested_item(config, key)
|
||||||
|
)
|
||||||
|
click.echo(f" {key}: {value}")
|
||||||
|
except (KeyError, TypeError):
|
||||||
|
pass
|
||||||
56
astrbot/cli/commands/cmd_init.py
Normal file
56
astrbot/cli/commands/cmd_init.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
import asyncio
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
from filelock import FileLock, Timeout
|
||||||
|
|
||||||
|
from ..utils import check_dashboard, get_astrbot_root
|
||||||
|
|
||||||
|
|
||||||
|
async def initialize_astrbot(astrbot_root: Path) -> None:
|
||||||
|
"""执行 AstrBot 初始化逻辑"""
|
||||||
|
dot_astrbot = astrbot_root / ".astrbot"
|
||||||
|
|
||||||
|
if not dot_astrbot.exists():
|
||||||
|
click.echo(f"Current Directory: {astrbot_root}")
|
||||||
|
click.echo(
|
||||||
|
"如果你确认这是 Astrbot root directory, 你需要在当前目录下创建一个 .astrbot 文件标记该目录为 AstrBot 的数据目录。",
|
||||||
|
)
|
||||||
|
if click.confirm(
|
||||||
|
f"请检查当前目录是否正确,确认正确请回车: {astrbot_root}",
|
||||||
|
default=True,
|
||||||
|
abort=True,
|
||||||
|
):
|
||||||
|
dot_astrbot.touch()
|
||||||
|
click.echo(f"Created {dot_astrbot}")
|
||||||
|
|
||||||
|
paths = {
|
||||||
|
"data": astrbot_root / "data",
|
||||||
|
"config": astrbot_root / "data" / "config",
|
||||||
|
"plugins": astrbot_root / "data" / "plugins",
|
||||||
|
"temp": astrbot_root / "data" / "temp",
|
||||||
|
}
|
||||||
|
|
||||||
|
for name, path in paths.items():
|
||||||
|
path.mkdir(parents=True, exist_ok=True)
|
||||||
|
click.echo(f"{'Created' if not path.exists() else 'Directory exists'}: {path}")
|
||||||
|
|
||||||
|
await check_dashboard(astrbot_root / "data")
|
||||||
|
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
def init() -> None:
|
||||||
|
"""初始化 AstrBot"""
|
||||||
|
click.echo("Initializing AstrBot...")
|
||||||
|
astrbot_root = get_astrbot_root()
|
||||||
|
lock_file = astrbot_root / "astrbot.lock"
|
||||||
|
lock = FileLock(lock_file, timeout=5)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with lock.acquire():
|
||||||
|
asyncio.run(initialize_astrbot(astrbot_root))
|
||||||
|
except Timeout:
|
||||||
|
raise click.ClickException("无法获取锁文件,请检查是否有其他实例正在运行")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
raise click.ClickException(f"初始化失败: {e!s}")
|
||||||
245
astrbot/cli/commands/cmd_plug.py
Normal file
245
astrbot/cli/commands/cmd_plug.py
Normal file
@@ -0,0 +1,245 @@
|
|||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from ..utils import (
|
||||||
|
PluginStatus,
|
||||||
|
build_plug_list,
|
||||||
|
check_astrbot_root,
|
||||||
|
get_astrbot_root,
|
||||||
|
get_git_repo,
|
||||||
|
manage_plugin,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def plug():
|
||||||
|
"""插件管理"""
|
||||||
|
|
||||||
|
|
||||||
|
def _get_data_path() -> Path:
|
||||||
|
base = get_astrbot_root()
|
||||||
|
if not check_astrbot_root(base):
|
||||||
|
raise click.ClickException(
|
||||||
|
f"{base}不是有效的 AstrBot 根目录,如需初始化请使用 astrbot init",
|
||||||
|
)
|
||||||
|
return (base / "data").resolve()
|
||||||
|
|
||||||
|
|
||||||
|
def display_plugins(plugins, title=None, color=None):
|
||||||
|
if title:
|
||||||
|
click.echo(click.style(title, fg=color, bold=True))
|
||||||
|
|
||||||
|
click.echo(f"{'名称':<20} {'版本':<10} {'状态':<10} {'作者':<15} {'描述':<30}")
|
||||||
|
click.echo("-" * 85)
|
||||||
|
|
||||||
|
for p in plugins:
|
||||||
|
desc = p["desc"][:30] + ("..." if len(p["desc"]) > 30 else "")
|
||||||
|
click.echo(
|
||||||
|
f"{p['name']:<20} {p['version']:<10} {p['status']:<10} "
|
||||||
|
f"{p['author']:<15} {desc:<30}",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@plug.command()
|
||||||
|
@click.argument("name")
|
||||||
|
def new(name: str):
|
||||||
|
"""创建新插件"""
|
||||||
|
base_path = _get_data_path()
|
||||||
|
plug_path = base_path / "plugins" / name
|
||||||
|
|
||||||
|
if plug_path.exists():
|
||||||
|
raise click.ClickException(f"插件 {name} 已存在")
|
||||||
|
|
||||||
|
author = click.prompt("请输入插件作者", type=str)
|
||||||
|
desc = click.prompt("请输入插件描述", type=str)
|
||||||
|
version = click.prompt("请输入插件版本", type=str)
|
||||||
|
if not re.match(r"^\d+\.\d+(\.\d+)?$", version.lower().lstrip("v")):
|
||||||
|
raise click.ClickException("版本号必须为 x.y 或 x.y.z 格式")
|
||||||
|
repo = click.prompt("请输入插件仓库:", type=str)
|
||||||
|
if not repo.startswith("http"):
|
||||||
|
raise click.ClickException("仓库地址必须以 http 开头")
|
||||||
|
|
||||||
|
click.echo("下载插件模板...")
|
||||||
|
get_git_repo(
|
||||||
|
"https://github.com/Soulter/helloworld",
|
||||||
|
plug_path,
|
||||||
|
)
|
||||||
|
|
||||||
|
click.echo("重写插件信息...")
|
||||||
|
# 重写 metadata.yaml
|
||||||
|
with open(plug_path / "metadata.yaml", "w", encoding="utf-8") as f:
|
||||||
|
f.write(
|
||||||
|
f"name: {name}\n"
|
||||||
|
f"desc: {desc}\n"
|
||||||
|
f"version: {version}\n"
|
||||||
|
f"author: {author}\n"
|
||||||
|
f"repo: {repo}\n",
|
||||||
|
)
|
||||||
|
|
||||||
|
# 重写 README.md
|
||||||
|
with open(plug_path / "README.md", "w", encoding="utf-8") as f:
|
||||||
|
f.write(f"# {name}\n\n{desc}\n\n# 支持\n\n[帮助文档](https://astrbot.app)\n")
|
||||||
|
|
||||||
|
# 重写 main.py
|
||||||
|
with open(plug_path / "main.py", encoding="utf-8") as f:
|
||||||
|
content = f.read()
|
||||||
|
|
||||||
|
new_content = content.replace(
|
||||||
|
'@register("helloworld", "YourName", "一个简单的 Hello World 插件", "1.0.0")',
|
||||||
|
f'@register("{name}", "{author}", "{desc}", "{version}")',
|
||||||
|
)
|
||||||
|
|
||||||
|
with open(plug_path / "main.py", "w", encoding="utf-8") as f:
|
||||||
|
f.write(new_content)
|
||||||
|
|
||||||
|
click.echo(f"插件 {name} 创建成功")
|
||||||
|
|
||||||
|
|
||||||
|
@plug.command()
|
||||||
|
@click.option("--all", "-a", is_flag=True, help="列出未安装的插件")
|
||||||
|
def list(all: bool):
|
||||||
|
"""列出插件"""
|
||||||
|
base_path = _get_data_path()
|
||||||
|
plugins = build_plug_list(base_path / "plugins")
|
||||||
|
|
||||||
|
# 未发布的插件
|
||||||
|
not_published_plugins = [
|
||||||
|
p for p in plugins if p["status"] == PluginStatus.NOT_PUBLISHED
|
||||||
|
]
|
||||||
|
if not_published_plugins:
|
||||||
|
display_plugins(not_published_plugins, "未发布的插件", "red")
|
||||||
|
|
||||||
|
# 需要更新的插件
|
||||||
|
need_update_plugins = [
|
||||||
|
p for p in plugins if p["status"] == PluginStatus.NEED_UPDATE
|
||||||
|
]
|
||||||
|
if need_update_plugins:
|
||||||
|
display_plugins(need_update_plugins, "需要更新的插件", "yellow")
|
||||||
|
|
||||||
|
# 已安装的插件
|
||||||
|
installed_plugins = [p for p in plugins if p["status"] == PluginStatus.INSTALLED]
|
||||||
|
if installed_plugins:
|
||||||
|
display_plugins(installed_plugins, "已安装的插件", "green")
|
||||||
|
|
||||||
|
# 未安装的插件
|
||||||
|
not_installed_plugins = [
|
||||||
|
p for p in plugins if p["status"] == PluginStatus.NOT_INSTALLED
|
||||||
|
]
|
||||||
|
if not_installed_plugins and all:
|
||||||
|
display_plugins(not_installed_plugins, "未安装的插件", "blue")
|
||||||
|
|
||||||
|
if (
|
||||||
|
not any([not_published_plugins, need_update_plugins, installed_plugins])
|
||||||
|
and not all
|
||||||
|
):
|
||||||
|
click.echo("未安装任何插件")
|
||||||
|
|
||||||
|
|
||||||
|
@plug.command()
|
||||||
|
@click.argument("name")
|
||||||
|
@click.option("--proxy", help="代理服务器地址")
|
||||||
|
def install(name: str, proxy: str | None):
|
||||||
|
"""安装插件"""
|
||||||
|
base_path = _get_data_path()
|
||||||
|
plug_path = base_path / "plugins"
|
||||||
|
plugins = build_plug_list(base_path / "plugins")
|
||||||
|
|
||||||
|
plugin = next(
|
||||||
|
(
|
||||||
|
p
|
||||||
|
for p in plugins
|
||||||
|
if p["name"] == name and p["status"] == PluginStatus.NOT_INSTALLED
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
|
||||||
|
if not plugin:
|
||||||
|
raise click.ClickException(f"未找到可安装的插件 {name},可能是不存在或已安装")
|
||||||
|
|
||||||
|
manage_plugin(plugin, plug_path, is_update=False, proxy=proxy)
|
||||||
|
|
||||||
|
|
||||||
|
@plug.command()
|
||||||
|
@click.argument("name")
|
||||||
|
def remove(name: str):
|
||||||
|
"""卸载插件"""
|
||||||
|
base_path = _get_data_path()
|
||||||
|
plugins = build_plug_list(base_path / "plugins")
|
||||||
|
plugin = next((p for p in plugins if p["name"] == name), None)
|
||||||
|
|
||||||
|
if not plugin or not plugin.get("local_path"):
|
||||||
|
raise click.ClickException(f"插件 {name} 不存在或未安装")
|
||||||
|
|
||||||
|
plugin_path = plugin["local_path"]
|
||||||
|
|
||||||
|
click.confirm(f"确定要卸载插件 {name} 吗?", default=False, abort=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
shutil.rmtree(plugin_path)
|
||||||
|
click.echo(f"插件 {name} 已卸载")
|
||||||
|
except Exception as e:
|
||||||
|
raise click.ClickException(f"卸载插件 {name} 失败: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@plug.command()
|
||||||
|
@click.argument("name", required=False)
|
||||||
|
@click.option("--proxy", help="Github代理地址")
|
||||||
|
def update(name: str, proxy: str | None):
|
||||||
|
"""更新插件"""
|
||||||
|
base_path = _get_data_path()
|
||||||
|
plug_path = base_path / "plugins"
|
||||||
|
plugins = build_plug_list(base_path / "plugins")
|
||||||
|
|
||||||
|
if name:
|
||||||
|
plugin = next(
|
||||||
|
(
|
||||||
|
p
|
||||||
|
for p in plugins
|
||||||
|
if p["name"] == name and p["status"] == PluginStatus.NEED_UPDATE
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
|
||||||
|
if not plugin:
|
||||||
|
raise click.ClickException(f"插件 {name} 不需要更新或无法更新")
|
||||||
|
|
||||||
|
manage_plugin(plugin, plug_path, is_update=True, proxy=proxy)
|
||||||
|
else:
|
||||||
|
need_update_plugins = [
|
||||||
|
p for p in plugins if p["status"] == PluginStatus.NEED_UPDATE
|
||||||
|
]
|
||||||
|
|
||||||
|
if not need_update_plugins:
|
||||||
|
click.echo("没有需要更新的插件")
|
||||||
|
return
|
||||||
|
|
||||||
|
click.echo(f"发现 {len(need_update_plugins)} 个插件需要更新")
|
||||||
|
for plugin in need_update_plugins:
|
||||||
|
plugin_name = plugin["name"]
|
||||||
|
click.echo(f"正在更新插件 {plugin_name}...")
|
||||||
|
manage_plugin(plugin, plug_path, is_update=True, proxy=proxy)
|
||||||
|
|
||||||
|
|
||||||
|
@plug.command()
|
||||||
|
@click.argument("query")
|
||||||
|
def search(query: str):
|
||||||
|
"""搜索插件"""
|
||||||
|
base_path = _get_data_path()
|
||||||
|
plugins = build_plug_list(base_path / "plugins")
|
||||||
|
|
||||||
|
matched_plugins = [
|
||||||
|
p
|
||||||
|
for p in plugins
|
||||||
|
if query.lower() in p["name"].lower()
|
||||||
|
or query.lower() in p["desc"].lower()
|
||||||
|
or query.lower() in p["author"].lower()
|
||||||
|
]
|
||||||
|
|
||||||
|
if not matched_plugins:
|
||||||
|
click.echo(f"未找到匹配 '{query}' 的插件")
|
||||||
|
return
|
||||||
|
|
||||||
|
display_plugins(matched_plugins, f"搜索结果: '{query}'", "cyan")
|
||||||
62
astrbot/cli/commands/cmd_run.py
Normal file
62
astrbot/cli/commands/cmd_run.py
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import traceback
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
from filelock import FileLock, Timeout
|
||||||
|
|
||||||
|
from ..utils import check_astrbot_root, check_dashboard, get_astrbot_root
|
||||||
|
|
||||||
|
|
||||||
|
async def run_astrbot(astrbot_root: Path):
|
||||||
|
"""运行 AstrBot"""
|
||||||
|
from astrbot.core import LogBroker, LogManager, db_helper, logger
|
||||||
|
from astrbot.core.initial_loader import InitialLoader
|
||||||
|
|
||||||
|
await check_dashboard(astrbot_root / "data")
|
||||||
|
|
||||||
|
log_broker = LogBroker()
|
||||||
|
LogManager.set_queue_handler(logger, log_broker)
|
||||||
|
db = db_helper
|
||||||
|
|
||||||
|
core_lifecycle = InitialLoader(db, log_broker)
|
||||||
|
|
||||||
|
await core_lifecycle.start()
|
||||||
|
|
||||||
|
|
||||||
|
@click.option("--reload", "-r", is_flag=True, help="插件自动重载")
|
||||||
|
@click.option("--port", "-p", help="Astrbot Dashboard端口", required=False, type=str)
|
||||||
|
@click.command()
|
||||||
|
def run(reload: bool, port: str) -> None:
|
||||||
|
"""运行 AstrBot"""
|
||||||
|
try:
|
||||||
|
os.environ["ASTRBOT_CLI"] = "1"
|
||||||
|
astrbot_root = get_astrbot_root()
|
||||||
|
|
||||||
|
if not check_astrbot_root(astrbot_root):
|
||||||
|
raise click.ClickException(
|
||||||
|
f"{astrbot_root}不是有效的 AstrBot 根目录,如需初始化请使用 astrbot init",
|
||||||
|
)
|
||||||
|
|
||||||
|
os.environ["ASTRBOT_ROOT"] = str(astrbot_root)
|
||||||
|
sys.path.insert(0, str(astrbot_root))
|
||||||
|
|
||||||
|
if port:
|
||||||
|
os.environ["DASHBOARD_PORT"] = port
|
||||||
|
|
||||||
|
if reload:
|
||||||
|
click.echo("启用插件自动重载")
|
||||||
|
os.environ["ASTRBOT_RELOAD"] = "1"
|
||||||
|
|
||||||
|
lock_file = astrbot_root / "astrbot.lock"
|
||||||
|
lock = FileLock(lock_file, timeout=5)
|
||||||
|
with lock.acquire():
|
||||||
|
asyncio.run(run_astrbot(astrbot_root))
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
click.echo("AstrBot 已关闭...")
|
||||||
|
except Timeout:
|
||||||
|
raise click.ClickException("无法获取锁文件,请检查是否有其他实例正在运行")
|
||||||
|
except Exception as e:
|
||||||
|
raise click.ClickException(f"运行时出现错误: {e}\n{traceback.format_exc()}")
|
||||||
18
astrbot/cli/utils/__init__.py
Normal file
18
astrbot/cli/utils/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
from .basic import (
|
||||||
|
check_astrbot_root,
|
||||||
|
check_dashboard,
|
||||||
|
get_astrbot_root,
|
||||||
|
)
|
||||||
|
from .plugin import PluginStatus, build_plug_list, get_git_repo, manage_plugin
|
||||||
|
from .version_comparator import VersionComparator
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"PluginStatus",
|
||||||
|
"VersionComparator",
|
||||||
|
"build_plug_list",
|
||||||
|
"check_astrbot_root",
|
||||||
|
"check_dashboard",
|
||||||
|
"get_astrbot_root",
|
||||||
|
"get_git_repo",
|
||||||
|
"manage_plugin",
|
||||||
|
]
|
||||||
76
astrbot/cli/utils/basic.py
Normal file
76
astrbot/cli/utils/basic.py
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
|
||||||
|
def check_astrbot_root(path: str | Path) -> bool:
|
||||||
|
"""检查路径是否为 AstrBot 根目录"""
|
||||||
|
if not isinstance(path, Path):
|
||||||
|
path = Path(path)
|
||||||
|
if not path.exists() or not path.is_dir():
|
||||||
|
return False
|
||||||
|
if not (path / ".astrbot").exists():
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def get_astrbot_root() -> Path:
|
||||||
|
"""获取Astrbot根目录路径"""
|
||||||
|
return Path.cwd()
|
||||||
|
|
||||||
|
|
||||||
|
async def check_dashboard(astrbot_root: Path) -> None:
|
||||||
|
"""检查是否安装了dashboard"""
|
||||||
|
from astrbot.core.config.default import VERSION
|
||||||
|
from astrbot.core.utils.io import download_dashboard, get_dashboard_version
|
||||||
|
|
||||||
|
from .version_comparator import VersionComparator
|
||||||
|
|
||||||
|
try:
|
||||||
|
dashboard_version = await get_dashboard_version()
|
||||||
|
match dashboard_version:
|
||||||
|
case None:
|
||||||
|
click.echo("未安装管理面板")
|
||||||
|
if click.confirm(
|
||||||
|
"是否安装管理面板?",
|
||||||
|
default=True,
|
||||||
|
abort=True,
|
||||||
|
):
|
||||||
|
click.echo("正在安装管理面板...")
|
||||||
|
await download_dashboard(
|
||||||
|
path="data/dashboard.zip",
|
||||||
|
extract_path=str(astrbot_root),
|
||||||
|
version=f"v{VERSION}",
|
||||||
|
latest=False,
|
||||||
|
)
|
||||||
|
click.echo("管理面板安装完成")
|
||||||
|
|
||||||
|
case str():
|
||||||
|
if VersionComparator.compare_version(VERSION, dashboard_version) <= 0:
|
||||||
|
click.echo("管理面板已是最新版本")
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
version = dashboard_version.split("v")[1]
|
||||||
|
click.echo(f"管理面板版本: {version}")
|
||||||
|
await download_dashboard(
|
||||||
|
path="data/dashboard.zip",
|
||||||
|
extract_path=str(astrbot_root),
|
||||||
|
version=f"v{VERSION}",
|
||||||
|
latest=False,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
click.echo(f"下载管理面板失败: {e}")
|
||||||
|
return
|
||||||
|
except FileNotFoundError:
|
||||||
|
click.echo("初始化管理面板目录...")
|
||||||
|
try:
|
||||||
|
await download_dashboard(
|
||||||
|
path=str(astrbot_root / "dashboard.zip"),
|
||||||
|
extract_path=str(astrbot_root),
|
||||||
|
version=f"v{VERSION}",
|
||||||
|
latest=False,
|
||||||
|
)
|
||||||
|
click.echo("管理面板初始化完成")
|
||||||
|
except Exception as e:
|
||||||
|
click.echo(f"下载管理面板失败: {e}")
|
||||||
|
return
|
||||||
246
astrbot/cli/utils/plugin.py
Normal file
246
astrbot/cli/utils/plugin.py
Normal file
@@ -0,0 +1,246 @@
|
|||||||
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
from enum import Enum
|
||||||
|
from io import BytesIO
|
||||||
|
from pathlib import Path
|
||||||
|
from zipfile import ZipFile
|
||||||
|
|
||||||
|
import click
|
||||||
|
import httpx
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from .version_comparator import VersionComparator
|
||||||
|
|
||||||
|
|
||||||
|
class PluginStatus(str, Enum):
|
||||||
|
INSTALLED = "已安装"
|
||||||
|
NEED_UPDATE = "需更新"
|
||||||
|
NOT_INSTALLED = "未安装"
|
||||||
|
NOT_PUBLISHED = "未发布"
|
||||||
|
|
||||||
|
|
||||||
|
def get_git_repo(url: str, target_path: Path, proxy: str | None = None):
|
||||||
|
"""从 Git 仓库下载代码并解压到指定路径"""
|
||||||
|
temp_dir = Path(tempfile.mkdtemp())
|
||||||
|
try:
|
||||||
|
# 解析仓库信息
|
||||||
|
repo_namespace = url.split("/")[-2:]
|
||||||
|
author = repo_namespace[0]
|
||||||
|
repo = repo_namespace[1]
|
||||||
|
|
||||||
|
# 尝试获取最新的 release
|
||||||
|
release_url = f"https://api.github.com/repos/{author}/{repo}/releases"
|
||||||
|
try:
|
||||||
|
with httpx.Client(
|
||||||
|
proxy=proxy if proxy else None,
|
||||||
|
follow_redirects=True,
|
||||||
|
) as client:
|
||||||
|
resp = client.get(release_url)
|
||||||
|
resp.raise_for_status()
|
||||||
|
releases = resp.json()
|
||||||
|
|
||||||
|
if releases:
|
||||||
|
# 使用最新的 release
|
||||||
|
download_url = releases[0]["zipball_url"]
|
||||||
|
else:
|
||||||
|
# 没有 release,使用默认分支
|
||||||
|
click.echo(f"正在从默认分支下载 {author}/{repo}")
|
||||||
|
download_url = f"https://github.com/{author}/{repo}/archive/refs/heads/master.zip"
|
||||||
|
except Exception as e:
|
||||||
|
click.echo(f"获取 release 信息失败: {e},将直接使用提供的 URL")
|
||||||
|
download_url = url
|
||||||
|
|
||||||
|
# 应用代理
|
||||||
|
if proxy:
|
||||||
|
download_url = f"{proxy}/{download_url}"
|
||||||
|
|
||||||
|
# 下载并解压
|
||||||
|
with httpx.Client(
|
||||||
|
proxy=proxy if proxy else None,
|
||||||
|
follow_redirects=True,
|
||||||
|
) as client:
|
||||||
|
resp = client.get(download_url)
|
||||||
|
if (
|
||||||
|
resp.status_code == 404
|
||||||
|
and "archive/refs/heads/master.zip" in download_url
|
||||||
|
):
|
||||||
|
alt_url = download_url.replace("master.zip", "main.zip")
|
||||||
|
click.echo("master 分支不存在,尝试下载 main 分支")
|
||||||
|
resp = client.get(alt_url)
|
||||||
|
resp.raise_for_status()
|
||||||
|
else:
|
||||||
|
resp.raise_for_status()
|
||||||
|
zip_content = BytesIO(resp.content)
|
||||||
|
with ZipFile(zip_content) as z:
|
||||||
|
z.extractall(temp_dir)
|
||||||
|
namelist = z.namelist()
|
||||||
|
root_dir = Path(namelist[0]).parts[0] if namelist else ""
|
||||||
|
if target_path.exists():
|
||||||
|
shutil.rmtree(target_path)
|
||||||
|
shutil.move(temp_dir / root_dir, target_path)
|
||||||
|
finally:
|
||||||
|
if temp_dir.exists():
|
||||||
|
shutil.rmtree(temp_dir, ignore_errors=True)
|
||||||
|
|
||||||
|
|
||||||
|
def load_yaml_metadata(plugin_dir: Path) -> dict:
|
||||||
|
"""从 metadata.yaml 文件加载插件元数据
|
||||||
|
|
||||||
|
Args:
|
||||||
|
plugin_dir: 插件目录路径
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: 包含元数据的字典,如果读取失败则返回空字典
|
||||||
|
|
||||||
|
"""
|
||||||
|
yaml_path = plugin_dir / "metadata.yaml"
|
||||||
|
if yaml_path.exists():
|
||||||
|
try:
|
||||||
|
return yaml.safe_load(yaml_path.read_text(encoding="utf-8")) or {}
|
||||||
|
except Exception as e:
|
||||||
|
click.echo(f"读取 {yaml_path} 失败: {e}", err=True)
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
def build_plug_list(plugins_dir: Path) -> list:
|
||||||
|
"""构建插件列表,包含本地和在线插件信息
|
||||||
|
|
||||||
|
Args:
|
||||||
|
plugins_dir (Path): 插件目录路径
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list: 包含插件信息的字典列表
|
||||||
|
|
||||||
|
"""
|
||||||
|
# 获取本地插件信息
|
||||||
|
result = []
|
||||||
|
if plugins_dir.exists():
|
||||||
|
for plugin_name in [d.name for d in plugins_dir.glob("*") if d.is_dir()]:
|
||||||
|
plugin_dir = plugins_dir / plugin_name
|
||||||
|
|
||||||
|
# 从 metadata.yaml 加载元数据
|
||||||
|
metadata = load_yaml_metadata(plugin_dir)
|
||||||
|
|
||||||
|
if "desc" not in metadata and "description" in metadata:
|
||||||
|
metadata["desc"] = metadata["description"]
|
||||||
|
|
||||||
|
# 如果成功加载元数据,添加到结果列表
|
||||||
|
if metadata and all(
|
||||||
|
k in metadata for k in ["name", "desc", "version", "author", "repo"]
|
||||||
|
):
|
||||||
|
result.append(
|
||||||
|
{
|
||||||
|
"name": str(metadata.get("name", "")),
|
||||||
|
"desc": str(metadata.get("desc", "")),
|
||||||
|
"version": str(metadata.get("version", "")),
|
||||||
|
"author": str(metadata.get("author", "")),
|
||||||
|
"repo": str(metadata.get("repo", "")),
|
||||||
|
"status": PluginStatus.INSTALLED,
|
||||||
|
"local_path": str(plugin_dir),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
# 获取在线插件列表
|
||||||
|
online_plugins = []
|
||||||
|
try:
|
||||||
|
with httpx.Client() as client:
|
||||||
|
resp = client.get("https://api.soulter.top/astrbot/plugins")
|
||||||
|
resp.raise_for_status()
|
||||||
|
data = resp.json()
|
||||||
|
for plugin_id, plugin_info in data.items():
|
||||||
|
online_plugins.append(
|
||||||
|
{
|
||||||
|
"name": str(plugin_id),
|
||||||
|
"desc": str(plugin_info.get("desc", "")),
|
||||||
|
"version": str(plugin_info.get("version", "")),
|
||||||
|
"author": str(plugin_info.get("author", "")),
|
||||||
|
"repo": str(plugin_info.get("repo", "")),
|
||||||
|
"status": PluginStatus.NOT_INSTALLED,
|
||||||
|
"local_path": None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
click.echo(f"获取在线插件列表失败: {e}", err=True)
|
||||||
|
|
||||||
|
# 与在线插件比对,更新状态
|
||||||
|
online_plugin_names = {plugin["name"] for plugin in online_plugins}
|
||||||
|
for local_plugin in result:
|
||||||
|
if local_plugin["name"] in online_plugin_names:
|
||||||
|
# 查找对应的在线插件
|
||||||
|
online_plugin = next(
|
||||||
|
p for p in online_plugins if p["name"] == local_plugin["name"]
|
||||||
|
)
|
||||||
|
if (
|
||||||
|
VersionComparator.compare_version(
|
||||||
|
local_plugin["version"],
|
||||||
|
online_plugin["version"],
|
||||||
|
)
|
||||||
|
< 0
|
||||||
|
):
|
||||||
|
local_plugin["status"] = PluginStatus.NEED_UPDATE
|
||||||
|
else:
|
||||||
|
# 本地插件未在线上发布
|
||||||
|
local_plugin["status"] = PluginStatus.NOT_PUBLISHED
|
||||||
|
|
||||||
|
# 添加未安装的在线插件
|
||||||
|
for online_plugin in online_plugins:
|
||||||
|
if not any(plugin["name"] == online_plugin["name"] for plugin in result):
|
||||||
|
result.append(online_plugin)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def manage_plugin(
|
||||||
|
plugin: dict,
|
||||||
|
plugins_dir: Path,
|
||||||
|
is_update: bool = False,
|
||||||
|
proxy: str | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""安装或更新插件
|
||||||
|
|
||||||
|
Args:
|
||||||
|
plugin (dict): 插件信息字典
|
||||||
|
plugins_dir (Path): 插件目录
|
||||||
|
is_update (bool, optional): 是否为更新操作. 默认为 False
|
||||||
|
proxy (str, optional): 代理服务器地址
|
||||||
|
|
||||||
|
"""
|
||||||
|
plugin_name = plugin["name"]
|
||||||
|
repo_url = plugin["repo"]
|
||||||
|
|
||||||
|
# 如果是更新且有本地路径,直接使用本地路径
|
||||||
|
if is_update and plugin.get("local_path"):
|
||||||
|
target_path = Path(plugin["local_path"])
|
||||||
|
else:
|
||||||
|
target_path = plugins_dir / plugin_name
|
||||||
|
|
||||||
|
backup_path = Path(f"{target_path}_backup") if is_update else None
|
||||||
|
|
||||||
|
# 检查插件是否存在
|
||||||
|
if is_update and not target_path.exists():
|
||||||
|
raise click.ClickException(f"插件 {plugin_name} 未安装,无法更新")
|
||||||
|
|
||||||
|
# 备份现有插件
|
||||||
|
if is_update and backup_path is not None and backup_path.exists():
|
||||||
|
shutil.rmtree(backup_path)
|
||||||
|
if is_update and backup_path is not None:
|
||||||
|
shutil.copytree(target_path, backup_path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
click.echo(
|
||||||
|
f"正在从 {repo_url} {'更新' if is_update else '下载'}插件 {plugin_name}...",
|
||||||
|
)
|
||||||
|
get_git_repo(repo_url, target_path, proxy)
|
||||||
|
|
||||||
|
# 更新成功,删除备份
|
||||||
|
if is_update and backup_path is not None and backup_path.exists():
|
||||||
|
shutil.rmtree(backup_path)
|
||||||
|
click.echo(f"插件 {plugin_name} {'更新' if is_update else '安装'}成功")
|
||||||
|
except Exception as e:
|
||||||
|
if target_path.exists():
|
||||||
|
shutil.rmtree(target_path, ignore_errors=True)
|
||||||
|
if is_update and backup_path is not None and backup_path.exists():
|
||||||
|
shutil.move(backup_path, target_path)
|
||||||
|
raise click.ClickException(
|
||||||
|
f"{'更新' if is_update else '安装'}插件 {plugin_name} 时出错: {e}",
|
||||||
|
)
|
||||||
90
astrbot/cli/utils/version_comparator.py
Normal file
90
astrbot/cli/utils/version_comparator.py
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
"""拷贝自 astrbot.core.utils.version_comparator"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class VersionComparator:
|
||||||
|
@staticmethod
|
||||||
|
def compare_version(v1: str, v2: str) -> int:
|
||||||
|
"""根据 Semver 语义版本规范来比较版本号的大小。支持不仅局限于 3 个数字的版本号,并处理预发布标签。
|
||||||
|
|
||||||
|
参考: https://semver.org/lang/zh-CN/
|
||||||
|
|
||||||
|
返回 1 表示 v1 > v2,返回 -1 表示 v1 < v2,返回 0 表示 v1 = v2。
|
||||||
|
"""
|
||||||
|
v1 = v1.lower().replace("v", "")
|
||||||
|
v2 = v2.lower().replace("v", "")
|
||||||
|
|
||||||
|
def split_version(version):
|
||||||
|
match = re.match(
|
||||||
|
r"^([0-9]+(?:\.[0-9]+)*)(?:-([0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*))?(?:\+(.+))?$",
|
||||||
|
version,
|
||||||
|
)
|
||||||
|
if not match:
|
||||||
|
return [], None
|
||||||
|
major_minor_patch = match.group(1).split(".")
|
||||||
|
prerelease = match.group(2)
|
||||||
|
# buildmetadata = match.group(3) # 构建元数据在比较时忽略
|
||||||
|
parts = [int(x) for x in major_minor_patch]
|
||||||
|
prerelease = VersionComparator._split_prerelease(prerelease)
|
||||||
|
return parts, prerelease
|
||||||
|
|
||||||
|
v1_parts, v1_prerelease = split_version(v1)
|
||||||
|
v2_parts, v2_prerelease = split_version(v2)
|
||||||
|
|
||||||
|
# 比较数字部分
|
||||||
|
length = max(len(v1_parts), len(v2_parts))
|
||||||
|
v1_parts.extend([0] * (length - len(v1_parts)))
|
||||||
|
v2_parts.extend([0] * (length - len(v2_parts)))
|
||||||
|
|
||||||
|
for i in range(length):
|
||||||
|
if v1_parts[i] > v2_parts[i]:
|
||||||
|
return 1
|
||||||
|
if v1_parts[i] < v2_parts[i]:
|
||||||
|
return -1
|
||||||
|
|
||||||
|
# 比较预发布标签
|
||||||
|
if v1_prerelease is None and v2_prerelease is not None:
|
||||||
|
return 1 # 没有预发布标签的版本高于有预发布标签的版本
|
||||||
|
if v1_prerelease is not None and v2_prerelease is None:
|
||||||
|
return -1 # 有预发布标签的版本低于没有预发布标签的版本
|
||||||
|
if v1_prerelease is not None and v2_prerelease is not None:
|
||||||
|
len_pre = max(len(v1_prerelease), len(v2_prerelease))
|
||||||
|
for i in range(len_pre):
|
||||||
|
p1 = v1_prerelease[i] if i < len(v1_prerelease) else None
|
||||||
|
p2 = v2_prerelease[i] if i < len(v2_prerelease) else None
|
||||||
|
|
||||||
|
if p1 is None and p2 is not None:
|
||||||
|
return -1
|
||||||
|
if p1 is not None and p2 is None:
|
||||||
|
return 1
|
||||||
|
if isinstance(p1, int) and isinstance(p2, str):
|
||||||
|
return -1
|
||||||
|
if isinstance(p1, str) and isinstance(p2, int):
|
||||||
|
return 1
|
||||||
|
if isinstance(p1, int) and isinstance(p2, int):
|
||||||
|
if p1 > p2:
|
||||||
|
return 1
|
||||||
|
if p1 < p2:
|
||||||
|
return -1
|
||||||
|
elif isinstance(p1, str) and isinstance(p2, str):
|
||||||
|
if p1 > p2:
|
||||||
|
return 1
|
||||||
|
if p1 < p2:
|
||||||
|
return -1
|
||||||
|
return 0 # 预发布标签完全相同
|
||||||
|
|
||||||
|
return 0 # 数字部分和预发布标签都相同
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _split_prerelease(prerelease):
|
||||||
|
if not prerelease:
|
||||||
|
return None
|
||||||
|
parts = prerelease.split(".")
|
||||||
|
result = []
|
||||||
|
for part in parts:
|
||||||
|
if part.isdigit():
|
||||||
|
result.append(int(part))
|
||||||
|
else:
|
||||||
|
result.append(part)
|
||||||
|
return result
|
||||||
@@ -1,26 +1,31 @@
|
|||||||
import os
|
import os
|
||||||
import asyncio
|
|
||||||
from .log import LogManager, LogBroker
|
|
||||||
from astrbot.core.utils.t2i.renderer import HtmlRenderer
|
|
||||||
from astrbot.core.utils.shared_preferences import SharedPreferences
|
|
||||||
from astrbot.core.utils.pip_installer import PipInstaller
|
|
||||||
from astrbot.core.db.sqlite import SQLiteDatabase
|
|
||||||
from astrbot.core.config.default import DB_PATH
|
|
||||||
from astrbot.core.config import AstrBotConfig
|
|
||||||
|
|
||||||
os.makedirs("data", exist_ok=True)
|
from astrbot.core.config import AstrBotConfig
|
||||||
|
from astrbot.core.config.default import DB_PATH
|
||||||
|
from astrbot.core.db.sqlite import SQLiteDatabase
|
||||||
|
from astrbot.core.file_token_service import FileTokenService
|
||||||
|
from astrbot.core.utils.pip_installer import PipInstaller
|
||||||
|
from astrbot.core.utils.shared_preferences import SharedPreferences
|
||||||
|
from astrbot.core.utils.t2i.renderer import HtmlRenderer
|
||||||
|
|
||||||
|
from .log import LogBroker, LogManager # noqa
|
||||||
|
from .utils.astrbot_path import get_astrbot_data_path
|
||||||
|
|
||||||
|
# 初始化数据存储文件夹
|
||||||
|
os.makedirs(get_astrbot_data_path(), exist_ok=True)
|
||||||
|
|
||||||
|
DEMO_MODE = os.getenv("DEMO_MODE", False)
|
||||||
|
|
||||||
astrbot_config = AstrBotConfig()
|
astrbot_config = AstrBotConfig()
|
||||||
t2i_base_url = astrbot_config.get('t2i_endpoint', 'https://t2i.soulter.top/text2img')
|
t2i_base_url = astrbot_config.get("t2i_endpoint", "https://t2i.soulter.top/text2img")
|
||||||
html_renderer = HtmlRenderer(t2i_base_url)
|
html_renderer = HtmlRenderer(t2i_base_url)
|
||||||
logger = LogManager.GetLogger(log_name='astrbot')
|
logger = LogManager.GetLogger(log_name="astrbot")
|
||||||
|
|
||||||
if os.environ.get('TESTING', ""):
|
|
||||||
logger.setLevel('DEBUG')
|
|
||||||
|
|
||||||
db_helper = SQLiteDatabase(DB_PATH)
|
db_helper = SQLiteDatabase(DB_PATH)
|
||||||
sp = SharedPreferences() # 简单的偏好设置存储
|
# 简单的偏好设置存储, 这里后续应该存储到数据库中, 一些部分可以存储到配置中
|
||||||
pip_installer = PipInstaller(astrbot_config.get('pip_install_arg', ''))
|
sp = SharedPreferences(db_helper=db_helper)
|
||||||
web_chat_queue = asyncio.Queue(maxsize=32)
|
# 文件令牌服务
|
||||||
web_chat_back_queue = asyncio.Queue(maxsize=32)
|
file_token_service = FileTokenService()
|
||||||
WEBUI_SK = "Advanced_System_for_Text_Response_and_Bot_Operations_Tool"
|
pip_installer = PipInstaller(
|
||||||
|
astrbot_config.get("pip_install_arg", ""),
|
||||||
|
astrbot_config.get("pypi_index_url", None),
|
||||||
|
)
|
||||||
|
|||||||
14
astrbot/core/agent/agent.py
Normal file
14
astrbot/core/agent/agent.py
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Generic
|
||||||
|
|
||||||
|
from .hooks import BaseAgentRunHooks
|
||||||
|
from .run_context import TContext
|
||||||
|
from .tool import FunctionTool
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Agent(Generic[TContext]):
|
||||||
|
name: str
|
||||||
|
instructions: str | None = None
|
||||||
|
tools: list[str | FunctionTool] | None = None
|
||||||
|
run_hooks: BaseAgentRunHooks[TContext] | None = None
|
||||||
38
astrbot/core/agent/handoff.py
Normal file
38
astrbot/core/agent/handoff.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
from typing import Generic
|
||||||
|
|
||||||
|
from .agent import Agent
|
||||||
|
from .run_context import TContext
|
||||||
|
from .tool import FunctionTool
|
||||||
|
|
||||||
|
|
||||||
|
class HandoffTool(FunctionTool, Generic[TContext]):
|
||||||
|
"""Handoff tool for delegating tasks to another agent."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
agent: Agent[TContext],
|
||||||
|
parameters: dict | None = None,
|
||||||
|
**kwargs,
|
||||||
|
):
|
||||||
|
self.agent = agent
|
||||||
|
super().__init__(
|
||||||
|
name=f"transfer_to_{agent.name}",
|
||||||
|
parameters=parameters or self.default_parameters(),
|
||||||
|
description=agent.instructions or self.default_description(agent.name),
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
def default_parameters(self) -> dict:
|
||||||
|
return {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"input": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The input to be handed off to another agent. This should be a clear and concise request or task.",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
def default_description(self, agent_name: str | None) -> str:
|
||||||
|
agent_name = agent_name or "another"
|
||||||
|
return f"Delegate tasks to {self.name} agent to handle the request."
|
||||||
30
astrbot/core/agent/hooks.py
Normal file
30
astrbot/core/agent/hooks.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
from typing import Generic
|
||||||
|
|
||||||
|
import mcp
|
||||||
|
|
||||||
|
from astrbot.core.agent.tool import FunctionTool
|
||||||
|
from astrbot.core.provider.entities import LLMResponse
|
||||||
|
|
||||||
|
from .run_context import ContextWrapper, TContext
|
||||||
|
|
||||||
|
|
||||||
|
class BaseAgentRunHooks(Generic[TContext]):
|
||||||
|
async def on_agent_begin(self, run_context: ContextWrapper[TContext]): ...
|
||||||
|
async def on_tool_start(
|
||||||
|
self,
|
||||||
|
run_context: ContextWrapper[TContext],
|
||||||
|
tool: FunctionTool,
|
||||||
|
tool_args: dict | None,
|
||||||
|
): ...
|
||||||
|
async def on_tool_end(
|
||||||
|
self,
|
||||||
|
run_context: ContextWrapper[TContext],
|
||||||
|
tool: FunctionTool,
|
||||||
|
tool_args: dict | None,
|
||||||
|
tool_result: mcp.types.CallToolResult | None,
|
||||||
|
): ...
|
||||||
|
async def on_agent_done(
|
||||||
|
self,
|
||||||
|
run_context: ContextWrapper[TContext],
|
||||||
|
llm_response: LLMResponse,
|
||||||
|
): ...
|
||||||
259
astrbot/core/agent/mcp_client.py
Normal file
259
astrbot/core/agent/mcp_client.py
Normal file
@@ -0,0 +1,259 @@
|
|||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from contextlib import AsyncExitStack
|
||||||
|
from datetime import timedelta
|
||||||
|
from typing import Generic
|
||||||
|
|
||||||
|
from astrbot import logger
|
||||||
|
from astrbot.core.agent.run_context import ContextWrapper
|
||||||
|
from astrbot.core.utils.log_pipe import LogPipe
|
||||||
|
|
||||||
|
from .run_context import TContext
|
||||||
|
from .tool import FunctionTool
|
||||||
|
|
||||||
|
try:
|
||||||
|
import mcp
|
||||||
|
from mcp.client.sse import sse_client
|
||||||
|
except (ModuleNotFoundError, ImportError):
|
||||||
|
logger.warning("警告: 缺少依赖库 'mcp',将无法使用 MCP 服务。")
|
||||||
|
|
||||||
|
try:
|
||||||
|
from mcp.client.streamable_http import streamablehttp_client
|
||||||
|
except (ModuleNotFoundError, ImportError):
|
||||||
|
logger.warning(
|
||||||
|
"警告: 缺少依赖库 'mcp' 或者 mcp 库版本过低,无法使用 Streamable HTTP 连接方式。",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _prepare_config(config: dict) -> dict:
|
||||||
|
"""准备配置,处理嵌套格式"""
|
||||||
|
if config.get("mcpServers"):
|
||||||
|
first_key = next(iter(config["mcpServers"]))
|
||||||
|
config = config["mcpServers"][first_key]
|
||||||
|
config.pop("active", None)
|
||||||
|
return config
|
||||||
|
|
||||||
|
|
||||||
|
async def _quick_test_mcp_connection(config: dict) -> tuple[bool, str]:
|
||||||
|
"""快速测试 MCP 服务器可达性"""
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
cfg = _prepare_config(config.copy())
|
||||||
|
|
||||||
|
url = cfg["url"]
|
||||||
|
headers = cfg.get("headers", {})
|
||||||
|
timeout = cfg.get("timeout", 10)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if "transport" in cfg:
|
||||||
|
transport_type = cfg["transport"]
|
||||||
|
elif "type" in cfg:
|
||||||
|
transport_type = cfg["type"]
|
||||||
|
else:
|
||||||
|
raise Exception("MCP 连接配置缺少 transport 或 type 字段")
|
||||||
|
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
if transport_type == "streamable_http":
|
||||||
|
test_payload = {
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"method": "initialize",
|
||||||
|
"id": 0,
|
||||||
|
"params": {
|
||||||
|
"protocolVersion": "2024-11-05",
|
||||||
|
"capabilities": {},
|
||||||
|
"clientInfo": {"name": "test-client", "version": "1.2.3"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
async with session.post(
|
||||||
|
url,
|
||||||
|
headers={
|
||||||
|
**headers,
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"Accept": "application/json, text/event-stream",
|
||||||
|
},
|
||||||
|
json=test_payload,
|
||||||
|
timeout=aiohttp.ClientTimeout(total=timeout),
|
||||||
|
) as response:
|
||||||
|
if response.status == 200:
|
||||||
|
return True, ""
|
||||||
|
return False, f"HTTP {response.status}: {response.reason}"
|
||||||
|
else:
|
||||||
|
async with session.get(
|
||||||
|
url,
|
||||||
|
headers={
|
||||||
|
**headers,
|
||||||
|
"Accept": "application/json, text/event-stream",
|
||||||
|
},
|
||||||
|
timeout=aiohttp.ClientTimeout(total=timeout),
|
||||||
|
) as response:
|
||||||
|
if response.status == 200:
|
||||||
|
return True, ""
|
||||||
|
return False, f"HTTP {response.status}: {response.reason}"
|
||||||
|
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
return False, f"连接超时: {timeout}秒"
|
||||||
|
except Exception as e:
|
||||||
|
return False, f"{e!s}"
|
||||||
|
|
||||||
|
|
||||||
|
class MCPClient:
|
||||||
|
def __init__(self):
|
||||||
|
# Initialize session and client objects
|
||||||
|
self.session: mcp.ClientSession | None = None
|
||||||
|
self.exit_stack = AsyncExitStack()
|
||||||
|
|
||||||
|
self.name: str | None = None
|
||||||
|
self.active: bool = True
|
||||||
|
self.tools: list[mcp.Tool] = []
|
||||||
|
self.server_errlogs: list[str] = []
|
||||||
|
self.running_event = asyncio.Event()
|
||||||
|
|
||||||
|
async def connect_to_server(self, mcp_server_config: dict, name: str):
|
||||||
|
"""连接到 MCP 服务器
|
||||||
|
|
||||||
|
如果 `url` 参数存在:
|
||||||
|
1. 当 transport 指定为 `streamable_http` 时,使用 Streamable HTTP 连接方式。
|
||||||
|
1. 当 transport 指定为 `sse` 时,使用 SSE 连接方式。
|
||||||
|
2. 如果没有指定,默认使用 SSE 的方式连接到 MCP 服务。
|
||||||
|
|
||||||
|
Args:
|
||||||
|
mcp_server_config (dict): Configuration for the MCP server. See https://modelcontextprotocol.io/quickstart/server
|
||||||
|
|
||||||
|
"""
|
||||||
|
cfg = _prepare_config(mcp_server_config.copy())
|
||||||
|
|
||||||
|
def logging_callback(msg: str):
|
||||||
|
# 处理 MCP 服务的错误日志
|
||||||
|
print(f"MCP Server {name} Error: {msg}")
|
||||||
|
self.server_errlogs.append(msg)
|
||||||
|
|
||||||
|
if "url" in cfg:
|
||||||
|
success, error_msg = await _quick_test_mcp_connection(cfg)
|
||||||
|
if not success:
|
||||||
|
raise Exception(error_msg)
|
||||||
|
|
||||||
|
if "transport" in cfg:
|
||||||
|
transport_type = cfg["transport"]
|
||||||
|
elif "type" in cfg:
|
||||||
|
transport_type = cfg["type"]
|
||||||
|
else:
|
||||||
|
raise Exception("MCP 连接配置缺少 transport 或 type 字段")
|
||||||
|
|
||||||
|
if transport_type != "streamable_http":
|
||||||
|
# SSE transport method
|
||||||
|
self._streams_context = sse_client(
|
||||||
|
url=cfg["url"],
|
||||||
|
headers=cfg.get("headers", {}),
|
||||||
|
timeout=cfg.get("timeout", 5),
|
||||||
|
sse_read_timeout=cfg.get("sse_read_timeout", 60 * 5),
|
||||||
|
)
|
||||||
|
streams = await self.exit_stack.enter_async_context(
|
||||||
|
self._streams_context,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create a new client session
|
||||||
|
read_timeout = timedelta(seconds=cfg.get("session_read_timeout", 60))
|
||||||
|
self.session = await self.exit_stack.enter_async_context(
|
||||||
|
mcp.ClientSession(
|
||||||
|
*streams,
|
||||||
|
read_timeout_seconds=read_timeout,
|
||||||
|
logging_callback=logging_callback, # type: ignore
|
||||||
|
),
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
timeout = timedelta(seconds=cfg.get("timeout", 30))
|
||||||
|
sse_read_timeout = timedelta(
|
||||||
|
seconds=cfg.get("sse_read_timeout", 60 * 5),
|
||||||
|
)
|
||||||
|
self._streams_context = streamablehttp_client(
|
||||||
|
url=cfg["url"],
|
||||||
|
headers=cfg.get("headers", {}),
|
||||||
|
timeout=timeout,
|
||||||
|
sse_read_timeout=sse_read_timeout,
|
||||||
|
terminate_on_close=cfg.get("terminate_on_close", True),
|
||||||
|
)
|
||||||
|
read_s, write_s, _ = await self.exit_stack.enter_async_context(
|
||||||
|
self._streams_context,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create a new client session
|
||||||
|
read_timeout = timedelta(seconds=cfg.get("session_read_timeout", 60))
|
||||||
|
self.session = await self.exit_stack.enter_async_context(
|
||||||
|
mcp.ClientSession(
|
||||||
|
read_stream=read_s,
|
||||||
|
write_stream=write_s,
|
||||||
|
read_timeout_seconds=read_timeout,
|
||||||
|
logging_callback=logging_callback, # type: ignore
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
else:
|
||||||
|
server_params = mcp.StdioServerParameters(
|
||||||
|
**cfg,
|
||||||
|
)
|
||||||
|
|
||||||
|
def callback(msg: str):
|
||||||
|
# 处理 MCP 服务的错误日志
|
||||||
|
self.server_errlogs.append(msg)
|
||||||
|
|
||||||
|
stdio_transport = await self.exit_stack.enter_async_context(
|
||||||
|
mcp.stdio_client(
|
||||||
|
server_params,
|
||||||
|
errlog=LogPipe(
|
||||||
|
level=logging.ERROR,
|
||||||
|
logger=logger,
|
||||||
|
identifier=f"MCPServer-{name}",
|
||||||
|
callback=callback,
|
||||||
|
), # type: ignore
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create a new client session
|
||||||
|
self.session = await self.exit_stack.enter_async_context(
|
||||||
|
mcp.ClientSession(*stdio_transport),
|
||||||
|
)
|
||||||
|
await self.session.initialize()
|
||||||
|
|
||||||
|
async def list_tools_and_save(self) -> mcp.ListToolsResult:
|
||||||
|
"""List all tools from the server and save them to self.tools"""
|
||||||
|
if not self.session:
|
||||||
|
raise Exception("MCP Client is not initialized")
|
||||||
|
response = await self.session.list_tools()
|
||||||
|
self.tools = response.tools
|
||||||
|
return response
|
||||||
|
|
||||||
|
async def cleanup(self):
|
||||||
|
"""Clean up resources"""
|
||||||
|
await self.exit_stack.aclose()
|
||||||
|
self.running_event.set() # Set the running event to indicate cleanup is done
|
||||||
|
|
||||||
|
|
||||||
|
class MCPTool(FunctionTool, Generic[TContext]):
|
||||||
|
"""A function tool that calls an MCP service."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self, mcp_tool: mcp.Tool, mcp_client: MCPClient, mcp_server_name: str, **kwargs
|
||||||
|
):
|
||||||
|
super().__init__(
|
||||||
|
name=mcp_tool.name,
|
||||||
|
description=mcp_tool.description or "",
|
||||||
|
parameters=mcp_tool.inputSchema,
|
||||||
|
)
|
||||||
|
self.mcp_tool = mcp_tool
|
||||||
|
self.mcp_client = mcp_client
|
||||||
|
self.mcp_server_name = mcp_server_name
|
||||||
|
|
||||||
|
async def call(
|
||||||
|
self, context: ContextWrapper[TContext], **kwargs
|
||||||
|
) -> mcp.types.CallToolResult:
|
||||||
|
session = self.mcp_client.session
|
||||||
|
if not session:
|
||||||
|
raise ValueError("MCP session is not available for MCP function tools.")
|
||||||
|
res = await session.call_tool(
|
||||||
|
name=self.mcp_tool.name,
|
||||||
|
arguments=kwargs,
|
||||||
|
read_timeout_seconds=timedelta(
|
||||||
|
seconds=context.tool_call_timeout,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return res
|
||||||
168
astrbot/core/agent/message.py
Normal file
168
astrbot/core/agent/message.py
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
# Inspired by MoonshotAI/kosong, credits to MoonshotAI/kosong authors for the original implementation.
|
||||||
|
# License: Apache License 2.0
|
||||||
|
|
||||||
|
from typing import Any, ClassVar, Literal, cast
|
||||||
|
|
||||||
|
from pydantic import BaseModel, GetCoreSchemaHandler
|
||||||
|
from pydantic_core import core_schema
|
||||||
|
|
||||||
|
|
||||||
|
class ContentPart(BaseModel):
|
||||||
|
"""A part of the content in a message."""
|
||||||
|
|
||||||
|
__content_part_registry: ClassVar[dict[str, type["ContentPart"]]] = {}
|
||||||
|
|
||||||
|
type: str
|
||||||
|
|
||||||
|
def __init_subclass__(cls, **kwargs: Any) -> None:
|
||||||
|
super().__init_subclass__(**kwargs)
|
||||||
|
|
||||||
|
invalid_subclass_error_msg = f"ContentPart subclass {cls.__name__} must have a `type` field of type `str`"
|
||||||
|
|
||||||
|
type_value = getattr(cls, "type", None)
|
||||||
|
if type_value is None or not isinstance(type_value, str):
|
||||||
|
raise ValueError(invalid_subclass_error_msg)
|
||||||
|
|
||||||
|
cls.__content_part_registry[type_value] = cls
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __get_pydantic_core_schema__(
|
||||||
|
cls, source_type: Any, handler: GetCoreSchemaHandler
|
||||||
|
) -> core_schema.CoreSchema:
|
||||||
|
# If we're dealing with the base ContentPart class, use custom validation
|
||||||
|
if cls.__name__ == "ContentPart":
|
||||||
|
|
||||||
|
def validate_content_part(value: Any) -> Any:
|
||||||
|
# if it's already an instance of a ContentPart subclass, return it
|
||||||
|
if hasattr(value, "__class__") and issubclass(value.__class__, cls):
|
||||||
|
return value
|
||||||
|
|
||||||
|
# if it's a dict with a type field, dispatch to the appropriate subclass
|
||||||
|
if isinstance(value, dict) and "type" in value:
|
||||||
|
type_value: Any | None = cast(dict[str, Any], value).get("type")
|
||||||
|
if not isinstance(type_value, str):
|
||||||
|
raise ValueError(f"Cannot validate {value} as ContentPart")
|
||||||
|
target_class = cls.__content_part_registry[type_value]
|
||||||
|
return target_class.model_validate(value)
|
||||||
|
|
||||||
|
raise ValueError(f"Cannot validate {value} as ContentPart")
|
||||||
|
|
||||||
|
return core_schema.no_info_plain_validator_function(validate_content_part)
|
||||||
|
|
||||||
|
# for subclasses, use the default schema
|
||||||
|
return handler(source_type)
|
||||||
|
|
||||||
|
|
||||||
|
class TextPart(ContentPart):
|
||||||
|
"""
|
||||||
|
>>> TextPart(text="Hello, world!").model_dump()
|
||||||
|
{'type': 'text', 'text': 'Hello, world!'}
|
||||||
|
"""
|
||||||
|
|
||||||
|
type: str = "text"
|
||||||
|
text: str
|
||||||
|
|
||||||
|
|
||||||
|
class ImageURLPart(ContentPart):
|
||||||
|
"""
|
||||||
|
>>> ImageURLPart(image_url="http://example.com/image.jpg").model_dump()
|
||||||
|
{'type': 'image_url', 'image_url': 'http://example.com/image.jpg'}
|
||||||
|
"""
|
||||||
|
|
||||||
|
class ImageURL(BaseModel):
|
||||||
|
url: str
|
||||||
|
"""The URL of the image, can be data URI scheme like `data:image/png;base64,...`."""
|
||||||
|
id: str | None = None
|
||||||
|
"""The ID of the image, to allow LLMs to distinguish different images."""
|
||||||
|
|
||||||
|
type: str = "image_url"
|
||||||
|
image_url: str
|
||||||
|
|
||||||
|
|
||||||
|
class AudioURLPart(ContentPart):
|
||||||
|
"""
|
||||||
|
>>> AudioURLPart(audio_url=AudioURLPart.AudioURL(url="https://example.com/audio.mp3")).model_dump()
|
||||||
|
{'type': 'audio_url', 'audio_url': {'url': 'https://example.com/audio.mp3', 'id': None}}
|
||||||
|
"""
|
||||||
|
|
||||||
|
class AudioURL(BaseModel):
|
||||||
|
url: str
|
||||||
|
"""The URL of the audio, can be data URI scheme like `data:audio/aac;base64,...`."""
|
||||||
|
id: str | None = None
|
||||||
|
"""The ID of the audio, to allow LLMs to distinguish different audios."""
|
||||||
|
|
||||||
|
type: str = "audio_url"
|
||||||
|
audio_url: AudioURL
|
||||||
|
|
||||||
|
|
||||||
|
class ToolCall(BaseModel):
|
||||||
|
"""
|
||||||
|
A tool call requested by the assistant.
|
||||||
|
|
||||||
|
>>> ToolCall(
|
||||||
|
... id="123",
|
||||||
|
... function=ToolCall.FunctionBody(
|
||||||
|
... name="function",
|
||||||
|
... arguments="{}"
|
||||||
|
... ),
|
||||||
|
... ).model_dump()
|
||||||
|
{'type': 'function', 'id': '123', 'function': {'name': 'function', 'arguments': '{}'}}
|
||||||
|
"""
|
||||||
|
|
||||||
|
class FunctionBody(BaseModel):
|
||||||
|
name: str
|
||||||
|
arguments: str | None
|
||||||
|
|
||||||
|
type: Literal["function"] = "function"
|
||||||
|
|
||||||
|
id: str
|
||||||
|
"""The ID of the tool call."""
|
||||||
|
function: FunctionBody
|
||||||
|
"""The function body of the tool call."""
|
||||||
|
|
||||||
|
|
||||||
|
class ToolCallPart(BaseModel):
|
||||||
|
"""A part of the tool call."""
|
||||||
|
|
||||||
|
arguments_part: str | None = None
|
||||||
|
"""A part of the arguments of the tool call."""
|
||||||
|
|
||||||
|
|
||||||
|
class Message(BaseModel):
|
||||||
|
"""A message in a conversation."""
|
||||||
|
|
||||||
|
role: Literal[
|
||||||
|
"system",
|
||||||
|
"user",
|
||||||
|
"assistant",
|
||||||
|
"tool",
|
||||||
|
]
|
||||||
|
|
||||||
|
content: str | list[ContentPart]
|
||||||
|
"""The content of the message."""
|
||||||
|
|
||||||
|
|
||||||
|
class AssistantMessageSegment(Message):
|
||||||
|
"""A message segment from the assistant."""
|
||||||
|
|
||||||
|
role: Literal["assistant"] = "assistant"
|
||||||
|
tool_calls: list[ToolCall] | list[dict] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class ToolCallMessageSegment(Message):
|
||||||
|
"""A message segment representing a tool call."""
|
||||||
|
|
||||||
|
role: Literal["tool"] = "tool"
|
||||||
|
tool_call_id: str
|
||||||
|
|
||||||
|
|
||||||
|
class UserMessageSegment(Message):
|
||||||
|
"""A message segment from the user."""
|
||||||
|
|
||||||
|
role: Literal["user"] = "user"
|
||||||
|
|
||||||
|
|
||||||
|
class SystemMessageSegment(Message):
|
||||||
|
"""A message segment from the system."""
|
||||||
|
|
||||||
|
role: Literal["system"] = "system"
|
||||||
14
astrbot/core/agent/response.py
Normal file
14
astrbot/core/agent/response.py
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import typing as T
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
from astrbot.core.message.message_event_result import MessageChain
|
||||||
|
|
||||||
|
|
||||||
|
class AgentResponseData(T.TypedDict):
|
||||||
|
chain: MessageChain
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class AgentResponse:
|
||||||
|
type: str
|
||||||
|
data: AgentResponseData
|
||||||
17
astrbot/core/agent/run_context.py
Normal file
17
astrbot/core/agent/run_context.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Generic
|
||||||
|
|
||||||
|
from typing_extensions import TypeVar
|
||||||
|
|
||||||
|
TContext = TypeVar("TContext", default=Any)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ContextWrapper(Generic[TContext]):
|
||||||
|
"""A context for running an agent, which can be used to pass additional data or state."""
|
||||||
|
|
||||||
|
context: TContext
|
||||||
|
tool_call_timeout: int = 60 # Default tool call timeout in seconds
|
||||||
|
|
||||||
|
|
||||||
|
NoContext = ContextWrapper[None]
|
||||||
3
astrbot/core/agent/runners/__init__.py
Normal file
3
astrbot/core/agent/runners/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
from .base import BaseAgentRunner
|
||||||
|
|
||||||
|
__all__ = ["BaseAgentRunner"]
|
||||||
55
astrbot/core/agent/runners/base.py
Normal file
55
astrbot/core/agent/runners/base.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import abc
|
||||||
|
import typing as T
|
||||||
|
from enum import Enum, auto
|
||||||
|
|
||||||
|
from astrbot.core.provider import Provider
|
||||||
|
from astrbot.core.provider.entities import LLMResponse
|
||||||
|
|
||||||
|
from ..hooks import BaseAgentRunHooks
|
||||||
|
from ..response import AgentResponse
|
||||||
|
from ..run_context import ContextWrapper, TContext
|
||||||
|
from ..tool_executor import BaseFunctionToolExecutor
|
||||||
|
|
||||||
|
|
||||||
|
class AgentState(Enum):
|
||||||
|
"""Defines the state of the agent."""
|
||||||
|
|
||||||
|
IDLE = auto() # Initial state
|
||||||
|
RUNNING = auto() # Currently processing
|
||||||
|
DONE = auto() # Completed
|
||||||
|
ERROR = auto() # Error state
|
||||||
|
|
||||||
|
|
||||||
|
class BaseAgentRunner(T.Generic[TContext]):
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def reset(
|
||||||
|
self,
|
||||||
|
provider: Provider,
|
||||||
|
run_context: ContextWrapper[TContext],
|
||||||
|
tool_executor: BaseFunctionToolExecutor[TContext],
|
||||||
|
agent_hooks: BaseAgentRunHooks[TContext],
|
||||||
|
**kwargs: T.Any,
|
||||||
|
) -> None:
|
||||||
|
"""Reset the agent to its initial state.
|
||||||
|
This method should be called before starting a new run.
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def step(self) -> T.AsyncGenerator[AgentResponse, None]:
|
||||||
|
"""Process a single step of the agent."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def done(self) -> bool:
|
||||||
|
"""Check if the agent has completed its task.
|
||||||
|
Returns True if the agent is done, False otherwise.
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def get_final_llm_resp(self) -> LLMResponse | None:
|
||||||
|
"""Get the final observation from the agent.
|
||||||
|
This method should be called after the agent is done.
|
||||||
|
"""
|
||||||
|
...
|
||||||
358
astrbot/core/agent/runners/tool_loop_agent_runner.py
Normal file
358
astrbot/core/agent/runners/tool_loop_agent_runner.py
Normal file
@@ -0,0 +1,358 @@
|
|||||||
|
import sys
|
||||||
|
import traceback
|
||||||
|
import typing as T
|
||||||
|
|
||||||
|
from mcp.types import (
|
||||||
|
BlobResourceContents,
|
||||||
|
CallToolResult,
|
||||||
|
EmbeddedResource,
|
||||||
|
ImageContent,
|
||||||
|
TextContent,
|
||||||
|
TextResourceContents,
|
||||||
|
)
|
||||||
|
|
||||||
|
from astrbot import logger
|
||||||
|
from astrbot.core.message.message_event_result import (
|
||||||
|
MessageChain,
|
||||||
|
)
|
||||||
|
from astrbot.core.provider.entities import (
|
||||||
|
LLMResponse,
|
||||||
|
ProviderRequest,
|
||||||
|
ToolCallsResult,
|
||||||
|
)
|
||||||
|
from astrbot.core.provider.provider import Provider
|
||||||
|
|
||||||
|
from ..hooks import BaseAgentRunHooks
|
||||||
|
from ..message import AssistantMessageSegment, ToolCallMessageSegment
|
||||||
|
from ..response import AgentResponseData
|
||||||
|
from ..run_context import ContextWrapper, TContext
|
||||||
|
from ..tool_executor import BaseFunctionToolExecutor
|
||||||
|
from .base import AgentResponse, AgentState, BaseAgentRunner
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 12):
|
||||||
|
from typing import override
|
||||||
|
else:
|
||||||
|
from typing_extensions import override
|
||||||
|
|
||||||
|
|
||||||
|
class ToolLoopAgentRunner(BaseAgentRunner[TContext]):
|
||||||
|
@override
|
||||||
|
async def reset(
|
||||||
|
self,
|
||||||
|
provider: Provider,
|
||||||
|
request: ProviderRequest,
|
||||||
|
run_context: ContextWrapper[TContext],
|
||||||
|
tool_executor: BaseFunctionToolExecutor[TContext],
|
||||||
|
agent_hooks: BaseAgentRunHooks[TContext],
|
||||||
|
**kwargs: T.Any,
|
||||||
|
) -> None:
|
||||||
|
self.req = request
|
||||||
|
self.streaming = kwargs.get("streaming", False)
|
||||||
|
self.provider = provider
|
||||||
|
self.final_llm_resp = None
|
||||||
|
self._state = AgentState.IDLE
|
||||||
|
self.tool_executor = tool_executor
|
||||||
|
self.agent_hooks = agent_hooks
|
||||||
|
self.run_context = run_context
|
||||||
|
|
||||||
|
def _transition_state(self, new_state: AgentState) -> None:
|
||||||
|
"""转换 Agent 状态"""
|
||||||
|
if self._state != new_state:
|
||||||
|
logger.debug(f"Agent state transition: {self._state} -> {new_state}")
|
||||||
|
self._state = new_state
|
||||||
|
|
||||||
|
async def _iter_llm_responses(self) -> T.AsyncGenerator[LLMResponse, None]:
|
||||||
|
"""Yields chunks *and* a final LLMResponse."""
|
||||||
|
if self.streaming:
|
||||||
|
stream = self.provider.text_chat_stream(**self.req.__dict__)
|
||||||
|
async for resp in stream: # type: ignore
|
||||||
|
yield resp
|
||||||
|
else:
|
||||||
|
yield await self.provider.text_chat(**self.req.__dict__)
|
||||||
|
|
||||||
|
@override
|
||||||
|
async def step(self):
|
||||||
|
"""Process a single step of the agent.
|
||||||
|
This method should return the result of the step.
|
||||||
|
"""
|
||||||
|
if not self.req:
|
||||||
|
raise ValueError("Request is not set. Please call reset() first.")
|
||||||
|
|
||||||
|
if self._state == AgentState.IDLE:
|
||||||
|
try:
|
||||||
|
await self.agent_hooks.on_agent_begin(self.run_context)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in on_agent_begin hook: {e}", exc_info=True)
|
||||||
|
|
||||||
|
# 开始处理,转换到运行状态
|
||||||
|
self._transition_state(AgentState.RUNNING)
|
||||||
|
llm_resp_result = None
|
||||||
|
|
||||||
|
async for llm_response in self._iter_llm_responses():
|
||||||
|
assert isinstance(llm_response, LLMResponse)
|
||||||
|
if llm_response.is_chunk:
|
||||||
|
if llm_response.result_chain:
|
||||||
|
yield AgentResponse(
|
||||||
|
type="streaming_delta",
|
||||||
|
data=AgentResponseData(chain=llm_response.result_chain),
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
yield AgentResponse(
|
||||||
|
type="streaming_delta",
|
||||||
|
data=AgentResponseData(
|
||||||
|
chain=MessageChain().message(llm_response.completion_text),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
llm_resp_result = llm_response
|
||||||
|
break # got final response
|
||||||
|
|
||||||
|
if not llm_resp_result:
|
||||||
|
return
|
||||||
|
|
||||||
|
# 处理 LLM 响应
|
||||||
|
llm_resp = llm_resp_result
|
||||||
|
|
||||||
|
if llm_resp.role == "err":
|
||||||
|
# 如果 LLM 响应错误,转换到错误状态
|
||||||
|
self.final_llm_resp = llm_resp
|
||||||
|
self._transition_state(AgentState.ERROR)
|
||||||
|
yield AgentResponse(
|
||||||
|
type="err",
|
||||||
|
data=AgentResponseData(
|
||||||
|
chain=MessageChain().message(
|
||||||
|
f"LLM 响应错误: {llm_resp.completion_text or '未知错误'}",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
if not llm_resp.tools_call_name:
|
||||||
|
# 如果没有工具调用,转换到完成状态
|
||||||
|
self.final_llm_resp = llm_resp
|
||||||
|
self._transition_state(AgentState.DONE)
|
||||||
|
try:
|
||||||
|
await self.agent_hooks.on_agent_done(self.run_context, llm_resp)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in on_agent_done hook: {e}", exc_info=True)
|
||||||
|
|
||||||
|
# 返回 LLM 结果
|
||||||
|
if llm_resp.result_chain:
|
||||||
|
yield AgentResponse(
|
||||||
|
type="llm_result",
|
||||||
|
data=AgentResponseData(chain=llm_resp.result_chain),
|
||||||
|
)
|
||||||
|
elif llm_resp.completion_text:
|
||||||
|
yield AgentResponse(
|
||||||
|
type="llm_result",
|
||||||
|
data=AgentResponseData(
|
||||||
|
chain=MessageChain().message(llm_resp.completion_text),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# 如果有工具调用,还需处理工具调用
|
||||||
|
if llm_resp.tools_call_name:
|
||||||
|
tool_call_result_blocks = []
|
||||||
|
for tool_call_name in llm_resp.tools_call_name:
|
||||||
|
yield AgentResponse(
|
||||||
|
type="tool_call",
|
||||||
|
data=AgentResponseData(
|
||||||
|
chain=MessageChain().message(f"🔨 调用工具: {tool_call_name}"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
async for result in self._handle_function_tools(self.req, llm_resp):
|
||||||
|
if isinstance(result, list):
|
||||||
|
tool_call_result_blocks = result
|
||||||
|
elif isinstance(result, MessageChain):
|
||||||
|
yield AgentResponse(
|
||||||
|
type="tool_call_result",
|
||||||
|
data=AgentResponseData(chain=result),
|
||||||
|
)
|
||||||
|
# 将结果添加到上下文中
|
||||||
|
tool_calls_result = ToolCallsResult(
|
||||||
|
tool_calls_info=AssistantMessageSegment(
|
||||||
|
tool_calls=llm_resp.to_openai_to_calls_model(),
|
||||||
|
content=llm_resp.completion_text,
|
||||||
|
),
|
||||||
|
tool_calls_result=tool_call_result_blocks,
|
||||||
|
)
|
||||||
|
self.req.append_tool_calls_result(tool_calls_result)
|
||||||
|
|
||||||
|
async def _handle_function_tools(
|
||||||
|
self,
|
||||||
|
req: ProviderRequest,
|
||||||
|
llm_response: LLMResponse,
|
||||||
|
) -> T.AsyncGenerator[MessageChain | list[ToolCallMessageSegment], None]:
|
||||||
|
"""处理函数工具调用。"""
|
||||||
|
tool_call_result_blocks: list[ToolCallMessageSegment] = []
|
||||||
|
logger.info(f"Agent 使用工具: {llm_response.tools_call_name}")
|
||||||
|
|
||||||
|
# 执行函数调用
|
||||||
|
for func_tool_name, func_tool_args, func_tool_id in zip(
|
||||||
|
llm_response.tools_call_name,
|
||||||
|
llm_response.tools_call_args,
|
||||||
|
llm_response.tools_call_ids,
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
if not req.func_tool:
|
||||||
|
return
|
||||||
|
func_tool = req.func_tool.get_func(func_tool_name)
|
||||||
|
logger.info(f"使用工具:{func_tool_name},参数:{func_tool_args}")
|
||||||
|
|
||||||
|
if not func_tool:
|
||||||
|
logger.warning(f"未找到指定的工具: {func_tool_name},将跳过。")
|
||||||
|
tool_call_result_blocks.append(
|
||||||
|
ToolCallMessageSegment(
|
||||||
|
role="tool",
|
||||||
|
tool_call_id=func_tool_id,
|
||||||
|
content=f"error: 未找到工具 {func_tool_name}",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
valid_params = {} # 参数过滤:只传递函数实际需要的参数
|
||||||
|
|
||||||
|
# 获取实际的 handler 函数
|
||||||
|
if func_tool.handler:
|
||||||
|
logger.debug(
|
||||||
|
f"工具 {func_tool_name} 期望的参数: {func_tool.parameters}",
|
||||||
|
)
|
||||||
|
if func_tool.parameters and func_tool.parameters.get("properties"):
|
||||||
|
expected_params = set(func_tool.parameters["properties"].keys())
|
||||||
|
|
||||||
|
valid_params = {
|
||||||
|
k: v
|
||||||
|
for k, v in func_tool_args.items()
|
||||||
|
if k in expected_params
|
||||||
|
}
|
||||||
|
|
||||||
|
# 记录被忽略的参数
|
||||||
|
ignored_params = set(func_tool_args.keys()) - set(
|
||||||
|
valid_params.keys(),
|
||||||
|
)
|
||||||
|
if ignored_params:
|
||||||
|
logger.warning(
|
||||||
|
f"工具 {func_tool_name} 忽略非期望参数: {ignored_params}",
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# 如果没有 handler(如 MCP 工具),使用所有参数
|
||||||
|
valid_params = func_tool_args
|
||||||
|
|
||||||
|
try:
|
||||||
|
await self.agent_hooks.on_tool_start(
|
||||||
|
self.run_context,
|
||||||
|
func_tool,
|
||||||
|
valid_params,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in on_tool_start hook: {e}", exc_info=True)
|
||||||
|
|
||||||
|
executor = self.tool_executor.execute(
|
||||||
|
tool=func_tool,
|
||||||
|
run_context=self.run_context,
|
||||||
|
**valid_params, # 只传递有效的参数
|
||||||
|
)
|
||||||
|
|
||||||
|
_final_resp: CallToolResult | None = None
|
||||||
|
async for resp in executor: # type: ignore
|
||||||
|
if isinstance(resp, CallToolResult):
|
||||||
|
res = resp
|
||||||
|
_final_resp = resp
|
||||||
|
if isinstance(res.content[0], TextContent):
|
||||||
|
tool_call_result_blocks.append(
|
||||||
|
ToolCallMessageSegment(
|
||||||
|
role="tool",
|
||||||
|
tool_call_id=func_tool_id,
|
||||||
|
content=res.content[0].text,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
yield MessageChain().message(res.content[0].text)
|
||||||
|
elif isinstance(res.content[0], ImageContent):
|
||||||
|
tool_call_result_blocks.append(
|
||||||
|
ToolCallMessageSegment(
|
||||||
|
role="tool",
|
||||||
|
tool_call_id=func_tool_id,
|
||||||
|
content="返回了图片(已直接发送给用户)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
yield MessageChain(type="tool_direct_result").base64_image(
|
||||||
|
res.content[0].data,
|
||||||
|
)
|
||||||
|
elif isinstance(res.content[0], EmbeddedResource):
|
||||||
|
resource = res.content[0].resource
|
||||||
|
if isinstance(resource, TextResourceContents):
|
||||||
|
tool_call_result_blocks.append(
|
||||||
|
ToolCallMessageSegment(
|
||||||
|
role="tool",
|
||||||
|
tool_call_id=func_tool_id,
|
||||||
|
content=resource.text,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
yield MessageChain().message(resource.text)
|
||||||
|
elif (
|
||||||
|
isinstance(resource, BlobResourceContents)
|
||||||
|
and resource.mimeType
|
||||||
|
and resource.mimeType.startswith("image/")
|
||||||
|
):
|
||||||
|
tool_call_result_blocks.append(
|
||||||
|
ToolCallMessageSegment(
|
||||||
|
role="tool",
|
||||||
|
tool_call_id=func_tool_id,
|
||||||
|
content="返回了图片(已直接发送给用户)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
yield MessageChain(
|
||||||
|
type="tool_direct_result",
|
||||||
|
).base64_image(resource.blob)
|
||||||
|
else:
|
||||||
|
tool_call_result_blocks.append(
|
||||||
|
ToolCallMessageSegment(
|
||||||
|
role="tool",
|
||||||
|
tool_call_id=func_tool_id,
|
||||||
|
content="返回的数据类型不受支持",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
yield MessageChain().message("返回的数据类型不受支持。")
|
||||||
|
|
||||||
|
elif resp is None:
|
||||||
|
# Tool 直接请求发送消息给用户
|
||||||
|
# 这里我们将直接结束 Agent Loop。
|
||||||
|
# 发送消息逻辑在 ToolExecutor 中处理了。
|
||||||
|
logger.warning(
|
||||||
|
f"{func_tool_name} 没有没有返回值或者将结果直接发送给用户,此工具调用不会被记录到历史中。"
|
||||||
|
)
|
||||||
|
self._transition_state(AgentState.DONE)
|
||||||
|
else:
|
||||||
|
# 不应该出现其他类型
|
||||||
|
logger.warning(
|
||||||
|
f"Tool 返回了不支持的类型: {type(resp)},将忽略。",
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
await self.agent_hooks.on_tool_end(
|
||||||
|
self.run_context,
|
||||||
|
func_tool,
|
||||||
|
func_tool_args,
|
||||||
|
_final_resp,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in on_tool_end hook: {e}", exc_info=True)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(traceback.format_exc())
|
||||||
|
tool_call_result_blocks.append(
|
||||||
|
ToolCallMessageSegment(
|
||||||
|
role="tool",
|
||||||
|
tool_call_id=func_tool_id,
|
||||||
|
content=f"error: {e!s}",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# 处理函数调用响应
|
||||||
|
if tool_call_result_blocks:
|
||||||
|
yield tool_call_result_blocks
|
||||||
|
|
||||||
|
def done(self) -> bool:
|
||||||
|
"""检查 Agent 是否已完成工作"""
|
||||||
|
return self._state in (AgentState.DONE, AgentState.ERROR)
|
||||||
|
|
||||||
|
def get_final_llm_resp(self) -> LLMResponse | None:
|
||||||
|
return self.final_llm_resp
|
||||||
286
astrbot/core/agent/tool.py
Normal file
286
astrbot/core/agent/tool.py
Normal file
@@ -0,0 +1,286 @@
|
|||||||
|
from collections.abc import Awaitable, Callable
|
||||||
|
from typing import Any, Generic
|
||||||
|
|
||||||
|
import jsonschema
|
||||||
|
import mcp
|
||||||
|
from deprecated import deprecated
|
||||||
|
from pydantic import model_validator
|
||||||
|
from pydantic.dataclasses import dataclass
|
||||||
|
|
||||||
|
from .run_context import ContextWrapper, TContext
|
||||||
|
|
||||||
|
ParametersType = dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ToolSchema:
|
||||||
|
"""A class representing the schema of a tool for function calling."""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
"""The name of the tool."""
|
||||||
|
|
||||||
|
description: str
|
||||||
|
"""The description of the tool."""
|
||||||
|
|
||||||
|
parameters: ParametersType
|
||||||
|
"""The parameters of the tool, in JSON Schema format."""
|
||||||
|
|
||||||
|
@model_validator(mode="after")
|
||||||
|
def validate_parameters(self) -> "ToolSchema":
|
||||||
|
jsonschema.validate(
|
||||||
|
self.parameters, jsonschema.Draft202012Validator.META_SCHEMA
|
||||||
|
)
|
||||||
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class FunctionTool(ToolSchema, Generic[TContext]):
|
||||||
|
"""A callable tool, for function calling."""
|
||||||
|
|
||||||
|
handler: Callable[..., Awaitable[Any]] | None = None
|
||||||
|
"""a callable that implements the tool's functionality. It should be an async function."""
|
||||||
|
|
||||||
|
handler_module_path: str | None = None
|
||||||
|
"""
|
||||||
|
The module path of the handler function. This is empty when the origin is mcp.
|
||||||
|
This field must be retained, as the handler will be wrapped in functools.partial during initialization,
|
||||||
|
causing the handler's __module__ to be functools
|
||||||
|
"""
|
||||||
|
active: bool = True
|
||||||
|
"""
|
||||||
|
Whether the tool is active. This field is a special field for AstrBot.
|
||||||
|
You can ignore it when integrating with other frameworks.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return f"FuncTool(name={self.name}, parameters={self.parameters}, description={self.description})"
|
||||||
|
|
||||||
|
async def call(
|
||||||
|
self, context: ContextWrapper[TContext], **kwargs
|
||||||
|
) -> str | mcp.types.CallToolResult:
|
||||||
|
"""Run the tool with the given arguments. The handler field has priority."""
|
||||||
|
raise NotImplementedError(
|
||||||
|
"FunctionTool.call() must be implemented by subclasses or set a handler."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ToolSet:
|
||||||
|
"""A set of function tools that can be used in function calling.
|
||||||
|
|
||||||
|
This class provides methods to add, remove, and retrieve tools, as well as
|
||||||
|
convert the tools to different API formats (OpenAI, Anthropic, Google GenAI).
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, tools: list[FunctionTool] | None = None):
|
||||||
|
self.tools: list[FunctionTool] = tools or []
|
||||||
|
|
||||||
|
def empty(self) -> bool:
|
||||||
|
"""Check if the tool set is empty."""
|
||||||
|
return len(self.tools) == 0
|
||||||
|
|
||||||
|
def add_tool(self, tool: FunctionTool):
|
||||||
|
"""Add a tool to the set."""
|
||||||
|
# 检查是否已存在同名工具
|
||||||
|
for i, existing_tool in enumerate(self.tools):
|
||||||
|
if existing_tool.name == tool.name:
|
||||||
|
self.tools[i] = tool
|
||||||
|
return
|
||||||
|
self.tools.append(tool)
|
||||||
|
|
||||||
|
def remove_tool(self, name: str):
|
||||||
|
"""Remove a tool by its name."""
|
||||||
|
self.tools = [tool for tool in self.tools if tool.name != name]
|
||||||
|
|
||||||
|
def get_tool(self, name: str) -> FunctionTool | None:
|
||||||
|
"""Get a tool by its name."""
|
||||||
|
for tool in self.tools:
|
||||||
|
if tool.name == name:
|
||||||
|
return tool
|
||||||
|
return None
|
||||||
|
|
||||||
|
@deprecated(reason="Use add_tool() instead", version="4.0.0")
|
||||||
|
def add_func(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
func_args: list,
|
||||||
|
desc: str,
|
||||||
|
handler: Callable[..., Awaitable[Any]],
|
||||||
|
):
|
||||||
|
"""Add a function tool to the set."""
|
||||||
|
params = {
|
||||||
|
"type": "object", # hard-coded here
|
||||||
|
"properties": {},
|
||||||
|
}
|
||||||
|
for param in func_args:
|
||||||
|
params["properties"][param["name"]] = {
|
||||||
|
"type": param["type"],
|
||||||
|
"description": param["description"],
|
||||||
|
}
|
||||||
|
_func = FunctionTool(
|
||||||
|
name=name,
|
||||||
|
parameters=params,
|
||||||
|
description=desc,
|
||||||
|
handler=handler,
|
||||||
|
)
|
||||||
|
self.add_tool(_func)
|
||||||
|
|
||||||
|
@deprecated(reason="Use remove_tool() instead", version="4.0.0")
|
||||||
|
def remove_func(self, name: str):
|
||||||
|
"""Remove a function tool by its name."""
|
||||||
|
self.remove_tool(name)
|
||||||
|
|
||||||
|
@deprecated(reason="Use get_tool() instead", version="4.0.0")
|
||||||
|
def get_func(self, name: str) -> FunctionTool | None:
|
||||||
|
"""Get all function tools."""
|
||||||
|
return self.get_tool(name)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def func_list(self) -> list[FunctionTool]:
|
||||||
|
"""Get the list of function tools."""
|
||||||
|
return self.tools
|
||||||
|
|
||||||
|
def openai_schema(self, omit_empty_parameter_field: bool = False) -> list[dict]:
|
||||||
|
"""Convert tools to OpenAI API function calling schema format."""
|
||||||
|
result = []
|
||||||
|
for tool in self.tools:
|
||||||
|
func_def = {
|
||||||
|
"type": "function",
|
||||||
|
"function": {
|
||||||
|
"name": tool.name,
|
||||||
|
"description": tool.description,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
tool.parameters and tool.parameters.get("properties")
|
||||||
|
) or not omit_empty_parameter_field:
|
||||||
|
func_def["function"]["parameters"] = tool.parameters
|
||||||
|
|
||||||
|
result.append(func_def)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def anthropic_schema(self) -> list[dict]:
|
||||||
|
"""Convert tools to Anthropic API format."""
|
||||||
|
result = []
|
||||||
|
for tool in self.tools:
|
||||||
|
input_schema = {"type": "object"}
|
||||||
|
if tool.parameters:
|
||||||
|
input_schema["properties"] = tool.parameters.get("properties", {})
|
||||||
|
input_schema["required"] = tool.parameters.get("required", [])
|
||||||
|
tool_def = {
|
||||||
|
"name": tool.name,
|
||||||
|
"description": tool.description,
|
||||||
|
"input_schema": input_schema,
|
||||||
|
}
|
||||||
|
result.append(tool_def)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def google_schema(self) -> dict:
|
||||||
|
"""Convert tools to Google GenAI API format."""
|
||||||
|
|
||||||
|
def convert_schema(schema: dict) -> dict:
|
||||||
|
"""Convert schema to Gemini API format."""
|
||||||
|
supported_types = {
|
||||||
|
"string",
|
||||||
|
"number",
|
||||||
|
"integer",
|
||||||
|
"boolean",
|
||||||
|
"array",
|
||||||
|
"object",
|
||||||
|
"null",
|
||||||
|
}
|
||||||
|
supported_formats = {
|
||||||
|
"string": {"enum", "date-time"},
|
||||||
|
"integer": {"int32", "int64"},
|
||||||
|
"number": {"float", "double"},
|
||||||
|
}
|
||||||
|
|
||||||
|
if "anyOf" in schema:
|
||||||
|
return {"anyOf": [convert_schema(s) for s in schema["anyOf"]]}
|
||||||
|
|
||||||
|
result = {}
|
||||||
|
|
||||||
|
if "type" in schema and schema["type"] in supported_types:
|
||||||
|
result["type"] = schema["type"]
|
||||||
|
if "format" in schema and schema["format"] in supported_formats.get(
|
||||||
|
result["type"],
|
||||||
|
set(),
|
||||||
|
):
|
||||||
|
result["format"] = schema["format"]
|
||||||
|
else:
|
||||||
|
result["type"] = "null"
|
||||||
|
|
||||||
|
support_fields = {
|
||||||
|
"title",
|
||||||
|
"description",
|
||||||
|
"enum",
|
||||||
|
"minimum",
|
||||||
|
"maximum",
|
||||||
|
"maxItems",
|
||||||
|
"minItems",
|
||||||
|
"nullable",
|
||||||
|
"required",
|
||||||
|
}
|
||||||
|
result.update({k: schema[k] for k in support_fields if k in schema})
|
||||||
|
|
||||||
|
if "properties" in schema:
|
||||||
|
properties = {}
|
||||||
|
for key, value in schema["properties"].items():
|
||||||
|
prop_value = convert_schema(value)
|
||||||
|
if "default" in prop_value:
|
||||||
|
del prop_value["default"]
|
||||||
|
properties[key] = prop_value
|
||||||
|
|
||||||
|
if properties:
|
||||||
|
result["properties"] = properties
|
||||||
|
|
||||||
|
if "items" in schema:
|
||||||
|
result["items"] = convert_schema(schema["items"])
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
tools = []
|
||||||
|
for tool in self.tools:
|
||||||
|
d: dict[str, Any] = {
|
||||||
|
"name": tool.name,
|
||||||
|
"description": tool.description,
|
||||||
|
}
|
||||||
|
if tool.parameters:
|
||||||
|
d["parameters"] = convert_schema(tool.parameters)
|
||||||
|
tools.append(d)
|
||||||
|
|
||||||
|
declarations = {}
|
||||||
|
if tools:
|
||||||
|
declarations["function_declarations"] = tools
|
||||||
|
return declarations
|
||||||
|
|
||||||
|
@deprecated(reason="Use openai_schema() instead", version="4.0.0")
|
||||||
|
def get_func_desc_openai_style(self, omit_empty_parameter_field: bool = False):
|
||||||
|
return self.openai_schema(omit_empty_parameter_field)
|
||||||
|
|
||||||
|
@deprecated(reason="Use anthropic_schema() instead", version="4.0.0")
|
||||||
|
def get_func_desc_anthropic_style(self):
|
||||||
|
return self.anthropic_schema()
|
||||||
|
|
||||||
|
@deprecated(reason="Use google_schema() instead", version="4.0.0")
|
||||||
|
def get_func_desc_google_genai_style(self):
|
||||||
|
return self.google_schema()
|
||||||
|
|
||||||
|
def names(self) -> list[str]:
|
||||||
|
"""获取所有工具的名称列表"""
|
||||||
|
return [tool.name for tool in self.tools]
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self.tools)
|
||||||
|
|
||||||
|
def __bool__(self):
|
||||||
|
return len(self.tools) > 0
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return iter(self.tools)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return f"ToolSet(tools={self.tools})"
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return f"ToolSet(tools={self.tools})"
|
||||||
17
astrbot/core/agent/tool_executor.py
Normal file
17
astrbot/core/agent/tool_executor.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
from collections.abc import AsyncGenerator
|
||||||
|
from typing import Any, Generic
|
||||||
|
|
||||||
|
import mcp
|
||||||
|
|
||||||
|
from .run_context import ContextWrapper, TContext
|
||||||
|
from .tool import FunctionTool
|
||||||
|
|
||||||
|
|
||||||
|
class BaseFunctionToolExecutor(Generic[TContext]):
|
||||||
|
@classmethod
|
||||||
|
async def execute(
|
||||||
|
cls,
|
||||||
|
tool: FunctionTool,
|
||||||
|
run_context: ContextWrapper[TContext],
|
||||||
|
**tool_args,
|
||||||
|
) -> AsyncGenerator[Any | mcp.types.CallToolResult, None]: ...
|
||||||
14
astrbot/core/astr_agent_context.py
Normal file
14
astrbot/core/astr_agent_context.py
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
from astrbot.core.platform.astr_message_event import AstrMessageEvent
|
||||||
|
from astrbot.core.provider import Provider
|
||||||
|
from astrbot.core.provider.entities import ProviderRequest
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class AstrAgentContext:
|
||||||
|
provider: Provider
|
||||||
|
first_provider_request: ProviderRequest
|
||||||
|
curr_provider_request: ProviderRequest
|
||||||
|
streaming: bool
|
||||||
|
event: AstrMessageEvent
|
||||||
275
astrbot/core/astrbot_config_mgr.py
Normal file
275
astrbot/core/astrbot_config_mgr.py
Normal file
@@ -0,0 +1,275 @@
|
|||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
from typing import TypedDict, TypeVar
|
||||||
|
|
||||||
|
from astrbot.core import AstrBotConfig, logger
|
||||||
|
from astrbot.core.config.astrbot_config import ASTRBOT_CONFIG_PATH
|
||||||
|
from astrbot.core.config.default import DEFAULT_CONFIG
|
||||||
|
from astrbot.core.platform.message_session import MessageSession
|
||||||
|
from astrbot.core.umop_config_router import UmopConfigRouter
|
||||||
|
from astrbot.core.utils.astrbot_path import get_astrbot_config_path
|
||||||
|
from astrbot.core.utils.shared_preferences import SharedPreferences
|
||||||
|
|
||||||
|
_VT = TypeVar("_VT")
|
||||||
|
|
||||||
|
|
||||||
|
class ConfInfo(TypedDict):
|
||||||
|
"""Configuration information for a specific session or platform."""
|
||||||
|
|
||||||
|
id: str # UUID of the configuration or "default"
|
||||||
|
name: str
|
||||||
|
path: str # File name to the configuration file
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_CONFIG_CONF_INFO = ConfInfo(
|
||||||
|
id="default",
|
||||||
|
name="default",
|
||||||
|
path=ASTRBOT_CONFIG_PATH,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AstrBotConfigManager:
|
||||||
|
"""A class to manage the system configuration of AstrBot, aka ACM"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
default_config: AstrBotConfig,
|
||||||
|
ucr: UmopConfigRouter,
|
||||||
|
sp: SharedPreferences,
|
||||||
|
):
|
||||||
|
self.sp = sp
|
||||||
|
self.ucr = ucr
|
||||||
|
self.confs: dict[str, AstrBotConfig] = {}
|
||||||
|
"""uuid / "default" -> AstrBotConfig"""
|
||||||
|
self.confs["default"] = default_config
|
||||||
|
self.abconf_data = None
|
||||||
|
self._load_all_configs()
|
||||||
|
|
||||||
|
def _get_abconf_data(self) -> dict:
|
||||||
|
"""获取所有的 abconf 数据"""
|
||||||
|
if self.abconf_data is None:
|
||||||
|
self.abconf_data = self.sp.get(
|
||||||
|
"abconf_mapping",
|
||||||
|
{},
|
||||||
|
scope="global",
|
||||||
|
scope_id="global",
|
||||||
|
)
|
||||||
|
return self.abconf_data
|
||||||
|
|
||||||
|
def _load_all_configs(self):
|
||||||
|
"""Load all configurations from the shared preferences."""
|
||||||
|
abconf_data = self._get_abconf_data()
|
||||||
|
self.abconf_data = abconf_data
|
||||||
|
for uuid_, meta in abconf_data.items():
|
||||||
|
filename = meta["path"]
|
||||||
|
conf_path = os.path.join(get_astrbot_config_path(), filename)
|
||||||
|
if os.path.exists(conf_path):
|
||||||
|
conf = AstrBotConfig(config_path=conf_path)
|
||||||
|
self.confs[uuid_] = conf
|
||||||
|
else:
|
||||||
|
logger.warning(
|
||||||
|
f"Config file {conf_path} for UUID {uuid_} does not exist, skipping.",
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
def _load_conf_mapping(self, umo: str | MessageSession) -> ConfInfo:
|
||||||
|
"""获取指定 umo 的配置文件 uuid, 如果不存在则返回默认配置(返回 "default")
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
ConfInfo: 包含配置文件的 uuid, 路径和名称等信息, 是一个 dict 类型
|
||||||
|
|
||||||
|
"""
|
||||||
|
# uuid -> { "path": str, "name": str }
|
||||||
|
abconf_data = self._get_abconf_data()
|
||||||
|
|
||||||
|
if isinstance(umo, MessageSession):
|
||||||
|
umo = str(umo)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
umo = str(MessageSession.from_str(umo)) # validate
|
||||||
|
except Exception:
|
||||||
|
return DEFAULT_CONFIG_CONF_INFO
|
||||||
|
|
||||||
|
conf_id = self.ucr.get_conf_id_for_umop(umo)
|
||||||
|
if conf_id:
|
||||||
|
meta = abconf_data.get(conf_id)
|
||||||
|
if meta and isinstance(meta, dict):
|
||||||
|
# the bind relation between umo and conf is defined in ucr now, so we remove "umop" here
|
||||||
|
meta.pop("umop", None)
|
||||||
|
return ConfInfo(**meta, id=conf_id)
|
||||||
|
|
||||||
|
return DEFAULT_CONFIG_CONF_INFO
|
||||||
|
|
||||||
|
def _save_conf_mapping(
|
||||||
|
self,
|
||||||
|
abconf_path: str,
|
||||||
|
abconf_id: str,
|
||||||
|
abconf_name: str | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""保存配置文件的映射关系"""
|
||||||
|
abconf_data = self.sp.get(
|
||||||
|
"abconf_mapping",
|
||||||
|
{},
|
||||||
|
scope="global",
|
||||||
|
scope_id="global",
|
||||||
|
)
|
||||||
|
random_word = abconf_name or uuid.uuid4().hex[:8]
|
||||||
|
abconf_data[abconf_id] = {
|
||||||
|
"path": abconf_path,
|
||||||
|
"name": random_word,
|
||||||
|
}
|
||||||
|
self.sp.put("abconf_mapping", abconf_data, scope="global", scope_id="global")
|
||||||
|
self.abconf_data = abconf_data
|
||||||
|
|
||||||
|
def get_conf(self, umo: str | MessageSession | None) -> AstrBotConfig:
|
||||||
|
"""获取指定 umo 的配置文件。如果不存在,则 fallback 到默认配置文件。"""
|
||||||
|
if not umo:
|
||||||
|
return self.confs["default"]
|
||||||
|
if isinstance(umo, MessageSession):
|
||||||
|
umo = f"{umo.platform_id}:{umo.message_type}:{umo.session_id}"
|
||||||
|
|
||||||
|
uuid_ = self._load_conf_mapping(umo)["id"]
|
||||||
|
|
||||||
|
conf = self.confs.get(uuid_)
|
||||||
|
if not conf:
|
||||||
|
conf = self.confs["default"] # default MUST exists
|
||||||
|
|
||||||
|
return conf
|
||||||
|
|
||||||
|
@property
|
||||||
|
def default_conf(self) -> AstrBotConfig:
|
||||||
|
"""获取默认配置文件"""
|
||||||
|
return self.confs["default"]
|
||||||
|
|
||||||
|
def get_conf_info(self, umo: str | MessageSession) -> ConfInfo:
|
||||||
|
"""获取指定 umo 的配置文件元数据"""
|
||||||
|
if isinstance(umo, MessageSession):
|
||||||
|
umo = f"{umo.platform_id}:{umo.message_type}:{umo.session_id}"
|
||||||
|
|
||||||
|
return self._load_conf_mapping(umo)
|
||||||
|
|
||||||
|
def get_conf_list(self) -> list[ConfInfo]:
|
||||||
|
"""获取所有配置文件的元数据列表"""
|
||||||
|
conf_list = []
|
||||||
|
abconf_mapping = self._get_abconf_data()
|
||||||
|
for uuid_, meta in abconf_mapping.items():
|
||||||
|
if not isinstance(meta, dict):
|
||||||
|
continue
|
||||||
|
meta.pop("umop", None)
|
||||||
|
conf_list.append(ConfInfo(**meta, id=uuid_))
|
||||||
|
conf_list.append(DEFAULT_CONFIG_CONF_INFO)
|
||||||
|
return conf_list
|
||||||
|
|
||||||
|
def create_conf(
|
||||||
|
self,
|
||||||
|
config: dict = DEFAULT_CONFIG,
|
||||||
|
name: str | None = None,
|
||||||
|
) -> str:
|
||||||
|
conf_uuid = str(uuid.uuid4())
|
||||||
|
conf_file_name = f"abconf_{conf_uuid}.json"
|
||||||
|
conf_path = os.path.join(get_astrbot_config_path(), conf_file_name)
|
||||||
|
conf = AstrBotConfig(config_path=conf_path, default_config=config)
|
||||||
|
conf.save_config()
|
||||||
|
self._save_conf_mapping(conf_file_name, conf_uuid, abconf_name=name)
|
||||||
|
self.confs[conf_uuid] = conf
|
||||||
|
return conf_uuid
|
||||||
|
|
||||||
|
def delete_conf(self, conf_id: str) -> bool:
|
||||||
|
"""删除指定配置文件
|
||||||
|
|
||||||
|
Args:
|
||||||
|
conf_id: 配置文件的 UUID
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: 删除是否成功
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: 如果试图删除默认配置文件
|
||||||
|
|
||||||
|
"""
|
||||||
|
if conf_id == "default":
|
||||||
|
raise ValueError("不能删除默认配置文件")
|
||||||
|
|
||||||
|
# 从映射中移除
|
||||||
|
abconf_data = self.sp.get(
|
||||||
|
"abconf_mapping",
|
||||||
|
{},
|
||||||
|
scope="global",
|
||||||
|
scope_id="global",
|
||||||
|
)
|
||||||
|
if conf_id not in abconf_data:
|
||||||
|
logger.warning(f"配置文件 {conf_id} 不存在于映射中")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# 获取配置文件路径
|
||||||
|
conf_path = os.path.join(
|
||||||
|
get_astrbot_config_path(),
|
||||||
|
abconf_data[conf_id]["path"],
|
||||||
|
)
|
||||||
|
|
||||||
|
# 删除配置文件
|
||||||
|
try:
|
||||||
|
if os.path.exists(conf_path):
|
||||||
|
os.remove(conf_path)
|
||||||
|
logger.info(f"已删除配置文件: {conf_path}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"删除配置文件 {conf_path} 失败: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# 从内存中移除
|
||||||
|
if conf_id in self.confs:
|
||||||
|
del self.confs[conf_id]
|
||||||
|
|
||||||
|
# 从映射中移除
|
||||||
|
del abconf_data[conf_id]
|
||||||
|
self.sp.put("abconf_mapping", abconf_data, scope="global", scope_id="global")
|
||||||
|
self.abconf_data = abconf_data
|
||||||
|
|
||||||
|
logger.info(f"成功删除配置文件 {conf_id}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
def update_conf_info(self, conf_id: str, name: str | None = None) -> bool:
|
||||||
|
"""更新配置文件信息
|
||||||
|
|
||||||
|
Args:
|
||||||
|
conf_id: 配置文件的 UUID
|
||||||
|
name: 新的配置文件名称 (可选)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: 更新是否成功
|
||||||
|
|
||||||
|
"""
|
||||||
|
if conf_id == "default":
|
||||||
|
raise ValueError("不能更新默认配置文件的信息")
|
||||||
|
|
||||||
|
abconf_data = self.sp.get(
|
||||||
|
"abconf_mapping",
|
||||||
|
{},
|
||||||
|
scope="global",
|
||||||
|
scope_id="global",
|
||||||
|
)
|
||||||
|
if conf_id not in abconf_data:
|
||||||
|
logger.warning(f"配置文件 {conf_id} 不存在于映射中")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# 更新名称
|
||||||
|
if name is not None:
|
||||||
|
abconf_data[conf_id]["name"] = name
|
||||||
|
|
||||||
|
# 保存更新
|
||||||
|
self.sp.put("abconf_mapping", abconf_data, scope="global", scope_id="global")
|
||||||
|
self.abconf_data = abconf_data
|
||||||
|
logger.info(f"成功更新配置文件 {conf_id} 的信息")
|
||||||
|
return True
|
||||||
|
|
||||||
|
def g(
|
||||||
|
self,
|
||||||
|
umo: str | None = None,
|
||||||
|
key: str | None = None,
|
||||||
|
default: _VT = None,
|
||||||
|
) -> _VT:
|
||||||
|
"""获取配置项。umo 为 None 时使用默认配置"""
|
||||||
|
if umo is None:
|
||||||
|
return self.confs["default"].get(key, default)
|
||||||
|
conf = self.get_conf(umo)
|
||||||
|
return conf.get(key, default)
|
||||||
@@ -1,2 +1,9 @@
|
|||||||
from .default import DEFAULT_CONFIG, VERSION, DB_PATH
|
from .astrbot_config import *
|
||||||
from .astrbot_config import *
|
from .default import DB_PATH, DEFAULT_CONFIG, VERSION
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"DB_PATH",
|
||||||
|
"DEFAULT_CONFIG",
|
||||||
|
"VERSION",
|
||||||
|
"AstrBotConfig",
|
||||||
|
]
|
||||||
|
|||||||
@@ -1,118 +1,163 @@
|
|||||||
import os
|
import enum
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import enum
|
import os
|
||||||
from .default import DEFAULT_CONFIG, DEFAULT_VALUE_MAP
|
|
||||||
from typing import Dict
|
|
||||||
|
|
||||||
ASTRBOT_CONFIG_PATH = "data/cmd_config.json"
|
from astrbot.core.utils.astrbot_path import get_astrbot_data_path
|
||||||
|
|
||||||
|
from .default import DEFAULT_CONFIG, DEFAULT_VALUE_MAP
|
||||||
|
|
||||||
|
ASTRBOT_CONFIG_PATH = os.path.join(get_astrbot_data_path(), "cmd_config.json")
|
||||||
logger = logging.getLogger("astrbot")
|
logger = logging.getLogger("astrbot")
|
||||||
|
|
||||||
|
|
||||||
class RateLimitStrategy(enum.Enum):
|
class RateLimitStrategy(enum.Enum):
|
||||||
STALL = "stall"
|
STALL = "stall"
|
||||||
DISCARD = "discard"
|
DISCARD = "discard"
|
||||||
|
|
||||||
|
|
||||||
class AstrBotConfig(dict):
|
class AstrBotConfig(dict):
|
||||||
'''从配置文件中加载的配置,支持直接通过点号操作符访问根配置项。
|
"""从配置文件中加载的配置,支持直接通过点号操作符访问根配置项。
|
||||||
|
|
||||||
- 初始化时会将传入的 default_config 与配置文件进行比对,如果配置文件中缺少配置项则会自动插入默认值并进行一次写入操作。会递归检查配置项。
|
- 初始化时会将传入的 default_config 与配置文件进行比对,如果配置文件中缺少配置项则会自动插入默认值并进行一次写入操作。会递归检查配置项。
|
||||||
- 如果配置文件路径对应的文件不存在,则会自动创建并写入默认配置。
|
- 如果配置文件路径对应的文件不存在,则会自动创建并写入默认配置。
|
||||||
- 如果传入了 schema,将会通过 schema 解析出 default_config,此时传入的 default_config 会被忽略。
|
- 如果传入了 schema,将会通过 schema 解析出 default_config,此时传入的 default_config 会被忽略。
|
||||||
'''
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
config_path: str = ASTRBOT_CONFIG_PATH,
|
config_path: str = ASTRBOT_CONFIG_PATH,
|
||||||
default_config: dict = DEFAULT_CONFIG,
|
default_config: dict = DEFAULT_CONFIG,
|
||||||
schema: dict = None
|
schema: dict | None = None,
|
||||||
):
|
):
|
||||||
super().__init__()
|
super().__init__()
|
||||||
|
|
||||||
# 调用父类的 __setattr__ 方法,防止保存配置时将此属性写入配置文件
|
# 调用父类的 __setattr__ 方法,防止保存配置时将此属性写入配置文件
|
||||||
object.__setattr__(self, 'config_path', config_path)
|
object.__setattr__(self, "config_path", config_path)
|
||||||
object.__setattr__(self, 'default_config', default_config)
|
object.__setattr__(self, "default_config", default_config)
|
||||||
object.__setattr__(self, 'schema', schema)
|
object.__setattr__(self, "schema", schema)
|
||||||
|
|
||||||
if schema:
|
if schema:
|
||||||
default_config = self._config_schema_to_default_config(schema)
|
default_config = self._config_schema_to_default_config(schema)
|
||||||
|
|
||||||
if not self.check_exist():
|
if not self.check_exist():
|
||||||
'''不存在时载入默认配置'''
|
"""不存在时载入默认配置"""
|
||||||
with open(config_path, "w", encoding="utf-8-sig") as f:
|
with open(config_path, "w", encoding="utf-8-sig") as f:
|
||||||
json.dump(default_config, f, indent=4, ensure_ascii=False)
|
json.dump(default_config, f, indent=4, ensure_ascii=False)
|
||||||
|
object.__setattr__(self, "first_deploy", True) # 标记第一次部署
|
||||||
|
|
||||||
with open(config_path, "r", encoding="utf-8-sig") as f:
|
with open(config_path, encoding="utf-8-sig") as f:
|
||||||
conf_str = f.read()
|
conf_str = f.read()
|
||||||
if conf_str.startswith(u'/ufeff'): # remove BOM
|
|
||||||
conf_str = conf_str.encode('utf8')[3:].decode('utf8')
|
|
||||||
conf = json.loads(conf_str)
|
conf = json.loads(conf_str)
|
||||||
|
|
||||||
# 检查配置完整性,并插入
|
# 检查配置完整性,并插入
|
||||||
has_new = self.check_config_integrity(default_config, conf)
|
has_new = self.check_config_integrity(default_config, conf)
|
||||||
self.update(conf)
|
self.update(conf)
|
||||||
if has_new:
|
if has_new:
|
||||||
self.save_config()
|
self.save_config()
|
||||||
|
|
||||||
self.update(conf)
|
self.update(conf)
|
||||||
|
|
||||||
def _config_schema_to_default_config(self, schema: dict) -> dict:
|
def _config_schema_to_default_config(self, schema: dict) -> dict:
|
||||||
'''将 Schema 转换成 Config'''
|
"""将 Schema 转换成 Config"""
|
||||||
conf = {}
|
conf = {}
|
||||||
|
|
||||||
def _parse_schema(schema: dict, conf: dict):
|
def _parse_schema(schema: dict, conf: dict):
|
||||||
for k, v in schema.items():
|
for k, v in schema.items():
|
||||||
if v['type'] not in DEFAULT_VALUE_MAP:
|
if v["type"] not in DEFAULT_VALUE_MAP:
|
||||||
raise TypeError(f"不受支持的配置类型 {v['type']}。支持的类型有:{DEFAULT_VALUE_MAP.keys()}")
|
raise TypeError(
|
||||||
if 'default' in v:
|
f"不受支持的配置类型 {v['type']}。支持的类型有:{DEFAULT_VALUE_MAP.keys()}",
|
||||||
default = v['default']
|
)
|
||||||
|
if "default" in v:
|
||||||
|
default = v["default"]
|
||||||
else:
|
else:
|
||||||
default = DEFAULT_VALUE_MAP[v['type']]
|
default = DEFAULT_VALUE_MAP[v["type"]]
|
||||||
|
|
||||||
if v['type'] == 'object':
|
if v["type"] == "object":
|
||||||
conf[k] = {}
|
conf[k] = {}
|
||||||
_parse_schema(v['items'], conf[k])
|
_parse_schema(v["items"], conf[k])
|
||||||
else:
|
else:
|
||||||
conf[k] = default
|
conf[k] = default
|
||||||
|
|
||||||
_parse_schema(schema, conf)
|
_parse_schema(schema, conf)
|
||||||
|
|
||||||
return conf
|
return conf
|
||||||
|
|
||||||
|
def check_config_integrity(self, refer_conf: dict, conf: dict, path=""):
|
||||||
def check_config_integrity(self, refer_conf: Dict, conf: Dict, path=""):
|
"""检查配置完整性,如果有新的配置项或顺序不一致则返回 True"""
|
||||||
'''检查配置完整性,如果有新的配置项则返回 True'''
|
|
||||||
has_new = False
|
has_new = False
|
||||||
|
|
||||||
|
# 创建一个新的有序字典以保持参考配置的顺序
|
||||||
|
new_conf = {}
|
||||||
|
|
||||||
|
# 先按照参考配置的顺序添加配置项
|
||||||
for key, value in refer_conf.items():
|
for key, value in refer_conf.items():
|
||||||
if key not in conf:
|
if key not in conf:
|
||||||
# logger.info(f"检查到配置项 {path + "." + key if path else key} 不存在,已插入默认值 {value}")
|
# 配置项不存在,插入默认值
|
||||||
path_ = path + "." + key if path else key
|
path_ = path + "." + key if path else key
|
||||||
logger.info(f"检查到配置项 {path_} 不存在,已插入默认值 {value}")
|
logger.info(f"检查到配置项 {path_} 不存在,已插入默认值 {value}")
|
||||||
conf[key] = value
|
new_conf[key] = value
|
||||||
has_new = True
|
has_new = True
|
||||||
else:
|
elif conf[key] is None:
|
||||||
if conf[key] is None:
|
# 配置项为 None,使用默认值
|
||||||
conf[key] = value
|
new_conf[key] = value
|
||||||
|
has_new = True
|
||||||
|
elif isinstance(value, dict):
|
||||||
|
# 递归检查子配置项
|
||||||
|
if not isinstance(conf[key], dict):
|
||||||
|
# 类型不匹配,使用默认值
|
||||||
|
new_conf[key] = value
|
||||||
has_new = True
|
has_new = True
|
||||||
elif isinstance(value, dict):
|
else:
|
||||||
has_new |= self.check_config_integrity(value, conf[key], path + "." + key if path else key)
|
# 递归检查并同步顺序
|
||||||
|
child_has_new = self.check_config_integrity(
|
||||||
|
value,
|
||||||
|
conf[key],
|
||||||
|
path + "." + key if path else key,
|
||||||
|
)
|
||||||
|
new_conf[key] = conf[key]
|
||||||
|
has_new |= child_has_new
|
||||||
|
else:
|
||||||
|
# 直接使用现有配置
|
||||||
|
new_conf[key] = conf[key]
|
||||||
|
|
||||||
|
# 检查是否存在参考配置中没有的配置项
|
||||||
|
for key in list(conf.keys()):
|
||||||
|
if key not in refer_conf:
|
||||||
|
path_ = path + "." + key if path else key
|
||||||
|
logger.info(f"检查到配置项 {path_} 不存在,将从当前配置中删除")
|
||||||
|
has_new = True
|
||||||
|
|
||||||
|
# 顺序不一致也算作变更
|
||||||
|
if list(conf.keys()) != list(new_conf.keys()):
|
||||||
|
if path:
|
||||||
|
logger.info(f"检查到配置项 {path} 的子项顺序不一致,已重新排序")
|
||||||
|
else:
|
||||||
|
logger.info("检查到配置项顺序不一致,已重新排序")
|
||||||
|
has_new = True
|
||||||
|
|
||||||
|
# 更新原始配置
|
||||||
|
conf.clear()
|
||||||
|
conf.update(new_conf)
|
||||||
|
|
||||||
return has_new
|
return has_new
|
||||||
|
|
||||||
def save_config(self, replace_config: Dict = None):
|
def save_config(self, replace_config: dict | None = None):
|
||||||
'''将配置写入文件
|
"""将配置写入文件
|
||||||
|
|
||||||
如果传入 replace_config,则将配置替换为 replace_config
|
如果传入 replace_config,则将配置替换为 replace_config
|
||||||
'''
|
"""
|
||||||
if replace_config:
|
if replace_config:
|
||||||
self.update(replace_config)
|
self.update(replace_config)
|
||||||
with open(self.config_path, "w", encoding="utf-8-sig") as f:
|
with open(self.config_path, "w", encoding="utf-8-sig") as f:
|
||||||
json.dump(self, f, indent=2, ensure_ascii=False)
|
json.dump(self, f, indent=2, ensure_ascii=False)
|
||||||
|
|
||||||
def __getattr__(self, item):
|
def __getattr__(self, item):
|
||||||
try:
|
try:
|
||||||
return self[item]
|
return self[item]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def __delattr__(self, key):
|
def __delattr__(self, key):
|
||||||
try:
|
try:
|
||||||
del self[key]
|
del self[key]
|
||||||
@@ -124,4 +169,4 @@ class AstrBotConfig(dict):
|
|||||||
self[key] = value
|
self[key] = value
|
||||||
|
|
||||||
def check_exist(self) -> bool:
|
def check_exist(self) -> bool:
|
||||||
return os.path.exists(self.config_path)
|
return os.path.exists(self.config_path)
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,119 +1,409 @@
|
|||||||
import uuid
|
"""AstrBot 会话-对话管理器, 维护两个本地存储, 其中一个是 json 格式的shared_preferences, 另外一个是数据库.
|
||||||
import json
|
|
||||||
import asyncio
|
在 AstrBot 中, 会话和对话是独立的, 会话用于标记对话窗口, 例如群聊"123456789"可以建立一个会话,
|
||||||
from astrbot.core import sp
|
在一个会话中可以建立多个对话, 并且支持对话的切换和删除
|
||||||
from typing import Dict, List
|
"""
|
||||||
from astrbot.core.db import BaseDatabase
|
|
||||||
from astrbot.core.db.po import Conversation
|
import json
|
||||||
|
from collections.abc import Awaitable, Callable
|
||||||
|
|
||||||
|
from astrbot.core import sp
|
||||||
|
from astrbot.core.agent.message import AssistantMessageSegment, UserMessageSegment
|
||||||
|
from astrbot.core.db import BaseDatabase
|
||||||
|
from astrbot.core.db.po import Conversation, ConversationV2
|
||||||
|
|
||||||
|
|
||||||
|
class ConversationManager:
|
||||||
|
"""负责管理会话与 LLM 的对话,某个会话当前正在用哪个对话。"""
|
||||||
|
|
||||||
class ConversationManager():
|
|
||||||
'''负责管理会话与 LLM 的对话,某个会话当前正在用哪个对话。'''
|
|
||||||
def __init__(self, db_helper: BaseDatabase):
|
def __init__(self, db_helper: BaseDatabase):
|
||||||
self.session_conversations: Dict[str, str] = sp.get("session_conversation", {})
|
self.session_conversations: dict[str, str] = {}
|
||||||
self.db = db_helper
|
self.db = db_helper
|
||||||
self.save_interval = 60 # 每 60 秒保存一次
|
self.save_interval = 60 # 每 60 秒保存一次
|
||||||
self._start_periodic_save()
|
|
||||||
|
|
||||||
def _start_periodic_save(self):
|
|
||||||
asyncio.create_task(self._periodic_save())
|
|
||||||
|
|
||||||
async def _periodic_save(self):
|
# 会话删除回调函数列表(用于级联清理,如知识库配置)
|
||||||
while True:
|
self._on_session_deleted_callbacks: list[Callable[[str], Awaitable[None]]] = []
|
||||||
await asyncio.sleep(self.save_interval)
|
|
||||||
self._save_to_storage()
|
|
||||||
|
|
||||||
def _save_to_storage(self):
|
def register_on_session_deleted(
|
||||||
sp.put("session_conversation", self.session_conversations)
|
self,
|
||||||
|
callback: Callable[[str], Awaitable[None]],
|
||||||
|
) -> None:
|
||||||
|
"""注册会话删除回调函数.
|
||||||
|
|
||||||
async def new_conversation(self, unified_msg_origin: str) -> str:
|
其他模块可以注册回调来响应会话删除事件,实现级联清理。
|
||||||
'''新建对话,并将当前会话的对话转移到新对话'''
|
例如:知识库模块可以注册回调来清理会话的知识库配置。
|
||||||
conversation_id = str(uuid.uuid4())
|
|
||||||
self.db.new_conversation(
|
Args:
|
||||||
user_id=unified_msg_origin,
|
callback: 回调函数,接收会话ID (unified_msg_origin) 作为参数
|
||||||
cid=conversation_id
|
|
||||||
|
"""
|
||||||
|
self._on_session_deleted_callbacks.append(callback)
|
||||||
|
|
||||||
|
async def _trigger_session_deleted(self, unified_msg_origin: str) -> None:
|
||||||
|
"""触发会话删除回调.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin: 会话ID
|
||||||
|
|
||||||
|
"""
|
||||||
|
for callback in self._on_session_deleted_callbacks:
|
||||||
|
try:
|
||||||
|
await callback(unified_msg_origin)
|
||||||
|
except Exception as e:
|
||||||
|
from astrbot.core import logger
|
||||||
|
|
||||||
|
logger.error(
|
||||||
|
f"会话删除回调执行失败 (session: {unified_msg_origin}): {e}",
|
||||||
|
)
|
||||||
|
|
||||||
|
def _convert_conv_from_v2_to_v1(self, conv_v2: ConversationV2) -> Conversation:
|
||||||
|
"""将 ConversationV2 对象转换为 Conversation 对象"""
|
||||||
|
created_at = int(conv_v2.created_at.timestamp())
|
||||||
|
updated_at = int(conv_v2.updated_at.timestamp())
|
||||||
|
return Conversation(
|
||||||
|
platform_id=conv_v2.platform_id,
|
||||||
|
user_id=conv_v2.user_id,
|
||||||
|
cid=conv_v2.conversation_id,
|
||||||
|
history=json.dumps(conv_v2.content or []),
|
||||||
|
title=conv_v2.title,
|
||||||
|
persona_id=conv_v2.persona_id,
|
||||||
|
created_at=created_at,
|
||||||
|
updated_at=updated_at,
|
||||||
)
|
)
|
||||||
self.session_conversations[unified_msg_origin] = conversation_id
|
|
||||||
sp.put("session_conversation", self.session_conversations)
|
async def new_conversation(
|
||||||
return conversation_id
|
self,
|
||||||
|
unified_msg_origin: str,
|
||||||
|
platform_id: str | None = None,
|
||||||
|
content: list[dict] | None = None,
|
||||||
|
title: str | None = None,
|
||||||
|
persona_id: str | None = None,
|
||||||
|
) -> str:
|
||||||
|
"""新建对话,并将当前会话的对话转移到新对话.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
|
Returns:
|
||||||
|
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not platform_id:
|
||||||
|
# 如果没有提供 platform_id,则从 unified_msg_origin 中解析
|
||||||
|
parts = unified_msg_origin.split(":")
|
||||||
|
if len(parts) >= 3:
|
||||||
|
platform_id = parts[0]
|
||||||
|
if not platform_id:
|
||||||
|
platform_id = "unknown"
|
||||||
|
conv = await self.db.create_conversation(
|
||||||
|
user_id=unified_msg_origin,
|
||||||
|
platform_id=platform_id,
|
||||||
|
content=content,
|
||||||
|
title=title,
|
||||||
|
persona_id=persona_id,
|
||||||
|
)
|
||||||
|
self.session_conversations[unified_msg_origin] = conv.conversation_id
|
||||||
|
await sp.session_put(unified_msg_origin, "sel_conv_id", conv.conversation_id)
|
||||||
|
return conv.conversation_id
|
||||||
|
|
||||||
async def switch_conversation(self, unified_msg_origin: str, conversation_id: str):
|
async def switch_conversation(self, unified_msg_origin: str, conversation_id: str):
|
||||||
'''切换会话的对话'''
|
"""切换会话的对话
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
|
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||||
|
|
||||||
|
"""
|
||||||
self.session_conversations[unified_msg_origin] = conversation_id
|
self.session_conversations[unified_msg_origin] = conversation_id
|
||||||
sp.put("session_conversation", self.session_conversations)
|
await sp.session_put(unified_msg_origin, "sel_conv_id", conversation_id)
|
||||||
|
|
||||||
async def delete_conversation(self, unified_msg_origin: str, conversation_id: str=None):
|
async def delete_conversation(
|
||||||
'''删除会话的对话,当 conversation_id 为 None 时删除会话当前的对话'''
|
self,
|
||||||
conversation_id = self.session_conversations.get(unified_msg_origin)
|
unified_msg_origin: str,
|
||||||
|
conversation_id: str | None = None,
|
||||||
|
):
|
||||||
|
"""删除会话的对话,当 conversation_id 为 None 时删除会话当前的对话
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
|
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not conversation_id:
|
||||||
|
conversation_id = self.session_conversations.get(unified_msg_origin)
|
||||||
if conversation_id:
|
if conversation_id:
|
||||||
self.db.delete_conversation(
|
await self.db.delete_conversation(cid=conversation_id)
|
||||||
user_id=unified_msg_origin,
|
curr_cid = await self.get_curr_conversation_id(unified_msg_origin)
|
||||||
cid=conversation_id
|
if curr_cid == conversation_id:
|
||||||
)
|
self.session_conversations.pop(unified_msg_origin, None)
|
||||||
del self.session_conversations[unified_msg_origin]
|
await sp.session_remove(unified_msg_origin, "sel_conv_id")
|
||||||
sp.put("session_conversation", self.session_conversations)
|
|
||||||
|
async def delete_conversations_by_user_id(self, unified_msg_origin: str):
|
||||||
async def get_curr_conversation_id(self, unified_msg_origin: str) -> str:
|
"""删除会话的所有对话
|
||||||
'''获取会话当前的对话 ID'''
|
|
||||||
return self.session_conversations.get(unified_msg_origin, None)
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
async def get_conversation(self, unified_msg_origin: str, conversation_id: str) -> Conversation:
|
|
||||||
'''获取会话的对话'''
|
"""
|
||||||
return self.db.get_conversation_by_user_id(unified_msg_origin, conversation_id)
|
await self.db.delete_conversations_by_user_id(user_id=unified_msg_origin)
|
||||||
|
self.session_conversations.pop(unified_msg_origin, None)
|
||||||
async def get_conversations(self, unified_msg_origin: str) -> List[Conversation]:
|
await sp.session_remove(unified_msg_origin, "sel_conv_id")
|
||||||
'''获取会话的所有对话'''
|
|
||||||
return self.db.get_conversations(unified_msg_origin)
|
# 触发会话删除回调(级联清理)
|
||||||
|
await self._trigger_session_deleted(unified_msg_origin)
|
||||||
async def update_conversation(self, unified_msg_origin: str, conversation_id: str, history: List[Dict]):
|
|
||||||
'''更新会话的对话'''
|
async def get_curr_conversation_id(self, unified_msg_origin: str) -> str | None:
|
||||||
|
"""获取会话当前的对话 ID
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
|
Returns:
|
||||||
|
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||||
|
|
||||||
|
"""
|
||||||
|
ret = self.session_conversations.get(unified_msg_origin, None)
|
||||||
|
if not ret:
|
||||||
|
ret = await sp.session_get(unified_msg_origin, "sel_conv_id", None)
|
||||||
|
if ret:
|
||||||
|
self.session_conversations[unified_msg_origin] = ret
|
||||||
|
return ret
|
||||||
|
|
||||||
|
async def get_conversation(
|
||||||
|
self,
|
||||||
|
unified_msg_origin: str,
|
||||||
|
conversation_id: str,
|
||||||
|
create_if_not_exists: bool = False,
|
||||||
|
) -> Conversation | None:
|
||||||
|
"""获取会话的对话.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
|
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||||
|
create_if_not_exists (bool): 如果对话不存在,是否创建一个新的对话
|
||||||
|
Returns:
|
||||||
|
conversation (Conversation): 对话对象
|
||||||
|
|
||||||
|
"""
|
||||||
|
conv = await self.db.get_conversation_by_id(cid=conversation_id)
|
||||||
|
if not conv and create_if_not_exists:
|
||||||
|
# 如果对话不存在且需要创建,则新建一个对话
|
||||||
|
conversation_id = await self.new_conversation(unified_msg_origin)
|
||||||
|
conv = await self.db.get_conversation_by_id(cid=conversation_id)
|
||||||
|
conv_res = None
|
||||||
|
if conv:
|
||||||
|
conv_res = self._convert_conv_from_v2_to_v1(conv)
|
||||||
|
return conv_res
|
||||||
|
|
||||||
|
async def get_conversations(
|
||||||
|
self,
|
||||||
|
unified_msg_origin: str | None = None,
|
||||||
|
platform_id: str | None = None,
|
||||||
|
) -> list[Conversation]:
|
||||||
|
"""获取对话列表.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id,可选
|
||||||
|
platform_id (str): 平台 ID, 可选参数, 用于过滤对话
|
||||||
|
Returns:
|
||||||
|
conversations (List[Conversation]): 对话对象列表
|
||||||
|
|
||||||
|
"""
|
||||||
|
convs = await self.db.get_conversations(
|
||||||
|
user_id=unified_msg_origin,
|
||||||
|
platform_id=platform_id,
|
||||||
|
)
|
||||||
|
convs_res = []
|
||||||
|
for conv in convs:
|
||||||
|
conv_res = self._convert_conv_from_v2_to_v1(conv)
|
||||||
|
convs_res.append(conv_res)
|
||||||
|
return convs_res
|
||||||
|
|
||||||
|
async def get_filtered_conversations(
|
||||||
|
self,
|
||||||
|
page: int = 1,
|
||||||
|
page_size: int = 20,
|
||||||
|
platform_ids: list[str] | None = None,
|
||||||
|
search_query: str = "",
|
||||||
|
**kwargs,
|
||||||
|
) -> tuple[list[Conversation], int]:
|
||||||
|
"""获取过滤后的对话列表.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
page (int): 页码, 默认为 1
|
||||||
|
page_size (int): 每页大小, 默认为 20
|
||||||
|
platform_ids (list[str]): 平台 ID 列表, 可选
|
||||||
|
search_query (str): 搜索查询字符串, 可选
|
||||||
|
Returns:
|
||||||
|
conversations (list[Conversation]): 对话对象列表
|
||||||
|
|
||||||
|
"""
|
||||||
|
convs, cnt = await self.db.get_filtered_conversations(
|
||||||
|
page=page,
|
||||||
|
page_size=page_size,
|
||||||
|
platform_ids=platform_ids,
|
||||||
|
search_query=search_query,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
convs_res = []
|
||||||
|
for conv in convs:
|
||||||
|
conv_res = self._convert_conv_from_v2_to_v1(conv)
|
||||||
|
convs_res.append(conv_res)
|
||||||
|
return convs_res, cnt
|
||||||
|
|
||||||
|
async def update_conversation(
|
||||||
|
self,
|
||||||
|
unified_msg_origin: str,
|
||||||
|
conversation_id: str | None = None,
|
||||||
|
history: list[dict] | None = None,
|
||||||
|
title: str | None = None,
|
||||||
|
persona_id: str | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""更新会话的对话.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
|
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||||
|
history (List[Dict]): 对话历史记录, 是一个字典列表, 每个字典包含 role 和 content 字段
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not conversation_id:
|
||||||
|
# 如果没有提供 conversation_id,则获取当前的
|
||||||
|
conversation_id = await self.get_curr_conversation_id(unified_msg_origin)
|
||||||
if conversation_id:
|
if conversation_id:
|
||||||
self.db.update_conversation(
|
await self.db.update_conversation(
|
||||||
user_id=unified_msg_origin,
|
|
||||||
cid=conversation_id,
|
cid=conversation_id,
|
||||||
history=json.dumps(history)
|
title=title,
|
||||||
|
persona_id=persona_id,
|
||||||
|
content=history,
|
||||||
)
|
)
|
||||||
|
|
||||||
async def update_conversation_title(self, unified_msg_origin: str, title: str):
|
async def update_conversation_title(
|
||||||
'''更新会话的对话标题'''
|
self,
|
||||||
conversation_id = self.session_conversations.get(unified_msg_origin)
|
unified_msg_origin: str,
|
||||||
if conversation_id:
|
title: str,
|
||||||
self.db.update_conversation_title(
|
conversation_id: str | None = None,
|
||||||
user_id=unified_msg_origin,
|
) -> None:
|
||||||
cid=conversation_id,
|
"""更新会话的对话标题.
|
||||||
title=title
|
|
||||||
)
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
async def update_conversation_persona_id(self, unified_msg_origin: str, persona_id: str):
|
title (str): 对话标题
|
||||||
'''更新会话的对话 Persona ID'''
|
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||||
conversation_id = self.session_conversations.get(unified_msg_origin)
|
Deprecated:
|
||||||
if conversation_id:
|
Use `update_conversation` with `title` parameter instead.
|
||||||
self.db.update_conversation_persona_id(
|
|
||||||
user_id=unified_msg_origin,
|
"""
|
||||||
cid=conversation_id,
|
await self.update_conversation(
|
||||||
persona_id=persona_id
|
unified_msg_origin=unified_msg_origin,
|
||||||
)
|
conversation_id=conversation_id,
|
||||||
|
title=title,
|
||||||
async def get_human_readable_context(self, unified_msg_origin, conversation_id, page=1, page_size=10):
|
)
|
||||||
|
|
||||||
|
async def update_conversation_persona_id(
|
||||||
|
self,
|
||||||
|
unified_msg_origin: str,
|
||||||
|
persona_id: str,
|
||||||
|
conversation_id: str | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""更新会话的对话 Persona ID.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
|
persona_id (str): 对话 Persona ID
|
||||||
|
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||||
|
Deprecated:
|
||||||
|
Use `update_conversation` with `persona_id` parameter instead.
|
||||||
|
|
||||||
|
"""
|
||||||
|
await self.update_conversation(
|
||||||
|
unified_msg_origin=unified_msg_origin,
|
||||||
|
conversation_id=conversation_id,
|
||||||
|
persona_id=persona_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def add_message_pair(
|
||||||
|
self,
|
||||||
|
cid: str,
|
||||||
|
user_message: UserMessageSegment | dict,
|
||||||
|
assistant_message: AssistantMessageSegment | dict,
|
||||||
|
) -> None:
|
||||||
|
"""Add a user-assistant message pair to the conversation history.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
cid (str): Conversation ID
|
||||||
|
user_message (UserMessageSegment | dict): OpenAI-format user message object or dict
|
||||||
|
assistant_message (AssistantMessageSegment | dict): OpenAI-format assistant message object or dict
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
Exception: If the conversation with the given ID is not found
|
||||||
|
"""
|
||||||
|
conv = await self.db.get_conversation_by_id(cid=cid)
|
||||||
|
if not conv:
|
||||||
|
raise Exception(f"Conversation with id {cid} not found")
|
||||||
|
history = conv.content or []
|
||||||
|
if isinstance(user_message, UserMessageSegment):
|
||||||
|
user_msg_dict = user_message.model_dump()
|
||||||
|
else:
|
||||||
|
user_msg_dict = user_message
|
||||||
|
if isinstance(assistant_message, AssistantMessageSegment):
|
||||||
|
assistant_msg_dict = assistant_message.model_dump()
|
||||||
|
else:
|
||||||
|
assistant_msg_dict = assistant_message
|
||||||
|
history.append(user_msg_dict)
|
||||||
|
history.append(assistant_msg_dict)
|
||||||
|
await self.db.update_conversation(
|
||||||
|
cid=cid,
|
||||||
|
content=history,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def get_human_readable_context(
|
||||||
|
self,
|
||||||
|
unified_msg_origin: str,
|
||||||
|
conversation_id: str,
|
||||||
|
page: int = 1,
|
||||||
|
page_size: int = 10,
|
||||||
|
) -> tuple[list[str], int]:
|
||||||
|
"""获取人类可读的上下文.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
unified_msg_origin (str): 统一的消息来源字符串。格式为 platform_name:message_type:session_id
|
||||||
|
conversation_id (str): 对话 ID, 是 uuid 格式的字符串
|
||||||
|
page (int): 页码
|
||||||
|
page_size (int): 每页大小
|
||||||
|
|
||||||
|
"""
|
||||||
conversation = await self.get_conversation(unified_msg_origin, conversation_id)
|
conversation = await self.get_conversation(unified_msg_origin, conversation_id)
|
||||||
|
if not conversation:
|
||||||
|
return [], 0
|
||||||
history = json.loads(conversation.history)
|
history = json.loads(conversation.history)
|
||||||
|
|
||||||
contexts = []
|
# contexts_groups 存放按顺序的段落(每个段落是一个 str 列表),
|
||||||
temp_contexts = []
|
# 之后会被展平成一个扁平的 str 列表返回。
|
||||||
|
contexts_groups: list[list[str]] = []
|
||||||
|
temp_contexts: list[str] = []
|
||||||
for record in history:
|
for record in history:
|
||||||
if record['role'] == "user":
|
if record["role"] == "user":
|
||||||
temp_contexts.append(f"User: {record['content']}")
|
temp_contexts.append(f"User: {record['content']}")
|
||||||
elif record['role'] == "assistant":
|
elif record["role"] == "assistant":
|
||||||
temp_contexts.append(f"Assistant: {record['content']}")
|
if record.get("content"):
|
||||||
contexts.insert(0, temp_contexts)
|
temp_contexts.append(f"Assistant: {record['content']}")
|
||||||
|
elif "tool_calls" in record:
|
||||||
|
tool_calls_str = json.dumps(
|
||||||
|
record["tool_calls"],
|
||||||
|
ensure_ascii=False,
|
||||||
|
)
|
||||||
|
temp_contexts.append(f"Assistant: [函数调用] {tool_calls_str}")
|
||||||
|
else:
|
||||||
|
temp_contexts.append("Assistant: [未知的内容]")
|
||||||
|
contexts_groups.insert(0, temp_contexts)
|
||||||
temp_contexts = []
|
temp_contexts = []
|
||||||
|
|
||||||
# 展平 contexts 列表
|
# 展平分组后的 contexts 列表为单层字符串列表
|
||||||
contexts = [item for sublist in contexts for item in sublist]
|
contexts = [item for sublist in contexts_groups for item in sublist]
|
||||||
|
|
||||||
# 计算分页
|
# 计算分页
|
||||||
paged_contexts = contexts[(page-1)*page_size:page*page_size]
|
paged_contexts = contexts[(page - 1) * page_size : page * page_size]
|
||||||
total_pages = len(contexts) // page_size
|
total_pages = len(contexts) // page_size
|
||||||
if len(contexts) % page_size != 0:
|
if len(contexts) % page_size != 0:
|
||||||
total_pages += 1
|
total_pages += 1
|
||||||
|
|
||||||
return paged_contexts, total_pages
|
return paged_contexts, total_pages
|
||||||
|
|||||||
@@ -1,118 +1,269 @@
|
|||||||
import traceback
|
"""Astrbot 核心生命周期管理类, 负责管理 AstrBot 的启动、停止、重启等操作.
|
||||||
|
|
||||||
|
该类负责初始化各个组件, 包括 ProviderManager、PlatformManager、ConversationManager、PluginManager、PipelineScheduler、EventBus等。
|
||||||
|
该类还负责加载和执行插件, 以及处理事件总线的分发。
|
||||||
|
|
||||||
|
工作流程:
|
||||||
|
1. 初始化所有组件
|
||||||
|
2. 启动事件总线和任务, 所有任务都在这里运行
|
||||||
|
3. 执行启动完成事件钩子
|
||||||
|
"""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import time
|
|
||||||
import threading
|
|
||||||
import os
|
import os
|
||||||
from .event_bus import EventBus
|
import threading
|
||||||
from . import astrbot_config
|
import time
|
||||||
|
import traceback
|
||||||
from asyncio import Queue
|
from asyncio import Queue
|
||||||
from typing import List
|
|
||||||
from astrbot.core.pipeline.scheduler import PipelineScheduler, PipelineContext
|
from astrbot.core import LogBroker, logger, sp
|
||||||
from astrbot.core.star import PluginManager
|
from astrbot.core.astrbot_config_mgr import AstrBotConfigManager
|
||||||
from astrbot.core.platform.manager import PlatformManager
|
|
||||||
from astrbot.core.star.context import Context
|
|
||||||
from astrbot.core.provider.manager import ProviderManager
|
|
||||||
from astrbot.core import LogBroker
|
|
||||||
from astrbot.core.db import BaseDatabase
|
|
||||||
from astrbot.core.updator import AstrBotUpdator
|
|
||||||
from astrbot.core import logger
|
|
||||||
from astrbot.core.config.default import VERSION
|
from astrbot.core.config.default import VERSION
|
||||||
from astrbot.core.rag.knowledge_db_mgr import KnowledgeDBManager
|
|
||||||
from astrbot.core.conversation_mgr import ConversationManager
|
from astrbot.core.conversation_mgr import ConversationManager
|
||||||
|
from astrbot.core.db import BaseDatabase
|
||||||
|
from astrbot.core.db.migration.migra_45_to_46 import migrate_45_to_46
|
||||||
|
from astrbot.core.knowledge_base.kb_mgr import KnowledgeBaseManager
|
||||||
|
from astrbot.core.persona_mgr import PersonaManager
|
||||||
|
from astrbot.core.pipeline.scheduler import PipelineContext, PipelineScheduler
|
||||||
|
from astrbot.core.platform.manager import PlatformManager
|
||||||
|
from astrbot.core.platform_message_history_mgr import PlatformMessageHistoryManager
|
||||||
|
from astrbot.core.provider.manager import ProviderManager
|
||||||
|
from astrbot.core.star import PluginManager
|
||||||
|
from astrbot.core.star.context import Context
|
||||||
|
from astrbot.core.star.star_handler import EventType, star_handlers_registry, star_map
|
||||||
|
from astrbot.core.umop_config_router import UmopConfigRouter
|
||||||
|
from astrbot.core.updator import AstrBotUpdator
|
||||||
|
|
||||||
|
from . import astrbot_config, html_renderer
|
||||||
|
from .event_bus import EventBus
|
||||||
|
|
||||||
|
|
||||||
class AstrBotCoreLifecycle:
|
class AstrBotCoreLifecycle:
|
||||||
def __init__(self, log_broker: LogBroker, db: BaseDatabase):
|
"""AstrBot 核心生命周期管理类, 负责管理 AstrBot 的启动、停止、重启等操作.
|
||||||
self.log_broker = log_broker
|
|
||||||
self.astrbot_config = astrbot_config
|
该类负责初始化各个组件, 包括 ProviderManager、PlatformManager、ConversationManager、PluginManager、PipelineScheduler、
|
||||||
self.db = db
|
EventBus 等。
|
||||||
|
该类还负责加载和执行插件, 以及处理事件总线的分发。
|
||||||
os.environ['https_proxy'] = self.astrbot_config['http_proxy']
|
"""
|
||||||
os.environ['http_proxy'] = self.astrbot_config['http_proxy']
|
|
||||||
os.environ['no_proxy'] = 'localhost'
|
def __init__(self, log_broker: LogBroker, db: BaseDatabase) -> None:
|
||||||
|
self.log_broker = log_broker # 初始化日志代理
|
||||||
async def initialize(self):
|
self.astrbot_config = astrbot_config # 初始化配置
|
||||||
logger.info("AstrBot v"+ VERSION)
|
self.db = db # 初始化数据库
|
||||||
if os.environ.get("TESTING", ""):
|
|
||||||
logger.setLevel("DEBUG")
|
# 设置代理
|
||||||
|
proxy_config = self.astrbot_config.get("http_proxy", "")
|
||||||
|
if proxy_config != "":
|
||||||
|
os.environ["https_proxy"] = proxy_config
|
||||||
|
os.environ["http_proxy"] = proxy_config
|
||||||
|
logger.debug(f"Using proxy: {proxy_config}")
|
||||||
|
# 设置 no_proxy
|
||||||
|
no_proxy_list = self.astrbot_config.get("no_proxy", [])
|
||||||
|
os.environ["no_proxy"] = ",".join(no_proxy_list)
|
||||||
else:
|
else:
|
||||||
logger.setLevel(self.astrbot_config['log_level'])
|
# 清空代理环境变量
|
||||||
|
if "https_proxy" in os.environ:
|
||||||
|
del os.environ["https_proxy"]
|
||||||
|
if "http_proxy" in os.environ:
|
||||||
|
del os.environ["http_proxy"]
|
||||||
|
if "no_proxy" in os.environ:
|
||||||
|
del os.environ["no_proxy"]
|
||||||
|
logger.debug("HTTP proxy cleared")
|
||||||
|
|
||||||
|
async def initialize(self) -> None:
|
||||||
|
"""初始化 AstrBot 核心生命周期管理类.
|
||||||
|
|
||||||
|
负责初始化各个组件, 包括 ProviderManager、PlatformManager、ConversationManager、PluginManager、PipelineScheduler、EventBus、AstrBotUpdator等。
|
||||||
|
"""
|
||||||
|
# 初始化日志代理
|
||||||
|
logger.info("AstrBot v" + VERSION)
|
||||||
|
if os.environ.get("TESTING", ""):
|
||||||
|
logger.setLevel("DEBUG") # 测试模式下设置日志级别为 DEBUG
|
||||||
|
else:
|
||||||
|
logger.setLevel(self.astrbot_config["log_level"]) # 设置日志级别
|
||||||
|
|
||||||
|
await self.db.initialize()
|
||||||
|
|
||||||
|
await html_renderer.initialize()
|
||||||
|
|
||||||
|
# 初始化 UMOP 配置路由器
|
||||||
|
self.umop_config_router = UmopConfigRouter(sp=sp)
|
||||||
|
|
||||||
|
# 初始化 AstrBot 配置管理器
|
||||||
|
self.astrbot_config_mgr = AstrBotConfigManager(
|
||||||
|
default_config=self.astrbot_config,
|
||||||
|
ucr=self.umop_config_router,
|
||||||
|
sp=sp,
|
||||||
|
)
|
||||||
|
|
||||||
|
# 4.5 to 4.6 migration for umop_config_router
|
||||||
|
try:
|
||||||
|
await migrate_45_to_46(self.astrbot_config_mgr, self.umop_config_router)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Migration from version 4.5 to 4.6 failed: {e!s}")
|
||||||
|
logger.error(traceback.format_exc())
|
||||||
|
|
||||||
|
# 初始化事件队列
|
||||||
self.event_queue = Queue()
|
self.event_queue = Queue()
|
||||||
self.event_queue.closed = False
|
|
||||||
|
# 初始化人格管理器
|
||||||
self.provider_manager = ProviderManager(self.astrbot_config, self.db)
|
self.persona_mgr = PersonaManager(self.db, self.astrbot_config_mgr)
|
||||||
|
await self.persona_mgr.initialize()
|
||||||
|
|
||||||
|
# 初始化供应商管理器
|
||||||
|
self.provider_manager = ProviderManager(
|
||||||
|
self.astrbot_config_mgr,
|
||||||
|
self.db,
|
||||||
|
self.persona_mgr,
|
||||||
|
)
|
||||||
|
|
||||||
|
# 初始化平台管理器
|
||||||
self.platform_manager = PlatformManager(self.astrbot_config, self.event_queue)
|
self.platform_manager = PlatformManager(self.astrbot_config, self.event_queue)
|
||||||
|
|
||||||
self.knowledge_db_manager = KnowledgeDBManager(self.astrbot_config)
|
# 初始化对话管理器
|
||||||
|
|
||||||
self.conversation_manager = ConversationManager(self.db)
|
self.conversation_manager = ConversationManager(self.db)
|
||||||
|
|
||||||
|
# 初始化平台消息历史管理器
|
||||||
|
self.platform_message_history_manager = PlatformMessageHistoryManager(self.db)
|
||||||
|
|
||||||
|
# 初始化知识库管理器
|
||||||
|
self.kb_manager = KnowledgeBaseManager(self.provider_manager)
|
||||||
|
|
||||||
|
# 初始化提供给插件的上下文
|
||||||
self.star_context = Context(
|
self.star_context = Context(
|
||||||
self.event_queue,
|
self.event_queue,
|
||||||
self.astrbot_config,
|
self.astrbot_config,
|
||||||
self.db,
|
self.db,
|
||||||
self.provider_manager,
|
self.provider_manager,
|
||||||
self.platform_manager,
|
self.platform_manager,
|
||||||
self.conversation_manager,
|
self.conversation_manager,
|
||||||
self.knowledge_db_manager
|
self.platform_message_history_manager,
|
||||||
|
self.persona_mgr,
|
||||||
|
self.astrbot_config_mgr,
|
||||||
|
self.kb_manager,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# 初始化插件管理器
|
||||||
self.plugin_manager = PluginManager(self.star_context, self.astrbot_config)
|
self.plugin_manager = PluginManager(self.star_context, self.astrbot_config)
|
||||||
|
|
||||||
|
# 扫描、注册插件、实例化插件类
|
||||||
await self.plugin_manager.reload()
|
await self.plugin_manager.reload()
|
||||||
'''扫描、注册插件、实例化插件类'''
|
|
||||||
|
# 根据配置实例化各个 Provider
|
||||||
await self.provider_manager.initialize()
|
await self.provider_manager.initialize()
|
||||||
'''根据配置实例化各个 Provider'''
|
|
||||||
|
await self.kb_manager.initialize()
|
||||||
self.pipeline_scheduler = PipelineScheduler(PipelineContext(self.astrbot_config, self.plugin_manager))
|
|
||||||
await self.pipeline_scheduler.initialize()
|
# 初始化消息事件流水线调度器
|
||||||
'''初始化消息事件流水线调度器'''
|
self.pipeline_scheduler_mapping = await self.load_pipeline_scheduler()
|
||||||
|
|
||||||
self.astrbot_updator = AstrBotUpdator(self.astrbot_config['plugin_repo_mirror'])
|
# 初始化更新器
|
||||||
self.event_bus = EventBus(self.event_queue, self.pipeline_scheduler)
|
self.astrbot_updator = AstrBotUpdator()
|
||||||
|
|
||||||
|
# 初始化事件总线
|
||||||
|
self.event_bus = EventBus(
|
||||||
|
self.event_queue,
|
||||||
|
self.pipeline_scheduler_mapping,
|
||||||
|
self.astrbot_config_mgr,
|
||||||
|
)
|
||||||
|
|
||||||
|
# 记录启动时间
|
||||||
self.start_time = int(time.time())
|
self.start_time = int(time.time())
|
||||||
self.curr_tasks: List[asyncio.Task] = []
|
|
||||||
|
# 初始化当前任务列表
|
||||||
|
self.curr_tasks: list[asyncio.Task] = []
|
||||||
|
|
||||||
|
# 根据配置实例化各个平台适配器
|
||||||
await self.platform_manager.initialize()
|
await self.platform_manager.initialize()
|
||||||
'''根据配置实例化各个平台适配器'''
|
|
||||||
|
# 初始化关闭控制面板的事件
|
||||||
def _load(self):
|
self.dashboard_shutdown_event = asyncio.Event()
|
||||||
event_bus_task = asyncio.create_task(self.event_bus.dispatch(), name="event_bus")
|
|
||||||
|
def _load(self) -> None:
|
||||||
|
"""加载事件总线和任务并初始化."""
|
||||||
|
# 创建一个异步任务来执行事件总线的 dispatch() 方法
|
||||||
|
# dispatch是一个无限循环的协程, 从事件队列中获取事件并处理
|
||||||
|
event_bus_task = asyncio.create_task(
|
||||||
|
self.event_bus.dispatch(),
|
||||||
|
name="event_bus",
|
||||||
|
)
|
||||||
|
|
||||||
|
# 把插件中注册的所有协程函数注册到事件总线中并执行
|
||||||
extra_tasks = []
|
extra_tasks = []
|
||||||
for task in self.star_context._register_tasks:
|
for task in self.star_context._register_tasks:
|
||||||
extra_tasks.append(asyncio.create_task(task, name=task.__name__))
|
extra_tasks.append(asyncio.create_task(task, name=task.__name__))
|
||||||
|
|
||||||
tasks_ = [event_bus_task, *extra_tasks]
|
tasks_ = [event_bus_task, *extra_tasks]
|
||||||
for task in tasks_:
|
for task in tasks_:
|
||||||
self.curr_tasks.append(asyncio.create_task(self._task_wrapper(task), name=task.get_name()))
|
self.curr_tasks.append(
|
||||||
|
asyncio.create_task(self._task_wrapper(task), name=task.get_name()),
|
||||||
|
)
|
||||||
|
|
||||||
self.start_time = int(time.time())
|
self.start_time = int(time.time())
|
||||||
|
|
||||||
async def _task_wrapper(self, task: asyncio.Task):
|
async def _task_wrapper(self, task: asyncio.Task) -> None:
|
||||||
|
"""异步任务包装器, 用于处理异步任务执行中出现的各种异常.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
task (asyncio.Task): 要执行的异步任务
|
||||||
|
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
await task
|
await task
|
||||||
except asyncio.CancelledError:
|
except asyncio.CancelledError:
|
||||||
pass
|
pass # 任务被取消, 静默处理
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
# 获取完整的异常堆栈信息, 按行分割并记录到日志中
|
||||||
logger.error(f"------- 任务 {task.get_name()} 发生错误: {e}")
|
logger.error(f"------- 任务 {task.get_name()} 发生错误: {e}")
|
||||||
for line in traceback.format_exc().split("\n"):
|
for line in traceback.format_exc().split("\n"):
|
||||||
logger.error(f"| {line}")
|
logger.error(f"| {line}")
|
||||||
logger.error("-------")
|
logger.error("-------")
|
||||||
|
|
||||||
async def start(self):
|
async def start(self) -> None:
|
||||||
|
"""启动 AstrBot 核心生命周期管理类.
|
||||||
|
|
||||||
|
用load加载事件总线和任务并初始化, 执行启动完成事件钩子
|
||||||
|
"""
|
||||||
self._load()
|
self._load()
|
||||||
logger.info("AstrBot 启动完成。")
|
logger.info("AstrBot 启动完成。")
|
||||||
|
|
||||||
|
# 执行启动完成事件钩子
|
||||||
|
handlers = star_handlers_registry.get_handlers_by_event_type(
|
||||||
|
EventType.OnAstrBotLoadedEvent,
|
||||||
|
)
|
||||||
|
for handler in handlers:
|
||||||
|
try:
|
||||||
|
logger.info(
|
||||||
|
f"hook(on_astrbot_loaded) -> {star_map[handler.handler_module_path].name} - {handler.handler_name}",
|
||||||
|
)
|
||||||
|
await handler.handler()
|
||||||
|
except BaseException:
|
||||||
|
logger.error(traceback.format_exc())
|
||||||
|
|
||||||
|
# 同时运行curr_tasks中的所有任务
|
||||||
await asyncio.gather(*self.curr_tasks, return_exceptions=True)
|
await asyncio.gather(*self.curr_tasks, return_exceptions=True)
|
||||||
|
|
||||||
async def stop(self):
|
async def stop(self) -> None:
|
||||||
self.event_queue.closed = True
|
"""停止 AstrBot 核心生命周期管理类, 取消所有当前任务并终止各个管理器."""
|
||||||
|
# 请求停止所有正在运行的异步任务
|
||||||
for task in self.curr_tasks:
|
for task in self.curr_tasks:
|
||||||
task.cancel()
|
task.cancel()
|
||||||
|
|
||||||
|
for plugin in self.plugin_manager.context.get_all_stars():
|
||||||
|
try:
|
||||||
|
await self.plugin_manager._terminate_plugin(plugin)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(traceback.format_exc())
|
||||||
|
logger.warning(
|
||||||
|
f"插件 {plugin.name} 未被正常终止 {e!s}, 可能会导致资源泄露等问题。",
|
||||||
|
)
|
||||||
|
|
||||||
await self.provider_manager.terminate()
|
await self.provider_manager.terminate()
|
||||||
|
await self.platform_manager.terminate()
|
||||||
|
await self.kb_manager.terminate()
|
||||||
|
self.dashboard_shutdown_event.set()
|
||||||
|
|
||||||
|
# 再次遍历curr_tasks等待每个任务真正结束
|
||||||
for task in self.curr_tasks:
|
for task in self.curr_tasks:
|
||||||
try:
|
try:
|
||||||
await task
|
await task
|
||||||
@@ -120,14 +271,60 @@ class AstrBotCoreLifecycle:
|
|||||||
pass
|
pass
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"任务 {task.get_name()} 发生错误: {e}")
|
logger.error(f"任务 {task.get_name()} 发生错误: {e}")
|
||||||
|
|
||||||
def restart(self):
|
async def restart(self) -> None:
|
||||||
self.event_queue.closed = True
|
"""重启 AstrBot 核心生命周期管理类, 终止各个管理器并重新加载平台实例"""
|
||||||
threading.Thread(target=self.astrbot_updator._reboot, name="restart", daemon=True).start()
|
await self.provider_manager.terminate()
|
||||||
|
await self.platform_manager.terminate()
|
||||||
def load_platform(self) -> List[asyncio.Task]:
|
await self.kb_manager.terminate()
|
||||||
|
self.dashboard_shutdown_event.set()
|
||||||
|
threading.Thread(
|
||||||
|
target=self.astrbot_updator._reboot,
|
||||||
|
name="restart",
|
||||||
|
daemon=True,
|
||||||
|
).start()
|
||||||
|
|
||||||
|
def load_platform(self) -> list[asyncio.Task]:
|
||||||
|
"""加载平台实例并返回所有平台实例的异步任务列表"""
|
||||||
tasks = []
|
tasks = []
|
||||||
platform_insts = self.platform_manager.get_insts()
|
platform_insts = self.platform_manager.get_insts()
|
||||||
for platform_inst in platform_insts:
|
for platform_inst in platform_insts:
|
||||||
tasks.append(asyncio.create_task(platform_inst.run(), name=platform_inst.meta().name))
|
tasks.append(
|
||||||
return tasks
|
asyncio.create_task(
|
||||||
|
platform_inst.run(),
|
||||||
|
name=f"{platform_inst.meta().id}({platform_inst.meta().name})",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return tasks
|
||||||
|
|
||||||
|
async def load_pipeline_scheduler(self) -> dict[str, PipelineScheduler]:
|
||||||
|
"""加载消息事件流水线调度器.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict[str, PipelineScheduler]: 平台 ID 到流水线调度器的映射
|
||||||
|
|
||||||
|
"""
|
||||||
|
mapping = {}
|
||||||
|
for conf_id, ab_config in self.astrbot_config_mgr.confs.items():
|
||||||
|
scheduler = PipelineScheduler(
|
||||||
|
PipelineContext(ab_config, self.plugin_manager, conf_id),
|
||||||
|
)
|
||||||
|
await scheduler.initialize()
|
||||||
|
mapping[conf_id] = scheduler
|
||||||
|
return mapping
|
||||||
|
|
||||||
|
async def reload_pipeline_scheduler(self, conf_id: str) -> None:
|
||||||
|
"""重新加载消息事件流水线调度器.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict[str, PipelineScheduler]: 平台 ID 到流水线调度器的映射
|
||||||
|
|
||||||
|
"""
|
||||||
|
ab_config = self.astrbot_config_mgr.confs.get(conf_id)
|
||||||
|
if not ab_config:
|
||||||
|
raise ValueError(f"配置文件 {conf_id} 不存在")
|
||||||
|
scheduler = PipelineScheduler(
|
||||||
|
PipelineContext(ab_config, self.plugin_manager, conf_id),
|
||||||
|
)
|
||||||
|
await scheduler.initialize()
|
||||||
|
self.pipeline_scheduler_mapping[conf_id] = scheduler
|
||||||
|
|||||||
@@ -1,113 +1,315 @@
|
|||||||
import abc
|
import abc
|
||||||
|
import datetime
|
||||||
|
import typing as T
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import List
|
|
||||||
from astrbot.core.db.po import Stats, LLMHistory, ATRIVision, Conversation
|
from deprecated import deprecated
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
|
from astrbot.core.db.po import (
|
||||||
|
Attachment,
|
||||||
|
ConversationV2,
|
||||||
|
Persona,
|
||||||
|
PlatformMessageHistory,
|
||||||
|
PlatformStat,
|
||||||
|
Preference,
|
||||||
|
Stats,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class BaseDatabase(abc.ABC):
|
class BaseDatabase(abc.ABC):
|
||||||
'''
|
"""数据库基类"""
|
||||||
数据库基类
|
|
||||||
'''
|
DATABASE_URL = ""
|
||||||
|
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
pass
|
self.engine = create_async_engine(
|
||||||
|
self.DATABASE_URL,
|
||||||
def insert_base_metrics(self, metrics: dict):
|
echo=False,
|
||||||
'''插入基础指标数据'''
|
future=True,
|
||||||
self.insert_platform_metrics(metrics['platform_stats'])
|
)
|
||||||
self.insert_plugin_metrics(metrics['plugin_stats'])
|
self.AsyncSessionLocal = sessionmaker(
|
||||||
self.insert_command_metrics(metrics['command_stats'])
|
self.engine,
|
||||||
self.insert_llm_metrics(metrics['llm_stats'])
|
class_=AsyncSession,
|
||||||
|
expire_on_commit=False,
|
||||||
@abc.abstractmethod
|
)
|
||||||
def insert_platform_metrics(self, metrics: dict):
|
|
||||||
'''插入平台指标数据'''
|
async def initialize(self):
|
||||||
raise NotImplementedError
|
"""初始化数据库连接"""
|
||||||
|
|
||||||
@abc.abstractmethod
|
@asynccontextmanager
|
||||||
def insert_plugin_metrics(self, metrics: dict):
|
async def get_db(self) -> T.AsyncGenerator[AsyncSession, None]:
|
||||||
'''插入插件指标数据'''
|
"""Get a database session."""
|
||||||
raise NotImplementedError
|
if not self.inited:
|
||||||
|
await self.initialize()
|
||||||
@abc.abstractmethod
|
self.inited = True
|
||||||
def insert_command_metrics(self, metrics: dict):
|
async with self.AsyncSessionLocal() as session:
|
||||||
'''插入指令指标数据'''
|
yield session
|
||||||
raise NotImplementedError
|
|
||||||
|
@deprecated(version="4.0.0", reason="Use get_platform_stats instead")
|
||||||
@abc.abstractmethod
|
|
||||||
def insert_llm_metrics(self, metrics: dict):
|
|
||||||
'''插入 LLM 指标数据'''
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def update_llm_history(self, session_id: str, content: str, provider_type: str):
|
|
||||||
'''更新 LLM 历史记录。当不存在 session_id 时插入'''
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def get_llm_history(self, session_id: str = None, provider_type: str = None) -> List[LLMHistory]:
|
|
||||||
'''获取 LLM 历史记录, 如果 session_id 为 None, 返回所有'''
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def get_base_stats(self, offset_sec: int = 86400) -> Stats:
|
def get_base_stats(self, offset_sec: int = 86400) -> Stats:
|
||||||
'''获取基础统计数据'''
|
"""获取基础统计数据"""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@deprecated(version="4.0.0", reason="Use get_platform_stats instead")
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def get_total_message_count(self) -> int:
|
def get_total_message_count(self) -> int:
|
||||||
'''获取总消息数'''
|
"""获取总消息数"""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@deprecated(version="4.0.0", reason="Use get_platform_stats instead")
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def get_grouped_base_stats(self, offset_sec: int = 86400) -> Stats:
|
def get_grouped_base_stats(self, offset_sec: int = 86400) -> Stats:
|
||||||
'''获取基础统计数据(合并)'''
|
"""获取基础统计数据(合并)"""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@abc.abstractmethod
|
# New methods in v4.0.0
|
||||||
def insert_atri_vision_data(self, vision_data: ATRIVision):
|
|
||||||
'''插入 ATRI 视觉数据'''
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def get_atri_vision_data(self) -> List[ATRIVision]:
|
|
||||||
'''获取 ATRI 视觉数据'''
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def get_atri_vision_data_by_path_or_id(self, url_or_path: str, id: str) -> ATRIVision:
|
|
||||||
'''通过 url 或 path 获取 ATRI 视觉数据'''
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def get_conversation_by_user_id(self, user_id: str, cid: str) -> Conversation:
|
|
||||||
'''通过 user_id 和 cid 获取 Conversation'''
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def new_conversation(self, user_id: str, cid: str):
|
|
||||||
'''新建 Conversation'''
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def get_conversations(self, user_id: str) -> List[Conversation]:
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def update_conversation(self, user_id: str, cid: str, history: str):
|
async def insert_platform_stats(
|
||||||
'''更新 Conversation'''
|
self,
|
||||||
raise NotImplementedError
|
platform_id: str,
|
||||||
|
platform_type: str,
|
||||||
|
count: int = 1,
|
||||||
|
timestamp: datetime.datetime | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""Insert a new platform statistic record."""
|
||||||
|
...
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def delete_conversation(self, user_id: str, cid: str):
|
async def count_platform_stats(self) -> int:
|
||||||
'''删除 Conversation'''
|
"""Count the number of platform statistics records."""
|
||||||
raise NotImplementedError
|
...
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def update_conversation_title(self, user_id: str, cid: str, title: str):
|
async def get_platform_stats(self, offset_sec: int = 86400) -> list[PlatformStat]:
|
||||||
'''更新 Conversation 标题'''
|
"""Get platform statistics within the specified offset in seconds and group by platform_id."""
|
||||||
raise NotImplementedError
|
...
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def update_conversation_persona_id(self, user_id: str, cid: str, persona_id: str):
|
async def get_conversations(
|
||||||
'''更新 Conversation Persona ID'''
|
self,
|
||||||
raise NotImplementedError
|
user_id: str | None = None,
|
||||||
|
platform_id: str | None = None,
|
||||||
|
) -> list[ConversationV2]:
|
||||||
|
"""Get all conversations for a specific user and platform_id(optional).
|
||||||
|
|
||||||
|
content is not included in the result.
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_conversation_by_id(self, cid: str) -> ConversationV2:
|
||||||
|
"""Get a specific conversation by its ID."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_all_conversations(
|
||||||
|
self,
|
||||||
|
page: int = 1,
|
||||||
|
page_size: int = 20,
|
||||||
|
) -> list[ConversationV2]:
|
||||||
|
"""Get all conversations with pagination."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_filtered_conversations(
|
||||||
|
self,
|
||||||
|
page: int = 1,
|
||||||
|
page_size: int = 20,
|
||||||
|
platform_ids: list[str] | None = None,
|
||||||
|
search_query: str = "",
|
||||||
|
**kwargs,
|
||||||
|
) -> tuple[list[ConversationV2], int]:
|
||||||
|
"""Get conversations filtered by platform IDs and search query."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def create_conversation(
|
||||||
|
self,
|
||||||
|
user_id: str,
|
||||||
|
platform_id: str,
|
||||||
|
content: list[dict] | None = None,
|
||||||
|
title: str | None = None,
|
||||||
|
persona_id: str | None = None,
|
||||||
|
cid: str | None = None,
|
||||||
|
created_at: datetime.datetime | None = None,
|
||||||
|
updated_at: datetime.datetime | None = None,
|
||||||
|
) -> ConversationV2:
|
||||||
|
"""Create a new conversation."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def update_conversation(
|
||||||
|
self,
|
||||||
|
cid: str,
|
||||||
|
title: str | None = None,
|
||||||
|
persona_id: str | None = None,
|
||||||
|
content: list[dict] | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""Update a conversation's history."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def delete_conversation(self, cid: str) -> None:
|
||||||
|
"""Delete a conversation by its ID."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def delete_conversations_by_user_id(self, user_id: str) -> None:
|
||||||
|
"""Delete all conversations for a specific user."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def insert_platform_message_history(
|
||||||
|
self,
|
||||||
|
platform_id: str,
|
||||||
|
user_id: str,
|
||||||
|
content: dict,
|
||||||
|
sender_id: str | None = None,
|
||||||
|
sender_name: str | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""Insert a new platform message history record."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def delete_platform_message_offset(
|
||||||
|
self,
|
||||||
|
platform_id: str,
|
||||||
|
user_id: str,
|
||||||
|
offset_sec: int = 86400,
|
||||||
|
) -> None:
|
||||||
|
"""Delete platform message history records older than the specified offset."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_platform_message_history(
|
||||||
|
self,
|
||||||
|
platform_id: str,
|
||||||
|
user_id: str,
|
||||||
|
page: int = 1,
|
||||||
|
page_size: int = 20,
|
||||||
|
) -> list[PlatformMessageHistory]:
|
||||||
|
"""Get platform message history for a specific user."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def insert_attachment(
|
||||||
|
self,
|
||||||
|
path: str,
|
||||||
|
type: str,
|
||||||
|
mime_type: str,
|
||||||
|
):
|
||||||
|
"""Insert a new attachment record."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_attachment_by_id(self, attachment_id: str) -> Attachment:
|
||||||
|
"""Get an attachment by its ID."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def insert_persona(
|
||||||
|
self,
|
||||||
|
persona_id: str,
|
||||||
|
system_prompt: str,
|
||||||
|
begin_dialogs: list[str] | None = None,
|
||||||
|
tools: list[str] | None = None,
|
||||||
|
) -> Persona:
|
||||||
|
"""Insert a new persona record."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_persona_by_id(self, persona_id: str) -> Persona:
|
||||||
|
"""Get a persona by its ID."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_personas(self) -> list[Persona]:
|
||||||
|
"""Get all personas for a specific bot."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def update_persona(
|
||||||
|
self,
|
||||||
|
persona_id: str,
|
||||||
|
system_prompt: str | None = None,
|
||||||
|
begin_dialogs: list[str] | None = None,
|
||||||
|
tools: list[str] | None = None,
|
||||||
|
) -> Persona | None:
|
||||||
|
"""Update a persona's system prompt or begin dialogs."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def delete_persona(self, persona_id: str) -> None:
|
||||||
|
"""Delete a persona by its ID."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def insert_preference_or_update(
|
||||||
|
self,
|
||||||
|
scope: str,
|
||||||
|
scope_id: str,
|
||||||
|
key: str,
|
||||||
|
value: dict,
|
||||||
|
) -> Preference:
|
||||||
|
"""Insert a new preference record."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_preference(self, scope: str, scope_id: str, key: str) -> Preference:
|
||||||
|
"""Get a preference by scope ID and key."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_preferences(
|
||||||
|
self,
|
||||||
|
scope: str,
|
||||||
|
scope_id: str | None = None,
|
||||||
|
key: str | None = None,
|
||||||
|
) -> list[Preference]:
|
||||||
|
"""Get all preferences for a specific scope ID or key."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def remove_preference(self, scope: str, scope_id: str, key: str) -> None:
|
||||||
|
"""Remove a preference by scope ID and key."""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def clear_preferences(self, scope: str, scope_id: str) -> None:
|
||||||
|
"""Clear all preferences for a specific scope ID."""
|
||||||
|
...
|
||||||
|
|
||||||
|
# @abc.abstractmethod
|
||||||
|
# async def insert_llm_message(
|
||||||
|
# self,
|
||||||
|
# cid: str,
|
||||||
|
# role: str,
|
||||||
|
# content: list,
|
||||||
|
# tool_calls: list = None,
|
||||||
|
# tool_call_id: str = None,
|
||||||
|
# parent_id: str = None,
|
||||||
|
# ) -> LLMMessage:
|
||||||
|
# """Insert a new LLM message into the conversation."""
|
||||||
|
# ...
|
||||||
|
|
||||||
|
# @abc.abstractmethod
|
||||||
|
# async def get_llm_messages(self, cid: str) -> list[LLMMessage]:
|
||||||
|
# """Get all LLM messages for a specific conversation."""
|
||||||
|
# ...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def get_session_conversations(
|
||||||
|
self,
|
||||||
|
page: int = 1,
|
||||||
|
page_size: int = 20,
|
||||||
|
search_query: str | None = None,
|
||||||
|
platform: str | None = None,
|
||||||
|
) -> tuple[list[dict], int]:
|
||||||
|
"""Get paginated session conversations with joined conversation and persona details, support search and platform filter."""
|
||||||
|
...
|
||||||
|
|||||||
69
astrbot/core/db/migration/helper.py
Normal file
69
astrbot/core/db/migration/helper.py
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
import os
|
||||||
|
|
||||||
|
from astrbot.api import logger, sp
|
||||||
|
from astrbot.core.config import AstrBotConfig
|
||||||
|
from astrbot.core.db import BaseDatabase
|
||||||
|
from astrbot.core.utils.astrbot_path import get_astrbot_data_path
|
||||||
|
|
||||||
|
from .migra_3_to_4 import (
|
||||||
|
migration_conversation_table,
|
||||||
|
migration_persona_data,
|
||||||
|
migration_platform_table,
|
||||||
|
migration_preferences,
|
||||||
|
migration_webchat_data,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def check_migration_needed_v4(db_helper: BaseDatabase) -> bool:
|
||||||
|
"""检查是否需要进行数据库迁移
|
||||||
|
如果存在 data_v3.db 并且 preference 中没有 migration_done_v4,则需要进行迁移。
|
||||||
|
"""
|
||||||
|
# 仅当 data 目录下存在旧版本数据(data_v3.db 文件)时才考虑迁移
|
||||||
|
data_dir = get_astrbot_data_path()
|
||||||
|
data_v3_db = os.path.join(data_dir, "data_v3.db")
|
||||||
|
|
||||||
|
if not os.path.exists(data_v3_db):
|
||||||
|
return False
|
||||||
|
migration_done = await db_helper.get_preference(
|
||||||
|
"global",
|
||||||
|
"global",
|
||||||
|
"migration_done_v4",
|
||||||
|
)
|
||||||
|
if migration_done:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
async def do_migration_v4(
|
||||||
|
db_helper: BaseDatabase,
|
||||||
|
platform_id_map: dict[str, dict[str, str]],
|
||||||
|
astrbot_config: AstrBotConfig,
|
||||||
|
) -> None:
|
||||||
|
"""执行数据库迁移
|
||||||
|
迁移旧的 webchat_conversation 表到新的 conversation 表。
|
||||||
|
迁移旧的 platform 到新的 platform_stats 表。
|
||||||
|
"""
|
||||||
|
if not await check_migration_needed_v4(db_helper):
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("开始执行数据库迁移...")
|
||||||
|
|
||||||
|
# 执行会话表迁移
|
||||||
|
await migration_conversation_table(db_helper, platform_id_map)
|
||||||
|
|
||||||
|
# 执行人格数据迁移
|
||||||
|
await migration_persona_data(db_helper, astrbot_config)
|
||||||
|
|
||||||
|
# 执行 WebChat 数据迁移
|
||||||
|
await migration_webchat_data(db_helper, platform_id_map)
|
||||||
|
|
||||||
|
# 执行偏好设置迁移
|
||||||
|
await migration_preferences(db_helper, platform_id_map)
|
||||||
|
|
||||||
|
# 执行平台统计表迁移
|
||||||
|
await migration_platform_table(db_helper, platform_id_map)
|
||||||
|
|
||||||
|
# 标记迁移完成
|
||||||
|
await sp.put_async("global", "global", "migration_done_v4", True)
|
||||||
|
|
||||||
|
logger.info("数据库迁移完成。")
|
||||||
357
astrbot/core/db/migration/migra_3_to_4.py
Normal file
357
astrbot/core/db/migration/migra_3_to_4.py
Normal file
@@ -0,0 +1,357 @@
|
|||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
|
||||||
|
from sqlalchemy import text
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from astrbot.api import logger, sp
|
||||||
|
from astrbot.core.config import AstrBotConfig
|
||||||
|
from astrbot.core.config.default import DB_PATH
|
||||||
|
from astrbot.core.db.po import ConversationV2, PlatformMessageHistory
|
||||||
|
from astrbot.core.platform.astr_message_event import MessageSesion
|
||||||
|
|
||||||
|
from .. import BaseDatabase
|
||||||
|
from .shared_preferences_v3 import sp as sp_v3
|
||||||
|
from .sqlite_v3 import SQLiteDatabase as SQLiteV3DatabaseV3
|
||||||
|
|
||||||
|
"""
|
||||||
|
1. 迁移旧的 webchat_conversation 表到新的 conversation 表。
|
||||||
|
2. 迁移旧的 platform 到新的 platform_stats 表。
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def get_platform_id(
|
||||||
|
platform_id_map: dict[str, dict[str, str]],
|
||||||
|
old_platform_name: str,
|
||||||
|
) -> str:
|
||||||
|
return platform_id_map.get(
|
||||||
|
old_platform_name,
|
||||||
|
{"platform_id": old_platform_name, "platform_type": old_platform_name},
|
||||||
|
).get("platform_id", old_platform_name)
|
||||||
|
|
||||||
|
|
||||||
|
def get_platform_type(
|
||||||
|
platform_id_map: dict[str, dict[str, str]],
|
||||||
|
old_platform_name: str,
|
||||||
|
) -> str:
|
||||||
|
return platform_id_map.get(
|
||||||
|
old_platform_name,
|
||||||
|
{"platform_id": old_platform_name, "platform_type": old_platform_name},
|
||||||
|
).get("platform_type", old_platform_name)
|
||||||
|
|
||||||
|
|
||||||
|
async def migration_conversation_table(
|
||||||
|
db_helper: BaseDatabase,
|
||||||
|
platform_id_map: dict[str, dict[str, str]],
|
||||||
|
):
|
||||||
|
db_helper_v3 = SQLiteV3DatabaseV3(
|
||||||
|
db_path=DB_PATH.replace("data_v4.db", "data_v3.db"),
|
||||||
|
)
|
||||||
|
conversations, total_cnt = db_helper_v3.get_all_conversations(
|
||||||
|
page=1,
|
||||||
|
page_size=10000000,
|
||||||
|
)
|
||||||
|
logger.info(f"迁移 {total_cnt} 条旧的会话数据到新的表中...")
|
||||||
|
|
||||||
|
async with db_helper.get_db() as dbsession:
|
||||||
|
dbsession: AsyncSession
|
||||||
|
async with dbsession.begin():
|
||||||
|
for idx, conversation in enumerate(conversations):
|
||||||
|
if total_cnt > 0 and (idx + 1) % max(1, total_cnt // 10) == 0:
|
||||||
|
progress = int((idx + 1) / total_cnt * 100)
|
||||||
|
if progress % 10 == 0:
|
||||||
|
logger.info(f"进度: {progress}% ({idx + 1}/{total_cnt})")
|
||||||
|
try:
|
||||||
|
conv = db_helper_v3.get_conversation_by_user_id(
|
||||||
|
user_id=conversation.get("user_id", "unknown"),
|
||||||
|
cid=conversation.get("cid", "unknown"),
|
||||||
|
)
|
||||||
|
if not conv:
|
||||||
|
logger.info(
|
||||||
|
f"未找到该条旧会话对应的具体数据: {conversation}, 跳过。",
|
||||||
|
)
|
||||||
|
if ":" not in conv.user_id:
|
||||||
|
continue
|
||||||
|
session = MessageSesion.from_str(session_str=conv.user_id)
|
||||||
|
platform_id = get_platform_id(
|
||||||
|
platform_id_map,
|
||||||
|
session.platform_name,
|
||||||
|
)
|
||||||
|
session.platform_id = platform_id # 更新平台名称为新的 ID
|
||||||
|
conv_v2 = ConversationV2(
|
||||||
|
user_id=str(session),
|
||||||
|
content=json.loads(conv.history) if conv.history else [],
|
||||||
|
platform_id=platform_id,
|
||||||
|
title=conv.title,
|
||||||
|
persona_id=conv.persona_id,
|
||||||
|
conversation_id=conv.cid,
|
||||||
|
created_at=datetime.datetime.fromtimestamp(conv.created_at),
|
||||||
|
updated_at=datetime.datetime.fromtimestamp(conv.updated_at),
|
||||||
|
)
|
||||||
|
dbsession.add(conv_v2)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"迁移旧会话 {conversation.get('cid', 'unknown')} 失败: {e}",
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
logger.info(f"成功迁移 {total_cnt} 条旧的会话数据到新表。")
|
||||||
|
|
||||||
|
|
||||||
|
async def migration_platform_table(
|
||||||
|
db_helper: BaseDatabase,
|
||||||
|
platform_id_map: dict[str, dict[str, str]],
|
||||||
|
):
|
||||||
|
db_helper_v3 = SQLiteV3DatabaseV3(
|
||||||
|
db_path=DB_PATH.replace("data_v4.db", "data_v3.db"),
|
||||||
|
)
|
||||||
|
secs_from_2023_4_10_to_now = (
|
||||||
|
datetime.datetime.now(datetime.timezone.utc)
|
||||||
|
- datetime.datetime(2023, 4, 10, tzinfo=datetime.timezone.utc)
|
||||||
|
).total_seconds()
|
||||||
|
offset_sec = int(secs_from_2023_4_10_to_now)
|
||||||
|
logger.info(f"迁移旧平台数据,offset_sec: {offset_sec} 秒。")
|
||||||
|
stats = db_helper_v3.get_base_stats(offset_sec=offset_sec)
|
||||||
|
logger.info(f"迁移 {len(stats.platform)} 条旧的平台数据到新的表中...")
|
||||||
|
platform_stats_v3 = stats.platform
|
||||||
|
|
||||||
|
if not platform_stats_v3:
|
||||||
|
logger.info("没有找到旧平台数据,跳过迁移。")
|
||||||
|
return
|
||||||
|
|
||||||
|
first_time_stamp = platform_stats_v3[0].timestamp
|
||||||
|
end_time_stamp = platform_stats_v3[-1].timestamp
|
||||||
|
start_time = first_time_stamp - (first_time_stamp % 3600) # 向下取整到小时
|
||||||
|
end_time = end_time_stamp + (3600 - (end_time_stamp % 3600)) # 向上取整到小时
|
||||||
|
|
||||||
|
idx = 0
|
||||||
|
|
||||||
|
async with db_helper.get_db() as dbsession:
|
||||||
|
dbsession: AsyncSession
|
||||||
|
async with dbsession.begin():
|
||||||
|
total_buckets = (end_time - start_time) // 3600
|
||||||
|
for bucket_idx, bucket_end in enumerate(range(start_time, end_time, 3600)):
|
||||||
|
if bucket_idx % 500 == 0:
|
||||||
|
progress = int((bucket_idx + 1) / total_buckets * 100)
|
||||||
|
logger.info(f"进度: {progress}% ({bucket_idx + 1}/{total_buckets})")
|
||||||
|
cnt = 0
|
||||||
|
while (
|
||||||
|
idx < len(platform_stats_v3)
|
||||||
|
and platform_stats_v3[idx].timestamp < bucket_end
|
||||||
|
):
|
||||||
|
cnt += platform_stats_v3[idx].count
|
||||||
|
idx += 1
|
||||||
|
if cnt == 0:
|
||||||
|
continue
|
||||||
|
platform_id = get_platform_id(
|
||||||
|
platform_id_map,
|
||||||
|
platform_stats_v3[idx].name,
|
||||||
|
)
|
||||||
|
platform_type = get_platform_type(
|
||||||
|
platform_id_map,
|
||||||
|
platform_stats_v3[idx].name,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
await dbsession.execute(
|
||||||
|
text("""
|
||||||
|
INSERT INTO platform_stats (timestamp, platform_id, platform_type, count)
|
||||||
|
VALUES (:timestamp, :platform_id, :platform_type, :count)
|
||||||
|
ON CONFLICT(timestamp, platform_id, platform_type) DO UPDATE SET
|
||||||
|
count = platform_stats.count + EXCLUDED.count
|
||||||
|
"""),
|
||||||
|
{
|
||||||
|
"timestamp": datetime.datetime.fromtimestamp(
|
||||||
|
bucket_end,
|
||||||
|
tz=datetime.timezone.utc,
|
||||||
|
),
|
||||||
|
"platform_id": platform_id,
|
||||||
|
"platform_type": platform_type,
|
||||||
|
"count": cnt,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
logger.error(
|
||||||
|
f"迁移平台统计数据失败: {platform_id}, {platform_type}, 时间戳: {bucket_end}",
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
logger.info(f"成功迁移 {len(platform_stats_v3)} 条旧的平台数据到新表。")
|
||||||
|
|
||||||
|
|
||||||
|
async def migration_webchat_data(
|
||||||
|
db_helper: BaseDatabase,
|
||||||
|
platform_id_map: dict[str, dict[str, str]],
|
||||||
|
):
|
||||||
|
"""迁移 WebChat 的历史记录到新的 PlatformMessageHistory 表中"""
|
||||||
|
db_helper_v3 = SQLiteV3DatabaseV3(
|
||||||
|
db_path=DB_PATH.replace("data_v4.db", "data_v3.db"),
|
||||||
|
)
|
||||||
|
conversations, total_cnt = db_helper_v3.get_all_conversations(
|
||||||
|
page=1,
|
||||||
|
page_size=10000000,
|
||||||
|
)
|
||||||
|
logger.info(f"迁移 {total_cnt} 条旧的 WebChat 会话数据到新的表中...")
|
||||||
|
|
||||||
|
async with db_helper.get_db() as dbsession:
|
||||||
|
dbsession: AsyncSession
|
||||||
|
async with dbsession.begin():
|
||||||
|
for idx, conversation in enumerate(conversations):
|
||||||
|
if total_cnt > 0 and (idx + 1) % max(1, total_cnt // 10) == 0:
|
||||||
|
progress = int((idx + 1) / total_cnt * 100)
|
||||||
|
if progress % 10 == 0:
|
||||||
|
logger.info(f"进度: {progress}% ({idx + 1}/{total_cnt})")
|
||||||
|
try:
|
||||||
|
conv = db_helper_v3.get_conversation_by_user_id(
|
||||||
|
user_id=conversation.get("user_id", "unknown"),
|
||||||
|
cid=conversation.get("cid", "unknown"),
|
||||||
|
)
|
||||||
|
if not conv:
|
||||||
|
logger.info(
|
||||||
|
f"未找到该条旧会话对应的具体数据: {conversation}, 跳过。",
|
||||||
|
)
|
||||||
|
if ":" in conv.user_id:
|
||||||
|
continue
|
||||||
|
platform_id = "webchat"
|
||||||
|
history = json.loads(conv.history) if conv.history else []
|
||||||
|
for msg in history:
|
||||||
|
type_ = msg.get("type") # user type, "bot" or "user"
|
||||||
|
new_history = PlatformMessageHistory(
|
||||||
|
platform_id=platform_id,
|
||||||
|
user_id=conv.cid, # we use conv.cid as user_id for webchat
|
||||||
|
content=msg,
|
||||||
|
sender_id=type_,
|
||||||
|
sender_name=type_,
|
||||||
|
)
|
||||||
|
dbsession.add(new_history)
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
logger.error(
|
||||||
|
f"迁移旧 WebChat 会话 {conversation.get('cid', 'unknown')} 失败",
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(f"成功迁移 {total_cnt} 条旧的 WebChat 会话数据到新表。")
|
||||||
|
|
||||||
|
|
||||||
|
async def migration_persona_data(
|
||||||
|
db_helper: BaseDatabase,
|
||||||
|
astrbot_config: AstrBotConfig,
|
||||||
|
):
|
||||||
|
"""迁移 Persona 数据到新的表中。
|
||||||
|
旧的 Persona 数据存储在 preference 中,新的 Persona 数据存储在 persona 表中。
|
||||||
|
"""
|
||||||
|
v3_persona_config: list[dict] = astrbot_config.get("persona", [])
|
||||||
|
total_personas = len(v3_persona_config)
|
||||||
|
logger.info(f"迁移 {total_personas} 个 Persona 配置到新表中...")
|
||||||
|
|
||||||
|
for idx, persona in enumerate(v3_persona_config):
|
||||||
|
if total_personas > 0 and (idx + 1) % max(1, total_personas // 10) == 0:
|
||||||
|
progress = int((idx + 1) / total_personas * 100)
|
||||||
|
if progress % 10 == 0:
|
||||||
|
logger.info(f"进度: {progress}% ({idx + 1}/{total_personas})")
|
||||||
|
try:
|
||||||
|
begin_dialogs = persona.get("begin_dialogs", [])
|
||||||
|
mood_imitation_dialogs = persona.get("mood_imitation_dialogs", [])
|
||||||
|
parts = []
|
||||||
|
user_turn = True
|
||||||
|
for mood_dialog in mood_imitation_dialogs:
|
||||||
|
if user_turn:
|
||||||
|
parts.append(f"A: {mood_dialog}\n")
|
||||||
|
else:
|
||||||
|
parts.append(f"B: {mood_dialog}\n")
|
||||||
|
user_turn = not user_turn
|
||||||
|
mood_prompt = "".join(parts)
|
||||||
|
system_prompt = persona.get("prompt", "")
|
||||||
|
if mood_prompt:
|
||||||
|
system_prompt += f"Here are few shots of dialogs, you need to imitate the tone of 'B' in the following dialogs to respond:\n {mood_prompt}"
|
||||||
|
persona_new = await db_helper.insert_persona(
|
||||||
|
persona_id=persona["name"],
|
||||||
|
system_prompt=system_prompt,
|
||||||
|
begin_dialogs=begin_dialogs,
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f"迁移 Persona {persona['name']}({persona_new.system_prompt[:30]}...) 到新表成功。",
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"解析 Persona 配置失败:{e}")
|
||||||
|
|
||||||
|
|
||||||
|
async def migration_preferences(
|
||||||
|
db_helper: BaseDatabase,
|
||||||
|
platform_id_map: dict[str, dict[str, str]],
|
||||||
|
):
|
||||||
|
# 1. global scope migration
|
||||||
|
keys = [
|
||||||
|
"inactivated_llm_tools",
|
||||||
|
"inactivated_plugins",
|
||||||
|
"curr_provider",
|
||||||
|
"curr_provider_tts",
|
||||||
|
"curr_provider_stt",
|
||||||
|
"alter_cmd",
|
||||||
|
]
|
||||||
|
for key in keys:
|
||||||
|
value = sp_v3.get(key)
|
||||||
|
if value is not None:
|
||||||
|
await sp.put_async("global", "global", key, value)
|
||||||
|
logger.info(f"迁移全局偏好设置 {key} 成功,值: {value}")
|
||||||
|
|
||||||
|
# 2. umo scope migration
|
||||||
|
session_conversation = sp_v3.get("session_conversation", default={})
|
||||||
|
for umo, conversation_id in session_conversation.items():
|
||||||
|
if not umo or not conversation_id:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
session = MessageSesion.from_str(session_str=umo)
|
||||||
|
platform_id = get_platform_id(platform_id_map, session.platform_name)
|
||||||
|
session.platform_id = platform_id
|
||||||
|
await sp.put_async("umo", str(session), "sel_conv_id", conversation_id)
|
||||||
|
logger.info(f"迁移会话 {umo} 的对话数据到新表成功,平台 ID: {platform_id}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"迁移会话 {umo} 的对话数据失败: {e}", exc_info=True)
|
||||||
|
|
||||||
|
session_service_config = sp_v3.get("session_service_config", default={})
|
||||||
|
for umo, config in session_service_config.items():
|
||||||
|
if not umo or not config:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
session = MessageSesion.from_str(session_str=umo)
|
||||||
|
platform_id = get_platform_id(platform_id_map, session.platform_name)
|
||||||
|
session.platform_id = platform_id
|
||||||
|
|
||||||
|
await sp.put_async("umo", str(session), "session_service_config", config)
|
||||||
|
|
||||||
|
logger.info(f"迁移会话 {umo} 的服务配置到新表成功,平台 ID: {platform_id}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"迁移会话 {umo} 的服务配置失败: {e}", exc_info=True)
|
||||||
|
|
||||||
|
session_variables = sp_v3.get("session_variables", default={})
|
||||||
|
for umo, variables in session_variables.items():
|
||||||
|
if not umo or not variables:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
session = MessageSesion.from_str(session_str=umo)
|
||||||
|
platform_id = get_platform_id(platform_id_map, session.platform_name)
|
||||||
|
session.platform_id = platform_id
|
||||||
|
await sp.put_async("umo", str(session), "session_variables", variables)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"迁移会话 {umo} 的变量失败: {e}", exc_info=True)
|
||||||
|
|
||||||
|
session_provider_perf = sp_v3.get("session_provider_perf", default={})
|
||||||
|
for umo, perf in session_provider_perf.items():
|
||||||
|
if not umo or not perf:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
session = MessageSesion.from_str(session_str=umo)
|
||||||
|
platform_id = get_platform_id(platform_id_map, session.platform_name)
|
||||||
|
session.platform_id = platform_id
|
||||||
|
|
||||||
|
for provider_type, provider_id in perf.items():
|
||||||
|
await sp.put_async(
|
||||||
|
"umo",
|
||||||
|
str(session),
|
||||||
|
f"provider_perf_{provider_type}",
|
||||||
|
provider_id,
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f"迁移会话 {umo} 的提供商偏好到新表成功,平台 ID: {platform_id}",
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"迁移会话 {umo} 的提供商偏好失败: {e}", exc_info=True)
|
||||||
44
astrbot/core/db/migration/migra_45_to_46.py
Normal file
44
astrbot/core/db/migration/migra_45_to_46.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
from astrbot.api import logger, sp
|
||||||
|
from astrbot.core.astrbot_config_mgr import AstrBotConfigManager
|
||||||
|
from astrbot.core.umop_config_router import UmopConfigRouter
|
||||||
|
|
||||||
|
|
||||||
|
async def migrate_45_to_46(acm: AstrBotConfigManager, ucr: UmopConfigRouter):
|
||||||
|
abconf_data = acm.abconf_data
|
||||||
|
|
||||||
|
if not isinstance(abconf_data, dict):
|
||||||
|
# should be unreachable
|
||||||
|
logger.warning(
|
||||||
|
f"migrate_45_to_46: abconf_data is not a dict (type={type(abconf_data)}). Value: {abconf_data!r}",
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
# 如果任何一项带有 umop,则说明需要迁移
|
||||||
|
need_migration = False
|
||||||
|
for conf_id, conf_info in abconf_data.items():
|
||||||
|
if isinstance(conf_info, dict) and "umop" in conf_info:
|
||||||
|
need_migration = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not need_migration:
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("Starting migration from version 4.5 to 4.6")
|
||||||
|
|
||||||
|
# extract umo->conf_id mapping
|
||||||
|
umo_to_conf_id = {}
|
||||||
|
for conf_id, conf_info in abconf_data.items():
|
||||||
|
if isinstance(conf_info, dict) and "umop" in conf_info:
|
||||||
|
umop_ls = conf_info.pop("umop")
|
||||||
|
if not isinstance(umop_ls, list):
|
||||||
|
continue
|
||||||
|
for umo in umop_ls:
|
||||||
|
if isinstance(umo, str) and umo not in umo_to_conf_id:
|
||||||
|
umo_to_conf_id[umo] = conf_id
|
||||||
|
|
||||||
|
# update the abconf data
|
||||||
|
await sp.global_put("abconf_mapping", abconf_data)
|
||||||
|
# update the umop config router
|
||||||
|
await ucr.update_routing_data(umo_to_conf_id)
|
||||||
|
|
||||||
|
logger.info("Migration from version 45 to 46 completed successfully")
|
||||||
48
astrbot/core/db/migration/shared_preferences_v3.py
Normal file
48
astrbot/core/db/migration/shared_preferences_v3.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
import json
|
||||||
|
import os
|
||||||
|
from typing import TypeVar
|
||||||
|
|
||||||
|
from astrbot.core.utils.astrbot_path import get_astrbot_data_path
|
||||||
|
|
||||||
|
_VT = TypeVar("_VT")
|
||||||
|
|
||||||
|
|
||||||
|
class SharedPreferences:
|
||||||
|
def __init__(self, path=None):
|
||||||
|
if path is None:
|
||||||
|
path = os.path.join(get_astrbot_data_path(), "shared_preferences.json")
|
||||||
|
self.path = path
|
||||||
|
self._data = self._load_preferences()
|
||||||
|
|
||||||
|
def _load_preferences(self):
|
||||||
|
if os.path.exists(self.path):
|
||||||
|
try:
|
||||||
|
with open(self.path) as f:
|
||||||
|
return json.load(f)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
os.remove(self.path)
|
||||||
|
return {}
|
||||||
|
|
||||||
|
def _save_preferences(self):
|
||||||
|
with open(self.path, "w") as f:
|
||||||
|
json.dump(self._data, f, indent=4, ensure_ascii=False)
|
||||||
|
f.flush()
|
||||||
|
|
||||||
|
def get(self, key, default: _VT = None) -> _VT:
|
||||||
|
return self._data.get(key, default)
|
||||||
|
|
||||||
|
def put(self, key, value):
|
||||||
|
self._data[key] = value
|
||||||
|
self._save_preferences()
|
||||||
|
|
||||||
|
def remove(self, key):
|
||||||
|
if key in self._data:
|
||||||
|
del self._data[key]
|
||||||
|
self._save_preferences()
|
||||||
|
|
||||||
|
def clear(self):
|
||||||
|
self._data.clear()
|
||||||
|
self._save_preferences()
|
||||||
|
|
||||||
|
|
||||||
|
sp = SharedPreferences()
|
||||||
497
astrbot/core/db/migration/sqlite_v3.py
Normal file
497
astrbot/core/db/migration/sqlite_v3.py
Normal file
@@ -0,0 +1,497 @@
|
|||||||
|
import sqlite3
|
||||||
|
import time
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from astrbot.core.db.po import Platform, Stats
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Conversation:
|
||||||
|
"""LLM 对话存储
|
||||||
|
|
||||||
|
对于网页聊天,history 存储了包括指令、回复、图片等在内的所有消息。
|
||||||
|
对于其他平台的聊天,不存储非 LLM 的回复(因为考虑到已经存储在各自的平台上)。
|
||||||
|
"""
|
||||||
|
|
||||||
|
user_id: str
|
||||||
|
cid: str
|
||||||
|
history: str = ""
|
||||||
|
"""字符串格式的列表。"""
|
||||||
|
created_at: int = 0
|
||||||
|
updated_at: int = 0
|
||||||
|
title: str = ""
|
||||||
|
persona_id: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
INIT_SQL = """
|
||||||
|
CREATE TABLE IF NOT EXISTS platform(
|
||||||
|
name VARCHAR(32),
|
||||||
|
count INTEGER,
|
||||||
|
timestamp INTEGER
|
||||||
|
);
|
||||||
|
CREATE TABLE IF NOT EXISTS llm(
|
||||||
|
name VARCHAR(32),
|
||||||
|
count INTEGER,
|
||||||
|
timestamp INTEGER
|
||||||
|
);
|
||||||
|
CREATE TABLE IF NOT EXISTS plugin(
|
||||||
|
name VARCHAR(32),
|
||||||
|
count INTEGER,
|
||||||
|
timestamp INTEGER
|
||||||
|
);
|
||||||
|
CREATE TABLE IF NOT EXISTS command(
|
||||||
|
name VARCHAR(32),
|
||||||
|
count INTEGER,
|
||||||
|
timestamp INTEGER
|
||||||
|
);
|
||||||
|
CREATE TABLE IF NOT EXISTS llm_history(
|
||||||
|
provider_type VARCHAR(32),
|
||||||
|
session_id VARCHAR(32),
|
||||||
|
content TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
-- ATRI
|
||||||
|
CREATE TABLE IF NOT EXISTS atri_vision(
|
||||||
|
id TEXT,
|
||||||
|
url_or_path TEXT,
|
||||||
|
caption TEXT,
|
||||||
|
is_meme BOOLEAN,
|
||||||
|
keywords TEXT,
|
||||||
|
platform_name VARCHAR(32),
|
||||||
|
session_id VARCHAR(32),
|
||||||
|
sender_nickname VARCHAR(32),
|
||||||
|
timestamp INTEGER
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS webchat_conversation(
|
||||||
|
user_id TEXT, -- 会话 id
|
||||||
|
cid TEXT, -- 对话 id
|
||||||
|
history TEXT,
|
||||||
|
created_at INTEGER,
|
||||||
|
updated_at INTEGER,
|
||||||
|
title TEXT,
|
||||||
|
persona_id TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
PRAGMA encoding = 'UTF-8';
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class SQLiteDatabase:
|
||||||
|
def __init__(self, db_path: str) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.db_path = db_path
|
||||||
|
|
||||||
|
sql = INIT_SQL
|
||||||
|
|
||||||
|
# 初始化数据库
|
||||||
|
self.conn = self._get_conn(self.db_path)
|
||||||
|
c = self.conn.cursor()
|
||||||
|
c.executescript(sql)
|
||||||
|
self.conn.commit()
|
||||||
|
|
||||||
|
# 检查 webchat_conversation 的 title 字段是否存在
|
||||||
|
c.execute(
|
||||||
|
"""
|
||||||
|
PRAGMA table_info(webchat_conversation)
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
res = c.fetchall()
|
||||||
|
has_title = False
|
||||||
|
has_persona_id = False
|
||||||
|
for row in res:
|
||||||
|
if row[1] == "title":
|
||||||
|
has_title = True
|
||||||
|
if row[1] == "persona_id":
|
||||||
|
has_persona_id = True
|
||||||
|
if not has_title:
|
||||||
|
c.execute(
|
||||||
|
"""
|
||||||
|
ALTER TABLE webchat_conversation ADD COLUMN title TEXT;
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
self.conn.commit()
|
||||||
|
if not has_persona_id:
|
||||||
|
c.execute(
|
||||||
|
"""
|
||||||
|
ALTER TABLE webchat_conversation ADD COLUMN persona_id TEXT;
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
self.conn.commit()
|
||||||
|
|
||||||
|
c.close()
|
||||||
|
|
||||||
|
def _get_conn(self, db_path: str) -> sqlite3.Connection:
|
||||||
|
conn = sqlite3.connect(self.db_path)
|
||||||
|
conn.text_factory = str
|
||||||
|
return conn
|
||||||
|
|
||||||
|
def _exec_sql(self, sql: str, params: tuple = None):
|
||||||
|
conn = self.conn
|
||||||
|
try:
|
||||||
|
c = self.conn.cursor()
|
||||||
|
except sqlite3.ProgrammingError:
|
||||||
|
conn = self._get_conn(self.db_path)
|
||||||
|
c = conn.cursor()
|
||||||
|
|
||||||
|
if params:
|
||||||
|
c.execute(sql, params)
|
||||||
|
c.close()
|
||||||
|
else:
|
||||||
|
c.execute(sql)
|
||||||
|
c.close()
|
||||||
|
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
def insert_platform_metrics(self, metrics: dict):
|
||||||
|
for k, v in metrics.items():
|
||||||
|
self._exec_sql(
|
||||||
|
"""
|
||||||
|
INSERT INTO platform(name, count, timestamp) VALUES (?, ?, ?)
|
||||||
|
""",
|
||||||
|
(k, v, int(time.time())),
|
||||||
|
)
|
||||||
|
|
||||||
|
def insert_llm_metrics(self, metrics: dict):
|
||||||
|
for k, v in metrics.items():
|
||||||
|
self._exec_sql(
|
||||||
|
"""
|
||||||
|
INSERT INTO llm(name, count, timestamp) VALUES (?, ?, ?)
|
||||||
|
""",
|
||||||
|
(k, v, int(time.time())),
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_base_stats(self, offset_sec: int = 86400) -> Stats:
|
||||||
|
"""获取 offset_sec 秒前到现在的基础统计数据"""
|
||||||
|
where_clause = f" WHERE timestamp >= {int(time.time()) - offset_sec}"
|
||||||
|
|
||||||
|
try:
|
||||||
|
c = self.conn.cursor()
|
||||||
|
except sqlite3.ProgrammingError:
|
||||||
|
c = self._get_conn(self.db_path).cursor()
|
||||||
|
|
||||||
|
c.execute(
|
||||||
|
"""
|
||||||
|
SELECT * FROM platform
|
||||||
|
"""
|
||||||
|
+ where_clause,
|
||||||
|
)
|
||||||
|
|
||||||
|
platform = []
|
||||||
|
for row in c.fetchall():
|
||||||
|
platform.append(Platform(*row))
|
||||||
|
|
||||||
|
c.close()
|
||||||
|
|
||||||
|
return Stats(platform=platform)
|
||||||
|
|
||||||
|
def get_total_message_count(self) -> int:
|
||||||
|
try:
|
||||||
|
c = self.conn.cursor()
|
||||||
|
except sqlite3.ProgrammingError:
|
||||||
|
c = self._get_conn(self.db_path).cursor()
|
||||||
|
|
||||||
|
c.execute(
|
||||||
|
"""
|
||||||
|
SELECT SUM(count) FROM platform
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
res = c.fetchone()
|
||||||
|
c.close()
|
||||||
|
return res[0]
|
||||||
|
|
||||||
|
def get_grouped_base_stats(self, offset_sec: int = 86400) -> Stats:
|
||||||
|
"""获取 offset_sec 秒前到现在的基础统计数据(合并)"""
|
||||||
|
where_clause = f" WHERE timestamp >= {int(time.time()) - offset_sec}"
|
||||||
|
|
||||||
|
try:
|
||||||
|
c = self.conn.cursor()
|
||||||
|
except sqlite3.ProgrammingError:
|
||||||
|
c = self._get_conn(self.db_path).cursor()
|
||||||
|
|
||||||
|
c.execute(
|
||||||
|
"""
|
||||||
|
SELECT name, SUM(count), timestamp FROM platform
|
||||||
|
"""
|
||||||
|
+ where_clause
|
||||||
|
+ " GROUP BY name",
|
||||||
|
)
|
||||||
|
|
||||||
|
platform = []
|
||||||
|
for row in c.fetchall():
|
||||||
|
platform.append(Platform(*row))
|
||||||
|
|
||||||
|
c.close()
|
||||||
|
|
||||||
|
return Stats(platform, [], [])
|
||||||
|
|
||||||
|
def get_conversation_by_user_id(self, user_id: str, cid: str) -> Conversation:
|
||||||
|
try:
|
||||||
|
c = self.conn.cursor()
|
||||||
|
except sqlite3.ProgrammingError:
|
||||||
|
c = self._get_conn(self.db_path).cursor()
|
||||||
|
|
||||||
|
c.execute(
|
||||||
|
"""
|
||||||
|
SELECT * FROM webchat_conversation WHERE user_id = ? AND cid = ?
|
||||||
|
""",
|
||||||
|
(user_id, cid),
|
||||||
|
)
|
||||||
|
|
||||||
|
res = c.fetchone()
|
||||||
|
c.close()
|
||||||
|
|
||||||
|
if not res:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return Conversation(*res)
|
||||||
|
|
||||||
|
def new_conversation(self, user_id: str, cid: str):
|
||||||
|
history = "[]"
|
||||||
|
updated_at = int(time.time())
|
||||||
|
created_at = updated_at
|
||||||
|
self._exec_sql(
|
||||||
|
"""
|
||||||
|
INSERT INTO webchat_conversation(user_id, cid, history, updated_at, created_at) VALUES (?, ?, ?, ?, ?)
|
||||||
|
""",
|
||||||
|
(user_id, cid, history, updated_at, created_at),
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_conversations(self, user_id: str) -> tuple:
|
||||||
|
try:
|
||||||
|
c = self.conn.cursor()
|
||||||
|
except sqlite3.ProgrammingError:
|
||||||
|
c = self._get_conn(self.db_path).cursor()
|
||||||
|
|
||||||
|
c.execute(
|
||||||
|
"""
|
||||||
|
SELECT cid, created_at, updated_at, title, persona_id FROM webchat_conversation WHERE user_id = ? ORDER BY updated_at DESC
|
||||||
|
""",
|
||||||
|
(user_id,),
|
||||||
|
)
|
||||||
|
|
||||||
|
res = c.fetchall()
|
||||||
|
c.close()
|
||||||
|
conversations = []
|
||||||
|
for row in res:
|
||||||
|
cid = row[0]
|
||||||
|
created_at = row[1]
|
||||||
|
updated_at = row[2]
|
||||||
|
title = row[3]
|
||||||
|
persona_id = row[4]
|
||||||
|
conversations.append(
|
||||||
|
Conversation("", cid, "[]", created_at, updated_at, title, persona_id),
|
||||||
|
)
|
||||||
|
return conversations
|
||||||
|
|
||||||
|
def update_conversation(self, user_id: str, cid: str, history: str):
|
||||||
|
"""更新对话,并且同时更新时间"""
|
||||||
|
updated_at = int(time.time())
|
||||||
|
self._exec_sql(
|
||||||
|
"""
|
||||||
|
UPDATE webchat_conversation SET history = ?, updated_at = ? WHERE user_id = ? AND cid = ?
|
||||||
|
""",
|
||||||
|
(history, updated_at, user_id, cid),
|
||||||
|
)
|
||||||
|
|
||||||
|
def update_conversation_title(self, user_id: str, cid: str, title: str):
|
||||||
|
self._exec_sql(
|
||||||
|
"""
|
||||||
|
UPDATE webchat_conversation SET title = ? WHERE user_id = ? AND cid = ?
|
||||||
|
""",
|
||||||
|
(title, user_id, cid),
|
||||||
|
)
|
||||||
|
|
||||||
|
def update_conversation_persona_id(self, user_id: str, cid: str, persona_id: str):
|
||||||
|
self._exec_sql(
|
||||||
|
"""
|
||||||
|
UPDATE webchat_conversation SET persona_id = ? WHERE user_id = ? AND cid = ?
|
||||||
|
""",
|
||||||
|
(persona_id, user_id, cid),
|
||||||
|
)
|
||||||
|
|
||||||
|
def delete_conversation(self, user_id: str, cid: str):
|
||||||
|
self._exec_sql(
|
||||||
|
"""
|
||||||
|
DELETE FROM webchat_conversation WHERE user_id = ? AND cid = ?
|
||||||
|
""",
|
||||||
|
(user_id, cid),
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_all_conversations(
|
||||||
|
self,
|
||||||
|
page: int = 1,
|
||||||
|
page_size: int = 20,
|
||||||
|
) -> tuple[list[dict[str, Any]], int]:
|
||||||
|
"""获取所有对话,支持分页,按更新时间降序排序"""
|
||||||
|
try:
|
||||||
|
c = self.conn.cursor()
|
||||||
|
except sqlite3.ProgrammingError:
|
||||||
|
c = self._get_conn(self.db_path).cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# 获取总记录数
|
||||||
|
c.execute("""
|
||||||
|
SELECT COUNT(*) FROM webchat_conversation
|
||||||
|
""")
|
||||||
|
total_count = c.fetchone()[0]
|
||||||
|
|
||||||
|
# 计算偏移量
|
||||||
|
offset = (page - 1) * page_size
|
||||||
|
|
||||||
|
# 获取分页数据,按更新时间降序排序
|
||||||
|
c.execute(
|
||||||
|
"""
|
||||||
|
SELECT user_id, cid, created_at, updated_at, title, persona_id
|
||||||
|
FROM webchat_conversation
|
||||||
|
ORDER BY updated_at DESC
|
||||||
|
LIMIT ? OFFSET ?
|
||||||
|
""",
|
||||||
|
(page_size, offset),
|
||||||
|
)
|
||||||
|
|
||||||
|
rows = c.fetchall()
|
||||||
|
|
||||||
|
conversations = []
|
||||||
|
|
||||||
|
for row in rows:
|
||||||
|
user_id, cid, created_at, updated_at, title, persona_id = row
|
||||||
|
# 确保 cid 是字符串类型且至少有8个字符,否则使用一个默认值
|
||||||
|
safe_cid = str(cid) if cid else "unknown"
|
||||||
|
display_cid = safe_cid[:8] if len(safe_cid) >= 8 else safe_cid
|
||||||
|
|
||||||
|
conversations.append(
|
||||||
|
{
|
||||||
|
"user_id": user_id or "",
|
||||||
|
"cid": safe_cid,
|
||||||
|
"title": title or f"对话 {display_cid}",
|
||||||
|
"persona_id": persona_id or "",
|
||||||
|
"created_at": created_at or 0,
|
||||||
|
"updated_at": updated_at or 0,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
return conversations, total_count
|
||||||
|
|
||||||
|
except Exception as _:
|
||||||
|
# 返回空列表和0,确保即使出错也有有效的返回值
|
||||||
|
return [], 0
|
||||||
|
finally:
|
||||||
|
c.close()
|
||||||
|
|
||||||
|
def get_filtered_conversations(
|
||||||
|
self,
|
||||||
|
page: int = 1,
|
||||||
|
page_size: int = 20,
|
||||||
|
platforms: list[str] | None = None,
|
||||||
|
message_types: list[str] | None = None,
|
||||||
|
search_query: str | None = None,
|
||||||
|
exclude_ids: list[str] | None = None,
|
||||||
|
exclude_platforms: list[str] | None = None,
|
||||||
|
) -> tuple[list[dict[str, Any]], int]:
|
||||||
|
"""获取筛选后的对话列表"""
|
||||||
|
try:
|
||||||
|
c = self.conn.cursor()
|
||||||
|
except sqlite3.ProgrammingError:
|
||||||
|
c = self._get_conn(self.db_path).cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# 构建查询条件
|
||||||
|
where_clauses = []
|
||||||
|
params = []
|
||||||
|
|
||||||
|
# 平台筛选
|
||||||
|
if platforms and len(platforms) > 0:
|
||||||
|
platform_conditions = []
|
||||||
|
for platform in platforms:
|
||||||
|
platform_conditions.append("user_id LIKE ?")
|
||||||
|
params.append(f"{platform}:%")
|
||||||
|
|
||||||
|
if platform_conditions:
|
||||||
|
where_clauses.append(f"({' OR '.join(platform_conditions)})")
|
||||||
|
|
||||||
|
# 消息类型筛选
|
||||||
|
if message_types and len(message_types) > 0:
|
||||||
|
message_type_conditions = []
|
||||||
|
for msg_type in message_types:
|
||||||
|
message_type_conditions.append("user_id LIKE ?")
|
||||||
|
params.append(f"%:{msg_type}:%")
|
||||||
|
|
||||||
|
if message_type_conditions:
|
||||||
|
where_clauses.append(f"({' OR '.join(message_type_conditions)})")
|
||||||
|
|
||||||
|
# 搜索关键词
|
||||||
|
if search_query:
|
||||||
|
search_query = search_query.encode("unicode_escape").decode("utf-8")
|
||||||
|
where_clauses.append(
|
||||||
|
"(title LIKE ? OR user_id LIKE ? OR cid LIKE ? OR history LIKE ?)",
|
||||||
|
)
|
||||||
|
search_param = f"%{search_query}%"
|
||||||
|
params.extend([search_param, search_param, search_param, search_param])
|
||||||
|
|
||||||
|
# 排除特定用户ID
|
||||||
|
if exclude_ids and len(exclude_ids) > 0:
|
||||||
|
for exclude_id in exclude_ids:
|
||||||
|
where_clauses.append("user_id NOT LIKE ?")
|
||||||
|
params.append(f"{exclude_id}%")
|
||||||
|
|
||||||
|
# 排除特定平台
|
||||||
|
if exclude_platforms and len(exclude_platforms) > 0:
|
||||||
|
for exclude_platform in exclude_platforms:
|
||||||
|
where_clauses.append("user_id NOT LIKE ?")
|
||||||
|
params.append(f"{exclude_platform}:%")
|
||||||
|
|
||||||
|
# 构建完整的 WHERE 子句
|
||||||
|
where_sql = " WHERE " + " AND ".join(where_clauses) if where_clauses else ""
|
||||||
|
|
||||||
|
# 构建计数查询
|
||||||
|
count_sql = f"SELECT COUNT(*) FROM webchat_conversation{where_sql}"
|
||||||
|
|
||||||
|
# 获取总记录数
|
||||||
|
c.execute(count_sql, params)
|
||||||
|
total_count = c.fetchone()[0]
|
||||||
|
|
||||||
|
# 计算偏移量
|
||||||
|
offset = (page - 1) * page_size
|
||||||
|
|
||||||
|
# 构建分页数据查询
|
||||||
|
data_sql = f"""
|
||||||
|
SELECT user_id, cid, created_at, updated_at, title, persona_id
|
||||||
|
FROM webchat_conversation
|
||||||
|
{where_sql}
|
||||||
|
ORDER BY updated_at DESC
|
||||||
|
LIMIT ? OFFSET ?
|
||||||
|
"""
|
||||||
|
query_params = params + [page_size, offset]
|
||||||
|
|
||||||
|
# 获取分页数据
|
||||||
|
c.execute(data_sql, query_params)
|
||||||
|
rows = c.fetchall()
|
||||||
|
|
||||||
|
conversations = []
|
||||||
|
|
||||||
|
for row in rows:
|
||||||
|
user_id, cid, created_at, updated_at, title, persona_id = row
|
||||||
|
# 确保 cid 是字符串类型,否则使用一个默认值
|
||||||
|
safe_cid = str(cid) if cid else "unknown"
|
||||||
|
display_cid = safe_cid[:8] if len(safe_cid) >= 8 else safe_cid
|
||||||
|
|
||||||
|
conversations.append(
|
||||||
|
{
|
||||||
|
"user_id": user_id or "",
|
||||||
|
"cid": safe_cid,
|
||||||
|
"title": title or f"对话 {display_cid}",
|
||||||
|
"persona_id": persona_id or "",
|
||||||
|
"created_at": created_at or 0,
|
||||||
|
"updated_at": updated_at or 0,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
return conversations, total_count
|
||||||
|
|
||||||
|
except Exception as _:
|
||||||
|
# 返回空列表和0,确保即使出错也有有效的返回值
|
||||||
|
return [], 0
|
||||||
|
finally:
|
||||||
|
c.close()
|
||||||
@@ -1,70 +1,257 @@
|
|||||||
'''指标数据'''
|
import uuid
|
||||||
|
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
from typing import List
|
from datetime import datetime, timezone
|
||||||
|
from typing import TypedDict
|
||||||
|
|
||||||
|
from sqlmodel import (
|
||||||
|
JSON,
|
||||||
|
Field,
|
||||||
|
SQLModel,
|
||||||
|
Text,
|
||||||
|
UniqueConstraint,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PlatformStat(SQLModel, table=True):
|
||||||
|
"""This class represents the statistics of bot usage across different platforms.
|
||||||
|
|
||||||
|
Note: In astrbot v4, we moved `platform` table to here.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__tablename__ = "platform_stats"
|
||||||
|
|
||||||
|
id: int = Field(primary_key=True, sa_column_kwargs={"autoincrement": True})
|
||||||
|
timestamp: datetime = Field(nullable=False)
|
||||||
|
platform_id: str = Field(nullable=False)
|
||||||
|
platform_type: str = Field(nullable=False) # such as "aiocqhttp", "slack", etc.
|
||||||
|
count: int = Field(default=0, nullable=False)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint(
|
||||||
|
"timestamp",
|
||||||
|
"platform_id",
|
||||||
|
"platform_type",
|
||||||
|
name="uix_platform_stats",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ConversationV2(SQLModel, table=True):
|
||||||
|
__tablename__ = "conversations"
|
||||||
|
|
||||||
|
inner_conversation_id: int = Field(
|
||||||
|
primary_key=True,
|
||||||
|
sa_column_kwargs={"autoincrement": True},
|
||||||
|
)
|
||||||
|
conversation_id: str = Field(
|
||||||
|
max_length=36,
|
||||||
|
nullable=False,
|
||||||
|
unique=True,
|
||||||
|
default_factory=lambda: str(uuid.uuid4()),
|
||||||
|
)
|
||||||
|
platform_id: str = Field(nullable=False)
|
||||||
|
user_id: str = Field(nullable=False)
|
||||||
|
content: list | None = Field(default=None, sa_type=JSON)
|
||||||
|
created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||||
|
updated_at: datetime = Field(
|
||||||
|
default_factory=lambda: datetime.now(timezone.utc),
|
||||||
|
sa_column_kwargs={"onupdate": datetime.now(timezone.utc)},
|
||||||
|
)
|
||||||
|
title: str | None = Field(default=None, max_length=255)
|
||||||
|
persona_id: str | None = Field(default=None)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint(
|
||||||
|
"conversation_id",
|
||||||
|
name="uix_conversation_id",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Persona(SQLModel, table=True):
|
||||||
|
"""Persona is a set of instructions for LLMs to follow.
|
||||||
|
|
||||||
|
It can be used to customize the behavior of LLMs.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__tablename__ = "personas"
|
||||||
|
|
||||||
|
id: int | None = Field(
|
||||||
|
primary_key=True,
|
||||||
|
sa_column_kwargs={"autoincrement": True},
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
persona_id: str = Field(max_length=255, nullable=False)
|
||||||
|
system_prompt: str = Field(sa_type=Text, nullable=False)
|
||||||
|
begin_dialogs: list | None = Field(default=None, sa_type=JSON)
|
||||||
|
"""a list of strings, each representing a dialog to start with"""
|
||||||
|
tools: list | None = Field(default=None, sa_type=JSON)
|
||||||
|
"""None means use ALL tools for default, empty list means no tools, otherwise a list of tool names."""
|
||||||
|
created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||||
|
updated_at: datetime = Field(
|
||||||
|
default_factory=lambda: datetime.now(timezone.utc),
|
||||||
|
sa_column_kwargs={"onupdate": datetime.now(timezone.utc)},
|
||||||
|
)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint(
|
||||||
|
"persona_id",
|
||||||
|
name="uix_persona_id",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Preference(SQLModel, table=True):
|
||||||
|
"""This class represents preferences for bots."""
|
||||||
|
|
||||||
|
__tablename__ = "preferences"
|
||||||
|
|
||||||
|
id: int | None = Field(
|
||||||
|
default=None,
|
||||||
|
primary_key=True,
|
||||||
|
sa_column_kwargs={"autoincrement": True},
|
||||||
|
)
|
||||||
|
scope: str = Field(nullable=False)
|
||||||
|
"""Scope of the preference, such as 'global', 'umo', 'plugin'."""
|
||||||
|
scope_id: str = Field(nullable=False)
|
||||||
|
"""ID of the scope, such as 'global', 'umo', 'plugin_name'."""
|
||||||
|
key: str = Field(nullable=False)
|
||||||
|
value: dict = Field(sa_type=JSON, nullable=False)
|
||||||
|
created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||||
|
updated_at: datetime = Field(
|
||||||
|
default_factory=lambda: datetime.now(timezone.utc),
|
||||||
|
sa_column_kwargs={"onupdate": datetime.now(timezone.utc)},
|
||||||
|
)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint(
|
||||||
|
"scope",
|
||||||
|
"scope_id",
|
||||||
|
"key",
|
||||||
|
name="uix_preference_scope_scope_id_key",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PlatformMessageHistory(SQLModel, table=True):
|
||||||
|
"""This class represents the message history for a specific platform.
|
||||||
|
|
||||||
|
It is used to store messages that are not LLM-generated, such as user messages
|
||||||
|
or platform-specific messages.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__tablename__ = "platform_message_history"
|
||||||
|
|
||||||
|
id: int | None = Field(
|
||||||
|
primary_key=True,
|
||||||
|
sa_column_kwargs={"autoincrement": True},
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
platform_id: str = Field(nullable=False)
|
||||||
|
user_id: str = Field(nullable=False) # An id of group, user in platform
|
||||||
|
sender_id: str | None = Field(default=None) # ID of the sender in the platform
|
||||||
|
sender_name: str | None = Field(
|
||||||
|
default=None,
|
||||||
|
) # Name of the sender in the platform
|
||||||
|
content: dict = Field(sa_type=JSON, nullable=False) # a message chain list
|
||||||
|
created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||||
|
updated_at: datetime = Field(
|
||||||
|
default_factory=lambda: datetime.now(timezone.utc),
|
||||||
|
sa_column_kwargs={"onupdate": datetime.now(timezone.utc)},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Attachment(SQLModel, table=True):
|
||||||
|
"""This class represents attachments for messages in AstrBot.
|
||||||
|
|
||||||
|
Attachments can be images, files, or other media types.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__tablename__ = "attachments"
|
||||||
|
|
||||||
|
inner_attachment_id: int | None = Field(
|
||||||
|
primary_key=True,
|
||||||
|
sa_column_kwargs={"autoincrement": True},
|
||||||
|
default=None,
|
||||||
|
)
|
||||||
|
attachment_id: str = Field(
|
||||||
|
max_length=36,
|
||||||
|
nullable=False,
|
||||||
|
unique=True,
|
||||||
|
default_factory=lambda: str(uuid.uuid4()),
|
||||||
|
)
|
||||||
|
path: str = Field(nullable=False) # Path to the file on disk
|
||||||
|
type: str = Field(nullable=False) # Type of the file (e.g., 'image', 'file')
|
||||||
|
mime_type: str = Field(nullable=False) # MIME type of the file
|
||||||
|
created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||||
|
updated_at: datetime = Field(
|
||||||
|
default_factory=lambda: datetime.now(timezone.utc),
|
||||||
|
sa_column_kwargs={"onupdate": datetime.now(timezone.utc)},
|
||||||
|
)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint(
|
||||||
|
"attachment_id",
|
||||||
|
name="uix_attachment_id",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class Platform():
|
class Conversation:
|
||||||
name: str
|
"""LLM 对话类
|
||||||
count: int
|
|
||||||
timestamp: int
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class Provider():
|
|
||||||
name: str
|
|
||||||
count: int
|
|
||||||
timestamp: int
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class Plugin():
|
|
||||||
name: str
|
|
||||||
count: int
|
|
||||||
timestamp: int
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class Command():
|
|
||||||
name: str
|
|
||||||
count: int
|
|
||||||
timestamp: int
|
|
||||||
|
|
||||||
@dataclass
|
对于 WebChat,history 存储了包括指令、回复、图片等在内的所有消息。
|
||||||
class Stats():
|
|
||||||
platform: List[Platform] = field(default_factory=list)
|
|
||||||
command: List[Command] = field(default_factory=list)
|
|
||||||
llm: List[Provider] = field(default_factory=list)
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class LLMHistory():
|
|
||||||
'''LLM 聊天时持久化的信息'''
|
|
||||||
provider_type: str
|
|
||||||
session_id: str
|
|
||||||
content: str
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ATRIVision():
|
|
||||||
'''Deprecated'''
|
|
||||||
id: str
|
|
||||||
url_or_path: str
|
|
||||||
caption: str
|
|
||||||
is_meme: bool
|
|
||||||
keywords: List[str]
|
|
||||||
platform_name: str
|
|
||||||
session_id: str
|
|
||||||
sender_nickname: str
|
|
||||||
timestamp: int = -1
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class Conversation():
|
|
||||||
'''LLM 对话存储
|
|
||||||
|
|
||||||
对于网页聊天,history 存储了包括指令、回复、图片等在内的所有消息。
|
|
||||||
对于其他平台的聊天,不存储非 LLM 的回复(因为考虑到已经存储在各自的平台上)。
|
对于其他平台的聊天,不存储非 LLM 的回复(因为考虑到已经存储在各自的平台上)。
|
||||||
'''
|
|
||||||
|
在 v4.0.0 版本及之后,WebChat 的历史记录被迁移至 `PlatformMessageHistory` 表中,
|
||||||
|
"""
|
||||||
|
|
||||||
|
platform_id: str
|
||||||
user_id: str
|
user_id: str
|
||||||
cid: str
|
cid: str
|
||||||
|
"""对话 ID, 是 uuid 格式的字符串"""
|
||||||
history: str = ""
|
history: str = ""
|
||||||
'''字符串格式的列表。'''
|
"""字符串格式的对话列表。"""
|
||||||
|
title: str | None = ""
|
||||||
|
persona_id: str | None = ""
|
||||||
created_at: int = 0
|
created_at: int = 0
|
||||||
updated_at: int = 0
|
updated_at: int = 0
|
||||||
title: str = ""
|
|
||||||
persona_id: str = ""
|
|
||||||
|
class Personality(TypedDict):
|
||||||
|
"""LLM 人格类。
|
||||||
|
|
||||||
|
在 v4.0.0 版本及之后,推荐使用上面的 Persona 类。并且, mood_imitation_dialogs 字段已被废弃。
|
||||||
|
"""
|
||||||
|
|
||||||
|
prompt: str = ""
|
||||||
|
name: str = ""
|
||||||
|
begin_dialogs: list[str] = []
|
||||||
|
mood_imitation_dialogs: list[str] = []
|
||||||
|
"""情感模拟对话预设。在 v4.0.0 版本及之后,已被废弃。"""
|
||||||
|
tools: list[str] | None = None
|
||||||
|
"""工具列表。None 表示使用所有工具,空列表表示不使用任何工具"""
|
||||||
|
|
||||||
|
# cache
|
||||||
|
_begin_dialogs_processed: list[dict] = []
|
||||||
|
_mood_imitation_dialogs_processed: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
# ====
|
||||||
|
# Deprecated, and will be removed in future versions.
|
||||||
|
# ====
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Platform:
|
||||||
|
"""平台使用统计数据"""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
count: int
|
||||||
|
timestamp: int
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Stats:
|
||||||
|
platform: list[Platform] = field(default_factory=list)
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,48 +0,0 @@
|
|||||||
CREATE TABLE IF NOT EXISTS platform(
|
|
||||||
name VARCHAR(32),
|
|
||||||
count INTEGER,
|
|
||||||
timestamp INTEGER
|
|
||||||
);
|
|
||||||
CREATE TABLE IF NOT EXISTS llm(
|
|
||||||
name VARCHAR(32),
|
|
||||||
count INTEGER,
|
|
||||||
timestamp INTEGER
|
|
||||||
);
|
|
||||||
CREATE TABLE IF NOT EXISTS plugin(
|
|
||||||
name VARCHAR(32),
|
|
||||||
count INTEGER,
|
|
||||||
timestamp INTEGER
|
|
||||||
);
|
|
||||||
CREATE TABLE IF NOT EXISTS command(
|
|
||||||
name VARCHAR(32),
|
|
||||||
count INTEGER,
|
|
||||||
timestamp INTEGER
|
|
||||||
);
|
|
||||||
CREATE TABLE IF NOT EXISTS llm_history(
|
|
||||||
provider_type VARCHAR(32),
|
|
||||||
session_id VARCHAR(32),
|
|
||||||
content TEXT
|
|
||||||
);
|
|
||||||
|
|
||||||
-- ATRI
|
|
||||||
CREATE TABLE IF NOT EXISTS atri_vision(
|
|
||||||
id TEXT,
|
|
||||||
url_or_path TEXT,
|
|
||||||
caption TEXT,
|
|
||||||
is_meme BOOLEAN,
|
|
||||||
keywords TEXT,
|
|
||||||
platform_name VARCHAR(32),
|
|
||||||
session_id VARCHAR(32),
|
|
||||||
sender_nickname VARCHAR(32),
|
|
||||||
timestamp INTEGER
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS webchat_conversation(
|
|
||||||
user_id TEXT,
|
|
||||||
cid TEXT,
|
|
||||||
history TEXT,
|
|
||||||
created_at INTEGER,
|
|
||||||
updated_at INTEGER,
|
|
||||||
title TEXT,
|
|
||||||
persona_id TEXT
|
|
||||||
);
|
|
||||||
73
astrbot/core/db/vec_db/base.py
Normal file
73
astrbot/core/db/vec_db/base.py
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
import abc
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Result:
|
||||||
|
similarity: float
|
||||||
|
data: dict
|
||||||
|
|
||||||
|
|
||||||
|
class BaseVecDB:
|
||||||
|
async def initialize(self):
|
||||||
|
"""初始化向量数据库"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def insert(
|
||||||
|
self,
|
||||||
|
content: str,
|
||||||
|
metadata: dict | None = None,
|
||||||
|
id: str | None = None,
|
||||||
|
) -> int:
|
||||||
|
"""插入一条文本和其对应向量,自动生成 ID 并保持一致性。"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def insert_batch(
|
||||||
|
self,
|
||||||
|
contents: list[str],
|
||||||
|
metadatas: list[dict] | None = None,
|
||||||
|
ids: list[str] | None = None,
|
||||||
|
batch_size: int = 32,
|
||||||
|
tasks_limit: int = 3,
|
||||||
|
max_retries: int = 3,
|
||||||
|
progress_callback=None,
|
||||||
|
) -> int:
|
||||||
|
"""批量插入文本和其对应向量,自动生成 ID 并保持一致性。
|
||||||
|
|
||||||
|
Args:
|
||||||
|
progress_callback: 进度回调函数,接收参数 (current, total)
|
||||||
|
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def retrieve(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
top_k: int = 5,
|
||||||
|
fetch_k: int = 20,
|
||||||
|
rerank: bool = False,
|
||||||
|
metadata_filters: dict | None = None,
|
||||||
|
) -> list[Result]:
|
||||||
|
"""搜索最相似的文档。
|
||||||
|
Args:
|
||||||
|
query (str): 查询文本
|
||||||
|
top_k (int): 返回的最相似文档的数量
|
||||||
|
Returns:
|
||||||
|
List[Result]: 查询结果
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def delete(self, doc_id: str) -> bool:
|
||||||
|
"""删除指定文档。
|
||||||
|
Args:
|
||||||
|
doc_id (str): 要删除的文档 ID
|
||||||
|
Returns:
|
||||||
|
bool: 删除是否成功
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
async def close(self): ...
|
||||||
3
astrbot/core/db/vec_db/faiss_impl/__init__.py
Normal file
3
astrbot/core/db/vec_db/faiss_impl/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
from .vec_db import FaissVecDB
|
||||||
|
|
||||||
|
__all__ = ["FaissVecDB"]
|
||||||
392
astrbot/core/db/vec_db/faiss_impl/document_storage.py
Normal file
392
astrbot/core/db/vec_db/faiss_impl/document_storage.py
Normal file
@@ -0,0 +1,392 @@
|
|||||||
|
import json
|
||||||
|
import os
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from sqlalchemy import Column, Text
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from sqlmodel import Field, MetaData, SQLModel, col, func, select, text
|
||||||
|
|
||||||
|
from astrbot.core import logger
|
||||||
|
|
||||||
|
|
||||||
|
class BaseDocModel(SQLModel, table=False):
|
||||||
|
metadata = MetaData()
|
||||||
|
|
||||||
|
|
||||||
|
class Document(BaseDocModel, table=True):
|
||||||
|
"""SQLModel for documents table."""
|
||||||
|
|
||||||
|
__tablename__ = "documents" # type: ignore
|
||||||
|
|
||||||
|
id: int | None = Field(
|
||||||
|
default=None,
|
||||||
|
primary_key=True,
|
||||||
|
sa_column_kwargs={"autoincrement": True},
|
||||||
|
)
|
||||||
|
doc_id: str = Field(nullable=False)
|
||||||
|
text: str = Field(nullable=False)
|
||||||
|
metadata_: str | None = Field(default=None, sa_column=Column("metadata", Text))
|
||||||
|
created_at: datetime | None = Field(default=None)
|
||||||
|
updated_at: datetime | None = Field(default=None)
|
||||||
|
|
||||||
|
|
||||||
|
class DocumentStorage:
|
||||||
|
def __init__(self, db_path: str):
|
||||||
|
self.db_path = db_path
|
||||||
|
self.DATABASE_URL = f"sqlite+aiosqlite:///{db_path}"
|
||||||
|
self.engine: AsyncEngine | None = None
|
||||||
|
self.async_session_maker: sessionmaker | None = None
|
||||||
|
self.sqlite_init_path = os.path.join(
|
||||||
|
os.path.dirname(__file__),
|
||||||
|
"sqlite_init.sql",
|
||||||
|
)
|
||||||
|
|
||||||
|
async def initialize(self):
|
||||||
|
"""Initialize the SQLite database and create the documents table if it doesn't exist."""
|
||||||
|
await self.connect()
|
||||||
|
async with self.engine.begin() as conn: # type: ignore
|
||||||
|
# Create tables using SQLModel
|
||||||
|
await conn.run_sync(BaseDocModel.metadata.create_all)
|
||||||
|
|
||||||
|
try:
|
||||||
|
await conn.execute(
|
||||||
|
text(
|
||||||
|
"ALTER TABLE documents ADD COLUMN kb_doc_id TEXT "
|
||||||
|
"GENERATED ALWAYS AS (json_extract(metadata, '$.kb_doc_id')) STORED",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await conn.execute(
|
||||||
|
text(
|
||||||
|
"ALTER TABLE documents ADD COLUMN user_id TEXT "
|
||||||
|
"GENERATED ALWAYS AS (json_extract(metadata, '$.user_id')) STORED",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create indexes
|
||||||
|
await conn.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_documents_kb_doc_id ON documents(kb_doc_id)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await conn.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_documents_user_id ON documents(user_id)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
except BaseException:
|
||||||
|
pass
|
||||||
|
|
||||||
|
await conn.commit()
|
||||||
|
|
||||||
|
async def connect(self):
|
||||||
|
"""Connect to the SQLite database."""
|
||||||
|
if self.engine is None:
|
||||||
|
self.engine = create_async_engine(
|
||||||
|
self.DATABASE_URL,
|
||||||
|
echo=False,
|
||||||
|
future=True,
|
||||||
|
)
|
||||||
|
self.async_session_maker = sessionmaker(
|
||||||
|
self.engine, # type: ignore
|
||||||
|
class_=AsyncSession,
|
||||||
|
expire_on_commit=False,
|
||||||
|
) # type: ignore
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def get_session(self):
|
||||||
|
"""Context manager for database sessions."""
|
||||||
|
async with self.async_session_maker() as session: # type: ignore
|
||||||
|
yield session
|
||||||
|
|
||||||
|
async def get_documents(
|
||||||
|
self,
|
||||||
|
metadata_filters: dict,
|
||||||
|
ids: list | None = None,
|
||||||
|
offset: int | None = 0,
|
||||||
|
limit: int | None = 100,
|
||||||
|
) -> list[dict]:
|
||||||
|
"""Retrieve documents by metadata filters and ids.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
metadata_filters (dict): The metadata filters to apply.
|
||||||
|
ids (list | None): Optional list of document IDs to filter.
|
||||||
|
offset (int | None): Offset for pagination.
|
||||||
|
limit (int | None): Limit for pagination.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list: The list of documents that match the filters.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self.engine is None:
|
||||||
|
logger.warning(
|
||||||
|
"Database connection is not initialized, returning empty result",
|
||||||
|
)
|
||||||
|
return []
|
||||||
|
|
||||||
|
async with self.get_session() as session:
|
||||||
|
query = select(Document)
|
||||||
|
|
||||||
|
for key, val in metadata_filters.items():
|
||||||
|
query = query.where(
|
||||||
|
text(f"json_extract(metadata, '$.{key}') = :filter_{key}"),
|
||||||
|
).params(**{f"filter_{key}": val})
|
||||||
|
|
||||||
|
if ids is not None and len(ids) > 0:
|
||||||
|
valid_ids = [int(i) for i in ids if i != -1]
|
||||||
|
if valid_ids:
|
||||||
|
query = query.where(col(Document.id).in_(valid_ids))
|
||||||
|
|
||||||
|
if limit is not None:
|
||||||
|
query = query.limit(limit)
|
||||||
|
if offset is not None:
|
||||||
|
query = query.offset(offset)
|
||||||
|
|
||||||
|
result = await session.execute(query)
|
||||||
|
documents = result.scalars().all()
|
||||||
|
|
||||||
|
return [self._document_to_dict(doc) for doc in documents]
|
||||||
|
|
||||||
|
async def insert_document(self, doc_id: str, text: str, metadata: dict) -> int:
|
||||||
|
"""Insert a single document and return its integer ID.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
doc_id (str): The document ID (UUID string).
|
||||||
|
text (str): The document text.
|
||||||
|
metadata (dict): The document metadata.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int: The integer ID of the inserted document.
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.engine is not None, "Database connection is not initialized."
|
||||||
|
|
||||||
|
async with self.get_session() as session, session.begin():
|
||||||
|
document = Document(
|
||||||
|
doc_id=doc_id,
|
||||||
|
text=text,
|
||||||
|
metadata_=json.dumps(metadata),
|
||||||
|
created_at=datetime.now(),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
session.add(document)
|
||||||
|
await session.flush() # Flush to get the ID
|
||||||
|
return document.id # type: ignore
|
||||||
|
|
||||||
|
async def insert_documents_batch(
|
||||||
|
self,
|
||||||
|
doc_ids: list[str],
|
||||||
|
texts: list[str],
|
||||||
|
metadatas: list[dict],
|
||||||
|
) -> list[int]:
|
||||||
|
"""Batch insert documents and return their integer IDs.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
doc_ids (list[str]): List of document IDs (UUID strings).
|
||||||
|
texts (list[str]): List of document texts.
|
||||||
|
metadatas (list[dict]): List of document metadata.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[int]: List of integer IDs of the inserted documents.
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.engine is not None, "Database connection is not initialized."
|
||||||
|
|
||||||
|
async with self.get_session() as session, session.begin():
|
||||||
|
import json
|
||||||
|
|
||||||
|
documents = []
|
||||||
|
for doc_id, text, metadata in zip(doc_ids, texts, metadatas):
|
||||||
|
document = Document(
|
||||||
|
doc_id=doc_id,
|
||||||
|
text=text,
|
||||||
|
metadata_=json.dumps(metadata),
|
||||||
|
created_at=datetime.now(),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
documents.append(document)
|
||||||
|
session.add(document)
|
||||||
|
|
||||||
|
await session.flush() # Flush to get all IDs
|
||||||
|
return [doc.id for doc in documents] # type: ignore
|
||||||
|
|
||||||
|
async def delete_document_by_doc_id(self, doc_id: str):
|
||||||
|
"""Delete a document by its doc_id.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
doc_id (str): The doc_id of the document to delete.
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.engine is not None, "Database connection is not initialized."
|
||||||
|
|
||||||
|
async with self.get_session() as session, session.begin():
|
||||||
|
query = select(Document).where(col(Document.doc_id) == doc_id)
|
||||||
|
result = await session.execute(query)
|
||||||
|
document = result.scalar_one_or_none()
|
||||||
|
|
||||||
|
if document:
|
||||||
|
await session.delete(document)
|
||||||
|
|
||||||
|
async def get_document_by_doc_id(self, doc_id: str):
|
||||||
|
"""Retrieve a document by its doc_id.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
doc_id (str): The doc_id of the document to retrieve.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: The document data or None if not found.
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.engine is not None, "Database connection is not initialized."
|
||||||
|
|
||||||
|
async with self.get_session() as session:
|
||||||
|
query = select(Document).where(col(Document.doc_id) == doc_id)
|
||||||
|
result = await session.execute(query)
|
||||||
|
document = result.scalar_one_or_none()
|
||||||
|
|
||||||
|
if document:
|
||||||
|
return self._document_to_dict(document)
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def update_document_by_doc_id(self, doc_id: str, new_text: str):
|
||||||
|
"""Update a document by its doc_id.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
doc_id (str): The doc_id.
|
||||||
|
new_text (str): The new text to update the document with.
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.engine is not None, "Database connection is not initialized."
|
||||||
|
|
||||||
|
async with self.get_session() as session, session.begin():
|
||||||
|
query = select(Document).where(col(Document.doc_id) == doc_id)
|
||||||
|
result = await session.execute(query)
|
||||||
|
document = result.scalar_one_or_none()
|
||||||
|
|
||||||
|
if document:
|
||||||
|
document.text = new_text
|
||||||
|
document.updated_at = datetime.now()
|
||||||
|
session.add(document)
|
||||||
|
|
||||||
|
async def delete_documents(self, metadata_filters: dict):
|
||||||
|
"""Delete documents by their metadata filters.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
metadata_filters (dict): The metadata filters to apply.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self.engine is None:
|
||||||
|
logger.warning(
|
||||||
|
"Database connection is not initialized, skipping delete operation",
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
async with self.get_session() as session, session.begin():
|
||||||
|
query = select(Document)
|
||||||
|
|
||||||
|
for key, val in metadata_filters.items():
|
||||||
|
query = query.where(
|
||||||
|
text(f"json_extract(metadata, '$.{key}') = :filter_{key}"),
|
||||||
|
).params(**{f"filter_{key}": val})
|
||||||
|
|
||||||
|
result = await session.execute(query)
|
||||||
|
documents = result.scalars().all()
|
||||||
|
|
||||||
|
for doc in documents:
|
||||||
|
await session.delete(doc)
|
||||||
|
|
||||||
|
async def count_documents(self, metadata_filters: dict | None = None) -> int:
|
||||||
|
"""Count documents in the database.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
metadata_filters (dict | None): Metadata filters to apply.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int: The count of documents.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self.engine is None:
|
||||||
|
logger.warning("Database connection is not initialized, returning 0")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
async with self.get_session() as session:
|
||||||
|
query = select(func.count(col(Document.id)))
|
||||||
|
|
||||||
|
if metadata_filters:
|
||||||
|
for key, val in metadata_filters.items():
|
||||||
|
query = query.where(
|
||||||
|
text(f"json_extract(metadata, '$.{key}') = :filter_{key}"),
|
||||||
|
).params(**{f"filter_{key}": val})
|
||||||
|
|
||||||
|
result = await session.execute(query)
|
||||||
|
count = result.scalar_one_or_none()
|
||||||
|
return count if count is not None else 0
|
||||||
|
|
||||||
|
async def get_user_ids(self) -> list[str]:
|
||||||
|
"""Retrieve all user IDs from the documents table.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list: A list of user IDs.
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.engine is not None, "Database connection is not initialized."
|
||||||
|
|
||||||
|
async with self.get_session() as session:
|
||||||
|
query = text(
|
||||||
|
"SELECT DISTINCT user_id FROM documents WHERE user_id IS NOT NULL",
|
||||||
|
)
|
||||||
|
result = await session.execute(query)
|
||||||
|
rows = result.fetchall()
|
||||||
|
return [row[0] for row in rows]
|
||||||
|
|
||||||
|
def _document_to_dict(self, document: Document) -> dict:
|
||||||
|
"""Convert a Document model to a dictionary.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
document (Document): The document to convert.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: The converted dictionary.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return {
|
||||||
|
"id": document.id,
|
||||||
|
"doc_id": document.doc_id,
|
||||||
|
"text": document.text,
|
||||||
|
"metadata": document.metadata_,
|
||||||
|
"created_at": document.created_at.isoformat()
|
||||||
|
if isinstance(document.created_at, datetime)
|
||||||
|
else document.created_at,
|
||||||
|
"updated_at": document.updated_at.isoformat()
|
||||||
|
if isinstance(document.updated_at, datetime)
|
||||||
|
else document.updated_at,
|
||||||
|
}
|
||||||
|
|
||||||
|
async def tuple_to_dict(self, row):
|
||||||
|
"""Convert a tuple to a dictionary.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
row (tuple): The row to convert.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: The converted dictionary.
|
||||||
|
|
||||||
|
Note: This method is kept for backward compatibility but is no longer used internally.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return {
|
||||||
|
"id": row[0],
|
||||||
|
"doc_id": row[1],
|
||||||
|
"text": row[2],
|
||||||
|
"metadata": row[3],
|
||||||
|
"created_at": row[4],
|
||||||
|
"updated_at": row[5],
|
||||||
|
}
|
||||||
|
|
||||||
|
async def close(self):
|
||||||
|
"""Close the connection to the SQLite database."""
|
||||||
|
if self.engine:
|
||||||
|
await self.engine.dispose()
|
||||||
|
self.engine = None
|
||||||
|
self.async_session_maker = None
|
||||||
93
astrbot/core/db/vec_db/faiss_impl/embedding_storage.py
Normal file
93
astrbot/core/db/vec_db/faiss_impl/embedding_storage.py
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
try:
|
||||||
|
import faiss
|
||||||
|
except ModuleNotFoundError:
|
||||||
|
raise ImportError(
|
||||||
|
"faiss 未安装。请使用 'pip install faiss-cpu' 或 'pip install faiss-gpu' 安装。",
|
||||||
|
)
|
||||||
|
import os
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
|
||||||
|
class EmbeddingStorage:
|
||||||
|
def __init__(self, dimension: int, path: str | None = None):
|
||||||
|
self.dimension = dimension
|
||||||
|
self.path = path
|
||||||
|
self.index = None
|
||||||
|
if path and os.path.exists(path):
|
||||||
|
self.index = faiss.read_index(path)
|
||||||
|
else:
|
||||||
|
base_index = faiss.IndexFlatL2(dimension)
|
||||||
|
self.index = faiss.IndexIDMap(base_index)
|
||||||
|
|
||||||
|
async def insert(self, vector: np.ndarray, id: int):
|
||||||
|
"""插入向量
|
||||||
|
|
||||||
|
Args:
|
||||||
|
vector (np.ndarray): 要插入的向量
|
||||||
|
id (int): 向量的ID
|
||||||
|
Raises:
|
||||||
|
ValueError: 如果向量的维度与存储的维度不匹配
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.index is not None, "FAISS index is not initialized."
|
||||||
|
if vector.shape[0] != self.dimension:
|
||||||
|
raise ValueError(
|
||||||
|
f"向量维度不匹配, 期望: {self.dimension}, 实际: {vector.shape[0]}",
|
||||||
|
)
|
||||||
|
self.index.add_with_ids(vector.reshape(1, -1), np.array([id]))
|
||||||
|
await self.save_index()
|
||||||
|
|
||||||
|
async def insert_batch(self, vectors: np.ndarray, ids: list[int]):
|
||||||
|
"""批量插入向量
|
||||||
|
|
||||||
|
Args:
|
||||||
|
vectors (np.ndarray): 要插入的向量数组
|
||||||
|
ids (list[int]): 向量的ID列表
|
||||||
|
Raises:
|
||||||
|
ValueError: 如果向量的维度与存储的维度不匹配
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.index is not None, "FAISS index is not initialized."
|
||||||
|
if vectors.shape[1] != self.dimension:
|
||||||
|
raise ValueError(
|
||||||
|
f"向量维度不匹配, 期望: {self.dimension}, 实际: {vectors.shape[1]}",
|
||||||
|
)
|
||||||
|
self.index.add_with_ids(vectors, np.array(ids))
|
||||||
|
await self.save_index()
|
||||||
|
|
||||||
|
async def search(self, vector: np.ndarray, k: int) -> tuple:
|
||||||
|
"""搜索最相似的向量
|
||||||
|
|
||||||
|
Args:
|
||||||
|
vector (np.ndarray): 查询向量
|
||||||
|
k (int): 返回的最相似向量的数量
|
||||||
|
Returns:
|
||||||
|
tuple: (距离, 索引)
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.index is not None, "FAISS index is not initialized."
|
||||||
|
faiss.normalize_L2(vector)
|
||||||
|
distances, indices = self.index.search(vector, k)
|
||||||
|
return distances, indices
|
||||||
|
|
||||||
|
async def delete(self, ids: list[int]):
|
||||||
|
"""删除向量
|
||||||
|
|
||||||
|
Args:
|
||||||
|
ids (list[int]): 要删除的向量ID列表
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert self.index is not None, "FAISS index is not initialized."
|
||||||
|
id_array = np.array(ids, dtype=np.int64)
|
||||||
|
self.index.remove_ids(id_array)
|
||||||
|
await self.save_index()
|
||||||
|
|
||||||
|
async def save_index(self):
|
||||||
|
"""保存索引
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path (str): 保存索引的路径
|
||||||
|
|
||||||
|
"""
|
||||||
|
faiss.write_index(self.index, self.path)
|
||||||
17
astrbot/core/db/vec_db/faiss_impl/sqlite_init.sql
Normal file
17
astrbot/core/db/vec_db/faiss_impl/sqlite_init.sql
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
-- 创建文档存储表,包含 faiss 中文档的 id,文档文本,create_at,updated_at
|
||||||
|
CREATE TABLE documents (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
doc_id TEXT NOT NULL,
|
||||||
|
text TEXT NOT NULL,
|
||||||
|
metadata TEXT,
|
||||||
|
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
ALTER TABLE documents
|
||||||
|
ADD COLUMN group_id TEXT GENERATED ALWAYS AS (json_extract(metadata, '$.group_id')) STORED;
|
||||||
|
ALTER TABLE documents
|
||||||
|
ADD COLUMN user_id TEXT GENERATED ALWAYS AS (json_extract(metadata, '$.user_id')) STORED;
|
||||||
|
|
||||||
|
CREATE INDEX idx_documents_user_id ON documents(user_id);
|
||||||
|
CREATE INDEX idx_documents_group_id ON documents(group_id);
|
||||||
204
astrbot/core/db/vec_db/faiss_impl/vec_db.py
Normal file
204
astrbot/core/db/vec_db/faiss_impl/vec_db.py
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
from astrbot import logger
|
||||||
|
from astrbot.core.provider.provider import EmbeddingProvider, RerankProvider
|
||||||
|
|
||||||
|
from ..base import BaseVecDB, Result
|
||||||
|
from .document_storage import DocumentStorage
|
||||||
|
from .embedding_storage import EmbeddingStorage
|
||||||
|
|
||||||
|
|
||||||
|
class FaissVecDB(BaseVecDB):
|
||||||
|
"""A class to represent a vector database."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
doc_store_path: str,
|
||||||
|
index_store_path: str,
|
||||||
|
embedding_provider: EmbeddingProvider,
|
||||||
|
rerank_provider: RerankProvider | None = None,
|
||||||
|
):
|
||||||
|
self.doc_store_path = doc_store_path
|
||||||
|
self.index_store_path = index_store_path
|
||||||
|
self.embedding_provider = embedding_provider
|
||||||
|
self.document_storage = DocumentStorage(doc_store_path)
|
||||||
|
self.embedding_storage = EmbeddingStorage(
|
||||||
|
embedding_provider.get_dim(),
|
||||||
|
index_store_path,
|
||||||
|
)
|
||||||
|
self.embedding_provider = embedding_provider
|
||||||
|
self.rerank_provider = rerank_provider
|
||||||
|
|
||||||
|
async def initialize(self):
|
||||||
|
await self.document_storage.initialize()
|
||||||
|
|
||||||
|
async def insert(
|
||||||
|
self,
|
||||||
|
content: str,
|
||||||
|
metadata: dict | None = None,
|
||||||
|
id: str | None = None,
|
||||||
|
) -> int:
|
||||||
|
"""插入一条文本和其对应向量,自动生成 ID 并保持一致性。"""
|
||||||
|
metadata = metadata or {}
|
||||||
|
str_id = id or str(uuid.uuid4()) # 使用 UUID 作为原始 ID
|
||||||
|
|
||||||
|
vector = await self.embedding_provider.get_embedding(content)
|
||||||
|
vector = np.array(vector, dtype=np.float32)
|
||||||
|
|
||||||
|
# 使用 DocumentStorage 的方法插入文档
|
||||||
|
int_id = await self.document_storage.insert_document(str_id, content, metadata)
|
||||||
|
|
||||||
|
# 插入向量到 FAISS
|
||||||
|
await self.embedding_storage.insert(vector, int_id)
|
||||||
|
return int_id
|
||||||
|
|
||||||
|
async def insert_batch(
|
||||||
|
self,
|
||||||
|
contents: list[str],
|
||||||
|
metadatas: list[dict] | None = None,
|
||||||
|
ids: list[str] | None = None,
|
||||||
|
batch_size: int = 32,
|
||||||
|
tasks_limit: int = 3,
|
||||||
|
max_retries: int = 3,
|
||||||
|
progress_callback=None,
|
||||||
|
) -> list[int]:
|
||||||
|
"""批量插入文本和其对应向量,自动生成 ID 并保持一致性。
|
||||||
|
|
||||||
|
Args:
|
||||||
|
progress_callback: 进度回调函数,接收参数 (current, total)
|
||||||
|
|
||||||
|
"""
|
||||||
|
metadatas = metadatas or [{} for _ in contents]
|
||||||
|
ids = ids or [str(uuid.uuid4()) for _ in contents]
|
||||||
|
|
||||||
|
start = time.time()
|
||||||
|
logger.debug(f"Generating embeddings for {len(contents)} contents...")
|
||||||
|
vectors = await self.embedding_provider.get_embeddings_batch(
|
||||||
|
contents,
|
||||||
|
batch_size=batch_size,
|
||||||
|
tasks_limit=tasks_limit,
|
||||||
|
max_retries=max_retries,
|
||||||
|
progress_callback=progress_callback,
|
||||||
|
)
|
||||||
|
end = time.time()
|
||||||
|
logger.debug(
|
||||||
|
f"Generated embeddings for {len(contents)} contents in {end - start:.2f} seconds.",
|
||||||
|
)
|
||||||
|
|
||||||
|
# 使用 DocumentStorage 的批量插入方法
|
||||||
|
int_ids = await self.document_storage.insert_documents_batch(
|
||||||
|
ids,
|
||||||
|
contents,
|
||||||
|
metadatas,
|
||||||
|
)
|
||||||
|
|
||||||
|
# 批量插入向量到 FAISS
|
||||||
|
vectors_array = np.array(vectors).astype("float32")
|
||||||
|
await self.embedding_storage.insert_batch(vectors_array, int_ids)
|
||||||
|
return int_ids
|
||||||
|
|
||||||
|
async def retrieve(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
k: int = 5,
|
||||||
|
fetch_k: int = 20,
|
||||||
|
rerank: bool = False,
|
||||||
|
metadata_filters: dict | None = None,
|
||||||
|
) -> list[Result]:
|
||||||
|
"""搜索最相似的文档。
|
||||||
|
|
||||||
|
Args:
|
||||||
|
query (str): 查询文本
|
||||||
|
k (int): 返回的最相似文档的数量
|
||||||
|
fetch_k (int): 在根据 metadata 过滤前从 FAISS 中获取的数量
|
||||||
|
rerank (bool): 是否使用重排序。这需要在实例化时提供 rerank_provider, 如果未提供并且 rerank 为 True, 不会抛出异常。
|
||||||
|
metadata_filters (dict): 元数据过滤器
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List[Result]: 查询结果
|
||||||
|
|
||||||
|
"""
|
||||||
|
embedding = await self.embedding_provider.get_embedding(query)
|
||||||
|
scores, indices = await self.embedding_storage.search(
|
||||||
|
vector=np.array([embedding]).astype("float32"),
|
||||||
|
k=fetch_k if metadata_filters else k,
|
||||||
|
)
|
||||||
|
if len(indices[0]) == 0 or indices[0][0] == -1:
|
||||||
|
return []
|
||||||
|
# normalize scores
|
||||||
|
scores[0] = 1.0 - (scores[0] / 2.0)
|
||||||
|
# NOTE: maybe the size is less than k.
|
||||||
|
fetched_docs = await self.document_storage.get_documents(
|
||||||
|
metadata_filters=metadata_filters or {},
|
||||||
|
ids=indices[0],
|
||||||
|
)
|
||||||
|
if not fetched_docs:
|
||||||
|
return []
|
||||||
|
result_docs: list[Result] = []
|
||||||
|
|
||||||
|
idx_pos = {fetch_doc["id"]: idx for idx, fetch_doc in enumerate(fetched_docs)}
|
||||||
|
for i, indice_idx in enumerate(indices[0]):
|
||||||
|
pos = idx_pos.get(indice_idx)
|
||||||
|
if pos is None:
|
||||||
|
continue
|
||||||
|
fetch_doc = fetched_docs[pos]
|
||||||
|
score = scores[0][i]
|
||||||
|
result_docs.append(Result(similarity=float(score), data=fetch_doc))
|
||||||
|
|
||||||
|
top_k_results = result_docs[:k]
|
||||||
|
|
||||||
|
if rerank and self.rerank_provider:
|
||||||
|
documents = [doc.data["text"] for doc in top_k_results]
|
||||||
|
reranked_results = await self.rerank_provider.rerank(query, documents)
|
||||||
|
reranked_results = sorted(
|
||||||
|
reranked_results,
|
||||||
|
key=lambda x: x.relevance_score,
|
||||||
|
reverse=True,
|
||||||
|
)
|
||||||
|
top_k_results = [
|
||||||
|
top_k_results[reranked_result.index]
|
||||||
|
for reranked_result in reranked_results
|
||||||
|
]
|
||||||
|
|
||||||
|
return top_k_results
|
||||||
|
|
||||||
|
async def delete(self, doc_id: str):
|
||||||
|
"""删除一条文档块(chunk)"""
|
||||||
|
# 获得对应的 int id
|
||||||
|
result = await self.document_storage.get_document_by_doc_id(doc_id)
|
||||||
|
int_id = result["id"] if result else None
|
||||||
|
if int_id is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
# 使用 DocumentStorage 的删除方法
|
||||||
|
await self.document_storage.delete_document_by_doc_id(doc_id)
|
||||||
|
await self.embedding_storage.delete([int_id])
|
||||||
|
|
||||||
|
async def close(self):
|
||||||
|
await self.document_storage.close()
|
||||||
|
|
||||||
|
async def count_documents(self, metadata_filter: dict | None = None) -> int:
|
||||||
|
"""计算文档数量
|
||||||
|
|
||||||
|
Args:
|
||||||
|
metadata_filter (dict | None): 元数据过滤器
|
||||||
|
|
||||||
|
"""
|
||||||
|
count = await self.document_storage.count_documents(
|
||||||
|
metadata_filters=metadata_filter or {},
|
||||||
|
)
|
||||||
|
return count
|
||||||
|
|
||||||
|
async def delete_documents(self, metadata_filters: dict):
|
||||||
|
"""根据元数据过滤器删除文档"""
|
||||||
|
docs = await self.document_storage.get_documents(
|
||||||
|
metadata_filters=metadata_filters,
|
||||||
|
offset=None,
|
||||||
|
limit=None,
|
||||||
|
)
|
||||||
|
doc_ids: list[int] = [doc["id"] for doc in docs]
|
||||||
|
await self.embedding_storage.delete(doc_ids)
|
||||||
|
await self.document_storage.delete_documents(metadata_filters=metadata_filters)
|
||||||
@@ -1,23 +1,61 @@
|
|||||||
|
"""事件总线, 用于处理事件的分发和处理
|
||||||
|
事件总线是一个异步队列, 用于接收各种消息事件, 并将其发送到Scheduler调度器进行处理
|
||||||
|
其中包含了一个无限循环的调度函数, 用于从事件队列中获取新的事件, 并创建一个新的异步任务来执行管道调度器的处理逻辑
|
||||||
|
|
||||||
|
class:
|
||||||
|
EventBus: 事件总线, 用于处理事件的分发和处理
|
||||||
|
|
||||||
|
工作流程:
|
||||||
|
1. 维护一个异步队列, 来接受各种消息事件
|
||||||
|
2. 无限循环的调度函数, 从事件队列中获取新的事件, 打印日志并创建一个新的异步任务来执行管道调度器的处理逻辑
|
||||||
|
"""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
from asyncio import Queue
|
from asyncio import Queue
|
||||||
from astrbot.core.pipeline.scheduler import PipelineScheduler
|
|
||||||
from astrbot.core import logger
|
from astrbot.core import logger
|
||||||
|
from astrbot.core.astrbot_config_mgr import AstrBotConfigManager
|
||||||
|
from astrbot.core.pipeline.scheduler import PipelineScheduler
|
||||||
|
|
||||||
from .platform import AstrMessageEvent
|
from .platform import AstrMessageEvent
|
||||||
|
|
||||||
|
|
||||||
class EventBus:
|
class EventBus:
|
||||||
def __init__(self, event_queue: Queue, pipeline_scheduler: PipelineScheduler):
|
"""用于处理事件的分发和处理"""
|
||||||
self.event_queue = event_queue
|
|
||||||
self.pipeline_scheduler = pipeline_scheduler
|
def __init__(
|
||||||
|
self,
|
||||||
|
event_queue: Queue,
|
||||||
|
pipeline_scheduler_mapping: dict[str, PipelineScheduler],
|
||||||
|
astrbot_config_mgr: AstrBotConfigManager = None,
|
||||||
|
):
|
||||||
|
self.event_queue = event_queue # 事件队列
|
||||||
|
# abconf uuid -> scheduler
|
||||||
|
self.pipeline_scheduler_mapping = pipeline_scheduler_mapping
|
||||||
|
self.astrbot_config_mgr = astrbot_config_mgr
|
||||||
|
|
||||||
async def dispatch(self):
|
async def dispatch(self):
|
||||||
logger.info("事件总线已打开。")
|
|
||||||
while True:
|
while True:
|
||||||
event: AstrMessageEvent = await self.event_queue.get()
|
event: AstrMessageEvent = await self.event_queue.get()
|
||||||
self._print_event(event)
|
conf_info = self.astrbot_config_mgr.get_conf_info(event.unified_msg_origin)
|
||||||
asyncio.create_task(self.pipeline_scheduler.execute(event))
|
self._print_event(event, conf_info["name"])
|
||||||
|
scheduler = self.pipeline_scheduler_mapping.get(conf_info["id"])
|
||||||
def _print_event(self, event: AstrMessageEvent):
|
asyncio.create_task(scheduler.execute(event))
|
||||||
|
|
||||||
|
def _print_event(self, event: AstrMessageEvent, conf_name: str):
|
||||||
|
"""用于记录事件信息
|
||||||
|
|
||||||
|
Args:
|
||||||
|
event (AstrMessageEvent): 事件对象
|
||||||
|
|
||||||
|
"""
|
||||||
|
# 如果有发送者名称: [平台名] 发送者名称/发送者ID: 消息概要
|
||||||
if event.get_sender_name():
|
if event.get_sender_name():
|
||||||
logger.info(f"[{event.get_platform_name()}] {event.get_sender_name()}/{event.get_sender_id()}: {event.get_message_outline()}")
|
logger.info(
|
||||||
|
f"[{conf_name}] [{event.get_platform_id()}({event.get_platform_name()})] {event.get_sender_name()}/{event.get_sender_id()}: {event.get_message_outline()}",
|
||||||
|
)
|
||||||
|
# 没有发送者名称: [平台名] 发送者ID: 消息概要
|
||||||
else:
|
else:
|
||||||
logger.info(f"[{event.get_platform_name()}] {event.get_sender_id()}: {event.get_message_outline()}")
|
logger.info(
|
||||||
|
f"[{conf_name}] [{event.get_platform_id()}({event.get_platform_name()})] {event.get_sender_id()}: {event.get_message_outline()}",
|
||||||
|
)
|
||||||
|
|||||||
98
astrbot/core/file_token_service.py
Normal file
98
astrbot/core/file_token_service.py
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
from urllib.parse import unquote, urlparse
|
||||||
|
|
||||||
|
|
||||||
|
class FileTokenService:
|
||||||
|
"""维护一个简单的基于令牌的文件下载服务,支持超时和懒清除。"""
|
||||||
|
|
||||||
|
def __init__(self, default_timeout: float = 300):
|
||||||
|
self.lock = asyncio.Lock()
|
||||||
|
self.staged_files = {} # token: (file_path, expire_time)
|
||||||
|
self.default_timeout = default_timeout
|
||||||
|
|
||||||
|
async def _cleanup_expired_tokens(self):
|
||||||
|
"""清理过期的令牌"""
|
||||||
|
now = time.time()
|
||||||
|
expired_tokens = [
|
||||||
|
token for token, (_, expire) in self.staged_files.items() if expire < now
|
||||||
|
]
|
||||||
|
for token in expired_tokens:
|
||||||
|
self.staged_files.pop(token, None)
|
||||||
|
|
||||||
|
async def check_token_expired(self, file_token: str) -> bool:
|
||||||
|
async with self.lock:
|
||||||
|
await self._cleanup_expired_tokens()
|
||||||
|
return file_token not in self.staged_files
|
||||||
|
|
||||||
|
async def register_file(self, file_path: str, timeout: float | None = None) -> str:
|
||||||
|
"""向令牌服务注册一个文件。
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path(str): 文件路径
|
||||||
|
timeout(float): 超时时间,单位秒(可选)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: 一个单次令牌
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
FileNotFoundError: 当路径不存在时抛出
|
||||||
|
|
||||||
|
"""
|
||||||
|
# 处理 file:///
|
||||||
|
try:
|
||||||
|
parsed_uri = urlparse(file_path)
|
||||||
|
if parsed_uri.scheme == "file":
|
||||||
|
local_path = unquote(parsed_uri.path)
|
||||||
|
if platform.system() == "Windows" and local_path.startswith("/"):
|
||||||
|
local_path = local_path[1:]
|
||||||
|
else:
|
||||||
|
# 如果没有 file:/// 前缀,则认为是普通路径
|
||||||
|
local_path = file_path
|
||||||
|
except Exception:
|
||||||
|
# 解析失败时,按原路径处理
|
||||||
|
local_path = file_path
|
||||||
|
|
||||||
|
async with self.lock:
|
||||||
|
await self._cleanup_expired_tokens()
|
||||||
|
|
||||||
|
if not os.path.exists(local_path):
|
||||||
|
raise FileNotFoundError(
|
||||||
|
f"文件不存在: {local_path} (原始输入: {file_path})",
|
||||||
|
)
|
||||||
|
|
||||||
|
file_token = str(uuid.uuid4())
|
||||||
|
expire_time = time.time() + (
|
||||||
|
timeout if timeout is not None else self.default_timeout
|
||||||
|
)
|
||||||
|
# 存储转换后的真实路径
|
||||||
|
self.staged_files[file_token] = (local_path, expire_time)
|
||||||
|
return file_token
|
||||||
|
|
||||||
|
async def handle_file(self, file_token: str) -> str:
|
||||||
|
"""根据令牌获取文件路径,使用后令牌失效。
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_token(str): 注册时返回的令牌
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: 文件路径
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
KeyError: 当令牌不存在或已过期时抛出
|
||||||
|
FileNotFoundError: 当文件本身已被删除时抛出
|
||||||
|
|
||||||
|
"""
|
||||||
|
async with self.lock:
|
||||||
|
await self._cleanup_expired_tokens()
|
||||||
|
|
||||||
|
if file_token not in self.staged_files:
|
||||||
|
raise KeyError(f"无效或过期的文件 token: {file_token}")
|
||||||
|
|
||||||
|
file_path, _ = self.staged_files.pop(file_token)
|
||||||
|
if not os.path.exists(file_path):
|
||||||
|
raise FileNotFoundError(f"文件不存在: {file_path}")
|
||||||
|
return file_path
|
||||||
57
astrbot/core/initial_loader.py
Normal file
57
astrbot/core/initial_loader.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
"""AstrBot 启动器,负责初始化和启动核心组件和仪表板服务器。
|
||||||
|
|
||||||
|
工作流程:
|
||||||
|
1. 初始化核心生命周期, 传递数据库和日志代理实例到核心生命周期
|
||||||
|
2. 运行核心生命周期任务和仪表板服务器
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from astrbot.core import LogBroker, logger
|
||||||
|
from astrbot.core.core_lifecycle import AstrBotCoreLifecycle
|
||||||
|
from astrbot.core.db import BaseDatabase
|
||||||
|
from astrbot.dashboard.server import AstrBotDashboard
|
||||||
|
|
||||||
|
|
||||||
|
class InitialLoader:
|
||||||
|
"""AstrBot 启动器,负责初始化和启动核心组件和仪表板服务器。"""
|
||||||
|
|
||||||
|
def __init__(self, db: BaseDatabase, log_broker: LogBroker):
|
||||||
|
self.db = db
|
||||||
|
self.logger = logger
|
||||||
|
self.log_broker = log_broker
|
||||||
|
self.webui_dir: str | None = None
|
||||||
|
|
||||||
|
async def start(self):
|
||||||
|
core_lifecycle = AstrBotCoreLifecycle(self.log_broker, self.db)
|
||||||
|
|
||||||
|
try:
|
||||||
|
await core_lifecycle.initialize()
|
||||||
|
except Exception as e:
|
||||||
|
logger.critical(traceback.format_exc())
|
||||||
|
logger.critical(f"😭 初始化 AstrBot 失败:{e} !!!")
|
||||||
|
return
|
||||||
|
|
||||||
|
core_task = core_lifecycle.start()
|
||||||
|
|
||||||
|
webui_dir = self.webui_dir
|
||||||
|
|
||||||
|
self.dashboard_server = AstrBotDashboard(
|
||||||
|
core_lifecycle,
|
||||||
|
self.db,
|
||||||
|
core_lifecycle.dashboard_shutdown_event,
|
||||||
|
webui_dir,
|
||||||
|
)
|
||||||
|
|
||||||
|
coro = self.dashboard_server.run()
|
||||||
|
if coro:
|
||||||
|
# 启动核心任务和仪表板服务器
|
||||||
|
task = asyncio.gather(core_task, coro)
|
||||||
|
else:
|
||||||
|
task = core_task
|
||||||
|
try:
|
||||||
|
await task # 整个AstrBot在这里运行
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
logger.info("🌈 正在关闭 AstrBot...")
|
||||||
|
await core_lifecycle.stop()
|
||||||
9
astrbot/core/knowledge_base/chunking/__init__.py
Normal file
9
astrbot/core/knowledge_base/chunking/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
"""文档分块模块"""
|
||||||
|
|
||||||
|
from .base import BaseChunker
|
||||||
|
from .fixed_size import FixedSizeChunker
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"BaseChunker",
|
||||||
|
"FixedSizeChunker",
|
||||||
|
]
|
||||||
25
astrbot/core/knowledge_base/chunking/base.py
Normal file
25
astrbot/core/knowledge_base/chunking/base.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
"""文档分块器基类
|
||||||
|
|
||||||
|
定义了文档分块处理的抽象接口。
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
|
||||||
|
class BaseChunker(ABC):
|
||||||
|
"""分块器基类
|
||||||
|
|
||||||
|
所有分块器都应该继承此类并实现 chunk 方法。
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def chunk(self, text: str, **kwargs) -> list[str]:
|
||||||
|
"""将文本分块
|
||||||
|
|
||||||
|
Args:
|
||||||
|
text: 输入文本
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[str]: 分块后的文本列表
|
||||||
|
|
||||||
|
"""
|
||||||
59
astrbot/core/knowledge_base/chunking/fixed_size.py
Normal file
59
astrbot/core/knowledge_base/chunking/fixed_size.py
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
"""固定大小分块器
|
||||||
|
|
||||||
|
按照固定的字符数将文本分块,支持重叠区域。
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .base import BaseChunker
|
||||||
|
|
||||||
|
|
||||||
|
class FixedSizeChunker(BaseChunker):
|
||||||
|
"""固定大小分块器
|
||||||
|
|
||||||
|
按照固定的字符数分块,并支持块之间的重叠。
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, chunk_size: int = 512, chunk_overlap: int = 50):
|
||||||
|
"""初始化分块器
|
||||||
|
|
||||||
|
Args:
|
||||||
|
chunk_size: 块的大小(字符数)
|
||||||
|
chunk_overlap: 块之间的重叠字符数
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.chunk_size = chunk_size
|
||||||
|
self.chunk_overlap = chunk_overlap
|
||||||
|
|
||||||
|
async def chunk(self, text: str, **kwargs) -> list[str]:
|
||||||
|
"""固定大小分块
|
||||||
|
|
||||||
|
Args:
|
||||||
|
text: 输入文本
|
||||||
|
chunk_size: 每个文本块的最大大小
|
||||||
|
chunk_overlap: 每个文本块之间的重叠部分大小
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[str]: 分块后的文本列表
|
||||||
|
|
||||||
|
"""
|
||||||
|
chunk_size = kwargs.get("chunk_size", self.chunk_size)
|
||||||
|
chunk_overlap = kwargs.get("chunk_overlap", self.chunk_overlap)
|
||||||
|
|
||||||
|
chunks = []
|
||||||
|
start = 0
|
||||||
|
text_len = len(text)
|
||||||
|
|
||||||
|
while start < text_len:
|
||||||
|
end = start + chunk_size
|
||||||
|
chunk = text[start:end]
|
||||||
|
|
||||||
|
if chunk:
|
||||||
|
chunks.append(chunk)
|
||||||
|
|
||||||
|
# 移动窗口,保留重叠部分
|
||||||
|
start = end - chunk_overlap
|
||||||
|
|
||||||
|
# 防止无限循环: 如果重叠过大,直接移到end
|
||||||
|
if start >= end or chunk_overlap >= chunk_size:
|
||||||
|
start = end
|
||||||
|
|
||||||
|
return chunks
|
||||||
161
astrbot/core/knowledge_base/chunking/recursive.py
Normal file
161
astrbot/core/knowledge_base/chunking/recursive.py
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
from collections.abc import Callable
|
||||||
|
|
||||||
|
from .base import BaseChunker
|
||||||
|
|
||||||
|
|
||||||
|
class RecursiveCharacterChunker(BaseChunker):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
chunk_size: int = 500,
|
||||||
|
chunk_overlap: int = 100,
|
||||||
|
length_function: Callable[[str], int] = len,
|
||||||
|
is_separator_regex: bool = False,
|
||||||
|
separators: list[str] | None = None,
|
||||||
|
):
|
||||||
|
"""初始化递归字符文本分割器
|
||||||
|
|
||||||
|
Args:
|
||||||
|
chunk_size: 每个文本块的最大大小
|
||||||
|
chunk_overlap: 每个文本块之间的重叠部分大小
|
||||||
|
length_function: 计算文本长度的函数
|
||||||
|
is_separator_regex: 分隔符是否为正则表达式
|
||||||
|
separators: 用于分割文本的分隔符列表,按优先级排序
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.chunk_size = chunk_size
|
||||||
|
self.chunk_overlap = chunk_overlap
|
||||||
|
self.length_function = length_function
|
||||||
|
self.is_separator_regex = is_separator_regex
|
||||||
|
|
||||||
|
# 默认分隔符列表,按优先级从高到低
|
||||||
|
self.separators = separators or [
|
||||||
|
"\n\n", # 段落
|
||||||
|
"\n", # 换行
|
||||||
|
"。", # 中文句子
|
||||||
|
",", # 中文逗号
|
||||||
|
". ", # 句子
|
||||||
|
", ", # 逗号分隔
|
||||||
|
" ", # 单词
|
||||||
|
"", # 字符
|
||||||
|
]
|
||||||
|
|
||||||
|
async def chunk(self, text: str, **kwargs) -> list[str]:
|
||||||
|
"""递归地将文本分割成块
|
||||||
|
|
||||||
|
Args:
|
||||||
|
text: 要分割的文本
|
||||||
|
chunk_size: 每个文本块的最大大小
|
||||||
|
chunk_overlap: 每个文本块之间的重叠部分大小
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
分割后的文本块列表
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not text:
|
||||||
|
return []
|
||||||
|
|
||||||
|
overlap = kwargs.get("chunk_overlap", self.chunk_overlap)
|
||||||
|
chunk_size = kwargs.get("chunk_size", self.chunk_size)
|
||||||
|
|
||||||
|
text_length = self.length_function(text)
|
||||||
|
if text_length <= chunk_size:
|
||||||
|
return [text]
|
||||||
|
|
||||||
|
for separator in self.separators:
|
||||||
|
if separator == "":
|
||||||
|
return self._split_by_character(text, chunk_size, overlap)
|
||||||
|
|
||||||
|
if separator in text:
|
||||||
|
splits = text.split(separator)
|
||||||
|
# 重新添加分隔符(除了最后一个片段)
|
||||||
|
splits = [s + separator for s in splits[:-1]] + [splits[-1]]
|
||||||
|
splits = [s for s in splits if s]
|
||||||
|
if len(splits) == 1:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# 递归合并分割后的文本块
|
||||||
|
final_chunks = []
|
||||||
|
current_chunk = []
|
||||||
|
current_chunk_length = 0
|
||||||
|
|
||||||
|
for split in splits:
|
||||||
|
split_length = self.length_function(split)
|
||||||
|
|
||||||
|
# 如果单个分割部分已经超过了chunk_size,需要递归分割
|
||||||
|
if split_length > chunk_size:
|
||||||
|
# 先处理当前积累的块
|
||||||
|
if current_chunk:
|
||||||
|
combined_text = "".join(current_chunk)
|
||||||
|
final_chunks.extend(
|
||||||
|
await self.chunk(
|
||||||
|
combined_text,
|
||||||
|
chunk_size=chunk_size,
|
||||||
|
chunk_overlap=overlap,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
current_chunk = []
|
||||||
|
current_chunk_length = 0
|
||||||
|
|
||||||
|
# 递归分割过大的部分
|
||||||
|
final_chunks.extend(
|
||||||
|
await self.chunk(
|
||||||
|
split,
|
||||||
|
chunk_size=chunk_size,
|
||||||
|
chunk_overlap=overlap,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
# 如果添加这部分会使当前块超过chunk_size
|
||||||
|
elif current_chunk_length + split_length > chunk_size:
|
||||||
|
# 合并当前块并添加到结果中
|
||||||
|
combined_text = "".join(current_chunk)
|
||||||
|
final_chunks.append(combined_text)
|
||||||
|
|
||||||
|
# 处理重叠部分
|
||||||
|
overlap_start = max(0, len(combined_text) - overlap)
|
||||||
|
if overlap_start > 0:
|
||||||
|
overlap_text = combined_text[overlap_start:]
|
||||||
|
current_chunk = [overlap_text, split]
|
||||||
|
current_chunk_length = (
|
||||||
|
self.length_function(overlap_text) + split_length
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
current_chunk = [split]
|
||||||
|
current_chunk_length = split_length
|
||||||
|
else:
|
||||||
|
# 添加到当前块
|
||||||
|
current_chunk.append(split)
|
||||||
|
current_chunk_length += split_length
|
||||||
|
|
||||||
|
# 处理剩余的块
|
||||||
|
if current_chunk:
|
||||||
|
final_chunks.append("".join(current_chunk))
|
||||||
|
|
||||||
|
return final_chunks
|
||||||
|
|
||||||
|
return [text]
|
||||||
|
|
||||||
|
def _split_by_character(
|
||||||
|
self,
|
||||||
|
text: str,
|
||||||
|
chunk_size: int | None = None,
|
||||||
|
overlap: int | None = None,
|
||||||
|
) -> list[str]:
|
||||||
|
"""按字符级别分割文本
|
||||||
|
|
||||||
|
Args:
|
||||||
|
text: 要分割的文本
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
分割后的文本块列表
|
||||||
|
|
||||||
|
"""
|
||||||
|
chunk_size = chunk_size or self.chunk_size
|
||||||
|
overlap = overlap or self.chunk_overlap
|
||||||
|
result = []
|
||||||
|
for i in range(0, len(text), chunk_size - overlap):
|
||||||
|
end = min(i + chunk_size, len(text))
|
||||||
|
result.append(text[i:end])
|
||||||
|
if end == len(text):
|
||||||
|
break
|
||||||
|
|
||||||
|
return result
|
||||||
301
astrbot/core/knowledge_base/kb_db_sqlite.py
Normal file
301
astrbot/core/knowledge_base/kb_db_sqlite.py
Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
from contextlib import asynccontextmanager
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from sqlalchemy import delete, func, select, text, update
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
||||||
|
from sqlmodel import col, desc
|
||||||
|
|
||||||
|
from astrbot.core import logger
|
||||||
|
from astrbot.core.db.vec_db.faiss_impl import FaissVecDB
|
||||||
|
from astrbot.core.knowledge_base.models import (
|
||||||
|
BaseKBModel,
|
||||||
|
KBDocument,
|
||||||
|
KBMedia,
|
||||||
|
KnowledgeBase,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class KBSQLiteDatabase:
|
||||||
|
def __init__(self, db_path: str = "data/knowledge_base/kb.db") -> None:
|
||||||
|
"""初始化知识库数据库
|
||||||
|
|
||||||
|
Args:
|
||||||
|
db_path: 数据库文件路径, 默认为 data/knowledge_base/kb.db
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.db_path = db_path
|
||||||
|
self.DATABASE_URL = f"sqlite+aiosqlite:///{db_path}"
|
||||||
|
self.inited = False
|
||||||
|
|
||||||
|
# 确保目录存在
|
||||||
|
Path(db_path).parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
# 创建异步引擎
|
||||||
|
self.engine = create_async_engine(
|
||||||
|
self.DATABASE_URL,
|
||||||
|
echo=False,
|
||||||
|
pool_pre_ping=True,
|
||||||
|
pool_recycle=3600,
|
||||||
|
)
|
||||||
|
|
||||||
|
# 创建会话工厂
|
||||||
|
self.async_session = async_sessionmaker(
|
||||||
|
self.engine,
|
||||||
|
class_=AsyncSession,
|
||||||
|
expire_on_commit=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def get_db(self):
|
||||||
|
"""获取数据库会话
|
||||||
|
|
||||||
|
用法:
|
||||||
|
async with kb_db.get_db() as session:
|
||||||
|
# 执行数据库操作
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
"""
|
||||||
|
async with self.async_session() as session:
|
||||||
|
yield session
|
||||||
|
|
||||||
|
async def initialize(self) -> None:
|
||||||
|
"""初始化数据库,创建表并配置 SQLite 参数"""
|
||||||
|
async with self.engine.begin() as conn:
|
||||||
|
# 创建所有知识库相关表
|
||||||
|
await conn.run_sync(BaseKBModel.metadata.create_all)
|
||||||
|
|
||||||
|
# 配置 SQLite 性能优化参数
|
||||||
|
await conn.execute(text("PRAGMA journal_mode=WAL"))
|
||||||
|
await conn.execute(text("PRAGMA synchronous=NORMAL"))
|
||||||
|
await conn.execute(text("PRAGMA cache_size=20000"))
|
||||||
|
await conn.execute(text("PRAGMA temp_store=MEMORY"))
|
||||||
|
await conn.execute(text("PRAGMA mmap_size=134217728"))
|
||||||
|
await conn.execute(text("PRAGMA optimize"))
|
||||||
|
await conn.commit()
|
||||||
|
|
||||||
|
self.inited = True
|
||||||
|
|
||||||
|
async def migrate_to_v1(self) -> None:
|
||||||
|
"""执行知识库数据库 v1 迁移
|
||||||
|
|
||||||
|
创建所有必要的索引以优化查询性能
|
||||||
|
"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
session: AsyncSession
|
||||||
|
async with session.begin():
|
||||||
|
# 创建知识库表索引
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_kb_kb_id "
|
||||||
|
"ON knowledge_bases(kb_id)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_kb_name "
|
||||||
|
"ON knowledge_bases(kb_name)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_kb_created_at "
|
||||||
|
"ON knowledge_bases(created_at)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# 创建文档表索引
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_doc_doc_id "
|
||||||
|
"ON kb_documents(doc_id)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_doc_kb_id "
|
||||||
|
"ON kb_documents(kb_id)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_doc_name "
|
||||||
|
"ON kb_documents(doc_name)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_doc_type "
|
||||||
|
"ON kb_documents(file_type)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_doc_created_at "
|
||||||
|
"ON kb_documents(created_at)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# 创建多媒体表索引
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_media_media_id "
|
||||||
|
"ON kb_media(media_id)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_media_doc_id "
|
||||||
|
"ON kb_media(doc_id)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_media_kb_id ON kb_media(kb_id)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
await session.execute(
|
||||||
|
text(
|
||||||
|
"CREATE INDEX IF NOT EXISTS idx_media_type "
|
||||||
|
"ON kb_media(media_type)",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
await session.commit()
|
||||||
|
|
||||||
|
async def close(self) -> None:
|
||||||
|
"""关闭数据库连接"""
|
||||||
|
await self.engine.dispose()
|
||||||
|
logger.info(f"知识库数据库已关闭: {self.db_path}")
|
||||||
|
|
||||||
|
async def get_kb_by_id(self, kb_id: str) -> KnowledgeBase | None:
|
||||||
|
"""根据 ID 获取知识库"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = select(KnowledgeBase).where(col(KnowledgeBase.kb_id) == kb_id)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
|
||||||
|
async def get_kb_by_name(self, kb_name: str) -> KnowledgeBase | None:
|
||||||
|
"""根据名称获取知识库"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = select(KnowledgeBase).where(col(KnowledgeBase.kb_name) == kb_name)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
|
||||||
|
async def list_kbs(self, offset: int = 0, limit: int = 100) -> list[KnowledgeBase]:
|
||||||
|
"""列出所有知识库"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = (
|
||||||
|
select(KnowledgeBase)
|
||||||
|
.offset(offset)
|
||||||
|
.limit(limit)
|
||||||
|
.order_by(desc(KnowledgeBase.created_at))
|
||||||
|
)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
return list(result.scalars().all())
|
||||||
|
|
||||||
|
async def count_kbs(self) -> int:
|
||||||
|
"""统计知识库数量"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = select(func.count(col(KnowledgeBase.id)))
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
return result.scalar() or 0
|
||||||
|
|
||||||
|
# ===== 文档查询 =====
|
||||||
|
|
||||||
|
async def get_document_by_id(self, doc_id: str) -> KBDocument | None:
|
||||||
|
"""根据 ID 获取文档"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = select(KBDocument).where(col(KBDocument.doc_id) == doc_id)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
|
||||||
|
async def list_documents_by_kb(
|
||||||
|
self,
|
||||||
|
kb_id: str,
|
||||||
|
offset: int = 0,
|
||||||
|
limit: int = 100,
|
||||||
|
) -> list[KBDocument]:
|
||||||
|
"""列出知识库的所有文档"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = (
|
||||||
|
select(KBDocument)
|
||||||
|
.where(col(KBDocument.kb_id) == kb_id)
|
||||||
|
.offset(offset)
|
||||||
|
.limit(limit)
|
||||||
|
.order_by(desc(KBDocument.created_at))
|
||||||
|
)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
return list(result.scalars().all())
|
||||||
|
|
||||||
|
async def count_documents_by_kb(self, kb_id: str) -> int:
|
||||||
|
"""统计知识库的文档数量"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = select(func.count(col(KBDocument.id))).where(
|
||||||
|
col(KBDocument.kb_id) == kb_id,
|
||||||
|
)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
return result.scalar() or 0
|
||||||
|
|
||||||
|
async def get_document_with_metadata(self, doc_id: str) -> dict | None:
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = (
|
||||||
|
select(KBDocument, KnowledgeBase)
|
||||||
|
.join(KnowledgeBase, col(KBDocument.kb_id) == col(KnowledgeBase.kb_id))
|
||||||
|
.where(col(KBDocument.doc_id) == doc_id)
|
||||||
|
)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
row = result.first()
|
||||||
|
|
||||||
|
if not row:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return {
|
||||||
|
"document": row[0],
|
||||||
|
"knowledge_base": row[1],
|
||||||
|
}
|
||||||
|
|
||||||
|
async def delete_document_by_id(self, doc_id: str, vec_db: FaissVecDB):
|
||||||
|
"""删除单个文档及其相关数据"""
|
||||||
|
# 在知识库表中删除
|
||||||
|
async with self.get_db() as session, session.begin():
|
||||||
|
# 删除文档记录
|
||||||
|
delete_stmt = delete(KBDocument).where(col(KBDocument.doc_id) == doc_id)
|
||||||
|
await session.execute(delete_stmt)
|
||||||
|
await session.commit()
|
||||||
|
|
||||||
|
# 在 vec db 中删除相关向量
|
||||||
|
await vec_db.delete_documents(metadata_filters={"kb_doc_id": doc_id})
|
||||||
|
|
||||||
|
# ===== 多媒体查询 =====
|
||||||
|
|
||||||
|
async def list_media_by_doc(self, doc_id: str) -> list[KBMedia]:
|
||||||
|
"""列出文档的所有多媒体资源"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = select(KBMedia).where(col(KBMedia.doc_id) == doc_id)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
return list(result.scalars().all())
|
||||||
|
|
||||||
|
async def get_media_by_id(self, media_id: str) -> KBMedia | None:
|
||||||
|
"""根据 ID 获取多媒体资源"""
|
||||||
|
async with self.get_db() as session:
|
||||||
|
stmt = select(KBMedia).where(col(KBMedia.media_id) == media_id)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
|
||||||
|
async def update_kb_stats(self, kb_id: str, vec_db: FaissVecDB) -> None:
|
||||||
|
"""更新知识库统计信息"""
|
||||||
|
chunk_cnt = await vec_db.count_documents()
|
||||||
|
|
||||||
|
async with self.get_db() as session, session.begin():
|
||||||
|
update_stmt = (
|
||||||
|
update(KnowledgeBase)
|
||||||
|
.where(col(KnowledgeBase.kb_id) == kb_id)
|
||||||
|
.values(
|
||||||
|
doc_count=select(func.count(col(KBDocument.id)))
|
||||||
|
.where(col(KBDocument.kb_id) == kb_id)
|
||||||
|
.scalar_subquery(),
|
||||||
|
chunk_count=chunk_cnt,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
await session.execute(update_stmt)
|
||||||
|
await session.commit()
|
||||||
361
astrbot/core/knowledge_base/kb_helper.py
Normal file
361
astrbot/core/knowledge_base/kb_helper.py
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import aiofiles
|
||||||
|
|
||||||
|
from astrbot.core import logger
|
||||||
|
from astrbot.core.db.vec_db.base import BaseVecDB
|
||||||
|
from astrbot.core.db.vec_db.faiss_impl.vec_db import FaissVecDB
|
||||||
|
from astrbot.core.provider.manager import ProviderManager
|
||||||
|
from astrbot.core.provider.provider import EmbeddingProvider, RerankProvider
|
||||||
|
|
||||||
|
from .chunking.base import BaseChunker
|
||||||
|
from .kb_db_sqlite import KBSQLiteDatabase
|
||||||
|
from .models import KBDocument, KBMedia, KnowledgeBase
|
||||||
|
from .parsers.util import select_parser
|
||||||
|
|
||||||
|
|
||||||
|
class KBHelper:
|
||||||
|
vec_db: BaseVecDB
|
||||||
|
kb: KnowledgeBase
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
kb_db: KBSQLiteDatabase,
|
||||||
|
kb: KnowledgeBase,
|
||||||
|
provider_manager: ProviderManager,
|
||||||
|
kb_root_dir: str,
|
||||||
|
chunker: BaseChunker,
|
||||||
|
):
|
||||||
|
self.kb_db = kb_db
|
||||||
|
self.kb = kb
|
||||||
|
self.prov_mgr = provider_manager
|
||||||
|
self.kb_root_dir = kb_root_dir
|
||||||
|
self.chunker = chunker
|
||||||
|
|
||||||
|
self.kb_dir = Path(self.kb_root_dir) / self.kb.kb_id
|
||||||
|
self.kb_medias_dir = Path(self.kb_dir) / "medias" / self.kb.kb_id
|
||||||
|
self.kb_files_dir = Path(self.kb_dir) / "files" / self.kb.kb_id
|
||||||
|
|
||||||
|
self.kb_medias_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.kb_files_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
async def initialize(self):
|
||||||
|
await self._ensure_vec_db()
|
||||||
|
|
||||||
|
async def get_ep(self) -> EmbeddingProvider:
|
||||||
|
if not self.kb.embedding_provider_id:
|
||||||
|
raise ValueError(f"知识库 {self.kb.kb_name} 未配置 Embedding Provider")
|
||||||
|
ep: EmbeddingProvider = await self.prov_mgr.get_provider_by_id(
|
||||||
|
self.kb.embedding_provider_id,
|
||||||
|
) # type: ignore
|
||||||
|
if not ep:
|
||||||
|
raise ValueError(
|
||||||
|
f"无法找到 ID 为 {self.kb.embedding_provider_id} 的 Embedding Provider",
|
||||||
|
)
|
||||||
|
return ep
|
||||||
|
|
||||||
|
async def get_rp(self) -> RerankProvider | None:
|
||||||
|
if not self.kb.rerank_provider_id:
|
||||||
|
return None
|
||||||
|
rp: RerankProvider = await self.prov_mgr.get_provider_by_id(
|
||||||
|
self.kb.rerank_provider_id,
|
||||||
|
) # type: ignore
|
||||||
|
if not rp:
|
||||||
|
raise ValueError(
|
||||||
|
f"无法找到 ID 为 {self.kb.rerank_provider_id} 的 Rerank Provider",
|
||||||
|
)
|
||||||
|
return rp
|
||||||
|
|
||||||
|
async def _ensure_vec_db(self) -> FaissVecDB:
|
||||||
|
if not self.kb.embedding_provider_id:
|
||||||
|
raise ValueError(f"知识库 {self.kb.kb_name} 未配置 Embedding Provider")
|
||||||
|
|
||||||
|
ep = await self.get_ep()
|
||||||
|
rp = await self.get_rp()
|
||||||
|
|
||||||
|
vec_db = FaissVecDB(
|
||||||
|
doc_store_path=str(self.kb_dir / "doc.db"),
|
||||||
|
index_store_path=str(self.kb_dir / "index.faiss"),
|
||||||
|
embedding_provider=ep,
|
||||||
|
rerank_provider=rp,
|
||||||
|
)
|
||||||
|
await vec_db.initialize()
|
||||||
|
self.vec_db = vec_db
|
||||||
|
return vec_db
|
||||||
|
|
||||||
|
async def delete_vec_db(self):
|
||||||
|
"""删除知识库的向量数据库和所有相关文件"""
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
await self.terminate()
|
||||||
|
if self.kb_dir.exists():
|
||||||
|
shutil.rmtree(self.kb_dir)
|
||||||
|
|
||||||
|
async def terminate(self):
|
||||||
|
if self.vec_db:
|
||||||
|
await self.vec_db.close()
|
||||||
|
|
||||||
|
async def upload_document(
|
||||||
|
self,
|
||||||
|
file_name: str,
|
||||||
|
file_content: bytes,
|
||||||
|
file_type: str,
|
||||||
|
chunk_size: int = 512,
|
||||||
|
chunk_overlap: int = 50,
|
||||||
|
batch_size: int = 32,
|
||||||
|
tasks_limit: int = 3,
|
||||||
|
max_retries: int = 3,
|
||||||
|
progress_callback=None,
|
||||||
|
) -> KBDocument:
|
||||||
|
"""上传并处理文档(带原子性保证和失败清理)
|
||||||
|
|
||||||
|
流程:
|
||||||
|
1. 保存原始文件
|
||||||
|
2. 解析文档内容
|
||||||
|
3. 提取多媒体资源
|
||||||
|
4. 分块处理
|
||||||
|
5. 生成向量并存储
|
||||||
|
6. 保存元数据(事务)
|
||||||
|
7. 更新统计
|
||||||
|
|
||||||
|
Args:
|
||||||
|
progress_callback: 进度回调函数,接收参数 (stage, current, total)
|
||||||
|
- stage: 当前阶段 ('parsing', 'chunking', 'embedding')
|
||||||
|
- current: 当前进度
|
||||||
|
- total: 总数
|
||||||
|
|
||||||
|
"""
|
||||||
|
await self._ensure_vec_db()
|
||||||
|
doc_id = str(uuid.uuid4())
|
||||||
|
media_paths: list[Path] = []
|
||||||
|
|
||||||
|
# file_path = self.kb_files_dir / f"{doc_id}.{file_type}"
|
||||||
|
# async with aiofiles.open(file_path, "wb") as f:
|
||||||
|
# await f.write(file_content)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# 阶段1: 解析文档
|
||||||
|
if progress_callback:
|
||||||
|
await progress_callback("parsing", 0, 100)
|
||||||
|
|
||||||
|
parser = await select_parser(f".{file_type}")
|
||||||
|
parse_result = await parser.parse(file_content, file_name)
|
||||||
|
text_content = parse_result.text
|
||||||
|
media_items = parse_result.media
|
||||||
|
|
||||||
|
if progress_callback:
|
||||||
|
await progress_callback("parsing", 100, 100)
|
||||||
|
|
||||||
|
# 保存媒体文件
|
||||||
|
saved_media = []
|
||||||
|
for media_item in media_items:
|
||||||
|
media = await self._save_media(
|
||||||
|
doc_id=doc_id,
|
||||||
|
media_type=media_item.media_type,
|
||||||
|
file_name=media_item.file_name,
|
||||||
|
content=media_item.content,
|
||||||
|
mime_type=media_item.mime_type,
|
||||||
|
)
|
||||||
|
saved_media.append(media)
|
||||||
|
media_paths.append(Path(media.file_path))
|
||||||
|
|
||||||
|
# 阶段2: 分块
|
||||||
|
if progress_callback:
|
||||||
|
await progress_callback("chunking", 0, 100)
|
||||||
|
|
||||||
|
chunks_text = await self.chunker.chunk(
|
||||||
|
text_content,
|
||||||
|
chunk_size=chunk_size,
|
||||||
|
chunk_overlap=chunk_overlap,
|
||||||
|
)
|
||||||
|
contents = []
|
||||||
|
metadatas = []
|
||||||
|
for idx, chunk_text in enumerate(chunks_text):
|
||||||
|
contents.append(chunk_text)
|
||||||
|
metadatas.append(
|
||||||
|
{
|
||||||
|
"kb_id": self.kb.kb_id,
|
||||||
|
"kb_doc_id": doc_id,
|
||||||
|
"chunk_index": idx,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
if progress_callback:
|
||||||
|
await progress_callback("chunking", 100, 100)
|
||||||
|
|
||||||
|
# 阶段3: 生成向量(带进度回调)
|
||||||
|
async def embedding_progress_callback(current, total):
|
||||||
|
if progress_callback:
|
||||||
|
await progress_callback("embedding", current, total)
|
||||||
|
|
||||||
|
await self.vec_db.insert_batch(
|
||||||
|
contents=contents,
|
||||||
|
metadatas=metadatas,
|
||||||
|
batch_size=batch_size,
|
||||||
|
tasks_limit=tasks_limit,
|
||||||
|
max_retries=max_retries,
|
||||||
|
progress_callback=embedding_progress_callback,
|
||||||
|
)
|
||||||
|
|
||||||
|
# 保存文档的元数据
|
||||||
|
doc = KBDocument(
|
||||||
|
doc_id=doc_id,
|
||||||
|
kb_id=self.kb.kb_id,
|
||||||
|
doc_name=file_name,
|
||||||
|
file_type=file_type,
|
||||||
|
file_size=len(file_content),
|
||||||
|
# file_path=str(file_path),
|
||||||
|
file_path="",
|
||||||
|
chunk_count=len(chunks_text),
|
||||||
|
media_count=0,
|
||||||
|
)
|
||||||
|
async with self.kb_db.get_db() as session:
|
||||||
|
async with session.begin():
|
||||||
|
session.add(doc)
|
||||||
|
for media in saved_media:
|
||||||
|
session.add(media)
|
||||||
|
await session.commit()
|
||||||
|
|
||||||
|
await session.refresh(doc)
|
||||||
|
|
||||||
|
vec_db: FaissVecDB = self.vec_db # type: ignore
|
||||||
|
await self.kb_db.update_kb_stats(kb_id=self.kb.kb_id, vec_db=vec_db)
|
||||||
|
await self.refresh_kb()
|
||||||
|
await self.refresh_document(doc_id)
|
||||||
|
return doc
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"上传文档失败: {e}")
|
||||||
|
# if file_path.exists():
|
||||||
|
# file_path.unlink()
|
||||||
|
|
||||||
|
for media_path in media_paths:
|
||||||
|
try:
|
||||||
|
if media_path.exists():
|
||||||
|
media_path.unlink()
|
||||||
|
except Exception as me:
|
||||||
|
logger.warning(f"清理多媒体文件失败 {media_path}: {me}")
|
||||||
|
|
||||||
|
raise e
|
||||||
|
|
||||||
|
async def list_documents(
|
||||||
|
self,
|
||||||
|
offset: int = 0,
|
||||||
|
limit: int = 100,
|
||||||
|
) -> list[KBDocument]:
|
||||||
|
"""列出知识库的所有文档"""
|
||||||
|
docs = await self.kb_db.list_documents_by_kb(self.kb.kb_id, offset, limit)
|
||||||
|
return docs
|
||||||
|
|
||||||
|
async def get_document(self, doc_id: str) -> KBDocument | None:
|
||||||
|
"""获取单个文档"""
|
||||||
|
doc = await self.kb_db.get_document_by_id(doc_id)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
async def delete_document(self, doc_id: str):
|
||||||
|
"""删除单个文档及其相关数据"""
|
||||||
|
await self.kb_db.delete_document_by_id(
|
||||||
|
doc_id=doc_id,
|
||||||
|
vec_db=self.vec_db, # type: ignore
|
||||||
|
)
|
||||||
|
await self.kb_db.update_kb_stats(
|
||||||
|
kb_id=self.kb.kb_id,
|
||||||
|
vec_db=self.vec_db, # type: ignore
|
||||||
|
)
|
||||||
|
await self.refresh_kb()
|
||||||
|
|
||||||
|
async def delete_chunk(self, chunk_id: str, doc_id: str):
    """Delete a single text chunk and bring dependent metadata back in sync."""
    vector_store: FaissVecDB = self.vec_db  # type: ignore
    await vector_store.delete(chunk_id)
    # Keep KB-level statistics, the cached KB row and the owning
    # document's chunk count consistent with the vector store.
    await self.kb_db.update_kb_stats(
        kb_id=self.kb.kb_id,
        vec_db=self.vec_db,  # type: ignore
    )
    await self.refresh_kb()
    await self.refresh_document(doc_id)
|
||||||
|
|
||||||
|
async def refresh_kb(self):
    """Reload this helper's cached KnowledgeBase row from the database."""
    if not self.kb:
        return
    latest = await self.kb_db.get_kb_by_id(self.kb.kb_id)
    # Only replace the cached row when the lookup succeeded.
    if latest:
        self.kb = latest
|
||||||
|
|
||||||
|
async def refresh_document(self, doc_id: str) -> None:
    """Recompute and persist a document's chunk count.

    Raises:
        ValueError: If no document with ``doc_id`` exists.
    """
    doc = await self.get_document(doc_id)
    if not doc:
        raise ValueError(f"无法找到 ID 为 {doc_id} 的文档")
    doc.chunk_count = await self.get_chunk_count_by_doc_id(doc_id)
    # Persist the updated counter and re-read the row's DB state.
    async with self.kb_db.get_db() as session:
        async with session.begin():
            session.add(doc)
            await session.commit()
        await session.refresh(doc)
|
||||||
|
|
||||||
|
async def get_chunks_by_doc_id(
    self,
    doc_id: str,
    offset: int = 0,
    limit: int = 100,
) -> list[dict]:
    """Return all chunks of a document together with their metadata.

    Args:
        doc_id: ID of the owning document.
        offset: Number of chunks to skip (for pagination).
        limit: Maximum number of chunks to return.

    Returns:
        list[dict]: One dict per chunk with ids, index, content and length.
    """
    vector_store: FaissVecDB = self.vec_db  # type: ignore
    raw_chunks = await vector_store.document_storage.get_documents(
        metadata_filters={"kb_doc_id": doc_id},
        offset=offset,
        limit=limit,
    )

    formatted: list[dict] = []
    for raw in raw_chunks:
        # Metadata is stored as a JSON string alongside the vector record.
        meta = json.loads(raw["metadata"])
        text = raw["text"]
        formatted.append(
            {
                "chunk_id": raw["doc_id"],
                "doc_id": meta["kb_doc_id"],
                "kb_id": meta["kb_id"],
                "chunk_index": meta["chunk_index"],
                "content": text,
                "char_count": len(text),
            }
        )
    return formatted
|
||||||
|
|
||||||
|
async def get_chunk_count_by_doc_id(self, doc_id: str) -> int:
    """Count the vector-store chunks tagged with the given document ID."""
    vector_store: FaissVecDB = self.vec_db  # type: ignore
    return await vector_store.count_documents(metadata_filter={"kb_doc_id": doc_id})
|
||||||
|
|
||||||
|
async def _save_media(
    self,
    doc_id: str,
    media_type: str,
    file_name: str,
    content: bytes,
    mime_type: str,
) -> KBMedia:
    """Write one multimedia resource to disk and build its database record.

    The payload is stored under ``kb_medias_dir/<doc_id>/<media_id><ext>``.
    NOTE(review): the returned KBMedia row is not added to a session here —
    callers (e.g. the upload path) persist it themselves.

    Args:
        doc_id: ID of the owning document.
        media_type: Kind of media (e.g. "image").
        file_name: Original file name; its suffix decides the stored extension.
        content: Raw binary payload.
        mime_type: MIME type of the payload.

    Returns:
        KBMedia: An unsaved record describing the stored file.
    """
    media_id = str(uuid.uuid4())
    suffix = Path(file_name).suffix

    # Persist the payload in a per-document directory.
    target = self.kb_medias_dir / doc_id / f"{media_id}{suffix}"
    target.parent.mkdir(parents=True, exist_ok=True)
    async with aiofiles.open(target, "wb") as fp:
        await fp.write(content)

    return KBMedia(
        media_id=media_id,
        doc_id=doc_id,
        kb_id=self.kb.kb_id,
        media_type=media_type,
        file_name=file_name,
        file_path=str(target),
        file_size=len(content),
        mime_type=mime_type,
    )
|
||||||
286
astrbot/core/knowledge_base/kb_mgr.py
Normal file
286
astrbot/core/knowledge_base/kb_mgr.py
Normal file
@@ -0,0 +1,286 @@
|
|||||||
|
import traceback
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from astrbot.core import logger
|
||||||
|
from astrbot.core.provider.manager import ProviderManager
|
||||||
|
|
||||||
|
# from .chunking.fixed_size import FixedSizeChunker
|
||||||
|
from .chunking.recursive import RecursiveCharacterChunker
|
||||||
|
from .kb_db_sqlite import KBSQLiteDatabase
|
||||||
|
from .kb_helper import KBHelper
|
||||||
|
from .models import KnowledgeBase
|
||||||
|
from .retrieval.manager import RetrievalManager, RetrievalResult
|
||||||
|
from .retrieval.rank_fusion import RankFusion
|
||||||
|
from .retrieval.sparse_retriever import SparseRetriever
|
||||||
|
|
||||||
|
# Root directory under which all knowledge-base data (DB, files) is stored.
FILES_PATH = "data/knowledge_base"
# Shared SQLite metadata database used by every knowledge base.
DB_PATH = Path(FILES_PATH) / "kb.db"
"""Knowledge Base storage root directory"""
# Module-wide chunker instance shared by all KBHelper instances.
CHUNKER = RecursiveCharacterChunker()
|
||||||
|
|
||||||
|
|
||||||
|
class KnowledgeBaseManager:
    """Top-level manager for all knowledge bases.

    Owns the shared metadata database (``kb_db``), the retrieval pipeline
    (``retrieval_manager``) and one :class:`KBHelper` per knowledge base.

    NOTE(review): if :meth:`initialize` fails (e.g. missing optional deps),
    ``kb_db``/``retrieval_manager`` stay unset and later calls will raise
    ``AttributeError`` — confirm callers tolerate that.
    """

    kb_db: KBSQLiteDatabase
    retrieval_manager: RetrievalManager

    def __init__(
        self,
        provider_manager: ProviderManager,
    ):
        Path(DB_PATH).parent.mkdir(parents=True, exist_ok=True)
        self.provider_manager = provider_manager
        self._session_deleted_callback_registered = False

        # kb_id -> helper instance, one per loaded knowledge base.
        self.kb_insts: dict[str, KBHelper] = {}

    async def initialize(self):
        """Initialize the knowledge-base module (DB, retrieval, KB instances).

        Errors are logged rather than raised so the host application keeps
        running without the knowledge-base feature.
        """
        try:
            logger.info("正在初始化知识库模块...")

            # Initialize the metadata database.
            await self._init_kb_database()

            # Build the retrieval pipeline (sparse retrieval + rank fusion).
            sparse_retriever = SparseRetriever(self.kb_db)
            rank_fusion = RankFusion(self.kb_db)
            self.retrieval_manager = RetrievalManager(
                sparse_retriever=sparse_retriever,
                rank_fusion=rank_fusion,
                kb_db=self.kb_db,
            )
            await self.load_kbs()

        except ImportError as e:
            logger.error(f"知识库模块导入失败: {e}")
            logger.warning("请确保已安装所需依赖: pypdf, aiofiles, Pillow, rank-bm25")
        except Exception as e:
            logger.error(f"知识库模块初始化失败: {e}")
            logger.error(traceback.format_exc())

    async def _init_kb_database(self):
        """Create, initialize and migrate the shared KB metadata database."""
        self.kb_db = KBSQLiteDatabase(DB_PATH.as_posix())
        await self.kb_db.initialize()
        await self.kb_db.migrate_to_v1()
        logger.info(f"KnowledgeBase database initialized: {DB_PATH}")

    async def load_kbs(self):
        """Load a :class:`KBHelper` for every knowledge base in the database."""
        kb_records = await self.kb_db.list_kbs()
        for record in kb_records:
            kb_helper = KBHelper(
                kb_db=self.kb_db,
                kb=record,
                provider_manager=self.provider_manager,
                kb_root_dir=FILES_PATH,
                chunker=CHUNKER,
            )
            await kb_helper.initialize()
            self.kb_insts[record.kb_id] = kb_helper

    async def create_kb(
        self,
        kb_name: str,
        description: str | None = None,
        emoji: str | None = None,
        embedding_provider_id: str | None = None,
        rerank_provider_id: str | None = None,
        chunk_size: int | None = None,
        chunk_overlap: int | None = None,
        top_k_dense: int | None = None,
        top_k_sparse: int | None = None,
        top_m_final: int | None = None,
    ) -> KBHelper:
        """Create a new knowledge base and return its helper instance.

        All tuning parameters fall back to module defaults when ``None``.
        """
        kb = KnowledgeBase(
            kb_name=kb_name,
            description=description,
            emoji=emoji or "📚",
            embedding_provider_id=embedding_provider_id,
            rerank_provider_id=rerank_provider_id,
            chunk_size=chunk_size if chunk_size is not None else 512,
            chunk_overlap=chunk_overlap if chunk_overlap is not None else 50,
            top_k_dense=top_k_dense if top_k_dense is not None else 50,
            top_k_sparse=top_k_sparse if top_k_sparse is not None else 50,
            top_m_final=top_m_final if top_m_final is not None else 5,
        )
        async with self.kb_db.get_db() as session:
            session.add(kb)
            await session.commit()
            await session.refresh(kb)

        kb_helper = KBHelper(
            kb_db=self.kb_db,
            kb=kb,
            provider_manager=self.provider_manager,
            kb_root_dir=FILES_PATH,
            chunker=CHUNKER,
        )
        await kb_helper.initialize()
        self.kb_insts[kb.kb_id] = kb_helper
        return kb_helper

    async def get_kb(self, kb_id: str) -> KBHelper | None:
        """Return the helper for ``kb_id``, or ``None`` if it is not loaded."""
        if kb_id in self.kb_insts:
            return self.kb_insts[kb_id]
        # FIX: make the "not found" result explicit instead of falling off
        # the end of the function (consistent with get_kb_by_name).
        return None

    async def get_kb_by_name(self, kb_name: str) -> KBHelper | None:
        """Return the helper whose KB has the given name, or ``None``."""
        for kb_helper in self.kb_insts.values():
            if kb_helper.kb.kb_name == kb_name:
                return kb_helper
        return None

    async def delete_kb(self, kb_id: str) -> bool:
        """Delete a knowledge base (vector store + metadata row).

        Returns:
            bool: ``True`` if the KB existed and was deleted, else ``False``.
        """
        kb_helper = await self.get_kb(kb_id)
        if not kb_helper:
            return False

        await kb_helper.delete_vec_db()
        async with self.kb_db.get_db() as session:
            await session.delete(kb_helper.kb)
            await session.commit()

        self.kb_insts.pop(kb_id, None)
        return True

    async def list_kbs(self) -> list[KnowledgeBase]:
        """Return the KnowledgeBase rows of all loaded knowledge bases."""
        return [kb_helper.kb for kb_helper in self.kb_insts.values()]

    async def update_kb(
        self,
        kb_id: str,
        # FIX: the body guards `if kb_name is not None`, so the annotation
        # and default now reflect that kb_name is optional (backward
        # compatible — existing positional callers are unaffected).
        kb_name: str | None = None,
        description: str | None = None,
        emoji: str | None = None,
        embedding_provider_id: str | None = None,
        rerank_provider_id: str | None = None,
        chunk_size: int | None = None,
        chunk_overlap: int | None = None,
        top_k_dense: int | None = None,
        top_k_sparse: int | None = None,
        top_m_final: int | None = None,
    ) -> KBHelper | None:
        """Update a knowledge base's settings; ``None`` fields are left as-is.

        Exception: ``rerank_provider_id`` is always assigned, i.e. passing
        ``None`` clears it.

        Returns:
            KBHelper | None: The updated helper, or ``None`` when not found.
        """
        kb_helper = await self.get_kb(kb_id)
        if not kb_helper:
            return None

        kb = kb_helper.kb
        if kb_name is not None:
            kb.kb_name = kb_name
        if description is not None:
            kb.description = description
        if emoji is not None:
            kb.emoji = emoji
        if embedding_provider_id is not None:
            kb.embedding_provider_id = embedding_provider_id
        kb.rerank_provider_id = rerank_provider_id  # allowed to be set to None
        if chunk_size is not None:
            kb.chunk_size = chunk_size
        if chunk_overlap is not None:
            kb.chunk_overlap = chunk_overlap
        if top_k_dense is not None:
            kb.top_k_dense = top_k_dense
        if top_k_sparse is not None:
            kb.top_k_sparse = top_k_sparse
        if top_m_final is not None:
            kb.top_m_final = top_m_final
        async with self.kb_db.get_db() as session:
            session.add(kb)
            await session.commit()
            await session.refresh(kb)

        return kb_helper

    async def retrieve(
        self,
        query: str,
        kb_names: list[str],
        top_k_fusion: int = 20,
        top_m_final: int = 5,
    ) -> dict | None:
        """Retrieve relevant content from the named knowledge bases.

        Args:
            query: The search query.
            kb_names: Names of the knowledge bases to search; unknown names
                are silently skipped.
            top_k_fusion: Candidate pool size for rank fusion.
            top_m_final: Number of results to return after fusion.

        Returns:
            dict | None: ``{"context_text", "results"}``; ``{}`` when no
            named KB exists; ``None`` when retrieval produced no results.
        """
        kb_ids = []
        kb_id_helper_map = {}
        for kb_name in kb_names:
            if kb_helper := await self.get_kb_by_name(kb_name):
                kb_ids.append(kb_helper.kb.kb_id)
                kb_id_helper_map[kb_helper.kb.kb_id] = kb_helper

        if not kb_ids:
            return {}

        results = await self.retrieval_manager.retrieve(
            query=query,
            kb_ids=kb_ids,
            kb_id_helper_map=kb_id_helper_map,
            top_k_fusion=top_k_fusion,
            top_m_final=top_m_final,
        )
        if not results:
            return None

        context_text = self._format_context(results)

        results_dict = [
            {
                "chunk_id": r.chunk_id,
                "doc_id": r.doc_id,
                "kb_id": r.kb_id,
                "kb_name": r.kb_name,
                "doc_name": r.doc_name,
                "chunk_index": r.metadata.get("chunk_index", 0),
                "content": r.content,
                "score": r.score,
                "char_count": r.metadata.get("char_count", 0),
            }
            for r in results
        ]

        return {
            "context_text": context_text,
            "results": results_dict,
        }

    def _format_context(self, results: list[RetrievalResult]) -> str:
        """Format retrieval results into an LLM-ready knowledge context.

        Args:
            results: Retrieval results to render.

        Returns:
            str: Human-readable context text listing each hit's source,
            content and score.
        """
        lines = ["以下是相关的知识库内容,请参考这些信息回答用户的问题:\n"]

        for i, result in enumerate(results, 1):
            lines.append(f"【知识 {i}】")
            lines.append(f"来源: {result.kb_name} / {result.doc_name}")
            lines.append(f"内容: {result.content}")
            lines.append(f"相关度: {result.score:.2f}")
            lines.append("")

        return "\n".join(lines)

    async def terminate(self):
        """Shut down every KB instance and close the metadata database."""
        for kb_id, kb_helper in self.kb_insts.items():
            try:
                await kb_helper.terminate()
            except Exception as e:
                logger.error(f"关闭知识库 {kb_id} 失败: {e}")

        self.kb_insts.clear()

        # Close the metadata database (may be unset if initialize failed).
        if hasattr(self, "kb_db") and self.kb_db:
            try:
                await self.kb_db.close()
            except Exception as e:
                logger.error(f"关闭知识库元数据数据库失败: {e}")
|
||||||
120
astrbot/core/knowledge_base/models.py
Normal file
120
astrbot/core/knowledge_base/models.py
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
import uuid
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
from sqlmodel import Field, MetaData, SQLModel, Text, UniqueConstraint
|
||||||
|
|
||||||
|
|
||||||
|
class BaseKBModel(SQLModel, table=False):
    """Abstract base for all knowledge-base models.

    Carries its own SQLAlchemy ``MetaData`` so KB tables are registered
    separately from any other SQLModel metadata in the application.
    """

    metadata = MetaData()
|
||||||
|
|
||||||
|
|
||||||
|
class KnowledgeBase(BaseKBModel, table=True):
    """Knowledge base table.

    Stores a knowledge base's basic configuration and aggregate statistics.
    """

    __tablename__ = "knowledge_bases"  # type: ignore

    # Surrogate auto-increment primary key.
    id: int | None = Field(
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
        default=None,
    )
    # Public UUID used to reference the KB throughout the codebase.
    kb_id: str = Field(
        max_length=36,
        nullable=False,
        unique=True,
        default_factory=lambda: str(uuid.uuid4()),
        index=True,
    )
    kb_name: str = Field(max_length=100, nullable=False)
    description: str | None = Field(default=None, sa_type=Text)
    emoji: str | None = Field(default="📚", max_length=10)
    embedding_provider_id: str | None = Field(default=None, max_length=100)
    rerank_provider_id: str | None = Field(default=None, max_length=100)
    # Chunking configuration parameters
    chunk_size: int | None = Field(default=512, nullable=True)
    chunk_overlap: int | None = Field(default=50, nullable=True)
    # Retrieval configuration parameters
    top_k_dense: int | None = Field(default=50, nullable=True)
    top_k_sparse: int | None = Field(default=50, nullable=True)
    top_m_final: int | None = Field(default=5, nullable=True)
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        # BUGFIX: "onupdate" must be a callable. The original passed
        # datetime.now(timezone.utc), which is evaluated ONCE at import
        # time, so every UPDATE would write the same stale timestamp.
        # SQLAlchemy invokes a callable on each update instead.
        sa_column_kwargs={"onupdate": lambda: datetime.now(timezone.utc)},
    )
    # Aggregate counters maintained by update_kb_stats().
    doc_count: int = Field(default=0, nullable=False)
    chunk_count: int = Field(default=0, nullable=False)

    __table_args__ = (
        UniqueConstraint(
            "kb_name",
            name="uix_kb_name",
        ),
    )
|
||||||
|
|
||||||
|
|
||||||
|
class KBDocument(BaseKBModel, table=True):
    """Document table.

    Stores metadata for documents uploaded to a knowledge base.
    """

    __tablename__ = "kb_documents"  # type: ignore

    # Surrogate auto-increment primary key.
    id: int | None = Field(
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
        default=None,
    )
    # Public UUID used to reference the document.
    doc_id: str = Field(
        max_length=36,
        nullable=False,
        unique=True,
        default_factory=lambda: str(uuid.uuid4()),
        index=True,
    )
    # Owning knowledge base (no FK constraint; looked up by kb_id).
    kb_id: str = Field(max_length=36, nullable=False, index=True)
    doc_name: str = Field(max_length=255, nullable=False)
    file_type: str = Field(max_length=20, nullable=False)
    file_size: int = Field(nullable=False)
    file_path: str = Field(max_length=512, nullable=False)
    chunk_count: int = Field(default=0, nullable=False)
    media_count: int = Field(default=0, nullable=False)
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        # BUGFIX: "onupdate" must be a callable; the original evaluated
        # datetime.now(timezone.utc) once at import time, writing a fixed
        # stale timestamp on every subsequent UPDATE.
        sa_column_kwargs={"onupdate": lambda: datetime.now(timezone.utc)},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class KBMedia(BaseKBModel, table=True):
    """Multimedia resource table.

    Stores images, videos and other media extracted from documents.
    """

    __tablename__ = "kb_media"  # type: ignore

    # Surrogate auto-increment primary key.
    id: int | None = Field(
        primary_key=True,
        sa_column_kwargs={"autoincrement": True},
        default=None,
    )
    # Public UUID used to reference the media item.
    media_id: str = Field(
        max_length=36,
        nullable=False,
        unique=True,
        default_factory=lambda: str(uuid.uuid4()),
        index=True,
    )
    # Owning document / knowledge base (no FK constraints; plain lookups).
    doc_id: str = Field(max_length=36, nullable=False, index=True)
    kb_id: str = Field(max_length=36, nullable=False, index=True)
    media_type: str = Field(max_length=20, nullable=False)
    file_name: str = Field(max_length=255, nullable=False)
    file_path: str = Field(max_length=512, nullable=False)
    file_size: int = Field(nullable=False)
    mime_type: str = Field(max_length=100, nullable=False)
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||||
13
astrbot/core/knowledge_base/parsers/__init__.py
Normal file
13
astrbot/core/knowledge_base/parsers/__init__.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
"""Document parser package: re-exports the parser base types and built-ins."""

from .base import BaseParser, MediaItem, ParseResult
from .pdf_parser import PDFParser
from .text_parser import TextParser

__all__ = [
    "BaseParser",
    "MediaItem",
    "PDFParser",
    "ParseResult",
    "TextParser",
]
|
||||||
51
astrbot/core/knowledge_base/parsers/base.py
Normal file
51
astrbot/core/knowledge_base/parsers/base.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
"""文档解析器基类和数据结构
|
||||||
|
|
||||||
|
定义了文档解析器的抽象接口和相关数据类。
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class MediaItem:
    """A multimedia resource extracted from a document."""

    media_type: str  # kind of media: "image" or "video"
    file_name: str  # file name for the resource
    content: bytes  # raw binary payload
    mime_type: str  # MIME type, e.g. "image/png"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ParseResult:
    """Result of parsing a document.

    Holds the extracted text plus any multimedia resources found.
    """

    text: str  # extracted text content
    media: list[MediaItem]  # extracted multimedia resources (may be empty)
|
||||||
|
|
||||||
|
|
||||||
|
class BaseParser(ABC):
    """Abstract base class for document parsers.

    Every concrete parser must subclass this and implement :meth:`parse`.
    """

    @abstractmethod
    async def parse(self, file_content: bytes, file_name: str) -> ParseResult:
        """Parse a document.

        Args:
            file_content: Raw bytes of the file.
            file_name: File name (implementations may use it, e.g. for
                extension detection).

        Returns:
            ParseResult: The extracted text and media.

        """
|
||||||
26
astrbot/core/knowledge_base/parsers/markitdown_parser.py
Normal file
26
astrbot/core/knowledge_base/parsers/markitdown_parser.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import io
|
||||||
|
import os
|
||||||
|
|
||||||
|
from markitdown_no_magika import MarkItDown, StreamInfo
|
||||||
|
|
||||||
|
from astrbot.core.knowledge_base.parsers.base import (
|
||||||
|
BaseParser,
|
||||||
|
ParseResult,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MarkitdownParser(BaseParser):
    """Parser for docx, xls and xlsx files, backed by MarkItDown."""

    async def parse(self, file_content: bytes, file_name: str) -> ParseResult:
        """Convert the document to Markdown text; media is not extracted."""
        converter = MarkItDown(enable_plugins=False)
        # StreamInfo lets MarkItDown pick the right converter without
        # content sniffing (extension + name only).
        info = StreamInfo(
            extension=os.path.splitext(file_name)[1].lower(),
            filename=file_name,
        )
        converted = converter.convert(io.BytesIO(file_content), stream_info=info)
        return ParseResult(
            text=converted.markdown,
            media=[],
        )
|
||||||
101
astrbot/core/knowledge_base/parsers/pdf_parser.py
Normal file
101
astrbot/core/knowledge_base/parsers/pdf_parser.py
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
"""PDF 文件解析器
|
||||||
|
|
||||||
|
支持解析 PDF 文件中的文本和图片资源。
|
||||||
|
"""
|
||||||
|
|
||||||
|
import io
|
||||||
|
|
||||||
|
from pypdf import PdfReader
|
||||||
|
|
||||||
|
from astrbot.core.knowledge_base.parsers.base import (
|
||||||
|
BaseParser,
|
||||||
|
MediaItem,
|
||||||
|
ParseResult,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PDFParser(BaseParser):
    """PDF document parser.

    Extracts the text of every page and any embedded image XObjects found
    in each page's resource dictionary.
    """

    async def parse(self, file_content: bytes, file_name: str) -> ParseResult:
        """Parse a PDF file.

        Args:
            file_content: Raw bytes of the PDF.
            file_name: File name (unused; kept for interface compatibility).

        Returns:
            ParseResult: Page texts joined by blank lines, plus extracted
            images named ``page_<n>_img_<k>.<ext>``.

        """
        pdf_file = io.BytesIO(file_content)
        reader = PdfReader(pdf_file)

        text_parts = []
        media_items = []

        # Extract text page by page; pages with no extractable text are skipped.
        for page in reader.pages:
            text = page.extract_text()
            if text:
                text_parts.append(text)

        # Extract embedded images.
        image_counter = 0
        for page_num, page in enumerate(reader.pages):
            try:
                # Defensive checks: not every page has Resources/XObjects.
                if "/Resources" not in page:
                    continue

                resources = page["/Resources"]
                if not resources or "/XObject" not in resources:  # type: ignore
                    continue

                xobjects = resources["/XObject"].get_object()  # type: ignore
                if not xobjects:
                    continue

                for obj_name in xobjects:
                    try:
                        obj = xobjects[obj_name]

                        if obj.get("/Subtype") != "/Image":
                            continue

                        # Extract the raw image data.
                        image_data = obj.get_data()

                        # Determine the output format. FIX: per the PDF spec,
                        # /Filter may be a single name OR an array of names;
                        # the original compared the array directly and thus
                        # misclassified JPEG streams as PNG. Use the first
                        # filter in the array case.
                        filter_type = obj.get("/Filter", "")
                        if isinstance(filter_type, (list, tuple)):
                            filter_type = filter_type[0] if filter_type else ""
                        if filter_type == "/DCTDecode":
                            ext = "jpg"
                            mime_type = "image/jpeg"
                        else:
                            # /FlateDecode and any other filter: store as PNG
                            # (matches original behavior for both branches).
                            ext = "png"
                            mime_type = "image/png"

                        image_counter += 1
                        media_items.append(
                            MediaItem(
                                media_type="image",
                                file_name=f"page_{page_num}_img_{image_counter}.{ext}",
                                content=image_data,
                                mime_type=mime_type,
                            ),
                        )
                    except Exception:
                        # A single failed image must not abort the page.
                        continue
            except Exception:
                # A single failed page must not abort the document.
                continue

        full_text = "\n\n".join(text_parts)
        return ParseResult(text=full_text, media=media_items)
|
||||||
42
astrbot/core/knowledge_base/parsers/text_parser.py
Normal file
42
astrbot/core/knowledge_base/parsers/text_parser.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
"""文本文件解析器
|
||||||
|
|
||||||
|
支持解析 TXT 和 Markdown 文件。
|
||||||
|
"""
|
||||||
|
|
||||||
|
from astrbot.core.knowledge_base.parsers.base import BaseParser, ParseResult
|
||||||
|
|
||||||
|
|
||||||
|
class TextParser(BaseParser):
    """TXT/MD text parser.

    Tries several character encodings so both UTF-8 and common Chinese
    encodings are supported.
    """

    async def parse(self, file_content: bytes, file_name: str) -> ParseResult:
        """Parse a text file.

        Tries a list of encodings in order and uses the first that decodes
        without error.

        Args:
            file_content: Raw bytes of the file.
            file_name: File name (used only in the error message).

        Returns:
            ParseResult: The decoded text; text files carry no media.

        Raises:
            ValueError: If no supported encoding can decode the file.

        """
        # FIX: try "utf-8-sig" (instead of plain "utf-8") first so that a
        # UTF-8 byte-order mark, when present, is stripped rather than
        # leaking a U+FEFF into the extracted text. For BOM-less input
        # "utf-8-sig" decodes exactly like "utf-8", so behavior for plain
        # UTF-8 files is unchanged.
        for encoding in ["utf-8-sig", "gbk", "gb2312", "gb18030"]:
            try:
                text = file_content.decode(encoding)
                break
            except UnicodeDecodeError:
                continue
        else:
            raise ValueError(f"无法解码文件: {file_name}")

        # Text files contain no multimedia resources.
        return ParseResult(text=text, media=[])
|
||||||
13
astrbot/core/knowledge_base/parsers/util.py
Normal file
13
astrbot/core/knowledge_base/parsers/util.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from .base import BaseParser
|
||||||
|
|
||||||
|
|
||||||
|
async def select_parser(ext: str) -> BaseParser:
    """Pick a parser implementation for a file extension.

    Args:
        ext: File extension including the leading dot, e.g. ".pdf".
            FIX: matching is now case-insensitive, so ".PDF"/".MD" are
            accepted as well (previously they raised ValueError).

    Returns:
        BaseParser: A parser instance able to handle the extension.

    Raises:
        ValueError: If the extension is not supported.
    """
    normalized = ext.lower()
    if normalized in {".md", ".txt", ".markdown", ".xlsx", ".docx", ".xls"}:
        from .markitdown_parser import MarkitdownParser

        return MarkitdownParser()
    if normalized == ".pdf":
        from .pdf_parser import PDFParser

        return PDFParser()
    # Report the caller's original spelling in the error message.
    raise ValueError(f"暂时不支持的文件格式: {ext}")
|
||||||
14
astrbot/core/knowledge_base/retrieval/__init__.py
Normal file
14
astrbot/core/knowledge_base/retrieval/__init__.py
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
"""Retrieval package: re-exports the retrieval pipeline's public types."""

from .manager import RetrievalManager, RetrievalResult
from .rank_fusion import FusedResult, RankFusion
from .sparse_retriever import SparseResult, SparseRetriever

__all__ = [
    "FusedResult",
    "RankFusion",
    "RetrievalManager",
    "RetrievalResult",
    "SparseResult",
    "SparseRetriever",
]
|
||||||
767
astrbot/core/knowledge_base/retrieval/hit_stopwords.txt
Normal file
767
astrbot/core/knowledge_base/retrieval/hit_stopwords.txt
Normal file
@@ -0,0 +1,767 @@
|
|||||||
|
———
|
||||||
|
》),
|
||||||
|
)÷(1-
|
||||||
|
”,
|
||||||
|
)、
|
||||||
|
=(
|
||||||
|
:
|
||||||
|
→
|
||||||
|
℃
|
||||||
|
&
|
||||||
|
*
|
||||||
|
一一
|
||||||
|
~~~~
|
||||||
|
’
|
||||||
|
.
|
||||||
|
『
|
||||||
|
.一
|
||||||
|
./
|
||||||
|
--
|
||||||
|
』
|
||||||
|
=″
|
||||||
|
【
|
||||||
|
[*]
|
||||||
|
}>
|
||||||
|
[⑤]]
|
||||||
|
[①D]
|
||||||
|
c]
|
||||||
|
ng昉
|
||||||
|
*
|
||||||
|
//
|
||||||
|
[
|
||||||
|
]
|
||||||
|
[②e]
|
||||||
|
[②g]
|
||||||
|
={
|
||||||
|
}
|
||||||
|
,也
|
||||||
|
‘
|
||||||
|
A
|
||||||
|
[①⑥]
|
||||||
|
[②B]
|
||||||
|
[①a]
|
||||||
|
[④a]
|
||||||
|
[①③]
|
||||||
|
[③h]
|
||||||
|
③]
|
||||||
|
1.
|
||||||
|
--
|
||||||
|
[②b]
|
||||||
|
’‘
|
||||||
|
×××
|
||||||
|
[①⑧]
|
||||||
|
0:2
|
||||||
|
=[
|
||||||
|
[⑤b]
|
||||||
|
[②c]
|
||||||
|
[④b]
|
||||||
|
[②③]
|
||||||
|
[③a]
|
||||||
|
[④c]
|
||||||
|
[①⑤]
|
||||||
|
[①⑦]
|
||||||
|
[①g]
|
||||||
|
∈[
|
||||||
|
[①⑨]
|
||||||
|
[①④]
|
||||||
|
[①c]
|
||||||
|
[②f]
|
||||||
|
[②⑧]
|
||||||
|
[②①]
|
||||||
|
[①C]
|
||||||
|
[③c]
|
||||||
|
[③g]
|
||||||
|
[②⑤]
|
||||||
|
[②②]
|
||||||
|
一.
|
||||||
|
[①h]
|
||||||
|
.数
|
||||||
|
[]
|
||||||
|
[①B]
|
||||||
|
数/
|
||||||
|
[①i]
|
||||||
|
[③e]
|
||||||
|
[①①]
|
||||||
|
[④d]
|
||||||
|
[④e]
|
||||||
|
[③b]
|
||||||
|
[⑤a]
|
||||||
|
[①A]
|
||||||
|
[②⑧]
|
||||||
|
[②⑦]
|
||||||
|
[①d]
|
||||||
|
[②j]
|
||||||
|
〕〔
|
||||||
|
][
|
||||||
|
://
|
||||||
|
′∈
|
||||||
|
[②④
|
||||||
|
[⑤e]
|
||||||
|
12%
|
||||||
|
b]
|
||||||
|
...
|
||||||
|
...................
|
||||||
|
…………………………………………………③
|
||||||
|
ZXFITL
|
||||||
|
[③F]
|
||||||
|
」
|
||||||
|
[①o]
|
||||||
|
]∧′=[
|
||||||
|
∪φ∈
|
||||||
|
′|
|
||||||
|
{-
|
||||||
|
②c
|
||||||
|
}
|
||||||
|
[③①]
|
||||||
|
R.L.
|
||||||
|
[①E]
|
||||||
|
Ψ
|
||||||
|
-[*]-
|
||||||
|
↑
|
||||||
|
.日
|
||||||
|
[②d]
|
||||||
|
[②
|
||||||
|
[②⑦]
|
||||||
|
[②②]
|
||||||
|
[③e]
|
||||||
|
[①i]
|
||||||
|
[①B]
|
||||||
|
[①h]
|
||||||
|
[①d]
|
||||||
|
[①g]
|
||||||
|
[①②]
|
||||||
|
[②a]
|
||||||
|
f]
|
||||||
|
[⑩]
|
||||||
|
a]
|
||||||
|
[①e]
|
||||||
|
[②h]
|
||||||
|
[②⑥]
|
||||||
|
[③d]
|
||||||
|
[②⑩]
|
||||||
|
e]
|
||||||
|
〉
|
||||||
|
】
|
||||||
|
元/吨
|
||||||
|
[②⑩]
|
||||||
|
2.3%
|
||||||
|
5:0
|
||||||
|
[①]
|
||||||
|
::
|
||||||
|
[②]
|
||||||
|
[③]
|
||||||
|
[④]
|
||||||
|
[⑤]
|
||||||
|
[⑥]
|
||||||
|
[⑦]
|
||||||
|
[⑧]
|
||||||
|
[⑨]
|
||||||
|
……
|
||||||
|
——
|
||||||
|
?
|
||||||
|
、
|
||||||
|
。
|
||||||
|
“
|
||||||
|
”
|
||||||
|
《
|
||||||
|
》
|
||||||
|
!
|
||||||
|
,
|
||||||
|
:
|
||||||
|
;
|
||||||
|
?
|
||||||
|
.
|
||||||
|
,
|
||||||
|
.
|
||||||
|
'
|
||||||
|
?
|
||||||
|
·
|
||||||
|
———
|
||||||
|
──
|
||||||
|
?
|
||||||
|
—
|
||||||
|
<
|
||||||
|
>
|
||||||
|
(
|
||||||
|
)
|
||||||
|
〔
|
||||||
|
〕
|
||||||
|
[
|
||||||
|
]
|
||||||
|
(
|
||||||
|
)
|
||||||
|
-
|
||||||
|
+
|
||||||
|
~
|
||||||
|
×
|
||||||
|
/
|
||||||
|
/
|
||||||
|
①
|
||||||
|
②
|
||||||
|
③
|
||||||
|
④
|
||||||
|
⑤
|
||||||
|
⑥
|
||||||
|
⑦
|
||||||
|
⑧
|
||||||
|
⑨
|
||||||
|
⑩
|
||||||
|
Ⅲ
|
||||||
|
В
|
||||||
|
"
|
||||||
|
;
|
||||||
|
#
|
||||||
|
@
|
||||||
|
γ
|
||||||
|
μ
|
||||||
|
φ
|
||||||
|
φ.
|
||||||
|
×
|
||||||
|
Δ
|
||||||
|
■
|
||||||
|
▲
|
||||||
|
sub
|
||||||
|
exp
|
||||||
|
sup
|
||||||
|
sub
|
||||||
|
Lex
|
||||||
|
#
|
||||||
|
%
|
||||||
|
&
|
||||||
|
'
|
||||||
|
+
|
||||||
|
+ξ
|
||||||
|
++
|
||||||
|
-
|
||||||
|
-β
|
||||||
|
<
|
||||||
|
<±
|
||||||
|
<Δ
|
||||||
|
<λ
|
||||||
|
<φ
|
||||||
|
<<
|
||||||
|
=
|
||||||
|
=
|
||||||
|
=☆
|
||||||
|
=-
|
||||||
|
>
|
||||||
|
>λ
|
||||||
|
_
|
||||||
|
~±
|
||||||
|
~+
|
||||||
|
[⑤f]
|
||||||
|
[⑤d]
|
||||||
|
[②i]
|
||||||
|
≈
|
||||||
|
[②G]
|
||||||
|
[①f]
|
||||||
|
LI
|
||||||
|
㈧
|
||||||
|
[-
|
||||||
|
......
|
||||||
|
〉
|
||||||
|
[③⑩]
|
||||||
|
第二
|
||||||
|
一番
|
||||||
|
一直
|
||||||
|
一个
|
||||||
|
一些
|
||||||
|
许多
|
||||||
|
种
|
||||||
|
有的是
|
||||||
|
也就是说
|
||||||
|
末##末
|
||||||
|
啊
|
||||||
|
阿
|
||||||
|
哎
|
||||||
|
哎呀
|
||||||
|
哎哟
|
||||||
|
唉
|
||||||
|
俺
|
||||||
|
俺们
|
||||||
|
按
|
||||||
|
按照
|
||||||
|
吧
|
||||||
|
吧哒
|
||||||
|
把
|
||||||
|
罢了
|
||||||
|
被
|
||||||
|
本
|
||||||
|
本着
|
||||||
|
比
|
||||||
|
比方
|
||||||
|
比如
|
||||||
|
鄙人
|
||||||
|
彼
|
||||||
|
彼此
|
||||||
|
边
|
||||||
|
别
|
||||||
|
别的
|
||||||
|
别说
|
||||||
|
并
|
||||||
|
并且
|
||||||
|
不比
|
||||||
|
不成
|
||||||
|
不单
|
||||||
|
不但
|
||||||
|
不独
|
||||||
|
不管
|
||||||
|
不光
|
||||||
|
不过
|
||||||
|
不仅
|
||||||
|
不拘
|
||||||
|
不论
|
||||||
|
不怕
|
||||||
|
不然
|
||||||
|
不如
|
||||||
|
不特
|
||||||
|
不惟
|
||||||
|
不问
|
||||||
|
不只
|
||||||
|
朝
|
||||||
|
朝着
|
||||||
|
趁
|
||||||
|
趁着
|
||||||
|
乘
|
||||||
|
冲
|
||||||
|
除
|
||||||
|
除此之外
|
||||||
|
除非
|
||||||
|
除了
|
||||||
|
此
|
||||||
|
此间
|
||||||
|
此外
|
||||||
|
从
|
||||||
|
从而
|
||||||
|
打
|
||||||
|
待
|
||||||
|
但
|
||||||
|
但是
|
||||||
|
当
|
||||||
|
当着
|
||||||
|
到
|
||||||
|
得
|
||||||
|
的
|
||||||
|
的话
|
||||||
|
等
|
||||||
|
等等
|
||||||
|
地
|
||||||
|
第
|
||||||
|
叮咚
|
||||||
|
对
|
||||||
|
对于
|
||||||
|
多
|
||||||
|
多少
|
||||||
|
而
|
||||||
|
而况
|
||||||
|
而且
|
||||||
|
而是
|
||||||
|
而外
|
||||||
|
而言
|
||||||
|
而已
|
||||||
|
尔后
|
||||||
|
反过来
|
||||||
|
反过来说
|
||||||
|
反之
|
||||||
|
非但
|
||||||
|
非徒
|
||||||
|
否则
|
||||||
|
嘎
|
||||||
|
嘎登
|
||||||
|
该
|
||||||
|
赶
|
||||||
|
个
|
||||||
|
各
|
||||||
|
各个
|
||||||
|
各位
|
||||||
|
各种
|
||||||
|
各自
|
||||||
|
给
|
||||||
|
根据
|
||||||
|
跟
|
||||||
|
故
|
||||||
|
故此
|
||||||
|
固然
|
||||||
|
关于
|
||||||
|
管
|
||||||
|
归
|
||||||
|
果然
|
||||||
|
果真
|
||||||
|
过
|
||||||
|
哈
|
||||||
|
哈哈
|
||||||
|
呵
|
||||||
|
和
|
||||||
|
何
|
||||||
|
何处
|
||||||
|
何况
|
||||||
|
何时
|
||||||
|
嘿
|
||||||
|
哼
|
||||||
|
哼唷
|
||||||
|
呼哧
|
||||||
|
乎
|
||||||
|
哗
|
||||||
|
还是
|
||||||
|
还有
|
||||||
|
换句话说
|
||||||
|
换言之
|
||||||
|
或
|
||||||
|
或是
|
||||||
|
或者
|
||||||
|
极了
|
||||||
|
及
|
||||||
|
及其
|
||||||
|
及至
|
||||||
|
即
|
||||||
|
即便
|
||||||
|
即或
|
||||||
|
即令
|
||||||
|
即若
|
||||||
|
即使
|
||||||
|
几
|
||||||
|
几时
|
||||||
|
己
|
||||||
|
既
|
||||||
|
既然
|
||||||
|
既是
|
||||||
|
继而
|
||||||
|
加之
|
||||||
|
假如
|
||||||
|
假若
|
||||||
|
假使
|
||||||
|
鉴于
|
||||||
|
将
|
||||||
|
较
|
||||||
|
较之
|
||||||
|
叫
|
||||||
|
接着
|
||||||
|
结果
|
||||||
|
借
|
||||||
|
紧接着
|
||||||
|
进而
|
||||||
|
尽
|
||||||
|
尽管
|
||||||
|
经
|
||||||
|
经过
|
||||||
|
就
|
||||||
|
就是
|
||||||
|
就是说
|
||||||
|
据
|
||||||
|
具体地说
|
||||||
|
具体说来
|
||||||
|
开始
|
||||||
|
开外
|
||||||
|
靠
|
||||||
|
咳
|
||||||
|
可
|
||||||
|
可见
|
||||||
|
可是
|
||||||
|
可以
|
||||||
|
况且
|
||||||
|
啦
|
||||||
|
来
|
||||||
|
来着
|
||||||
|
离
|
||||||
|
例如
|
||||||
|
哩
|
||||||
|
连
|
||||||
|
连同
|
||||||
|
两者
|
||||||
|
了
|
||||||
|
临
|
||||||
|
另
|
||||||
|
另外
|
||||||
|
另一方面
|
||||||
|
论
|
||||||
|
嘛
|
||||||
|
吗
|
||||||
|
慢说
|
||||||
|
漫说
|
||||||
|
冒
|
||||||
|
么
|
||||||
|
每
|
||||||
|
每当
|
||||||
|
们
|
||||||
|
莫若
|
||||||
|
某
|
||||||
|
某个
|
||||||
|
某些
|
||||||
|
拿
|
||||||
|
哪
|
||||||
|
哪边
|
||||||
|
哪儿
|
||||||
|
哪个
|
||||||
|
哪里
|
||||||
|
哪年
|
||||||
|
哪怕
|
||||||
|
哪天
|
||||||
|
哪些
|
||||||
|
哪样
|
||||||
|
那
|
||||||
|
那边
|
||||||
|
那儿
|
||||||
|
那个
|
||||||
|
那会儿
|
||||||
|
那里
|
||||||
|
那么
|
||||||
|
那么些
|
||||||
|
那么样
|
||||||
|
那时
|
||||||
|
那些
|
||||||
|
那样
|
||||||
|
乃
|
||||||
|
乃至
|
||||||
|
呢
|
||||||
|
能
|
||||||
|
你
|
||||||
|
你们
|
||||||
|
您
|
||||||
|
宁
|
||||||
|
宁可
|
||||||
|
宁肯
|
||||||
|
宁愿
|
||||||
|
哦
|
||||||
|
呕
|
||||||
|
啪达
|
||||||
|
旁人
|
||||||
|
呸
|
||||||
|
凭
|
||||||
|
凭借
|
||||||
|
其
|
||||||
|
其次
|
||||||
|
其二
|
||||||
|
其他
|
||||||
|
其它
|
||||||
|
其一
|
||||||
|
其余
|
||||||
|
其中
|
||||||
|
起
|
||||||
|
起见
|
||||||
|
起见
|
||||||
|
岂但
|
||||||
|
恰恰相反
|
||||||
|
前后
|
||||||
|
前者
|
||||||
|
且
|
||||||
|
然而
|
||||||
|
然后
|
||||||
|
然则
|
||||||
|
让
|
||||||
|
人家
|
||||||
|
任
|
||||||
|
任何
|
||||||
|
任凭
|
||||||
|
如
|
||||||
|
如此
|
||||||
|
如果
|
||||||
|
如何
|
||||||
|
如其
|
||||||
|
如若
|
||||||
|
如上所述
|
||||||
|
若
|
||||||
|
若非
|
||||||
|
若是
|
||||||
|
啥
|
||||||
|
上下
|
||||||
|
尚且
|
||||||
|
设若
|
||||||
|
设使
|
||||||
|
甚而
|
||||||
|
甚么
|
||||||
|
甚至
|
||||||
|
省得
|
||||||
|
时候
|
||||||
|
什么
|
||||||
|
什么样
|
||||||
|
使得
|
||||||
|
是
|
||||||
|
是的
|
||||||
|
首先
|
||||||
|
谁
|
||||||
|
谁知
|
||||||
|
顺
|
||||||
|
顺着
|
||||||
|
似的
|
||||||
|
虽
|
||||||
|
虽然
|
||||||
|
虽说
|
||||||
|
虽则
|
||||||
|
随
|
||||||
|
随着
|
||||||
|
所
|
||||||
|
所以
|
||||||
|
他
|
||||||
|
他们
|
||||||
|
他人
|
||||||
|
它
|
||||||
|
它们
|
||||||
|
她
|
||||||
|
她们
|
||||||
|
倘
|
||||||
|
倘或
|
||||||
|
倘然
|
||||||
|
倘若
|
||||||
|
倘使
|
||||||
|
腾
|
||||||
|
替
|
||||||
|
通过
|
||||||
|
同
|
||||||
|
同时
|
||||||
|
哇
|
||||||
|
万一
|
||||||
|
往
|
||||||
|
望
|
||||||
|
为
|
||||||
|
为何
|
||||||
|
为了
|
||||||
|
为什么
|
||||||
|
为着
|
||||||
|
喂
|
||||||
|
嗡嗡
|
||||||
|
我
|
||||||
|
我们
|
||||||
|
呜
|
||||||
|
呜呼
|
||||||
|
乌乎
|
||||||
|
无论
|
||||||
|
无宁
|
||||||
|
毋宁
|
||||||
|
嘻
|
||||||
|
吓
|
||||||
|
相对而言
|
||||||
|
像
|
||||||
|
向
|
||||||
|
向着
|
||||||
|
嘘
|
||||||
|
呀
|
||||||
|
焉
|
||||||
|
沿
|
||||||
|
沿着
|
||||||
|
要
|
||||||
|
要不
|
||||||
|
要不然
|
||||||
|
要不是
|
||||||
|
要么
|
||||||
|
要是
|
||||||
|
也
|
||||||
|
也罢
|
||||||
|
也好
|
||||||
|
一
|
||||||
|
一般
|
||||||
|
一旦
|
||||||
|
一方面
|
||||||
|
一来
|
||||||
|
一切
|
||||||
|
一样
|
||||||
|
一则
|
||||||
|
依
|
||||||
|
依照
|
||||||
|
矣
|
||||||
|
以
|
||||||
|
以便
|
||||||
|
以及
|
||||||
|
以免
|
||||||
|
以至
|
||||||
|
以至于
|
||||||
|
以致
|
||||||
|
抑或
|
||||||
|
因
|
||||||
|
因此
|
||||||
|
因而
|
||||||
|
因为
|
||||||
|
哟
|
||||||
|
用
|
||||||
|
由
|
||||||
|
由此可见
|
||||||
|
由于
|
||||||
|
有
|
||||||
|
有的
|
||||||
|
有关
|
||||||
|
有些
|
||||||
|
又
|
||||||
|
于
|
||||||
|
于是
|
||||||
|
于是乎
|
||||||
|
与
|
||||||
|
与此同时
|
||||||
|
与否
|
||||||
|
与其
|
||||||
|
越是
|
||||||
|
云云
|
||||||
|
哉
|
||||||
|
再说
|
||||||
|
再者
|
||||||
|
在
|
||||||
|
在下
|
||||||
|
咱
|
||||||
|
咱们
|
||||||
|
则
|
||||||
|
怎
|
||||||
|
怎么
|
||||||
|
怎么办
|
||||||
|
怎么样
|
||||||
|
怎样
|
||||||
|
咋
|
||||||
|
照
|
||||||
|
照着
|
||||||
|
者
|
||||||
|
这
|
||||||
|
这边
|
||||||
|
这儿
|
||||||
|
这个
|
||||||
|
这会儿
|
||||||
|
这就是说
|
||||||
|
这里
|
||||||
|
这么
|
||||||
|
这么点儿
|
||||||
|
这么些
|
||||||
|
这么样
|
||||||
|
这时
|
||||||
|
这些
|
||||||
|
这样
|
||||||
|
正如
|
||||||
|
吱
|
||||||
|
之
|
||||||
|
之类
|
||||||
|
之所以
|
||||||
|
之一
|
||||||
|
只是
|
||||||
|
只限
|
||||||
|
只要
|
||||||
|
只有
|
||||||
|
至
|
||||||
|
至于
|
||||||
|
诸位
|
||||||
|
着
|
||||||
|
着呢
|
||||||
|
自
|
||||||
|
自从
|
||||||
|
自个儿
|
||||||
|
自各儿
|
||||||
|
自己
|
||||||
|
自家
|
||||||
|
自身
|
||||||
|
综上所述
|
||||||
|
总的来看
|
||||||
|
总的来说
|
||||||
|
总的说来
|
||||||
|
总而言之
|
||||||
|
总之
|
||||||
|
纵
|
||||||
|
纵令
|
||||||
|
纵然
|
||||||
|
纵使
|
||||||
|
遵照
|
||||||
|
作为
|
||||||
|
兮
|
||||||
|
呃
|
||||||
|
呗
|
||||||
|
咚
|
||||||
|
咦
|
||||||
|
喏
|
||||||
|
啐
|
||||||
|
喔唷
|
||||||
|
嗬
|
||||||
|
嗯
|
||||||
|
嗳
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user