Compare commits: v1.0.2...feat/varia (533 commits)
@@ -1,5 +0,0 @@
node_modules
dist
out
.gitignore
scripts/cloudflare-worker.js
@@ -1,22 +0,0 @@
module.exports = {
  plugins: ['unused-imports', 'simple-import-sort'],
  extends: [
    'eslint:recommended',
    'plugin:react/recommended',
    'plugin:react/jsx-runtime',
    'plugin:react-hooks/recommended',
    '@electron-toolkit/eslint-config-ts/recommended',
    '@electron-toolkit/eslint-config-prettier'
  ],
  rules: {
    '@typescript-eslint/explicit-function-return-type': 'off',
    '@typescript-eslint/no-explicit-any': 'off',
    'unused-imports/no-unused-imports': 'error',
    '@typescript-eslint/no-non-null-asserted-optional-chain': 'off',
    'react/prop-types': 'off',
    'simple-import-sort/imports': 'error',
    'simple-import-sort/exports': 'error',
    'react/no-is-mounted': 'off',
    'prettier/prettier': ['error', { endOfLine: 'auto' }]
  }
}
2  .gitattributes  vendored  Normal file
@@ -0,0 +1,2 @@
/.yarn/** linguist-vendored
/.yarn/releases/* binary
261  .github/workflows/nightly-build.yml  vendored  Normal file
@@ -0,0 +1,261 @@
name: Nightly Build

on:
  workflow_dispatch:
  schedule:
    - cron: '0 17 * * *' # 1:00 BJ Time

permissions:
  contents: write

jobs:
  nightly-build:
    runs-on: ${{ matrix.os }}

    strategy:
      matrix:
        os: [macos-latest, windows-latest, ubuntu-latest]
      fail-fast: false

    steps:
      - name: Check out Git repository
        uses: actions/checkout@v4

      - name: Install Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20

      - name: Install corepack
        run: corepack enable && corepack prepare yarn@4.6.0 --activate

      - name: Get yarn cache directory path
        id: yarn-cache-dir-path
        run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT

      - name: Cache yarn dependencies
        uses: actions/cache@v4
        with:
          path: |
            ${{ steps.yarn-cache-dir-path.outputs.dir }}
            node_modules
          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-yarn-

      - name: Install Dependencies
        run: yarn install

      - name: Generate date tag
        id: date
        run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT
        shell: bash

      - name: Build Linux
        if: matrix.os == 'ubuntu-latest'
        run: |
          yarn build:npm linux
          yarn build:linux
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}

      - name: Build Mac
        if: matrix.os == 'macos-latest'
        run: |
          yarn build:npm mac
          yarn build:mac
        env:
          CSC_LINK: ${{ secrets.CSC_LINK }}
          CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
          APPLE_ID: ${{ vars.APPLE_ID }}
          APPLE_APP_SPECIFIC_PASSWORD: ${{ vars.APPLE_APP_SPECIFIC_PASSWORD }}
          APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
          RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Windows
        if: matrix.os == 'windows-latest'
        run: yarn build:win
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}

      - name: Replace spaces in filenames
        run: node scripts/replace-spaces.js

      - name: Rename artifacts with nightly format
        shell: bash
        run: |
          mkdir -p renamed-artifacts
          DATE=${{ steps.date.outputs.date }}

          # Windows artifacts - based on actual file naming pattern
          if [ "${{ matrix.os }}" == "windows-latest" ]; then
            # Setup installer
            find dist -name "*setup.exe" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-setup.exe \;

            # Portable exe
            find dist -name "*portable.exe" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-portable.exe \;

            # Rename blockmap files to match the new exe names
            if [ -f "dist/*setup.exe.blockmap" ]; then
              cp dist/*setup.exe.blockmap renamed-artifacts/cherry-studio-nightly-${DATE}-setup.exe.blockmap || true
            fi
          fi

          # macOS artifacts
          if [ "${{ matrix.os }}" == "macos-latest" ]; then
            # 处理arm64架构文件
            find dist -name "*-arm64.dmg" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64.dmg \;
            find dist -name "*-arm64.dmg.blockmap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64.dmg.blockmap \;
            find dist -name "*-arm64.zip" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64.zip \;
            find dist -name "*-arm64.zip.blockmap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64.zip.blockmap \;

            # 处理x64架构文件
            find dist -name "*-x64.dmg" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64.dmg \;
            find dist -name "*-x64.dmg.blockmap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64.dmg.blockmap \;
            find dist -name "*-x64.zip" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64.zip \;
            find dist -name "*-x64.zip.blockmap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64.zip.blockmap \;
          fi

          # Linux artifacts
          if [ "${{ matrix.os }}" == "ubuntu-latest" ]; then
            find dist -name "*.AppImage" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.AppImage \;
            find dist -name "*.snap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.snap \;
            find dist -name "*.deb" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.deb \;
            find dist -name "*.rpm" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.rpm \;
            find dist -name "*.tar.gz" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.tar.gz \;
          fi

          # Copy update files
          cp dist/latest*.yml renamed-artifacts/ || true

      # Generate SHA256 checksums (Windows)
      - name: Generate SHA256 checksums (Windows)
        if: runner.os == 'Windows'
        shell: pwsh
        run: |
          cd renamed-artifacts
          echo "# SHA256 checksums for Windows - $(Get-Date -Format 'yyyy-MM-dd')" > SHA256SUMS.txt
          Get-ChildItem -File | Where-Object { $_.Name -ne 'SHA256SUMS.txt' } | ForEach-Object {
            $file = $_.Name
            $hash = (Get-FileHash -Algorithm SHA256 $file).Hash.ToLower()
            Add-Content -Path SHA256SUMS.txt -Value "$hash $file"
          }
          cat SHA256SUMS.txt

      # Generate SHA256 checksums (macOS/Linux)
      - name: Generate SHA256 checksums (macOS/Linux)
        if: runner.os != 'Windows'
        shell: bash
        run: |
          cd renamed-artifacts
          echo "# SHA256 checksums for ${{ runner.os }} - $(date +'%Y-%m-%d')" > SHA256SUMS.txt
          if command -v shasum &>/dev/null; then
            # macOS
            shasum -a 256 * 2>/dev/null | grep -v SHA256SUMS.txt >> SHA256SUMS.txt || echo "No files to hash" >> SHA256SUMS.txt
          else
            # Linux
            sha256sum * 2>/dev/null | grep -v SHA256SUMS.txt >> SHA256SUMS.txt || echo "No files to hash" >> SHA256SUMS.txt
          fi
          cat SHA256SUMS.txt

      - name: List files to be uploaded
        shell: bash
        run: |
          echo "准备上传的文件:"
          if [ -x "$(command -v tree)" ]; then
            tree renamed-artifacts
          elif [ "$RUNNER_OS" == "Windows" ]; then
            dir renamed-artifacts
          else
            ls -la renamed-artifacts
          fi
          echo "总计: $(find renamed-artifacts -type f | wc -l) 个文件"

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: cherry-studio-nightly-${{ steps.date.outputs.date }}-${{ matrix.os }}
          path: renamed-artifacts/*
          retention-days: 3 # 保留3天
          compression-level: 8

  Build-Summary:
    needs: nightly-build
    if: always()
    runs-on: ubuntu-latest
    steps:
      - name: Get date tag
        id: date
        run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT
        shell: bash

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: all-artifacts
          merge-multiple: false
        continue-on-error: true

      - name: Create summary report
        run: |
          echo "## ⚠️ 警告:这是每日构建版本" >> $GITHUB_STEP_SUMMARY
          echo "此版本为自动构建的不稳定版本,仅供测试使用。不建议在生产环境中使用。" >> $GITHUB_STEP_SUMMARY
          echo "安装此版本前请务必备份数据,并做好数据迁移准备。" >> $GITHUB_STEP_SUMMARY
          echo "构建日期:$(date +'%Y-%m-%d')" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          echo "## 📦 安装包校验和" >> $GITHUB_STEP_SUMMARY
          echo "请在下载后验证文件完整性。提供 SHA256 校验和。" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Check each platform's artifacts and show checksums if available

          # Windows
          WIN_ARTIFACT_DIR="all-artifacts/cherry-studio-nightly-${{ steps.date.outputs.date }}-windows-latest"
          if [ -d "$WIN_ARTIFACT_DIR" ] && [ -f "$WIN_ARTIFACT_DIR/SHA256SUMS.txt" ]; then
            echo "### Windows 安装包" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            cat "$WIN_ARTIFACT_DIR/SHA256SUMS.txt" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
          else
            echo "### Windows 安装包" >> $GITHUB_STEP_SUMMARY
            echo "❌ Windows 构建未成功完成或未生成校验和。" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
          fi

          # macOS
          MAC_ARTIFACT_DIR="all-artifacts/cherry-studio-nightly-${{ steps.date.outputs.date }}-macos-latest"
          if [ -d "$MAC_ARTIFACT_DIR" ] && [ -f "$MAC_ARTIFACT_DIR/SHA256SUMS.txt" ]; then
            echo "### macOS 安装包" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            cat "$MAC_ARTIFACT_DIR/SHA256SUMS.txt" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
          else
            echo "### macOS 安装包" >> $GITHUB_STEP_SUMMARY
            echo "❌ macOS 构建未成功完成或未生成校验和。" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
          fi

          # Linux
          LINUX_ARTIFACT_DIR="all-artifacts/cherry-studio-nightly-${{ steps.date.outputs.date }}-ubuntu-latest"
          if [ -d "$LINUX_ARTIFACT_DIR" ] && [ -f "$LINUX_ARTIFACT_DIR/SHA256SUMS.txt" ]; then
            echo "### Linux 安装包" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            cat "$LINUX_ARTIFACT_DIR/SHA256SUMS.txt" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
          else
            echo "### Linux 安装包" >> $GITHUB_STEP_SUMMARY
            echo "❌ Linux 构建未成功完成或未生成校验和。" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
          fi

          echo "## ⚠️ Warning: This is a nightly build version" >> $GITHUB_STEP_SUMMARY
          echo "This version is an unstable version built automatically and is only for testing. It is not recommended to use it in a production environment." >> $GITHUB_STEP_SUMMARY
          echo "Please backup your data before installing this version and prepare for data migration." >> $GITHUB_STEP_SUMMARY
          echo "Build date: $(date +'%Y-%m-%d')" >> $GITHUB_STEP_SUMMARY
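The summary step above points users at SHA256SUMS.txt for integrity checks. A minimal Node.js sketch of that local verification on a downloaded artifact follows; the script name, usage, and file paths are illustrative, and the only assumption about the checksum file is the `<hash> <filename>` line format the workflow writes.

```js
// verify-nightly.js — illustrative sketch: recompute a file's SHA256 and compare it
// against the SHA256SUMS.txt produced by the nightly workflow.
// Usage (hypothetical): node verify-nightly.js <artifact-file> <SHA256SUMS.txt>
import { createHash } from 'node:crypto'
import { createReadStream, readFileSync } from 'node:fs'
import { basename } from 'node:path'

async function sha256(file) {
  const hash = createHash('sha256')
  for await (const chunk of createReadStream(file)) hash.update(chunk)
  return hash.digest('hex')
}

const [artifact, sumsFile] = process.argv.slice(2)
const actual = await sha256(artifact)
// Each non-comment line is "<hash> <filename>"; the first line is a "# ..." header.
const expected = readFileSync(sumsFile, 'utf8')
  .split('\n')
  .map((line) => line.trim().split(/\s+/))
  .find(([, name]) => name === basename(artifact))?.[0]
console.log(expected === actual ? 'OK: checksum matches' : `MISMATCH: expected ${expected}, got ${actual}`)
```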
46  .github/workflows/pr-ci.yml  vendored  Normal file
@@ -0,0 +1,46 @@
name: Pull Request CI

on:
  workflow_dispatch:
  pull_request:
    branches:
      - main

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - name: Check out Git repository
        uses: actions/checkout@v4

      - name: Install Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20

      - name: Install corepack
        run: corepack enable && corepack prepare yarn@4.6.0 --activate

      - name: Get yarn cache directory path
        id: yarn-cache-dir-path
        run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT

      - name: Cache yarn dependencies
        uses: actions/cache@v4
        with:
          path: |
            ${{ steps.yarn-cache-dir-path.outputs.dir }}
            node_modules
          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-yarn-

      - name: Install Dependencies
        run: yarn install

      - name: Build Check
        run: yarn build:check

      - name: Lint Check
        run: yarn lint
6  .github/workflows/release.yml  vendored
@@ -38,19 +38,19 @@ jobs:
        fi

      - name: Install Node.js
        uses: actions/setup-node@v3
        uses: actions/setup-node@v4
        with:
          node-version: 20

      - name: Install corepack
        run: corepack enable && corepack prepare yarn@4.3.1 --activate
        run: corepack enable && corepack prepare yarn@4.6.0 --activate

      - name: Get yarn cache directory path
        id: yarn-cache-dir-path
        run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT

      - name: Cache yarn dependencies
        uses: actions/cache@v3
        uses: actions/cache@v4
        with:
          path: |
            ${{ steps.yarn-cache-dir-path.outputs.dir }}
1  .husky/pre-commit  Normal file
@@ -0,0 +1 @@
yarn lint-staged
1  .npmrc  Normal file
@@ -0,0 +1 @@
electron_mirror=https://npmmirror.com/mirrors/electron/
@@ -6,3 +6,4 @@ tsconfig.json
tsconfig.*.json
CHANGELOG*.md
agents.json
src/renderer/src/integration/nutstore/sso/lib
3  .vscode/settings.json  vendored
@@ -4,7 +4,8 @@
    "source.fixAll.eslint": "explicit"
  },
  "search.exclude": {
    "**/dist/**": true
    "**/dist/**": true,
    ".yarn/releases/**": true
  },
  "[javascript]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
@@ -1,13 +0,0 @@
|
||||
diff --git a/src/markdown-loader.js b/src/markdown-loader.js
|
||||
index eaf30b114a273e68abbb92c8b07018495e63f4cb..4b06519bdb51845e4693fe877da9de01c7a81039 100644
|
||||
--- a/src/markdown-loader.js
|
||||
+++ b/src/markdown-loader.js
|
||||
@@ -21,7 +21,7 @@ export class MarkdownLoader extends BaseLoader {
|
||||
? (await getSafe(this.filePathOrUrl, { format: 'buffer' })).body
|
||||
: await streamToBuffer(fs.createReadStream(this.filePathOrUrl));
|
||||
this.debug('MarkdownLoader stream created');
|
||||
- const result = micromark(buffer, { extensions: [gfm(), mdxJsx()], htmlExtensions: [gfmHtml()] });
|
||||
+ const result = micromark(buffer, { extensions: [gfm()], htmlExtensions: [gfmHtml()] });
|
||||
this.debug('Markdown parsed...');
|
||||
const webLoader = new WebLoader({
|
||||
urlOrContent: result,
|
||||
@@ -1,158 +0,0 @@
|
||||
diff --git a/src/loaders/local-path-loader.d.ts b/src/loaders/local-path-loader.d.ts
|
||||
index 48c20e68c469cd309be2dc8f28e44c1bd04a26e9..1c16d83bcbf9b7140292793d6cbb8c04281949d9 100644
|
||||
--- a/src/loaders/local-path-loader.d.ts
|
||||
+++ b/src/loaders/local-path-loader.d.ts
|
||||
@@ -4,8 +4,10 @@ export declare class LocalPathLoader extends BaseLoader<{
|
||||
}> {
|
||||
private readonly debug;
|
||||
private readonly path;
|
||||
- constructor({ path }: {
|
||||
+ constructor({ path, chunkSize, chunkOverlap }: {
|
||||
path: string;
|
||||
+ chunkSize?: number;
|
||||
+ chunkOverlap?: number;
|
||||
});
|
||||
getUnfilteredChunks(): AsyncGenerator<{
|
||||
metadata: {
|
||||
diff --git a/src/loaders/local-path-loader.js b/src/loaders/local-path-loader.js
|
||||
index 4cf8a6bd1d890244c8ec49d4a05ee3bd58861c79..ec8215b01195a21ef20f3c5d56ecc99f186bb596 100644
|
||||
--- a/src/loaders/local-path-loader.js
|
||||
+++ b/src/loaders/local-path-loader.js
|
||||
@@ -8,8 +8,8 @@ import { BaseLoader } from '@llm-tools/embedjs-interfaces';
|
||||
export class LocalPathLoader extends BaseLoader {
|
||||
debug = createDebugMessages('embedjs:loader:LocalPathLoader');
|
||||
path;
|
||||
- constructor({ path }) {
|
||||
- super(`LocalPathLoader_${md5(path)}`, { path });
|
||||
+ constructor({ path, chunkSize, chunkOverlap }) {
|
||||
+ super(`LocalPathLoader_${md5(path)}`, { path }, chunkSize ?? 1000, chunkOverlap ?? 0);
|
||||
this.path = path;
|
||||
}
|
||||
async *getUnfilteredChunks() {
|
||||
@@ -36,10 +36,12 @@ export class LocalPathLoader extends BaseLoader {
|
||||
const extension = currentPath.split('.').pop().toLowerCase();
|
||||
if (extension === 'md' || extension === 'mdx')
|
||||
mime = 'text/markdown';
|
||||
+ if (extension === 'txt')
|
||||
+ mime = 'text/plain';
|
||||
this.debug(`File '${this.path}' mime type updated to 'text/markdown'`);
|
||||
}
|
||||
try {
|
||||
- const loader = await createLoaderFromMimeType(currentPath, mime);
|
||||
+ const loader = await createLoaderFromMimeType(currentPath, mime, this.chunkSize, this.chunkOverlap);
|
||||
for await (const result of await loader.getUnfilteredChunks()) {
|
||||
yield {
|
||||
pageContent: result.pageContent,
|
||||
diff --git a/src/util/mime.d.ts b/src/util/mime.d.ts
|
||||
index 57f56a1b8edc98366af9f84d671676c41c2f01ca..14be3b5727cff6eb1978838045e9a788f8f53bfb 100644
|
||||
--- a/src/util/mime.d.ts
|
||||
+++ b/src/util/mime.d.ts
|
||||
@@ -1,2 +1,2 @@
|
||||
import { BaseLoader } from '@llm-tools/embedjs-interfaces';
|
||||
-export declare function createLoaderFromMimeType(loaderData: string, mimeType: string): Promise<BaseLoader>;
|
||||
+export declare function createLoaderFromMimeType(loaderData: string, mimeType: string, chunkSize?: number, chunkOverlap?: number): Promise<BaseLoader>;
|
||||
diff --git a/src/util/mime.js b/src/util/mime.js
|
||||
index b6426a859968e2bf6206795f70333e90ae27aeb7..16ae2adb863f8d7abfa757f1c5cc39f6bb1c44fa 100644
|
||||
--- a/src/util/mime.js
|
||||
+++ b/src/util/mime.js
|
||||
@@ -1,7 +1,9 @@
|
||||
import mime from 'mime';
|
||||
import createDebugMessages from 'debug';
|
||||
import { TextLoader } from '../loaders/text-loader.js';
|
||||
-export async function createLoaderFromMimeType(loaderData, mimeType) {
|
||||
+import fs from 'node:fs'
|
||||
+
|
||||
+export async function createLoaderFromMimeType(loaderData, mimeType, chunkSize, chunkOverlap) {
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')(`Incoming mime type '${mimeType}'`);
|
||||
switch (mimeType) {
|
||||
case 'application/msword':
|
||||
@@ -10,7 +12,7 @@ export async function createLoaderFromMimeType(loaderData, mimeType) {
|
||||
throw new Error('Package `@llm-tools/embedjs-loader-msoffice` needs to be installed to load docx files');
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported DocxLoader');
|
||||
- return new DocxLoader({ filePathOrUrl: loaderData });
|
||||
+ return new DocxLoader({ filePathOrUrl: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
case 'application/vnd.ms-excel':
|
||||
case 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': {
|
||||
@@ -18,21 +20,21 @@ export async function createLoaderFromMimeType(loaderData, mimeType) {
|
||||
throw new Error('Package `@llm-tools/embedjs-loader-msoffice` needs to be installed to load excel files');
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported ExcelLoader');
|
||||
- return new ExcelLoader({ filePathOrUrl: loaderData });
|
||||
+ return new ExcelLoader({ filePathOrUrl: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
case 'application/pdf': {
|
||||
const { PdfLoader } = await import('@llm-tools/embedjs-loader-pdf').catch(() => {
|
||||
throw new Error('Package `@llm-tools/embedjs-loader-pdf` needs to be installed to load PDF files');
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported PdfLoader');
|
||||
- return new PdfLoader({ filePathOrUrl: loaderData });
|
||||
+ return new PdfLoader({ filePathOrUrl: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
case 'application/vnd.openxmlformats-officedocument.presentationml.presentation': {
|
||||
const { PptLoader } = await import('@llm-tools/embedjs-loader-msoffice').catch(() => {
|
||||
throw new Error('Package `@llm-tools/embedjs-loader-msoffice` needs to be installed to load pptx files');
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported PptLoader');
|
||||
- return new PptLoader({ filePathOrUrl: loaderData });
|
||||
+ return new PptLoader({ filePathOrUrl: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
case 'text/plain': {
|
||||
const fineType = mime.getType(loaderData);
|
||||
@@ -42,24 +44,24 @@ export async function createLoaderFromMimeType(loaderData, mimeType) {
|
||||
throw new Error('Package `@llm-tools/embedjs-loader-csv` needs to be installed to load CSV files');
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported CsvLoader');
|
||||
- return new CsvLoader({ filePathOrUrl: loaderData });
|
||||
+ return new CsvLoader({ filePathOrUrl: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
- else
|
||||
- return new TextLoader({ text: loaderData });
|
||||
+ const content = fs.readFileSync(loaderData, 'utf-8');
|
||||
+ return new TextLoader({ text: content, chunkSize, chunkOverlap });
|
||||
}
|
||||
case 'application/csv': {
|
||||
const { CsvLoader } = await import('@llm-tools/embedjs-loader-csv').catch(() => {
|
||||
throw new Error('Package `@llm-tools/embedjs-loader-csv` needs to be installed to load CSV files');
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported CsvLoader');
|
||||
- return new CsvLoader({ filePathOrUrl: loaderData });
|
||||
+ return new CsvLoader({ filePathOrUrl: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
case 'text/html': {
|
||||
const { WebLoader } = await import('@llm-tools/embedjs-loader-web').catch(() => {
|
||||
throw new Error('Package `@llm-tools/embedjs-loader-web` needs to be installed to load web documents');
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported WebLoader');
|
||||
- return new WebLoader({ urlOrContent: loaderData });
|
||||
+ return new WebLoader({ urlOrContent: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
case 'text/xml': {
|
||||
const { SitemapLoader } = await import('@llm-tools/embedjs-loader-sitemap').catch(() => {
|
||||
@@ -67,14 +69,14 @@ export async function createLoaderFromMimeType(loaderData, mimeType) {
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported SitemapLoader');
|
||||
if (await SitemapLoader.test(loaderData)) {
|
||||
- return new SitemapLoader({ url: loaderData });
|
||||
+ return new SitemapLoader({ url: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
//This is not a Sitemap but is still XML
|
||||
const { XmlLoader } = await import('@llm-tools/embedjs-loader-xml').catch(() => {
|
||||
throw new Error('Package `@llm-tools/embedjs-loader-xml` needs to be installed to load XML documents');
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported XmlLoader');
|
||||
- return new XmlLoader({ filePathOrUrl: loaderData });
|
||||
+ return new XmlLoader({ filePathOrUrl: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
case 'text/x-markdown':
|
||||
case 'text/markdown': {
|
||||
@@ -82,7 +84,7 @@ export async function createLoaderFromMimeType(loaderData, mimeType) {
|
||||
throw new Error('Package `@llm-tools/embedjs-loader-markdown` needs to be installed to load markdown files');
|
||||
});
|
||||
createDebugMessages('embedjs:util:createLoaderFromMimeType')('Dynamically imported MarkdownLoader');
|
||||
- return new MarkdownLoader({ filePathOrUrl: loaderData });
|
||||
+ return new MarkdownLoader({ filePathOrUrl: loaderData, chunkSize, chunkOverlap });
|
||||
}
|
||||
case 'image/png':
|
||||
case 'image/jpeg': {
|
||||
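The recurring change in the patch above is threading `chunkSize` and `chunkOverlap` through every loader constructor. As a self-contained illustration of what those two parameters control — not the embedjs implementation, with a made-up input file and example values — a rough size/overlap chunking sketch:

```js
// chunk-demo.js — illustration of chunkSize/chunkOverlap semantics only.
import { readFileSync } from 'node:fs'

function chunkText(text, chunkSize = 1000, chunkOverlap = 0) {
  // The 1000/0 defaults mirror the fallbacks in the patched constructor above.
  const chunks = []
  const step = Math.max(1, chunkSize - chunkOverlap)
  for (let start = 0; start < text.length; start += step) {
    chunks.push(text.slice(start, start + chunkSize))
    if (start + chunkSize >= text.length) break
  }
  return chunks
}

// Mirrors the patched 'text/plain' branch, which reads the file's contents
// instead of treating the path string itself as the text.
const content = readFileSync('./notes.txt', 'utf-8') // hypothetical input file
console.log(chunkText(content, 512, 64).length, 'chunks') // 512/64 are example values
```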
53  .yarn/patches/epub-npm-1.3.0-8325494ffe.patch  vendored  Normal file
@@ -0,0 +1,53 @@
diff --git a/epub.js b/epub.js
index 50efff7678ca4879ed639d3bb70fd37e7477fd16..accbe689cd200bd59475dd20fca596511d0f33e0 100644
--- a/epub.js
+++ b/epub.js
@@ -3,9 +3,28 @@ var xml2jsOptions = xml2js.defaults['0.1'];
var EventEmitter = require('events').EventEmitter;

try {
- // zipfile is an optional dependency:
- var ZipFile = require("zipfile").ZipFile;
-} catch (err) {
+ var zipread = require("zipread");
+ var ZipFile = function(filename) {
+ var zip = zipread(filename);
+ this.zip = zip;
+ var files = zip.files;
+
+ files = Object.values(files).filter((file) => {
+ return !file.dir;
+ }).map((file) => {
+ return file.name;
+ });
+
+ this.names = files;
+ this.count = this.names.length;
+ };
+ ZipFile.prototype.readFile = function(name, cb) {
+ this.zip.readFile(name
+ , function(err, buffer) {
+ return cb(null, buffer);
+ });
+ };
+} catch(err) {
// Mock zipfile using pure-JS adm-zip:
var AdmZip = require('adm-zip');

diff --git a/package.json b/package.json
index 8c3dccf0caac8913a2edabd7049b25bb9063c905..57bac3b71ddd73916adbdf00b049089181db5bcb 100644
--- a/package.json
+++ b/package.json
@@ -40,10 +40,8 @@
],
"dependencies": {
"adm-zip": "^0.4.11",
- "xml2js": "^0.4.23"
- },
- "optionalDependencies": {
- "zipfile": "^0.5.11"
+ "xml2js": "^0.4.23",
+ "zipread": "^1.3.3"
},
"devDependencies": {
"@types/mocha": "^5.2.5",
18  .yarn/patches/pkce-challenge-npm-4.1.0-fbc51695a3.patch  vendored  Normal file
@@ -0,0 +1,18 @@
diff --git a/dist/index.node.js b/dist/index.node.js
index bb108cbc210af5b99e864fd1dd8c555e948ecf7a..8ef8c1aab59215c21d161c0e52125724528ecab8 100644
--- a/dist/index.node.js
+++ b/dist/index.node.js
@@ -1,8 +1,11 @@
let crypto;
crypto =
globalThis.crypto?.webcrypto ?? // Node.js 16 REPL has globalThis.crypto as node:crypto
- globalThis.crypto ?? // Node.js 18+
- (await import("node:crypto")).webcrypto; // Node.js 16 non-REPL
+ globalThis.crypto ?? // Node.js 18+
+ (async() => {
+ const crypto = await import("node:crypto");
+ return crypto.webcrypto;
+ })();
/**
* Creates an array of length `size` of random bytes
* @param size
934  .yarn/releases/yarn-4.6.0.cjs  vendored  Executable file
@@ -3,3 +3,5 @@ enableImmutableInstalls: false
httpTimeout: 300000

nodeLinker: node-modules

yarnPath: .yarn/releases/yarn-4.6.0.cjs
@@ -40,6 +40,6 @@
如果您有任何问题或建议,欢迎通过以下方式联系我们:

- 微信:kangfenmao
- [GitHub Issues](https://github.com/kangfenmao/cherry-studio/issues)
- [GitHub Issues](https://github.com/CherryHQ/cherry-studio/issues)

感谢您的支持和贡献!我们期待与您一起将 Cherry Studio 打造成更好的产品。
63  LICENSE
@@ -1,19 +1,16 @@
## Cherry Studio 用户协议

欢迎使用 Cherry Studio 桌面 AI 客户端工具。请仔细阅读以下协议条款,继续使用本软件即表示您同意本协议内容。

**许可协议**

本软件采用 Apache License 2.0 许可。除 Apache License 2.0 规定的条款外,您在使用 Cherry Studio 时还应遵守以下附加条款:

**一. 商用许可**

1. **免费商用**:用户在不修改代码的情况下,可以免费用于商业目的。
2. **商业授权**:如果您满足以下任意条件之一,需取得商业授权:
1. 对本软件进行二次修改、开发(包括但不限于修改应用名称、logo、代码以及功能)。
2. 为企业客户提供多租户服务,且该服务支持 10 人或以上的使用。
3. 预装或集成到硬件设备或产品中进行捆绑销售。
4. 政府或教育机构的大规模采购项目,特别是涉及安全、数据隐私等敏感需求时。
在以下任何一种情况下,您需要联系我们并获得明确的书面商业授权后,方可继续使用 Cherry Studio 材料:

1. **修改与衍生**: 您对 Cherry Studio 材料进行修改或基于其进行衍生开发(包括但不限于修改应用名称、Logo、代码、功能、界面,数据等)。
2. **企业服务**: 在您的企业内部,或为企业客户提供基于 CherryStudio 的服务,且该服务支持 10 人及以上累计用户使用。
3. **硬件捆绑销售**: 您将 Cherry Studio 预装或集成到硬件设备或产品中进行捆绑销售。
4. **政府或教育机构大规模采购**: 您的使用场景属于政府或教育机构的大规模采购项目,特别是涉及安全、数据隐私等敏感需求时。
5. **面向公众的公有云服务**:基于 Cherry Studio,提供面向公众的公有云服务。

**二. 贡献者协议**

@@ -33,47 +30,33 @@

---

根据 Apache 许可证 2.0 版(“许可证”)进行许可;除非符合许可证,否则您不得使用此文件。您可以在以下网址获取许可证副本:

http://www.apache.org/licenses/LICENSE-2.0

除非适用法律要求或书面同意,软件根据许可证分发的内容以“原样”分发,不附带任何明示或暗示的保证或条件。请参阅特定语言管理权限的许可证和许可证下的限制。

## Cherry Studio User Agreement

Welcome to Cherry Studio, a desktop AI client tool. Please read the following agreement carefully. By continuing to use this software, you agree to the terms outlined below.

**License Agreement**

This software is licensed under the **Apache License 2.0**. In addition to the terms of the Apache License 2.0, the following additional terms apply to the use of Cherry Studio:
This software is licensed under the Apache License 2.0. In addition to the terms stipulated by the Apache License 2.0, you must comply with the following supplementary terms when using Cherry Studio:

**I. Commercial Use License**
**I. Commercial Licensing**

1. **Free Commercial Use**: Users can use the software for commercial purposes without modifying the code.
2. **Commercial License Required**: A commercial license is required if any of the following conditions are met:
1. You modify, develop, or alter the software, including but not limited to changes to the application name, logo, code, or functionality.
2. You provide multi-tenant services to enterprise customers with 10 or more users.
3. You pre-install or integrate the software into hardware devices or products and bundle it for sale.
4. You are engaging in large-scale procurement for government or educational institutions, especially involving security, data privacy, or other sensitive requirements.
You must contact us and obtain explicit written commercial authorization to continue using Cherry Studio materials under any of the following circumstances:

1. **Modifications and Derivatives:** You modify Cherry Studio materials or perform derivative development based on them (including but not limited to changing the application’s name, logo, code, functionality, user interface, data, etc.).
2. **Enterprise Services:** You use Cherry Studio internally within your enterprise, or you provide Cherry Studio-based services for enterprise customers, and such services support cumulative usage by 10 or more users.
3. **Hardware Bundling and Sales:** You pre-install or integrate Cherry Studio into hardware devices or products for bundled sale.
4. **Large-scale Procurement by Government or Educational Institutions:** Your usage scenario involves large-scale procurement projects by government or educational institutions, especially in cases involving sensitive requirements such as security and data privacy.
5. **Public Cloud Services:** You provide public cloud-based product services utilizing Cherry Studio.

**II. Contributor Agreement**

As a contributor to Cherry Studio, you agree to the following:
As a contributor to Cherry Studio, you must agree to the following terms:

1. **License Adjustment**: The producer reserves the right to adjust the open-source license as needed, making it stricter or more lenient.
2. **Commercial Use**: Any code you contribute may be used for commercial purposes, including but not limited to cloud business operations.
1. **License Adjustments:** The producer reserves the right to adjust the open-source license as necessary, making it more strict or permissive.
2. **Commercial Usage:** Your contributed code may be used commercially, including but not limited to cloud business operations.

**III. Other Terms**

1. The interpretation of these terms is subject to the discretion of Cherry Studio developers.
2. These terms may be updated, and users will be notified through the software when changes occur.
1. Cherry Studio developers reserve the right of final interpretation of these agreement terms.
2. This agreement may be updated according to practical circumstances, and users will be notified of updates through this software.

For any questions or to request a commercial license, please contact the Cherry Studio development team.
If you have any questions or need to apply for commercial authorization, please contact the Cherry Studio development team.

Apart from the specific conditions mentioned above, all other rights and restrictions follow the Apache License 2.0. Detailed information about the Apache License 2.0 can be found at http://www.apache.org/licenses/LICENSE-2.0.

---

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Other than these specific conditions, all remaining rights and restrictions follow the Apache License 2.0. For more detailed information regarding Apache License 2.0, please visit http://www.apache.org/licenses/LICENSE-2.0.
106  README.md
@@ -1,21 +1,26 @@
<h1 align="center">
<a href="https://github.com/kangfenmao/cherry-studio/releases">
<img src="https://github.com/kangfenmao/cherry-studio/blob/main/build/icon.png?raw=true" width="150" height="150" alt="banner" /><br>
<a href="https://github.com/CherryHQ/cherry-studio/releases">
<img src="https://github.com/CherryHQ/cherry-studio/blob/main/build/icon.png?raw=true" width="150" height="150" alt="banner" /><br>
</a>
</h1>
<p align="center">English | <a href="./docs/README.zh.md">中文</a> | <a href="./docs/README.ja.md">日本語</a><br></p>
<div align="center">
<a href="https://trendshift.io/repositories/11772" target="_blank"><img src="https://trendshift.io/api/badge/repositories/11772" alt="kangfenmao%2Fcherry-studio | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry-studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry Studio - AI Chatbots, AI Desktop Client | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
</div>

# 🍒 Cherry Studio

Cherry Studio is a desktop client that supports for multiple LLM providers, available on Windows, Mac and Linux.

👏 Join [Telegram Group](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQ Group(1025067911)](https://qm.qq.com/q/RIBAO2pPKS)
👏 Join [Telegram Group](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQ Group(472019156)](https://qm.qq.com/q/CbZiBWwCXu)

❤️ Like Cherry Studio? Give it a star 🌟 or [Sponsor](docs/sponsor.md) to support the development!

# 📖 Guide

https://docs.cherry-ai.com

# 🌠 Screenshot

![](https://github.com/user-attachments/assets/28585d83-4bf0-4714-b561-8c7bf57cc600)
@@ -28,37 +33,39 @@ Cherry Studio is a desktop client that supports for multiple LLM providers, avai

1. **Diverse LLM Provider Support**:

- ☁️ Major LLM Cloud Services: OpenAI, Gemini, Anthropic, and more
- 🔗 AI Web Service Integration: Claude, Peplexity, Poe, and others
- 💻 Local Model Support with Ollama, LM Studio
- ☁️ Major LLM Cloud Services: OpenAI, Gemini, Anthropic, and more
- 🔗 AI Web Service Integration: Claude, Peplexity, Poe, and others
- 💻 Local Model Support with Ollama, LM Studio

2. **AI Assistants & Conversations**:

- 📚 300+ Pre-configured AI Assistants
- 🤖 Custom Assistant Creation
- 💬 Multi-model Simultaneous Conversations
- 📚 300+ Pre-configured AI Assistants
- 🤖 Custom Assistant Creation
- 💬 Multi-model Simultaneous Conversations

3. **Document & Data Processing**:

- 📄 Support for Text, Images, Office, PDF, and more
- ☁️ WebDAV File Management and Backup
- 📊 Mermaid Chart Visualization
- 💻 Code Syntax Highlighting
- 📄 Support for Text, Images, Office, PDF, and more
- ☁️ WebDAV File Management and Backup
- 📊 Mermaid Chart Visualization
- 💻 Code Syntax Highlighting

4. **Practical Tools Integration**:

- 🔍 Global Search Functionality
- 📝 Topic Management System
- 🔤 AI-powered Translation
- 🎯 Drag-and-drop Sorting
- 🔌 Mini Program Support
- 🔍 Global Search Functionality
- 📝 Topic Management System
- 🔤 AI-powered Translation
- 🎯 Drag-and-drop Sorting
- 🔌 Mini Program Support
- ⚙️ MCP(Model Context Protocol) Server

5. **Enhanced User Experience**:
- 🖥️ Cross-platform Support for Windows, Mac, and Linux
- 📦 Ready to Use, No Environment Setup Required
- 🎨 Light/Dark Themes and Transparent Window
- 📝 Complete Markdown Rendering
- 🤲 Easy Content Sharing

- 🖥️ Cross-platform Support for Windows, Mac, and Linux
- 📦 Ready to Use, No Environment Setup Required
- 🎨 Light/Dark Themes and Transparent Window
- 📝 Complete Markdown Rendering
- 🤲 Easy Content Sharing

# 📝 TODO

@@ -75,38 +82,17 @@ Cherry Studio is a desktop client that supports for multiple LLM providers, avai
- [ ] Voice input and output (AI call)
- [ ] Data backup supports custom backup content

# 🌈 Theme

- Theme Gallery: https://cherrycss.com
- Aero Theme: https://github.com/hakadao/CherryStudio-Aero
- PaperMaterial Theme: https://github.com/rainoffallingstar/CherryStudio-PaperMaterial

Welcome PR for more themes

# 🖥️ Develop

## IDE Setup

[Cursor](https://www.cursor.com/) + [ESLint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) + [Prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode)

## Project Setup

### Install

```bash
$ yarn
```

### Development

```bash
$ yarn dev
```

### Build

```bash
# For windows
$ yarn build:win

# For macOS
$ yarn build:mac

# For Linux
$ yarn build:linux
```
Refer to the [development documentation](docs/dev.md)

# 🤝 Contributing

@@ -135,20 +121,18 @@ Thank you for your support and contributions!

- [one-api](https://github.com/songquanpeng/one-api):LLM API management and distribution system, supporting mainstream models like OpenAI, Azure, and Anthropic. Features unified API interface, suitable for key management and secondary distribution.

- [ublacklist](https://github.com/iorate/ublacklist):Blocks specific sites from appearing in Google search results

# 🚀 Contributors

<a href="https://github.com/kangfenmao/cherry-studio/graphs/contributors">
<a href="https://github.com/CherryHQ/cherry-studio/graphs/contributors">
<img src="https://contrib.rocks/image?repo=kangfenmao/cherry-studio" />
</a>
<br /><br />

# 🌐 Community

[Telegram](https://t.me/CherryStudioAI) | [Email](mailto:kangfenmao@gmail.com) | [Twitter](https://x.com/kangfenmao)

# 📣 Product Hunt

<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry-studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry Studio - AI Chatbots, AI Desktop Client | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
[Telegram](https://t.me/CherryStudioAI) | [Email](mailto:support@cherry-ai.com) | [Twitter](https://x.com/kangfenmao)

# ☕ Sponsor

@@ -158,6 +142,10 @@ Thank you for your support and contributions!

[LICENSE](./LICENSE)

# ✉️ Contact

yinsenho@cherry-ai.com

# ⭐️ Star History

[![Star History Chart](https://api.star-history.com/svg?repos=kangfenmao/cherry-studio&type=Timeline)](https://star-history.com/#kangfenmao/cherry-studio&Timeline)
@@ -1,8 +1,8 @@
# provider: generic
# url: http://127.0.0.1:8080
# updaterCacheDirName: cherry-studio-updater
# provider: github
# repo: cherry-studio
# owner: kangfenmao
provider: generic
url: https://cherrystudio.ocool.online
provider: github
repo: cherry-studio
owner: kangfenmao
# provider: generic
# url: https://cherrystudio.ocool.online
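This hunk switches the development update feed from a generic server to GitHub releases; a `dev-app-update.yml` of this shape is what electron-updater reads in unpackaged development builds. A minimal sketch (assuming electron-updater is the updater in use, which this diff does not itself show) of the equivalent provider settings applied from the main process:

```js
// main-process sketch; assumes electron-updater, as dev-app-update.yml suggests.
import { autoUpdater } from 'electron-updater'

// Equivalent of the provider/owner/repo lines enabled in this hunk.
autoUpdater.setFeedURL({ provider: 'github', owner: 'kangfenmao', repo: 'cherry-studio' })

autoUpdater.on('update-available', (info) => console.log('update available:', info.version))
autoUpdater.on('error', (err) => console.error('updater error:', err))

// Check the configured feed and download in the background if a newer version exists.
autoUpdater.checkForUpdatesAndNotify()
```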
@@ -1,21 +1,26 @@
|
||||
<h1 align="center">
|
||||
<a href="https://github.com/kangfenmao/cherry-studio/releases">
|
||||
<img src="https://github.com/kangfenmao/cherry-studio/blob/main/build/icon.png?raw=true" width="150" height="150" alt="banner" />
|
||||
<a href="https://github.com/CherryHQ/cherry-studio/releases">
|
||||
<img src="https://github.com/CherryHQ/cherry-studio/blob/main/build/icon.png?raw=true" width="150" height="150" alt="banner" />
|
||||
</a>
|
||||
</h1>
|
||||
<div align="center">
|
||||
<a href="./README.md">English</a> | <a href="./README.zh.md">中文</a> | 日本語
|
||||
</div>
|
||||
<p align="center">
|
||||
<a href="https://github.com/CherryHQ/cherry-studio">English</a> | <a href="./README.zh.md">中文</a> | 日本語 <br>
|
||||
</p>
|
||||
<div align="center">
|
||||
<a href="https://trendshift.io/repositories/11772" target="_blank"><img src="https://trendshift.io/api/badge/repositories/11772" alt="kangfenmao%2Fcherry-studio | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry-studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry Studio - AI Chatbots, AI Desktop Client | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
</div>
|
||||
# 🍒 Cherry Studio
|
||||
|
||||
Cherry Studioは、複数のLLMプロバイダーをサポートするデスクトップクライアントで、Windows、Mac、Linuxで利用可能です。
|
||||
Cherry Studio は、複数の LLM プロバイダーをサポートするデスクトップクライアントで、Windows、Mac、Linux で利用可能です。
|
||||
|
||||
👏 [Telegram](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQグループ(1025067911)](https://qm.qq.com/q/RIBAO2pPKS)
|
||||
👏 [Telegram](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQグループ(472019156)](https://qm.qq.com/q/CbZiBWwCXu)
|
||||
|
||||
❤️ Cherry Studioをお気に入りにしましたか?小さな星をつけてください 🌟 または [スポンサー](sponsor.md) をして開発をサポートしてください!❤️
|
||||
❤️ Cherry Studio をお気に入りにしましたか?小さな星をつけてください 🌟 または [スポンサー](sponsor.md) をして開発をサポートしてください!❤️
|
||||
|
||||
# 📖 ガイド
|
||||
|
||||
https://docs.cherry-ai.com
|
||||
|
||||
# 🌠 スクリーンショット
|
||||
|
||||
@@ -29,43 +34,45 @@ Cherry Studioは、複数のLLMプロバイダーをサポートするデスク
|
||||
|
||||
1. **多様な LLM サービス対応**:
|
||||
|
||||
- ☁️ 主要な LLM クラウドサービス対応:OpenAI、Gemini、Anthropic など
|
||||
- 🔗 AI Web サービス統合:Claude、Peplexity、Poe など
|
||||
- 💻 Ollama、LM Studio によるローカルモデル実行対応
|
||||
- ☁️ 主要な LLM クラウドサービス対応:OpenAI、Gemini、Anthropic など
|
||||
- 🔗 AI Web サービス統合:Claude、Peplexity、Poe など
|
||||
- 💻 Ollama、LM Studio によるローカルモデル実行対応
|
||||
|
||||
2. **AI アシスタントと対話**:
|
||||
|
||||
- 📚 300+ の事前設定済み AI アシスタント
|
||||
- 🤖 カスタム AI アシスタントの作成
|
||||
- 💬 複数モデルでの同時対話機能
|
||||
- 📚 300+ の事前設定済み AI アシスタント
|
||||
- 🤖 カスタム AI アシスタントの作成
|
||||
- 💬 複数モデルでの同時対話機能
|
||||
|
||||
3. **文書とデータ処理**:
|
||||
|
||||
- 📄 テキスト、画像、Office、PDF など多様な形式対応
|
||||
- ☁️ WebDAV によるファイル管理とバックアップ
|
||||
- 📊 Mermaid による図表作成
|
||||
- 💻 コードハイライト機能
|
||||
- 📄 テキスト、画像、Office、PDF など多様な形式対応
|
||||
- ☁️ WebDAV によるファイル管理とバックアップ
|
||||
- 📊 Mermaid による図表作成
|
||||
- 💻 コードハイライト機能
|
||||
|
||||
4. **実用的なツール統合**:
|
||||
|
||||
- 🔍 グローバル検索機能
|
||||
- 📝 トピック管理システム
|
||||
- 🔤 AI による翻訳機能
|
||||
- 🎯 ドラッグ&ドロップによる整理
|
||||
- 🔌 ミニプログラム対応
|
||||
- 🔍 グローバル検索機能
|
||||
- 📝 トピック管理システム
|
||||
- 🔤 AI による翻訳機能
|
||||
- 🎯 ドラッグ&ドロップによる整理
|
||||
- 🔌 ミニプログラム対応
|
||||
- ⚙️ MCP(モデルコンテキストプロトコル) サービス
|
||||
|
||||
5. **優れたユーザー体験**:
|
||||
- 🖥️ Windows、Mac、Linux のクロスプラットフォーム対応
|
||||
- 📦 環境構築不要ですぐに使用可能
|
||||
- 🎨 ライト/ダークテーマと透明ウィンドウ対応
|
||||
- 📝 完全な Markdown レンダリング
|
||||
- 🤲 簡単な共有機能
|
||||
|
||||
- 🖥️ Windows、Mac、Linux のクロスプラットフォーム対応
|
||||
- 📦 環境構築不要ですぐに使用可能
|
||||
- 🎨 ライト/ダークテーマと透明ウィンドウ対応
|
||||
- 📝 完全な Markdown レンダリング
|
||||
- 🤲 簡単な共有機能
|
||||
|
||||
# 📝 TODO
|
||||
|
||||
- [x] クイックポップアップ(クリップボードの読み取り、簡単な質問、説明、翻訳、要約)
|
||||
- [x] 複数モデルの回答の比較
|
||||
- [x] サービスプロバイダーが提供するSSOを使用したログインをサポート
|
||||
- [x] サービスプロバイダーが提供する SSO を使用したログインをサポート
|
||||
- [x] すべてのモデルがネットワークをサポート
|
||||
- [x] 最初の公式バージョンのリリース
|
||||
- [ ] 錯誤修復と改善 (開発中...)
|
||||
@@ -73,53 +80,31 @@ Cherry Studioは、複数のLLMプロバイダーをサポートするデスク
|
||||
- [ ] ブラウザ拡張機能(テキストをハイライトして翻訳、要約、ナレッジベースに追加)
|
||||
- [ ] iOS & Android クライアント
|
||||
- [ ] AIノート
|
||||
- [ ] 音声入出力(AIコール)
|
||||
- [ ] 音声入出力(AI コール)
|
||||
- [ ] データバックアップでバックアップ内容のカスタマイズに対応
|
||||
|
||||
# 🌈 テーマ
|
||||
|
||||
テーマギャラリー: https://cherrycss.com
|
||||
Aero テーマ: https://github.com/hakadao/CherryStudio-Aero
|
||||
|
||||
より多くのテーマのPRを歓迎します
|
||||
|
||||
# 🖥️ 開発
|
||||
|
||||
## IDEの設定
|
||||
|
||||
[Cursor](https://www.cursor.com/) + [ESLint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) + [Prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode)
|
||||
|
||||
## プロジェクトの設定
|
||||
|
||||
### インストール
|
||||
|
||||
```bash
|
||||
$ yarn
|
||||
```
|
||||
|
||||
### 開発
|
||||
|
||||
```bash
|
||||
$ yarn dev
|
||||
```
|
||||
|
||||
### ビルド
|
||||
|
||||
```bash
|
||||
# Windowsの場合
|
||||
$ yarn build:win
|
||||
|
||||
# macOSの場合
|
||||
$ yarn build:mac
|
||||
|
||||
# Linuxの場合
|
||||
$ yarn build:linux
|
||||
```
|
||||
詳細は[開発ドキュメント](dev.md)を参照してください
|
||||
|
||||
# 🤝 貢献
|
||||
|
||||
Cherry Studioへの貢献を歓迎します!以下の方法で貢献できます:
|
||||
Cherry Studio への貢献を歓迎します!以下の方法で貢献できます:
|
||||
|
||||
1. **コードの貢献**:新機能を開発するか、既存のコードを最適化します。
|
||||
2. **バグの修正**:見つけたバグを修正します。
|
||||
3. **問題の管理**:GitHubの問題を管理するのを手伝います。
|
||||
3. **問題の管理**:GitHub の問題を管理するのを手伝います。
|
||||
4. **製品デザイン**:デザインの議論に参加します。
|
||||
5. **ドキュメントの作成**:ユーザーマニュアルやガイドを改善します。
|
||||
6. **コミュニティの参加**:ディスカッションに参加し、ユーザーを支援します。
|
||||
7. **使用の促進**:Cherry Studioを広めます。
|
||||
7. **使用の促進**:Cherry Studio を広めます。
|
||||
|
||||
## 始め方
|
||||
|
||||
@@ -128,27 +113,23 @@ Cherry Studioへの貢献を歓迎します!以下の方法で貢献できま
|
||||
3. **変更を提出**:変更をコミットしてプッシュします。
|
||||
4. **プルリクエストを開く**:変更内容と理由を説明します。
|
||||
|
||||
詳細なガイドラインについては、[貢献ガイド](./CONTRIBUTING.md)をご覧ください。
|
||||
詳細なガイドラインについては、[貢献ガイド](../CONTRIBUTING.md)をご覧ください。
|
||||
|
||||
ご支援と貢献に感謝します!
|
||||
|
||||
## 関連プロジェクト
|
||||
|
||||
- [one-api](https://github.com/songquanpeng/one-api):LLM APIの管理・配信システム。OpenAI、Azure、Anthropicなどの主要モデルに対応し、統一APIインターフェースを提供。APIキー管理と再配布に利用可能。
|
||||
- [one-api](https://github.com/songquanpeng/one-api):LLM API の管理・配信システム。OpenAI、Azure、Anthropic などの主要モデルに対応し、統一 API インターフェースを提供。API キー管理と再配布に利用可能。
|
||||
|
||||
# 🚀 コントリビューター
|
||||
|
||||
<a href="https://github.com/kangfenmao/cherry-studio/graphs/contributors">
|
||||
<a href="https://github.com/CherryHQ/cherry-studio/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=kangfenmao/cherry-studio" />
|
||||
</a>
|
||||
|
||||
# コミュニティ
|
||||
|
||||
[Telegram](https://t.me/CherryStudioAI) | [Email](mailto:kangfenmao@gmail.com) | [Twitter](https://x.com/kangfenmao)
|
||||
|
||||
# 📣 プロダクトハント
|
||||
|
||||
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry-studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry Studio - AI Chatbots, AI Desktop Client | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
[Telegram](https://t.me/CherryStudioAI) | [Email](mailto:support@cherry-ai.com) | [Twitter](https://x.com/kangfenmao)
|
||||
|
||||
# スポンサー
|
||||
|
||||
@@ -158,6 +139,10 @@ Cherry Studioへの貢献を歓迎します!以下の方法で貢献できま
|
||||
|
||||
[LICENSE](../LICENSE)
|
||||
|
||||
# ✉️ お問い合わせ
|
||||
|
||||
yinsenho@cherry-ai.com
|
||||
|
||||
# ⭐️ スター履歴
|
||||
|
||||
[](https://star-history.com/#kangfenmao/cherry-studio&Timeline)
|
||||
|
||||
@@ -1,22 +1,27 @@
|
||||
<h1 align="center">
|
||||
<a href="https://github.com/kangfenmao/cherry-studio/releases">
|
||||
<img src="https://github.com/kangfenmao/cherry-studio/blob/main/build/icon.png?raw=true" width="150" height="150" alt="banner" />
|
||||
<a href="https://github.com/CherryHQ/cherry-studio/releases">
|
||||
<img src="https://github.com/CherryHQ/cherry-studio/blob/main/build/icon.png?raw=true" width="150" height="150" alt="banner" />
|
||||
</a>
|
||||
</h1>
|
||||
<div align="center">
|
||||
中文 / <a href="https://github.com/kangfenmao/cherry-studio">English</a> / <a href="./README.ja.md">日本語</a>
|
||||
</div>
|
||||
<p align="center">
|
||||
<a href="https://github.com/CherryHQ/cherry-studio">English</a> | 中文 | <a href="./README.ja.md">日本語</a><br></p>
|
||||
<div align="center">
|
||||
<a href="https://trendshift.io/repositories/11772" target="_blank"><img src="https://trendshift.io/api/badge/repositories/11772" alt="kangfenmao%2Fcherry-studio | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry-studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry Studio - AI Chatbots, AI Desktop Client | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
</div>
|
||||
|
||||
# 🍒 Cherry Studio
|
||||
|
||||
Cherry Studio 是一款支持多个大语言模型(LLM)服务商的桌面客户端,兼容 Windows、Mac 和 Linux 系统。
|
||||
|
||||
👏 欢迎加入 [Telegram 群组](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQ群(1025067911)](https://qm.qq.com/q/RIBAO2pPKS)
|
||||
👏 欢迎加入 [Telegram 群组](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQ群(472019156)](https://qm.qq.com/q/CbZiBWwCXu)
|
||||
|
||||
❤️ 喜欢 Cherry Studio? 点亮小星星 🌟 或 [赞助开发者](sponsor.md)! ❤️
|
||||
|
||||
# 📖 使用教程
|
||||
|
||||
https://docs.cherry-ai.com
|
||||
|
||||
# 🌠 界面
|
||||
|
||||

|
||||
@@ -29,41 +34,43 @@ Cherry Studio 是一款支持多个大语言模型(LLM)服务商的桌面客
|
||||
|
||||
1. **多样化 LLM 服务支持**:
|
||||
|
||||
- ☁️ 支持主流 LLM 云服务:OpenAI、Gemini、Anthropic、硅基流动等
|
||||
- 🔗 集成流行 AI Web 服务:Claude、Perplexity、Poe、腾讯元宝、知乎直答等
|
||||
- 💻 支持 Ollama、LM Studio 本地模型部署
|
||||
- ☁️ 支持主流 LLM 云服务:OpenAI、Gemini、Anthropic、硅基流动等
|
||||
- 🔗 集成流行 AI Web 服务:Claude、Perplexity、Poe、腾讯元宝、知乎直答等
|
||||
- 💻 支持 Ollama、LM Studio 本地模型部署
|
||||
|
||||
2. **智能助手与对话**:
|
||||
|
||||
- 📚 内置 300+ 预配置 AI 助手
|
||||
- 🤖 支持自定义创建专属助手
|
||||
- 💬 多模型同时对话,获得多样化观点
|
||||
- 📚 内置 300+ 预配置 AI 助手
|
||||
- 🤖 支持自定义创建专属助手
|
||||
- 💬 多模型同时对话,获得多样化观点
|
||||
|
||||
3. **文档与数据处理**:
|
||||
|
||||
- 📄 支持文本、图片、Office、PDF 等多种格式
|
||||
- ☁️ WebDAV 文件管理与数据备份
|
||||
- 📊 Mermaid 图表可视化
|
||||
- 💻 代码高亮显示
|
||||
- 📄 支持文本、图片、Office、PDF 等多种格式
|
||||
- ☁️ WebDAV 文件管理与数据备份
|
||||
- 📊 Mermaid 图表可视化
|
||||
- 💻 代码高亮显示
|
||||
|
||||
4. **实用工具集成**:
|
||||
|
||||
- 🔍 全局搜索功能
|
||||
- 📝 话题管理系统
|
||||
- 🔤 AI 驱动的翻译功能
|
||||
- 🎯 拖拽排序
|
||||
- 🔌 小程序支持
|
||||
- 🔍 全局搜索功能
|
||||
- 📝 话题管理系统
|
||||
- 🔤 AI 驱动的翻译功能
|
||||
- 🎯 拖拽排序
|
||||
- 🔌 小程序支持
|
||||
- ⚙️ MCP(模型上下文协议) 服务
|
||||
|
||||
5. **优质使用体验**:
|
||||
- 🖥️ Windows、Mac、Linux 跨平台支持
|
||||
- 📦 开箱即用,无需配置环境
|
||||
- 🎨 支持明暗主题与透明窗口
|
||||
- 📝 完整的 Markdown 渲染
|
||||
- 🤲 便捷的内容分享功能
|
||||
|
||||
- 🖥️ Windows、Mac、Linux 跨平台支持
|
||||
- 📦 开箱即用,无需配置环境
|
||||
- 🎨 支持明暗主题与透明窗口
|
||||
- 📝 完整的 Markdown 渲染
|
||||
- 🤲 便捷的内容分享功能
|
||||
|
||||
# 📝 待办事项
|
||||
|
||||
- [x] 快捷弹窗 (读取剪贴板、快速提问、解释、翻译、总结)
|
||||
- [x] 快捷弹窗(读取剪贴板、快速提问、解释、翻译、总结)
|
||||
- [x] 多模型回答对比
|
||||
- [x] 支持使用服务供应商提供的 SSO 进行登入
|
||||
- [x] 全部模型支持连网(开发中...)
|
||||
@@ -76,38 +83,16 @@ Cherry Studio 是一款支持多个大语言模型(LLM)服务商的桌面客
|
||||
- [ ] 语音输入输出(AI 通话)
|
||||
- [ ] 数据备份支持自定义备份内容
|
||||
|
||||
# 🌈 主题
|
||||
|
||||
主题库:https://cherrycss.com
|
||||
Aero 主题:https://github.com/hakadao/CherryStudio-Aero
|
||||
|
||||
欢迎 PR 更多主题
|
||||
|
||||
# 🖥️ 开发
|
||||
|
||||
## IDE 设置
|
||||
|
||||
[Cursor](https://www.cursor.com/) + [ESLint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) + [Prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode)
|
||||
|
||||
## 项目设置
|
||||
|
||||
### 安装
|
||||
|
||||
```bash
|
||||
$ yarn
|
||||
```
|
||||
|
||||
### 开发
|
||||
|
||||
```bash
|
||||
$ yarn dev
|
||||
```
|
||||
|
||||
### 构建
|
||||
|
||||
```bash
|
||||
# Windows
|
||||
$ yarn build:win
|
||||
|
||||
# macOS
|
||||
$ yarn build:mac
|
||||
|
||||
# Linux
|
||||
$ yarn build:linux
|
||||
```
|
||||
参考[开发文档](dev.md)
|
||||
|
||||
# 🤝 贡献
|
||||
|
||||
@@ -128,28 +113,24 @@ $ yarn build:linux
|
||||
3. **提交更改**:提交并推送您的更改。
|
||||
4. **打开 Pull Request**:描述您的更改和原因。
|
||||
|
||||
有关更详细的指南,请参阅我们的 [贡献指南](./CONTRIBUTING.md)。
|
||||
有关更详细的指南,请参阅我们的 [贡献指南](../CONTRIBUTING.md)。
|
||||
|
||||
感谢您的支持和贡献!
|
||||
|
||||
## 相关项目
|
||||
|
||||
- [one-api](https://github.com/songquanpeng/one-api):LLM API管理及分发系统,支持OpenAI、Azure、Anthropic等主流模型,统一API接口,可用于密钥管理与二次分发。
|
||||
- [one-api](https://github.com/songquanpeng/one-api):LLM API 管理及分发系统,支持 OpenAI、Azure、Anthropic 等主流模型,统一 API 接口,可用于密钥管理与二次分发。
|
||||
|
||||
# 🚀 贡献者
|
||||
|
||||
<a href="https://github.com/kangfenmao/cherry-studio/graphs/contributors">
|
||||
<a href="https://github.com/CherryHQ/cherry-studio/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=kangfenmao/cherry-studio" />
|
||||
</a>
|
||||
<br /><br />
|
||||
|
||||
# 🌐 社区
|
||||
|
||||
[Telegram](https://t.me/CherryStudioAI) | [Email](mailto:kangfenmao@gmail.com) | [Twitter](https://x.com/kangfenmao)
|
||||
|
||||
# 📣 产品猎人
|
||||
|
||||
<a href="https://www.producthunt.com/posts/cherry-studio?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-cherry-studio" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=496640&theme=light" alt="Cherry Studio - AI Chatbots, AI Desktop Client | Product Hunt" style="width: 250px; height: 54px;" width="250" height="54" /></a>
|
||||
[Telegram](https://t.me/CherryStudioAI) | [Email](mailto:support@cherry-ai.com) | [Twitter](https://x.com/kangfenmao)
|
||||
|
||||
# ☕ 赞助
|
||||
|
||||
@@ -159,6 +140,10 @@ $ yarn build:linux
|
||||
|
||||
[LICENSE](../LICENSE)
|
||||
|
||||
# ✉️ 联系我们
|
||||
|
||||
yinsenho@cherry-ai.com
|
||||
|
||||
# ⭐️ Star 记录
|
||||
|
||||
[](https://star-history.com/#kangfenmao/cherry-studio&Timeline)
|
||||
|
||||
51
docs/dev.md
Normal file
@@ -0,0 +1,51 @@
|
||||
# 🖥️ Develop
|
||||
|
||||
## IDE Setup
|
||||
|
||||
[Cursor](https://www.cursor.com/) + [ESLint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) + [Prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode)
|
||||
|
||||
## Project Setup
|
||||
|
||||
### Install
|
||||
|
||||
```bash
|
||||
yarn
|
||||
```
|
||||
|
||||
### Development
|
||||
|
||||
### Setup Node.js
|
||||
|
||||
Download and install [Node.js v20.x.x](https://nodejs.org/en/download)
|
||||
|
||||
### Setup Yarn
|
||||
|
||||
```bash
|
||||
corepack enable
|
||||
corepack prepare yarn@4.6.0 --activate
|
||||
```
|
||||
|
||||
### Install Dependencies
|
||||
|
||||
```bash
|
||||
yarn install
|
||||
```
|
||||
|
||||
### Start
|
||||
|
||||
```bash
|
||||
yarn dev
|
||||
```
|
||||
|
||||
### Build
|
||||
|
||||
```bash
|
||||
# For windows
|
||||
$ yarn build:win
|
||||
|
||||
# For macOS
|
||||
$ yarn build:mac
|
||||
|
||||
# For Linux
|
||||
$ yarn build:linux
|
||||
```
|
||||
174
docs/technical/KnowledgeService.md
Normal file
@@ -0,0 +1,174 @@
|
||||
本文详细介绍 KnowledgeService 中用于控制并发处理项目数量和工作负载的部分。
|
||||
|
||||
## 并发控制与工作负载管理机制
|
||||
|
||||
KnowledgeService 实现了一个精细的任务队列系统,用于控制并发处理的项目数量和工作负载。这个系统主要通过以下几个关键部分实现:
|
||||
|
||||
### 1. 关键变量和限制
|
||||
|
||||
```typescript
|
||||
private workload = 0
|
||||
private processingItemCount = 0
|
||||
private knowledgeItemProcessingQueueMappingPromise: Map<LoaderTaskOfSet, () => void> = new Map()
|
||||
private static MAXIMUM_WORKLOAD = 1024 * 1024 * 80 // 约80MB
|
||||
private static MAXIMUM_PROCESSING_ITEM_COUNT = 30
|
||||
```
|
||||
|
||||
- `workload`: 跟踪当前正在处理的总工作量(以字节为单位)
|
||||
- `processingItemCount`: 跟踪当前正在处理的项目数量
|
||||
- `MAXIMUM_WORKLOAD`: 设置最大工作负载为80MB
|
||||
- `MAXIMUM_PROCESSING_ITEM_COUNT`: 设置最大并发处理项目数为30个
|
||||
|
||||
### 2. 工作负载评估
|
||||
|
||||
每个任务都有一个评估工作负载的机制,通过 `evaluateTaskWorkload` 属性来表示:
|
||||
|
||||
```typescript
|
||||
interface EvaluateTaskWorkload {
|
||||
workload: number
|
||||
}
|
||||
```
|
||||
|
||||
不同类型的任务有不同的工作负载评估方式:
|
||||
|
||||
- 文件任务:使用文件大小作为工作负载 `{ workload: file.size }`
|
||||
- URL任务:使用固定值 `{ workload: 1024 * 1024 * 2 }` (约2MB)
|
||||
- 网站地图任务:使用固定值 `{ workload: 1024 * 1024 * 20 }` (约20MB)
|
||||
- 笔记任务:使用文本内容的字节长度 `{ workload: contentBytes.length }`
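上面这组取值可以用一个示意函数来概括(`estimateWorkload` 为演示用的假设函数,并非源码中的实现),便于对照各类任务的负载估算方式:

```typescript
// 示意:按条目类型估算工作负载(假设函数,非源码实现)
type ItemType = 'file' | 'url' | 'sitemap' | 'note'

function estimateWorkload(type: ItemType, payload: { fileSize?: number; content?: string }): number {
  switch (type) {
    case 'file':
      return payload.fileSize ?? 0 // 文件任务:直接使用文件大小
    case 'url':
      return 1024 * 1024 * 2 // URL 任务:固定约 2MB
    case 'sitemap':
      return 1024 * 1024 * 20 // 网站地图任务:固定约 20MB
    case 'note':
      return Buffer.byteLength(payload.content ?? '', 'utf-8') // 笔记任务:文本内容的字节长度
    default:
      return 0
  }
}
```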
|
||||
|
||||
### 3. 任务状态管理
|
||||
|
||||
任务通过状态枚举来跟踪其生命周期:
|
||||
|
||||
```typescript
|
||||
enum LoaderTaskItemState {
|
||||
PENDING, // 等待处理
|
||||
PROCESSING, // 正在处理
|
||||
DONE // 已完成
|
||||
}
|
||||
```
|
||||
|
||||
### 4. 任务队列处理核心逻辑
|
||||
|
||||
核心的队列处理逻辑在 `processingQueueHandle` 方法中:
|
||||
|
||||
```typescript
|
||||
private processingQueueHandle() {
|
||||
const getSubtasksUntilMaximumLoad = (): QueueTaskItem[] => {
|
||||
const queueTaskList: QueueTaskItem[] = []
|
||||
that: for (const [task, resolve] of this.knowledgeItemProcessingQueueMappingPromise) {
|
||||
for (const item of task.loaderTasks) {
|
||||
if (this.maximumLoad()) {
|
||||
break that
|
||||
}
|
||||
|
||||
const { state, task: taskPromise, evaluateTaskWorkload } = item
|
||||
|
||||
if (state !== LoaderTaskItemState.PENDING) {
|
||||
continue
|
||||
}
|
||||
|
||||
const { workload } = evaluateTaskWorkload
|
||||
this.workload += workload
|
||||
this.processingItemCount += 1
|
||||
item.state = LoaderTaskItemState.PROCESSING
|
||||
queueTaskList.push({
|
||||
taskPromise: () =>
|
||||
taskPromise().then(() => {
|
||||
this.workload -= workload
|
||||
this.processingItemCount -= 1
|
||||
task.loaderTasks.delete(item)
|
||||
if (task.loaderTasks.size === 0) {
|
||||
this.knowledgeItemProcessingQueueMappingPromise.delete(task)
|
||||
resolve()
|
||||
}
|
||||
this.processingQueueHandle()
|
||||
}),
|
||||
resolve: () => {},
|
||||
evaluateTaskWorkload
|
||||
})
|
||||
}
|
||||
}
|
||||
return queueTaskList
|
||||
}
|
||||
|
||||
const subTasks = getSubtasksUntilMaximumLoad()
|
||||
if (subTasks.length > 0) {
|
||||
const subTaskPromises = subTasks.map(({ taskPromise }) => taskPromise())
|
||||
Promise.all(subTaskPromises).then(() => {
|
||||
subTasks.forEach(({ resolve }) => resolve())
|
||||
})
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
这个方法的工作流程是:
|
||||
|
||||
1. 遍历所有待处理的任务集合
|
||||
2. 对于每个任务集合中的每个子任务:
|
||||
- 检查是否已达到最大负载(通过 `maximumLoad()` 方法)
|
||||
- 如果任务状态为 PENDING,则:
|
||||
- 增加当前工作负载和处理项目计数
|
||||
- 将任务状态更新为 PROCESSING
|
||||
- 将任务添加到待执行队列
|
||||
3. 执行所有收集到的子任务
|
||||
4. 当子任务完成时:
|
||||
- 减少工作负载和处理项目计数
|
||||
- 从任务集合中移除已完成的任务
|
||||
- 如果任务集合为空,则解析相应的 Promise
|
||||
- 递归调用 `processingQueueHandle()` 以处理更多任务
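为便于理解,下面给出一个与上述机制同构的简化示意(`BoundedQueue` 为演示用的假设类,并非源码实现;源码中还维护了任务集合与 Promise 解析等细节):

```typescript
// 简化示意:同时受条目数量与工作负载双重上限约束的有界并发队列(假设实现,非源码)
type Job = { workload: number; run: () => Promise<void> }

class BoundedQueue {
  private pending: Job[] = []
  private runningCount = 0
  private runningWorkload = 0

  constructor(
    private readonly maxCount = 30,
    private readonly maxWorkload = 1024 * 1024 * 80
  ) {}

  add(job: Job) {
    this.pending.push(job)
    this.drain()
  }

  private atCapacity() {
    // 对应源码中的 maximumLoad():任一上限触发即暂停取新任务
    return this.runningCount >= this.maxCount || this.runningWorkload >= this.maxWorkload
  }

  private drain() {
    while (this.pending.length > 0 && !this.atCapacity()) {
      const job = this.pending.shift()!
      this.runningCount += 1
      this.runningWorkload += job.workload
      job.run().finally(() => {
        this.runningCount -= 1
        this.runningWorkload -= job.workload
        this.drain() // 任务完成后继续取任务,对应源码中递归调用 processingQueueHandle()
      })
    }
  }
}

// 使用示意
const queue = new BoundedQueue()
queue.add({ workload: 1024, run: async () => console.log('item processed') })
```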
|
||||
|
||||
### 5. 负载检查
|
||||
|
||||
```typescript
|
||||
private maximumLoad() {
|
||||
return (
|
||||
this.processingItemCount >= KnowledgeService.MAXIMUM_PROCESSING_ITEM_COUNT ||
|
||||
this.workload >= KnowledgeService.MAXIMUM_WORKLOAD
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
这个方法检查当前是否已达到最大负载,通过两个条件:
|
||||
|
||||
- 处理项目数量达到上限(30个)
|
||||
- 总工作负载达到上限(80MB)
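用几组边界值可以直观看到两个条件各自何时触发(数值仅为演示,`isMax` 为示意函数,非源码):

```typescript
// 示意:达到任一上限即视为满载
const MAXIMUM_PROCESSING_ITEM_COUNT = 30
const MAXIMUM_WORKLOAD = 1024 * 1024 * 80

const isMax = (count: number, workload: number) =>
  count >= MAXIMUM_PROCESSING_ITEM_COUNT || workload >= MAXIMUM_WORKLOAD

console.log(isMax(30, 0)) // true:条目数达到 30
console.log(isMax(5, 1024 * 1024 * 80)) // true:负载达到 80MB
console.log(isMax(10, 1024 * 1024)) // false:均未达到上限
```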
|
||||
|
||||
### 6. 任务添加与执行流程
|
||||
|
||||
当添加新任务时,流程如下:
|
||||
|
||||
1. 创建任务(根据类型不同创建不同的任务)
|
||||
2. 通过 `appendProcessingQueue` 将任务添加到队列
|
||||
3. 调用 `processingQueueHandle` 开始处理队列中的任务
|
||||
|
||||
```typescript
|
||||
private appendProcessingQueue(task: LoaderTask): Promise<LoaderReturn> {
|
||||
return new Promise((resolve) => {
|
||||
this.knowledgeItemProcessingQueueMappingPromise.set(loaderTaskIntoOfSet(task), () => {
|
||||
resolve(task.loaderDoneReturn!)
|
||||
})
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
## 并发控制的优势
|
||||
|
||||
这种并发控制机制有几个重要优势:
|
||||
|
||||
1. **资源使用优化**:通过限制同时处理的项目数量和总工作负载,避免系统资源过度使用
|
||||
2. **自动调节**:当任务完成时,会自动从队列中获取新任务,保持资源的高效利用
|
||||
3. **灵活性**:不同类型的任务有不同的工作负载评估,更准确地反映实际资源需求
|
||||
4. **可靠性**:通过状态管理和Promise解析机制,确保任务正确完成并通知调用者
|
||||
|
||||
## 实际应用场景
|
||||
|
||||
这种并发控制在处理大量数据时特别有用,例如:
|
||||
|
||||
- 导入大型目录时,可能包含数百个文件
|
||||
- 处理大型网站地图,可能包含大量URL
|
||||
- 处理多个用户同时添加知识库项目的请求
|
||||
|
||||
通过这种机制,系统可以平滑地处理大量请求,避免资源耗尽,同时保持良好的响应性。
|
||||
|
||||
总结来说,KnowledgeService 实现了一个复杂而高效的任务队列系统,通过精确控制并发处理的项目数量和工作负载,确保系统在处理大量数据时保持稳定和高效。
|
||||
@@ -27,7 +27,6 @@ files:
|
||||
- '!node_modules/@tavily/core/node_modules/js-tiktoken'
|
||||
- '!node_modules/pdf-parse/lib/pdf.js/{v1.9.426,v1.10.88,v2.0.550}'
|
||||
- '!node_modules/mammoth/{mammoth.browser.js,mammoth.browser.min.js}'
|
||||
- '!node_modules/html2canvas/dist/{html2canvas.min.js,html2canvas.esm.js}'
|
||||
asarUnpack:
|
||||
- resources/**
|
||||
- '**/*.{node,dll,metal,exp,lib}'
|
||||
@@ -73,17 +72,18 @@ linux:
|
||||
maintainer: electronjs.org
|
||||
category: Utility
|
||||
publish:
|
||||
provider: generic
|
||||
url: https://cherrystudio.ocool.online
|
||||
# provider: generic
|
||||
# url: https://cherrystudio.ocool.online
|
||||
provider: github
|
||||
repo: cherry-studio
|
||||
owner: CherryHQ
|
||||
electronDownload:
|
||||
mirror: https://npmmirror.com/mirrors/electron/
|
||||
afterPack: scripts/after-pack.js
|
||||
afterSign: scripts/notarize.js
|
||||
releaseInfo:
|
||||
releaseNotes: |
|
||||
服务商新增天翼云
|
||||
助手支持使用模型图标当作头像
|
||||
网络搜索网站图标快速加载
|
||||
macOS 移除 Option 开头快捷键限制
|
||||
代理配置优化
|
||||
界面优化和错误修复
|
||||
小程序支持多开
|
||||
支持 GPT-4o 图像生成
|
||||
修复 MCP 服务器无法使用问题
|
||||
修复升级导致旧版本数据丢失问题
|
||||
|
||||
@@ -12,16 +12,18 @@ export default defineConfig({
|
||||
plugins: [
|
||||
externalizeDepsPlugin({
|
||||
exclude: [
|
||||
'@llm-tools/embedjs',
|
||||
'@llm-tools/embedjs-openai',
|
||||
'@llm-tools/embedjs-loader-web',
|
||||
'@llm-tools/embedjs-loader-markdown',
|
||||
'@llm-tools/embedjs-loader-msoffice',
|
||||
'@llm-tools/embedjs-loader-xml',
|
||||
'@llm-tools/embedjs-loader-pdf',
|
||||
'@llm-tools/embedjs-loader-sitemap',
|
||||
'@llm-tools/embedjs-libsql',
|
||||
'@llm-tools/embedjs-loader-image'
|
||||
'@cherrystudio/embedjs',
|
||||
'@cherrystudio/embedjs-openai',
|
||||
'@cherrystudio/embedjs-loader-web',
|
||||
'@cherrystudio/embedjs-loader-markdown',
|
||||
'@cherrystudio/embedjs-loader-msoffice',
|
||||
'@cherrystudio/embedjs-loader-xml',
|
||||
'@cherrystudio/embedjs-loader-pdf',
|
||||
'@cherrystudio/embedjs-loader-sitemap',
|
||||
'@cherrystudio/embedjs-libsql',
|
||||
'@cherrystudio/embedjs-loader-image',
|
||||
'p-queue',
|
||||
'webdav'
|
||||
]
|
||||
}),
|
||||
...visualizerPlugin('main')
|
||||
@@ -43,7 +45,24 @@ export default defineConfig({
|
||||
plugins: [externalizeDepsPlugin()]
|
||||
},
|
||||
renderer: {
|
||||
plugins: [react(), ...visualizerPlugin('renderer')],
|
||||
plugins: [
|
||||
react({
|
||||
babel: {
|
||||
plugins: [
|
||||
[
|
||||
'styled-components',
|
||||
{
|
||||
displayName: true, // 开发环境下启用组件名称
|
||||
fileName: false, // 不在类名中包含文件名
|
||||
pure: true, // 优化性能
|
||||
ssr: false // 不需要服务端渲染
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
}),
|
||||
...visualizerPlugin('renderer')
|
||||
],
|
||||
resolve: {
|
||||
alias: {
|
||||
'@renderer': resolve('src/renderer/src'),
|
||||
@@ -51,7 +70,7 @@ export default defineConfig({
|
||||
}
|
||||
},
|
||||
optimizeDeps: {
|
||||
exclude: ['chunk-PZ64DZKH.js', 'chunk-JMKENWIY.js', 'chunk-UXYB6GHG.js']
|
||||
exclude: ['chunk-PZ64DZKH.js', 'chunk-JMKENWIY.js', 'chunk-UXYB6GHG.js', 'chunk-ALDIEZMG.js', 'chunk-4X6ZJEXY.js']
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
68
eslint.config.mjs
Normal file
@@ -0,0 +1,68 @@
|
||||
import electronConfigPrettier from '@electron-toolkit/eslint-config-prettier'
|
||||
import tseslint from '@electron-toolkit/eslint-config-ts'
|
||||
import eslint from '@eslint/js'
|
||||
import eslintReact from '@eslint-react/eslint-plugin'
|
||||
import { defineConfig } from 'eslint/config'
|
||||
import reactHooks from 'eslint-plugin-react-hooks'
|
||||
import simpleImportSort from 'eslint-plugin-simple-import-sort'
|
||||
import unusedImports from 'eslint-plugin-unused-imports'
|
||||
|
||||
export default defineConfig([
|
||||
eslint.configs.recommended,
|
||||
tseslint.configs.recommended,
|
||||
electronConfigPrettier,
|
||||
eslintReact.configs['recommended-typescript'],
|
||||
reactHooks.configs['recommended-latest'],
|
||||
{
|
||||
plugins: {
|
||||
'simple-import-sort': simpleImportSort,
|
||||
'unused-imports': unusedImports
|
||||
},
|
||||
rules: {
|
||||
'@typescript-eslint/explicit-function-return-type': 'off',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-non-null-asserted-optional-chain': 'off',
|
||||
'simple-import-sort/imports': 'error',
|
||||
'simple-import-sort/exports': 'error',
|
||||
'unused-imports/no-unused-imports': 'error',
|
||||
'@eslint-react/no-prop-types': 'error',
|
||||
'prettier/prettier': ['error', { endOfLine: 'auto' }]
|
||||
}
|
||||
},
|
||||
// Configuration for ensuring compatibility with the original ESLint(8.x) rules
|
||||
...[
|
||||
{
|
||||
rules: {
|
||||
'@typescript-eslint/no-require-imports': 'off',
|
||||
'@typescript-eslint/no-unused-vars': ['error', { caughtErrors: 'none' }],
|
||||
'@typescript-eslint/no-unused-expressions': 'off',
|
||||
'@typescript-eslint/no-empty-object-type': 'off',
|
||||
'@eslint-react/hooks-extra/no-direct-set-state-in-use-effect': 'off',
|
||||
'@eslint-react/web-api/no-leaked-event-listener': 'off',
|
||||
'@eslint-react/web-api/no-leaked-timeout': 'off',
|
||||
'@eslint-react/no-unknown-property': 'off',
|
||||
'@eslint-react/no-nested-component-definitions': 'off',
|
||||
'@eslint-react/dom/no-dangerously-set-innerhtml': 'off',
|
||||
'@eslint-react/no-array-index-key': 'off',
|
||||
'@eslint-react/no-unstable-default-props': 'off',
|
||||
'@eslint-react/no-unstable-context-value': 'off',
|
||||
'@eslint-react/hooks-extra/prefer-use-state-lazy-initialization': 'off',
|
||||
'@eslint-react/hooks-extra/no-unnecessary-use-prefix': 'off',
|
||||
'@eslint-react/no-children-to-array': 'off'
|
||||
}
|
||||
}
|
||||
],
|
||||
{
|
||||
ignores: [
|
||||
'node_modules/**',
|
||||
'build/**',
|
||||
'dist/**',
|
||||
'out/**',
|
||||
'local/**',
|
||||
'.yarn/**',
|
||||
'.gitignore',
|
||||
'scripts/cloudflare-worker.js',
|
||||
'src/main/integration/nutstore/sso/lib/**'
|
||||
]
|
||||
}
|
||||
])
|
||||
130
package.json
@@ -1,11 +1,11 @@
|
||||
{
|
||||
"name": "CherryStudio",
|
||||
"version": "1.0.2",
|
||||
"version": "1.1.17",
|
||||
"private": true,
|
||||
"description": "A powerful AI assistant for producer.",
|
||||
"main": "./out/main/index.js",
|
||||
"author": "kangfenmao@qq.com",
|
||||
"homepage": "https://github.com/kangfenmao/cherry-studio",
|
||||
"author": "support@cherry-ai.com",
|
||||
"homepage": "https://github.com/CherryHQ/cherry-studio",
|
||||
"workspaces": {
|
||||
"packages": [
|
||||
"local",
|
||||
@@ -18,16 +18,10 @@
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"format": "prettier --write .",
|
||||
"lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix",
|
||||
"typecheck:node": "tsc --noEmit -p tsconfig.node.json --composite false",
|
||||
"typecheck:web": "tsc --noEmit -p tsconfig.web.json --composite false",
|
||||
"typecheck": "npm run typecheck:node && npm run typecheck:web",
|
||||
"start": "electron-vite preview",
|
||||
"dev": "electron-vite dev",
|
||||
"build:check": "yarn typecheck",
|
||||
"build": "npm run typecheck && electron-vite build",
|
||||
"postinstall": "electron-builder install-app-deps",
|
||||
"build:check": "yarn test && yarn typecheck && yarn check:i18n",
|
||||
"build:unpack": "dotenv npm run build && electron-builder --dir",
|
||||
"build:win": "dotenv npm run build && electron-builder --win",
|
||||
"build:win:x64": "dotenv npm run build && electron-builder --win --x64",
|
||||
@@ -39,57 +33,81 @@
|
||||
"build:linux:x64": "dotenv electron-vite build && electron-builder --linux --x64",
|
||||
"build:npm": "node scripts/build-npm.js",
|
||||
"release": "node scripts/version.js",
|
||||
"publish": "yarn release patch push",
|
||||
"publish": "yarn build:check && yarn release patch push",
|
||||
"pulish:artifacts": "cd packages/artifacts && npm publish && cd -",
|
||||
"generate:agents": "yarn workspace @cherry-studio/database agents",
|
||||
"generate:icons": "electron-icon-builder --input=./build/logo.png --output=build",
|
||||
"analyze:renderer": "VISUALIZER_RENDERER=true yarn build",
|
||||
"analyze:main": "VISUALIZER_MAIN=true yarn build",
|
||||
"check": "node scripts/check-i18n.js"
|
||||
"typecheck": "npm run typecheck:node && npm run typecheck:web",
|
||||
"typecheck:node": "tsc --noEmit -p tsconfig.node.json --composite false",
|
||||
"typecheck:web": "tsc --noEmit -p tsconfig.web.json --composite false",
|
||||
"check:i18n": "node scripts/check-i18n.js",
|
||||
"test": "npx -y tsx --test src/**/*.test.ts",
|
||||
"format": "prettier --write .",
|
||||
"lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix",
|
||||
"postinstall": "electron-builder install-app-deps",
|
||||
"prepare": "husky"
|
||||
},
|
||||
"dependencies": {
|
||||
"@electron-toolkit/preload": "^3.0.0",
|
||||
"@cherrystudio/embedjs": "^0.1.28",
|
||||
"@cherrystudio/embedjs-libsql": "^0.1.28",
|
||||
"@cherrystudio/embedjs-loader-csv": "^0.1.28",
|
||||
"@cherrystudio/embedjs-loader-image": "^0.1.28",
|
||||
"@cherrystudio/embedjs-loader-markdown": "^0.1.28",
|
||||
"@cherrystudio/embedjs-loader-msoffice": "^0.1.28",
|
||||
"@cherrystudio/embedjs-loader-pdf": "^0.1.28",
|
||||
"@cherrystudio/embedjs-loader-sitemap": "^0.1.28",
|
||||
"@cherrystudio/embedjs-loader-web": "^0.1.28",
|
||||
"@cherrystudio/embedjs-loader-xml": "^0.1.28",
|
||||
"@cherrystudio/embedjs-openai": "^0.1.28",
|
||||
"@electron-toolkit/utils": "^3.0.0",
|
||||
"@electron/notarize": "^2.5.0",
|
||||
"@google/generative-ai": "^0.21.0",
|
||||
"@llm-tools/embedjs": "patch:@llm-tools/embedjs@npm%3A0.1.28#~/.yarn/patches/@llm-tools-embedjs-npm-0.1.28-8e4393fa2d.patch",
|
||||
"@llm-tools/embedjs-libsql": "^0.1.28",
|
||||
"@llm-tools/embedjs-loader-csv": "^0.1.28",
|
||||
"@llm-tools/embedjs-loader-markdown": "patch:@llm-tools/embedjs-loader-markdown@npm%3A0.1.28#~/.yarn/patches/@llm-tools-embedjs-loader-markdown-npm-0.1.28-81647ffac6.patch",
|
||||
"@llm-tools/embedjs-loader-msoffice": "^0.1.28",
|
||||
"@llm-tools/embedjs-loader-pdf": "^0.1.28",
|
||||
"@llm-tools/embedjs-loader-sitemap": "^0.1.28",
|
||||
"@llm-tools/embedjs-loader-web": "^0.1.28",
|
||||
"@llm-tools/embedjs-loader-xml": "^0.1.28",
|
||||
"@llm-tools/embedjs-openai": "^0.1.28",
|
||||
"@google/generative-ai": "^0.24.0",
|
||||
"@langchain/community": "^0.3.36",
|
||||
"@notionhq/client": "^2.2.15",
|
||||
"@tryfabric/martian": "^1.2.4",
|
||||
"@types/react-infinite-scroll-component": "^5.0.0",
|
||||
"@xyflow/react": "^12.4.4",
|
||||
"adm-zip": "^0.5.16",
|
||||
"apache-arrow": "^18.1.0",
|
||||
"docx": "^9.0.2",
|
||||
"electron-log": "^5.1.5",
|
||||
"electron-store": "^8.2.0",
|
||||
"electron-updater": "^6.3.9",
|
||||
"electron-window-state": "^5.0.3",
|
||||
"epub": "^1.3.0",
|
||||
"epub": "patch:epub@npm%3A1.3.0#~/.yarn/patches/epub-npm-1.3.0-8325494ffe.patch",
|
||||
"fast-xml-parser": "^5.0.9",
|
||||
"fetch-socks": "^1.3.2",
|
||||
"fs-extra": "^11.2.0",
|
||||
"html2canvas": "^1.4.1",
|
||||
"markdown-it": "^14.1.0",
|
||||
"officeparser": "^4.1.1",
|
||||
"tokenx": "^0.4.1",
|
||||
"undici": "^7.3.0",
|
||||
"webdav": "4.11.4"
|
||||
"proxy-agent": "^6.5.0",
|
||||
"tar": "^7.4.3",
|
||||
"undici": "^7.4.0",
|
||||
"webdav": "^5.8.0",
|
||||
"zipread": "^1.3.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@anthropic-ai/sdk": "^0.24.3",
|
||||
"@electron-toolkit/eslint-config-prettier": "^2.0.0",
|
||||
"@electron-toolkit/eslint-config-ts": "^1.0.1",
|
||||
"@agentic/exa": "^7.3.3",
|
||||
"@agentic/searxng": "^7.3.3",
|
||||
"@agentic/tavily": "^7.3.3",
|
||||
"@ant-design/v5-patch-for-react-19": "^1.0.3",
|
||||
"@anthropic-ai/sdk": "^0.38.0",
|
||||
"@electron-toolkit/eslint-config-prettier": "^3.0.0",
|
||||
"@electron-toolkit/eslint-config-ts": "^3.0.0",
|
||||
"@electron-toolkit/preload": "^3.0.0",
|
||||
"@electron-toolkit/tsconfig": "^1.0.1",
|
||||
"@emotion/is-prop-valid": "^1.3.1",
|
||||
"@eslint-react/eslint-plugin": "^1.36.1",
|
||||
"@eslint/js": "^9.22.0",
|
||||
"@google/genai": "^0.4.0",
|
||||
"@hello-pangea/dnd": "^16.6.0",
|
||||
"@kangfenmao/keyv-storage": "^0.1.0",
|
||||
"@llm-tools/embedjs-loader-image": "^0.1.28",
|
||||
"@modelcontextprotocol/sdk": "^1.8.0",
|
||||
"@notionhq/client": "^2.2.15",
|
||||
"@reduxjs/toolkit": "^2.2.5",
|
||||
"@tavily/core": "patch:@tavily/core@npm%3A0.3.1#~/.yarn/patches/@tavily-core-npm-0.3.1-fe69bf2bea.patch",
|
||||
"@tryfabric/martian": "^1.2.4",
|
||||
"@types/adm-zip": "^0",
|
||||
"@types/fs-extra": "^11",
|
||||
"@types/lodash": "^4.17.5",
|
||||
@@ -97,14 +115,15 @@
|
||||
"@types/md5": "^2.3.5",
|
||||
"@types/node": "^18.19.9",
|
||||
"@types/pako": "^1.0.2",
|
||||
"@types/react": "^18.2.48",
|
||||
"@types/react-dom": "^18.2.18",
|
||||
"@types/react": "^19.0.12",
|
||||
"@types/react-dom": "^19.0.4",
|
||||
"@types/react-infinite-scroll-component": "^5.0.0",
|
||||
"@types/tinycolor2": "^1",
|
||||
"@vitejs/plugin-react": "^4.2.1",
|
||||
"antd": "^5.22.5",
|
||||
"applescript": "^1.0.0",
|
||||
"axios": "^1.7.3",
|
||||
"babel-plugin-styled-components": "^2.1.4",
|
||||
"browser-image-compression": "^2.0.2",
|
||||
"dayjs": "^1.11.11",
|
||||
"dexie": "^4.0.8",
|
||||
@@ -117,18 +136,23 @@
|
||||
"electron-vite": "^2.3.0",
|
||||
"emittery": "^1.0.3",
|
||||
"emoji-picker-element": "^1.22.1",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-plugin-react": "^7.34.3",
|
||||
"eslint-plugin-react-hooks": "^4.6.2",
|
||||
"eslint": "^9.22.0",
|
||||
"eslint-plugin-react-hooks": "^5.2.0",
|
||||
"eslint-plugin-simple-import-sort": "^12.1.1",
|
||||
"eslint-plugin-unused-imports": "^4.0.0",
|
||||
"eslint-plugin-unused-imports": "^4.1.4",
|
||||
"html-to-image": "^1.11.13",
|
||||
"husky": "^9.1.7",
|
||||
"i18next": "^23.11.5",
|
||||
"lint-staged": "^15.5.0",
|
||||
"lodash": "^4.17.21",
|
||||
"mime": "^4.0.4",
|
||||
"npx-scope-finder": "^1.2.0",
|
||||
"openai": "patch:openai@npm%3A4.77.3#~/.yarn/patches/openai-npm-4.77.3-59c6d42e7a.patch",
|
||||
"prettier": "^3.2.4",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"p-queue": "^8.1.0",
|
||||
"prettier": "^3.5.3",
|
||||
"rc-virtual-list": "^3.18.5",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"react-hotkeys-hook": "^4.6.1",
|
||||
"react-i18next": "^14.1.2",
|
||||
"react-infinite-scroll-component": "^6.1.0",
|
||||
@@ -142,6 +166,7 @@
|
||||
"rehype-katex": "^7.0.1",
|
||||
"rehype-mathjax": "^7.0.0",
|
||||
"rehype-raw": "^7.0.0",
|
||||
"remark-cjk-friendly": "^1.1.0",
|
||||
"remark-gfm": "^4.0.0",
|
||||
"remark-math": "^6.0.0",
|
||||
"rollup-plugin-visualizer": "^5.12.0",
|
||||
@@ -150,19 +175,26 @@
|
||||
"string-width": "^7.2.0",
|
||||
"styled-components": "^6.1.11",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"tokenx": "^0.4.1",
|
||||
"typescript": "^5.6.2",
|
||||
"uuid": "^10.0.0",
|
||||
"vite": "^5.0.12"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^17.0.0 || ^18.0.0",
|
||||
"react-dom": "^17.0.0 || ^18.0.0"
|
||||
},
|
||||
"resolutions": {
|
||||
"pdf-parse@npm:1.1.1": "patch:pdf-parse@npm%3A1.1.1#~/.yarn/patches/pdf-parse-npm-1.1.1-04a6109b2a.patch",
|
||||
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
|
||||
"@langchain/openai@npm:>=0.1.0 <0.4.0": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
|
||||
"openai@npm:^4.77.0": "patch:openai@npm%3A4.77.3#~/.yarn/patches/openai-npm-4.77.3-59c6d42e7a.patch"
|
||||
"openai@npm:^4.77.0": "patch:openai@npm%3A4.77.3#~/.yarn/patches/openai-npm-4.77.3-59c6d42e7a.patch",
|
||||
"pkce-challenge@npm:^4.1.0": "patch:pkce-challenge@npm%3A4.1.0#~/.yarn/patches/pkce-challenge-npm-4.1.0-fbc51695a3.patch"
|
||||
},
|
||||
"packageManager": "yarn@4.6.0"
|
||||
"packageManager": "yarn@4.6.0",
|
||||
"lint-staged": {
|
||||
"*.{js,jsx,ts,tsx,cjs,mjs,cts,mts}": [
|
||||
"prettier --write",
|
||||
"eslint --fix"
|
||||
],
|
||||
"*.{json,md,yml,yaml,css,scss,html}": [
|
||||
"prettier --write"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@cherry-studio/database",
|
||||
"packageManager": "yarn@4.3.1",
|
||||
"packageManager": "yarn@4.6.0",
|
||||
"dependencies": {
|
||||
"csv-parser": "^3.0.0",
|
||||
"sqlite3": "^5.1.7"
|
||||
|
||||
@@ -22,6 +22,7 @@ export const textExts = [
|
||||
'.org', // org-mode 文件
|
||||
'.wiki', // VimWiki 文件
|
||||
'.tex', // LaTeX 文件
|
||||
'.bib', // BibTeX 文件
|
||||
'.srt', // 字幕文件
|
||||
'.xhtml', // XHTML 文件
|
||||
'.nfo', // 信息文件(主要用于场景发布)
|
||||
@@ -87,7 +88,7 @@ export const textExts = [
|
||||
'.ctp', // CakePHP 视图文件
|
||||
'.cfm', // ColdFusion 标记语言文件
|
||||
'.cfc', // ColdFusion 组件文件
|
||||
'.m', // Objective-C 源文件
|
||||
'.m', // Objective-C 或 MATLAB 源文件
|
||||
'.mm', // Objective-C++ 源文件
|
||||
'.gradle', // Gradle 构建文件
|
||||
'.groovy', // Groovy 源文件
|
||||
@@ -102,7 +103,35 @@ export const textExts = [
|
||||
'.cxx', // C++ 源文件
|
||||
'.cppm', // C++20 模块接口文件
|
||||
'.ipp', // 模板实现文件
|
||||
'.ixx' // C++20 模块实现文件
|
||||
'.ixx', // C++20 模块实现文件
|
||||
'.f90', // Fortran 90 源文件
|
||||
'.f', // Fortran 固定格式源代码文件
|
||||
'.f03', // Fortran 2003+ 源代码文件
|
||||
'.ahk', // AutoHotKey 语言文件
|
||||
'.tcl', // Tcl 脚本
|
||||
'.do', // Questa 或 Modelsim Tcl 脚本
|
||||
'.v', // Verilog 源文件
|
||||
'.sv', // SystemVerilog 源文件
|
||||
'.svh', // SystemVerilog 头文件
|
||||
'.vhd', // VHDL 源文件
|
||||
'.vhdl', // VHDL 源文件
|
||||
'.lef', // Library Exchange Format
|
||||
'.def', // Design Exchange Format
|
||||
'.edif', // Electronic Design Interchange Format
|
||||
'.sdf', // Standard Delay Format
|
||||
'.sdc', // Synopsys Design Constraints
|
||||
'.xdc', // Xilinx Design Constraints
|
||||
'.rpt', // 报告文件
|
||||
'.lisp', // Lisp 脚本
|
||||
'.il', // Cadence SKILL 脚本
|
||||
'.ils', // Cadence SKILL++ 脚本
|
||||
'.sp', // SPICE netlist 文件
|
||||
'.spi', // SPICE netlist 文件
|
||||
'.cir', // SPICE netlist 文件
|
||||
'.net', // SPICE netlist 文件
|
||||
'.scs', // Spectre netlist 文件
|
||||
'.asc', // LTspice netlist schematic 文件
|
||||
'.tf' // Technology File
|
||||
]
|
||||
|
||||
export const ZOOM_SHORTCUTS = [
|
||||
|
||||
1
packages/shared/config/nutstore.ts
Normal file
@@ -0,0 +1 @@
|
||||
export const NUTSTORE_HOST = 'https://dav.jianguoyun.com/dav'
|
||||
35
resources/scripts/download.js
Normal file
@@ -0,0 +1,35 @@
|
||||
const https = require('https')
|
||||
const fs = require('fs')
|
||||
|
||||
/**
|
||||
* Downloads a file from a URL with redirect handling
|
||||
* @param {string} url The URL to download from
|
||||
* @param {string} destinationPath The path to save the file to
|
||||
* @returns {Promise<void>} Promise that resolves when download is complete
|
||||
*/
|
||||
async function downloadWithRedirects(url, destinationPath) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const request = (url) => {
|
||||
https
|
||||
.get(url, (response) => {
|
||||
if (response.statusCode == 301 || response.statusCode == 302) {
|
||||
request(response.headers.location)
|
||||
return
|
||||
}
|
||||
if (response.statusCode !== 200) {
|
||||
reject(new Error(`Download failed: ${response.statusCode} ${response.statusMessage}`))
|
||||
return
|
||||
}
|
||||
const file = fs.createWriteStream(destinationPath)
|
||||
response.pipe(file)
|
||||
file.on('finish', () => resolve())
|
||||
})
|
||||
.on('error', (err) => {
|
||||
reject(err)
|
||||
})
|
||||
}
|
||||
request(url)
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = { downloadWithRedirects }
|
||||
171
resources/scripts/install-bun.js
Normal file
@@ -0,0 +1,171 @@
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const os = require('os')
|
||||
const { execSync } = require('child_process')
|
||||
const AdmZip = require('adm-zip')
|
||||
const { downloadWithRedirects } = require('./download')
|
||||
|
||||
// Base URL for downloading bun binaries
|
||||
const BUN_RELEASE_BASE_URL = 'https://github.com/oven-sh/bun/releases/download'
|
||||
const DEFAULT_BUN_VERSION = '1.2.5' // Default fallback version
|
||||
|
||||
// Mapping of platform+arch to binary package name
|
||||
const BUN_PACKAGES = {
|
||||
'darwin-arm64': 'bun-darwin-aarch64.zip',
|
||||
'darwin-x64': 'bun-darwin-x64.zip',
|
||||
'win32-x64': 'bun-windows-x64.zip',
|
||||
'win32-x64-baseline': 'bun-windows-x64-baseline.zip',
|
||||
'linux-x64': 'bun-linux-x64.zip',
|
||||
'linux-x64-baseline': 'bun-linux-x64-baseline.zip',
|
||||
'linux-arm64': 'bun-linux-aarch64.zip',
|
||||
// MUSL variants
|
||||
'linux-musl-x64': 'bun-linux-x64-musl.zip',
|
||||
'linux-musl-x64-baseline': 'bun-linux-x64-musl-baseline.zip',
|
||||
'linux-musl-arm64': 'bun-linux-aarch64-musl.zip'
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads and extracts the bun binary for the specified platform and architecture
|
||||
* @param {string} platform Platform to download for (e.g., 'darwin', 'win32', 'linux')
|
||||
* @param {string} arch Architecture to download for (e.g., 'x64', 'arm64')
|
||||
* @param {string} version Version of bun to download
|
||||
* @param {boolean} isMusl Whether to use MUSL variant for Linux
|
||||
* @param {boolean} isBaseline Whether to use baseline variant
|
||||
*/
|
||||
async function downloadBunBinary(platform, arch, version = DEFAULT_BUN_VERSION, isMusl = false, isBaseline = false) {
|
||||
let platformKey = isMusl ? `${platform}-musl-${arch}` : `${platform}-${arch}`
|
||||
if (isBaseline) {
|
||||
platformKey += '-baseline'
|
||||
}
|
||||
const packageName = BUN_PACKAGES[platformKey]
|
||||
|
||||
if (!packageName) {
|
||||
console.error(`No binary available for ${platformKey}`)
|
||||
return false
|
||||
}
|
||||
|
||||
// Create output directory structure
|
||||
const binDir = path.join(os.homedir(), '.cherrystudio', 'bin')
|
||||
// Ensure directories exist
|
||||
fs.mkdirSync(binDir, { recursive: true })
|
||||
|
||||
// Download URL for the specific binary
|
||||
const downloadUrl = `${BUN_RELEASE_BASE_URL}/bun-v${version}/${packageName}`
|
||||
const tempdir = os.tmpdir()
|
||||
// Create a temporary file for the downloaded binary
|
||||
const tempFilename = path.join(tempdir, packageName)
|
||||
|
||||
try {
|
||||
console.log(`Downloading bun ${version} for ${platformKey}...`)
|
||||
console.log(`URL: ${downloadUrl}`)
|
||||
|
||||
// Use the new download function
|
||||
await downloadWithRedirects(downloadUrl, tempFilename)
|
||||
|
||||
// Extract the zip file using adm-zip
|
||||
console.log(`Extracting ${packageName} to ${binDir}...`)
|
||||
const zip = new AdmZip(tempFilename)
|
||||
zip.extractAllTo(tempdir, true)
|
||||
|
||||
// Move files using Node.js fs
|
||||
const sourceDir = path.join(tempdir, packageName.split('.')[0])
|
||||
const files = fs.readdirSync(sourceDir)
|
||||
|
||||
for (const file of files) {
|
||||
const sourcePath = path.join(sourceDir, file)
|
||||
const destPath = path.join(binDir, file)
|
||||
|
||||
fs.copyFileSync(sourcePath, destPath)
|
||||
fs.unlinkSync(sourcePath)
|
||||
|
||||
// Set executable permissions for non-Windows platforms
|
||||
if (platform !== 'win32') {
|
||||
try {
|
||||
// 755 permission: rwxr-xr-x
|
||||
fs.chmodSync(destPath, '755')
|
||||
} catch (error) {
|
||||
console.warn(`Warning: Failed to set executable permissions: ${error.message}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up
|
||||
fs.unlinkSync(tempFilename)
|
||||
fs.rmSync(sourceDir, { recursive: true })
|
||||
|
||||
console.log(`Successfully installed bun ${version} for ${platformKey}`)
|
||||
return true
|
||||
} catch (error) {
|
||||
console.error(`Error installing bun for ${platformKey}: ${error.message}`)
|
||||
// Clean up temporary file if it exists
|
||||
if (fs.existsSync(tempFilename)) {
|
||||
fs.unlinkSync(tempFilename)
|
||||
}
|
||||
|
||||
// Check if binDir is empty and remove it if so
|
||||
try {
|
||||
const files = fs.readdirSync(binDir)
|
||||
if (files.length === 0) {
|
||||
fs.rmSync(binDir, { recursive: true })
|
||||
console.log(`Removed empty directory: ${binDir}`)
|
||||
}
|
||||
} catch (cleanupError) {
|
||||
console.warn(`Warning: Failed to clean up directory: ${cleanupError.message}`)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detects current platform and architecture
|
||||
*/
|
||||
function detectPlatformAndArch() {
|
||||
const platform = os.platform()
|
||||
const arch = os.arch()
|
||||
const isMusl = platform === 'linux' && detectIsMusl()
|
||||
const isBaseline = platform === 'win32'
|
||||
|
||||
return { platform, arch, isMusl, isBaseline }
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to detect if running on MUSL libc
|
||||
*/
|
||||
function detectIsMusl() {
|
||||
try {
|
||||
// Simple check for Alpine Linux which uses MUSL
|
||||
const output = execSync('cat /etc/os-release').toString()
|
||||
return output.toLowerCase().includes('alpine')
|
||||
} catch (error) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Main function to install bun
|
||||
*/
|
||||
async function installBun() {
|
||||
// Get the latest version if no specific version is provided
|
||||
const version = DEFAULT_BUN_VERSION
|
||||
console.log(`Using bun version: ${version}`)
|
||||
|
||||
const { platform, arch, isMusl, isBaseline } = detectPlatformAndArch()
|
||||
|
||||
console.log(
|
||||
`Installing bun ${version} for ${platform}-${arch}${isMusl ? ' (MUSL)' : ''}${isBaseline ? ' (baseline)' : ''}...`
|
||||
)
|
||||
|
||||
await downloadBunBinary(platform, arch, version, isMusl, isBaseline)
|
||||
}
|
||||
|
||||
// Run the installation
|
||||
installBun()
|
||||
.then(() => {
|
||||
console.log('Installation successful')
|
||||
process.exit(0)
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error('Installation failed:', error)
|
||||
process.exit(1)
|
||||
})
|
||||
181
resources/scripts/install-uv.js
Normal file
@@ -0,0 +1,181 @@
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const os = require('os')
|
||||
const { execSync } = require('child_process')
|
||||
const tar = require('tar')
|
||||
const AdmZip = require('adm-zip')
|
||||
const { downloadWithRedirects } = require('./download')
|
||||
|
||||
// Base URL for downloading uv binaries
|
||||
const UV_RELEASE_BASE_URL = 'https://github.com/astral-sh/uv/releases/download'
|
||||
const DEFAULT_UV_VERSION = '0.6.6'
|
||||
|
||||
// Mapping of platform+arch to binary package name
|
||||
const UV_PACKAGES = {
|
||||
'darwin-arm64': 'uv-aarch64-apple-darwin.tar.gz',
|
||||
'darwin-x64': 'uv-x86_64-apple-darwin.tar.gz',
|
||||
'win32-arm64': 'uv-aarch64-pc-windows-msvc.zip',
|
||||
'win32-ia32': 'uv-i686-pc-windows-msvc.zip',
|
||||
'win32-x64': 'uv-x86_64-pc-windows-msvc.zip',
|
||||
'linux-arm64': 'uv-aarch64-unknown-linux-gnu.tar.gz',
|
||||
'linux-ia32': 'uv-i686-unknown-linux-gnu.tar.gz',
|
||||
'linux-ppc64': 'uv-powerpc64-unknown-linux-gnu.tar.gz',
|
||||
'linux-ppc64le': 'uv-powerpc64le-unknown-linux-gnu.tar.gz',
|
||||
'linux-s390x': 'uv-s390x-unknown-linux-gnu.tar.gz',
|
||||
'linux-x64': 'uv-x86_64-unknown-linux-gnu.tar.gz',
|
||||
'linux-armv7l': 'uv-armv7-unknown-linux-gnueabihf.tar.gz',
|
||||
// MUSL variants
|
||||
'linux-musl-arm64': 'uv-aarch64-unknown-linux-musl.tar.gz',
|
||||
'linux-musl-ia32': 'uv-i686-unknown-linux-musl.tar.gz',
|
||||
'linux-musl-x64': 'uv-x86_64-unknown-linux-musl.tar.gz',
|
||||
'linux-musl-armv6l': 'uv-arm-unknown-linux-musleabihf.tar.gz',
|
||||
'linux-musl-armv7l': 'uv-armv7-unknown-linux-musleabihf.tar.gz'
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads and extracts the uv binary for the specified platform and architecture
|
||||
* @param {string} platform Platform to download for (e.g., 'darwin', 'win32', 'linux')
|
||||
* @param {string} arch Architecture to download for (e.g., 'x64', 'arm64')
|
||||
* @param {string} version Version of uv to download
|
||||
* @param {boolean} isMusl Whether to use MUSL variant for Linux
|
||||
*/
|
||||
async function downloadUvBinary(platform, arch, version = DEFAULT_UV_VERSION, isMusl = false) {
|
||||
const platformKey = isMusl ? `${platform}-musl-${arch}` : `${platform}-${arch}`
|
||||
const packageName = UV_PACKAGES[platformKey]
|
||||
|
||||
if (!packageName) {
|
||||
console.error(`No binary available for ${platformKey}`)
|
||||
return false
|
||||
}
|
||||
|
||||
// Create output directory structure
|
||||
const binDir = path.join(os.homedir(), '.cherrystudio', 'bin')
|
||||
// Ensure directories exist
|
||||
fs.mkdirSync(binDir, { recursive: true })
|
||||
|
||||
// Download URL for the specific binary
|
||||
const downloadUrl = `${UV_RELEASE_BASE_URL}/${version}/${packageName}`
|
||||
const tempdir = os.tmpdir()
|
||||
const tempFilename = path.join(tempdir, packageName)
|
||||
|
||||
try {
|
||||
console.log(`Downloading uv ${version} for ${platformKey}...`)
|
||||
console.log(`URL: ${downloadUrl}`)
|
||||
|
||||
await downloadWithRedirects(downloadUrl, tempFilename)
|
||||
|
||||
console.log(`Extracting ${packageName} to ${binDir}...`)
|
||||
|
||||
// 根据文件扩展名选择解压方法
|
||||
if (packageName.endsWith('.zip')) {
|
||||
// 使用 adm-zip 处理 zip 文件
|
||||
const zip = new AdmZip(tempFilename)
|
||||
zip.extractAllTo(binDir, true)
|
||||
fs.unlinkSync(tempFilename)
|
||||
console.log(`Successfully installed uv ${version} for ${platform}-${arch}`)
|
||||
return true
|
||||
} else {
|
||||
// tar.gz 文件的处理保持不变
|
||||
await tar.x({
|
||||
file: tempFilename,
|
||||
cwd: tempdir,
|
||||
z: true
|
||||
})
|
||||
|
||||
// Move files using Node.js fs
|
||||
const sourceDir = path.join(tempdir, packageName.split('.')[0])
|
||||
const files = fs.readdirSync(sourceDir)
|
||||
for (const file of files) {
|
||||
const sourcePath = path.join(sourceDir, file)
|
||||
const destPath = path.join(binDir, file)
|
||||
fs.copyFileSync(sourcePath, destPath)
|
||||
fs.unlinkSync(sourcePath)
|
||||
|
||||
// Set executable permissions for non-Windows platforms
|
||||
if (platform !== 'win32') {
|
||||
try {
|
||||
fs.chmodSync(destPath, '755')
|
||||
} catch (error) {
|
||||
console.warn(`Warning: Failed to set executable permissions: ${error.message}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up
|
||||
fs.unlinkSync(tempFilename)
|
||||
fs.rmSync(sourceDir, { recursive: true })
|
||||
}
|
||||
|
||||
console.log(`Successfully installed uv ${version} for ${platform}-${arch}`)
|
||||
return true
|
||||
} catch (error) {
|
||||
console.error(`Error installing uv for ${platformKey}: ${error.message}`)
|
||||
|
||||
if (fs.existsSync(tempFilename)) {
|
||||
fs.unlinkSync(tempFilename)
|
||||
}
|
||||
|
||||
// Check if binDir is empty and remove it if so
|
||||
try {
|
||||
const files = fs.readdirSync(binDir)
|
||||
if (files.length === 0) {
|
||||
fs.rmSync(binDir, { recursive: true })
|
||||
console.log(`Removed empty directory: ${binDir}`)
|
||||
}
|
||||
} catch (cleanupError) {
|
||||
console.warn(`Warning: Failed to clean up directory: ${cleanupError.message}`)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detects current platform and architecture
|
||||
*/
|
||||
function detectPlatformAndArch() {
|
||||
const platform = os.platform()
|
||||
const arch = os.arch()
|
||||
const isMusl = platform === 'linux' && detectIsMusl()
|
||||
|
||||
return { platform, arch, isMusl }
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to detect if running on MUSL libc
|
||||
*/
|
||||
function detectIsMusl() {
|
||||
try {
|
||||
// Simple check for Alpine Linux which uses MUSL
|
||||
const output = execSync('cat /etc/os-release').toString()
|
||||
return output.toLowerCase().includes('alpine')
|
||||
} catch (error) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Main function to install uv
|
||||
*/
|
||||
async function installUv() {
|
||||
// Get the latest version if no specific version is provided
|
||||
const version = DEFAULT_UV_VERSION
|
||||
console.log(`Using uv version: ${version}`)
|
||||
|
||||
const { platform, arch, isMusl } = detectPlatformAndArch()
|
||||
|
||||
console.log(`Installing uv ${version} for ${platform}-${arch}${isMusl ? ' (MUSL)' : ''}...`)
|
||||
|
||||
await downloadUvBinary(platform, arch, version, isMusl)
|
||||
}
|
||||
|
||||
// Run the installation
|
||||
installUv()
|
||||
.then(() => {
|
||||
console.log('Installation successful')
|
||||
process.exit(0)
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error('Installation failed:', error)
|
||||
process.exit(1)
|
||||
})
|
||||
130
scripts/update-i18n.ts
Normal file
@@ -0,0 +1,130 @@
|
||||
/**
|
||||
* OCOOL_API_KEY=sk-abcxxxxxxxxxxxxxxxxxxxxxxx123 ts-node scripts/update-i18n.ts
|
||||
*/
|
||||
|
||||
// OCOOL API KEY
|
||||
const OCOOL_API_KEY = process.env.OCOOL_API_KEY
|
||||
|
||||
const INDEX = [
|
||||
// 语言的名称 代码 用来翻译的模型
|
||||
{ name: 'French', code: 'fr-fr', model: 'qwen2.5-32b-instruct' },
|
||||
{ name: 'Spanish', code: 'es-es', model: 'qwen2.5-32b-instruct' },
|
||||
{ name: 'Portuguese', code: 'pt-pt', model: 'qwen2.5-72b-instruct' },
|
||||
{ name: 'Greek', code: 'el-gr', model: 'qwen-turbo' }
|
||||
]
|
||||
|
||||
const fs = require('fs')
|
||||
import OpenAI from 'openai'
|
||||
|
||||
const zh = JSON.parse(fs.readFileSync('src/renderer/src/i18n/locales/zh-cn.json', 'utf8')) as object
|
||||
|
||||
const openai = new OpenAI({
|
||||
apiKey: OCOOL_API_KEY,
|
||||
baseURL: 'https://one.ocoolai.com/v1'
|
||||
})
|
||||
|
||||
// 递归遍历翻译
|
||||
async function translate(zh: object, obj: object, target: string, model: string, updateFile) {
|
||||
const texts: { [key: string]: string } = {}
|
||||
for (const e in zh) {
|
||||
if (typeof zh[e] == 'object') {
|
||||
// 遍历下一层
|
||||
if (!obj[e] || typeof obj[e] != 'object') obj[e] = {}
|
||||
await translate(zh[e], obj[e], target, model, updateFile)
|
||||
} else {
|
||||
// 加入到本层待翻译列表
|
||||
if (!obj[e] || typeof obj[e] != 'string') {
|
||||
texts[e] = zh[e]
|
||||
}
|
||||
}
|
||||
}
|
||||
if (Object.keys(texts).length > 0) {
|
||||
const completion = await openai.chat.completions.create({
|
||||
model: model,
|
||||
response_format: { type: 'json_object' },
|
||||
messages: [
|
||||
{
|
||||
role: 'user',
|
||||
content: `
|
||||
You are a robot specifically designed for translation tasks. As a model that has been extensively fine-tuned on Russian language corpora, you are proficient in using the Russian language.
|
||||
Now, please output the translation based on the input content. The input will include both Chinese and English key values, and you should output the corresponding key values in the Russian language.
|
||||
When translating, ensure that no key value is omitted, and maintain the accuracy and fluency of the translation. Pay attention to the capitalization rules in the output to match the source text, and especially pay attention to whether to capitalize the first letter of each word except for prepositions. For strings containing \`{{value}}\`, ensure that the format is not disrupted.
|
||||
Output in JSON.
|
||||
######################################################
|
||||
INPUT
|
||||
######################################################
|
||||
${JSON.stringify({
|
||||
confirm: '确定要备份数据吗?',
|
||||
select_model: '选择模型',
|
||||
title: '文件',
|
||||
deeply_thought: '已深度思考(用时 {{seconds}} 秒)'
|
||||
})}
|
||||
######################################################
|
||||
MAKE SURE TO OUTPUT IN Russian. DO NOT OUTPUT IN UNSPECIFIED LANGUAGE.
|
||||
######################################################
|
||||
`
|
||||
},
|
||||
{
|
||||
role: 'assistant',
|
||||
content: JSON.stringify({
|
||||
confirm: 'Подтвердите резервное копирование данных?',
|
||||
select_model: 'Выберите Модель',
|
||||
title: 'Файл',
|
||||
deeply_thought: 'Глубоко продумано (заняло {{seconds}} секунд)'
|
||||
})
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: `
|
||||
You are a robot specifically designed for translation tasks. As a model that has been extensively fine-tuned on ${target} language corpora, you are proficient in using the ${target} language.
|
||||
Now, please output the translation based on the input content. The input will include both Chinese and English key values, and you should output the corresponding key values in the ${target} language.
|
||||
When translating, ensure that no key value is omitted, and maintain the accuracy and fluency of the translation. Pay attention to the capitalization rules in the output to match the source text, and especially pay attention to whether to capitalize the first letter of each word except for prepositions. For strings containing \`{{value}}\`, ensure that the format is not disrupted.
|
||||
Output in JSON.
|
||||
######################################################
|
||||
INPUT
|
||||
######################################################
|
||||
${JSON.stringify(texts)}
|
||||
######################################################
|
||||
MAKE SURE TO OUTPUT IN ${target}. DO NOT OUTPUT IN UNSPECIFIED LANGUAGE.
|
||||
######################################################
|
||||
`
|
||||
}
|
||||
]
|
||||
})
|
||||
// 添加翻译后的键值,并打印错译漏译内容
|
||||
try {
|
||||
const result = JSON.parse(completion.choices[0].message.content!)
|
||||
for (const e in texts) {
|
||||
if (result[e] && typeof result[e] === 'string') {
|
||||
obj[e] = result[e]
|
||||
} else {
|
||||
console.log('[warning]', `missing value "${e}" in ${target} translation`)
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.log('[error]', e)
|
||||
for (const e in texts) {
|
||||
console.log('[warning]', `missing value "${e}" in ${target} translation`)
|
||||
}
|
||||
}
|
||||
}
|
||||
// 删除多余的键值
|
||||
for (const e in obj) {
|
||||
if (!zh[e]) {
|
||||
delete obj[e]
|
||||
}
|
||||
}
|
||||
// 更新文件
|
||||
updateFile()
|
||||
}
|
||||
|
||||
;(async () => {
|
||||
for (const { name, code, model } of INDEX) {
|
||||
const obj = fs.existsSync(`src/renderer/src/i18n/translate/${code}.json`)
|
||||
? JSON.parse(fs.readFileSync(`src/renderer/src/i18n/translate/${code}.json`, 'utf8'))
|
||||
: {}
|
||||
await translate(zh, obj, name, model, () => {
|
||||
fs.writeFileSync(`src/renderer/src/i18n/translate/${code}.json`, JSON.stringify(obj, null, 2), 'utf8')
|
||||
})
|
||||
}
|
||||
})()
|
||||
@@ -12,12 +12,12 @@ export const DATA_PATH = getDataPath()
|
||||
|
||||
export const titleBarOverlayDark = {
|
||||
height: 40,
|
||||
color: '#00000000',
|
||||
color: 'rgba(0,0,0,0)',
|
||||
symbolColor: '#ffffff'
|
||||
}
|
||||
|
||||
export const titleBarOverlayLight = {
|
||||
height: 40,
|
||||
color: '#00000000',
|
||||
color: 'rgba(255,255,255,0)',
|
||||
symbolColor: '#000000'
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
export const isMac = process.platform === 'darwin'
|
||||
export const isWin = process.platform === 'win32'
|
||||
export const isLinux = process.platform === 'linux'
|
||||
export const isDev = process.env.NODE_ENV === 'development'
|
||||
|
||||
24
src/main/embeddings/Embeddings.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import type { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
|
||||
import { KnowledgeBaseParams } from '@types'
|
||||
|
||||
import EmbeddingsFactory from './EmbeddingsFactory'
|
||||
|
||||
export default class Embeddings {
|
||||
private sdk: BaseEmbeddings
|
||||
constructor({ model, apiKey, apiVersion, baseURL, dimensions }: KnowledgeBaseParams) {
|
||||
this.sdk = EmbeddingsFactory.create({ model, apiKey, apiVersion, baseURL, dimensions } as KnowledgeBaseParams)
|
||||
}
|
||||
public async init(): Promise<void> {
|
||||
return this.sdk.init()
|
||||
}
|
||||
public async getDimensions(): Promise<number> {
|
||||
return this.sdk.getDimensions()
|
||||
}
|
||||
public async embedDocuments(texts: string[]): Promise<number[][]> {
|
||||
return this.sdk.embedDocuments(texts)
|
||||
}
|
||||
|
||||
public async embedQuery(text: string): Promise<number[]> {
|
||||
return this.sdk.embedQuery(text)
|
||||
}
|
||||
}
|
||||
38
src/main/embeddings/EmbeddingsFactory.ts
Normal file
@@ -0,0 +1,38 @@
import type { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
import { OpenAiEmbeddings } from '@cherrystudio/embedjs-openai'
import { AzureOpenAiEmbeddings } from '@cherrystudio/embedjs-openai/src/azure-openai-embeddings'
import { getInstanceName } from '@main/utils'
import { KnowledgeBaseParams } from '@types'

import VoyageEmbeddings from './VoyageEmbeddings'

export default class EmbeddingsFactory {
  static create({ model, apiKey, apiVersion, baseURL, dimensions }: KnowledgeBaseParams): BaseEmbeddings {
    const batchSize = 10
    if (model.includes('voyage')) {
      return new VoyageEmbeddings({
        modelName: model,
        apiKey,
        outputDimension: dimensions,
        batchSize: 8
      })
    }
    if (apiVersion !== undefined) {
      return new AzureOpenAiEmbeddings({
        azureOpenAIApiKey: apiKey,
        azureOpenAIApiVersion: apiVersion,
        azureOpenAIApiDeploymentName: model,
        azureOpenAIApiInstanceName: getInstanceName(baseURL),
        dimensions,
        batchSize
      })
    }
    return new OpenAiEmbeddings({
      model,
      apiKey,
      dimensions,
      batchSize,
      configuration: { baseURL }
    })
  }
}
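Together with the Embeddings wrapper above, this factory gives callers a single entry point regardless of provider. A minimal usage sketch, assuming KnowledgeBaseParams carries at least the fields destructured in create (the model id, endpoint and key below are placeholders):

import Embeddings from './Embeddings'

// Sketch only: the cast is needed because the full KnowledgeBaseParams type is not shown in this diff
async function embedExample() {
  const embeddings = new Embeddings({
    model: 'text-embedding-3-small', // placeholder model id
    apiKey: process.env.EMBEDDING_API_KEY ?? '',
    baseURL: 'https://api.openai.com/v1', // placeholder endpoint
    dimensions: 1536
  } as any)

  await embeddings.init()
  const vector = await embeddings.embedQuery('hello world')
  console.log(vector.length) // expected to equal the configured dimensions
}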
30
src/main/embeddings/VoyageEmbeddings.ts
Normal file
@@ -0,0 +1,30 @@
import { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
import { VoyageEmbeddings as _VoyageEmbeddings } from '@langchain/community/embeddings/voyage'

export default class VoyageEmbeddings extends BaseEmbeddings {
  private model: _VoyageEmbeddings
  constructor(private readonly configuration?: ConstructorParameters<typeof _VoyageEmbeddings>[0]) {
    super()
    if (!this.configuration) this.configuration = {}
    if (!this.configuration.modelName) this.configuration.modelName = 'voyage-3'

    if (!this.configuration.outputDimension) {
      throw new Error('You need to pass in the optional dimensions parameter for this model')
    }
    this.model = new _VoyageEmbeddings(this.configuration)
  }
  override async getDimensions(): Promise<number> {
    if (!this.configuration?.outputDimension) {
      throw new Error('You need to pass in the optional dimensions parameter for this model')
    }
    return this.configuration?.outputDimension
  }

  override async embedDocuments(texts: string[]): Promise<number[][]> {
    return this.model.embedDocuments(texts)
  }

  override async embedQuery(text: string): Promise<number[]> {
    return this.model.embedQuery(text)
  }
}
@@ -1,12 +1,14 @@
import { electronApp, optimizer } from '@electron-toolkit/utils'
import { app } from 'electron'
import installExtension, { REDUX_DEVTOOLS } from 'electron-devtools-installer'
import { replaceDevtoolsFont } from '@main/utils/windowUtil'
import { app, ipcMain } from 'electron'
import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer'

import { registerIpc } from './ipc'
import { configManager } from './services/ConfigManager'
import { CHERRY_STUDIO_PROTOCOL, handleProtocolUrl, registerProtocolClient } from './services/ProtocolClient'
import { registerShortcuts } from './services/ShortcutService'
import { TrayService } from './services/TrayService'
import { windowService } from './services/WindowService'
import { updateUserDataPath } from './utils/upgrade'

// Check for single instance lock
if (!app.requestSingleInstanceLock()) {
@@ -18,11 +20,15 @@ if (!app.requestSingleInstanceLock()) {
  // Some APIs can only be used after this event occurs.

  app.whenReady().then(async () => {
    await updateUserDataPath()

    // Set app user model id for windows
    electronApp.setAppUserModelId(import.meta.env.VITE_MAIN_BUNDLE_ID || 'com.kangfenmao.CherryStudio')

    // Mac: Hide dock icon before window creation when launch to tray is set
    const isLaunchToTray = configManager.getLaunchToTray()
    if (isLaunchToTray) {
      app.dock?.hide()
    }

    const mainWindow = windowService.createMainWindow()
    new TrayService()

@@ -39,16 +45,42 @@ if (!app.requestSingleInstanceLock()) {

    registerIpc(mainWindow, app)

    replaceDevtoolsFont(mainWindow)

    if (process.env.NODE_ENV === 'development') {
      installExtension(REDUX_DEVTOOLS)
      installExtension([REDUX_DEVTOOLS, REACT_DEVELOPER_TOOLS])
        .then((name) => console.log(`Added Extension: ${name}`))
        .catch((err) => console.log('An error occurred: ', err))
    }
    ipcMain.handle('system:getDeviceType', () => {
      return process.platform === 'darwin' ? 'mac' : process.platform === 'win32' ? 'windows' : 'linux'
    })
  })

  registerProtocolClient(app)

  // macOS specific: handle protocol when app is already running
  app.on('open-url', (event, url) => {
    event.preventDefault()
    handleProtocolUrl(url)
  })

  registerProtocolClient(app)

  // macOS specific: handle protocol when app is already running
  app.on('open-url', (event, url) => {
    event.preventDefault()
    handleProtocolUrl(url)
  })

  // Listen for second instance
  app.on('second-instance', () => {
  app.on('second-instance', (_event, argv) => {
    windowService.showMainWindow()

    // Protocol handler for Windows/Linux
    // The commandLine is an array of strings where the last item might be the URL
    const url = argv.find((arg) => arg.startsWith(CHERRY_STUDIO_PROTOCOL + '://'))
    if (url) handleProtocolUrl(url)
  })

  app.on('browser-window-created', (_, window) => {

8
src/main/integration/nutstore/sso/lib/index.d.ts
vendored
Normal file
@@ -0,0 +1,8 @@
declare function decrypt(app: string, s: string): string;

interface Secret {
  app: string;
}
declare function createOAuthUrl(secret: Secret): string;

export { type Secret, createOAuthUrl, decrypt };
9
src/main/integration/nutstore/sso/lib/index.js
Normal file
108
src/main/ipc.ts
@@ -1,6 +1,7 @@
|
||||
import fs from 'node:fs'
|
||||
import path from 'node:path'
|
||||
|
||||
import { isMac, isWin } from '@main/constant'
|
||||
import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
|
||||
import { Shortcut, ThemeMode } from '@types'
|
||||
import { BrowserWindow, ipcMain, session, shell } from 'electron'
|
||||
import log from 'electron-log'
|
||||
@@ -9,55 +10,91 @@ import { titleBarOverlayDark, titleBarOverlayLight } from './config'
|
||||
import AppUpdater from './services/AppUpdater'
|
||||
import BackupManager from './services/BackupManager'
|
||||
import { configManager } from './services/ConfigManager'
|
||||
import CopilotService from './services/CopilotService'
|
||||
import { ExportService } from './services/ExportService'
|
||||
import FileService from './services/FileService'
|
||||
import FileStorage from './services/FileStorage'
|
||||
import { GeminiService } from './services/GeminiService'
|
||||
import KnowledgeService from './services/KnowledgeService'
|
||||
import mcpService from './services/MCPService'
|
||||
import * as NutstoreService from './services/NutstoreService'
|
||||
import ObsidianVaultService from './services/ObsidianVaultService'
|
||||
import { ProxyConfig, proxyManager } from './services/ProxyManager'
|
||||
import { registerShortcuts, unregisterAllShortcuts } from './services/ShortcutService'
|
||||
import { TrayService } from './services/TrayService'
|
||||
import { windowService } from './services/WindowService'
|
||||
import { getResourcePath } from './utils'
|
||||
import { decrypt, encrypt } from './utils/aes'
|
||||
import { getFilesDir } from './utils/file'
|
||||
import { compress, decompress } from './utils/zip'
|
||||
|
||||
const fileManager = new FileStorage()
|
||||
const backupManager = new BackupManager()
|
||||
const exportService = new ExportService(fileManager)
|
||||
const obsidianVaultService = new ObsidianVaultService()
|
||||
|
||||
export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
const { autoUpdater } = new AppUpdater(mainWindow)
|
||||
const appUpdater = new AppUpdater(mainWindow)
|
||||
|
||||
ipcMain.handle('app:info', () => ({
|
||||
version: app.getVersion(),
|
||||
isPackaged: app.isPackaged,
|
||||
appPath: app.getAppPath(),
|
||||
filesPath: path.join(app.getPath('userData'), 'Data', 'Files'),
|
||||
filesPath: getFilesDir(),
|
||||
appDataPath: app.getPath('userData'),
|
||||
resourcesPath: getResourcePath(),
|
||||
logsPath: log.transports.file.getFile().path
|
||||
}))
|
||||
|
||||
ipcMain.handle('app:proxy', async (_, proxy: string) => {
|
||||
const proxyConfig: ProxyConfig =
|
||||
proxy === 'system' ? { mode: 'system' } : proxy ? { mode: 'custom', url: proxy } : { mode: 'none' }
|
||||
let proxyConfig: ProxyConfig
|
||||
|
||||
if (proxy === 'system') {
|
||||
proxyConfig = { mode: 'system' }
|
||||
} else if (proxy) {
|
||||
proxyConfig = { mode: 'custom', url: proxy }
|
||||
} else {
|
||||
proxyConfig = { mode: 'none' }
|
||||
}
|
||||
|
||||
await proxyManager.configureProxy(proxyConfig)
|
||||
})
|
||||
|
||||
ipcMain.handle('app:reload', () => mainWindow.reload())
|
||||
ipcMain.handle('open:website', (_, url: string) => shell.openExternal(url))
|
||||
|
||||
// Update
|
||||
ipcMain.handle('app:show-update-dialog', () => appUpdater.showUpdateDialog(mainWindow))
|
||||
|
||||
// language
|
||||
ipcMain.handle('app:set-language', (_, language) => {
|
||||
configManager.setLanguage(language)
|
||||
})
|
||||
|
||||
// launch on boot
|
||||
ipcMain.handle('app:set-launch-on-boot', (_, openAtLogin: boolean) => {
|
||||
// Set login item settings for windows and mac
|
||||
// linux is not supported because it requires more file operations
|
||||
if (isWin || isMac) {
|
||||
app.setLoginItemSettings({ openAtLogin })
|
||||
}
|
||||
})
|
||||
|
||||
// launch to tray
|
||||
ipcMain.handle('app:set-launch-to-tray', (_, isActive: boolean) => {
|
||||
configManager.setLaunchToTray(isActive)
|
||||
})
|
||||
|
||||
// tray
|
||||
ipcMain.handle('app:set-tray', (_, isActive: boolean) => {
|
||||
configManager.setTray(isActive)
|
||||
})
|
||||
|
||||
// to tray on close
|
||||
ipcMain.handle('app:set-tray-on-close', (_, isActive: boolean) => {
|
||||
configManager.setTrayOnClose(isActive)
|
||||
})
|
||||
|
||||
ipcMain.handle('app:restart-tray', () => TrayService.getInstance().restartTray())
|
||||
|
||||
ipcMain.handle('config:set', (_, key: string, value: any) => {
|
||||
@@ -69,8 +106,21 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
})
|
||||
|
||||
// theme
|
||||
ipcMain.handle('app:set-theme', (_, theme: ThemeMode) => {
|
||||
ipcMain.handle('app:set-theme', (event, theme: ThemeMode) => {
|
||||
if (theme === configManager.getTheme()) return
|
||||
|
||||
configManager.setTheme(theme)
|
||||
|
||||
// should sync theme change to all windows
|
||||
const senderWindowId = event.sender.id
|
||||
const windows = BrowserWindow.getAllWindows()
|
||||
// 向其他窗口广播主题变化
|
||||
windows.forEach((win) => {
|
||||
if (win.webContents.id !== senderWindowId) {
|
||||
win.webContents.send('theme:change', theme)
|
||||
}
|
||||
})
|
||||
|
||||
mainWindow?.setTitleBarOverlay &&
|
||||
mainWindow.setTitleBarOverlay(theme === 'dark' ? titleBarOverlayDark : titleBarOverlayLight)
|
||||
})
|
||||
@@ -99,9 +149,9 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
|
||||
// check for update
|
||||
ipcMain.handle('app:check-for-update', async () => {
|
||||
const update = await autoUpdater.checkForUpdates()
|
||||
const update = await appUpdater.autoUpdater.checkForUpdates()
|
||||
return {
|
||||
currentVersion: autoUpdater.currentVersion,
|
||||
currentVersion: appUpdater.autoUpdater.currentVersion,
|
||||
updateInfo: update?.updateInfo
|
||||
}
|
||||
})
|
||||
@@ -115,6 +165,9 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
ipcMain.handle('backup:restore', backupManager.restore)
|
||||
ipcMain.handle('backup:backupToWebdav', backupManager.backupToWebdav)
|
||||
ipcMain.handle('backup:restoreFromWebdav', backupManager.restoreFromWebdav)
|
||||
ipcMain.handle('backup:listWebdavFiles', backupManager.listWebdavFiles)
|
||||
ipcMain.handle('backup:checkConnection', backupManager.checkConnection)
|
||||
ipcMain.handle('backup:createDirectory', backupManager.createDirectory)
|
||||
|
||||
// file
|
||||
ipcMain.handle('file:open', fileManager.open)
|
||||
@@ -175,6 +228,7 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
ipcMain.handle('knowledge-base:add', KnowledgeService.add)
|
||||
ipcMain.handle('knowledge-base:remove', KnowledgeService.remove)
|
||||
ipcMain.handle('knowledge-base:search', KnowledgeService.search)
|
||||
ipcMain.handle('knowledge-base:rerank', KnowledgeService.rerank)
|
||||
|
||||
// window
|
||||
ipcMain.handle('window:set-minimum-size', (_, width: number, height: number) => {
|
||||
@@ -201,10 +255,48 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
ipcMain.handle('miniwindow:hide', () => windowService.hideMiniWindow())
|
||||
ipcMain.handle('miniwindow:close', () => windowService.closeMiniWindow())
|
||||
ipcMain.handle('miniwindow:toggle', () => windowService.toggleMiniWindow())
|
||||
ipcMain.handle('miniwindow:set-pin', (_, isPinned) => windowService.setPinMiniWindow(isPinned))
|
||||
|
||||
// aes
|
||||
ipcMain.handle('aes:encrypt', (_, text: string, secretKey: string, iv: string) => encrypt(text, secretKey, iv))
|
||||
ipcMain.handle('aes:decrypt', (_, encryptedData: string, iv: string, secretKey: string) =>
|
||||
decrypt(encryptedData, iv, secretKey)
|
||||
)
|
||||
|
||||
// Register MCP handlers
|
||||
ipcMain.handle('mcp:remove-server', mcpService.removeServer)
|
||||
ipcMain.handle('mcp:restart-server', mcpService.restartServer)
|
||||
ipcMain.handle('mcp:stop-server', mcpService.stopServer)
|
||||
ipcMain.handle('mcp:list-tools', mcpService.listTools)
|
||||
ipcMain.handle('mcp:call-tool', mcpService.callTool)
|
||||
ipcMain.handle('mcp:get-install-info', mcpService.getInstallInfo)
|
||||
|
||||
ipcMain.handle('app:is-binary-exist', (_, name: string) => isBinaryExists(name))
|
||||
ipcMain.handle('app:get-binary-path', (_, name: string) => getBinaryPath(name))
|
||||
ipcMain.handle('app:install-uv-binary', () => runInstallScript('install-uv.js'))
|
||||
ipcMain.handle('app:install-bun-binary', () => runInstallScript('install-bun.js'))
|
||||
|
||||
//copilot
|
||||
ipcMain.handle('copilot:get-auth-message', CopilotService.getAuthMessage)
|
||||
ipcMain.handle('copilot:get-copilot-token', CopilotService.getCopilotToken)
|
||||
ipcMain.handle('copilot:save-copilot-token', CopilotService.saveCopilotToken)
|
||||
ipcMain.handle('copilot:get-token', CopilotService.getToken)
|
||||
ipcMain.handle('copilot:logout', CopilotService.logout)
|
||||
ipcMain.handle('copilot:get-user', CopilotService.getUser)
|
||||
|
||||
// Obsidian service
|
||||
ipcMain.handle('obsidian:get-vaults', () => {
|
||||
return obsidianVaultService.getVaults()
|
||||
})
|
||||
|
||||
ipcMain.handle('obsidian:get-files', (_event, vaultName) => {
|
||||
return obsidianVaultService.getFilesByVaultName(vaultName)
|
||||
})
|
||||
|
||||
// nutstore
|
||||
ipcMain.handle('nutstore:get-sso-url', NutstoreService.getNutstoreSSOUrl)
|
||||
ipcMain.handle('nutstore:decrypt-token', (_, token: string) => NutstoreService.decryptToken(token))
|
||||
ipcMain.handle('nutstore:get-directory-contents', (_, token: string, path: string) =>
|
||||
NutstoreService.getDirectoryContents(token, path)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
import * as fs from 'node:fs'

import { JsonLoader } from '@llm-tools/embedjs'
import { JsonLoader } from '@cherrystudio/embedjs'

/**
 * Loader for note files exported from the Drafts app

@@ -1,9 +1,11 @@
|
||||
import { BaseLoader } from '@cherrystudio/embedjs-interfaces'
|
||||
import { cleanString } from '@cherrystudio/embedjs-utils'
|
||||
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'
|
||||
import { BaseLoader } from '@llm-tools/embedjs-interfaces'
|
||||
import { cleanString } from '@llm-tools/embedjs-utils'
|
||||
import { getTempDir } from '@main/utils/file'
|
||||
import Logger from 'electron-log'
|
||||
import EPub from 'epub'
|
||||
import * as fs from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
/**
|
||||
* epub 加载器的配置选项
|
||||
@@ -157,7 +159,9 @@ export class EpubLoader extends BaseLoader<Record<string, string | number | bool
|
||||
throw new Error('No content found in epub file')
|
||||
}
|
||||
|
||||
const chapterTexts: string[] = []
|
||||
// 使用临时文件而不是内存数组
|
||||
const tempFilePath = path.join(getTempDir(), `epub-${Date.now()}.txt`)
|
||||
const writeStream = fs.createWriteStream(tempFilePath)
|
||||
|
||||
// 遍历所有章节
|
||||
for (const chapter of chapters) {
|
||||
@@ -175,15 +179,31 @@ export class EpubLoader extends BaseLoader<Record<string, string | number | bool
|
||||
.trim() // 移除首尾空白
|
||||
|
||||
if (text) {
|
||||
chapterTexts.push(text)
|
||||
// 直接写入文件
|
||||
writeStream.write(text + '\n\n')
|
||||
}
|
||||
} catch (error) {
|
||||
Logger.error(`[EpubLoader] Error processing chapter ${chapter.id}:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
// 使用双换行符连接所有章节文本
|
||||
this.extractedText = chapterTexts.join('\n\n')
|
||||
// 关闭写入流
|
||||
writeStream.end()
|
||||
|
||||
// 等待写入完成
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
writeStream.on('finish', resolve)
|
||||
writeStream.on('error', reject)
|
||||
})
|
||||
|
||||
// 从临时文件读取内容
|
||||
this.extractedText = fs.readFileSync(tempFilePath, 'utf-8')
|
||||
|
||||
// 删除临时文件
|
||||
fs.unlinkSync(tempFilePath)
|
||||
|
||||
// 只添加一条完成日志
|
||||
Logger.info(`[EpubLoader] 电子书 ${this.metadata?.title || path.basename(this.filePath)} 处理完成`)
|
||||
} catch (error) {
|
||||
Logger.error('[EpubLoader] Error in extractTextFromEpub:', error)
|
||||
throw error
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import * as fs from 'node:fs'
|
||||
|
||||
import { JsonLoader, LocalPathLoader, RAGApplication, TextLoader } from '@llm-tools/embedjs'
|
||||
import type { AddLoaderReturn } from '@llm-tools/embedjs-interfaces'
|
||||
import { WebLoader } from '@llm-tools/embedjs-loader-web'
|
||||
import { JsonLoader, LocalPathLoader, RAGApplication, TextLoader } from '@cherrystudio/embedjs'
|
||||
import type { AddLoaderReturn } from '@cherrystudio/embedjs-interfaces'
|
||||
import { WebLoader } from '@cherrystudio/embedjs-loader-web'
|
||||
import { LoaderReturn } from '@shared/config/types'
|
||||
import { FileType, KnowledgeBaseParams } from '@types'
|
||||
import Logger from 'electron-log'
|
||||
@@ -11,8 +11,30 @@ import { DraftsExportLoader } from './draftsExportLoader'
import { EpubLoader } from './epubLoader'
import { OdLoader, OdType } from './odLoader'

// Loader types built into embedjs
const commonExts = ['.pdf', '.csv', '.docx', '.pptx', '.xlsx', '.md']
// Mapping from file extension to loader type
const FILE_LOADER_MAP: Record<string, string> = {
  // Built-in types
  '.pdf': 'common',
  '.csv': 'common',
  '.docx': 'common',
  '.pptx': 'common',
  '.xlsx': 'common',
  '.md': 'common',
  // OpenDocument types
  '.odt': 'od',
  '.ods': 'od',
  '.odp': 'od',
  // epub type
  '.epub': 'epub',
  // Drafts type
  '.draftsexport': 'drafts',
  // HTML types
  '.html': 'html',
  '.htm': 'html',
  // JSON type
  '.json': 'json'
  // Any other extension falls back to the text loader
}

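The map above replaces the old commonExts list with a single extension-to-loader lookup; anything not listed falls back to the text loader, as the switch in addFileLoader below shows. A small sketch of that lookup in isolation (the helper name is mine, not part of the diff):

// Hypothetical helper mirroring the lookup used in addFileLoader below
function resolveLoaderType(ext: string): string {
  return FILE_LOADER_MAP[ext.toLowerCase()] || 'text'
}

// resolveLoaderType('.EPUB') === 'epub'; resolveLoaderType('.txt') === 'text'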
export async function addOdLoader(
|
||||
ragApplication: RAGApplication,
|
||||
@@ -46,35 +68,34 @@ export async function addFileLoader(
|
||||
base: KnowledgeBaseParams,
|
||||
forceReload: boolean
|
||||
): Promise<LoaderReturn> {
|
||||
// 内置类型
|
||||
if (commonExts.includes(file.ext)) {
|
||||
const loaderReturn = await ragApplication.addLoader(
|
||||
// @ts-ignore LocalPathLoader
|
||||
new LocalPathLoader({ path: file.path, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
|
||||
// 获取文件类型,如果没有匹配则默认为文本类型
|
||||
const loaderType = FILE_LOADER_MAP[file.ext.toLowerCase()] || 'text'
|
||||
let loaderReturn: AddLoaderReturn
|
||||
|
||||
// JSON类型处理
|
||||
let jsonObject = {}
|
||||
let jsonParsed = true
|
||||
Logger.info(`[KnowledgeBase] processing file ${file.path} as ${loaderType} type`)
|
||||
switch (loaderType) {
|
||||
case 'common':
|
||||
// 内置类型处理
|
||||
loaderReturn = await ragApplication.addLoader(
|
||||
new LocalPathLoader({
|
||||
path: file.path,
|
||||
chunkSize: base.chunkSize,
|
||||
chunkOverlap: base.chunkOverlap
|
||||
}) as any,
|
||||
forceReload
|
||||
)
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
uniqueId: loaderReturn.uniqueId,
|
||||
uniqueIds: [loaderReturn.uniqueId],
|
||||
loaderType: loaderReturn.loaderType
|
||||
} as LoaderReturn
|
||||
}
|
||||
break
|
||||
|
||||
// 自定义类型
|
||||
if (['.odt', '.ods', '.odp'].includes(file.ext)) {
|
||||
const loaderReturn = await addOdLoader(ragApplication, file, base, forceReload)
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
uniqueId: loaderReturn.uniqueId,
|
||||
uniqueIds: [loaderReturn.uniqueId],
|
||||
loaderType: loaderReturn.loaderType
|
||||
} as LoaderReturn
|
||||
}
|
||||
|
||||
// epub 文件处理
|
||||
if (file.ext === '.epub') {
|
||||
const loaderReturn = await ragApplication.addLoader(
|
||||
case 'od':
|
||||
// OD类型处理
|
||||
loaderReturn = await addOdLoader(ragApplication, file, base, forceReload)
|
||||
break
|
||||
case 'epub':
|
||||
// epub类型处理
|
||||
loaderReturn = await ragApplication.addLoader(
|
||||
new EpubLoader({
|
||||
filePath: file.path,
|
||||
chunkSize: base.chunkSize ?? 1000,
|
||||
@@ -82,73 +103,51 @@ export async function addFileLoader(
|
||||
}) as any,
|
||||
forceReload
|
||||
)
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
uniqueId: loaderReturn.uniqueId,
|
||||
uniqueIds: [loaderReturn.uniqueId],
|
||||
loaderType: loaderReturn.loaderType
|
||||
} as LoaderReturn
|
||||
}
|
||||
break
|
||||
|
||||
// DraftsExport类型 (file.ext会自动转换成小写)
|
||||
if (['.draftsexport'].includes(file.ext)) {
|
||||
const loaderReturn = await ragApplication.addLoader(new DraftsExportLoader(file.path) as any, forceReload)
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
uniqueId: loaderReturn.uniqueId,
|
||||
uniqueIds: [loaderReturn.uniqueId],
|
||||
loaderType: loaderReturn.loaderType
|
||||
}
|
||||
}
|
||||
case 'drafts':
|
||||
// Drafts类型处理
|
||||
loaderReturn = await ragApplication.addLoader(new DraftsExportLoader(file.path) as any, forceReload)
|
||||
break
|
||||
|
||||
const fileContent = fs.readFileSync(file.path, 'utf-8')
|
||||
|
||||
// HTML类型
|
||||
if (['.html', '.htm'].includes(file.ext)) {
|
||||
const loaderReturn = await ragApplication.addLoader(
|
||||
case 'html':
|
||||
// HTML类型处理
|
||||
loaderReturn = await ragApplication.addLoader(
|
||||
new WebLoader({
|
||||
urlOrContent: fileContent,
|
||||
urlOrContent: fs.readFileSync(file.path, 'utf-8'),
|
||||
chunkSize: base.chunkSize,
|
||||
chunkOverlap: base.chunkOverlap
|
||||
}) as any,
|
||||
forceReload
|
||||
)
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
uniqueId: loaderReturn.uniqueId,
|
||||
uniqueIds: [loaderReturn.uniqueId],
|
||||
loaderType: loaderReturn.loaderType
|
||||
}
|
||||
}
|
||||
break
|
||||
|
||||
// JSON类型
|
||||
if (['.json'].includes(file.ext)) {
|
||||
let jsonObject = {}
|
||||
let jsonParsed = true
|
||||
case 'json':
|
||||
try {
|
||||
jsonObject = JSON.parse(fileContent)
|
||||
jsonObject = JSON.parse(fs.readFileSync(file.path, 'utf-8'))
|
||||
} catch (error) {
|
||||
jsonParsed = false
|
||||
Logger.warn('[KnowledgeBase] failed parsing json file, failling back to text processing:', file.path, error)
|
||||
}
|
||||
if (jsonParsed) {
|
||||
const loaderReturn = await ragApplication.addLoader(new JsonLoader({ object: jsonObject }))
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
uniqueId: loaderReturn.uniqueId,
|
||||
uniqueIds: [loaderReturn.uniqueId],
|
||||
loaderType: loaderReturn.loaderType
|
||||
}
|
||||
}
|
||||
Logger.warn('[KnowledgeBase] failed parsing json file, falling back to text processing:', file.path, error)
|
||||
}
|
||||
|
||||
// 文本类型
|
||||
const loaderReturn = await ragApplication.addLoader(
|
||||
new TextLoader({ text: fileContent, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
|
||||
if (jsonParsed) {
|
||||
loaderReturn = await ragApplication.addLoader(new JsonLoader({ object: jsonObject }), forceReload)
|
||||
break
|
||||
}
|
||||
// fallthrough - JSON 解析失败时作为文本处理
|
||||
default:
|
||||
// 文本类型处理(默认)
|
||||
// 如果是其他文本类型且尚未读取文件,则读取文件
|
||||
loaderReturn = await ragApplication.addLoader(
|
||||
new TextLoader({
|
||||
text: fs.readFileSync(file.path, 'utf-8'),
|
||||
chunkSize: base.chunkSize,
|
||||
chunkOverlap: base.chunkOverlap
|
||||
}) as any,
|
||||
forceReload
|
||||
)
|
||||
|
||||
Logger.info('[KnowledgeBase] processing file', file.path)
|
||||
break
|
||||
}
|
||||
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { BaseLoader } from '@cherrystudio/embedjs-interfaces'
|
||||
import { cleanString } from '@cherrystudio/embedjs-utils'
|
||||
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'
|
||||
import { BaseLoader } from '@llm-tools/embedjs-interfaces'
|
||||
import { cleanString } from '@llm-tools/embedjs-utils'
|
||||
import md5 from 'md5'
|
||||
import { OfficeParserConfig, parseOfficeAsync } from 'officeparser'
|
||||
|
||||
|
||||
31
src/main/reranker/BaseReranker.ts
Normal file
@@ -0,0 +1,31 @@
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import { KnowledgeBaseParams } from '@types'

export default abstract class BaseReranker {
  protected base: KnowledgeBaseParams
  constructor(base: KnowledgeBaseParams) {
    if (!base.rerankModel) {
      throw new Error('Rerank model is required')
    }
    this.base = base
  }
  abstract rerank(query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]>

  public defaultHeaders() {
    return {
      Authorization: `Bearer ${this.base.rerankApiKey}`,
      'Content-Type': 'application/json'
    }
  }

  public formatErrorMessage(url: string, error: any, requestBody: any) {
    const errorDetails = {
      url: url,
      message: error.message,
      status: error.response?.status,
      statusText: error.response?.statusText,
      requestBody: requestBody
    }
    return JSON.stringify(errorDetails, null, 2)
  }
}
14
src/main/reranker/DefaultReranker.ts
Normal file
@@ -0,0 +1,14 @@
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import { KnowledgeBaseParams } from '@types'

import BaseReranker from './BaseReranker'

export default class DefaultReranker extends BaseReranker {
  constructor(base: KnowledgeBaseParams) {
    super(base)
  }

  async rerank(): Promise<ExtractChunkData[]> {
    throw new Error('Method not implemented.')
  }
}
56
src/main/reranker/JinaReranker.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
|
||||
import { KnowledgeBaseParams } from '@types'
|
||||
import axios from 'axios'
|
||||
|
||||
import BaseReranker from './BaseReranker'
|
||||
|
||||
export default class JinaReranker extends BaseReranker {
|
||||
constructor(base: KnowledgeBaseParams) {
|
||||
super(base)
|
||||
}
|
||||
|
||||
public rerank = async (query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> => {
|
||||
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
|
||||
? this.base.rerankBaseURL.slice(0, -1)
|
||||
: this.base.rerankBaseURL
|
||||
|
||||
// 必须携带/v1,否则会404
|
||||
if (baseURL && !baseURL.endsWith('/v1')) {
|
||||
baseURL = `${baseURL}/v1`
|
||||
}
|
||||
|
||||
const url = `${baseURL}/rerank`
|
||||
|
||||
const requestBody = {
|
||||
model: this.base.rerankModel,
|
||||
query,
|
||||
documents: searchResults.map((doc) => doc.pageContent),
|
||||
top_n: this.base.topN
|
||||
}
|
||||
|
||||
try {
|
||||
const { data } = await axios.post(url, requestBody, { headers: this.defaultHeaders() })
|
||||
|
||||
const rerankResults = data.results
|
||||
console.log(rerankResults)
|
||||
const resultMap = new Map(rerankResults.map((result: any) => [result.index, result.relevance_score || 0]))
|
||||
return searchResults
|
||||
.map((doc: ExtractChunkData, index: number) => {
|
||||
const score = resultMap.get(index)
|
||||
if (score === undefined) return undefined
|
||||
|
||||
return {
|
||||
...doc,
|
||||
score
|
||||
}
|
||||
})
|
||||
.filter((doc): doc is ExtractChunkData => doc !== undefined)
|
||||
.sort((a, b) => b.score - a.score)
|
||||
} catch (error: any) {
|
||||
const errorDetails = this.formatErrorMessage(url, error, requestBody)
|
||||
|
||||
console.error('Jina Reranker API Error:', errorDetails)
|
||||
throw new Error(`重排序请求失败: ${error.message}\n请求详情: ${errorDetails}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
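The same baseURL normalization (strip a trailing slash, then force a /v1 suffix because the endpoint otherwise returns 404, per the comment in the diff) is repeated in JinaReranker, SiliconFlowReranker and VoyageReranker below. Extracted as a standalone sketch for clarity (the helper name is mine, not part of the diff):

// Hypothetical helper equivalent to the inline normalization in each reranker
function normalizeRerankBaseURL(rerankBaseURL?: string): string | undefined {
  if (!rerankBaseURL) return rerankBaseURL
  let baseURL = rerankBaseURL.endsWith('/') ? rerankBaseURL.slice(0, -1) : rerankBaseURL
  if (!baseURL.endsWith('/v1')) baseURL = `${baseURL}/v1`
  return baseURL
}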
15
src/main/reranker/Reranker.ts
Normal file
@@ -0,0 +1,15 @@
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import { KnowledgeBaseParams } from '@types'

import BaseReranker from './BaseReranker'
import RerankerFactory from './RerankerFactory'

export default class Reranker {
  private sdk: BaseReranker
  constructor(base: KnowledgeBaseParams) {
    this.sdk = RerankerFactory.create(base)
  }
  public async rerank(query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> {
    return this.sdk.rerank(query, searchResults)
  }
}
20
src/main/reranker/RerankerFactory.ts
Normal file
@@ -0,0 +1,20 @@
import { KnowledgeBaseParams } from '@types'

import BaseReranker from './BaseReranker'
import DefaultReranker from './DefaultReranker'
import JinaReranker from './JinaReranker'
import SiliconFlowReranker from './SiliconFlowReranker'
import VoyageReranker from './VoyageReranker'

export default class RerankerFactory {
  static create(base: KnowledgeBaseParams): BaseReranker {
    if (base.rerankModelProvider === 'silicon') {
      return new SiliconFlowReranker(base)
    } else if (base.rerankModelProvider === 'jina') {
      return new JinaReranker(base)
    } else if (base.rerankModelProvider === 'voyageai') {
      return new VoyageReranker(base)
    }
    return new DefaultReranker(base)
  }
}
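The Reranker wrapper and this factory mirror the Embeddings/EmbeddingsFactory pair: the rerankModelProvider field selects a concrete implementation, and anything else gets DefaultReranker, which simply throws. A hedged usage sketch (provider, model and key are placeholders; searchResults is whatever the knowledge-base search returned):

import Reranker from './Reranker'

// Sketch only: the cast is needed because the full KnowledgeBaseParams type is not shown in this diff
async function rerankExample(searchResults: any[]) {
  const reranker = new Reranker({
    rerankModelProvider: 'jina', // placeholder provider
    rerankModel: 'jina-reranker-v2-base-multilingual', // placeholder model id
    rerankApiKey: process.env.RERANK_API_KEY ?? '',
    topN: 5
  } as any)

  return await reranker.rerank('what is cherry studio', searchResults)
}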
58
src/main/reranker/SiliconFlowReranker.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
|
||||
import { KnowledgeBaseParams } from '@types'
|
||||
import axios from 'axios'
|
||||
|
||||
import BaseReranker from './BaseReranker'
|
||||
|
||||
export default class SiliconFlowReranker extends BaseReranker {
|
||||
constructor(base: KnowledgeBaseParams) {
|
||||
super(base)
|
||||
}
|
||||
|
||||
public rerank = async (query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> => {
|
||||
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
|
||||
? this.base.rerankBaseURL.slice(0, -1)
|
||||
: this.base.rerankBaseURL
|
||||
|
||||
// 必须携带/v1,否则会404
|
||||
if (baseURL && !baseURL.endsWith('/v1')) {
|
||||
baseURL = `${baseURL}/v1`
|
||||
}
|
||||
|
||||
const url = `${baseURL}/rerank`
|
||||
|
||||
const requestBody = {
|
||||
model: this.base.rerankModel,
|
||||
query,
|
||||
documents: searchResults.map((doc) => doc.pageContent),
|
||||
top_n: this.base.topN,
|
||||
max_chunks_per_doc: this.base.chunkSize,
|
||||
overlap_tokens: this.base.chunkOverlap
|
||||
}
|
||||
|
||||
try {
|
||||
const { data } = await axios.post(url, requestBody, { headers: this.defaultHeaders() })
|
||||
|
||||
const rerankResults = data.results
|
||||
const resultMap = new Map(rerankResults.map((result: any) => [result.index, result.relevance_score || 0]))
|
||||
|
||||
return searchResults
|
||||
.map((doc: ExtractChunkData, index: number) => {
|
||||
const score = resultMap.get(index)
|
||||
if (score === undefined) return undefined
|
||||
|
||||
return {
|
||||
...doc,
|
||||
score
|
||||
}
|
||||
})
|
||||
.filter((doc): doc is ExtractChunkData => doc !== undefined)
|
||||
.sort((a, b) => b.score - a.score)
|
||||
} catch (error: any) {
|
||||
const errorDetails = this.formatErrorMessage(url, error, requestBody)
|
||||
|
||||
console.error('SiliconFlow Reranker API 错误:', errorDetails)
|
||||
throw new Error(`重排序请求失败: ${error.message}\n请求详情: ${errorDetails}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
62
src/main/reranker/VoyageReranker.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
|
||||
import { KnowledgeBaseParams } from '@types'
|
||||
import axios from 'axios'
|
||||
|
||||
import BaseReranker from './BaseReranker'
|
||||
|
||||
export default class VoyageReranker extends BaseReranker {
|
||||
constructor(base: KnowledgeBaseParams) {
|
||||
super(base)
|
||||
}
|
||||
|
||||
public rerank = async (query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> => {
|
||||
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
|
||||
? this.base.rerankBaseURL.slice(0, -1)
|
||||
: this.base.rerankBaseURL
|
||||
|
||||
if (baseURL && !baseURL.endsWith('/v1')) {
|
||||
baseURL = `${baseURL}/v1`
|
||||
}
|
||||
|
||||
const url = `${baseURL}/rerank`
|
||||
|
||||
const requestBody = {
|
||||
model: this.base.rerankModel,
|
||||
query,
|
||||
documents: searchResults.map((doc) => doc.pageContent),
|
||||
top_k: this.base.topN,
|
||||
return_documents: false,
|
||||
truncation: true
|
||||
}
|
||||
|
||||
try {
|
||||
const { data } = await axios.post(url, requestBody, {
|
||||
headers: {
|
||||
...this.defaultHeaders()
|
||||
}
|
||||
})
|
||||
|
||||
const rerankResults = data.data
|
||||
|
||||
const resultMap = new Map(rerankResults.map((result: any) => [result.index, result.relevance_score || 0]))
|
||||
|
||||
return searchResults
|
||||
.map((doc: ExtractChunkData, index: number) => {
|
||||
const score = resultMap.get(index)
|
||||
if (score === undefined) return undefined
|
||||
|
||||
return {
|
||||
...doc,
|
||||
score
|
||||
}
|
||||
})
|
||||
.filter((doc): doc is ExtractChunkData => doc !== undefined)
|
||||
.sort((a, b) => b.score - a.score)
|
||||
} catch (error: any) {
|
||||
const errorDetails = this.formatErrorMessage(url, error, requestBody)
|
||||
|
||||
console.error('Voyage Reranker API Error:', errorDetails)
|
||||
throw new Error(`重排序请求失败: ${error.message}\n请求详情: ${errorDetails}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
Before Width: | Height: | Size: 353 KiB |
@@ -1,11 +1,13 @@
|
||||
import { UpdateInfo } from 'builder-util-runtime'
|
||||
import { app, BrowserWindow, dialog } from 'electron'
|
||||
import logger from 'electron-log'
|
||||
import { AppUpdater as _AppUpdater, autoUpdater, UpdateInfo } from 'electron-updater'
|
||||
import { AppUpdater as _AppUpdater, autoUpdater } from 'electron-updater'
|
||||
|
||||
import icon from '../../../build/icon.png?asset'
|
||||
|
||||
export default class AppUpdater {
|
||||
autoUpdater: _AppUpdater = autoUpdater
|
||||
private releaseInfo: UpdateInfo | undefined
|
||||
|
||||
constructor(mainWindow: BrowserWindow) {
|
||||
logger.transports.file.level = 'info'
|
||||
@@ -16,7 +18,12 @@ export default class AppUpdater {
|
||||
|
||||
// 检测下载错误
|
||||
autoUpdater.on('error', (error) => {
|
||||
logger.error('更新异常', error)
|
||||
// 简单记录错误信息和时间戳
|
||||
logger.error('更新异常', {
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
time: new Date().toISOString()
|
||||
})
|
||||
mainWindow.webContents.send('update-error', error)
|
||||
})
|
||||
|
||||
@@ -37,17 +44,26 @@ export default class AppUpdater {
|
||||
|
||||
// 当需要更新的内容下载完成后
|
||||
autoUpdater.on('update-downloaded', (releaseInfo: UpdateInfo) => {
|
||||
mainWindow.webContents.send('update-downloaded')
|
||||
mainWindow.webContents.send('update-downloaded', releaseInfo)
|
||||
this.releaseInfo = releaseInfo
|
||||
logger.info('下载完成', releaseInfo)
|
||||
})
|
||||
|
||||
logger.info('下载完成,询问用户是否更新', releaseInfo)
|
||||
this.autoUpdater = autoUpdater
|
||||
}
|
||||
|
||||
public async showUpdateDialog(mainWindow: BrowserWindow) {
|
||||
if (!this.releaseInfo) {
|
||||
return
|
||||
}
|
||||
|
||||
dialog
|
||||
.showMessageBox({
|
||||
type: 'info',
|
||||
title: '安装更新',
|
||||
icon,
|
||||
message: `新版本 ${releaseInfo.version} 已准备就绪`,
|
||||
detail: this.formatReleaseNotes(releaseInfo.releaseNotes),
|
||||
message: `新版本 ${this.releaseInfo.version} 已准备就绪`,
|
||||
detail: this.formatReleaseNotes(this.releaseInfo.releaseNotes),
|
||||
buttons: ['稍后安装', '立即安装'],
|
||||
defaultId: 1,
|
||||
cancelId: 0
|
||||
@@ -60,9 +76,6 @@ export default class AppUpdater {
|
||||
mainWindow.webContents.send('update-downloaded-cancelled')
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
this.autoUpdater = autoUpdater
|
||||
}
|
||||
|
||||
private formatReleaseNotes(releaseNotes: string | ReleaseNoteInfo[] | null | undefined): string {
|
||||
|
||||
@@ -5,18 +5,22 @@ import { app } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
import * as fs from 'fs-extra'
|
||||
import * as path from 'path'
|
||||
import { createClient, CreateDirectoryOptions, FileStat } from 'webdav'
|
||||
|
||||
import WebDav from './WebDav'
|
||||
import { windowService } from './WindowService'
|
||||
|
||||
class BackupManager {
|
||||
private tempDir = path.join(app.getPath('temp'), 'cherry-studio', 'backup', 'temp')
|
||||
private backupDir = path.join(app.getPath('temp'), 'cherry-studio', 'backup')
|
||||
|
||||
constructor() {
|
||||
this.checkConnection = this.checkConnection.bind(this)
|
||||
this.backup = this.backup.bind(this)
|
||||
this.restore = this.restore.bind(this)
|
||||
this.backupToWebdav = this.backupToWebdav.bind(this)
|
||||
this.restoreFromWebdav = this.restoreFromWebdav.bind(this)
|
||||
this.listWebdavFiles = this.listWebdavFiles.bind(this)
|
||||
}
|
||||
|
||||
private async setWritableRecursive(dirPath: string): Promise<void> {
|
||||
@@ -72,18 +76,46 @@ class BackupManager {
|
||||
data: string,
|
||||
destinationPath: string = this.backupDir
|
||||
): Promise<string> {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
|
||||
const onProgress = (processData: { stage: string; progress: number; total: number }) => {
|
||||
mainWindow?.webContents.send('backup-progress', processData)
|
||||
Logger.log('[BackupManager] backup progress', processData)
|
||||
}
|
||||
|
||||
try {
|
||||
await fs.ensureDir(this.tempDir)
|
||||
onProgress({ stage: 'preparing', progress: 0, total: 100 })
|
||||
|
||||
// 将 data 写入临时文件
|
||||
// 使用流的方式写入 data.json
|
||||
const tempDataPath = path.join(this.tempDir, 'data.json')
|
||||
await fs.writeFile(tempDataPath, data)
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const writeStream = fs.createWriteStream(tempDataPath)
|
||||
writeStream.write(data)
|
||||
writeStream.end()
|
||||
|
||||
writeStream.on('finish', () => resolve())
|
||||
writeStream.on('error', (error) => reject(error))
|
||||
})
|
||||
onProgress({ stage: 'writing_data', progress: 20, total: 100 })
|
||||
|
||||
// 复制 Data 目录到临时目录
|
||||
const sourcePath = path.join(app.getPath('userData'), 'Data')
|
||||
const tempDataDir = path.join(this.tempDir, 'Data')
|
||||
await fs.copy(sourcePath, tempDataDir)
|
||||
|
||||
// 获取源目录总大小
|
||||
const totalSize = await this.getDirSize(sourcePath)
|
||||
let copiedSize = 0
|
||||
|
||||
// 使用流式复制
|
||||
await this.copyDirWithProgress(sourcePath, tempDataDir, (size) => {
|
||||
copiedSize += size
|
||||
const progress = Math.min(80, 20 + Math.floor((copiedSize / totalSize) * 60))
|
||||
onProgress({ stage: 'copying_files', progress, total: 100 })
|
||||
})
|
||||
|
||||
await this.setWritableRecursive(tempDataDir)
|
||||
onProgress({ stage: 'compressing', progress: 80, total: 100 })
|
||||
|
||||
// 使用 adm-zip 创建压缩文件
|
||||
const zip = new AdmZip()
|
||||
@@ -93,46 +125,65 @@ class BackupManager {
|
||||
|
||||
// 清理临时目录
|
||||
await fs.remove(this.tempDir)
|
||||
onProgress({ stage: 'completed', progress: 100, total: 100 })
|
||||
|
||||
Logger.log('Backup completed successfully')
|
||||
Logger.log('[BackupManager] Backup completed successfully')
|
||||
return backupedFilePath
|
||||
} catch (error) {
|
||||
Logger.error('Backup failed:', error)
|
||||
Logger.error('[BackupManager] Backup failed:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
async restore(_: Electron.IpcMainInvokeEvent, backupPath: string): Promise<string> {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
|
||||
const onProgress = (processData: { stage: string; progress: number; total: number }) => {
|
||||
mainWindow?.webContents.send('restore-progress', processData)
|
||||
Logger.log('[BackupManager] restore progress', processData)
|
||||
}
|
||||
|
||||
try {
|
||||
// 创建临时目录
|
||||
await fs.ensureDir(this.tempDir)
|
||||
onProgress({ stage: 'preparing', progress: 0, total: 100 })
|
||||
|
||||
Logger.log('[backup] step 1: unzip backup file', this.tempDir)
|
||||
|
||||
// 使用 adm-zip 解压
|
||||
const zip = new AdmZip(backupPath)
|
||||
zip.extractAllTo(this.tempDir, true) // true 表示覆盖已存在的文件
|
||||
onProgress({ stage: 'extracting', progress: 20, total: 100 })
|
||||
|
||||
Logger.log('[backup] step 2: read data.json')
|
||||
|
||||
// 读取 data.json
|
||||
const dataPath = path.join(this.tempDir, 'data.json')
|
||||
const data = await fs.readFile(dataPath, 'utf-8')
|
||||
onProgress({ stage: 'reading_data', progress: 40, total: 100 })
|
||||
|
||||
Logger.log('[backup] step 3: restore Data directory')
|
||||
|
||||
// 恢复 Data 目录
|
||||
const sourcePath = path.join(this.tempDir, 'Data')
|
||||
const destPath = path.join(app.getPath('userData'), 'Data')
|
||||
|
||||
// 获取源目录总大小
|
||||
const totalSize = await this.getDirSize(sourcePath)
|
||||
let copiedSize = 0
|
||||
|
||||
await this.setWritableRecursive(destPath)
|
||||
await fs.remove(destPath)
|
||||
await fs.copy(sourcePath, destPath)
|
||||
|
||||
// 使用流式复制
|
||||
await this.copyDirWithProgress(sourcePath, destPath, (size) => {
|
||||
copiedSize += size
|
||||
const progress = Math.min(90, 40 + Math.floor((copiedSize / totalSize) * 50))
|
||||
onProgress({ stage: 'copying_files', progress, total: 100 })
|
||||
})
|
||||
|
||||
Logger.log('[backup] step 4: clean up temp directory')
|
||||
|
||||
// 清理临时目录
|
||||
await this.setWritableRecursive(this.tempDir)
|
||||
await fs.remove(this.tempDir)
|
||||
onProgress({ stage: 'completed', progress: 100, total: 100 })
|
||||
|
||||
Logger.log('[backup] step 5: Restore completed successfully')
|
||||
|
||||
@@ -145,7 +196,7 @@ class BackupManager {
|
||||
}
|
||||
|
||||
async backupToWebdav(_: Electron.IpcMainInvokeEvent, data: string, webdavConfig: WebDavConfig) {
|
||||
const filename = 'cherry-studio.backup.zip'
|
||||
const filename = webdavConfig.fileName || 'cherry-studio.backup.zip'
|
||||
const backupedFilePath = await this.backup(_, filename, data)
|
||||
const webdavClient = new WebDav(webdavConfig)
|
||||
return await webdavClient.putFileContents(filename, fs.createReadStream(backupedFilePath), {
|
||||
@@ -154,8 +205,9 @@ class BackupManager {
|
||||
}
|
||||
|
||||
async restoreFromWebdav(_: Electron.IpcMainInvokeEvent, webdavConfig: WebDavConfig) {
|
||||
const filename = 'cherry-studio.backup.zip'
|
||||
const filename = webdavConfig.fileName || 'cherry-studio.backup.zip'
|
||||
const webdavClient = new WebDav(webdavConfig)
|
||||
try {
|
||||
const retrievedFile = await webdavClient.getFileContents(filename)
|
||||
const backupedFilePath = path.join(this.backupDir, filename)
|
||||
|
||||
@@ -163,9 +215,98 @@ class BackupManager {
|
||||
fs.mkdirSync(this.backupDir, { recursive: true })
|
||||
}
|
||||
|
||||
await fs.writeFileSync(backupedFilePath, retrievedFile as Buffer)
|
||||
// 使用流的方式写入文件
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const writeStream = fs.createWriteStream(backupedFilePath)
|
||||
writeStream.write(retrievedFile as Buffer)
|
||||
writeStream.end()
|
||||
|
||||
writeStream.on('finish', () => resolve())
|
||||
writeStream.on('error', (error) => reject(error))
|
||||
})
|
||||
|
||||
return await this.restore(_, backupedFilePath)
|
||||
} catch (error: any) {
|
||||
Logger.error('[backup] Failed to restore from WebDAV:', error)
|
||||
throw new Error(error.message || 'Failed to restore backup file')
|
||||
}
|
||||
}
|
||||
|
||||
listWebdavFiles = async (_: Electron.IpcMainInvokeEvent, config: WebDavConfig) => {
|
||||
try {
|
||||
const client = createClient(config.webdavHost, {
|
||||
username: config.webdavUser,
|
||||
password: config.webdavPass
|
||||
})
|
||||
|
||||
const response = await client.getDirectoryContents(config.webdavPath)
|
||||
const files = Array.isArray(response) ? response : response.data
|
||||
|
||||
return files
|
||||
.filter((file: FileStat) => file.type === 'file' && file.basename.endsWith('.zip'))
|
||||
.map((file: FileStat) => ({
|
||||
fileName: file.basename,
|
||||
modifiedTime: file.lastmod,
|
||||
size: file.size
|
||||
}))
|
||||
.sort((a, b) => new Date(b.modifiedTime).getTime() - new Date(a.modifiedTime).getTime())
|
||||
} catch (error: any) {
|
||||
Logger.error('Failed to list WebDAV files:', error)
|
||||
throw new Error(error.message || 'Failed to list backup files')
|
||||
}
|
||||
}
|
||||
|
||||
private async getDirSize(dirPath: string): Promise<number> {
|
||||
let size = 0
|
||||
const items = await fs.readdir(dirPath, { withFileTypes: true })
|
||||
|
||||
for (const item of items) {
|
||||
const fullPath = path.join(dirPath, item.name)
|
||||
if (item.isDirectory()) {
|
||||
size += await this.getDirSize(fullPath)
|
||||
} else {
|
||||
const stats = await fs.stat(fullPath)
|
||||
size += stats.size
|
||||
}
|
||||
}
|
||||
return size
|
||||
}
|
||||
|
||||
private async copyDirWithProgress(
|
||||
source: string,
|
||||
destination: string,
|
||||
onProgress: (size: number) => void
|
||||
): Promise<void> {
|
||||
const items = await fs.readdir(source, { withFileTypes: true })
|
||||
|
||||
for (const item of items) {
|
||||
const sourcePath = path.join(source, item.name)
|
||||
const destPath = path.join(destination, item.name)
|
||||
|
||||
if (item.isDirectory()) {
|
||||
await fs.ensureDir(destPath)
|
||||
await this.copyDirWithProgress(sourcePath, destPath, onProgress)
|
||||
} else {
|
||||
const stats = await fs.stat(sourcePath)
|
||||
await fs.copy(sourcePath, destPath)
|
||||
onProgress(stats.size)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async checkConnection(_: Electron.IpcMainInvokeEvent, webdavConfig: WebDavConfig) {
|
||||
const webdavClient = new WebDav(webdavConfig)
|
||||
return await webdavClient.checkConnection()
|
||||
}
|
||||
|
||||
async createDirectory(
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
webdavConfig: WebDavConfig,
|
||||
path: string,
|
||||
options?: CreateDirectoryOptions
|
||||
) {
|
||||
const webdavClient = new WebDav(webdavConfig)
|
||||
return await webdavClient.createDirectory(path, options)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ export default class ClipboardMonitor {
  private handleTextSelected(text: string) {
    if (!text) return

    console.debug('[ClipboardMonitor] handleTextSelected', text)
    console.log('[ClipboardMonitor] handleTextSelected', text)

    windowService.setLastSelectedText(text)

@@ -30,6 +30,14 @@ export class ConfigManager {
|
||||
this.store.set('theme', theme)
|
||||
}
|
||||
|
||||
getLaunchToTray(): boolean {
|
||||
return !!this.store.get('launchToTray', false)
|
||||
}
|
||||
|
||||
setLaunchToTray(value: boolean) {
|
||||
this.store.set('launchToTray', value)
|
||||
}
|
||||
|
||||
getTray(): boolean {
|
||||
return !!this.store.get('tray', true)
|
||||
}
|
||||
@@ -39,6 +47,14 @@ export class ConfigManager {
|
||||
this.notifySubscribers('tray', value)
|
||||
}
|
||||
|
||||
getTrayOnClose(): boolean {
|
||||
return !!this.store.get('trayOnClose', true)
|
||||
}
|
||||
|
||||
setTrayOnClose(value: boolean) {
|
||||
this.store.set('trayOnClose', value)
|
||||
}
|
||||
|
||||
getZoomFactor(): number {
|
||||
return this.store.get('zoomFactor', 1) as number
|
||||
}
|
||||
|
||||
247
src/main/services/CopilotService.ts
Normal file
@@ -0,0 +1,247 @@
|
||||
import axios, { AxiosRequestConfig } from 'axios'
|
||||
import { app, safeStorage } from 'electron'
|
||||
import fs from 'fs/promises'
|
||||
import path from 'path'
|
||||
|
||||
// 配置常量,集中管理
|
||||
const CONFIG = {
|
||||
GITHUB_CLIENT_ID: 'Iv1.b507a08c87ecfe98',
|
||||
POLLING: {
|
||||
MAX_ATTEMPTS: 8,
|
||||
INITIAL_DELAY_MS: 1000,
|
||||
MAX_DELAY_MS: 16000 // 最大延迟16秒
|
||||
},
|
||||
DEFAULT_HEADERS: {
|
||||
accept: 'application/json',
|
||||
'editor-version': 'Neovim/0.6.1',
|
||||
'editor-plugin-version': 'copilot.vim/1.16.0',
|
||||
'content-type': 'application/json',
|
||||
'user-agent': 'GithubCopilot/1.155.0',
|
||||
'accept-encoding': 'gzip,deflate,br'
|
||||
},
|
||||
// API端点集中管理
|
||||
API_URLS: {
|
||||
GITHUB_USER: 'https://api.github.com/user',
|
||||
GITHUB_DEVICE_CODE: 'https://github.com/login/device/code',
|
||||
GITHUB_ACCESS_TOKEN: 'https://github.com/login/oauth/access_token',
|
||||
COPILOT_TOKEN: 'https://api.github.com/copilot_internal/v2/token'
|
||||
}
|
||||
}
|
||||
|
||||
// 接口定义移到顶部,便于查阅
|
||||
interface UserResponse {
|
||||
login: string
|
||||
avatar: string
|
||||
}
|
||||
|
||||
interface AuthResponse {
|
||||
device_code: string
|
||||
user_code: string
|
||||
verification_uri: string
|
||||
}
|
||||
|
||||
interface TokenResponse {
|
||||
access_token: string
|
||||
}
|
||||
|
||||
interface CopilotTokenResponse {
|
||||
token: string
|
||||
}
|
||||
|
||||
// 自定义错误类,统一错误处理
|
||||
class CopilotServiceError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
public readonly cause?: unknown
|
||||
) {
|
||||
super(message)
|
||||
this.name = 'CopilotServiceError'
|
||||
}
|
||||
}
|
||||
|
||||
class CopilotService {
|
||||
private readonly tokenFilePath: string
|
||||
private headers: Record<string, string>
|
||||
|
||||
constructor() {
|
||||
this.tokenFilePath = path.join(app.getPath('userData'), '.copilot_token')
|
||||
this.headers = { ...CONFIG.DEFAULT_HEADERS }
|
||||
}
|
||||
|
||||
/**
|
||||
* 设置自定义请求头
|
||||
*/
|
||||
private updateHeaders = (headers?: Record<string, string>): void => {
|
||||
if (headers && Object.keys(headers).length > 0) {
|
||||
this.headers = { ...headers }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取GitHub登录信息
|
||||
*/
|
||||
public getUser = async (_: Electron.IpcMainInvokeEvent, token: string): Promise<UserResponse> => {
|
||||
try {
|
||||
const config: AxiosRequestConfig = {
|
||||
headers: {
|
||||
Connection: 'keep-alive',
|
||||
'user-agent': 'Visual Studio Code (desktop)',
|
||||
'Sec-Fetch-Site': 'none',
|
||||
'Sec-Fetch-Mode': 'no-cors',
|
||||
'Sec-Fetch-Dest': 'empty',
|
||||
authorization: `token ${token}`
|
||||
}
|
||||
}
|
||||
|
||||
const response = await axios.get(CONFIG.API_URLS.GITHUB_USER, config)
|
||||
return {
|
||||
login: response.data.login,
|
||||
avatar: response.data.avatar_url
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to get user information:', error)
|
||||
throw new CopilotServiceError('无法获取GitHub用户信息', error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取GitHub设备授权信息
|
||||
*/
|
||||
public getAuthMessage = async (
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
headers?: Record<string, string>
|
||||
): Promise<AuthResponse> => {
|
||||
try {
|
||||
this.updateHeaders(headers)
|
||||
|
||||
const response = await axios.post<AuthResponse>(
|
||||
CONFIG.API_URLS.GITHUB_DEVICE_CODE,
|
||||
{
|
||||
client_id: CONFIG.GITHUB_CLIENT_ID,
|
||||
scope: 'read:user'
|
||||
},
|
||||
{ headers: this.headers }
|
||||
)
|
||||
|
||||
return response.data
|
||||
} catch (error) {
|
||||
console.error('Failed to get auth message:', error)
|
||||
throw new CopilotServiceError('无法获取GitHub授权信息', error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 使用设备码获取访问令牌 - 优化轮询逻辑
|
||||
*/
|
||||
public getCopilotToken = async (
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
device_code: string,
|
||||
headers?: Record<string, string>
|
||||
): Promise<TokenResponse> => {
|
||||
this.updateHeaders(headers)
|
||||
|
||||
let currentDelay = CONFIG.POLLING.INITIAL_DELAY_MS
|
||||
|
||||
for (let attempt = 0; attempt < CONFIG.POLLING.MAX_ATTEMPTS; attempt++) {
|
||||
await this.delay(currentDelay)
|
||||
|
||||
try {
|
||||
const response = await axios.post<TokenResponse>(
|
||||
CONFIG.API_URLS.GITHUB_ACCESS_TOKEN,
|
||||
{
|
||||
client_id: CONFIG.GITHUB_CLIENT_ID,
|
||||
device_code,
|
||||
grant_type: 'urn:ietf:params:oauth:grant-type:device_code'
|
||||
},
|
||||
{ headers: this.headers }
|
||||
)
|
||||
|
||||
const { access_token } = response.data
|
||||
if (access_token) {
|
||||
return { access_token }
|
||||
}
|
||||
} catch (error) {
|
||||
// 指数退避策略
|
||||
currentDelay = Math.min(currentDelay * 2, CONFIG.POLLING.MAX_DELAY_MS)
|
||||
|
||||
// 仅在最后一次尝试失败时记录详细错误
|
||||
const isLastAttempt = attempt === CONFIG.POLLING.MAX_ATTEMPTS - 1
|
||||
if (isLastAttempt) {
|
||||
console.error(`Token polling failed after ${CONFIG.POLLING.MAX_ATTEMPTS} attempts:`, error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw new CopilotServiceError('获取访问令牌超时,请重试')
|
||||
}
|
||||
|
||||
/**
|
||||
* 保存Copilot令牌到本地文件
|
||||
*/
|
||||
public saveCopilotToken = async (_: Electron.IpcMainInvokeEvent, token: string): Promise<void> => {
|
||||
try {
|
||||
const encryptedToken = safeStorage.encryptString(token)
|
||||
await fs.writeFile(this.tokenFilePath, encryptedToken)
|
||||
} catch (error) {
|
||||
console.error('Failed to save token:', error)
|
||||
throw new CopilotServiceError('无法保存访问令牌', error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 从本地文件读取令牌并获取Copilot令牌
|
||||
*/
|
||||
public getToken = async (
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
headers?: Record<string, string>
|
||||
): Promise<CopilotTokenResponse> => {
|
||||
try {
|
||||
this.updateHeaders(headers)
|
||||
|
||||
const encryptedToken = await fs.readFile(this.tokenFilePath)
|
||||
const access_token = safeStorage.decryptString(Buffer.from(encryptedToken))
|
||||
|
||||
const config: AxiosRequestConfig = {
|
||||
headers: {
|
||||
...this.headers,
|
||||
authorization: `token ${access_token}`
|
||||
}
|
||||
}
|
||||
|
||||
const response = await axios.get<CopilotTokenResponse>(CONFIG.API_URLS.COPILOT_TOKEN, config)
|
||||
|
||||
return response.data
|
||||
} catch (error) {
|
||||
console.error('Failed to get Copilot token:', error)
|
||||
throw new CopilotServiceError('无法获取Copilot令牌,请重新授权', error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 退出登录,删除本地token文件
|
||||
*/
|
||||
public logout = async (): Promise<void> => {
|
||||
try {
|
||||
try {
|
||||
await fs.access(this.tokenFilePath)
|
||||
await fs.unlink(this.tokenFilePath)
|
||||
console.log('Successfully logged out from Copilot')
|
||||
} catch (error) {
|
||||
// 文件不存在不是错误,只是记录一下
|
||||
console.log('Token file not found, nothing to delete')
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to logout:', error)
|
||||
throw new CopilotServiceError('无法完成退出登录操作', error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 辅助方法:延迟执行
|
||||
*/
|
||||
private delay = (ms: number): Promise<void> => {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms))
|
||||
}
|
||||
}
|
||||
|
||||
export default new CopilotService()
|
||||
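On the renderer side these handlers are reached through the copilot:* IPC channels registered in ipc.ts above. A rough sketch of the device-flow sequence as a caller would drive it, assuming the preload exposes ipcRenderer.invoke (the exact preload API is not shown in this diff):

// Hypothetical renderer-side flow; only the channel names come from ipc.ts above
const auth = await window.electron.ipcRenderer.invoke('copilot:get-auth-message')
// The user opens auth.verification_uri and enters auth.user_code, then:
const { access_token } = await window.electron.ipcRenderer.invoke('copilot:get-copilot-token', auth.device_code)
await window.electron.ipcRenderer.invoke('copilot:save-copilot-token', access_token)
const { token } = await window.electron.ipcRenderer.invoke('copilot:get-token')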
@@ -1,7 +1,22 @@
|
||||
/* eslint-disable no-case-declarations */
|
||||
// ExportService
|
||||
|
||||
import { AlignmentType, BorderStyle, Document, HeadingLevel, Packer, Paragraph, ShadingType, TextRun } from 'docx'
|
||||
import {
|
||||
AlignmentType,
|
||||
BorderStyle,
|
||||
Document,
|
||||
ExternalHyperlink,
|
||||
HeadingLevel,
|
||||
Packer,
|
||||
Paragraph,
|
||||
ShadingType,
|
||||
Table,
|
||||
TableCell,
|
||||
TableRow,
|
||||
TextRun,
|
||||
VerticalAlign,
|
||||
WidthType
|
||||
} from 'docx'
|
||||
import { dialog } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
import MarkdownIt from 'markdown-it'
|
||||
@@ -21,13 +36,54 @@ export class ExportService {
|
||||
const tokens = this.md.parse(markdown, {})
|
||||
const elements: any[] = []
|
||||
let listLevel = 0
|
||||
let currentTable: Table | null = null
|
||||
let currentRowCells: TableCell[] = []
|
||||
let isHeaderRow = false
|
||||
let tableColumnCount = 0
|
||||
let tableRows: TableRow[] = [] // Store rows temporarily
|
||||
|
||||
const processInlineTokens = (tokens: any[]): TextRun[] => {
|
||||
const runs: TextRun[] = []
|
||||
for (const token of tokens) {
|
||||
const processInlineTokens = (tokens: any[], isHeaderRow: boolean): (TextRun | ExternalHyperlink)[] => {
|
||||
const runs: (TextRun | ExternalHyperlink)[] = []
|
||||
let linkText = ''
|
||||
let linkUrl = ''
|
||||
let insideLink = false
|
||||
|
||||
for (let i = 0; i < tokens.length; i++) {
|
||||
const token = tokens[i]
|
||||
switch (token.type) {
|
||||
case 'link_open':
|
||||
insideLink = true
|
||||
linkUrl = token.attrs.find((attr: [string, string]) => attr[0] === 'href')[1]
|
||||
linkText = tokens[i + 1].content
|
||||
i += 1
|
||||
break
|
||||
case 'link_close':
|
||||
if (insideLink && linkUrl && linkText) {
|
||||
// Handle any accumulated link text with the ExternalHyperlink
|
||||
runs.push(
|
||||
new ExternalHyperlink({
|
||||
children: [
|
||||
new TextRun({
|
||||
text: linkText,
|
||||
style: 'Hyperlink',
|
||||
color: '0000FF',
|
||||
underline: {
|
||||
type: 'single'
|
||||
}
|
||||
})
|
||||
],
|
||||
link: linkUrl
|
||||
})
|
||||
)
|
||||
|
||||
// Reset link variables
|
||||
linkText = ''
|
||||
linkUrl = ''
|
||||
insideLink = false
|
||||
}
|
||||
break
|
||||
case 'text':
|
||||
runs.push(new TextRun(token.content))
|
||||
runs.push(new TextRun({ text: token.content, bold: isHeaderRow }))
break
|
||||
case 'strong':
|
||||
runs.push(new TextRun({ text: token.content, bold: true }))
|
||||
@@ -45,7 +101,6 @@ export class ExportService {
|
||||
|
||||
for (let i = 0; i < tokens.length; i++) {
|
||||
const token = tokens[i]
|
||||
|
||||
switch (token.type) {
|
||||
case 'heading_open':
|
||||
// Get the heading level (h1 -> h6)
@@ -68,7 +123,7 @@ export class ExportService {
|
||||
const inlineTokens = tokens[i + 1].children || []
|
||||
elements.push(
|
||||
new Paragraph({
|
||||
children: processInlineTokens(inlineTokens),
|
||||
children: processInlineTokens(inlineTokens, false),
|
||||
spacing: {
|
||||
before: 120,
|
||||
after: 120
|
||||
@@ -93,7 +148,7 @@ export class ExportService {
|
||||
children: [
|
||||
new TextRun({ text: '•', bold: true }),
|
||||
new TextRun({ text: '\t' }),
|
||||
...processInlineTokens(itemInlineTokens)
|
||||
...processInlineTokens(itemInlineTokens, false)
|
||||
],
|
||||
indent: {
|
||||
left: listLevel * 720
|
||||
@@ -171,6 +226,116 @@ export class ExportService {
|
||||
)
|
||||
i += 3
|
||||
break
|
||||
|
||||
// Table handling
case 'table_open':
|
||||
tableRows = [] // Reset table rows for new table
|
||||
break
|
||||
|
||||
case 'thead_open':
|
||||
isHeaderRow = true
|
||||
break
|
||||
|
||||
case 'tbody_open':
|
||||
isHeaderRow = false
|
||||
break
|
||||
|
||||
case 'tr_open':
|
||||
currentRowCells = []
|
||||
break
|
||||
|
||||
case 'tr_close':
|
||||
const row = new TableRow({
|
||||
children: currentRowCells,
|
||||
tableHeader: isHeaderRow
|
||||
})
|
||||
tableRows.push(row)
|
||||
// Count the table columns (based on the first row)
if (tableColumnCount === 0) {
|
||||
tableColumnCount = currentRowCells.length
|
||||
}
|
||||
break
|
||||
|
||||
case 'th_open':
|
||||
case 'td_open':
|
||||
const isFirstColumn = currentRowCells.length === 0 // whether this is the first column
const borders = {
|
||||
top: {
|
||||
style: BorderStyle.NONE
|
||||
},
|
||||
bottom: isHeaderRow
|
||||
? {
|
||||
style: BorderStyle.SINGLE,
|
||||
size: 0.5,
|
||||
color: '000000'
|
||||
}
|
||||
: {
|
||||
style: BorderStyle.NONE
|
||||
},
|
||||
left: {
|
||||
style: BorderStyle.NONE
|
||||
},
|
||||
right: {
|
||||
style: BorderStyle.NONE
|
||||
}
|
||||
}
|
||||
const cellContent = tokens[i + 1]
|
||||
const cellOptions = {
|
||||
children: [
|
||||
new Paragraph({
|
||||
children: cellContent.children
|
||||
? processInlineTokens(cellContent.children, isHeaderRow || isFirstColumn)
|
||||
: [new TextRun({ text: cellContent.content || '', bold: isHeaderRow || isFirstColumn })],
|
||||
alignment: AlignmentType.CENTER
|
||||
})
|
||||
],
|
||||
verticalAlign: VerticalAlign.CENTER,
|
||||
borders: borders
|
||||
}
|
||||
currentRowCells.push(new TableCell(cellOptions))
|
||||
i += 2 // skip the content and closing tokens
break
|
||||
case 'table_close':
|
||||
// Create table with the collected rows - avoid using protected properties
|
||||
// Create the table with all rows
|
||||
currentTable = new Table({
|
||||
width: {
|
||||
size: 100,
|
||||
type: WidthType.PERCENTAGE
|
||||
},
|
||||
rows: tableRows,
|
||||
borders: {
|
||||
top: {
|
||||
style: BorderStyle.SINGLE,
|
||||
size: 1,
|
||||
color: '000000'
|
||||
},
|
||||
bottom: {
|
||||
style: BorderStyle.SINGLE,
|
||||
size: 1,
|
||||
color: '000000'
|
||||
},
|
||||
left: {
|
||||
style: BorderStyle.NONE
|
||||
},
|
||||
right: {
|
||||
style: BorderStyle.NONE
|
||||
},
|
||||
insideHorizontal: {
|
||||
style: BorderStyle.NONE
|
||||
},
|
||||
insideVertical: {
|
||||
style: BorderStyle.NONE
|
||||
}
|
||||
}
|
||||
})
|
||||
elements.push(currentTable)
|
||||
currentTable = null
|
||||
tableColumnCount = 0
|
||||
tableRows = []
|
||||
currentRowCells = []
|
||||
isHeaderRow = false
|
||||
break
|
||||
}
|
||||
}
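As a sanity check of the docx API calls used in the table and hyperlink branches above, a small self-contained sketch, assuming a docx version whose Document constructor takes a sections array; the file name and contents are illustrative only.

import { BorderStyle, Document, ExternalHyperlink, Packer, Paragraph, Table, TableCell, TableRow, TextRun, VerticalAlign, WidthType } from 'docx'
import { writeFileSync } from 'fs'

// One bold header cell with only a bottom border, centered vertically
const headerCell = new TableCell({
  children: [new Paragraph({ children: [new TextRun({ text: 'Name', bold: true })] })],
  verticalAlign: VerticalAlign.CENTER,
  borders: {
    top: { style: BorderStyle.NONE },
    bottom: { style: BorderStyle.SINGLE, size: 1, color: '000000' },
    left: { style: BorderStyle.NONE },
    right: { style: BorderStyle.NONE }
  }
})

// Full-width table whose first row is marked as the header row
const table = new Table({
  width: { size: 100, type: WidthType.PERCENTAGE },
  rows: [new TableRow({ children: [headerCell], tableHeader: true })]
})

// Paragraph containing a clickable external link
const link = new Paragraph({
  children: [
    new ExternalHyperlink({
      children: [new TextRun({ text: 'example link', style: 'Hyperlink' })],
      link: 'https://example.com'
    })
  ]
})

const doc = new Document({ sections: [{ children: [link, table] }] })
Packer.toBuffer(doc).then((buffer) => writeFileSync('table-demo.docx', buffer))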
|
||||
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import { getFileType } from '@main/utils/file'
|
||||
import { getFilesDir, getFileType, getTempDir } from '@main/utils/file'
|
||||
import { documentExts, imageExts } from '@shared/config/constant'
|
||||
import { FileType } from '@types'
|
||||
import * as crypto from 'crypto'
|
||||
import {
|
||||
app,
|
||||
dialog,
|
||||
OpenDialogOptions,
|
||||
OpenDialogReturnValue,
|
||||
@@ -21,8 +20,8 @@ import { chdir } from 'process'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
class FileStorage {
|
||||
private storageDir = path.join(app.getPath('userData'), 'Data', 'Files')
|
||||
private tempDir = path.join(app.getPath('temp'), 'CherryStudio')
|
||||
private storageDir = getFilesDir()
|
||||
private tempDir = getTempDir()
|
||||
|
||||
constructor() {
|
||||
this.initStorageDir()
|
||||
@@ -70,7 +69,7 @@ class FileStorage {
|
||||
origin_name: file,
|
||||
name: file + ext,
|
||||
path: storedFilePath,
|
||||
created_at: storedStats.birthtime,
|
||||
created_at: storedStats.birthtime.toISOString(),
|
||||
size: storedStats.size,
|
||||
ext,
|
||||
type: getFileType(ext),
|
||||
@@ -109,7 +108,7 @@ class FileStorage {
|
||||
origin_name: path.basename(filePath),
|
||||
name: path.basename(filePath),
|
||||
path: filePath,
|
||||
created_at: stats.birthtime,
|
||||
created_at: stats.birthtime.toISOString(),
|
||||
size: stats.size,
|
||||
ext: ext,
|
||||
type: fileType,
|
||||
@@ -174,7 +173,7 @@ class FileStorage {
|
||||
origin_name,
|
||||
name: uuid + ext,
|
||||
path: destPath,
|
||||
created_at: stats.birthtime,
|
||||
created_at: stats.birthtime.toISOString(),
|
||||
size: stats.size,
|
||||
ext: ext,
|
||||
type: fileType,
|
||||
@@ -198,7 +197,7 @@ class FileStorage {
|
||||
origin_name: path.basename(filePath),
|
||||
name: path.basename(filePath),
|
||||
path: filePath,
|
||||
created_at: stats.birthtime,
|
||||
created_at: stats.birthtime.toISOString(),
|
||||
size: stats.size,
|
||||
ext: ext,
|
||||
type: fileType,
|
||||
@@ -255,7 +254,8 @@ class FileStorage {
|
||||
const filePath = path.join(this.storageDir, id)
|
||||
const data = await fs.promises.readFile(filePath)
|
||||
const base64 = data.toString('base64')
|
||||
const mime = `image/${path.extname(filePath).slice(1)}`
|
||||
const ext = path.extname(filePath).slice(1) == 'jpg' ? 'jpeg' : path.extname(filePath).slice(1)
|
||||
const mime = `image/${ext}`
|
||||
return {
|
||||
mime,
|
||||
base64,
|
||||
@@ -271,12 +271,12 @@ class FileStorage {
|
||||
}
|
||||
|
||||
public clear = async (): Promise<void> => {
|
||||
await fs.promises.rmdir(this.storageDir, { recursive: true })
|
||||
await fs.promises.rm(this.storageDir, { recursive: true })
|
||||
await this.initStorageDir()
|
||||
}
|
||||
|
||||
public clearTemp = async (): Promise<void> => {
|
||||
await fs.promises.rmdir(this.tempDir, { recursive: true })
|
||||
await fs.promises.rm(this.tempDir, { recursive: true })
|
||||
await fs.promises.mkdir(this.tempDir, { recursive: true })
|
||||
}
|
||||
|
||||
@@ -416,7 +416,7 @@ class FileStorage {
|
||||
origin_name: filename,
|
||||
name: uuid + ext,
|
||||
path: destPath,
|
||||
created_at: stats.birthtime,
|
||||
created_at: stats.birthtime.toISOString(),
|
||||
size: stats.size,
|
||||
ext: ext,
|
||||
type: fileType,
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
import { FileMetadataResponse, FileState, GoogleAIFileManager } from '@google/generative-ai/server'
|
||||
import { FileType } from '@types'
|
||||
import fs from 'fs'
|
||||
import { ProxyAgent, setGlobalDispatcher } from 'undici'
|
||||
|
||||
import { CacheService } from './CacheService'
|
||||
import { proxyManager } from './ProxyManager'
|
||||
|
||||
export class GeminiService {
|
||||
private static readonly FILE_LIST_CACHE_KEY = 'gemini_file_list'
|
||||
private static readonly CACHE_DURATION = 3000
|
||||
|
||||
static async uploadFile(_: Electron.IpcMainInvokeEvent, file: FileType, apiKey: string) {
|
||||
setGlobalDispatcher(new ProxyAgent(proxyManager.getProxyUrl() || ''))
|
||||
const fileManager = new GoogleAIFileManager(apiKey)
|
||||
const uploadResult = await fileManager.uploadFile(file.path, {
|
||||
mimeType: 'application/pdf',
|
||||
@@ -31,7 +29,6 @@ export class GeminiService {
|
||||
file: FileType,
|
||||
apiKey: string
|
||||
): Promise<FileMetadataResponse | undefined> {
|
||||
setGlobalDispatcher(new ProxyAgent(proxyManager.getProxyUrl() || ''))
|
||||
const fileManager = new GoogleAIFileManager(apiKey)
|
||||
|
||||
const cachedResponse = CacheService.get<any>(GeminiService.FILE_LIST_CACHE_KEY)
|
||||
@@ -55,13 +52,11 @@ export class GeminiService {
|
||||
}
|
||||
|
||||
static async listFiles(_: Electron.IpcMainInvokeEvent, apiKey: string) {
|
||||
setGlobalDispatcher(new ProxyAgent(proxyManager.getProxyUrl() || ''))
|
||||
const fileManager = new GoogleAIFileManager(apiKey)
|
||||
return await fileManager.listFiles()
|
||||
}
|
||||
|
||||
static async deleteFile(_: Electron.IpcMainInvokeEvent, apiKey: string, fileId: string) {
|
||||
setGlobalDispatcher(new ProxyAgent(proxyManager.getProxyUrl() || ''))
|
||||
const fileManager = new GoogleAIFileManager(apiKey)
|
||||
await fileManager.deleteFile(fileId)
|
||||
}
|
||||
|
||||
@@ -1,26 +1,99 @@
|
||||
/**
|
||||
* Knowledge Service - Manages knowledge bases using RAG (Retrieval-Augmented Generation)
|
||||
*
|
||||
* This service handles creation, management, and querying of knowledge bases from various sources
|
||||
* including files, directories, URLs, sitemaps, and notes.
|
||||
*
|
||||
* Features:
|
||||
* - Concurrent task processing with workload management
|
||||
* - Multiple data source support
|
||||
* - Vector database integration
|
||||
*
|
||||
* For detailed documentation, see:
|
||||
* @see {@link ../../../docs/technical/KnowledgeService.md}
|
||||
*/
|
||||
|
||||
import * as fs from 'node:fs'
|
||||
import path from 'node:path'
|
||||
|
||||
import { RAGApplication, RAGApplicationBuilder, TextLoader } from '@llm-tools/embedjs'
|
||||
import type { ExtractChunkData } from '@llm-tools/embedjs-interfaces'
|
||||
import { LibSqlDb } from '@llm-tools/embedjs-libsql'
|
||||
import { SitemapLoader } from '@llm-tools/embedjs-loader-sitemap'
|
||||
import { WebLoader } from '@llm-tools/embedjs-loader-web'
|
||||
import { AzureOpenAiEmbeddings, OpenAiEmbeddings } from '@llm-tools/embedjs-openai'
|
||||
import { RAGApplication, RAGApplicationBuilder, TextLoader } from '@cherrystudio/embedjs'
|
||||
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
|
||||
import { LibSqlDb } from '@cherrystudio/embedjs-libsql'
|
||||
import { SitemapLoader } from '@cherrystudio/embedjs-loader-sitemap'
|
||||
import { WebLoader } from '@cherrystudio/embedjs-loader-web'
|
||||
import Embeddings from '@main/embeddings/Embeddings'
|
||||
import { addFileLoader } from '@main/loader'
|
||||
import { getInstanceName } from '@main/utils'
|
||||
import Reranker from '@main/reranker/Reranker'
|
||||
import { windowService } from '@main/services/WindowService'
|
||||
import { getAllFiles } from '@main/utils/file'
|
||||
import type { LoaderReturn } from '@shared/config/types'
|
||||
import { FileType, KnowledgeBaseParams, KnowledgeItem } from '@types'
|
||||
import { app } from 'electron'
|
||||
import { ProxyAgent, setGlobalDispatcher } from 'undici'
|
||||
import Logger from 'electron-log'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
import { proxyManager } from './ProxyManager'
|
||||
import { windowService } from './WindowService'
|
||||
export interface KnowledgeBaseAddItemOptions {
|
||||
base: KnowledgeBaseParams
|
||||
item: KnowledgeItem
|
||||
forceReload?: boolean
|
||||
}
|
||||
|
||||
interface KnowledgeBaseAddItemOptionsNonNullableAttribute {
|
||||
base: KnowledgeBaseParams
|
||||
item: KnowledgeItem
|
||||
forceReload: boolean
|
||||
}
|
||||
|
||||
interface EvaluateTaskWorkload {
|
||||
workload: number
|
||||
}
|
||||
|
||||
type LoaderDoneReturn = LoaderReturn | null
|
||||
|
||||
enum LoaderTaskItemState {
|
||||
PENDING,
|
||||
PROCESSING,
|
||||
DONE
|
||||
}
|
||||
|
||||
interface LoaderTaskItem {
|
||||
state: LoaderTaskItemState
|
||||
task: () => Promise<unknown>
|
||||
evaluateTaskWorkload: EvaluateTaskWorkload
|
||||
}
|
||||
|
||||
interface LoaderTask {
|
||||
loaderTasks: LoaderTaskItem[]
|
||||
loaderDoneReturn: LoaderDoneReturn
|
||||
}
|
||||
|
||||
interface LoaderTaskOfSet {
|
||||
loaderTasks: Set<LoaderTaskItem>
|
||||
loaderDoneReturn: LoaderDoneReturn
|
||||
}
|
||||
|
||||
interface QueueTaskItem {
|
||||
taskPromise: () => Promise<unknown>
|
||||
resolve: () => void
|
||||
evaluateTaskWorkload: EvaluateTaskWorkload
|
||||
}
|
||||
|
||||
const loaderTaskIntoOfSet = (loaderTask: LoaderTask): LoaderTaskOfSet => {
|
||||
return {
|
||||
loaderTasks: new Set(loaderTask.loaderTasks),
|
||||
loaderDoneReturn: loaderTask.loaderDoneReturn
|
||||
}
|
||||
}
|
||||
|
||||
class KnowledgeService {
|
||||
private storageDir = path.join(app.getPath('userData'), 'Data', 'KnowledgeBase')
|
||||
// Byte based
|
||||
private workload = 0
|
||||
private processingItemCount = 0
|
||||
private knowledgeItemProcessingQueueMappingPromise: Map<LoaderTaskOfSet, () => void> = new Map()
|
||||
private static MAXIMUM_WORKLOAD = 1024 * 1024 * 80
|
||||
private static MAXIMUM_PROCESSING_ITEM_COUNT = 30
|
||||
private static ERROR_LOADER_RETURN: LoaderReturn = { entriesAdded: 0, uniqueId: '', uniqueIds: [''], loaderType: '' }
|
||||
|
||||
constructor() {
|
||||
this.initStorageDir()
|
||||
@@ -40,30 +113,20 @@ class KnowledgeService {
|
||||
baseURL,
|
||||
dimensions
|
||||
}: KnowledgeBaseParams): Promise<RAGApplication> => {
|
||||
const batchSize = 10
|
||||
return new RAGApplicationBuilder()
|
||||
let ragApplication: RAGApplication
|
||||
const embeddings = new Embeddings({ model, apiKey, apiVersion, baseURL, dimensions } as KnowledgeBaseParams)
|
||||
try {
|
||||
ragApplication = await new RAGApplicationBuilder()
|
||||
.setModel('NO_MODEL')
|
||||
.setEmbeddingModel(
|
||||
apiVersion
|
||||
? new AzureOpenAiEmbeddings({
|
||||
azureOpenAIApiKey: apiKey,
|
||||
azureOpenAIApiVersion: apiVersion,
|
||||
azureOpenAIApiDeploymentName: model,
|
||||
azureOpenAIApiInstanceName: getInstanceName(baseURL),
|
||||
configuration: { httpAgent: proxyManager.getProxyAgent() },
|
||||
dimensions,
|
||||
batchSize
|
||||
})
|
||||
: new OpenAiEmbeddings({
|
||||
model,
|
||||
apiKey,
|
||||
configuration: { baseURL, httpAgent: proxyManager.getProxyAgent() },
|
||||
dimensions,
|
||||
batchSize
|
||||
})
|
||||
)
|
||||
.setEmbeddingModel(embeddings)
|
||||
.setVectorDatabase(new LibSqlDb({ path: path.join(this.storageDir, id) }))
|
||||
.build()
|
||||
} catch (e) {
|
||||
Logger.error(e)
|
||||
throw new Error(`Failed to create RAGApplication: ${e}`)
|
||||
}
|
||||
|
||||
return ragApplication
|
||||
}
|
||||
|
||||
public create = async (_: Electron.IpcMainInvokeEvent, base: KnowledgeBaseParams): Promise<void> => {
|
||||
@@ -82,12 +145,52 @@ class KnowledgeService {
|
||||
}
|
||||
}
|
||||
|
||||
public add = async (
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
{ base, item, forceReload = false }: { base: KnowledgeBaseParams; item: KnowledgeItem; forceReload: boolean }
|
||||
): Promise<LoaderReturn> => {
|
||||
setGlobalDispatcher(new ProxyAgent(proxyManager.getProxyUrl() || ''))
|
||||
const ragApplication = await this.getRagApplication(base)
|
||||
private maximumLoad() {
|
||||
return (
|
||||
this.processingItemCount >= KnowledgeService.MAXIMUM_PROCESSING_ITEM_COUNT ||
|
||||
this.workload >= KnowledgeService.MAXIMUM_WORKLOAD
|
||||
)
|
||||
}
|
||||
|
||||
private fileTask(
|
||||
ragApplication: RAGApplication,
|
||||
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
|
||||
): LoaderTask {
|
||||
const { base, item, forceReload } = options
|
||||
const file = item.content as FileType
|
||||
|
||||
const loaderTask: LoaderTask = {
|
||||
loaderTasks: [
|
||||
{
|
||||
state: LoaderTaskItemState.PENDING,
|
||||
task: () =>
|
||||
addFileLoader(ragApplication, file, base, forceReload)
|
||||
.then((result) => {
|
||||
loaderTask.loaderDoneReturn = result
|
||||
return result
|
||||
})
|
||||
.catch((err) => {
|
||||
Logger.error(err)
|
||||
return KnowledgeService.ERROR_LOADER_RETURN
|
||||
}),
|
||||
evaluateTaskWorkload: { workload: file.size }
|
||||
}
|
||||
],
|
||||
loaderDoneReturn: null
|
||||
}
|
||||
|
||||
return loaderTask
|
||||
}
|
||||
|
||||
private directoryTask(
|
||||
ragApplication: RAGApplication,
|
||||
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
|
||||
): LoaderTask {
|
||||
const { base, item, forceReload } = options
|
||||
const directory = item.content as string
|
||||
const files = getAllFiles(directory)
|
||||
const totalFiles = files.length
|
||||
let processedFiles = 0
|
||||
|
||||
const sendDirectoryProcessingPercent = (totalFiles: number, processedFiles: number) => {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
@@ -97,86 +200,256 @@ class KnowledgeService {
|
||||
})
|
||||
}
|
||||
|
||||
if (item.type === 'directory') {
|
||||
const directory = item.content as string
|
||||
const files = getAllFiles(directory)
|
||||
const totalFiles = files.length
|
||||
let processedFiles = 0
|
||||
|
||||
const loaderPromises = files.map(async (file) => {
|
||||
const result = await addFileLoader(ragApplication, file, base, forceReload)
|
||||
processedFiles++
|
||||
const loaderDoneReturn: LoaderDoneReturn = {
|
||||
entriesAdded: 0,
|
||||
uniqueId: `DirectoryLoader_${uuidv4()}`,
|
||||
uniqueIds: [],
|
||||
loaderType: 'DirectoryLoader'
|
||||
}
|
||||
const loaderTasks: LoaderTaskItem[] = []
|
||||
for (const file of files) {
|
||||
loaderTasks.push({
|
||||
state: LoaderTaskItemState.PENDING,
|
||||
task: () =>
|
||||
addFileLoader(ragApplication, file, base, forceReload)
|
||||
.then((result) => {
|
||||
loaderDoneReturn.entriesAdded += 1
|
||||
processedFiles += 1
|
||||
sendDirectoryProcessingPercent(totalFiles, processedFiles)
|
||||
loaderDoneReturn.uniqueIds.push(result.uniqueId)
|
||||
return result
|
||||
})
|
||||
|
||||
const loaderResults = await Promise.allSettled(loaderPromises)
|
||||
// @ts-ignore uniqueId
|
||||
const uniqueIds = loaderResults
|
||||
.filter((result) => result.status === 'fulfilled')
|
||||
.map((result) => result.value.uniqueId)
|
||||
|
||||
return {
|
||||
entriesAdded: loaderResults.length,
|
||||
uniqueId: `DirectoryLoader_${uuidv4()}`,
|
||||
uniqueIds,
|
||||
loaderType: 'DirectoryLoader'
|
||||
} as LoaderReturn
|
||||
.catch((err) => {
|
||||
Logger.error(err)
|
||||
return KnowledgeService.ERROR_LOADER_RETURN
|
||||
}),
|
||||
evaluateTaskWorkload: { workload: file.size }
|
||||
})
|
||||
}
|
||||
|
||||
if (item.type === 'url') {
|
||||
return {
|
||||
loaderTasks,
|
||||
loaderDoneReturn
|
||||
}
|
||||
}
|
||||
|
||||
private urlTask(
|
||||
ragApplication: RAGApplication,
|
||||
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
|
||||
): LoaderTask {
|
||||
const { base, item, forceReload } = options
|
||||
const content = item.content as string
|
||||
if (content.startsWith('http')) {
|
||||
const loaderReturn = await ragApplication.addLoader(
|
||||
new WebLoader({ urlOrContent: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
|
||||
|
||||
const loaderTask: LoaderTask = {
|
||||
loaderTasks: [
|
||||
{
|
||||
state: LoaderTaskItemState.PENDING,
|
||||
task: () => {
|
||||
const loaderReturn = ragApplication.addLoader(
|
||||
new WebLoader({
|
||||
urlOrContent: content,
|
||||
chunkSize: base.chunkSize,
|
||||
chunkOverlap: base.chunkOverlap
|
||||
}),
|
||||
forceReload
|
||||
)
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
uniqueId: loaderReturn.uniqueId,
|
||||
uniqueIds: [loaderReturn.uniqueId],
|
||||
loaderType: loaderReturn.loaderType
|
||||
} as LoaderReturn
|
||||
) as Promise<LoaderReturn>
|
||||
|
||||
return loaderReturn
|
||||
.then((result) => {
|
||||
const { entriesAdded, uniqueId, loaderType } = result
|
||||
loaderTask.loaderDoneReturn = {
|
||||
entriesAdded: entriesAdded,
|
||||
uniqueId: uniqueId,
|
||||
uniqueIds: [uniqueId],
|
||||
loaderType: loaderType
|
||||
}
|
||||
return result
|
||||
})
|
||||
.catch((err) => {
|
||||
Logger.error(err)
|
||||
return KnowledgeService.ERROR_LOADER_RETURN
|
||||
})
|
||||
},
|
||||
evaluateTaskWorkload: { workload: 1024 * 1024 * 2 }
|
||||
}
|
||||
],
|
||||
loaderDoneReturn: null
|
||||
}
|
||||
return loaderTask
|
||||
}
|
||||
|
||||
if (item.type === 'sitemap') {
|
||||
private sitemapTask(
|
||||
ragApplication: RAGApplication,
|
||||
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
|
||||
): LoaderTask {
|
||||
const { base, item, forceReload } = options
|
||||
const content = item.content as string
|
||||
// @ts-ignore loader type
|
||||
const loaderReturn = await ragApplication.addLoader(
|
||||
|
||||
const loaderTask: LoaderTask = {
|
||||
loaderTasks: [
|
||||
{
|
||||
state: LoaderTaskItemState.PENDING,
|
||||
task: () =>
|
||||
ragApplication
|
||||
.addLoader(
|
||||
new SitemapLoader({ url: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }) as any,
|
||||
forceReload
|
||||
)
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
uniqueId: loaderReturn.uniqueId,
|
||||
uniqueIds: [loaderReturn.uniqueId],
|
||||
loaderType: loaderReturn.loaderType
|
||||
} as LoaderReturn
|
||||
.then((result) => {
|
||||
const { entriesAdded, uniqueId, loaderType } = result
|
||||
loaderTask.loaderDoneReturn = {
|
||||
entriesAdded: entriesAdded,
|
||||
uniqueId: uniqueId,
|
||||
uniqueIds: [uniqueId],
|
||||
loaderType: loaderType
|
||||
}
|
||||
return result
|
||||
})
|
||||
.catch((err) => {
|
||||
Logger.error(err)
|
||||
return KnowledgeService.ERROR_LOADER_RETURN
|
||||
}),
|
||||
evaluateTaskWorkload: { workload: 1024 * 1024 * 20 }
|
||||
}
|
||||
],
|
||||
loaderDoneReturn: null
|
||||
}
|
||||
return loaderTask
|
||||
}
|
||||
|
||||
if (item.type === 'note') {
|
||||
private noteTask(
|
||||
ragApplication: RAGApplication,
|
||||
options: KnowledgeBaseAddItemOptionsNonNullableAttribute
|
||||
): LoaderTask {
|
||||
const { base, item, forceReload } = options
|
||||
const content = item.content as string
|
||||
console.debug('chunkSize', base.chunkSize)
|
||||
const loaderReturn = await ragApplication.addLoader(
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
const contentBytes = encoder.encode(content)
|
||||
const loaderTask: LoaderTask = {
|
||||
loaderTasks: [
|
||||
{
|
||||
state: LoaderTaskItemState.PENDING,
|
||||
task: () => {
|
||||
const loaderReturn = ragApplication.addLoader(
|
||||
new TextLoader({ text: content, chunkSize: base.chunkSize, chunkOverlap: base.chunkOverlap }),
|
||||
forceReload
|
||||
)
|
||||
return {
|
||||
entriesAdded: loaderReturn.entriesAdded,
|
||||
uniqueId: loaderReturn.uniqueId,
|
||||
uniqueIds: [loaderReturn.uniqueId],
|
||||
loaderType: loaderReturn.loaderType
|
||||
} as LoaderReturn
|
||||
) as Promise<LoaderReturn>
|
||||
|
||||
return loaderReturn
|
||||
.then(({ entriesAdded, uniqueId, loaderType }) => {
|
||||
loaderTask.loaderDoneReturn = {
|
||||
entriesAdded: entriesAdded,
|
||||
uniqueId: uniqueId,
|
||||
uniqueIds: [uniqueId],
|
||||
loaderType: loaderType
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
Logger.error(err)
|
||||
return KnowledgeService.ERROR_LOADER_RETURN
|
||||
})
|
||||
},
|
||||
evaluateTaskWorkload: { workload: contentBytes.length }
|
||||
}
|
||||
],
|
||||
loaderDoneReturn: null
|
||||
}
|
||||
return loaderTask
|
||||
}
|
||||
|
||||
if (item.type === 'file') {
|
||||
const file = item.content as FileType
|
||||
|
||||
return await addFileLoader(ragApplication, file, base, forceReload)
|
||||
private processingQueueHandle() {
|
||||
const getSubtasksUntilMaximumLoad = (): QueueTaskItem[] => {
|
||||
const queueTaskList: QueueTaskItem[] = []
|
||||
that: for (const [task, resolve] of this.knowledgeItemProcessingQueueMappingPromise) {
|
||||
for (const item of task.loaderTasks) {
|
||||
if (this.maximumLoad()) {
|
||||
break that
|
||||
}
|
||||
|
||||
return { entriesAdded: 0, uniqueId: '', uniqueIds: [''], loaderType: '' }
|
||||
const { state, task: taskPromise, evaluateTaskWorkload } = item
|
||||
|
||||
if (state !== LoaderTaskItemState.PENDING) {
|
||||
continue
|
||||
}
|
||||
|
||||
const { workload } = evaluateTaskWorkload
|
||||
this.workload += workload
|
||||
this.processingItemCount += 1
|
||||
item.state = LoaderTaskItemState.PROCESSING
|
||||
queueTaskList.push({
|
||||
taskPromise: () =>
|
||||
taskPromise().then(() => {
|
||||
this.workload -= workload
|
||||
this.processingItemCount -= 1
|
||||
task.loaderTasks.delete(item)
|
||||
if (task.loaderTasks.size === 0) {
|
||||
this.knowledgeItemProcessingQueueMappingPromise.delete(task)
|
||||
resolve()
|
||||
}
|
||||
this.processingQueueHandle()
|
||||
}),
|
||||
resolve: () => {},
|
||||
evaluateTaskWorkload
|
||||
})
|
||||
}
|
||||
}
|
||||
return queueTaskList
|
||||
}
|
||||
const subTasks = getSubtasksUntilMaximumLoad()
|
||||
if (subTasks.length > 0) {
|
||||
const subTaskPromises = subTasks.map(({ taskPromise }) => taskPromise())
|
||||
Promise.all(subTaskPromises).then(() => {
|
||||
subTasks.forEach(({ resolve }) => resolve())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
private appendProcessingQueue(task: LoaderTask): Promise<LoaderReturn> {
|
||||
return new Promise((resolve) => {
|
||||
this.knowledgeItemProcessingQueueMappingPromise.set(loaderTaskIntoOfSet(task), () => {
|
||||
resolve(task.loaderDoneReturn!)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
public add = (_: Electron.IpcMainInvokeEvent, options: KnowledgeBaseAddItemOptions): Promise<LoaderReturn> => {
|
||||
return new Promise((resolve) => {
|
||||
const { base, item, forceReload = false } = options
|
||||
const optionsNonNullableAttribute = { base, item, forceReload }
|
||||
this.getRagApplication(base)
|
||||
.then((ragApplication) => {
|
||||
const task = (() => {
|
||||
switch (item.type) {
|
||||
case 'file':
|
||||
return this.fileTask(ragApplication, optionsNonNullableAttribute)
|
||||
case 'directory':
|
||||
return this.directoryTask(ragApplication, optionsNonNullableAttribute)
|
||||
case 'url':
|
||||
return this.urlTask(ragApplication, optionsNonNullableAttribute)
|
||||
case 'sitemap':
|
||||
return this.sitemapTask(ragApplication, optionsNonNullableAttribute)
|
||||
case 'note':
|
||||
return this.noteTask(ragApplication, optionsNonNullableAttribute)
|
||||
default:
|
||||
return null
|
||||
}
|
||||
})()
|
||||
|
||||
if (task) {
|
||||
this.appendProcessingQueue(task).then(() => {
|
||||
resolve(task.loaderDoneReturn!)
|
||||
})
|
||||
this.processingQueueHandle()
|
||||
} else {
|
||||
resolve(KnowledgeService.ERROR_LOADER_RETURN)
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
Logger.error(err)
|
||||
resolve(KnowledgeService.ERROR_LOADER_RETURN)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
public remove = async (
|
||||
@@ -184,7 +457,7 @@ class KnowledgeService {
|
||||
{ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }
|
||||
): Promise<void> => {
|
||||
const ragApplication = await this.getRagApplication(base)
|
||||
console.debug(`[ KnowledgeService Remove Item UniqueId: ${uniqueId}]`)
|
||||
console.log(`[ KnowledgeService Remove Item UniqueId: ${uniqueId}]`)
|
||||
for (const id of uniqueIds) {
|
||||
await ragApplication.deleteLoader(id)
|
||||
}
|
||||
@@ -197,6 +470,16 @@ class KnowledgeService {
|
||||
const ragApplication = await this.getRagApplication(base)
|
||||
return await ragApplication.search(search)
|
||||
}
|
||||
|
||||
public rerank = async (
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
{ search, base, results }: { search: string; base: KnowledgeBaseParams; results: ExtractChunkData[] }
|
||||
): Promise<ExtractChunkData[]> => {
|
||||
if (results.length === 0) {
|
||||
return results
|
||||
}
|
||||
return await new Reranker(base).rerank(search, results)
|
||||
}
|
||||
}
|
||||
|
||||
export default new KnowledgeService()
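The queueing logic above can be hard to follow inline, so here is a condensed sketch of the idea: admit tasks only while both the in-flight item count and the summed byte workload stay under fixed caps, and re-drain whenever a task finishes. Names and limits are illustrative, not the service's own.

// Minimal workload-capped queue; each job carries an estimated workload in bytes.
type Job = { run: () => Promise<unknown>; workload: number }

class WorkloadQueue {
  private pending: Job[] = []
  private inFlight = 0
  private usedWorkload = 0

  constructor(
    private maxItems = 30,
    private maxWorkload = 80 * 1024 * 1024
  ) {}

  enqueue(job: Job): void {
    this.pending.push(job)
    this.drain()
  }

  private drain(): void {
    while (this.pending.length > 0 && this.inFlight < this.maxItems && this.usedWorkload < this.maxWorkload) {
      const job = this.pending.shift()!
      this.inFlight += 1
      this.usedWorkload += job.workload
      job
        .run()
        .catch(() => undefined) // the job itself is expected to report its own errors
        .finally(() => {
          this.inFlight -= 1
          this.usedWorkload -= job.workload
          this.drain() // admit the next pending job once capacity frees up
        })
    }
  }
}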
|
||||
|
||||
293
src/main/services/MCPService.ts
Normal file
@@ -0,0 +1,293 @@
|
||||
import os from 'node:os'
|
||||
import path from 'node:path'
|
||||
|
||||
import { isLinux, isMac, isWin } from '@main/constant'
|
||||
import { makeSureDirExists } from '@main/utils'
|
||||
import { getBinaryName, getBinaryPath } from '@main/utils/process'
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js'
|
||||
import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js'
|
||||
import { getDefaultEnvironment, StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js'
|
||||
import { nanoid } from '@reduxjs/toolkit'
|
||||
import { MCPServer, MCPTool } from '@types'
|
||||
import { app } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
|
||||
import { CacheService } from './CacheService'
|
||||
|
||||
class McpService {
|
||||
private clients: Map<string, Client> = new Map()
|
||||
|
||||
private getServerKey(server: MCPServer): string {
|
||||
return JSON.stringify({
|
||||
baseUrl: server.baseUrl,
|
||||
command: server.command,
|
||||
args: server.args,
|
||||
registryUrl: server.registryUrl,
|
||||
env: server.env,
|
||||
id: server.id
|
||||
})
|
||||
}
|
||||
|
||||
constructor() {
|
||||
this.initClient = this.initClient.bind(this)
|
||||
this.listTools = this.listTools.bind(this)
|
||||
this.callTool = this.callTool.bind(this)
|
||||
this.closeClient = this.closeClient.bind(this)
|
||||
this.removeServer = this.removeServer.bind(this)
|
||||
this.restartServer = this.restartServer.bind(this)
|
||||
this.stopServer = this.stopServer.bind(this)
|
||||
}
|
||||
|
||||
async initClient(server: MCPServer): Promise<Client> {
|
||||
const serverKey = this.getServerKey(server)
|
||||
|
||||
// Check if we already have a client for this server configuration
|
||||
const existingClient = this.clients.get(serverKey)
|
||||
if (existingClient) {
|
||||
// Check if the existing client is still connected
|
||||
const pingResult = await existingClient.ping()
|
||||
Logger.info(`[MCP] Ping result for ${server.name}:`, pingResult)
|
||||
// If the ping fails, remove the client from the cache
|
||||
// and create a new one
|
||||
if (!pingResult) {
|
||||
this.clients.delete(serverKey)
|
||||
} else {
|
||||
return existingClient
|
||||
}
|
||||
}
|
||||
|
||||
// Create new client instance for each connection
|
||||
const client = new Client({ name: 'Cherry Studio', version: app.getVersion() }, { capabilities: {} })
|
||||
|
||||
const args = [...(server.args || [])]
|
||||
|
||||
let transport: StdioClientTransport | SSEClientTransport
|
||||
|
||||
try {
|
||||
// Create appropriate transport based on configuration
|
||||
if (server.baseUrl) {
|
||||
transport = new SSEClientTransport(new URL(server.baseUrl))
|
||||
} else if (server.command) {
|
||||
let cmd = server.command
|
||||
|
||||
if (server.command === 'npx' || server.command === 'bun' || server.command === 'bunx') {
|
||||
cmd = await getBinaryPath('bun')
|
||||
Logger.info(`[MCP] Using command: ${cmd}`)
|
||||
|
||||
// prepend 'x' and '-y' to the args when running through bun
if (args && args.length > 0) {
|
||||
if (!args.includes('-y')) {
|
||||
args.unshift('-y')
}
|
||||
if (!args.includes('x')) {
|
||||
args.unshift('x')
|
||||
}
|
||||
}
|
||||
if (server.registryUrl) {
|
||||
server.env = {
|
||||
...server.env,
|
||||
NPM_CONFIG_REGISTRY: server.registryUrl
|
||||
}
|
||||
|
||||
// if the server name is mcp-auto-install, use the mcp-registry.json file in the bin directory
|
||||
if (server.name === 'mcp-auto-install') {
|
||||
const binPath = await getBinaryPath()
|
||||
makeSureDirExists(binPath)
|
||||
server.env.MCP_REGISTRY_PATH = path.join(binPath, 'mcp-registry.json')
|
||||
}
|
||||
}
|
||||
} else if (server.command === 'uvx' || server.command === 'uv') {
|
||||
cmd = await getBinaryPath(server.command)
|
||||
if (server.registryUrl) {
|
||||
server.env = {
|
||||
...server.env,
|
||||
UV_DEFAULT_INDEX: server.registryUrl,
|
||||
PIP_INDEX_URL: server.registryUrl
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Logger.info(`[MCP] Starting server with command: ${cmd} ${args ? args.join(' ') : ''}`)
|
||||
// Logger.info(`[MCP] Environment variables for server:`, server.env)
|
||||
|
||||
transport = new StdioClientTransport({
|
||||
command: cmd,
|
||||
args,
|
||||
env: {
|
||||
...getDefaultEnvironment(),
|
||||
PATH: this.getEnhancedPath(process.env.PATH || ''),
|
||||
...server.env
|
||||
}
|
||||
})
|
||||
} else {
|
||||
throw new Error('Either baseUrl or command must be provided')
|
||||
}
|
||||
|
||||
await client.connect(transport)
|
||||
|
||||
// Store the new client in the cache
|
||||
this.clients.set(serverKey, client)
|
||||
|
||||
Logger.info(`[MCP] Activated server: ${server.name}`)
|
||||
return client
|
||||
} catch (error: any) {
|
||||
Logger.error(`[MCP] Error activating server ${server.name}:`, error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
async closeClient(serverKey: string) {
|
||||
const client = this.clients.get(serverKey)
|
||||
if (client) {
|
||||
// Remove the client from the cache
|
||||
await client.close()
|
||||
Logger.info(`[MCP] Closed server: ${serverKey}`)
|
||||
this.clients.delete(serverKey)
|
||||
CacheService.remove(`mcp:list_tool:${serverKey}`)
|
||||
Logger.info(`[MCP] Cleared cache for server: ${serverKey}`)
|
||||
} else {
|
||||
Logger.warn(`[MCP] No client found for server: ${serverKey}`)
|
||||
}
|
||||
}
|
||||
|
||||
async stopServer(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
|
||||
const serverKey = this.getServerKey(server)
|
||||
Logger.info(`[MCP] Stopping server: ${server.name}`)
|
||||
await this.closeClient(serverKey)
|
||||
}
|
||||
|
||||
async removeServer(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
|
||||
const serverKey = this.getServerKey(server)
|
||||
const existingClient = this.clients.get(serverKey)
|
||||
if (existingClient) {
|
||||
await this.closeClient(serverKey)
|
||||
}
|
||||
}
|
||||
|
||||
async restartServer(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
|
||||
Logger.info(`[MCP] Restarting server: ${server.name}`)
|
||||
const serverKey = this.getServerKey(server)
|
||||
await this.closeClient(serverKey)
|
||||
await this.initClient(server)
|
||||
}
|
||||
|
||||
async listTools(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
|
||||
const client = await this.initClient(server)
|
||||
const serverKey = this.getServerKey(server)
|
||||
const cacheKey = `mcp:list_tool:${serverKey}`
|
||||
if (CacheService.has(cacheKey)) {
|
||||
Logger.info(`[MCP] Tools from ${server.name} loaded from cache`)
|
||||
const cachedTools = CacheService.get<MCPTool[]>(cacheKey)
|
||||
if (cachedTools && cachedTools.length > 0) {
|
||||
return cachedTools
|
||||
}
|
||||
}
|
||||
Logger.info(`[MCP] Listing tools for server: ${server.name}`)
|
||||
const { tools } = await client.listTools()
|
||||
const serverTools: MCPTool[] = []
|
||||
tools.map((tool: any) => {
|
||||
const serverTool: MCPTool = {
|
||||
...tool,
|
||||
id: `f${nanoid()}`,
|
||||
serverId: server.id,
|
||||
serverName: server.name
|
||||
}
|
||||
serverTools.push(serverTool)
|
||||
})
|
||||
CacheService.set(cacheKey, serverTools, 5 * 60 * 1000)
|
||||
return serverTools
|
||||
}
|
||||
|
||||
/**
|
||||
* Call a tool on an MCP server
|
||||
*/
|
||||
public async callTool(
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
{ server, name, args }: { server: MCPServer; name: string; args: any }
|
||||
): Promise<any> {
|
||||
try {
|
||||
Logger.info('[MCP] Calling:', server.name, name, args)
|
||||
const client = await this.initClient(server)
|
||||
const result = await client.callTool({ name, arguments: args })
|
||||
return result
|
||||
} catch (error) {
|
||||
Logger.error(`[MCP] Error calling tool ${name} on ${server.name}:`, error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
public async getInstallInfo() {
|
||||
const dir = path.join(os.homedir(), '.cherrystudio', 'bin')
|
||||
const uvName = await getBinaryName('uv')
|
||||
const bunName = await getBinaryName('bun')
|
||||
const uvPath = path.join(dir, uvName)
|
||||
const bunPath = path.join(dir, bunName)
|
||||
return { dir, uvPath, bunPath }
|
||||
}
|
||||
|
||||
/**
|
||||
* Get enhanced PATH including common tool locations
|
||||
*/
|
||||
private getEnhancedPath(originalPath: string): string {
|
||||
// Split the original PATH into an array by the platform separator
const pathSeparator = process.platform === 'win32' ? ';' : ':'
|
||||
const existingPaths = new Set(originalPath.split(pathSeparator).filter(Boolean))
|
||||
const homeDir = process.env.HOME || process.env.USERPROFILE || ''
|
||||
|
||||
// Define the new paths to add
const newPaths: string[] = []
|
||||
|
||||
if (isMac) {
|
||||
newPaths.push(
|
||||
'/bin',
|
||||
'/usr/bin',
|
||||
'/usr/local/bin',
|
||||
'/usr/local/sbin',
|
||||
'/opt/homebrew/bin',
|
||||
'/opt/homebrew/sbin',
|
||||
'/usr/local/opt/node/bin',
|
||||
`${homeDir}/.nvm/current/bin`,
|
||||
`${homeDir}/.npm-global/bin`,
|
||||
`${homeDir}/.yarn/bin`,
|
||||
`${homeDir}/.cargo/bin`,
|
||||
`${homeDir}/.cherrystudio/bin`,
|
||||
'/opt/local/bin'
|
||||
)
|
||||
}
|
||||
|
||||
if (isLinux) {
|
||||
newPaths.push(
|
||||
'/bin',
|
||||
'/usr/bin',
|
||||
'/usr/local/bin',
|
||||
`${homeDir}/.nvm/current/bin`,
|
||||
`${homeDir}/.npm-global/bin`,
|
||||
`${homeDir}/.yarn/bin`,
|
||||
`${homeDir}/.cargo/bin`,
|
||||
`${homeDir}/.cherrystudio/bin`,
|
||||
'/snap/bin'
|
||||
)
|
||||
}
|
||||
|
||||
if (isWin) {
|
||||
newPaths.push(
|
||||
`${process.env.APPDATA}\\npm`,
|
||||
`${homeDir}\\AppData\\Local\\Yarn\\bin`,
|
||||
`${homeDir}\\.cargo\\bin`,
|
||||
`${homeDir}\\.cherrystudio\\bin`
|
||||
)
|
||||
}
|
||||
|
||||
// Only add paths that are not already present
newPaths.forEach((path) => {
|
||||
if (path && !existingPaths.has(path)) {
|
||||
existingPaths.add(path)
|
||||
}
|
||||
})
|
||||
|
||||
// Join back into a single string
return Array.from(existingPaths).join(pathSeparator)
|
||||
}
|
||||
}
|
||||
|
||||
export default new McpService()
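For completeness, a sketch of how the exported singleton could be wired to ipcMain; the channel names are assumptions for illustration and may not match the ones the application registers.

// Hypothetical main-process wiring; channel names are assumed.
import { ipcMain } from 'electron'

import mcpService from './MCPService'

export function registerMcpHandlers(): void {
  // The methods are bound in the constructor, so they can be passed directly as handlers.
  ipcMain.handle('mcp:list-tools', mcpService.listTools)
  ipcMain.handle('mcp:call-tool', mcpService.callTool)
  ipcMain.handle('mcp:restart-server', mcpService.restartServer)
  ipcMain.handle('mcp:stop-server', mcpService.stopServer)
}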
|
||||
134
src/main/services/NutstoreService.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
import path from 'node:path'
|
||||
|
||||
import { NUTSTORE_HOST } from '@shared/config/nutstore'
|
||||
import { XMLParser } from 'fast-xml-parser'
|
||||
import { isNil, partial } from 'lodash'
|
||||
import { type FileStat } from 'webdav'
|
||||
|
||||
interface OAuthResponse {
|
||||
username: string
|
||||
userid: string
|
||||
access_token: string
|
||||
}
|
||||
|
||||
interface WebDAVResponse {
|
||||
multistatus: {
|
||||
response: Array<{
|
||||
href: string
|
||||
propstat: {
|
||||
prop: {
|
||||
displayname: string
|
||||
resourcetype: { collection?: any }
|
||||
getlastmodified?: string
|
||||
getcontentlength?: string
|
||||
getcontenttype?: string
|
||||
}
|
||||
status: string
|
||||
}
|
||||
}>
|
||||
}
|
||||
}
|
||||
|
||||
export async function getNutstoreSSOUrl() {
|
||||
const { createOAuthUrl } = await import('../integration/nutstore/sso/lib')
|
||||
|
||||
const url = createOAuthUrl({
|
||||
app: 'cherrystudio'
|
||||
})
|
||||
return url
|
||||
}
|
||||
|
||||
export async function decryptToken(token: string) {
|
||||
const { decrypt } = await import('../integration/nutstore/sso/lib')
|
||||
try {
|
||||
const decrypted = decrypt('cherrystudio', token)
|
||||
return JSON.parse(decrypted) as OAuthResponse
|
||||
} catch (error) {
|
||||
console.error('解密失败:', error)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
export async function getDirectoryContents(token: string, target: string): Promise<FileStat[]> {
|
||||
const contents: FileStat[] = []
|
||||
if (!target.startsWith('/')) {
|
||||
target = '/' + target
|
||||
}
|
||||
|
||||
let currentUrl = `${NUTSTORE_HOST}${target}`
|
||||
|
||||
while (true) {
|
||||
const response = await fetch(currentUrl, {
|
||||
method: 'PROPFIND',
|
||||
headers: {
|
||||
Authorization: `Basic ${token}`,
|
||||
'Content-Type': 'application/xml',
|
||||
Depth: '1'
|
||||
},
|
||||
body: `<?xml version="1.0" encoding="utf-8"?>
|
||||
<propfind xmlns="DAV:">
|
||||
<prop>
|
||||
<displayname/>
|
||||
<resourcetype/>
|
||||
<getlastmodified/>
|
||||
<getcontentlength/>
|
||||
<getcontenttype/>
|
||||
</prop>
|
||||
</propfind>`
|
||||
})
|
||||
|
||||
const text = await response.text()
|
||||
|
||||
const result = parseXml<WebDAVResponse>(text)
|
||||
const items = Array.isArray(result.multistatus.response)
|
||||
? result.multistatus.response
|
||||
: [result.multistatus.response]
|
||||
|
||||
// Skip the first entry (the current directory itself)
contents.push(...items.slice(1).map(partial(convertToFileStat, '/dav')))
|
||||
|
||||
const linkHeader = response.headers.get('link')
if (!linkHeader) {
|
||||
break
|
||||
}
|
||||
|
||||
const nextLink = extractNextLink(linkHeader)
|
||||
if (!nextLink) {
|
||||
break
|
||||
}
|
||||
|
||||
currentUrl = decodeURI(nextLink)
|
||||
}
|
||||
|
||||
return contents
|
||||
}
|
||||
|
||||
function extractNextLink(linkHeader: string): string | null {
|
||||
const matches = linkHeader.match(/<([^>]+)>;\s*rel="next"/)
|
||||
return matches ? matches[1] : null
|
||||
}
|
||||
|
||||
function convertToFileStat(serverBase: string, item: WebDAVResponse['multistatus']['response'][number]): FileStat {
|
||||
const props = item.propstat.prop
|
||||
const isDir = !isNil(props.resourcetype?.collection)
|
||||
const href = decodeURIComponent(item.href)
|
||||
const filename = serverBase === '/' ? href : path.posix.join('/', href.replace(serverBase, ''))
|
||||
|
||||
return {
|
||||
filename: filename.endsWith('/') ? filename.slice(0, -1) : filename,
|
||||
basename: path.basename(filename),
|
||||
lastmod: props.getlastmodified || '',
|
||||
size: props.getcontentlength ? parseInt(props.getcontentlength, 10) : 0,
|
||||
type: isDir ? 'directory' : 'file',
|
||||
etag: null,
|
||||
mime: props.getcontenttype
|
||||
}
|
||||
}
|
||||
|
||||
function parseXml<T>(xml: string) {
|
||||
const parser = new XMLParser({
|
||||
attributeNamePrefix: '',
|
||||
removeNSPrefix: true
|
||||
})
|
||||
return parser.parse(xml) as T
|
||||
}
|
||||
167
src/main/services/ObsidianVaultService.ts
Normal file
@@ -0,0 +1,167 @@
|
||||
import { app } from 'electron'
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
interface VaultInfo {
|
||||
path: string
|
||||
name: string
|
||||
}
|
||||
|
||||
interface FileInfo {
|
||||
path: string
|
||||
type: 'folder' | 'markdown'
|
||||
name: string
|
||||
}
|
||||
|
||||
class ObsidianVaultService {
|
||||
private obsidianConfigPath: string
|
||||
|
||||
constructor() {
|
||||
// Resolve the Obsidian config file path for the current OS
if (process.platform === 'win32') {
|
||||
this.obsidianConfigPath = path.join(app.getPath('appData'), 'obsidian', 'obsidian.json')
|
||||
} else if (process.platform === 'darwin') {
|
||||
this.obsidianConfigPath = path.join(
|
||||
app.getPath('home'),
|
||||
'Library',
|
||||
'Application Support',
|
||||
'obsidian',
|
||||
'obsidian.json'
|
||||
)
|
||||
} else {
|
||||
// Linux
|
||||
this.obsidianConfigPath = path.join(app.getPath('home'), '.config', 'obsidian', 'obsidian.json')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all Obsidian vaults
*/
|
||||
getVaults(): VaultInfo[] {
|
||||
try {
|
||||
if (!fs.existsSync(this.obsidianConfigPath)) {
|
||||
return []
|
||||
}
|
||||
|
||||
const configContent = fs.readFileSync(this.obsidianConfigPath, 'utf8')
|
||||
const config = JSON.parse(configContent)
|
||||
|
||||
if (!config.vaults) {
|
||||
return []
|
||||
}
|
||||
|
||||
return Object.entries(config.vaults).map(([, vault]: [string, any]) => ({
|
||||
path: vault.path,
|
||||
name: vault.name || path.basename(vault.path)
|
||||
}))
|
||||
} catch (error) {
|
||||
console.error('获取Obsidian Vault失败:', error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the folder and Markdown file structure of a vault
*/
|
||||
getVaultStructure(vaultPath: string): FileInfo[] {
|
||||
const results: FileInfo[] = []
|
||||
|
||||
try {
|
||||
// Check that the vault path exists
if (!fs.existsSync(vaultPath)) {
|
||||
console.error('Vault路径不存在:', vaultPath)
|
||||
return []
|
||||
}
|
||||
|
||||
// Check that it is a directory
const stats = fs.statSync(vaultPath)
|
||||
if (!stats.isDirectory()) {
|
||||
console.error('Vault路径不是一个目录:', vaultPath)
|
||||
return []
|
||||
}
|
||||
|
||||
this.traverseDirectory(vaultPath, '', results)
|
||||
} catch (error) {
|
||||
console.error('读取Vault文件夹结构失败:', error)
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively traverse a directory to collect all folders and Markdown files
*/
|
||||
private traverseDirectory(dirPath: string, relativePath: string, results: FileInfo[]) {
|
||||
try {
|
||||
// Add the current folder first
if (relativePath) {
|
||||
results.push({
|
||||
path: relativePath,
|
||||
type: 'folder',
|
||||
name: path.basename(relativePath)
|
||||
})
|
||||
}
|
||||
|
||||
// Make sure the directory exists and is accessible
if (!fs.existsSync(dirPath)) {
|
||||
console.error('目录不存在:', dirPath)
|
||||
return
|
||||
}
|
||||
|
||||
let items
|
||||
try {
|
||||
items = fs.readdirSync(dirPath, { withFileTypes: true })
|
||||
} catch (err) {
|
||||
console.error(`无法读取目录 ${dirPath}:`, err)
|
||||
return
|
||||
}
|
||||
|
||||
for (const item of items) {
|
||||
// Skip hidden files and folders that start with '.'
if (item.name.startsWith('.')) {
|
||||
continue
|
||||
}
|
||||
|
||||
const newRelativePath = relativePath ? `${relativePath}/${item.name}` : item.name
|
||||
const fullPath = path.join(dirPath, item.name)
|
||||
|
||||
if (item.isDirectory()) {
|
||||
this.traverseDirectory(fullPath, newRelativePath, results)
|
||||
} else if (item.isFile() && item.name.endsWith('.md')) {
|
||||
// Collect .md files
results.push({
|
||||
path: newRelativePath,
|
||||
type: 'markdown',
|
||||
name: item.name
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`遍历目录出错 ${dirPath}:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the folder and Markdown file structure of the named vault
* @param vaultName vault name
*/
|
||||
getFilesByVaultName(vaultName: string): FileInfo[] {
|
||||
try {
|
||||
const vaults = this.getVaults()
|
||||
const vault = vaults.find((v) => v.name === vaultName)
|
||||
|
||||
if (!vault) {
|
||||
console.error('未找到指定名称的Vault:', vaultName)
|
||||
return []
|
||||
}
|
||||
|
||||
console.log('获取Vault文件结构:', vault.name, vault.path)
|
||||
return this.getVaultStructure(vault.path)
|
||||
} catch (error) {
|
||||
console.error('获取Vault文件结构时发生错误:', error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default ObsidianVaultService
|
||||
34
src/main/services/ProtocolClient.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { windowService } from './WindowService'
|
||||
|
||||
export const CHERRY_STUDIO_PROTOCOL = 'cherrystudio'
|
||||
|
||||
export function registerProtocolClient(app: Electron.App) {
|
||||
if (process.defaultApp) {
|
||||
if (process.argv.length >= 2) {
|
||||
app.setAsDefaultProtocolClient(CHERRY_STUDIO_PROTOCOL, process.execPath, [process.argv[1]])
|
||||
}
|
||||
}
|
||||
|
||||
app.setAsDefaultProtocolClient('cherrystudio')
|
||||
}
|
||||
|
||||
export function handleProtocolUrl(url: string) {
|
||||
if (!url) return
|
||||
// Process the URL that was used to open the app
|
||||
// The url will be in the format: cherrystudio://data?param1=value1&param2=value2
console.log('Received URL:', url)
|
||||
|
||||
// Parse the URL and extract parameters
|
||||
const urlObj = new URL(url)
|
||||
const params = new URLSearchParams(urlObj.search)
|
||||
|
||||
// You can send the data to your renderer process
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
|
||||
if (mainWindow && !mainWindow.isDestroyed()) {
|
||||
mainWindow.webContents.send('protocol-data', {
|
||||
url,
|
||||
params: Object.fromEntries(params.entries())
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,22 +1,23 @@
|
||||
import { ProxyConfig as _ProxyConfig, session } from 'electron'
|
||||
import { HttpsProxyAgent } from 'https-proxy-agent'
|
||||
import { socksDispatcher } from 'fetch-socks'
|
||||
import { ProxyAgent as GeneralProxyAgent } from 'proxy-agent'
|
||||
import { ProxyAgent, setGlobalDispatcher } from 'undici'
|
||||
|
||||
type ProxyMode = 'system' | 'custom' | 'none'
|
||||
|
||||
export interface ProxyConfig {
|
||||
mode: ProxyMode
|
||||
url?: string | null
|
||||
url?: string
|
||||
}
|
||||
|
||||
export class ProxyManager {
|
||||
private config: ProxyConfig
|
||||
private proxyAgent: HttpsProxyAgent | null = null
|
||||
private proxyUrl: string | null = null
|
||||
private proxyAgent: GeneralProxyAgent | null = null
|
||||
private systemProxyInterval: NodeJS.Timeout | null = null
|
||||
|
||||
constructor() {
|
||||
this.config = {
|
||||
mode: 'system',
|
||||
url: ''
|
||||
mode: 'none'
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,12 +26,30 @@ export class ProxyManager {
|
||||
await Promise.all(sessions.map((session) => session.setProxy(config)))
|
||||
}
|
||||
|
||||
private async monitorSystemProxy(): Promise<void> {
|
||||
// Clear any existing interval first
|
||||
this.clearSystemProxyMonitor()
|
||||
// Set new interval
|
||||
this.systemProxyInterval = setInterval(async () => {
|
||||
await this.setSystemProxy()
|
||||
}, 10000)
|
||||
}
|
||||
|
||||
private clearSystemProxyMonitor(): void {
|
||||
if (this.systemProxyInterval) {
|
||||
clearInterval(this.systemProxyInterval)
|
||||
this.systemProxyInterval = null
|
||||
}
|
||||
}
|
||||
|
||||
async configureProxy(config: ProxyConfig): Promise<void> {
|
||||
try {
|
||||
this.config = config
|
||||
this.clearSystemProxyMonitor()
|
||||
if (this.config.mode === 'system') {
|
||||
await this.setSystemProxy()
|
||||
} else if (this.config.mode == 'custom') {
|
||||
this.monitorSystemProxy()
|
||||
} else if (this.config.mode === 'custom') {
|
||||
await this.setCustomProxy()
|
||||
} else {
|
||||
await this.clearProxy()
|
||||
@@ -51,11 +70,14 @@ export class ProxyManager {
|
||||
|
||||
private async setSystemProxy(): Promise<void> {
|
||||
try {
|
||||
this.proxyUrl = await this.resolveSystemProxy()
|
||||
if (this.proxyUrl) {
|
||||
this.proxyAgent = new HttpsProxyAgent(this.proxyUrl)
|
||||
this.setEnvironment(this.proxyUrl)
|
||||
await this.setSessionsProxy({ mode: 'system' })
|
||||
const proxyString = await session.defaultSession.resolveProxy('https://dummy.com')
|
||||
const [protocol, address] = proxyString.split(';')[0].split(' ')
|
||||
const url = protocol === 'PROXY' ? `http://${address}` : null
|
||||
if (url && url !== this.config.url) {
|
||||
this.config.url = url.toLowerCase()
|
||||
this.setEnvironment(this.config.url)
|
||||
this.proxyAgent = new GeneralProxyAgent()
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to set system proxy:', error)
|
||||
@@ -66,9 +88,8 @@ export class ProxyManager {
|
||||
private async setCustomProxy(): Promise<void> {
|
||||
try {
|
||||
if (this.config.url) {
|
||||
this.proxyUrl = this.config.url
|
||||
this.proxyAgent = new HttpsProxyAgent(this.config.url)
|
||||
this.setEnvironment(this.config.url)
|
||||
this.proxyAgent = new GeneralProxyAgent()
|
||||
await this.setSessionsProxy({ proxyRules: this.config.url })
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -77,39 +98,45 @@ export class ProxyManager {
|
||||
}
|
||||
}
|
||||
|
||||
private async clearProxy(): Promise<void> {
|
||||
private clearEnvironment(): void {
|
||||
delete process.env.HTTP_PROXY
|
||||
delete process.env.HTTPS_PROXY
|
||||
await this.setSessionsProxy({})
|
||||
delete process.env.grpc_proxy
|
||||
delete process.env.http_proxy
|
||||
delete process.env.https_proxy
|
||||
}
|
||||
|
||||
private async clearProxy(): Promise<void> {
|
||||
this.clearEnvironment()
|
||||
await this.setSessionsProxy({ mode: 'direct' })
|
||||
this.config = { mode: 'none' }
|
||||
this.proxyAgent = null
|
||||
}
|
||||
|
||||
private async resolveSystemProxy(): Promise<string | null> {
|
||||
try {
|
||||
return await this.resolveElectronProxy()
|
||||
} catch (error) {
|
||||
console.error('Failed to resolve system proxy:', error)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
private async resolveElectronProxy(): Promise<string | null> {
|
||||
try {
|
||||
const proxyString = await session.defaultSession.resolveProxy('https://dummy.com')
|
||||
const [protocol, address] = proxyString.split(';')[0].split(' ')
|
||||
return protocol === 'PROXY' ? `http://${address}` : null
|
||||
} catch (error) {
|
||||
console.error('Failed to resolve electron proxy:', error)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
getProxyAgent(): HttpsProxyAgent | null {
|
||||
getProxyAgent(): GeneralProxyAgent | null {
|
||||
return this.proxyAgent
|
||||
}
|
||||
|
||||
getProxyUrl(): string | null {
|
||||
return this.proxyUrl
|
||||
getProxyUrl(): string {
|
||||
return this.config.url || ''
|
||||
}
|
||||
|
||||
setGlobalProxy() {
|
||||
const proxyUrl = this.config.url
|
||||
if (proxyUrl) {
|
||||
const [protocol, address] = proxyUrl.split('://')
|
||||
const [host, port] = address.split(':')
|
||||
if (!protocol.includes('socks')) {
|
||||
setGlobalDispatcher(new ProxyAgent(proxyUrl))
|
||||
} else {
|
||||
const dispatcher = socksDispatcher({
|
||||
port: parseInt(port),
|
||||
type: protocol === 'socks5' ? 5 : 4,
|
||||
host: host
|
||||
})
|
||||
global[Symbol.for('undici.globalDispatcher.1')] = dispatcher
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
220
src/main/services/ReduxService.ts
Normal file
@@ -0,0 +1,220 @@
|
||||
import { ipcMain } from 'electron'
|
||||
import { EventEmitter } from 'events'
|
||||
|
||||
import { windowService } from './WindowService'
|
||||
|
||||
type StoreValue = any
|
||||
type Unsubscribe = () => void
|
||||
|
||||
export class ReduxService extends EventEmitter {
|
||||
private stateCache: any = {}
|
||||
private isReady = false
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
this.setupIpcHandlers()
|
||||
}
|
||||
|
||||
private setupIpcHandlers() {
|
||||
// Listen for the store-ready event
ipcMain.handle('redux-store-ready', () => {
|
||||
this.isReady = true
|
||||
this.emit('ready')
|
||||
})
|
||||
|
||||
// Listen for store state changes
ipcMain.on('redux-state-change', (_, newState) => {
|
||||
this.stateCache = newState
|
||||
this.emit('stateChange', newState)
|
||||
})
|
||||
}
|
||||
|
||||
private async waitForStoreReady(webContents: Electron.WebContents, timeout = 10000): Promise<void> {
|
||||
if (this.isReady) return
|
||||
|
||||
const startTime = Date.now()
|
||||
while (Date.now() - startTime < timeout) {
|
||||
try {
|
||||
const isReady = await webContents.executeJavaScript(`
|
||||
!!window.store && typeof window.store.getState === 'function'
|
||||
`)
|
||||
if (isReady) {
|
||||
this.isReady = true
|
||||
return
|
||||
}
|
||||
} catch (error) {
|
||||
// Ignore errors and keep waiting
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
}
|
||||
throw new Error('Timeout waiting for Redux store to be ready')
|
||||
}
|
||||
|
||||
// Synchronously get the cached state
getStateSync() {
|
||||
return this.stateCache
|
||||
}
|
||||
|
||||
// Synchronous selector against the cached state
selectSync<T = StoreValue>(selector: string): T | undefined {
|
||||
try {
|
||||
// Use the Function constructor to evaluate the selector safely
const selectorFn = new Function('state', `return ${selector}`)
|
||||
return selectorFn(this.stateCache)
|
||||
} catch (error) {
|
||||
console.error('Failed to select from cache:', error)
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
// select prefers the cached state before asking the renderer
async select<T = StoreValue>(selector: string): Promise<T> {
|
||||
try {
|
||||
// If the store is ready, try the cache first
if (this.isReady) {
|
||||
const cachedValue = this.selectSync<T>(selector)
|
||||
if (cachedValue !== undefined) {
|
||||
return cachedValue
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to the renderer process when the value is not cached
const mainWindow = windowService.getMainWindow()
|
||||
if (!mainWindow) {
|
||||
throw new Error('Main window is not available')
|
||||
}
|
||||
await this.waitForStoreReady(mainWindow.webContents)
|
||||
return await mainWindow.webContents.executeJavaScript(`
|
||||
(() => {
|
||||
const state = window.store.getState();
|
||||
return ${selector};
|
||||
})()
|
||||
`)
|
||||
} catch (error) {
|
||||
console.error('Failed to select store value:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// 派发 action
|
||||
async dispatch(action: any): Promise<void> {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
if (!mainWindow) {
|
||||
throw new Error('Main window is not available')
|
||||
}
|
||||
await this.waitForStoreReady(mainWindow.webContents)
|
||||
try {
|
||||
await mainWindow.webContents.executeJavaScript(`
|
||||
window.store.dispatch(${JSON.stringify(action)})
|
||||
`)
|
||||
} catch (error) {
|
||||
console.error('Failed to dispatch action:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// 订阅状态变化
|
||||
async subscribe(selector: string, callback: (newValue: any) => void): Promise<Unsubscribe> {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
if (!mainWindow) {
|
||||
throw new Error('Main window is not available')
|
||||
}
|
||||
await this.waitForStoreReady(mainWindow.webContents)
|
||||
|
||||
// 在渲染进程中设置监听
|
||||
await mainWindow.webContents.executeJavaScript(`
|
||||
if (!window._storeSubscriptions) {
|
||||
window._storeSubscriptions = new Set();
|
||||
|
||||
// 设置全局状态变化监听
|
||||
const unsubscribe = window.store.subscribe(() => {
|
||||
const state = window.store.getState();
|
||||
window.electron.ipcRenderer.send('redux-state-change', state);
|
||||
});
|
||||
|
||||
window._storeSubscriptions.add(unsubscribe);
|
||||
}
|
||||
`)
|
||||
|
||||
// 在主进程中处理回调
|
||||
const handler = async () => {
|
||||
try {
|
||||
const newValue = await this.select(selector)
|
||||
callback(newValue)
|
||||
} catch (error) {
|
||||
console.error('Error in subscription handler:', error)
|
||||
}
|
||||
}
|
||||
|
||||
this.on('stateChange', handler)
|
||||
return () => {
|
||||
this.off('stateChange', handler)
|
||||
}
|
||||
}
|
||||
|
||||
// 获取整个状态树
|
||||
async getState(): Promise<any> {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
if (!mainWindow) {
|
||||
throw new Error('Main window is not available')
|
||||
}
|
||||
await this.waitForStoreReady(mainWindow.webContents)
|
||||
try {
|
||||
return await mainWindow.webContents.executeJavaScript(`
|
||||
window.store.getState()
|
||||
`)
|
||||
} catch (error) {
|
||||
console.error('Failed to get state:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// 批量执行 actions
|
||||
async batch(actions: any[]): Promise<void> {
|
||||
for (const action of actions) {
|
||||
await this.dispatch(action)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const reduxService = new ReduxService()
|
||||
|
||||
/** example
|
||||
async function example() {
|
||||
try {
|
||||
// 读取状态
|
||||
const settings = await reduxService.select('state.settings')
|
||||
console.log('settings', settings)
|
||||
|
||||
// 派发 action
|
||||
await reduxService.dispatch({
|
||||
type: 'settings/updateApiKey',
|
||||
payload: 'new-api-key'
|
||||
})
|
||||
|
||||
// 订阅状态变化
|
||||
const unsubscribe = await reduxService.subscribe('state.settings.apiKey', (newValue) => {
|
||||
console.log('API key changed:', newValue)
|
||||
})
|
||||
|
||||
// 批量执行 actions
|
||||
await reduxService.batch([
|
||||
{ type: 'action1', payload: 'data1' },
|
||||
{ type: 'action2', payload: 'data2' }
|
||||
])
|
||||
|
||||
// 同步方法虽然可能不是最新的数据,但响应更快
|
||||
const apiKey = reduxService.selectSync('state.settings.apiKey')
|
||||
console.log('apiKey', apiKey)
|
||||
|
||||
// 处理保证是最新的数据
|
||||
const apiKey1 = await reduxService.select('state.settings.apiKey')
|
||||
console.log('apiKey1', apiKey1)
|
||||
|
||||
// 取消订阅
|
||||
unsubscribe()
|
||||
} catch (error) {
|
||||
console.error('Error:', error)
|
||||
}
|
||||
}
|
||||
*/
|
||||
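ReduxService relies on the renderer exposing its Redux store as window.store and reporting readiness and state changes over the two IPC channels used above. A minimal renderer-side sketch of that wiring follows; the channel names come from the service, but the store import path and the rest is assumed and not part of this diff.

// Renderer-side sketch (assumed): expose the store and feed the main-process cache.
import { store } from '@renderer/store' // assumed path

declare global {
  interface Window {
    store: typeof store
  }
}

window.store = store

// Signal that the store can now be queried (handled by ipcMain.handle('redux-store-ready')).
window.electron.ipcRenderer.invoke('redux-store-ready')

// Keep the main-process stateCache in sync (consumed by ipcMain.on('redux-state-change')).
store.subscribe(() => {
  window.electron.ipcRenderer.send('redux-state-change', store.getState())
})
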
@@ -8,6 +8,9 @@ import { windowService } from './WindowService'
|
||||
let showAppAccelerator: string | null = null
|
||||
let showMiniWindowAccelerator: string | null = null
|
||||
|
||||
// store the focus and blur handlers for each window to unregister them later
|
||||
const windowOnHandlers = new Map<BrowserWindow, { onFocusHandler: () => void; onBlurHandler: () => void }>()
|
||||
|
||||
function getShortcutHandler(shortcut: Shortcut) {
|
||||
switch (shortcut.key) {
|
||||
case 'zoom_in':
|
||||
@@ -20,17 +23,8 @@ function getShortcutHandler(shortcut: Shortcut) {
|
||||
configManager.setZoomFactor(1)
|
||||
}
|
||||
case 'show_app':
|
||||
return (window: BrowserWindow) => {
|
||||
if (window.isVisible()) {
|
||||
if (window.isFocused()) {
|
||||
window.hide()
|
||||
} else {
|
||||
window.focus()
|
||||
}
|
||||
} else {
|
||||
window.show()
|
||||
window.focus()
|
||||
}
|
||||
return () => {
|
||||
windowService.toggleMainWindow()
|
||||
}
|
||||
case 'mini_window':
|
||||
return () => {
|
||||
@@ -113,10 +107,19 @@ const convertShortcutRecordedByKeyboardEventKeyValueToElectronGlobalShortcutForm
|
||||
|
||||
export function registerShortcuts(window: BrowserWindow) {
|
||||
window.once('ready-to-show', () => {
|
||||
window.webContents.setZoomFactor(configManager.getZoomFactor())
|
||||
if (configManager.getLaunchToTray()) {
|
||||
registerOnlyUniversalShortcuts()
|
||||
}
|
||||
})
|
||||
|
||||
const register = () => {
|
||||
//only for clearer code
|
||||
const registerOnlyUniversalShortcuts = () => {
|
||||
register(true)
|
||||
}
|
||||
|
||||
//onlyUniversalShortcuts is used to register shortcuts that are not window specific, like show_app & mini_window
|
||||
//onlyUniversalShortcuts is needed when we launch to tray
|
||||
const register = (onlyUniversalShortcuts: boolean = false) => {
|
||||
if (window.isDestroyed()) return
|
||||
|
||||
const shortcuts = configManager.getShortcuts()
|
||||
@@ -128,44 +131,55 @@ export function registerShortcuts(window: BrowserWindow) {
|
||||
return
|
||||
}
|
||||
|
||||
const handler = getShortcutHandler(shortcut)
|
||||
//if not enabled, exit early from the process.
|
||||
if (!shortcut.enabled) {
|
||||
return
|
||||
}
|
||||
|
||||
// only register universal shortcuts when needed
|
||||
if (onlyUniversalShortcuts && !['show_app', 'mini_window'].includes(shortcut.key)) {
|
||||
return
|
||||
}
|
||||
|
||||
const handler = getShortcutHandler(shortcut)
|
||||
if (!handler) {
|
||||
return
|
||||
}
|
||||
|
||||
const accelerator = formatShortcutKey(shortcut.shortcut)
|
||||
|
||||
if (shortcut.key === 'show_app' && shortcut.enabled) {
|
||||
showAppAccelerator = accelerator
|
||||
}
|
||||
|
||||
if (shortcut.key === 'mini_window' && shortcut.enabled) {
|
||||
showMiniWindowAccelerator = accelerator
|
||||
}
|
||||
|
||||
if (shortcut.key.includes('zoom')) {
|
||||
switch (shortcut.key) {
|
||||
case 'zoom_in':
|
||||
globalShortcut.register('CommandOrControl+=', () => shortcut.enabled && handler(window))
|
||||
globalShortcut.register('CommandOrControl+numadd', () => shortcut.enabled && handler(window))
|
||||
return
|
||||
case 'zoom_out':
|
||||
globalShortcut.register('CommandOrControl+-', () => shortcut.enabled && handler(window))
|
||||
globalShortcut.register('CommandOrControl+numsub', () => shortcut.enabled && handler(window))
|
||||
return
|
||||
case 'zoom_reset':
|
||||
globalShortcut.register('CommandOrControl+0', () => shortcut.enabled && handler(window))
|
||||
case 'show_app':
|
||||
showAppAccelerator = formatShortcutKey(shortcut.shortcut)
|
||||
break
|
||||
|
||||
case 'mini_window':
|
||||
//available only when QuickAssistant enabled
|
||||
if (!configManager.getEnableQuickAssistant()) {
|
||||
return
|
||||
}
|
||||
showMiniWindowAccelerator = formatShortcutKey(shortcut.shortcut)
|
||||
break
|
||||
|
||||
//the following zoom cases register their shortcuts separately, so they return early
|
||||
case 'zoom_in':
|
||||
globalShortcut.register('CommandOrControl+=', () => handler(window))
|
||||
globalShortcut.register('CommandOrControl+numadd', () => handler(window))
|
||||
return
|
||||
|
||||
case 'zoom_out':
|
||||
globalShortcut.register('CommandOrControl+-', () => handler(window))
|
||||
globalShortcut.register('CommandOrControl+numsub', () => handler(window))
|
||||
return
|
||||
|
||||
case 'zoom_reset':
|
||||
globalShortcut.register('CommandOrControl+0', () => handler(window))
|
||||
return
|
||||
}
|
||||
|
||||
if (shortcut.enabled) {
|
||||
const accelerator = convertShortcutRecordedByKeyboardEventKeyValueToElectronGlobalShortcutFormat(
|
||||
shortcut.shortcut
|
||||
)
|
||||
|
||||
globalShortcut.register(accelerator, () => handler(window))
|
||||
}
|
||||
} catch (error) {
|
||||
Logger.error(`[ShortcutService] Failed to register shortcut ${shortcut.key}`)
|
||||
}
|
||||
@@ -196,8 +210,16 @@ export function registerShortcuts(window: BrowserWindow) {
|
||||
}
|
||||
}
|
||||
|
||||
window.on('focus', () => register())
|
||||
window.on('blur', () => unregister())
|
||||
// only register the event handlers once
|
||||
if (undefined === windowOnHandlers.get(window)) {
|
||||
// do not pass register() directly as the listener: it would receive the Event object as an argument, which is not expected
|
||||
const registerHandler = () => {
|
||||
register()
|
||||
}
|
||||
window.on('focus', registerHandler)
|
||||
window.on('blur', unregister)
|
||||
windowOnHandlers.set(window, { onFocusHandler: registerHandler, onBlurHandler: unregister })
|
||||
}
|
||||
|
||||
if (!window.isDestroyed() && window.isFocused()) {
|
||||
register()
|
||||
@@ -208,6 +230,11 @@ export function unregisterAllShortcuts() {
|
||||
try {
|
||||
showAppAccelerator = null
|
||||
showMiniWindowAccelerator = null
|
||||
windowOnHandlers.forEach((handlers, window) => {
|
||||
window.off('focus', handlers.onFocusHandler)
|
||||
window.off('blur', handlers.onBlurHandler)
|
||||
})
|
||||
windowOnHandlers.clear()
|
||||
globalShortcut.unregisterAll()
|
||||
} catch (error) {
|
||||
Logger.error('[ShortcutService] Failed to unregister all shortcuts')
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
import { WebDavConfig } from '@types'
|
||||
import Logger from 'electron-log'
|
||||
import { HttpProxyAgent } from 'http-proxy-agent'
|
||||
import { HttpsProxyAgent } from 'https-proxy-agent'
|
||||
import Stream from 'stream'
|
||||
import { BufferLike, createClient, GetFileContentsOptions, PutFileContentsOptions, WebDAVClient } from 'webdav'
|
||||
|
||||
import { proxyManager } from './ProxyManager'
|
||||
|
||||
import {
|
||||
BufferLike,
|
||||
createClient,
|
||||
CreateDirectoryOptions,
|
||||
GetFileContentsOptions,
|
||||
PutFileContentsOptions,
|
||||
WebDAVClient
|
||||
} from 'webdav'
|
||||
export default class WebDav {
|
||||
public instance: WebDAVClient | undefined
|
||||
private webdavPath: string
|
||||
@@ -14,20 +16,16 @@ export default class WebDav {
|
||||
constructor(params: WebDavConfig) {
|
||||
this.webdavPath = params.webdavPath
|
||||
|
||||
const httpProxy = proxyManager.getProxyUrl() || ''
|
||||
const httpsProxy = proxyManager.getProxyUrl() || ''
|
||||
|
||||
this.instance = createClient(params.webdavHost, {
|
||||
username: params.webdavUser,
|
||||
password: params.webdavPass,
|
||||
maxBodyLength: Infinity,
|
||||
maxContentLength: Infinity,
|
||||
httpAgent: httpProxy ? new HttpProxyAgent(httpProxy) : undefined,
|
||||
httpsAgent: httpsProxy ? new HttpsProxyAgent(httpsProxy) : undefined
|
||||
maxContentLength: Infinity
|
||||
})
|
||||
|
||||
this.putFileContents = this.putFileContents.bind(this)
|
||||
this.getFileContents = this.getFileContents.bind(this)
|
||||
this.createDirectory = this.createDirectory.bind(this)
|
||||
}
|
||||
|
||||
public putFileContents = async (
|
||||
@@ -74,4 +72,30 @@ export default class WebDav {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
public checkConnection = async () => {
|
||||
if (!this.instance) {
|
||||
throw new Error('WebDAV client not initialized')
|
||||
}
|
||||
|
||||
try {
|
||||
return await this.instance.exists('/')
|
||||
} catch (error) {
|
||||
Logger.error('[WebDAV] Error checking connection:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
public createDirectory = async (path: string, options?: CreateDirectoryOptions) => {
|
||||
if (!this.instance) {
|
||||
throw new Error('WebDAV client not initialized')
|
||||
}
|
||||
|
||||
try {
|
||||
return await this.instance.createDirectory(path, options)
|
||||
} catch (error) {
|
||||
Logger.error('[WebDAV] Error creating directory on WebDAV:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
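For context, a brief sketch of how the new checkConnection and createDirectory helpers might be exercised from a backup flow; the credentials are placeholders and the four WebDavConfig fields shown are assumed to be the relevant ones.

import WebDav from './WebDav'

async function ensureBackupDirectory() {
  // Placeholder credentials; real values come from the user's WebDAV settings.
  const webdav = new WebDav({
    webdavHost: 'https://dav.example.com',
    webdavUser: 'user',
    webdavPass: 'secret',
    webdavPath: '/cherry-studio'
  })

  // Verify the server is reachable before writing anything.
  if (await webdav.checkConnection()) {
    await webdav.createDirectory('/cherry-studio')
  }
}
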
@@ -1,9 +1,10 @@
|
||||
import { is } from '@electron-toolkit/utils'
|
||||
import { isLinux, isWin } from '@main/constant'
|
||||
import { isDev, isLinux, isMac, isWin } from '@main/constant'
|
||||
import { getFilesDir } from '@main/utils/file'
|
||||
import { app, BrowserWindow, ipcMain, Menu, MenuItem, shell } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
import windowStateKeeper from 'electron-window-state'
|
||||
import path, { join } from 'path'
|
||||
import { join } from 'path'
|
||||
|
||||
import icon from '../../../build/icon.png?asset'
|
||||
import { titleBarOverlayDark, titleBarOverlayLight } from '../config'
|
||||
@@ -14,7 +15,11 @@ export class WindowService {
|
||||
private static instance: WindowService | null = null
|
||||
private mainWindow: BrowserWindow | null = null
|
||||
private miniWindow: BrowserWindow | null = null
|
||||
private isPinnedMiniWindow: boolean = false
|
||||
private wasFullScreen: boolean = false
|
||||
//hacky-fix: store the focused status of mainWindow before miniWindow shows
|
||||
//to restore the focus status when miniWindow hides
|
||||
private wasMainWindowFocused: boolean = false
|
||||
private selectionMenuWindow: BrowserWindow | null = null
|
||||
private lastSelectedText: string = ''
|
||||
private contextMenu: Menu | null = null
|
||||
@@ -29,6 +34,7 @@ export class WindowService {
|
||||
public createMainWindow(): BrowserWindow {
|
||||
if (this.mainWindow && !this.mainWindow.isDestroyed()) {
|
||||
this.mainWindow.show()
|
||||
this.mainWindow.focus()
|
||||
return this.mainWindow
|
||||
}
|
||||
|
||||
@@ -38,8 +44,6 @@ export class WindowService {
|
||||
})
|
||||
|
||||
const theme = configManager.getTheme()
|
||||
const isMac = process.platform === 'darwin'
|
||||
const isLinux = process.platform === 'linux'
|
||||
|
||||
this.mainWindow = new BrowserWindow({
|
||||
x: mainWindowState.x,
|
||||
@@ -57,7 +61,7 @@ export class WindowService {
|
||||
titleBarOverlay: theme === 'dark' ? titleBarOverlayDark : titleBarOverlayLight,
|
||||
backgroundColor: isMac ? undefined : theme === 'dark' ? '#181818' : '#FFFFFF',
|
||||
trafficLightPosition: { x: 8, y: 12 },
|
||||
...(process.platform === 'linux' ? { icon } : {}),
|
||||
...(isLinux ? { icon } : {}),
|
||||
webPreferences: {
|
||||
preload: join(__dirname, '../preload/index.js'),
|
||||
sandbox: false,
|
||||
@@ -69,6 +73,12 @@ export class WindowService {
|
||||
|
||||
this.setupMainWindow(this.mainWindow, mainWindowState)
|
||||
|
||||
//preload the miniWindow to work around a series of miniWindow issues on macOS
|
||||
const enableQuickAssistant = configManager.getEnableQuickAssistant()
|
||||
if (enableQuickAssistant && !this.miniWindow) {
|
||||
this.miniWindow = this.createMiniWindow(true)
|
||||
}
|
||||
|
||||
return this.mainWindow
|
||||
}
|
||||
|
||||
@@ -127,6 +137,13 @@ export class WindowService {
|
||||
this.contextMenu?.popup()
|
||||
})
|
||||
|
||||
// Dangerous API
|
||||
if (isDev) {
|
||||
mainWindow.webContents.on('will-attach-webview', (_, webPreferences) => {
|
||||
webPreferences.preload = join(__dirname, '../preload/index.js')
|
||||
})
|
||||
}
|
||||
|
||||
// Handle webview context menu
|
||||
mainWindow.webContents.on('did-attach-webview', (_, webContents) => {
|
||||
webContents.on('context-menu', () => {
|
||||
@@ -137,7 +154,15 @@ export class WindowService {
|
||||
|
||||
private setupWindowEvents(mainWindow: BrowserWindow) {
|
||||
mainWindow.once('ready-to-show', () => {
|
||||
mainWindow.webContents.setZoomFactor(configManager.getZoomFactor())
|
||||
|
||||
// show the window only when launch-to-tray is not set
const isLaunchToTray = configManager.getLaunchToTray()
if (!isLaunchToTray) {
//[mac]hacky-fix: setting visibleOnFullScreen:true on the miniWindow makes the dock icon disappear
|
||||
app.dock?.show()
|
||||
mainWindow.show()
|
||||
}
|
||||
})
|
||||
|
||||
// Handle fullscreen-related events
|
||||
@@ -150,6 +175,36 @@ export class WindowService {
|
||||
this.wasFullScreen = false
|
||||
mainWindow.webContents.send('fullscreen-status-changed', false)
|
||||
})
|
||||
|
||||
// set the zoom factor again when the window is going to resize
|
||||
//
|
||||
// this is a workaround for the known bug that
|
||||
// the zoom factor is reset to cached value when window is resized after routing to other page
|
||||
// see: https://github.com/electron/electron/issues/10572
|
||||
//
|
||||
mainWindow.on('will-resize', () => {
|
||||
mainWindow.webContents.setZoomFactor(configManager.getZoomFactor())
|
||||
})
|
||||
|
||||
// ARCH: as `will-resize` is only for Win & Mac,
|
||||
// linux has the same problem, use `resize` listener instead
|
||||
// but `resize` will flicker the UI
|
||||
if (isLinux) {
|
||||
mainWindow.on('resize', () => {
|
||||
mainWindow.webContents.setZoomFactor(configManager.getZoomFactor())
|
||||
})
|
||||
}
|
||||
|
||||
// Support exiting fullscreen with the Escape key
mainWindow.webContents.on('before-input-event', (event, input) => {
// Exit fullscreen when Escape (with no modifier keys) is pressed while the window is in fullscreen
|
||||
if (input.key === 'Escape' && !input.alt && !input.control && !input.meta && !input.shift) {
|
||||
if (mainWindow.isFullScreen()) {
|
||||
event.preventDefault()
|
||||
mainWindow.setFullScreen(false)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
private setupWebContentsHandlers(mainWindow: BrowserWindow) {
|
||||
@@ -185,7 +240,7 @@ export class WindowService {
|
||||
|
||||
if (url.includes('http://file/')) {
|
||||
const fileName = url.replace('http://file/', '')
|
||||
const storageDir = path.join(app.getPath('userData'), 'Data', 'Files')
|
||||
const storageDir = getFilesDir()
|
||||
const filePath = storageDir + '/' + fileName
|
||||
shell.openPath(filePath).catch((err) => Logger.error('Failed to open file:', err))
|
||||
} else {
|
||||
@@ -235,19 +290,36 @@ export class WindowService {
|
||||
return app.quit()
|
||||
}
|
||||
|
||||
// Tray not enabled and running on Windows or Linux: quit directly
const notInTray = !configManager.getTray()
if ((isWin || isLinux) && notInTray) {
// Tray and close-behaviour settings
const isShowTray = configManager.getTray()
const isTrayOnClose = configManager.getTrayOnClose()

// Tray disabled, or tray enabled but close-to-tray turned off: quit directly
if (!isShowTray || (isShowTray && !isTrayOnClose)) {
// On Windows or Linux, quit directly
// On macOS, follow the system default behaviour and do not quit
if (isWin || isLinux) {
return app.quit()
}
}

// If in fullscreen, quit directly
// Below this point the tray is enabled and close-to-tray is set
// On Windows or Linux, quit the app if it is in fullscreen
|
||||
if (this.wasFullScreen) {
|
||||
if (isWin || isLinux) {
|
||||
return app.quit()
|
||||
} else {
|
||||
event.preventDefault()
|
||||
mainWindow.setFullScreen(false)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
event.preventDefault()
|
||||
mainWindow.hide()
|
||||
|
||||
//for mac users, should hide dock icon if close to tray
|
||||
app.dock?.hide()
|
||||
})
|
||||
|
||||
mainWindow.on('closed', () => {
|
||||
@@ -269,45 +341,51 @@ export class WindowService {
|
||||
if (this.mainWindow && !this.mainWindow.isDestroyed()) {
|
||||
if (this.mainWindow.isMinimized()) {
|
||||
this.mainWindow.restore()
|
||||
return
|
||||
}
|
||||
//[macOS] Known Issue
|
||||
// setVisibleOnAllWorkspaces true/false will NOT bring window to current desktop in Mac (works fine with Windows)
|
||||
// AppleScript might be a workaround, but it is not worth the complexity
|
||||
this.mainWindow.setVisibleOnAllWorkspaces(true)
|
||||
this.mainWindow.show()
|
||||
this.mainWindow.focus()
|
||||
this.mainWindow.setVisibleOnAllWorkspaces(false)
|
||||
} else {
|
||||
this.mainWindow = this.createMainWindow()
|
||||
}
|
||||
}
|
||||
|
||||
public toggleMainWindow() {
|
||||
// should not toggle main window when in full screen
|
||||
if (this.wasFullScreen) {
|
||||
return
|
||||
}
|
||||
|
||||
if (this.mainWindow && !this.mainWindow.isDestroyed() && this.mainWindow.isVisible()) {
|
||||
if (this.mainWindow.isFocused()) {
|
||||
// if tray is enabled, hide the main window, else do nothing
|
||||
if (configManager.getTray()) {
|
||||
this.mainWindow.hide()
|
||||
app.dock?.hide()
|
||||
}
|
||||
} else {
|
||||
this.mainWindow.focus()
|
||||
}
|
||||
}
|
||||
|
||||
public showMiniWindow() {
|
||||
const enableQuickAssistant = configManager.getEnableQuickAssistant()
|
||||
|
||||
if (!enableQuickAssistant) {
|
||||
return
|
||||
}
|
||||
|
||||
if (this.mainWindow && !this.mainWindow.isDestroyed()) {
|
||||
this.mainWindow.hide()
|
||||
}
|
||||
if (this.selectionMenuWindow && !this.selectionMenuWindow.isDestroyed()) {
|
||||
this.selectionMenuWindow.hide()
|
||||
this.showMainWindow()
|
||||
}
|
||||
|
||||
if (this.miniWindow && !this.miniWindow.isDestroyed()) {
|
||||
if (this.miniWindow.isMinimized()) {
|
||||
this.miniWindow.restore()
|
||||
}
|
||||
this.miniWindow.show()
|
||||
this.miniWindow.center()
|
||||
this.miniWindow.focus()
|
||||
return
|
||||
}
|
||||
|
||||
const isMac = process.platform === 'darwin'
|
||||
|
||||
public createMiniWindow(isPreload: boolean = false): BrowserWindow {
|
||||
this.miniWindow = new BrowserWindow({
|
||||
width: 500,
|
||||
height: 520,
|
||||
show: true,
|
||||
width: 550,
|
||||
height: 400,
|
||||
minWidth: 350,
|
||||
minHeight: 380,
|
||||
maxWidth: 1024,
|
||||
maxHeight: 768,
|
||||
show: false,
|
||||
autoHideMenuBar: true,
|
||||
transparent: isMac,
|
||||
vibrancy: 'under-window',
|
||||
@@ -315,8 +393,13 @@ export class WindowService {
|
||||
center: true,
|
||||
frame: false,
|
||||
alwaysOnTop: true,
|
||||
resizable: false,
|
||||
resizable: true,
|
||||
useContentSize: true,
|
||||
...(isMac ? { type: 'panel' } : {}),
|
||||
skipTaskbar: true,
|
||||
minimizable: false,
|
||||
maximizable: false,
|
||||
fullscreenable: false,
|
||||
webPreferences: {
|
||||
preload: join(__dirname, '../preload/index.js'),
|
||||
sandbox: false,
|
||||
@@ -325,8 +408,25 @@ export class WindowService {
|
||||
}
|
||||
})
|
||||
|
||||
//miniWindow should show in current desktop
|
||||
this.miniWindow?.setVisibleOnAllWorkspaces(true, { visibleOnFullScreen: true })
|
||||
//keep the miniWindow on top of fullscreen apps by raising its window level
|
||||
this.miniWindow.setAlwaysOnTop(true, 'screen-saver', 1)
|
||||
|
||||
this.miniWindow.on('ready-to-show', () => {
|
||||
if (isPreload) {
|
||||
return
|
||||
}
|
||||
|
||||
this.wasMainWindowFocused = this.mainWindow?.isFocused() || false
|
||||
this.miniWindow?.center()
|
||||
this.miniWindow?.show()
|
||||
})
|
||||
|
||||
this.miniWindow.on('blur', () => {
|
||||
this.miniWindow?.hide()
|
||||
if (!this.isPinnedMiniWindow) {
|
||||
this.hideMiniWindow()
|
||||
}
|
||||
})
|
||||
|
||||
this.miniWindow.on('closed', () => {
|
||||
@@ -352,9 +452,48 @@ export class WindowService {
|
||||
hash: '#/mini'
|
||||
})
|
||||
}
|
||||
|
||||
return this.miniWindow
|
||||
}
|
||||
|
||||
public showMiniWindow() {
|
||||
const enableQuickAssistant = configManager.getEnableQuickAssistant()
|
||||
|
||||
if (!enableQuickAssistant) {
|
||||
return
|
||||
}
|
||||
|
||||
if (this.selectionMenuWindow && !this.selectionMenuWindow.isDestroyed()) {
|
||||
this.selectionMenuWindow.hide()
|
||||
}
|
||||
|
||||
if (this.miniWindow && !this.miniWindow.isDestroyed()) {
|
||||
this.wasMainWindowFocused = this.mainWindow?.isFocused() || false
|
||||
|
||||
if (this.miniWindow.isMinimized()) {
|
||||
this.miniWindow.restore()
|
||||
}
|
||||
this.miniWindow.show()
|
||||
return
|
||||
}
|
||||
|
||||
this.miniWindow = this.createMiniWindow()
|
||||
}
|
||||
|
||||
public hideMiniWindow() {
|
||||
//hacky-fix:[mac/win] the previously focused window (outside this app) should regain focus after the miniWindow hides
|
||||
if (isWin) {
|
||||
this.miniWindow?.minimize()
|
||||
this.miniWindow?.hide()
|
||||
return
|
||||
} else if (isMac) {
|
||||
this.miniWindow?.hide()
|
||||
if (!this.wasMainWindowFocused) {
|
||||
app.hide()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
this.miniWindow?.hide()
|
||||
}
|
||||
|
||||
@@ -363,11 +502,16 @@ export class WindowService {
|
||||
}
|
||||
|
||||
public toggleMiniWindow() {
|
||||
if (this.miniWindow) {
|
||||
this.miniWindow.isVisible() ? this.miniWindow.hide() : this.miniWindow.show()
|
||||
} else {
|
||||
if (this.miniWindow && !this.miniWindow.isDestroyed() && this.miniWindow.isVisible()) {
|
||||
this.hideMiniWindow()
|
||||
return
|
||||
}
|
||||
|
||||
this.showMiniWindow()
|
||||
}
|
||||
|
||||
public setPinMiniWindow(isPinned) {
|
||||
this.isPinnedMiniWindow = isPinned
|
||||
}
|
||||
|
||||
public showSelectionMenu(bounds: { x: number; y: number }) {
|
||||
@@ -378,7 +522,6 @@ export class WindowService {
|
||||
}
|
||||
|
||||
const theme = configManager.getTheme()
|
||||
const isMac = process.platform === 'darwin'
|
||||
|
||||
this.selectionMenuWindow = new BrowserWindow({
|
||||
width: 280,
|
||||
|
||||
@@ -3,17 +3,29 @@ import path from 'node:path'
import { audioExts, documentExts, imageExts, textExts, videoExts } from '@shared/config/constant'
import { FileType, FileTypes } from '@types'
import { app } from 'electron'
import { v4 as uuidv4 } from 'uuid'

// Create a file-type lookup map for faster lookups
const fileTypeMap = new Map<string, FileTypes>()

// Initialize the lookup map
function initFileTypeMap() {
  imageExts.forEach((ext) => fileTypeMap.set(ext, FileTypes.IMAGE))
  videoExts.forEach((ext) => fileTypeMap.set(ext, FileTypes.VIDEO))
  audioExts.forEach((ext) => fileTypeMap.set(ext, FileTypes.AUDIO))
  textExts.forEach((ext) => fileTypeMap.set(ext, FileTypes.TEXT))
  documentExts.forEach((ext) => fileTypeMap.set(ext, FileTypes.DOCUMENT))
}

// Initialize the lookup map
initFileTypeMap()

export function getFileType(ext: string): FileTypes {
  ext = ext.toLowerCase()
  if (imageExts.includes(ext)) return FileTypes.IMAGE
  if (videoExts.includes(ext)) return FileTypes.VIDEO
  if (audioExts.includes(ext)) return FileTypes.AUDIO
  if (textExts.includes(ext)) return FileTypes.TEXT
  if (documentExts.includes(ext)) return FileTypes.DOCUMENT
  return FileTypes.OTHER
  return fileTypeMap.get(ext) || FileTypes.OTHER
}

export function getAllFiles(dirPath: string, arrayOfFiles: FileType[] = []): FileType[] {
  const files = fs.readdirSync(dirPath)

@@ -45,7 +57,7 @@ export function getAllFiles(dirPath: string, arrayOfFiles: FileType[] = []): Fil
      count: 1,
      origin_name: name,
      type: fileType,
      created_at: new Date()
      created_at: new Date().toISOString()
    }

    arrayOfFiles.push(fileItem)
@@ -54,3 +66,11 @@ export function getAllFiles(dirPath: string, arrayOfFiles: FileType[] = []): Fil

  return arrayOfFiles
}

export function getTempDir() {
  return path.join(app.getPath('temp'), 'CherryStudio')
}

export function getFilesDir() {
  return path.join(app.getPath('userData'), 'Data', 'Files')
}

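The map-based lookup keeps getFileType to a single Map.get per call; a quick sketch of the expected behaviour, assuming the extension lists store entries like '.png':

import { getFileType } from '@main/utils/file'
import { FileTypes } from '@types'

// Lookups are lower-cased first; unknown extensions fall back to FileTypes.OTHER.
console.log(getFileType('.PNG') === FileTypes.IMAGE) // true, assuming '.png' is in imageExts
console.log(getFileType('.xyz') === FileTypes.OTHER) // true
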
@@ -42,3 +42,13 @@ export function dumpPersistState() {
|
||||
}
|
||||
return JSON.stringify(persistState)
|
||||
}
|
||||
|
||||
export const runAsyncFunction = async (fn: () => void) => {
|
||||
await fn()
|
||||
}
|
||||
|
||||
export function makeSureDirExists(dir: string) {
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true })
|
||||
}
|
||||
}
|
||||
|
||||
59
src/main/utils/process.ts
Normal file
@@ -0,0 +1,59 @@
import { spawn } from 'child_process'
import log from 'electron-log'
import fs from 'fs'
import os from 'os'
import path from 'path'

import { getResourcePath } from '.'

export function runInstallScript(scriptPath: string): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    const installScriptPath = path.join(getResourcePath(), 'scripts', scriptPath)
    log.info(`Running script at: ${installScriptPath}`)

    const nodeProcess = spawn(process.execPath, [installScriptPath], {
      env: { ...process.env, ELECTRON_RUN_AS_NODE: '1' }
    })

    nodeProcess.stdout.on('data', (data) => {
      log.info(`Script output: ${data}`)
    })

    nodeProcess.stderr.on('data', (data) => {
      log.error(`Script error: ${data}`)
    })

    nodeProcess.on('close', (code) => {
      if (code === 0) {
        log.info('Script completed successfully')
        resolve()
      } else {
        log.error(`Script exited with code ${code}`)
        reject(new Error(`Process exited with code ${code}`))
      }
    })
  })
}

export async function getBinaryName(name: string): Promise<string> {
  if (process.platform === 'win32') {
    return `${name}.exe`
  }
  return name
}

export async function getBinaryPath(name?: string): Promise<string> {
  if (!name) {
    return path.join(os.homedir(), '.cherrystudio', 'bin')
  }

  const binaryName = await getBinaryName(name)
  const binariesDir = path.join(os.homedir(), '.cherrystudio', 'bin')
  const binariesDirExists = await fs.existsSync(binariesDir)
  return binariesDirExists ? path.join(binariesDir, binaryName) : binaryName
}

export async function isBinaryExists(name: string): Promise<boolean> {
  const cmd = await getBinaryPath(name)
  return await fs.existsSync(cmd)
}

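A rough sketch of how these helpers might combine when a binary is missing; the script name 'install-uv.js' is hypothetical and not taken from this diff.

import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'

async function ensureUvInstalled(): Promise<string> {
  // 'install-uv.js' is a hypothetical script under the app's resources/scripts directory.
  if (!(await isBinaryExists('uv'))) {
    await runInstallScript('install-uv.js')
  }

  // Resolves to ~/.cherrystudio/bin/uv(.exe) when the binaries dir exists, otherwise the bare name.
  return await getBinaryPath('uv')
}
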
@@ -1,77 +0,0 @@
|
||||
import { spawn } from 'child_process'
|
||||
import { app, dialog } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
export async function updateUserDataPath() {
|
||||
const currentPath = app.getPath('userData')
|
||||
const oldPath = currentPath.replace('CherryStudio', 'cherry-studio')
|
||||
|
||||
if (currentPath !== oldPath && fs.existsSync(oldPath)) {
|
||||
Logger.log('Update userData path')
|
||||
|
||||
try {
|
||||
if (process.platform === 'win32') {
|
||||
// Windows: create a .bat file
|
||||
const batPath = await createWindowsBatFile(oldPath, currentPath)
|
||||
await promptRestartAndExecute(batPath)
|
||||
} else {
|
||||
// Other platforms: update directly
|
||||
fs.rmSync(currentPath, { recursive: true, force: true })
|
||||
fs.renameSync(oldPath, currentPath)
|
||||
Logger.log(`Directory renamed: ${currentPath}`)
|
||||
await promptRestart()
|
||||
}
|
||||
} catch (error: any) {
|
||||
Logger.error('Error updating userData path:', error)
|
||||
dialog.showErrorBox('错误', `更新用户数据目录时发生错误: ${error.message}`)
|
||||
}
|
||||
} else {
|
||||
Logger.log('userData path does not need to be updated')
|
||||
}
|
||||
}
|
||||
|
||||
async function createWindowsBatFile(oldPath: string, currentPath: string): Promise<string> {
|
||||
const batPath = path.join(app.getPath('temp'), 'rename_userdata.bat')
|
||||
const appPath = app.getPath('exe')
|
||||
const batContent = `
|
||||
@echo off
|
||||
timeout /t 2 /nobreak
|
||||
rmdir /s /q "${currentPath}"
|
||||
rename "${oldPath}" "${path.basename(currentPath)}"
|
||||
start "" "${appPath}"
|
||||
del "%~f0"
|
||||
`
|
||||
fs.writeFileSync(batPath, batContent)
|
||||
return batPath
|
||||
}
|
||||
|
||||
async function promptRestartAndExecute(batPath: string) {
|
||||
await dialog.showMessageBox({
|
||||
type: 'info',
|
||||
title: '应用需要重启',
|
||||
message: '用户数据目录将在重启后更新。请重启应用以应用更改。',
|
||||
buttons: ['手动重启']
|
||||
})
|
||||
|
||||
// Execute the .bat file
|
||||
spawn('cmd.exe', ['/c', batPath], {
|
||||
detached: true,
|
||||
stdio: 'ignore'
|
||||
})
|
||||
|
||||
app.exit(0)
|
||||
}
|
||||
|
||||
async function promptRestart() {
|
||||
await dialog.showMessageBox({
|
||||
type: 'info',
|
||||
title: '应用需要重启',
|
||||
message: '用户数据目录已更新。请重启应用以应用更改。',
|
||||
buttons: ['重启']
|
||||
})
|
||||
|
||||
app.relaunch()
|
||||
app.exit(0)
|
||||
}
|
||||
@@ -1,3 +1,5 @@
|
||||
import { BrowserWindow } from 'electron'
|
||||
|
||||
function isTilingWindowManager() {
|
||||
if (process.platform === 'darwin') {
|
||||
return false
|
||||
@@ -13,4 +15,33 @@ function isTilingWindowManager() {
|
||||
return tilingSystems.some((system) => desktopEnv?.includes(system))
|
||||
}
|
||||
|
||||
export const replaceDevtoolsFont = (browserWindow: BrowserWindow) => {
|
||||
if (process.platform === 'win32') {
|
||||
browserWindow.webContents.on('devtools-opened', () => {
|
||||
const css = `
|
||||
:root {
|
||||
--sys-color-base: var(--ref-palette-neutral100);
|
||||
--source-code-font-family: consolas;
|
||||
--source-code-font-size: 12px;
|
||||
--monospace-font-family: consolas;
|
||||
--monospace-font-size: 12px;
|
||||
--default-font-family: system-ui, sans-serif;
|
||||
--default-font-size: 12px;
|
||||
}
|
||||
.-theme-with-dark-background {
|
||||
--sys-color-base: var(--ref-palette-secondary25);
|
||||
}
|
||||
body {
|
||||
--default-font-family: system-ui,sans-serif;
|
||||
}`
|
||||
|
||||
browserWindow.webContents.devToolsWebContents?.executeJavaScript(`
|
||||
const overriddenStyle = document.createElement('style');
|
||||
overriddenStyle.innerHTML = '${css.replaceAll('\n', ' ')}';
|
||||
document.body.append(overriddenStyle);
|
||||
document.body.classList.remove('platform-windows');`)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export { isTilingWindowManager }
|
||||
|
||||
68
src/preload/index.d.ts
vendored
@@ -1,13 +1,17 @@
|
||||
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
|
||||
import { ElectronAPI } from '@electron-toolkit/preload'
|
||||
import type { FileMetadataResponse, ListFilesResponse, UploadFileResponse } from '@google/generative-ai/server'
|
||||
import { ExtractChunkData } from '@llm-tools/embedjs-interfaces'
|
||||
import { FileType } from '@renderer/types'
|
||||
import { WebDavConfig } from '@renderer/types'
|
||||
import { AppInfo, KnowledgeBaseParams, KnowledgeItem, LanguageVarious } from '@renderer/types'
|
||||
import type { MCPServer, MCPTool } from '@renderer/types'
|
||||
import { AppInfo, FileType, KnowledgeBaseParams, KnowledgeItem, LanguageVarious, WebDavConfig } from '@renderer/types'
|
||||
import type { LoaderReturn } from '@shared/config/types'
|
||||
import type { OpenDialogOptions } from 'electron'
|
||||
import type { UpdateInfo } from 'electron-updater'
|
||||
import { Readable } from 'stream'
|
||||
|
||||
interface BackupFile {
|
||||
fileName: string
|
||||
modifiedTime: string
|
||||
size: number
|
||||
}
|
||||
|
||||
declare global {
|
||||
interface Window {
|
||||
@@ -15,15 +19,22 @@ declare global {
|
||||
api: {
|
||||
getAppInfo: () => Promise<AppInfo>
|
||||
checkForUpdate: () => Promise<{ currentVersion: string; updateInfo: UpdateInfo | null }>
|
||||
showUpdateDialog: () => Promise<void>
|
||||
openWebsite: (url: string) => void
|
||||
setProxy: (proxy: string | undefined) => void
|
||||
setLanguage: (theme: LanguageVarious) => void
|
||||
setLaunchOnBoot: (isActive: boolean) => void
|
||||
setLaunchToTray: (isActive: boolean) => void
|
||||
setTray: (isActive: boolean) => void
|
||||
setTrayOnClose: (isActive: boolean) => void
|
||||
restartTray: () => void
|
||||
setTheme: (theme: 'light' | 'dark') => void
|
||||
minApp: (options: { url: string; windowOptions?: Electron.BrowserWindowConstructorOptions }) => void
|
||||
reload: () => void
|
||||
clearCache: () => Promise<{ success: boolean; error?: string }>
|
||||
system: {
|
||||
getDeviceType: () => Promise<'mac' | 'windows' | 'linux'>
|
||||
}
|
||||
zip: {
|
||||
compress: (text: string) => Promise<Buffer>
|
||||
decompress: (text: Buffer) => Promise<string>
|
||||
@@ -33,6 +44,9 @@ declare global {
|
||||
restore: (backupPath: string) => Promise<string>
|
||||
backupToWebdav: (data: string, webdavConfig: WebDavConfig) => Promise<boolean>
|
||||
restoreFromWebdav: (webdavConfig: WebDavConfig) => Promise<string>
|
||||
listWebdavFiles: (webdavConfig: WebDavConfig) => Promise<BackupFile[]>
|
||||
checkConnection: (webdavConfig: WebDavConfig) => Promise<boolean>
|
||||
createDirectory: (webdavConfig: WebDavConfig, path: string, options?: CreateDirectoryOptions) => Promise<void>
|
||||
}
|
||||
file: {
|
||||
select: (options?: OpenDialogOptions) => Promise<FileType[] | null>
|
||||
@@ -68,8 +82,8 @@ declare global {
|
||||
update: (shortcuts: Shortcut[]) => Promise<void>
|
||||
}
|
||||
knowledgeBase: {
|
||||
create: ({ id, model, apiKey, baseURL }: KnowledgeBaseParams) => Promise<void>
|
||||
reset: ({ base }: { base: KnowledgeBaseParams }) => Promise<void>
|
||||
create: (base: KnowledgeBaseParams) => Promise<void>
|
||||
reset: (base: KnowledgeBaseParams) => Promise<void>
|
||||
delete: (id: string) => Promise<void>
|
||||
add: ({
|
||||
base,
|
||||
@@ -90,6 +104,15 @@ declare global {
|
||||
base: KnowledgeBaseParams
|
||||
}) => Promise<void>
|
||||
search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) => Promise<ExtractChunkData[]>
|
||||
rerank: ({
|
||||
search,
|
||||
base,
|
||||
results
|
||||
}: {
|
||||
search: string
|
||||
base: KnowledgeBaseParams
|
||||
results: ExtractChunkData[]
|
||||
}) => Promise<ExtractChunkData[]>
|
||||
}
|
||||
window: {
|
||||
setMinimumSize: (width: number, height: number) => Promise<void>
|
||||
@@ -114,6 +137,7 @@ declare global {
|
||||
hide: () => Promise<void>
|
||||
close: () => Promise<void>
|
||||
toggle: () => Promise<void>
|
||||
setPin: (isPinned: boolean) => Promise<void>
|
||||
}
|
||||
aes: {
|
||||
encrypt: (text: string, secretKey: string, iv: string) => Promise<{ iv: string; encryptedData: string }>
|
||||
@@ -122,6 +146,36 @@ declare global {
|
||||
shell: {
|
||||
openExternal: (url: string, options?: OpenExternalOptions) => Promise<void>
|
||||
}
|
||||
mcp: {
|
||||
removeServer: (server: MCPServer) => Promise<void>
|
||||
restartServer: (server: MCPServer) => Promise<void>
|
||||
stopServer: (server: MCPServer) => Promise<void>
|
||||
listTools: (server: MCPServer) => Promise<MCPTool[]>
|
||||
callTool: ({ server, name, args }: { server: MCPServer; name: string; args: any }) => Promise<any>
|
||||
getInstallInfo: () => Promise<{ dir: string; uvPath: string; bunPath: string }>
|
||||
}
|
||||
copilot: {
|
||||
getAuthMessage: (
|
||||
headers?: Record<string, string>
|
||||
) => Promise<{ device_code: string; user_code: string; verification_uri: string }>
|
||||
getCopilotToken: (device_code: string, headers?: Record<string, string>) => Promise<{ access_token: string }>
|
||||
saveCopilotToken: (access_token: string) => Promise<void>
|
||||
getToken: (headers?: Record<string, string>) => Promise<{ token: string }>
|
||||
logout: () => Promise<void>
|
||||
getUser: (token: string) => Promise<{ login: string; avatar: string }>
|
||||
}
|
||||
isBinaryExist: (name: string) => Promise<boolean>
|
||||
getBinaryPath: (name: string) => Promise<string>
|
||||
installUVBinary: () => Promise<void>
|
||||
installBunBinary: () => Promise<void>
|
||||
protocol: {
|
||||
onReceiveData: (callback: (data: { url: string; params: any }) => void) => () => void
|
||||
}
|
||||
nutstore: {
|
||||
getSSOUrl: () => Promise<string>
|
||||
decryptToken: (token: string) => Promise<{ username: string; access_token: string }>
|
||||
getDirectoryContents: (token: string, path: string) => Promise<any>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
|
||||
import { electronAPI } from '@electron-toolkit/preload'
|
||||
import { FileType, KnowledgeBaseParams, KnowledgeItem, Shortcut, WebDavConfig } from '@types'
|
||||
import { FileType, KnowledgeBaseParams, KnowledgeItem, MCPServer, Shortcut, WebDavConfig } from '@types'
|
||||
import { contextBridge, ipcRenderer, OpenDialogOptions, shell } from 'electron'
|
||||
import { CreateDirectoryOptions } from 'webdav'
|
||||
|
||||
// Custom APIs for renderer
|
||||
const api = {
|
||||
@@ -8,13 +10,20 @@ const api = {
|
||||
reload: () => ipcRenderer.invoke('app:reload'),
|
||||
setProxy: (proxy: string) => ipcRenderer.invoke('app:proxy', proxy),
|
||||
checkForUpdate: () => ipcRenderer.invoke('app:check-for-update'),
|
||||
showUpdateDialog: () => ipcRenderer.invoke('app:show-update-dialog'),
|
||||
setLanguage: (lang: string) => ipcRenderer.invoke('app:set-language', lang),
|
||||
setLaunchOnBoot: (isActive: boolean) => ipcRenderer.invoke('app:set-launch-on-boot', isActive),
|
||||
setLaunchToTray: (isActive: boolean) => ipcRenderer.invoke('app:set-launch-to-tray', isActive),
|
||||
setTray: (isActive: boolean) => ipcRenderer.invoke('app:set-tray', isActive),
|
||||
setTrayOnClose: (isActive: boolean) => ipcRenderer.invoke('app:set-tray-on-close', isActive),
|
||||
restartTray: () => ipcRenderer.invoke('app:restart-tray'),
|
||||
setTheme: (theme: 'light' | 'dark') => ipcRenderer.invoke('app:set-theme', theme),
|
||||
openWebsite: (url: string) => ipcRenderer.invoke('open:website', url),
|
||||
minApp: (url: string) => ipcRenderer.invoke('minapp', url),
|
||||
clearCache: () => ipcRenderer.invoke('app:clear-cache'),
|
||||
system: {
|
||||
getDeviceType: () => ipcRenderer.invoke('system:getDeviceType')
|
||||
},
|
||||
zip: {
|
||||
compress: (text: string) => ipcRenderer.invoke('zip:compress', text),
|
||||
decompress: (text: Buffer) => ipcRenderer.invoke('zip:decompress', text)
|
||||
@@ -25,7 +34,11 @@ const api = {
|
||||
restore: (backupPath: string) => ipcRenderer.invoke('backup:restore', backupPath),
|
||||
backupToWebdav: (data: string, webdavConfig: WebDavConfig) =>
|
||||
ipcRenderer.invoke('backup:backupToWebdav', data, webdavConfig),
|
||||
restoreFromWebdav: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:restoreFromWebdav', webdavConfig)
|
||||
restoreFromWebdav: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:restoreFromWebdav', webdavConfig),
|
||||
listWebdavFiles: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:listWebdavFiles', webdavConfig),
|
||||
checkConnection: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:checkConnection', webdavConfig),
|
||||
createDirectory: (webdavConfig: WebDavConfig, path: string, options?: CreateDirectoryOptions) =>
|
||||
ipcRenderer.invoke('backup:createDirectory', webdavConfig, path, options)
|
||||
},
|
||||
file: {
|
||||
select: (options?: OpenDialogOptions) => ipcRenderer.invoke('file:select', options),
|
||||
@@ -58,9 +71,8 @@ const api = {
|
||||
update: (shortcuts: Shortcut[]) => ipcRenderer.invoke('shortcuts:update', shortcuts)
|
||||
},
|
||||
knowledgeBase: {
|
||||
create: ({ id, model, apiKey, baseURL }: KnowledgeBaseParams) =>
|
||||
ipcRenderer.invoke('knowledge-base:create', { id, model, apiKey, baseURL }),
|
||||
reset: ({ base }: { base: KnowledgeBaseParams }) => ipcRenderer.invoke('knowledge-base:reset', { base }),
|
||||
create: (base: KnowledgeBaseParams) => ipcRenderer.invoke('knowledge-base:create', base),
|
||||
reset: (base: KnowledgeBaseParams) => ipcRenderer.invoke('knowledge-base:reset', base),
|
||||
delete: (id: string) => ipcRenderer.invoke('knowledge-base:delete', id),
|
||||
add: ({
|
||||
base,
|
||||
@@ -74,7 +86,9 @@ const api = {
|
||||
remove: ({ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }) =>
|
||||
ipcRenderer.invoke('knowledge-base:remove', { uniqueId, uniqueIds, base }),
|
||||
search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) =>
|
||||
ipcRenderer.invoke('knowledge-base:search', { search, base })
|
||||
ipcRenderer.invoke('knowledge-base:search', { search, base }),
|
||||
rerank: ({ search, base, results }: { search: string; base: KnowledgeBaseParams; results: ExtractChunkData[] }) =>
|
||||
ipcRenderer.invoke('knowledge-base:rerank', { search, base, results })
|
||||
},
|
||||
window: {
|
||||
setMinimumSize: (width: number, height: number) => ipcRenderer.invoke('window:set-minimum-size', width, height),
|
||||
@@ -98,15 +112,57 @@ const api = {
|
||||
show: () => ipcRenderer.invoke('miniwindow:show'),
|
||||
hide: () => ipcRenderer.invoke('miniwindow:hide'),
|
||||
close: () => ipcRenderer.invoke('miniwindow:close'),
|
||||
toggle: () => ipcRenderer.invoke('miniwindow:toggle')
|
||||
toggle: () => ipcRenderer.invoke('miniwindow:toggle'),
|
||||
setPin: (isPinned: boolean) => ipcRenderer.invoke('miniwindow:set-pin', isPinned)
|
||||
},
|
||||
aes: {
|
||||
encrypt: (text: string, secretKey: string, iv: string) => ipcRenderer.invoke('aes:encrypt', text, secretKey, iv),
|
||||
decrypt: (encryptedData: string, iv: string, secretKey: string) =>
|
||||
ipcRenderer.invoke('aes:decrypt', encryptedData, iv, secretKey)
|
||||
},
|
||||
mcp: {
|
||||
removeServer: (server: MCPServer) => ipcRenderer.invoke('mcp:remove-server', server),
|
||||
restartServer: (server: MCPServer) => ipcRenderer.invoke('mcp:restart-server', server),
|
||||
stopServer: (server: MCPServer) => ipcRenderer.invoke('mcp:stop-server', server),
|
||||
listTools: (server: MCPServer) => ipcRenderer.invoke('mcp:list-tools', server),
|
||||
callTool: ({ server, name, args }: { server: MCPServer; name: string; args: any }) =>
|
||||
ipcRenderer.invoke('mcp:call-tool', { server, name, args }),
|
||||
getInstallInfo: () => ipcRenderer.invoke('mcp:get-install-info')
|
||||
},
|
||||
shell: {
|
||||
openExternal: shell.openExternal
|
||||
},
|
||||
copilot: {
|
||||
getAuthMessage: (headers?: Record<string, string>) => ipcRenderer.invoke('copilot:get-auth-message', headers),
|
||||
getCopilotToken: (device_code: string, headers?: Record<string, string>) =>
|
||||
ipcRenderer.invoke('copilot:get-copilot-token', device_code, headers),
|
||||
saveCopilotToken: (access_token: string) => ipcRenderer.invoke('copilot:save-copilot-token', access_token),
|
||||
getToken: (headers?: Record<string, string>) => ipcRenderer.invoke('copilot:get-token', headers),
|
||||
logout: () => ipcRenderer.invoke('copilot:logout'),
|
||||
getUser: (token: string) => ipcRenderer.invoke('copilot:get-user', token)
|
||||
},
|
||||
|
||||
// Binary related APIs
|
||||
isBinaryExist: (name: string) => ipcRenderer.invoke('app:is-binary-exist', name),
|
||||
getBinaryPath: (name: string) => ipcRenderer.invoke('app:get-binary-path', name),
|
||||
installUVBinary: () => ipcRenderer.invoke('app:install-uv-binary'),
|
||||
installBunBinary: () => ipcRenderer.invoke('app:install-bun-binary'),
|
||||
protocol: {
|
||||
onReceiveData: (callback: (data: { url: string; params: any }) => void) => {
|
||||
const listener = (_event: Electron.IpcRendererEvent, data: { url: string; params: any }) => {
|
||||
callback(data)
|
||||
}
|
||||
ipcRenderer.on('protocol-data', listener)
|
||||
return () => {
|
||||
ipcRenderer.off('protocol-data', listener)
|
||||
}
|
||||
}
|
||||
},
|
||||
nutstore: {
|
||||
getSSOUrl: () => ipcRenderer.invoke('nutstore:get-sso-url'),
|
||||
decryptToken: (token: string) => ipcRenderer.invoke('nutstore:decrypt-token', token),
|
||||
getDirectoryContents: (token: string, path: string) =>
|
||||
ipcRenderer.invoke('nutstore:get-directory-contents', token, path)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -117,6 +173,11 @@ if (process.contextIsolated) {
|
||||
try {
|
||||
contextBridge.exposeInMainWorld('electron', electronAPI)
|
||||
contextBridge.exposeInMainWorld('api', api)
|
||||
contextBridge.exposeInMainWorld('obsidian', {
|
||||
getVaults: () => ipcRenderer.invoke('obsidian:get-vaults'),
|
||||
getFolders: (vaultName: string) => ipcRenderer.invoke('obsidian:get-files', vaultName),
|
||||
getFiles: (vaultName: string) => ipcRenderer.invoke('obsidian:get-files', vaultName)
|
||||
})
|
||||
} catch (error) {
|
||||
console.error(error)
|
||||
}
|
||||
|
||||
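On the renderer side, the protocol.onReceiveData bridge above returns an unsubscribe function, so callers can detach their listener; a brief usage sketch:

// Renderer-side sketch: subscribe to protocol deep-link data and clean up later.
const unsubscribe = window.api.protocol.onReceiveData(({ url, params }) => {
  console.log('Received protocol data:', url, params)
})

// ...later, e.g. when the consuming component unmounts:
unsubscribe()
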
@@ -39,5 +39,4 @@
|
||||
<script type="module" src="/src/init.ts"></script>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@@ -8,6 +8,7 @@ import { PersistGate } from 'redux-persist/integration/react'
|
||||
import Sidebar from './components/app/Sidebar'
|
||||
import TopViewContainer from './components/TopView'
|
||||
import AntdProvider from './context/AntdProvider'
|
||||
import StyleSheetManager from './context/StyleSheetManager'
|
||||
import { SyntaxHighlighterProvider } from './context/SyntaxHighlighterProvider'
|
||||
import { ThemeProvider } from './context/ThemeProvider'
|
||||
import NavigationHandler from './handler/NavigationHandler'
|
||||
@@ -20,9 +21,10 @@ import PaintingsPage from './pages/paintings/PaintingsPage'
|
||||
import SettingsPage from './pages/settings/SettingsPage'
|
||||
import TranslatePage from './pages/translate/TranslatePage'
|
||||
|
||||
function App(): JSX.Element {
|
||||
function App(): React.ReactElement {
|
||||
return (
|
||||
<Provider store={store}>
|
||||
<StyleSheetManager>
|
||||
<ThemeProvider>
|
||||
<AntdProvider>
|
||||
<SyntaxHighlighterProvider>
|
||||
@@ -30,7 +32,6 @@ function App(): JSX.Element {
|
||||
<TopViewContainer>
|
||||
<HashRouter>
|
||||
<NavigationHandler />
|
||||
{/* Navigation handler component */}
|
||||
<Sidebar />
|
||||
<Routes>
|
||||
<Route path="/" element={<HomePage />} />
|
||||
@@ -48,6 +49,7 @@ function App(): JSX.Element {
|
||||
</SyntaxHighlighterProvider>
|
||||
</AntdProvider>
|
||||
</ThemeProvider>
|
||||
</StyleSheetManager>
|
||||
</Provider>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
@font-face {
|
||||
font-family: 'iconfont'; /* Project id 4753420 */
|
||||
src: url('iconfont.woff2?t=1738750230250') format('woff2');
|
||||
src: url('iconfont.woff2?t=1742184675192') format('woff2');
|
||||
}
|
||||
|
||||
.iconfont {
|
||||
@@ -11,6 +11,14 @@
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
}
|
||||
|
||||
.icon-obsidian:before {
|
||||
content: '\e677';
|
||||
}
|
||||
|
||||
.icon-notion:before {
|
||||
content: '\e690';
|
||||
}
|
||||
|
||||
.icon-thinking:before {
|
||||
content: '\e65b';
|
||||
}
|
||||
@@ -27,10 +35,6 @@
|
||||
content: '\e630';
|
||||
}
|
||||
|
||||
.icon-a-darkmode:before {
|
||||
content: '\e6cd';
|
||||
}
|
||||
|
||||
.icon-ai-model:before {
|
||||
content: '\e827';
|
||||
}
|
||||
|
||||
BIN
src/renderer/src/assets/images/apps/cici-app-logo.png
Normal file
|
After Width: | Height: | Size: 24 KiB |
BIN
src/renderer/src/assets/images/apps/cici.webp
Normal file
|
After Width: | Height: | Size: 4.2 KiB |
BIN
src/renderer/src/assets/images/apps/you.jpg
Normal file
|
After Width: | Height: | Size: 4.9 KiB |
BIN
src/renderer/src/assets/images/apps/zhihu.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
BIN
src/renderer/src/assets/images/models/voyageai.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
BIN
src/renderer/src/assets/images/providers/alayanew.webp
Normal file
|
After Width: | Height: | Size: 5.2 KiB |
14
src/renderer/src/assets/images/providers/gpustack.svg
Normal file
|
After Width: | Height: | Size: 20 KiB |
BIN
src/renderer/src/assets/images/providers/tencent-cloud-ti.png
Normal file
|
After Width: | Height: | Size: 3.4 KiB |
BIN
src/renderer/src/assets/images/providers/voyageai.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
BIN
src/renderer/src/assets/images/search/exa.png
Normal file
|
After Width: | Height: | Size: 1.7 KiB |
1
src/renderer/src/assets/images/search/searxng.svg
Executable file
@@ -0,0 +1 @@
|
||||
<svg height="92mm" viewBox="0 0 92 92" width="92mm" xmlns="http://www.w3.org/2000/svg"><g transform="translate(-40.921303 -17.416526)"><g fill="none"><circle cx="75" cy="92" r="0" stroke="#000" stroke-width="12"/><circle cx="75.921" cy="53.903" r="30" stroke="#3050ff" stroke-width="10"/><path d="m67.514849 37.91524a18 18 0 0 1 21.051475 3.312407 18 18 0 0 1 3.137312 21.078282" stroke="#3050ff" stroke-width="5"/></g><path d="m3.706 122.09h18.846v39.963h-18.846z" fill="#3050ff" transform="matrix(.69170581 -.72217939 .72217939 .69170581 0 0)"/></g></svg>
|
||||
|
After Width: | Height: | Size: 557 B |
@@ -1,14 +0,0 @@
|
||||
<svg width="778" height="257" viewBox="0 0 778 257" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M97.1853 5.35901L127.346 53.1064C132.19 60.7745 126.68 70.7725 117.61 70.7725H105.279V142.278H87.4492V-0.00683594C91.1876 -0.00683594 94.926 1.78179 97.1853 5.35901Z" fill="#8FBCFA"/>
|
||||
<path d="M47.5482 53.1064L77.7098 5.35901C79.9691 1.78179 83.7075 -0.00683594 87.4459 -0.00683594V142.279C81.0587 141.981 74.8755 143.829 69.616 147.544V70.7725H57.2849C48.2149 70.7725 42.7047 60.7745 47.5482 53.1064Z" fill="#468BFF"/>
|
||||
<path d="M182.003 189.445L107.34 189.445C111.648 184.622 114.201 178.481 114.476 171.615H252.782C252.782 175.353 250.993 179.092 247.416 181.351L199.669 211.512C192.001 216.356 182.003 210.846 182.003 201.776V189.445Z" fill="#FDBB11"/>
|
||||
<path d="M199.668 131.718L247.415 161.879C250.993 164.138 252.781 167.877 252.781 171.615H114.471C114.72 165.212 112.733 158.898 108.957 153.785H182.002V141.454C182.002 132.384 192 126.874 199.668 131.718Z" fill="#F6D785"/>
|
||||
<path d="M46.9409 209.797L3.37891 253.359C6.02226 256.003 9.93035 257.381 14.0576 256.45L69.1472 244.014C77.9944 242.017 81.1678 231.051 74.7545 224.638L66.035 215.918L98.7916 183.055C105.771 176.075 105.462 164.899 98.6758 158.113L46.9409 209.797Z" fill="#FF9A9D"/>
|
||||
<path d="M40.8221 190.708L73.6898 157.963C80.6694 150.983 91.8931 151.328 98.679 158.113L46.9436 209.802L3.38131 253.364C0.737954 250.721 -0.640662 246.812 0.291 242.685L12.7265 187.596C14.7236 178.748 25.6895 175.575 32.1028 181.988L40.8221 190.708Z" fill="#FE363B"/>
|
||||
<path d="M777.344 93.6689L718.337 234.049H692.704L713.348 186.567L675.156 93.6689H702.166L726.766 160.246L751.711 93.6689H777.344Z" fill="#FFFFFF"/>
|
||||
<path d="M664.096 70.1191V188.976H640.012V70.1191H664.096Z" fill="#FFFFFF"/>
|
||||
<path d="M606.041 82.2736C601.797 82.2736 598.242 80.9547 595.375 78.3168C592.622 75.5643 591.246 72.181 591.246 68.1668C591.246 64.1527 592.622 60.8267 595.375 58.1889C598.242 55.4363 601.797 54.0601 606.041 54.0601C610.284 54.0601 613.783 55.4363 616.535 58.1889C619.402 60.8267 620.836 63.6942 620.836 67.7084C620.836 71.7225 619.402 75.5643 616.535 78.3168C613.783 80.9547 610.284 82.2736 606.041 82.2736ZM617.911 93.6279V188.978H593.827V93.6279H617.911Z" fill="#FFFFFF"/>
|
||||
<path d="M532.3 166.783L556.385 93.6689H582.018L546.751 188.976H517.505L482.41 93.6689H508.215L532.3 166.783Z" fill="#FFFFFF"/>
|
||||
<path d="M371.52 140.972C371.52 131.338 373.412 122.794 377.197 115.339C381.096 107.884 386.314 102.15 392.852 98.1355C399.504 94.1213 406.901 92.1143 415.044 92.1143C422.155 92.1143 428.348 93.5479 433.624 96.4151C439.014 99.2823 443.315 102.895 446.526 107.253V93.6626H470.783V188.969H446.526V175.035C443.43 179.507 439.129 183.235 433.624 186.217C428.233 189.084 421.983 190.518 414.872 190.518C406.844 190.518 399.504 188.453 392.852 184.324C386.314 180.196 381.096 174.404 377.197 166.949C373.412 159.38 371.52 150.72 371.52 140.972ZM446.526 141.316C446.526 135.467 445.379 130.478 443.086 126.349C440.792 122.105 437.695 118.894 433.796 116.715C429.896 114.421 425.71 113.274 421.237 113.274C416.764 113.274 412.636 114.364 408.851 116.543C405.066 118.722 401.97 121.933 399.561 126.177C397.267 130.306 396.12 135.237 396.12 140.972C396.12 146.706 397.267 151.753 399.561 156.111C401.97 160.354 405.066 163.623 408.851 165.917C412.75 168.211 416.879 169.357 421.237 169.357C425.71 169.357 429.896 168.268 433.796 166.089C437.695 163.795 440.792 160.584 443.086 156.455C445.379 152.211 446.526 147.165 446.526 141.316Z" fill="#FFFFFF"/>
|
||||
<path d="M340.767 113.445V159.55C340.767 162.762 341.513 165.113 343.004 166.604C344.609 167.98 347.247 168.668 350.917 168.668H362.099V188.968H346.96C326.66 188.968 316.51 179.105 316.51 159.378V113.445H305.156V93.6614H316.51V70.0928H340.767V93.6614H362.099V113.445H340.767Z" fill="#FFFFFF"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 3.7 KiB |
BIN
src/renderer/src/assets/images/search/tavily.png
Normal file
|
After Width: | Height: | Size: 4.2 KiB |
18
src/renderer/src/assets/styles/animation.scss
Normal file
@@ -0,0 +1,18 @@
@keyframes animation-pulse {
  0% {
    box-shadow: 0 0 0 0 rgba(var(--pulse-color), 0.5);
  }
  70% {
    box-shadow: 0 0 0 var(--pulse-size) rgba(var(--pulse-color), 0);
  }
  100% {
    box-shadow: 0 0 0 0 rgba(var(--pulse-color), 0);
  }
}

// Pulsing ripple (radio-wave) effect
.animation-pulse {
  --pulse-color: 59, 130, 246;
  --pulse-size: 8px;
  animation: animation-pulse 1.5s infinite;
}