Compare commits: feat/varia...v1.2.10 (323 commits)
| SHA1 |
|---|
| f844a7e024 |
| 74e7c6a327 |
| 8c05b4f067 |
| 449d74040f |
| 712bd11274 |
| cc89f5330a |
| db7a7e2c2b |
| d64ae18bfc |
| 9f2ac4aa81 |
| e227fbf821 |
| 6595dc9a1e |
| d16235d916 |
| f5a7258229 |
| dfd957434c |
| 2e0d315ce4 |
| 47bde9eb36 |
| 506af6cfb9 |
| 4e07faf520 |
| 381377c0eb |
| 24f59047a5 |
| e1a97ccd50 |
| f2929d44d8 |
| bb02dca7e9 |
| 54cb4d7ac1 |
| b76a609b97 |
| 9cea0166e6 |
| e5e04c8132 |
| 6b113c19a3 |
| bcbb3f294e |
| e17765f1bf |
| 3064b7a3e3 |
| da72a5706f |
| 261eeb097a |
| bf17e71445 |
| 0744e42be9 |
| 2833c377fa |
| ad5769f6b7 |
| a6a4a32159 |
| a112b143e7 |
| a7ee3cbd02 |
| 32f160444b |
| a546c265ee |
| ea89a37b1d |
| c288b4a8d0 |
| e8384db91a |
| 36c87451d9 |
| eaa37fe674 |
| 308ad9f68f |
| 62b6584d65 |
| 5a44f6aca8 |
| 4c6a904929 |
| be4ef2990f |
| 2807e71f1a |
| a5f8ac8587 |
| 4bd50251ff |
| f0d60052c4 |
| 84f4b565f3 |
| ebdacdde3e |
| aeb66195a0 |
| 53ef8b0f32 |
| 794c23f296 |
| 62440cbfa1 |
| 39b723f143 |
| bdc75f2f4e |
| eb3f136997 |
| 6ba5768650 |
| 1f588d242e |
| 25b1e309ed |
| 7a7b24fe2f |
| 0686b2d813 |
| 4a027892b9 |
| be323b6304 |
| 2f5cfc0162 |
| 4a00eb57ad |
| 784b02e62e |
| 4c2c026f6d |
| 07b2c6f169 |
| aafd04090e |
| af10ae3f37 |
| 3df4680c7b |
| 5d04ef2508 |
| 7fb6eb1949 |
| 4fe99cddce |
| a84763def6 |
| 8125fac309 |
| 6c6b2f0b9e |
| 314be9b198 |
| 409e0096d8 |
| a1ffabae41 |
| 0fa10627bc |
| 80618b2331 |
| bf8baedfcf |
| 98f2c8a0b6 |
| 3887cf2a6f |
| eb89c6ea21 |
| fd09edc2b9 |
| 55a9447a7b |
| c576aa5cb4 |
| ca553a2454 |
| ef9c8fd037 |
| 234a5e085f |
| cb22b80ead |
| a6d9ad6716 |
| 185900ada6 |
| 288ebe5222 |
| 6e91066e5d |
| 49a7b2dc8b |
| 4789ba3e8f |
| cc18f0f0c3 |
| 9bb96c212d |
| 81eab1179b |
| 24c9a8e8f1 |
| c4d0f8e950 |
| 3bdf0be4ad |
| 9e4ebf7c6f |
| 2408566d34 |
| cf61ae927c |
| 60680936d3 |
| 5bfa13112a |
| 7f7db748a7 |
| 55aac1cb9b |
| 4663794ba6 |
| e4de5331e0 |
| c7ed15684a |
| 1bb27ee3f9 |
| 579d7d1e5d |
| 36aa13c4f1 |
| c5161b9da4 |
| 32c96daf1f |
| 30309c29ff |
| 8b462935b4 |
| d907344ca7 |
| 9b21c334cc |
| 3e1e814004 |
| 1c5adc1329 |
| 3360905275 |
| 0a28df132d |
| fd3d9f17b8 |
| 73f8148a94 |
| 456f0657a6 |
| 53f74725ed |
| 4fa04a801a |
| c5580f5b71 |
| f8f808c9f4 |
| dbbd539207 |
| 703eae5777 |
| 9438c8e6ff |
| 75f986087a |
| 7ac8f480bb |
| 676ac21804 |
| 24ddd69cd5 |
| 35c50b54a8 |
| ac0fe75078 |
| f0c25f8108 |
| c10e5a9ca4 |
| 444abc9b88 |
| 2d130a8526 |
| 5061ee5c4d |
| 9e913f531c |
| 98130de8ac |
| b339b7b6d4 |
| eb8ee5ec02 |
| a34e10cb0d |
| fcae5b097b |
| 3cb339e480 |
| 35224f5213 |
| b8b37fcd11 |
| 615fda0547 |
| 0585d28312 |
| 9ad40b9219 |
| 18e99dee67 |
| 43ef1d6815 |
| 141904e61a |
| 247501c26c |
| 748252febc |
| 8ac4d07d6b |
| 5fbff8c1fe |
| f0f44d5768 |
| 8c20bd6d8f |
| d1c2bbed1b |
| f372ebe485 |
| 833873b95e |
| 1d211ee9f7 |
| c7ef2c5791 |
| 6e66721688 |
| ffc8a33ccf |
| 81538a5446 |
| e6b325dd88 |
| 0e8c053cee |
| 24e46efa0c |
| 64200b00a9 |
| ab4fb7d1d6 |
| 352731827c |
| e13a43d82a |
| e51de5b492 |
| a54360cc69 |
| 88cbb27557 |
| e22d076d67 |
| de7f806bbc |
| 5f7d8652bc |
| 39c614e4db |
| 412e8b03fc |
| 486ccc1a15 |
| c6ab7b9326 |
| 41981acd77 |
| 97ef7016d3 |
| e4514bd04c |
| f39bb9869b |
| fc884d72af |
| 883bdd6283 |
| fa19f41385 |
| 8b95a131ec |
| 72e18fbcc1 |
| b62c59eb52 |
| ffe7702c1c |
| 1ed6320caf |
| 315271ac35 |
| 0bd24f652d |
| 0e7c4e4bdd |
| d4bf8da225 |
| 8eb6632620 |
| 10225512f4 |
| 76058bd749 |
| a692ae7e9d |
| a70ca190ba |
| 7c39116351 |
| 04333535dd |
| a1dba93d27 |
| 0842b7e84d |
| 24d6d146c0 |
| 978c3ea3cf |
| a9eb235c43 |
| e0a47de8f7 |
| 78a4696327 |
| 57fa0aad38 |
| 2e0251aed7 |
| afd1381d7f |
| c3b5cbee8f |
| e1f255048e |
| 8a579be4c1 |
| efcffbaa30 |
| f9c6bddae5 |
| 5e086a1686 |
| 0db4c8b475 |
| d5fcef39d3 |
| 5c44f71684 |
| 3462be2a2a |
| a0be911dc9 |
| f7f7d2bde8 |
| 10efa444bf |
| 24e28b86cf |
| fa66d048d7 |
| fe7a392116 |
| c883fd85d8 |
| aa73025568 |
| 9689f00214 |
| 3674cc4afe |
| 1f21b99820 |
| a3e10dd116 |
| ab1a5f18c9 |
| 2a0d6eb08a |
| f78663f815 |
| 8bcb31071e |
| 3aaa1848f0 |
| 037027f1f4 |
| 97c1d67cbf |
| d38c4c7368 |
| b1bd5d0531 |
| 1fcee6c829 |
| cbcebdc87a |
| 96df9f6979 |
| 4ef9d52694 |
| 41b9f8dbd5 |
| 8191791036 |
| 99b37f2782 |
| da49c3ddd3 |
| 6891068ca1 |
| b361001f39 |
| 3823912b3e |
| b5ad77e70c |
| 3491eec86b |
| 581ad5fbda |
| 90424808ab |
| c884b11f01 |
| a530ce652e |
| 9c052dee5c |
| 1085c11240 |
| ae6097a29e |
| d74f05f27e |
| 8501ab82c6 |
| f2ca56a088 |
| e02c967f5b |
| 7284679907 |
| 56e9a7371a |
| 95639df35c |
| 641dfc60b0 |
| 46c7df6f5b |
| b828d1f54f |
| d9abfc5443 |
| 7364646caa |
| 5fa7465174 |
| bc02727633 |
| c76f274562 |
| f9be0e0d26 |
| ea059d5517 |
| 9c6de71fbb |
| 3290ac4b1b |
| ef8250ab72 |
| 773c0da9ef |
| a3d124b9fd |
| c11adc01dc |
| d8baf378ea |
| 3768c135d8 |
| 10848f7a45 |
| 4d5cfe06f5 |
| fb5ddaf9d5 |
| 8cb11e6d55 |
| d0cb333f3c |
| 23de48ecbd |
| 06c730aaf6 |
| aed9c04c20 |
| d067d21561 |
| 515721239f |
.github/ISSUE_TEMPLATE/#0_bug_report.yml (14 changes, vendored)

```
@@ -1,7 +1,7 @@
name: 🐛 错误报告 (中文)
description: 创建一个报告以帮助我们改进
title: '[错误]: '
labels: ['bug']
labels: ['kind/bug']
body:
- type: markdown
attributes:
@@ -18,7 +18,9 @@ body:
options:
- label: 我理解 Issue 是用于反馈和解决问题的,而非吐槽评论区,将尽可能提供更多信息帮助问题解决。
required: true
- label: 我已经查看了置顶 Issue 并搜索了现有的 [开放Issue](https://github.com/CherryHQ/cherry-studio/issues)和[已关闭Issue](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed%20),没有找到类似的问题。
- label: 我的问题不是 [常见问题](https://github.com/CherryHQ/cherry-studio/issues/3881) 中的内容。
required: true
- label: 我已经查看了 **置顶 Issue** 并搜索了现有的 [开放Issue](https://github.com/CherryHQ/cherry-studio/issues)和[已关闭Issue](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed%20),没有找到类似的问题。
required: true
- label: 我填写了简短且清晰明确的标题,以便开发者在翻阅 Issue 列表时能快速确定大致问题。而不是“一个建议”、“卡住了”等。
required: true
@@ -48,8 +50,8 @@ body:
id: description
attributes:
label: 错误描述
description: 描述问题时请尽可能详细
placeholder: 告诉我们发生了什么...
description: 描述问题时请尽可能详细。请尽可能提供截图或屏幕录制,以帮助我们更好地理解问题。
placeholder: 告诉我们发生了什么...(记得附上截图/录屏,如果适用)
validations:
required: true

@@ -57,12 +59,14 @@ body:
id: reproduction
attributes:
label: 重现步骤
description: 提供详细的重现步骤,以便于我们可以准确地重现问题
description: 提供详细的重现步骤,以便于我们的开发人员可以准确地重现问题。请尽可能为每个步骤提供截图或屏幕录制。
placeholder: |
1. 转到 '...'
2. 点击 '....'
3. 向下滚动到 '....'
4. 看到错误

记得尽可能为每个步骤附上截图/录屏!
validations:
required: true
```

(file name not captured)

```
@@ -1,7 +1,7 @@
name: 💡 功能建议 (中文)
description: 为项目提出新的想法
title: '[功能]: '
labels: ['enhancement']
labels: ['kind/enhancement']
body:
- type: markdown
attributes:
```
.github/ISSUE_TEMPLATE/#2_question.yml (4 changes, vendored)

```
@@ -1,7 +1,7 @@
name: ❓ 讨论 & 提问 (中文)
name: ❓ 提问 & 讨论 (中文)
description: 寻求帮助、讨论问题、提出疑问等...
title: '[讨论]: '
labels: ['question']
labels: ['kind/question']
body:
- type: markdown
attributes:
```
.github/ISSUE_TEMPLATE/#3_others.yml (new file, 76 lines, vendored)

```
@@ -0,0 +1,76 @@
name: 🤔 其他问题 (中文)
description: 提交不属于错误报告或功能需求的问题
title: '[其他]: '
body:
- type: markdown
attributes:
value: |
感谢您花时间提出问题!
在提交此问题之前,请确保您已经了解了[常见问题](https://docs.cherry-ai.com/question-contact/questions)和[知识科普](https://docs.cherry-ai.com/question-contact/knowledge)

- type: checkboxes
id: checklist
attributes:
label: 提交前检查
description: |
在提交 Issue 前请确保您已经完成了以下所有步骤
options:
- label: 我理解 Issue 是用于反馈和解决问题的,而非吐槽评论区,将尽可能提供更多信息帮助问题解决。
required: true
- label: 我已经查看了置顶 Issue 并搜索了现有的 [开放Issue](https://github.com/CherryHQ/cherry-studio/issues)和[已关闭Issue](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed%20),没有找到类似的问题。
required: true
- label: 我填写了简短且清晰明确的标题,以便开发者在翻阅 Issue 列表时能快速确定大致问题。而不是"一个问题"、"求助"等。
required: true
- label: 我的问题不属于错误报告或功能需求类别。
required: true

- type: dropdown
id: platform
attributes:
label: 平台
description: 您正在使用哪个平台?
options:
- Windows
- macOS
- Linux
validations:
required: true

- type: input
id: version
attributes:
label: 版本
description: 您正在运行的 Cherry Studio 版本是什么?
placeholder: 例如 v1.0.0
validations:
required: true

- type: textarea
id: question
attributes:
label: 问题描述
description: 请详细描述您的问题或疑问
placeholder: 我想了解有关...的更多信息
validations:
required: true

- type: textarea
id: context
attributes:
label: 相关背景
description: 请提供与您的问题相关的任何背景信息或上下文
placeholder: 我尝试实现...时遇到了疑问
validations:
required: true

- type: textarea
id: attempts
attributes:
label: 您已尝试的方法
description: 请描述您为解决问题已经尝试过的方法(如果有)

- type: textarea
id: additional
attributes:
label: 附加信息
description: 任何能让我们对您的问题有更多了解的信息,包括截图或相关链接
```
.github/ISSUE_TEMPLATE/0_bug_report.yml (10 changes, vendored)

```
@@ -1,13 +1,13 @@
name: 🐛 Bug Report (English)
description: Create a report to help us improve
title: '[Bug]: '
labels: ['bug']
labels: ['kind/bug']
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to fill out this bug report!
Before submitting this issue, please make sure that you have understood the [FAQ](https://docs.cherry-ai.com/question-contact/questions) and [Knowledge Science](https://docs.cherry-ai.com/question-contact/knowledge)
Thank you for taking the time to fill out this bug report!
Before submitting this issue, please make sure that you have understood the [FAQ](https://docs.cherry-ai.com/question-contact/questions) and [Knowledge Science](https://docs.cherry-ai.com/question-contact/knowledge)

- type: checkboxes
id: checklist
@@ -18,7 +18,9 @@ body:
options:
- label: I understand that issues are for feedback and problem solving, not for complaining in the comment section, and will provide as much information as possible to help solve the problem.
required: true
- label: I've looked at pinned issues and searched for existing [Open Issues](https://github.com/CherryHQ/cherry-studio/issues), [Closed Issues](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed), and [Discussions](https://github.com/CherryHQ/cherry-studio/discussions), no similar issue or discussion was found.
- label: My issue is not listed in the [FAQ](https://github.com/CherryHQ/cherry-studio/issues/3881).
required: true
- label: I've looked at **pinned issues** and searched for existing [Open Issues](https://github.com/CherryHQ/cherry-studio/issues), [Closed Issues](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed), and [Discussions](https://github.com/CherryHQ/cherry-studio/discussions), no similar issue or discussion was found.
required: true
- label: I've filled in short, clear headings so that developers can quickly identify a rough idea of what to expect when flipping through the list of issues. And not "a suggestion", "stuck", etc.
required: true
```
.github/ISSUE_TEMPLATE/1_feature_request.yml (2 changes, vendored)

```
@@ -1,7 +1,7 @@
name: 💡 Feature Request (English)
description: Suggest an idea for this project
title: '[Feature]: '
labels: ['enhancement']
labels: ['kind/enhancement']
body:
- type: markdown
attributes:
```
.github/ISSUE_TEMPLATE/2_question.yml (4 changes, vendored)

```
@@ -1,7 +1,7 @@
name: ❓ Discussion & Questions
name: ❓ Questions & Discussion
description: Seeking help, discussing issues, asking questions, etc...
title: '[Discussion]: '
labels: ['question']
labels: ['kind/question']
body:
- type: markdown
attributes:
```
.github/ISSUE_TEMPLATE/3_others.yml (new file, 76 lines, vendored)

```
@@ -0,0 +1,76 @@
name: 🤔 Other Questions (English)
description: Submit questions that don't fit into bug reports or feature requests
title: '[Other]: '
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to ask a question!
Before submitting this issue, please make sure you've reviewed the [FAQ](https://docs.cherry-ai.com/question-contact/questions) and [Knowledge Base](https://docs.cherry-ai.com/question-contact/knowledge)

- type: checkboxes
id: checklist
attributes:
label: Pre-submission Checklist
description: |
Please ensure you've completed all the steps below before submitting your issue
options:
- label: I understand that Issues are for feedback and problem-solving, not for complaints, and I will provide as much information as possible to help resolve the issue.
required: true
- label: I have checked the pinned Issues and searched through existing [open Issues](https://github.com/CherryHQ/cherry-studio/issues) and [closed Issues](https://github.com/CherryHQ/cherry-studio/issues?q=is%3Aissue%20state%3Aclosed%20) and didn't find similar questions.
required: true
- label: I have written a short and clear title that helps developers quickly understand the nature of my question, rather than vague titles like "A question" or "Help needed".
required: true
- label: My question doesn't fall under bug reports or feature requests categories.
required: true

- type: dropdown
id: platform
attributes:
label: Platform
description: Which platform are you using?
options:
- Windows
- macOS
- Linux
validations:
required: true

- type: input
id: version
attributes:
label: Version
description: What version of Cherry Studio are you running?
placeholder: e.g., v1.0.0
validations:
required: true

- type: textarea
id: question
attributes:
label: Question Description
description: Please describe your question or inquiry in detail
placeholder: I would like to know more about...
validations:
required: true

- type: textarea
id: context
attributes:
label: Relevant Context
description: Please provide any background information or context related to your question
placeholder: I encountered this question while trying to implement...
validations:
required: true

- type: textarea
id: attempts
attributes:
label: Attempted Solutions
description: Please describe any methods you've already tried to resolve your question (if applicable)

- type: textarea
id: additional
attributes:
label: Additional Information
description: Any other information that could help us better understand your question, including screenshots or relevant links
```
.github/issue-checker.yml (new file, 252 lines, vendored)

```
@@ -0,0 +1,252 @@
default-mode:
add:
remove: [pull_request_target, issues]

labels:
# <!-- [Ss]kip `LABEL` --> 跳过一个 label
# <!-- [Rr]emove `LABEL` --> 去掉一个 label

# skips and removes
- name: skip all
content:
regexes: "[Ss]kip (?:[Aa]ll |)[Ll]abels?"
- name: remove all
content:
regexes: "[Rr]emove (?:[Aa]ll |)[Ll]abels?"

- name: skip kind/bug
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)kind/bug(?:`|)"
- name: remove kind/bug
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)kind/bug(?:`|)"

- name: skip kind/enhancement
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)kind/enhancement(?:`|)"
- name: remove kind/enhancement
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)kind/enhancement(?:`|)"

- name: skip kind/question
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)kind/question(?:`|)"
- name: remove kind/question
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)kind/question(?:`|)"

- name: skip area/Connectivity
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)area/Connectivity(?:`|)"
- name: remove area/Connectivity
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)area/Connectivity(?:`|)"

- name: skip area/UI/UX
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)area/UI/UX(?:`|)"
- name: remove area/UI/UX
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)area/UI/UX(?:`|)"

- name: skip kind/documentation
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)kind/documentation(?:`|)"
- name: remove kind/documentation
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)kind/documentation(?:`|)"

- name: skip client:linux
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)client:linux(?:`|)"
- name: remove client:linux
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)client:linux(?:`|)"

- name: skip client:mac
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)client:mac(?:`|)"
- name: remove client:mac
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)client:mac(?:`|)"

- name: skip client:win
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)client:win(?:`|)"
- name: remove client:win
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)client:win(?:`|)"

- name: skip sig/Assistant
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)sig/Assistant(?:`|)"
- name: remove sig/Assistant
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)sig/Assistant(?:`|)"

- name: skip sig/Data
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)sig/Data(?:`|)"
- name: remove sig/Data
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)sig/Data(?:`|)"

- name: skip sig/MCP
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)sig/MCP(?:`|)"
- name: remove sig/MCP
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)sig/MCP(?:`|)"

- name: skip sig/RAG
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)sig/RAG(?:`|)"
- name: remove sig/RAG
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)sig/RAG(?:`|)"

- name: skip lgtm
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)lgtm(?:`|)"
- name: remove lgtm
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)lgtm(?:`|)"

- name: skip License
content:
regexes: "[Ss]kip (?:[Ll]abels? |)(?:`|)License(?:`|)"
- name: remove License
content:
regexes: "[Rr]emove (?:[Ll]abels? |)(?:`|)License(?:`|)"

# `Dev Team`
- name: Dev Team
mode:
add: [pull_request_target, issues]
author_association:
- COLLABORATOR

# Area labels
- name: area/Connectivity
content: area/Connectivity
regexes: "代理|[Pp]roxy"
skip-if:
- skip all
- skip area/Connectivity
remove-if:
- remove all
- remove area/Connectivity

- name: area/UI/UX
content: area/UI/UX
regexes: "界面|[Uu][Ii]|重叠|按钮|图标|组件|渲染|菜单|栏目|头像|主题|样式|[Cc][Ss][Ss]"
skip-if:
- skip all
- skip area/UI/UX
remove-if:
- remove all
- remove area/UI/UX

# Kind labels
- name: kind/documentation
content: kind/documentation
regexes: "文档|教程|[Dd]oc(s|umentation)|[Rr]eadme"
skip-if:
- skip all
- skip kind/documentation
remove-if:
- remove all
- remove kind/documentation

# Client labels
- name: client:linux
content: client:linux
regexes: "(?:[Ll]inux|[Uu]buntu|[Dd]ebian)"
skip-if:
- skip all
- skip client:linux
remove-if:
- remove all
- remove client:linux

- name: client:mac
content: client:mac
regexes: "(?:[Mm]ac|[Mm]acOS|[Oo]SX)"
skip-if:
- skip all
- skip client:mac
remove-if:
- remove all
- remove client:mac

- name: client:win
content: client:win
regexes: "(?:[Ww]in|[Ww]indows)"
skip-if:
- skip all
- skip client:win
remove-if:
- remove all
- remove client:win

# SIG labels
- name: sig/Assistant
content: sig/Assistant
regexes: "快捷助手|[Aa]ssistant"
skip-if:
- skip all
- skip sig/Assistant
remove-if:
- remove all
- remove sig/Assistant

- name: sig/Data
content: sig/Data
regexes: "[Ww]ebdav|坚果云|备份|同步|数据|Obsidian|Notion|Joplin|思源"
skip-if:
- skip all
- skip sig/Data
remove-if:
- remove all
- remove sig/Data

- name: sig/MCP
content: sig/MCP
regexes: "[Mm][Cc][Pp]"
skip-if:
- skip all
- skip sig/MCP
remove-if:
- remove all
- remove sig/MCP

- name: sig/RAG
content: sig/RAG
regexes: "知识库|[Rr][Aa][Gg]"
skip-if:
- skip all
- skip sig/RAG
remove-if:
- remove all
- remove sig/RAG

# Other labels
- name: lgtm
content: lgtm
regexes: "(?:[Ll][Gg][Tt][Mm]|[Ll]ooks [Gg]ood [Tt]o [Mm]e)"
skip-if:
- skip all
- skip lgtm
remove-if:
- remove all
- remove lgtm

- name: License
content: License
regexes: "(?:[Ll]icense|[Cc]opyright|[Mm][Ii][Tt]|[Aa]pache)"
skip-if:
- skip all
- skip License
remove-if:
- remove all
- remove License
```
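To illustrate how the comment-command patterns in the configuration above behave, here is a minimal sketch (not part of the diff): the regex string for the `remove kind/bug` trigger is copied from the configuration, while the sample comments are hypothetical.

```ts
// Sketch only: exercising the "remove kind/bug" trigger pattern from the config above.
// The regex string is taken verbatim from the configuration; the comments are made up.
const removeKindBug = /[Rr]emove (?:[Ll]abels? |)(?:`|)kind\/bug(?:`|)/;

const sampleComments = [
  "Remove `kind/bug`",      // matches: the backticks and the "label(s)" word are both optional
  "remove label kind/bug",  // matches
  "Removed kind/bug",       // does not match: the pattern expects "remove " followed by the label
];

for (const comment of sampleComments) {
  console.log(`${comment} -> ${removeKindBug.test(comment)}`);
}
```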
.github/pull_request_template.md (new file, 54 lines, vendored)

````markdown
<!-- Template from https://github.com/kubevirt/kubevirt/blob/main/.github/PULL_REQUEST_TEMPLATE.md?-->
<!-- Thanks for sending a pull request! Here are some tips for you:
1. Consider creating this PR as draft: https://github.com/CherryHQ/cherry-studio/blob/main/CONTRIBUTING.md
-->

### What this PR does

Before this PR:

After this PR:

<!-- (optional, in `fixes #<issue number>(, fixes #<issue_number>, ...)` format, will close the issue(s) when PR gets merged)*: -->

Fixes #

### Why we need it and why it was done in this way

The following tradeoffs were made:

The following alternatives were considered:

Links to places where the discussion took place: <!-- optional: slack, other GH issue, mailinglist, ... -->

### Breaking changes

<!-- optional -->

If this PR introduces breaking changes, please describe the changes and the impact on users.

### Special notes for your reviewer

<!-- optional -->

### Checklist

This checklist is not enforcing, but it's a reminder of items that could be relevant to every PR.
Approvers are expected to review this list.

- [ ] PR: The PR description is expressive enough and will help future contributors
- [ ] Code: [Write code that humans can understand](https://en.wikiquote.org/wiki/Martin_Fowler#code-for-humans) and [Keep it simple](https://en.wikipedia.org/wiki/KISS_principle)
- [ ] Refactor: You have [left the code cleaner than you found it (Boy Scout Rule)](https://learning.oreilly.com/library/view/97-things-every/9780596809515/ch08.html)
- [ ] Upgrade: Impact of this change on upgrade flows was considered and addressed if required
- [ ] Documentation: A [user-guide update](https://docs.cherry-ai.com) was considered and is present (link) or not required. You want a user-guide update if it's a user facing feature.

### Release note

<!-- Write your release note:
1. Enter your extended release note in the below block. If the PR requires additional action from users switching to the new release, include the string "action required".
2. If no release note is required, just write "NONE".
-->

```release-note

```
````
.github/workflows/issue-checker.yml (new file, 25 lines, vendored)

```
@@ -0,0 +1,25 @@
name: "Issue Checker"

on:
issues:
types: [opened, edited]
pull_request_target:
types: [opened, edited]
issue_comment:
types: [created, edited]

permissions:
contents: read
issues: write
pull-requests: write

jobs:
triage:
runs-on: ubuntu-latest
steps:
- uses: MaaAssistantArknights/issue-checker@v1.14
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
configuration-path: .github/issue-checker.yml
not-before: 2022-08-05T00:00:00Z
include-title: 1
```
.github/workflows/issue-management.yml (new file, 58 lines, vendored)

```
@@ -0,0 +1,58 @@
name: "Stale Issue Management"

on:
schedule:
- cron: "0 0 * * *"
workflow_dispatch:

env:
daysBeforeStale: 30 # Number of days of inactivity before marking as stale
daysBeforeClose: 10 # Number of days to wait after marking as stale before closing

jobs:
stale:
if: github.repository_owner == 'CherryHQ'
runs-on: ubuntu-latest
permissions:
actions: write # Workaround for https://github.com/actions/stale/issues/1090
issues: write
# Completely disable stalling for PRs
pull-requests: none
contents: none
steps:
- name: Close needs-more-info issues
uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
only-labels: "needs-more-info"
days-before-stale: ${{ env.daysBeforeStale }}
days-before-close: 0 # Close immediately after stale
stale-issue-label: "inactive"
close-issue-label: "closed:no-response"
stale-issue-message: |
This issue has been labeled as needing more information and has been inactive for ${{ env.daysBeforeStale }} days.
It will be closed now due to lack of additional information.

该问题被标记为"需要更多信息"且已经 ${{ env.daysBeforeStale }} 天没有任何活动,将立即关闭。
operations-per-run: 50
exempt-issue-labels: "pending, Dev Team"
days-before-pr-stale: -1
days-before-pr-close: -1

- name: Close inactive issues
uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: ${{ env.daysBeforeStale }}
days-before-close: ${{ env.daysBeforeClose }}
stale-issue-label: "inactive"
stale-issue-message: |
This issue has been inactive for a prolonged period and will be closed automatically in ${{ env.daysBeforeClose }} days.
该问题已长时间处于闲置状态,${{ env.daysBeforeClose }} 天后将自动关闭。
exempt-issue-labels: "pending, Dev Team, kind/enhancement"
days-before-pr-stale: -1 # Completely disable stalling for PRs
days-before-pr-close: -1 # Completely disable closing for PRs

# Temporary to reduce the huge issues number
operations-per-run: 100
debug-only: false
```
.github/workflows/nightly-build.yml (76 changes, vendored)

```
@@ -7,9 +7,41 @@ on:

permissions:
contents: write
actions: write # Required for deleting artifacts

jobs:
cleanup-artifacts:
runs-on: ubuntu-latest
steps:
- name: Delete old artifacts
env:
GH_TOKEN: ${{ github.token }}
REPO: ${{ github.repository }}
run: |
# Calculate the date 14 days ago
cutoff_date=$(date -d "14 days ago" +%Y-%m-%d)

# List and delete artifacts older than cutoff date
gh api repos/$REPO/actions/artifacts --paginate | \
jq -r '.artifacts[] | select(.name | startswith("cherry-studio-nightly-")) | select(.created_at < "'$cutoff_date'") | .id' | \
while read artifact_id; do
echo "Deleting artifact $artifact_id"
gh api repos/$REPO/actions/artifacts/$artifact_id -X DELETE
done

check-repository:
runs-on: ubuntu-latest
outputs:
should_run: ${{ github.repository == 'CherryHQ/cherry-studio' }}
steps:
- name: Check if running in main repository
run: |
echo "Running in repository: ${{ github.repository }}"
echo "Should run: ${{ github.repository == 'CherryHQ/cherry-studio' }}"

nightly-build:
needs: check-repository
if: needs.check-repository.outputs.should_run == 'true'
runs-on: ${{ matrix.os }}

strategy:
@@ -26,6 +58,11 @@ jobs:
with:
node-version: 20

- name: macos-latest dependencies fix
if: matrix.os == 'macos-latest'
run: |
brew install python-setuptools

- name: Install corepack
run: corepack enable && corepack prepare yarn@4.6.0 --activate

@@ -59,6 +96,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
NODE_OPTIONS: --max-old-space-size=8192

- name: Build Mac
if: matrix.os == 'macos-latest'
@@ -73,16 +111,17 @@ jobs:
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192

- name: Build Windows
if: matrix.os == 'windows-latest'
run: yarn build:win
run: |
yarn build:npm windows
yarn build:win
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}

- name: Replace spaces in filenames
run: node scripts/replace-spaces.js
NODE_OPTIONS: --max-old-space-size=8192

- name: Rename artifacts with nightly format
shell: bash
@@ -93,39 +132,24 @@ jobs:
# Windows artifacts - based on actual file naming pattern
if [ "${{ matrix.os }}" == "windows-latest" ]; then
# Setup installer
find dist -name "*setup.exe" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-setup.exe \;

# Portable exe
find dist -name "*portable.exe" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-portable.exe \;
find dist -name "*-x64-setup.exe" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64-setup.exe \;
find dist -name "*-arm64-setup.exe" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64-setup.exe \;

# Rename blockmap files to match the new exe names
if [ -f "dist/*setup.exe.blockmap" ]; then
cp dist/*setup.exe.blockmap renamed-artifacts/cherry-studio-nightly-${DATE}-setup.exe.blockmap || true
fi
# Portable exe
find dist -name "*-x64-portable.exe" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64-portable.exe \;
find dist -name "*-arm64-portable.exe" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64-portable.exe \;
fi

# macOS artifacts
if [ "${{ matrix.os }}" == "macos-latest" ]; then
# 处理arm64架构文件
find dist -name "*-arm64.dmg" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64.dmg \;
find dist -name "*-arm64.dmg.blockmap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64.dmg.blockmap \;
find dist -name "*-arm64.zip" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64.zip \;
find dist -name "*-arm64.zip.blockmap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64.zip.blockmap \;

# 处理x64架构文件
find dist -name "*-x64.dmg" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64.dmg \;
find dist -name "*-x64.dmg.blockmap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64.dmg.blockmap \;
find dist -name "*-x64.zip" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64.zip \;
find dist -name "*-x64.zip.blockmap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x64.zip.blockmap \;
fi

# Linux artifacts
if [ "${{ matrix.os }}" == "ubuntu-latest" ]; then
find dist -name "*.AppImage" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.AppImage \;
find dist -name "*.snap" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.snap \;
find dist -name "*.deb" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.deb \;
find dist -name "*.rpm" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.rpm \;
find dist -name "*.tar.gz" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}.tar.gz \;
find dist -name "*-x86_64.AppImage" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-x86_64.AppImage \;
find dist -name "*-arm64.AppImage" -exec cp {} renamed-artifacts/cherry-studio-nightly-${DATE}-arm64.AppImage \;
fi

# Copy update files
```
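A side note on the artifact-cleanup step in the nightly workflow above: the jq filter compares `created_at` against the cutoff as plain strings, which works because ISO-8601 timestamps sort lexicographically. A minimal sketch of the same comparison, with hypothetical artifact names and dates (not part of the workflow):

```ts
// Sketch only: string comparison of ISO-8601 timestamps mirrors the jq filter above.
const cutoff = "2025-05-01"; // hypothetical cutoff date, formatted like $(date +%Y-%m-%d)
const artifacts = [
  { name: "cherry-studio-nightly-2025-04-10-x64-setup.exe", created_at: "2025-04-10T03:12:00Z" },
  { name: "cherry-studio-nightly-2025-05-20-x64-setup.exe", created_at: "2025-05-20T03:12:00Z" },
];

// Same two conditions as the jq expression: name prefix and created_at older than the cutoff.
const stale = artifacts.filter(
  (a) => a.name.startsWith("cherry-studio-nightly-") && a.created_at < cutoff
);
console.log(stale.map((a) => a.name)); // only the 2025-04-10 artifact is older than the cutoff
```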
.github/workflows/release.yml (18 changes, vendored)

```
@@ -6,7 +6,7 @@ on:
tag:
description: 'Release tag (e.g. v1.0.0)'
required: true
default: 'v0.9.18'
default: 'v1.0.0'
push:
tags:
- v*.*.*
@@ -42,6 +42,11 @@ jobs:
with:
node-version: 20

- name: macos-latest dependencies fix
if: matrix.os == 'macos-latest'
run: |
brew install python-setuptools

- name: Install corepack
run: corepack enable && corepack prepare yarn@4.6.0 --activate

@@ -71,10 +76,12 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
NODE_OPTIONS: --max-old-space-size=8192

- name: Build Mac
if: matrix.os == 'macos-latest'
run: |
sudo -H pip install setuptools
yarn build:npm mac
yarn build:mac
env:
@@ -85,16 +92,17 @@ jobs:
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192

- name: Build Windows
if: matrix.os == 'windows-latest'
run: yarn build:win
run: |
yarn build:npm windows
yarn build:win
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}

- name: Replace spaces in filenames
run: node scripts/replace-spaces.js
NODE_OPTIONS: --max-old-space-size=8192

- name: Release
uses: ncipollo/release-action@v1
```
.gitignore (6 changes, vendored)

```
@@ -35,7 +35,6 @@ Thumbs.db
node_modules
dist
out
build/icons
stats.html

# ENV
@@ -47,3 +46,8 @@ local
.aider*
.cursorrules
.cursor/rules

# test
coverage
.vitest-cache
vitest.config.*.timestamp-*
```
.vscode/launch.json (1 change, vendored)

```
@@ -7,6 +7,7 @@
"request": "launch",
"cwd": "${workspaceRoot}",
"runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron-vite",
"runtimeVersion": "20",
"windows": {
"runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron-vite.cmd"
},
```
.vscode/settings.json (10 changes, vendored)

```
@@ -31,5 +31,13 @@
"[markdown]": {
"files.trimTrailingWhitespace": false
},
"i18n-ally.localesPaths": ["src/renderer/src/i18n"]
"i18n-ally.localesPaths": ["src/renderer/src/i18n/locales"],
"i18n-ally.enabledFrameworks": ["react-i18next", "i18next"],
"i18n-ally.keystyle": "nested", // 翻译路径格式
"i18n-ally.sortKeys": true, // 排序
"i18n-ally.namespace": true, // 开启命名空间
"i18n-ally.enabledParsers": ["ts", "js", "json"], // 解析语言
"i18n-ally.sourceLanguage": "en-us", // 翻译源语言
"i18n-ally.displayLanguage": "zh-cn",
"i18n-ally.fullReloadOnChanged": true // 界面显示语言
}
```
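For context on the i18n-ally settings above: `keystyle: "nested"` combined with `namespace: true` means locale resources are nested objects whose top-level keys act as namespaces. A minimal hypothetical sketch of such a locale module (the file name, namespaces, and keys below are invented for illustration only):

```ts
// Hypothetical en-us locale module illustrating the "nested" keystyle with namespaces.
// None of these keys are taken from the repository; they only show the shape implied above.
const enUS = {
  settings: {             // top-level key acts as the namespace
    title: 'Settings',
    language: 'Language',
  },
  chat: {
    send: 'Send',
  },
}

export default enUS
```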
.yarn/patches/app-builder-lib-npm-26.0.13-a064c9e1d0.patch (new file, 92 lines, vendored)

```diff
diff --git a/out/electron/ElectronFramework.js b/out/electron/ElectronFramework.js
index 5a4b4546870ee9e770d5a50d79790d39baabd268..3f0ac05dfd6bbaeaf5f834341a823718bd10f55c 100644
--- a/out/electron/ElectronFramework.js
+++ b/out/electron/ElectronFramework.js
@@ -55,26 +55,27 @@ async function removeUnusedLanguagesIfNeeded(options) {
 if (!wantedLanguages.length) {
 return;
 }
- const { dir, langFileExt } = getLocalesConfig(options);
+ const { dirs, langFileExt } = getLocalesConfig(options);
 // noinspection SpellCheckingInspection
- await (0, tiny_async_pool_1.default)(builder_util_1.MAX_FILE_REQUESTS, await (0, fs_extra_1.readdir)(dir), async (file) => {
- if (!file.endsWith(langFileExt)) {
+ const deletedFiles = async (dir) => {
+ await (0, tiny_async_pool_1.default)(builder_util_1.MAX_FILE_REQUESTS, await (0, fs_extra_1.readdir)(dir), async (file) => {
+ if (!file.endsWith(langFileExt)) {
+ return;
+ }
+ const language = file.substring(0, file.length - langFileExt.length);
+ if (!wantedLanguages.includes(language)) {
+ return fs.rm(path.join(dir, file), { recursive: true, force: true });
+ }
 return;
- }
- const language = file.substring(0, file.length - langFileExt.length);
- if (!wantedLanguages.includes(language)) {
- return fs.rm(path.join(dir, file), { recursive: true, force: true });
- }
- return;
- });
+ });
+ };
+ await Promise.all(dirs.map(deletedFiles));
 function getLocalesConfig(options) {
 const { appOutDir, packager } = options;
 if (packager.platform === index_1.Platform.MAC) {
- return { dir: packager.getResourcesDir(appOutDir), langFileExt: ".lproj" };
- }
- else {
- return { dir: path.join(packager.getResourcesDir(appOutDir), "..", "locales"), langFileExt: ".pak" };
+ return { dirs: [packager.getResourcesDir(appOutDir), packager.getMacOsElectronFrameworkResourcesDir(appOutDir)], langFileExt: ".lproj" };
 }
+ return { dirs: [path.join(packager.getResourcesDir(appOutDir), "..", "locales")], langFileExt: ".pak" };
 }
 }
 class ElectronFramework {
diff --git a/out/node-module-collector/index.d.ts b/out/node-module-collector/index.d.ts
index 8e808be0fa0d5971b9f9605c8eb88f71630e34b7..1b97dccd8a150a67c4312d2ba4757960e624045b 100644
--- a/out/node-module-collector/index.d.ts
+++ b/out/node-module-collector/index.d.ts
@@ -2,6 +2,6 @@ import { NpmNodeModulesCollector } from "./npmNodeModulesCollector";
 import { PnpmNodeModulesCollector } from "./pnpmNodeModulesCollector";
 import { detect, PM, getPackageManagerVersion } from "./packageManager";
 import { NodeModuleInfo } from "./types";
-export declare function getCollectorByPackageManager(rootDir: string): Promise<NpmNodeModulesCollector | PnpmNodeModulesCollector>;
+export declare function getCollectorByPackageManager(rootDir: string): Promise<PnpmNodeModulesCollector | NpmNodeModulesCollector>;
 export declare function getNodeModules(rootDir: string): Promise<NodeModuleInfo[]>;
 export { detect, getPackageManagerVersion, PM };
diff --git a/out/platformPackager.d.ts b/out/platformPackager.d.ts
index 2df1ba2725c54c7b0e8fed67ab52e94f0cdb17bc..c7ff756564cfd216d2c7d8f72f367527010c06f9 100644
--- a/out/platformPackager.d.ts
+++ b/out/platformPackager.d.ts
@@ -67,6 +67,7 @@ export declare abstract class PlatformPackager<DC extends PlatformSpecificBuildO
 getElectronSrcDir(dist: string): string;
 getElectronDestinationDir(appOutDir: string): string;
 getResourcesDir(appOutDir: string): string;
+ getMacOsElectronFrameworkResourcesDir(appOutDir: string): string;
 getMacOsResourcesDir(appOutDir: string): string;
 private checkFileInPackage;
 private sanityCheckPackage;
diff --git a/out/platformPackager.js b/out/platformPackager.js
index 6f799ce0d1cdb5f0b18a9c8187b2db84b3567aa9..879248e6c6786d3473e1a80e3930d3a8d0190aab 100644
--- a/out/platformPackager.js
+++ b/out/platformPackager.js
@@ -465,12 +465,13 @@ class PlatformPackager {
 if (this.platform === index_1.Platform.MAC) {
 return this.getMacOsResourcesDir(appOutDir);
 }
- else if ((0, Framework_1.isElectronBased)(this.info.framework)) {
+ if ((0, Framework_1.isElectronBased)(this.info.framework)) {
 return path.join(appOutDir, "resources");
 }
- else {
- return appOutDir;
- }
+ return appOutDir;
+ }
+ getMacOsElectronFrameworkResourcesDir(appOutDir) {
+ return path.join(appOutDir, `${this.appInfo.productFilename}.app`, "Contents", "Frameworks", "Electron Framework.framework", "Resources");
 }
 getMacOsResourcesDir(appOutDir) {
 return path.join(appOutDir, `${this.appInfo.productFilename}.app`, "Contents", "Resources");
```
.yarn/patches/electron-updater-npm-6.6.3-9269dbaf84.patch (new file, 51 lines, vendored)

```diff
diff --git a/out/MacUpdater.js b/out/MacUpdater.js
index 8f18dc5416c91835ded4e47f2358fba680c129ac..a3fb43c2450dc3484bf099b5ea79a362a3b372cc 100644
--- a/out/MacUpdater.js
+++ b/out/MacUpdater.js
@@ -74,7 +74,7 @@ class MacUpdater extends AppUpdater_1.AppUpdater {
 else {
 files = files.filter(file => !isArm64(file));
 }
- const zipFileInfo = (0, Provider_1.findFile)(files, "zip", ["pkg", "dmg"]);
+ const zipFileInfo = (0, Provider_1.findFile)(files, "zip", ["pkg", "dmg"], false /*has been filtered by myself*/);
 if (zipFileInfo == null) {
 throw (0, builder_util_runtime_1.newError)(`ZIP file not provided: ${(0, builder_util_runtime_1.safeStringifyJson)(files)}`, "ERR_UPDATER_ZIP_FILE_NOT_FOUND");
 }
diff --git a/out/providers/Provider.js b/out/providers/Provider.js
index 9829dff7e95aa9baa0bfdf29f52e6f761c9b7243..6ecaade9e294c87c03bb42e77ff5463f2782cb3c 100644
--- a/out/providers/Provider.js
+++ b/out/providers/Provider.js
@@ -61,11 +61,18 @@ class Provider {
 }
 }
 exports.Provider = Provider;
-function findFile(files, extension, not) {
+function findFile(files, extension, not, filterByArch = true) {
 if (files.length === 0) {
 throw (0, builder_util_runtime_1.newError)("No files provided", "ERR_UPDATER_NO_FILES_PROVIDED");
 }
- const result = files.find(it => it.url.pathname.toLowerCase().endsWith(`.${extension.toLowerCase()}`));
+ const result = files
+ .filter(file => {
+ if (!filterByArch) {
+ return true;
+ }
+ return (process.arch == "arm64") === (file.url.pathname.includes("arm64") || file.info.url.includes("arm64"));
+ })
+ .find(it => it.url.pathname.toLowerCase().endsWith(`.${extension.toLowerCase()}`));
 if (result != null) {
 return result;
 }
diff --git a/out/differentialDownloader/multipleRangeDownloader.js b/out/differentialDownloader/multipleRangeDownloader.js
index bf7d3a2982c62b94054fed4ef60455b20b26d622..3a924eddc946ec446654a112a33be4e2cea311d2 100644
--- a/out/differentialDownloader/multipleRangeDownloader.js
+++ b/out/differentialDownloader/multipleRangeDownloader.js
@@ -75,7 +75,7 @@ function doExecuteTasks(differentialDownloader, options, out, resolve, reject) {
 return;
 }
 const contentType = (0, builder_util_runtime_1.safeGetHeader)(response, "content-type");
- const m = /^multipart\/.+?(?:; boundary=(?:(?:"(.+)")|(?:([^\s]+))))$/i.exec(contentType);
+ const m = /^multipart\/.+?\s*;\s*boundary=(?:"([^"]+)"|([^\s";]+))\s*$/i.exec(contentType);
 if (m == null) {
 reject(new Error(`Content-Type "multipart/byteranges" is expected, but got "${contentType}"`));
 return;
```
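As an illustration of why the Content-Type boundary regex in the patch above was loosened (this sketch is not part of the patch, and the header values are hypothetical): the original pattern required exactly `; boundary=` with a single space and no trailing whitespace, while the patched pattern tolerates flexible spacing around the semicolon and a trailing-whitespace suffix.

```ts
// Sketch only: both regexes are copied from the patch above; the headers are made up.
const original = /^multipart\/.+?(?:; boundary=(?:(?:"(.+)")|(?:([^\s]+))))$/i;
const patched = /^multipart\/.+?\s*;\s*boundary=(?:"([^"]+)"|([^\s";]+))\s*$/i;

const headers = [
  'multipart/byteranges; boundary=abc123',     // canonical form: both match
  'multipart/byteranges;boundary=abc123',      // no space after ";": only the patched regex matches
  'multipart/byteranges; boundary="abc123" ',  // quoted value + trailing space: only the patched regex matches
];

for (const h of headers) {
  console.log(`${h} | original: ${original.test(h)} | patched: ${patched.test(h)}`);
}
```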
.yarn/patches/libsql-npm-0.4.7-444e260fb1.patch (new file, 17 lines, vendored)

```diff
diff --git a/index.js b/index.js
index 4e8423491ab51a9eb9fee22182e4ea0fcc3d3d3b..2846c5d4354c130d478dc99565b3ecd6d85b7d2e 100644
--- a/index.js
+++ b/index.js
@@ -19,7 +19,11 @@ function requireNative() {
 break;
 }
 }
- return require(`@libsql/${target}`);
+ if (target === "win32-arm64-msvc") {
+ return require(`@strongtz/win32-arm64-msvc`);
+ } else {
+ return require(`@libsql/${target}`);
+ }
 }

 const {
```
(file name not captured)

```diff
@@ -1,8 +1,8 @@
 diff --git a/core.js b/core.js
index e75a18281ce8f051990c5a50bc1076afdddf91a3..e62f796791a155f23d054e74a429516c14d6e11b 100644
index ebb071d31cd5a14792b62814df072c5971e83300..31e1062d4a7f2422ffec79cf96a35dbb69fe89cb 100644
 --- a/core.js
 +++ b/core.js
@@ -156,7 +156,7 @@ class APIClient {
@@ -157,7 +157,7 @@ class APIClient {
 Accept: 'application/json',
 'Content-Type': 'application/json',
 'User-Agent': this.getUserAgent(),
@@ -12,10 +12,10 @@ index e75a18281ce8f051990c5a50bc1076afdddf91a3..e62f796791a155f23d054e74a429516c
 };
 }
 diff --git a/core.mjs b/core.mjs
index fcef58eb502664c41a77483a00db8adaf29b2817..18c5d6ed4be86b3640931277bdc27700006764d7 100644
index 9c1a0264dcd73a85de1cf81df4efab9ce9ee2ab7..33f9f1f237f2eb2667a05dae1a7e3dc916f6bfff 100644
 --- a/core.mjs
 +++ b/core.mjs
@@ -149,7 +149,7 @@ export class APIClient {
@@ -150,7 +150,7 @@ export class APIClient {
 Accept: 'application/json',
 'Content-Type': 'application/json',
 'User-Agent': this.getUserAgent(),
```
.yarn/releases/yarn-4.6.0.cjs (2 changes, vendored)

(file name not captured)

````
@@ -1,45 +1,73 @@
# Cherry Studio 贡献者指南
[中文](./docs/CONTRIBUTING.zh.md) | [English](./CONTRIBUTING.md)

欢迎来到 Cherry Studio 的贡献者社区!我们致力于将 Cherry Studio 打造成一个长期提供价值的项目,并希望邀请更多的开发者加入我们的行列。无论您是经验丰富的开发者还是刚刚起步的初学者,您的贡献都将帮助我们更好地服务用户,提升软件质量。
# Cherry Studio Contributor Guide

## 如何贡献
Welcome to the Cherry Studio contributor community! We are committed to making Cherry Studio a project that provides long-term value and hope to invite more developers to join us. Whether you are an experienced developer or a beginner just starting out, your contributions will help us better serve users and improve software quality.

以下是您可以参与的几种方式:
## How to Contribute

1. **贡献代码**:帮助我们开发新功能或优化现有代码。请确保您的代码符合我们的编码标准,并通过所有测试。
Here are several ways you can participate:

2. **修复 BUG**:如果您发现了 BUG,欢迎提交修复方案。请在提交前确认问题已被解决,并附上相关测试。
1. **Contribute Code**: Help us develop new features or optimize existing code. Please ensure your code adheres to our coding standards and passes all tests.

3. **维护 Issue**:协助我们管理 GitHub 上的 issue,帮助标记、分类和解决问题。
2. **Fix Bugs**: If you find a bug, you are welcome to submit a fix. Please confirm the issue is resolved before submitting and include relevant tests.

4. **产品设计**:参与产品设计讨论,帮助我们改进用户体验和界面设计。
3. **Maintain Issues**: Help us manage issues on GitHub by assisting with tagging, classifying, and resolving problems.

5. **编写文档**:帮助我们完善用户手册、API 文档和开发者指南。
4. **Product Design**: Participate in product design discussions to help us improve user experience and interface design.

6. **社区维护**:参与社区讨论,帮助解答用户问题,促进社区活跃。
5. **Write Documentation**: Help us improve the user manual, API documentation, and developer guides.

7. **推广使用**:通过博客、社交媒体等渠道推广 Cherry Studio,吸引更多用户和开发者。
6. **Community Maintenance**: Participate in community discussions, help answer user questions, and promote community activity.

## 开始贡献
7. **Promote Usage**: Promote Cherry Studio through blogs, social media, and other channels to attract more users and developers.

1. **Fork 仓库**:在 GitHub 上 fork 我们的仓库,并将其克隆到本地。
## Before You Start

2. **创建分支**:为您要进行的更改创建一个新的分支。
Please make sure you have read the [Code of Conduct](CODE_OF_CONDUCT.md) and the [LICENSE](LICENSE).

3. **提交更改**:在本地进行更改并提交。请确保您的提交信息清晰明了。
## Getting Started

4. **发起 Pull Request**:将您的更改推送到 GitHub,并发起 Pull Request。请描述您的更改内容和原因。
To help you get familiar with the codebase, we recommend tackling issues tagged with one or more of the following labels: [good-first-issue](https://github.com/CherryHQ/cherry-studio/labels/good%20first%20issue), [help-wanted](https://github.com/CherryHQ/cherry-studio/labels/help%20wanted), or [kind/bug](https://github.com/CherryHQ/cherry-studio/labels/kind%2Fbug). Any help is welcome.

### 其他建议
### Testing

- **联系开发者**:在提交 PR 之前,您可以先和开发者进行联系,共同探讨或者获取帮助。
- **成为核心开发者**:如果您能够稳定为项目贡献,恭喜您可以成为项目核心开发者,获取到项目成员身份。
Features without tests are considered non-existent. To ensure code is truly effective, relevant processes should be covered by unit tests and functional tests. Therefore, when considering contributions, please also consider testability. All tests can be run locally without dependency on CI. Please refer to the "Testing" section in the [Developer Guide](docs/dev.md).

## 联系我们
### Automated Testing for Pull Requests

如果您有任何问题或建议,欢迎通过以下方式联系我们:
Automated tests are triggered on pull requests (PRs) opened by members of the Cherry Studio organization, except for draft PRs. PRs opened by new contributors will initially be marked with the `needs-ok-to-test` label and will not be automatically tested. Once a Cherry Studio organization member adds `/ok-to-test` to the PR, the test pipeline will be created.

- 微信:kangfenmao
### Consider Opening Your Pull Request as a Draft

Not all pull requests are ready for review when created. This might be because the author wants to start a discussion, they are not entirely sure if the changes are heading in the right direction, or the changes are not yet complete. Please consider creating these PRs as [draft pull requests](https://github.blog/2019-02-14-introducing-draft-pull-requests/). Draft PRs are skipped by CI, thus saving CI resources. This also means reviewers will not be automatically assigned, and the community will understand that this PR is not yet ready for review.
Reviewers will be assigned after you mark the draft pull request as ready for review.

### Contributor Compliance with Project Terms

We require every contributor to certify that they have the right to legally contribute to our project. Contributors express this by consciously signing their commits, thereby indicating their compliance with the [LICENSE](LICENSE).
A signed commit is one where the commit message includes the following:

You can generate a signed commit using the following command [git commit --signoff](https://git-scm.com/docs/git-commit#Documentation/git-commit.txt---signoff):

```
git commit --signoff -m "Your commit message"
```

### Getting Code Reviewed/Merged

Maintainers are here to help you implement your use case within a reasonable timeframe. They will do their best to review your code and provide constructive feedback promptly. However, if you get stuck during the review process or feel your Pull Request is not receiving the attention it deserves, please contact us via comments in the Issue or through the [Community](README.md#-community).

### Other Suggestions

- **Contact Developers**: Before submitting a PR, you can contact the developers first to discuss or get help.
- **Become a Core Developer**: If you contribute to the project consistently, congratulations, you can become a core developer and gain project membership status. Please check our [Membership Guide](https://github.com/CherryHQ/community/blob/main/docs/membership.en.md).

## Contact Us

If you have any questions or suggestions, feel free to contact us through the following ways:

- WeChat: kangfenmao
- [GitHub Issues](https://github.com/CherryHQ/cherry-studio/issues)

感谢您的支持和贡献!我们期待与您一起将 Cherry Studio 打造成更好的产品。
Thank you for your support and contributions! We look forward to working with you to make Cherry Studio a better product.
````
LICENSE (111 changes)

```
@@ -1,62 +1,87 @@
**许可协议**
**许可协议 (Licensing)**

本软件采用 Apache License 2.0 许可。除 Apache License 2.0 规定的条款外,您在使用 Cherry Studio 时还应遵守以下附加条款:
本项目采用**区分用户的双重许可 (User-Segmented Dual Licensing)** 模式。

**一. 商用许可**
**核心原则:**

在以下任何一种情况下,您需要联系我们并获得明确的书面商业授权后,方可继续使用 Cherry Studio 材料:
* **个人用户 和 10人及以下企业/组织:** 默认适用 **GNU Affero 通用公共许可证 v3.0 (AGPLv3)**。
* **超过10人的企业/组织:** **必须** 获取 **商业许可证 (Commercial License)**。

1. **修改与衍生**: 您对 Cherry Studio 材料进行修改或基于其进行衍生开发(包括但不限于修改应用名称、Logo、代码、功能、界面,数据等)。
2. **企业服务**: 在您的企业内部,或为企业客户提供基于 CherryStudio 的服务,且该服务支持 10 人及以上累计用户使用。
3. **硬件捆绑销售**: 您将 Cherry Studio 预装或集成到硬件设备或产品中进行捆绑销售。
4. **政府或教育机构大规模采购**: 您的使用场景属于政府或教育机构的大规模采购项目,特别是涉及安全、数据隐私等敏感需求时。
5. **面向公众的公有云服务**:基于 Cherry Studio,提供面向公众的公有云服务。

**二. 贡献者协议**

作为 Cherry Studio 的贡献者,您应当同意以下条款:

1. **许可调整**:生产者有权根据需要对开源协议进行调整,使其更加严格或宽松。
2. **商业用途**:您贡献的代码可能会被用于商业用途,包括但不限于云业务运营。

**三. 其他条款**

1. 本协议条款的解释权归 Cherry Studio 开发者所有。
2. 本协议可能根据实际情况进行更新,更新时将通过本软件通知用户。

如有任何问题或需申请商业授权,请联系 Cherry Studio 开发团队。

除上述特定条件外,其他所有权利和限制均遵循 Apache License 2.0。有关 Apache License 2.0 的详细信息,请访问 http://www.apache.org/licenses/LICENSE-2.0。
定义:“10人及以下”
指在您的组织(包括公司、非营利组织、政府机构、教育机构等任何实体)中,能够访问、使用或以任何方式直接或间接受益于本软件(Cherry Studio)功能的个人总数不超过10人。这包括但不限于开发者、测试人员、运营人员、最终用户、通过集成系统间接使用者等。

---

**1. 开源许可证 (Open Source License): AGPLv3 - 适用于个人及10人及以下组织**

**License Agreement**
* 如果您是个人用户,或者您的组织满足上述“10人及以下”的定义,您可以在 **AGPLv3** 的条款下自由使用、修改和分发 Cherry Studio。AGPLv3 的完整文本可以访问 [https://www.gnu.org/licenses/agpl-3.0.html](https://www.gnu.org/licenses/agpl-3.0.html) 获取。
* **核心义务:** AGPLv3 的一个关键要求是,如果您修改了 Cherry Studio 并通过网络提供服务,或者分发了修改后的版本,您必须以 AGPLv3 许可证向接收者提供相应的**完整源代码**。即使您符合“10人及以下”的标准,如果您希望避免此源代码公开义务,您也需要考虑获取商业许可证(见下文)。
* 使用前请务必仔细阅读并理解 AGPLv3 的所有条款。

This software is licensed under the Apache License 2.0. In addition to the terms stipulated by the Apache License 2.0, you must comply with the following supplementary terms when using Cherry Studio:
**2. 商业许可证 (Commercial License) - 适用于超过10人的组织,或希望规避 AGPLv3 义务的用户**

**I. Commercial Licensing**
* **强制要求:** 如果您的组织**不**满足上述“10人及以下”的定义(即有11人或更多人可以访问、使用或受益于本软件),您**必须**联系我们获取并签署一份商业许可证才能使用 Cherry Studio。
* **自愿选择:** 即使您的组织满足“10人及以下”的条件,但如果您的使用场景**无法满足 AGPLv3 的条款要求**(特别是关于**源代码公开**的义务),或者您需要 AGPLv3 **未提供**的特定商业条款(如保证、赔偿、无 Copyleft 限制等),您也**必须**联系我们获取并签署一份商业许可证。
* **需要商业许可证的常见情况包括(但不限于):**
* 您的组织规模超过10人。
* (无论组织规模)您希望分发修改过的 Cherry Studio 版本,但**不希望**根据 AGPLv3 公开您修改部分的源代码。
* (无论组织规模)您希望基于修改过的 Cherry Studio 提供网络服务(SaaS),但**不希望**根据 AGPLv3 向服务使用者提供修改后的源代码。
* (无论组织规模)您的公司政策、客户合同或项目要求不允许使用 AGPLv3 许可的软件,或要求闭源分发及保密。
* 商业许可证将为您提供豁免 AGPLv3 义务(如源代码公开)的权利,并可能包含额外的商业保障条款。
* **获取商业许可:** 请通过邮箱 **bd@cherry-ai.com** 联系 Cherry Studio 开发团队洽谈商业授权事宜。

You must contact us and obtain explicit written commercial authorization to continue using Cherry Studio materials under any of the following circumstances:
**3. 贡献 (Contributions)**

1. **Modifications and Derivatives:** You modify Cherry Studio materials or perform derivative development based on them (including but not limited to changing the application’s name, logo, code, functionality, user interface, data, etc.).
2. **Enterprise Services:** You use Cherry Studio internally within your enterprise, or you provide Cherry Studio-based services for enterprise customers, and such services support cumulative usage by 10 or more users.
3. **Hardware Bundling and Sales:** You pre-install or integrate Cherry Studio into hardware devices or products for bundled sale.
4. **Large-scale Procurement by Government or Educational Institutions:** Your usage scenario involves large-scale procurement projects by government or educational institutions, especially in cases involving sensitive requirements such as security and data privacy.
5. **Public Cloud Services:** You provide public cloud-based product services utilizing Cherry Studio.
* 我们欢迎社区对 Cherry Studio 的贡献。所有向本项目提交的贡献都将被视为在 **AGPLv3** 许可证下提供。
* 通过向本项目提交贡献(例如通过 Pull Request),即表示您同意您的代码以 AGPLv3 许可证授权给本项目及所有后续使用者(无论这些使用者最终遵循 AGPLv3 还是商业许可)。
* 您也理解并同意,您的贡献可能会被包含在根据商业许可证分发的 Cherry Studio 版本中。

**II. Contributor Agreement**
**4. 其他条款 (Other Terms)**

As a contributor to Cherry Studio, you must agree to the following terms:
```
|
||||
* 关于商业许可证的具体条款和条件,以双方签署的正式商业许可协议为准。
|
||||
* 项目维护者保留根据需要更新本许可政策(包括用户规模定义和阈值)的权利。相关更新将通过项目官方渠道(如代码仓库、官方网站)进行通知。
|
||||
|
||||
1. **License Adjustments:** The producer reserves the right to adjust the open-source license as necessary, making it more strict or permissive.
|
||||
2. **Commercial Usage:** Your contributed code may be used commercially, including but not limited to cloud business operations.
|
||||
---
|
||||
|
||||
**III. Other Terms**
|
||||
**Licensing**
|
||||
|
||||
1. Cherry Studio developers reserve the right of final interpretation of these agreement terms.
|
||||
2. This agreement may be updated according to practical circumstances, and users will be notified of updates through this software.
|
||||
This project employs a **User-Segmented Dual Licensing** model.
|
||||
|
||||
If you have any questions or need to apply for commercial authorization, please contact the Cherry Studio development team.
|
||||
**Core Principle:**
|
||||
|
||||
Other than these specific conditions, all remaining rights and restrictions follow the Apache License 2.0. For more detailed information regarding Apache License 2.0, please visit http://www.apache.org/licenses/LICENSE-2.0.
|
||||
* **Individual Users and Organizations with 10 or Fewer Individuals:** Governed by default under the **GNU Affero General Public License v3.0 (AGPLv3)**.
|
||||
* **Organizations with More Than 10 Individuals:** **Must** obtain a **Commercial License**.
|
||||
|
||||
Definition: "10 or Fewer Individuals"
|
||||
Refers to any organization (including companies, non-profits, government agencies, educational institutions, etc.) where the total number of individuals who can access, use, or in any way directly or indirectly benefit from the functionality of this software (Cherry Studio) does not exceed 10. This includes, but is not limited to, developers, testers, operations staff, end-users, and indirect users via integrated systems.
|
||||
|
||||
---
|
||||
|
||||
**1. Open Source License: AGPLv3 - For Individuals and Organizations of 10 or Fewer**
|
||||
|
||||
* If you are an individual user, or if your organization meets the "10 or Fewer Individuals" definition above, you are free to use, modify, and distribute Cherry Studio under the terms of the **AGPLv3**. The full text of the AGPLv3 can be found in the LICENSE file at [https://www.gnu.org/licenses/agpl-3.0.html](https://www.gnu.org/licenses/agpl-3.0.html).
|
||||
* **Core Obligation:** A key requirement of the AGPLv3 is that if you modify Cherry Studio and make it available over a network, or distribute the modified version, you must provide the **complete corresponding source code** under the AGPLv3 license to the recipients. Even if you qualify under the "10 or Fewer Individuals" rule, if you wish to avoid this source code disclosure obligation, you will need to obtain a Commercial License (see below).
|
||||
* Please read and understand the full terms of the AGPLv3 carefully before use.
|
||||
|
||||
**2. Commercial License - For Organizations with More Than 10 Individuals, or Users Needing to Avoid AGPLv3 Obligations**
|
||||
|
||||
* **Mandatory Requirement:** If your organization does **not** meet the "10 or Fewer Individuals" definition above (i.e., 11 or more individuals can access, use, or benefit from the software), you **must** contact us to obtain and execute a Commercial License to use Cherry Studio.
|
||||
* **Voluntary Option:** Even if your organization meets the "10 or Fewer Individuals" condition, if your intended use case **cannot comply with the terms of the AGPLv3** (particularly the obligations regarding **source code disclosure**), or if you require specific commercial terms **not offered** by the AGPLv3 (such as warranties, indemnities, or freedom from copyleft restrictions), you also **must** contact us to obtain and execute a Commercial License.
|
||||
* **Common scenarios requiring a Commercial License include (but are not limited to):**
|
||||
* Your organization has more than 10 individuals who can access, use, or benefit from the software.
|
||||
* (Regardless of organization size) You wish to distribute a modified version of Cherry Studio but **do not want** to disclose the source code of your modifications under AGPLv3.
|
||||
* (Regardless of organization size) You wish to provide a network service (SaaS) based on a modified version of Cherry Studio but **do not want** to provide the modified source code to users of the service under AGPLv3.
|
||||
* (Regardless of organization size) Your corporate policies, client contracts, or project requirements prohibit the use of AGPLv3-licensed software or mandate closed-source distribution and confidentiality.
|
||||
* The Commercial License grants you rights exempting you from AGPLv3 obligations (like source code disclosure) and may include additional commercial assurances.
|
||||
* **Obtaining a Commercial License:** Please contact the Cherry Studio development team via email at **bd@cherry-ai.com** to discuss commercial licensing options.
|
||||
|
||||
**3. Contributions**
|
||||
|
||||
* We welcome community contributions to Cherry Studio. All contributions submitted to this project are considered to be offered under the **AGPLv3** license.
|
||||
* By submitting a contribution to this project (e.g., via a Pull Request), you agree to license your code under the AGPLv3 to the project and all its downstream users (regardless of whether those users ultimately operate under AGPLv3 or a Commercial License).
|
||||
* You also understand and agree that your contribution may be included in distributions of Cherry Studio offered under our commercial license.
|
||||
|
||||
**4. Other Terms**
|
||||
|
||||
* The specific terms and conditions of the Commercial License are governed by the formal commercial license agreement signed by both parties.
|
||||
* The project maintainers reserve the right to update this licensing policy (including the definition and threshold for user count) as needed. Updates will be communicated through official project channels (e.g., code repository, official website).
|
||||
|
||||
14
README.md
@@ -13,7 +13,7 @@
|
||||
|
||||
Cherry Studio is a desktop client that supports multiple LLM providers, available on Windows, Mac and Linux.
|
||||
|
||||
👏 Join [Telegram Group](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQ Group(472019156)](https://qm.qq.com/q/CbZiBWwCXu)
|
||||
👏 Join [Telegram Group](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQ Group(575014769)](https://qm.qq.com/q/lo0D4qVZKi)
|
||||
|
||||
❤️ Like Cherry Studio? Give it a star 🌟 or [Sponsor](docs/sponsor.md) to support the development!
|
||||
|
||||
@@ -23,14 +23,12 @@ https://docs.cherry-ai.com
|
||||
|
||||
# 🌠 Screenshot
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
# 🌟 Key Features
|
||||
|
||||

|
||||
|
||||
1. **Diverse LLM Provider Support**:
|
||||
|
||||
- ☁️ Major LLM Cloud Services: OpenAI, Gemini, Anthropic, and more
|
||||
@@ -84,9 +82,11 @@ https://docs.cherry-ai.com
|
||||
|
||||
# 🌈 Theme
|
||||
|
||||
- Theme Gallery: https://cherrycss.com
|
||||
- Theme Gallery: https://cherrycss.com
|
||||
- Aero Theme: https://github.com/hakadao/CherryStudio-Aero
|
||||
- PaperMaterial Theme: https://github.com/rainoffallingstar/CherryStudio-PaperMaterial
|
||||
- Claude dynamic-style: https://github.com/bjl101501/CherryStudio-Claudestyle-dynamic
|
||||
- Maple Neon Theme: https://github.com/BoningtonChen/CherryStudio_themes
|
||||
|
||||
Welcome PR for more themes
|
||||
|
||||
|
||||
BIN
build/icons/1024x1024.png
Normal file
|
After Width: | Height: | Size: 105 KiB |
BIN
build/icons/128x128.png
Normal file
|
After Width: | Height: | Size: 9.3 KiB |
BIN
build/icons/16x16.png
Normal file
|
After Width: | Height: | Size: 621 B |
BIN
build/icons/24x24.png
Normal file
|
After Width: | Height: | Size: 1.0 KiB |
BIN
build/icons/256x256.png
Normal file
|
After Width: | Height: | Size: 23 KiB |
BIN
build/icons/32x32.png
Normal file
|
After Width: | Height: | Size: 1.5 KiB |
BIN
build/icons/48x48.png
Normal file
|
After Width: | Height: | Size: 2.6 KiB |
BIN
build/icons/512x512.png
Normal file
|
After Width: | Height: | Size: 50 KiB |
BIN
build/icons/64x64.png
Normal file
|
After Width: | Height: | Size: 3.7 KiB |
@@ -1,8 +1,8 @@
|
||||
# provider: generic
|
||||
# url: http://127.0.0.1:8080
|
||||
# updaterCacheDirName: cherry-studio-updater
|
||||
provider: github
|
||||
repo: cherry-studio
|
||||
owner: kangfenmao
|
||||
# provider: generic
|
||||
# url: https://cherrystudio.ocool.online
|
||||
# provider: github
|
||||
# repo: cherry-studio
|
||||
# owner: kangfenmao
|
||||
provider: generic
|
||||
url: https://releases.cherry-ai.com
|
||||
|
||||
77
docs/CONTRIBUTING.zh.md
Normal file
@@ -0,0 +1,77 @@
|
||||
# Cherry Studio 贡献者指南
|
||||
|
||||
[**English**](../CONTRIBUTING.md) | [**中文**](./CONTRIBUTING.zh.md)
|
||||
|
||||
欢迎来到 Cherry Studio 的贡献者社区!我们致力于将 Cherry Studio 打造成一个长期提供价值的项目,并希望邀请更多的开发者加入我们的行列。无论您是经验丰富的开发者还是刚刚起步的初学者,您的贡献都将帮助我们更好地服务用户,提升软件质量。
|
||||
|
||||
## 如何贡献
|
||||
|
||||
以下是您可以参与的几种方式:
|
||||
|
||||
1. **贡献代码**:帮助我们开发新功能或优化现有代码。请确保您的代码符合我们的编码标准,并通过所有测试。
|
||||
|
||||
2. **修复 BUG**:如果您发现了 BUG,欢迎提交修复方案。请在提交前确认问题已被解决,并附上相关测试。
|
||||
|
||||
3. **维护 Issue**:协助我们管理 GitHub 上的 issue,帮助标记、分类和解决问题。
|
||||
|
||||
4. **产品设计**:参与产品设计讨论,帮助我们改进用户体验和界面设计。
|
||||
|
||||
5. **编写文档**:帮助我们完善用户手册、API 文档和开发者指南。
|
||||
|
||||
6. **社区维护**:参与社区讨论,帮助解答用户问题,促进社区活跃。
|
||||
|
||||
7. **推广使用**:通过博客、社交媒体等渠道推广 Cherry Studio,吸引更多用户和开发者。
|
||||
|
||||
## 开始之前
|
||||
|
||||
请确保阅读了[行为准则](CODE_OF_CONDUCT.md)和[LICENSE](LICENSE)。
|
||||
|
||||
## 开始贡献
|
||||
|
||||
为了让您更熟悉代码,建议您处理一些标记有以下标签之一或多个的问题:[good-first-issue](https://github.com/CherryHQ/cherry-studio/labels/good%20first%20issue)、[help-wanted](https://github.com/CherryHQ/cherry-studio/labels/help%20wanted) 或 [kind/bug](https://github.com/CherryHQ/cherry-studio/labels/kind%2Fbug)。任何帮助都会收到欢迎。
|
||||
|
||||
### 测试
|
||||
|
||||
未经测试的功能等同于不存在。为确保代码真正有效,应通过单元测试和功能测试覆盖相关流程。因此,在考虑贡献时,也请考虑可测试性。所有测试均可本地运行,无需依赖 CI。请参阅[开发者指南](docs/dev.md#test)中的“Test”部分。
|
||||
|
||||
### 拉取请求的自动化测试
|
||||
|
||||
自动化测试会在 Cherry Studio 组织成员开启的拉取请求(PR)上触发,草稿 PR 除外。新贡献者开启的 PR 最初会标记为 needs-ok-to-test 标签且不自动测试。待 Cherry Studio 组织成员在 PR 上添加 /ok-to-test 后,测试通道将被创建。
|
||||
|
||||
### 考虑将您的拉取请求作为草稿打开
|
||||
|
||||
并非所有拉取请求在创建时就准备好接受审查。这可能是因为作者想发起讨论,或者他们不完全确定更改是否朝着正确的方向发展,甚至可能是因为更改尚未完成。请考虑将这些 PR 创建为[草稿拉取请求](https://github.blog/2019-02-14-introducing-draft-pull-requests/)。草稿 PR 会被CI跳过,从而节省CI资源。这也意味着审阅者不会被自动分配,社区会理解此 PR 尚未准备好接受审阅。
|
||||
在您将草稿拉取请求标记为准备审核后,审核人员将被分配
|
||||
|
||||
### 贡献者遵守项目条款
|
||||
|
||||
我们要求每位贡献者证明他们有权合法地为我们的项目做出贡献。贡献者通过有意识地签署他们的提交来表达这一点,并通过这一行为表明他们遵守许可证[LICENSE](LICENSE)。
|
||||
签名提交是指提交信息中包含以下内容的提交:
|
||||
|
||||
```
|
||||
Signed-off-by: Your Name <your.email@example.com>
|
||||
```
|
||||
|
||||
您可以通过以下命令[git commit --signoff](https://git-scm.com/docs/git-commit#Documentation/git-commit.txt---signoff)生成签名提交:
|
||||
|
||||
```
|
||||
git commit --signoff -m "Your commit message"
|
||||
```
|
||||
|
||||
### 获取代码审查/合并
|
||||
|
||||
维护者在此帮助您在合理时间内实现您的用例。他们会尽力在合理时间内审查您的代码并提供建设性反馈。但如果您在审查过程中受阻,或认为您的 Pull Request 未得到应有的关注,请通过 Issue 中的评论或者[社群](README.md#-community)联系我们
|
||||
|
||||
### 其他建议
|
||||
|
||||
- **联系开发者**:在提交 PR 之前,您可以先和开发者进行联系,共同探讨或者获取帮助。
|
||||
- **成为核心开发者**:如果您能够稳定为项目贡献,恭喜您可以成为项目核心开发者,获取到项目成员身份。请查看我们的[成员指南](https://github.com/CherryHQ/community/blob/main/membership.md)
|
||||
|
||||
## 联系我们
|
||||
|
||||
如果您有任何问题或建议,欢迎通过以下方式联系我们:
|
||||
|
||||
- 微信:kangfenmao
|
||||
- [GitHub Issues](https://github.com/CherryHQ/cherry-studio/issues)
|
||||
|
||||
感谢您的支持和贡献!我们期待与您一起将 Cherry Studio 打造成更好的产品。
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
Cherry Studio は、複数の LLM プロバイダーをサポートするデスクトップクライアントで、Windows、Mac、Linux で利用可能です。
|
||||
|
||||
👏 [Telegram](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQグループ(472019156)](https://qm.qq.com/q/CbZiBWwCXu)
|
||||
👏 [Telegram](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQグループ(575014769)](https://qm.qq.com/q/lo0D4qVZKi)
|
||||
|
||||
❤️ Cherry Studio をお気に入りにしましたか?小さな星をつけてください 🌟 または [スポンサー](sponsor.md) をして開発をサポートしてください!❤️
|
||||
|
||||
@@ -24,14 +24,12 @@ https://docs.cherry-ai.com
|
||||
|
||||
# 🌠 スクリーンショット
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
# 🌟 主な機能
|
||||
|
||||

|
||||
|
||||
1. **多様な LLM サービス対応**:
|
||||
|
||||
- ☁️ 主要な LLM クラウドサービス対応:OpenAI、Gemini、Anthropic など
|
||||
@@ -85,8 +83,11 @@ https://docs.cherry-ai.com
|
||||
|
||||
# 🌈 テーマ
|
||||
|
||||
テーマギャラリー: https://cherrycss.com
|
||||
Aero テーマ: https://github.com/hakadao/CherryStudio-Aero
|
||||
- テーマギャラリー: https://cherrycss.com
|
||||
- Aero テーマ: https://github.com/hakadao/CherryStudio-Aero
|
||||
- PaperMaterial テーマ: https://github.com/rainoffallingstar/CherryStudio-PaperMaterial
|
||||
- Claude テーマ: https://github.com/bjl101501/CherryStudio-Claudestyle-dynamic
|
||||
- メープルネオンテーマ: https://github.com/BoningtonChen/CherryStudio_themes
|
||||
|
||||
より多くのテーマのPRを歓迎します
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
Cherry Studio 是一款支持多个大语言模型(LLM)服务商的桌面客户端,兼容 Windows、Mac 和 Linux 系统。
|
||||
|
||||
👏 欢迎加入 [Telegram 群组](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQ群(472019156)](https://qm.qq.com/q/CbZiBWwCXu)
|
||||
👏 欢迎加入 [Telegram 群组](https://t.me/CherryStudioAI)|[Discord](https://discord.gg/wez8HtpxqQ) | [QQ群(575014769)](https://qm.qq.com/q/lo0D4qVZKi)
|
||||
|
||||
❤️ 喜欢 Cherry Studio? 点亮小星星 🌟 或 [赞助开发者](sponsor.md)! ❤️
|
||||
|
||||
@@ -24,14 +24,12 @@ https://docs.cherry-ai.com
|
||||
|
||||
# 🌠 界面
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
# 🌟 主要特性
|
||||
|
||||

|
||||
|
||||
1. **多样化 LLM 服务支持**:
|
||||
|
||||
- ☁️ 支持主流 LLM 云服务:OpenAI、Gemini、Anthropic、硅基流动等
|
||||
@@ -85,8 +83,11 @@ https://docs.cherry-ai.com
|
||||
|
||||
# 🌈 主题
|
||||
|
||||
主题库:https://cherrycss.com
|
||||
Aero 主题:https://github.com/hakadao/CherryStudio-Aero
|
||||
- 主题库:https://cherrycss.com
|
||||
- Aero 主题:https://github.com/hakadao/CherryStudio-Aero
|
||||
- PaperMaterial 主题: https://github.com/rainoffallingstar/CherryStudio-PaperMaterial
|
||||
- 仿Claude 主题: https://github.com/bjl101501/CherryStudio-Claudestyle-dynamic
|
||||
- 霓虹枫叶字体主题: https://github.com/BoningtonChen/CherryStudio_themes
|
||||
|
||||
欢迎 PR 更多主题
|
||||
|
||||
@@ -113,7 +114,7 @@ Aero 主题:https://github.com/hakadao/CherryStudio-Aero
|
||||
3. **提交更改**:提交并推送您的更改。
|
||||
4. **打开 Pull Request**:描述您的更改和原因。
|
||||
|
||||
有关更详细的指南,请参阅我们的 [贡献指南](../CONTRIBUTING.md)。
|
||||
有关更详细的指南,请参阅我们的 [贡献指南](./CONTRIBUTING.zh.md)。
|
||||
|
||||
感谢您的支持和贡献!
|
||||
|
||||
|
||||
@@ -37,6 +37,12 @@ yarn install
|
||||
yarn dev
|
||||
```
|
||||
|
||||
### Test
|
||||
|
||||
```bash
|
||||
yarn test
|
||||
```
|
||||
|
||||
### Build
|
||||
|
||||
```bash
|
||||
|
||||
@@ -1,5 +1,14 @@
|
||||
appId: com.kangfenmao.CherryStudio
|
||||
productName: Cherry Studio
|
||||
electronLanguages:
|
||||
- zh-CN
|
||||
- zh-TW
|
||||
- en-US
|
||||
- ja # macOS/linux/win
|
||||
- ru # macOS/linux/win
|
||||
- zh_CN # for macOS
|
||||
- zh_TW # for macOS
|
||||
- en # for macOS
|
||||
directories:
|
||||
buildResources: build
|
||||
files:
|
||||
@@ -29,21 +38,24 @@ files:
|
||||
- '!node_modules/mammoth/{mammoth.browser.js,mammoth.browser.min.js}'
|
||||
asarUnpack:
|
||||
- resources/**
|
||||
- '**/*.{node,dll,metal,exp,lib}'
|
||||
- '**/*.{metal,exp,lib}'
|
||||
win:
|
||||
executableName: Cherry Studio
|
||||
artifactName: ${productName}-${version}-portable.${ext}
|
||||
artifactName: ${productName}-${version}-${arch}-setup.${ext}
|
||||
target:
|
||||
- target: nsis
|
||||
- target: portable
|
||||
nsis:
|
||||
artifactName: ${productName}-${version}-setup.${ext}
|
||||
artifactName: ${productName}-${version}-${arch}-setup.${ext}
|
||||
shortcutName: ${productName}
|
||||
uninstallDisplayName: ${productName}
|
||||
createDesktopShortcut: always
|
||||
allowToChangeInstallationDirectory: true
|
||||
oneClick: false
|
||||
include: build/nsis-installer.nsh
|
||||
buildUniversalInstaller: false
|
||||
portable:
|
||||
artifactName: ${productName}-${version}-${arch}-portable.${ext}
|
||||
mac:
|
||||
entitlementsInherit: build/entitlements.mac.plist
|
||||
notarize: false
|
||||
@@ -55,35 +67,31 @@ mac:
|
||||
- NSDownloadsFolderUsageDescription: Application requests access to the user's Downloads folder.
|
||||
target:
|
||||
- target: dmg
|
||||
arch:
|
||||
- arm64
|
||||
- x64
|
||||
- target: zip
|
||||
arch:
|
||||
- arm64
|
||||
- x64
|
||||
linux:
|
||||
artifactName: ${productName}-${version}-${arch}.${ext}
|
||||
target:
|
||||
- target: AppImage
|
||||
arch:
|
||||
- arm64
|
||||
- x64
|
||||
maintainer: electronjs.org
|
||||
category: Utility
|
||||
desktop:
|
||||
entry:
|
||||
StartupWMClass: CherryStudio
|
||||
mimeTypes:
|
||||
- x-scheme-handler/cherrystudio
|
||||
publish:
|
||||
# provider: generic
|
||||
# url: https://cherrystudio.ocool.online
|
||||
provider: github
|
||||
repo: cherry-studio
|
||||
owner: CherryHQ
|
||||
provider: generic
|
||||
url: https://releases.cherry-ai.com
|
||||
electronDownload:
|
||||
mirror: https://npmmirror.com/mirrors/electron/
|
||||
afterPack: scripts/after-pack.js
|
||||
afterSign: scripts/notarize.js
|
||||
artifactBuildCompleted: scripts/artifact-build-completed.js
|
||||
releaseInfo:
|
||||
releaseNotes: |
|
||||
小程序支持多开
|
||||
支持 GPT-4o 图像生成
|
||||
修复 MCP 服务器无法使用问题
|
||||
修复升级导致旧版本数据丢失问题
|
||||
新增对 grok-2-image 和 gpt-4o-image 图像支持
|
||||
支持 Windows 便携版使用 data 目录存储数据
|
||||
MCP 界面改版,新增描述信息显示
|
||||
Mermaid 渲染逻辑优化
|
||||
支持关闭公式渲染
|
||||
修复 OpenAI 类型渲染错误
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import react from '@vitejs/plugin-react'
|
||||
import react from '@vitejs/plugin-react-swc'
|
||||
import { defineConfig, externalizeDepsPlugin } from 'electron-vite'
|
||||
import { resolve } from 'path'
|
||||
import { visualizer } from 'rollup-plugin-visualizer'
|
||||
@@ -42,24 +42,27 @@ export default defineConfig({
|
||||
}
|
||||
},
|
||||
preload: {
|
||||
plugins: [externalizeDepsPlugin()]
|
||||
plugins: [externalizeDepsPlugin()],
|
||||
resolve: {
|
||||
alias: {
|
||||
'@shared': resolve('packages/shared')
|
||||
}
|
||||
}
|
||||
},
|
||||
renderer: {
|
||||
plugins: [
|
||||
react({
|
||||
babel: {
|
||||
plugins: [
|
||||
[
|
||||
'styled-components',
|
||||
{
|
||||
displayName: true, // 开发环境下启用组件名称
|
||||
fileName: false, // 不在类名中包含文件名
|
||||
pure: true, // 优化性能
|
||||
ssr: false // 不需要服务端渲染
|
||||
}
|
||||
]
|
||||
plugins: [
|
||||
[
|
||||
'@swc/plugin-styled-components',
|
||||
{
|
||||
displayName: true, // 开发环境下启用组件名称
|
||||
fileName: false, // 不在类名中包含文件名
|
||||
pure: true, // 优化性能
|
||||
ssr: false // 不需要服务端渲染
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}),
|
||||
...visualizerPlugin('renderer')
|
||||
],
|
||||
@@ -70,7 +73,7 @@ export default defineConfig({
|
||||
}
|
||||
},
|
||||
optimizeDeps: {
|
||||
exclude: ['chunk-PZ64DZKH.js', 'chunk-JMKENWIY.js', 'chunk-UXYB6GHG.js', 'chunk-ALDIEZMG.js', 'chunk-4X6ZJEXY.js']
|
||||
exclude: []
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
72
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "CherryStudio",
|
||||
"version": "1.1.17",
|
||||
"version": "1.2.10",
|
||||
"private": true,
|
||||
"description": "A powerful AI assistant for producer.",
|
||||
"main": "./out/main/index.js",
|
||||
@@ -23,12 +23,13 @@
|
||||
"build": "npm run typecheck && electron-vite build",
|
||||
"build:check": "yarn test && yarn typecheck && yarn check:i18n",
|
||||
"build:unpack": "dotenv npm run build && electron-builder --dir",
|
||||
"build:win": "dotenv npm run build && electron-builder --win",
|
||||
"build:win": "dotenv npm run build && electron-builder --win --x64 --arm64",
|
||||
"build:win:x64": "dotenv npm run build && electron-builder --win --x64",
|
||||
"build:mac": "dotenv electron-vite build && electron-builder --mac",
|
||||
"build:win:arm64": "dotenv npm run build && electron-builder --win --arm64",
|
||||
"build:mac": "dotenv electron-vite build && electron-builder --mac --arm64 --x64",
|
||||
"build:mac:arm64": "dotenv electron-vite build && electron-builder --mac --arm64",
|
||||
"build:mac:x64": "dotenv electron-vite build && electron-builder --mac --x64",
|
||||
"build:linux": "dotenv electron-vite build && electron-builder --linux",
|
||||
"build:linux": "dotenv electron-vite build && electron-builder --linux --x64 --arm64",
|
||||
"build:linux:arm64": "dotenv electron-vite build && electron-builder --linux --arm64",
|
||||
"build:linux:x64": "dotenv electron-vite build && electron-builder --linux --x64",
|
||||
"build:npm": "node scripts/build-npm.js",
|
||||
@@ -43,7 +44,12 @@
|
||||
"typecheck:node": "tsc --noEmit -p tsconfig.node.json --composite false",
|
||||
"typecheck:web": "tsc --noEmit -p tsconfig.web.json --composite false",
|
||||
"check:i18n": "node scripts/check-i18n.js",
|
||||
"test": "npx -y tsx --test src/**/*.test.ts",
|
||||
"test": "yarn test:renderer",
|
||||
"test:coverage": "yarn test:renderer:coverage",
|
||||
"test:node": "npx -y tsx --test src/**/*.test.ts",
|
||||
"test:renderer": "vitest run",
|
||||
"test:renderer:ui": "vitest --ui",
|
||||
"test:renderer:coverage": "vitest run --coverage",
|
||||
"format": "prettier --write .",
|
||||
"lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix",
|
||||
"postinstall": "electron-builder install-app-deps",
|
||||
@@ -63,28 +69,38 @@
|
||||
"@cherrystudio/embedjs-openai": "^0.1.28",
|
||||
"@electron-toolkit/utils": "^3.0.0",
|
||||
"@electron/notarize": "^2.5.0",
|
||||
"@google/generative-ai": "^0.24.0",
|
||||
"@langchain/community": "^0.3.36",
|
||||
"@notionhq/client": "^2.2.15",
|
||||
"@tryfabric/martian": "^1.2.4",
|
||||
"@strongtz/win32-arm64-msvc": "^0.4.7",
|
||||
"@types/react-infinite-scroll-component": "^5.0.0",
|
||||
"@xyflow/react": "^12.4.4",
|
||||
"adm-zip": "^0.5.16",
|
||||
"archiver": "^7.0.1",
|
||||
"async-mutex": "^0.5.0",
|
||||
"bufferutil": "^4.0.9",
|
||||
"color": "^5.0.0",
|
||||
"diff": "^7.0.0",
|
||||
"docx": "^9.0.2",
|
||||
"electron-log": "^5.1.5",
|
||||
"electron-store": "^8.2.0",
|
||||
"electron-updater": "^6.3.9",
|
||||
"electron-updater": "patch:electron-updater@npm%3A6.6.3#~/.yarn/patches/electron-updater-npm-6.6.3-9269dbaf84.patch",
|
||||
"electron-window-state": "^5.0.3",
|
||||
"epub": "patch:epub@npm%3A1.3.0#~/.yarn/patches/epub-npm-1.3.0-8325494ffe.patch",
|
||||
"fast-xml-parser": "^5.0.9",
|
||||
"extract-zip": "^2.0.1",
|
||||
"fast-xml-parser": "^5.2.0",
|
||||
"fetch-socks": "^1.3.2",
|
||||
"fs-extra": "^11.2.0",
|
||||
"got-scraping": "^4.1.1",
|
||||
"jsdom": "^26.0.0",
|
||||
"markdown-it": "^14.1.0",
|
||||
"node-stream-zip": "^1.15.0",
|
||||
"officeparser": "^4.1.1",
|
||||
"os-proxy-config": "^1.1.2",
|
||||
"proxy-agent": "^6.5.0",
|
||||
"tar": "^7.4.3",
|
||||
"turndown": "^7.2.0",
|
||||
"turndown-plugin-gfm": "^1.0.2",
|
||||
"undici": "^7.4.0",
|
||||
"webdav": "^5.8.0",
|
||||
"ws": "^8.18.1",
|
||||
"zipread": "^1.3.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -100,15 +116,19 @@
|
||||
"@emotion/is-prop-valid": "^1.3.1",
|
||||
"@eslint-react/eslint-plugin": "^1.36.1",
|
||||
"@eslint/js": "^9.22.0",
|
||||
"@google/genai": "^0.4.0",
|
||||
"@google/genai": "^0.10.0",
|
||||
"@hello-pangea/dnd": "^16.6.0",
|
||||
"@kangfenmao/keyv-storage": "^0.1.0",
|
||||
"@modelcontextprotocol/sdk": "^1.8.0",
|
||||
"@modelcontextprotocol/sdk": "^1.10.2",
|
||||
"@mozilla/readability": "^0.6.0",
|
||||
"@notionhq/client": "^2.2.15",
|
||||
"@reduxjs/toolkit": "^2.2.5",
|
||||
"@shikijs/markdown-it": "^3.2.2",
|
||||
"@swc/plugin-styled-components": "^7.1.3",
|
||||
"@tavily/core": "patch:@tavily/core@npm%3A0.3.1#~/.yarn/patches/@tavily-core-npm-0.3.1-fe69bf2bea.patch",
|
||||
"@tryfabric/martian": "^1.2.4",
|
||||
"@types/adm-zip": "^0",
|
||||
"@types/diff": "^7",
|
||||
"@types/fs-extra": "^11",
|
||||
"@types/lodash": "^4.17.5",
|
||||
"@types/markdown-it": "^14",
|
||||
@@ -119,7 +139,11 @@
|
||||
"@types/react-dom": "^19.0.4",
|
||||
"@types/react-infinite-scroll-component": "^5.0.0",
|
||||
"@types/tinycolor2": "^1",
|
||||
"@vitejs/plugin-react": "^4.2.1",
|
||||
"@types/ws": "^8",
|
||||
"@vitejs/plugin-react-swc": "^3.9.0",
|
||||
"@vitest/coverage-v8": "^3.1.1",
|
||||
"@vitest/ui": "^3.1.1",
|
||||
"@xyflow/react": "^12.4.4",
|
||||
"antd": "^5.22.5",
|
||||
"applescript": "^1.0.0",
|
||||
"axios": "^1.7.3",
|
||||
@@ -130,7 +154,7 @@
|
||||
"dexie-react-hooks": "^1.1.7",
|
||||
"dotenv-cli": "^7.4.2",
|
||||
"electron": "31.7.6",
|
||||
"electron-builder": "^24.13.3",
|
||||
"electron-builder": "26.0.13",
|
||||
"electron-devtools-installer": "^3.2.0",
|
||||
"electron-icon-builder": "^2.0.1",
|
||||
"electron-vite": "^2.3.0",
|
||||
@@ -145,9 +169,11 @@
|
||||
"i18next": "^23.11.5",
|
||||
"lint-staged": "^15.5.0",
|
||||
"lodash": "^4.17.21",
|
||||
"lru-cache": "^11.1.0",
|
||||
"lucide-react": "^0.487.0",
|
||||
"mime": "^4.0.4",
|
||||
"npx-scope-finder": "^1.2.0",
|
||||
"openai": "patch:openai@npm%3A4.77.3#~/.yarn/patches/openai-npm-4.77.3-59c6d42e7a.patch",
|
||||
"openai": "patch:openai@npm%3A4.87.3#~/.yarn/patches/openai-npm-4.87.3-2b30a7685f.patch",
|
||||
"p-queue": "^8.1.0",
|
||||
"prettier": "^3.5.3",
|
||||
"rc-virtual-list": "^3.18.5",
|
||||
@@ -171,21 +197,27 @@
|
||||
"remark-math": "^6.0.0",
|
||||
"rollup-plugin-visualizer": "^5.12.0",
|
||||
"sass": "^1.77.2",
|
||||
"shiki": "^1.22.2",
|
||||
"shiki": "^3.2.2",
|
||||
"string-width": "^7.2.0",
|
||||
"styled-components": "^6.1.11",
|
||||
"tiny-pinyin": "^1.3.2",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"tokenx": "^0.4.1",
|
||||
"typescript": "^5.6.2",
|
||||
"uuid": "^10.0.0",
|
||||
"vite": "^5.0.12"
|
||||
"vite": "6.2.6",
|
||||
"vitest": "^3.1.1"
|
||||
},
|
||||
"resolutions": {
|
||||
"pdf-parse@npm:1.1.1": "patch:pdf-parse@npm%3A1.1.1#~/.yarn/patches/pdf-parse-npm-1.1.1-04a6109b2a.patch",
|
||||
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
|
||||
"@langchain/openai@npm:>=0.1.0 <0.4.0": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
|
||||
"openai@npm:^4.77.0": "patch:openai@npm%3A4.77.3#~/.yarn/patches/openai-npm-4.77.3-59c6d42e7a.patch",
|
||||
"pkce-challenge@npm:^4.1.0": "patch:pkce-challenge@npm%3A4.1.0#~/.yarn/patches/pkce-challenge-npm-4.1.0-fbc51695a3.patch"
|
||||
"node-gyp": "^9.1.0",
|
||||
"libsql@npm:^0.4.4": "patch:libsql@npm%3A0.4.7#~/.yarn/patches/libsql-npm-0.4.7-444e260fb1.patch",
|
||||
"openai@npm:^4.77.0": "patch:openai@npm%3A4.87.3#~/.yarn/patches/openai-npm-4.87.3-2b30a7685f.patch",
|
||||
"pkce-challenge@npm:^4.1.0": "patch:pkce-challenge@npm%3A4.1.0#~/.yarn/patches/pkce-challenge-npm-4.1.0-fbc51695a3.patch",
|
||||
"app-builder-lib@npm:26.0.13": "patch:app-builder-lib@npm%3A26.0.13#~/.yarn/patches/app-builder-lib-npm-26.0.13-a064c9e1d0.patch",
|
||||
"shiki": "3.2.2"
|
||||
},
|
||||
"packageManager": "yarn@4.6.0",
|
||||
"lint-staged": {
|
||||
|
||||
166
packages/shared/IpcChannel.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
export enum IpcChannel {
|
||||
App_ClearCache = 'app:clear-cache',
|
||||
App_SetLaunchOnBoot = 'app:set-launch-on-boot',
|
||||
App_SetLanguage = 'app:set-language',
|
||||
App_ShowUpdateDialog = 'app:show-update-dialog',
|
||||
App_CheckForUpdate = 'app:check-for-update',
|
||||
App_Reload = 'app:reload',
|
||||
App_Info = 'app:info',
|
||||
App_Proxy = 'app:proxy',
|
||||
App_SetLaunchToTray = 'app:set-launch-to-tray',
|
||||
App_SetTray = 'app:set-tray',
|
||||
App_SetTrayOnClose = 'app:set-tray-on-close',
|
||||
App_RestartTray = 'app:restart-tray',
|
||||
App_SetTheme = 'app:set-theme',
|
||||
App_SetCustomCss = 'app:set-custom-css',
|
||||
App_SetAutoUpdate = 'app:set-auto-update',
|
||||
|
||||
App_IsBinaryExist = 'app:is-binary-exist',
|
||||
App_GetBinaryPath = 'app:get-binary-path',
|
||||
App_InstallUvBinary = 'app:install-uv-binary',
|
||||
App_InstallBunBinary = 'app:install-bun-binary',
|
||||
|
||||
Webview_SetOpenLinkExternal = 'webview:set-open-link-external',
|
||||
|
||||
// Open
|
||||
Open_Path = 'open:path',
|
||||
Open_Website = 'open:website',
|
||||
|
||||
Minapp = 'minapp',
|
||||
|
||||
Config_Set = 'config:set',
|
||||
Config_Get = 'config:get',
|
||||
|
||||
MiniWindow_Show = 'miniwindow:show',
|
||||
MiniWindow_Hide = 'miniwindow:hide',
|
||||
MiniWindow_Close = 'miniwindow:close',
|
||||
MiniWindow_Toggle = 'miniwindow:toggle',
|
||||
MiniWindow_SetPin = 'miniwindow:set-pin',
|
||||
|
||||
// Mcp
|
||||
Mcp_AddServer = 'mcp:add-server',
|
||||
Mcp_RemoveServer = 'mcp:remove-server',
|
||||
Mcp_RestartServer = 'mcp:restart-server',
|
||||
Mcp_StopServer = 'mcp:stop-server',
|
||||
Mcp_ListTools = 'mcp:list-tools',
|
||||
Mcp_CallTool = 'mcp:call-tool',
|
||||
Mcp_ListPrompts = 'mcp:list-prompts',
|
||||
Mcp_GetPrompt = 'mcp:get-prompt',
|
||||
Mcp_ListResources = 'mcp:list-resources',
|
||||
Mcp_GetResource = 'mcp:get-resource',
|
||||
Mcp_GetInstallInfo = 'mcp:get-install-info',
|
||||
Mcp_ServersChanged = 'mcp:servers-changed',
|
||||
Mcp_ServersUpdated = 'mcp:servers-updated',
|
||||
|
||||
//copilot
|
||||
Copilot_GetAuthMessage = 'copilot:get-auth-message',
|
||||
Copilot_GetCopilotToken = 'copilot:get-copilot-token',
|
||||
Copilot_SaveCopilotToken = 'copilot:save-copilot-token',
|
||||
Copilot_GetToken = 'copilot:get-token',
|
||||
Copilot_Logout = 'copilot:logout',
|
||||
Copilot_GetUser = 'copilot:get-user',
|
||||
|
||||
// obsidian
|
||||
Obsidian_GetVaults = 'obsidian:get-vaults',
|
||||
Obsidian_GetFiles = 'obsidian:get-files',
|
||||
|
||||
// nutstore
|
||||
Nutstore_GetSsoUrl = 'nutstore:get-sso-url',
|
||||
Nutstore_DecryptToken = 'nutstore:decrypt-token',
|
||||
Nutstore_GetDirectoryContents = 'nutstore:get-directory-contents',
|
||||
|
||||
//aes
|
||||
Aes_Encrypt = 'aes:encrypt',
|
||||
Aes_Decrypt = 'aes:decrypt',
|
||||
|
||||
Gemini_UploadFile = 'gemini:upload-file',
|
||||
Gemini_Base64File = 'gemini:base64-file',
|
||||
Gemini_RetrieveFile = 'gemini:retrieve-file',
|
||||
Gemini_ListFiles = 'gemini:list-files',
|
||||
Gemini_DeleteFile = 'gemini:delete-file',
|
||||
|
||||
Windows_ResetMinimumSize = 'window:reset-minimum-size',
|
||||
Windows_SetMinimumSize = 'window:set-minimum-size',
|
||||
|
||||
SelectionMenu_Action = 'selection-menu:action',
|
||||
|
||||
KnowledgeBase_Create = 'knowledge-base:create',
|
||||
KnowledgeBase_Reset = 'knowledge-base:reset',
|
||||
KnowledgeBase_Delete = 'knowledge-base:delete',
|
||||
KnowledgeBase_Add = 'knowledge-base:add',
|
||||
KnowledgeBase_Remove = 'knowledge-base:remove',
|
||||
KnowledgeBase_Search = 'knowledge-base:search',
|
||||
KnowledgeBase_Rerank = 'knowledge-base:rerank',
|
||||
|
||||
//file
|
||||
File_Open = 'file:open',
|
||||
File_OpenPath = 'file:openPath',
|
||||
File_Save = 'file:save',
|
||||
File_Select = 'file:select',
|
||||
File_Upload = 'file:upload',
|
||||
File_Clear = 'file:clear',
|
||||
File_Read = 'file:read',
|
||||
File_Delete = 'file:delete',
|
||||
File_Get = 'file:get',
|
||||
File_SelectFolder = 'file:selectFolder',
|
||||
File_Create = 'file:create',
|
||||
File_Write = 'file:write',
|
||||
File_SaveImage = 'file:saveImage',
|
||||
File_Base64Image = 'file:base64Image',
|
||||
File_Download = 'file:download',
|
||||
File_Copy = 'file:copy',
|
||||
File_BinaryFile = 'file:binaryFile',
|
||||
|
||||
Fs_Read = 'fs:read',
|
||||
|
||||
Export_Word = 'export:word',
|
||||
|
||||
Shortcuts_Update = 'shortcuts:update',
|
||||
|
||||
// backup
|
||||
Backup_Backup = 'backup:backup',
|
||||
Backup_Restore = 'backup:restore',
|
||||
Backup_BackupToWebdav = 'backup:backupToWebdav',
|
||||
Backup_RestoreFromWebdav = 'backup:restoreFromWebdav',
|
||||
Backup_ListWebdavFiles = 'backup:listWebdavFiles',
|
||||
Backup_CheckConnection = 'backup:checkConnection',
|
||||
Backup_CreateDirectory = 'backup:createDirectory',
|
||||
Backup_DeleteWebdavFile = 'backup:deleteWebdavFile',
|
||||
|
||||
// zip
|
||||
Zip_Compress = 'zip:compress',
|
||||
Zip_Decompress = 'zip:decompress',
|
||||
|
||||
// system
|
||||
System_GetDeviceType = 'system:getDeviceType',
|
||||
System_GetHostname = 'system:getHostname',
|
||||
|
||||
// events
|
||||
SelectionAction = 'selection-action',
|
||||
BackupProgress = 'backup-progress',
|
||||
ThemeChange = 'theme:change',
|
||||
UpdateDownloadedCancelled = 'update-downloaded-cancelled',
|
||||
RestoreProgress = 'restore-progress',
|
||||
UpdateError = 'update-error',
|
||||
UpdateAvailable = 'update-available',
|
||||
UpdateNotAvailable = 'update-not-available',
|
||||
DownloadProgress = 'download-progress',
|
||||
UpdateDownloaded = 'update-downloaded',
|
||||
DownloadUpdate = 'download-update',
|
||||
|
||||
DirectoryProcessingPercent = 'directory-processing-percent',
|
||||
|
||||
FullscreenStatusChanged = 'fullscreen-status-changed',
|
||||
|
||||
HideMiniWindow = 'hide-mini-window',
|
||||
ShowMiniWindow = 'show-mini-window',
|
||||
MiniWindowReload = 'miniwindow-reload',
|
||||
|
||||
ReduxStateChange = 'redux-state-change',
|
||||
ReduxStoreReady = 'redux-store-ready',
|
||||
|
||||
// Search Window
|
||||
SearchWindow_Open = 'search-window:open',
|
||||
SearchWindow_Close = 'search-window:close',
|
||||
SearchWindow_OpenUrl = 'search-window:open-url'
|
||||
}
|
||||
@@ -157,3 +157,8 @@ export const ZOOM_SHORTCUTS = [
|
||||
system: true
|
||||
}
|
||||
]
|
||||
|
||||
export const KB = 1024
|
||||
export const MB = 1024 * KB
|
||||
export const GB = 1024 * MB
|
||||
export const defaultLanguage = 'en-US'
|
||||
|
||||
@@ -1,116 +1,197 @@
|
||||
<!doctype html>
|
||||
<!DOCTYPE html>
|
||||
<html lang="zh-CN">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>CherryStudio 许可协议-ZH/EN</title>
|
||||
<link href="https://cdn.jsdelivr.net/npm/tailwindcss@2.2.19/dist/tailwind.min.css" rel="stylesheet" />
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>许可协议 | License Agreement</title>
|
||||
<script src="https://cdn.tailwindcss.com"></script>
|
||||
</head>
|
||||
|
||||
<body class="bg-gray-100 p-8">
|
||||
<div class="container mx-auto bg-white p-6 rounded shadow-lg">
|
||||
<h1 class="text-3xl font-bold mb-6 text-center">Cherry Studio 许可协议</h1>
|
||||
<div class="mb-8">
|
||||
<h2 class="text-2xl font-semibold mb-4">许可协议</h2>
|
||||
<p class="mb-4">
|
||||
本软件采用 <strong>Apache License 2.0</strong> 许可。除 Apache License 2.0 规定的条款外,您在使用 Cherry
|
||||
Studio 时还应遵守以下附加条款:
|
||||
</p>
|
||||
<h3 class="text-xl font-semibold mb-2">一. 商用许可</h3>
|
||||
<ol class="list-decimal list-inside mb-4">
|
||||
<li><strong>免费商用</strong>:用户在不修改代码的情况下,可以免费用于商业目的。</li>
|
||||
<li>
|
||||
<strong>商业授权</strong>:如果您满足以下任意条件之一,需取得商业授权:
|
||||
<ol class="list-decimal list-inside ml-4">
|
||||
<li>对本软件进行二次修改、开发(包括但不限于修改应用名称、logo、代码以及功能)。</li>
|
||||
<li>为企业客户提供多租户服务,且该服务支持 10 人或以上的使用。</li>
|
||||
<li>预装或集成到硬件设备或产品中进行捆绑销售。</li>
|
||||
<li>政府或教育机构的大规模采购项目,特别是涉及安全、数据隐私等敏感需求时。</li>
|
||||
</ol>
|
||||
</li>
|
||||
</ol>
|
||||
<h3 class="text-xl font-semibold mb-2">二. 贡献者协议</h3>
|
||||
<ol class="list-decimal list-inside mb-4">
|
||||
<li><strong>许可调整</strong>:生产者有权根据需要对开源协议进行调整,使其更加严格或宽松。</li>
|
||||
<li><strong>商业用途</strong>:您贡献的代码可能会被用于商业用途,包括但不限于云业务运营。</li>
|
||||
</ol>
|
||||
<h3 class="text-xl font-semibold mb-2">三. 其他条款</h3>
|
||||
<ol class="list-decimal list-inside mb-4">
|
||||
<li>本协议条款的解释权归 Cherry Studio 开发者所有。</li>
|
||||
<li>本协议可能根据实际情况进行更新,更新时将通过本软件通知用户。</li>
|
||||
</ol>
|
||||
<p class="mb-4">如有任何问题或需申请商业授权,请联系 Cherry Studio 开发团队。</p>
|
||||
<p>
|
||||
除上述特定条件外,其他所有权利和限制均遵循 Apache License 2.0。有关 Apache License 2.0 的详细信息,请访问
|
||||
<a href="http://www.apache.org/licenses/LICENSE-2.0"
|
||||
class="text-blue-500 underline">http://www.apache.org/licenses/LICENSE-2.0</a>
|
||||
</p>
|
||||
<body class="bg-gray-50">
|
||||
<div class="max-w-4xl mx-auto px-4 py-8">
|
||||
<!-- 中文版本 -->
|
||||
<div class="mb-12">
|
||||
<h1 class="text-3xl font-bold mb-8 text-gray-900">许可协议</h1>
|
||||
|
||||
<p class="mb-6 text-gray-700">本项目采用<strong>区分用户的双重许可 (User-Segmented Dual Licensing)</strong> 模式。</p>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">核心原则</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li><strong>个人用户 和 10人及以下企业/组织:</strong> 默认适用 <strong>GNU Affero 通用公共许可证 v3.0 (AGPLv3)</strong>。</li>
|
||||
<li><strong>超过10人的企业/组织:</strong> <strong>必须</strong> 获取 <strong>商业许可证 (Commercial License)</strong>。</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">定义:"10人及以下"</h2>
|
||||
<p class="text-gray-700">
|
||||
指在您的组织(包括公司、非营利组织、政府机构、教育机构等任何实体)中,能够访问、使用或以任何方式直接或间接受益于本软件(Cherry
|
||||
Studio)功能的个人总数不超过10人。这包括但不限于开发者、测试人员、运营人员、最终用户、通过集成系统间接使用者等。
|
||||
</p>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">1. 开源许可证 (Open Source License): AGPLv3 - 适用于个人及10人及以下组织
|
||||
</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li>如果您是个人用户,或者您的组织满足上述"10人及以下"的定义,您可以在 <strong>AGPLv3</strong> 的条款下自由使用、修改和分发 Cherry Studio。AGPLv3 的完整文本可以访问
|
||||
<a href="https://www.gnu.org/licenses/agpl-3.0.html"
|
||||
class="text-blue-600 hover:underline">https://www.gnu.org/licenses/agpl-3.0.html</a> 获取。
|
||||
</li>
|
||||
<li><strong>核心义务:</strong> AGPLv3 的一个关键要求是,如果您修改了 Cherry Studio 并通过网络提供服务,或者分发了修改后的版本,您必须以 AGPLv3
|
||||
许可证向接收者提供相应的<strong>完整源代码</strong>。即使您符合"10人及以下"的标准,如果您希望避免此源代码公开义务,您也需要考虑获取商业许可证(见下文)。</li>
|
||||
<li>使用前请务必仔细阅读并理解 AGPLv3 的所有条款。</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">2. 商业许可证 (Commercial License) - 适用于超过10人的组织,或希望规避 AGPLv3
|
||||
义务的用户</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li><strong>强制要求:</strong>
|
||||
如果您的组织<strong>不</strong>满足上述"10人及以下"的定义(即有11人或更多人可以访问、使用或受益于本软件),您<strong>必须</strong>联系我们获取并签署一份商业许可证才能使用
|
||||
Cherry Studio。</li>
|
||||
<li><strong>自愿选择:</strong> 即使您的组织满足"10人及以下"的条件,但如果您的使用场景<strong>无法满足 AGPLv3
|
||||
的条款要求</strong>(特别是关于<strong>源代码公开</strong>的义务),或者您需要 AGPLv3 <strong>未提供</strong>的特定商业条款(如保证、赔偿、无 Copyleft
|
||||
限制等),您也<strong>必须</strong>联系我们获取并签署一份商业许可证。</li>
|
||||
<li><strong>需要商业许可证的常见情况包括(但不限于):</strong>
|
||||
<ul class="list-disc pl-6 mt-2 space-y-1">
|
||||
<li>您的组织规模超过10人。</li>
|
||||
<li>(无论组织规模)您希望分发修改过的 Cherry Studio 版本,但<strong>不希望</strong>根据 AGPLv3 公开您修改部分的源代码。</li>
|
||||
<li>(无论组织规模)您希望基于修改过的 Cherry Studio 提供网络服务(SaaS),但<strong>不希望</strong>根据 AGPLv3 向服务使用者提供修改后的源代码。</li>
|
||||
<li>(无论组织规模)您的公司政策、客户合同或项目要求不允许使用 AGPLv3 许可的软件,或要求闭源分发及保密。</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li><strong>获取商业许可:</strong> 请通过邮箱 <a href="mailto:bd@cherry-ai.com"
|
||||
class="text-blue-600 hover:underline">bd@cherry-ai.com</a> 联系 Cherry Studio 开发团队洽谈商业授权事宜。</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">3. 贡献 (Contributions)</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li>我们欢迎社区对 Cherry Studio 的贡献。所有向本项目提交的贡献都将被视为在 <strong>AGPLv3</strong> 许可证下提供。</li>
|
||||
<li>通过向本项目提交贡献(例如通过 Pull Request),即表示您同意您的代码以 AGPLv3 许可证授权给本项目及所有后续使用者(无论这些使用者最终遵循 AGPLv3 还是商业许可)。</li>
|
||||
<li>您也理解并同意,您的贡献可能会被包含在根据商业许可证分发的 Cherry Studio 版本中。</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">4. 其他条款 (Other Terms)</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li>关于商业许可证的具体条款和条件,以双方签署的正式商业许可协议为准。</li>
|
||||
<li>项目维护者保留根据需要更新本许可政策(包括用户规模定义和阈值)的权利。相关更新将通过项目官方渠道(如代码仓库、官方网站)进行通知。</li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
<h1 class="text-3xl font-bold mb-6 text-center">Cherry Studio License</h1>
|
||||
<div class="mb-8">
|
||||
<h2 class="text-2xl font-semibold mb-4">License Agreement</h2>
|
||||
<p class="mb-4">
|
||||
This software is licensed under the <strong>Apache License 2.0</strong>. In addition to the terms of the
|
||||
Apache License 2.0, the following additional terms apply to the use of Cherry Studio:
|
||||
</p>
|
||||
<h3 class="text-xl font-semibold mb-2">I. Commercial Use License</h3>
|
||||
<ol class="list-decimal list-inside mb-4">
|
||||
<li>
|
||||
<strong>Free Commercial Use</strong>: Users can use the software for commercial purposes without
|
||||
modifying
|
||||
the code.
|
||||
</li>
|
||||
<li>
|
||||
<strong>Commercial License Required</strong>: A commercial license is required if any of the
|
||||
following
|
||||
conditions are met:
|
||||
<ol class="list-decimal list-inside ml-4">
|
||||
<li>
|
||||
You modify, develop, or alter the software, including but not limited to changes to the
|
||||
application
|
||||
name, logo, code, or functionality.
|
||||
</li>
|
||||
<li>You provide multi-tenant services to enterprise customers with 10 or more users.</li>
|
||||
<li>
|
||||
You pre-install or integrate the software into hardware devices or products and bundle it
|
||||
for sale.
|
||||
</li>
|
||||
<li>
|
||||
You are engaging in large-scale procurement for government or educational institutions,
|
||||
especially
|
||||
involving security, data privacy, or other sensitive requirements.
|
||||
</li>
|
||||
</ol>
|
||||
</li>
|
||||
</ol>
|
||||
<h3 class="text-xl font-semibold mb-2">II. Contributor Agreement</h3>
|
||||
<ol class="list-decimal list-inside mb-4">
|
||||
<li>
|
||||
<strong>License Adjustment</strong>: The producer reserves the right to adjust the open-source
|
||||
license as
|
||||
needed, making it stricter or more lenient.
|
||||
</li>
|
||||
<li>
|
||||
<strong>Commercial Use</strong>: Any code you contribute may be used for commercial purposes,
|
||||
including but
|
||||
not limited to cloud business operations.
|
||||
</li>
|
||||
</ol>
|
||||
<h3 class="text-xl font-semibold mb-2">III. Other Terms</h3>
|
||||
<ol class="list-decimal list-inside mb-4">
|
||||
<li>The interpretation of these terms is subject to the discretion of Cherry Studio developers.</li>
|
||||
<li>These terms may be updated, and users will be notified through the software when changes occur.</li>
|
||||
</ol>
|
||||
<p class="mb-4">
|
||||
For any questions or to request a commercial license, please contact the Cherry Studio development team.
|
||||
</p>
|
||||
<p>
|
||||
Apart from the specific conditions mentioned above, all other rights and restrictions follow the Apache
|
||||
License 2.0. Detailed information about the Apache License 2.0 can be found at
|
||||
<a href="http://www.apache.org/licenses/LICENSE-2.0"
|
||||
class="text-blue-500 underline">http://www.apache.org/licenses/LICENSE-2.0</a>
|
||||
</p>
|
||||
|
||||
<hr class="my-12 border-gray-300">
|
||||
|
||||
<!-- English Version -->
|
||||
<div>
|
||||
<h1 class="text-3xl font-bold mb-8 text-gray-900">Licensing</h1>
|
||||
|
||||
<p class="mb-6 text-gray-700">This project employs a <strong>User-Segmented Dual Licensing</strong> model.</p>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">Core Principle</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li><strong>Individual Users and Organizations with 10 or Fewer Individuals:</strong> Governed by default
|
||||
under the <strong>GNU Affero General Public License v3.0 (AGPLv3)</strong>.</li>
|
||||
<li><strong>Organizations with More Than 10 Individuals:</strong> <strong>Must</strong> obtain a
|
||||
<strong>Commercial License</strong>.
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">Definition: "10 or Fewer Individuals"</h2>
|
||||
<p class="text-gray-700">
|
||||
Refers to any organization (including companies, non-profits, government agencies, educational institutions,
|
||||
etc.) where the total number of individuals who can access, use, or in any way directly or indirectly benefit
|
||||
from the functionality of this software (Cherry Studio) does not exceed 10. This includes, but is not limited
|
||||
to, developers, testers, operations staff, end-users, and indirect users via integrated systems.
|
||||
</p>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">1. Open Source License: AGPLv3 - For Individuals and
|
||||
Organizations of 10 or Fewer</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li>If you are an individual user, or if your organization meets the "10 or Fewer Individuals" definition
|
||||
above, you are free to use, modify, and distribute Cherry Studio under the terms of the
|
||||
<strong>AGPLv3</strong>. The full text of the AGPLv3 can be found at <a
|
||||
href="https://www.gnu.org/licenses/agpl-3.0.html"
|
||||
class="text-blue-600 hover:underline">https://www.gnu.org/licenses/agpl-3.0.html</a>.
|
||||
</li>
|
||||
<li><strong>Core Obligation:</strong> A key requirement of the AGPLv3 is that if you modify Cherry Studio and
|
||||
make it available over a network, or distribute the modified version, you must provide the <strong>complete
|
||||
corresponding source code</strong> under the AGPLv3 license to the recipients. Even if you qualify under
|
||||
the "10 or Fewer Individuals" rule, if you wish to avoid this source code disclosure obligation, you will
|
||||
need to obtain a Commercial License (see below).</li>
|
||||
<li>Please read and understand the full terms of the AGPLv3 carefully before use.</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">2. Commercial License - For Organizations with More Than 10
|
||||
Individuals, or Users Needing to Avoid AGPLv3 Obligations</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li><strong>Mandatory Requirement:</strong> If your organization does <strong>not</strong> meet the "10 or
|
||||
Fewer Individuals" definition above (i.e., 11 or more individuals can access, use, or benefit from the
|
||||
software), you <strong>must</strong> contact us to obtain and execute a Commercial License to use Cherry
|
||||
Studio.</li>
|
||||
<li><strong>Voluntary Option:</strong> Even if your organization meets the "10 or Fewer Individuals"
|
||||
condition, if your intended use case <strong>cannot comply with the terms of the AGPLv3</strong>
|
||||
(particularly the obligations regarding <strong>source code disclosure</strong>), or if you require specific
|
||||
commercial terms <strong>not offered</strong> by the AGPLv3 (such as warranties, indemnities, or freedom
|
||||
from copyleft restrictions), you also <strong>must</strong> contact us to obtain and execute a Commercial
|
||||
License.</li>
|
||||
<li><strong>Common scenarios requiring a Commercial License include (but are not limited to):</strong>
|
||||
<ul class="list-disc pl-6 mt-2 space-y-1">
|
||||
<li>Your organization has more than 10 individuals who can access, use, or benefit from the software.</li>
|
||||
<li>(Regardless of organization size) You wish to distribute a modified version of Cherry Studio but
|
||||
<strong>do not want</strong> to disclose the source code of your modifications under AGPLv3.
|
||||
</li>
|
||||
<li>(Regardless of organization size) You wish to provide a network service (SaaS) based on a modified
|
||||
version of Cherry Studio but <strong>do not want</strong> to provide the modified source code to users
|
||||
of the service under AGPLv3.</li>
|
||||
<li>(Regardless of organization size) Your corporate policies, client contracts, or project requirements
|
||||
prohibit the use of AGPLv3-licensed software or mandate closed-source distribution and confidentiality.
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li><strong>Obtaining a Commercial License:</strong> Please contact the Cherry Studio development team via
|
||||
email at <a href="mailto:bd@cherry-ai.com" class="text-blue-600 hover:underline">bd@cherry-ai.com</a> to
|
||||
discuss commercial licensing options.</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">3. Contributions</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li>We welcome community contributions to Cherry Studio. All contributions submitted to this project are
|
||||
considered to be offered under the <strong>AGPLv3</strong> license.</li>
|
||||
<li>By submitting a contribution to this project (e.g., via a Pull Request), you agree to license your code
|
||||
under the AGPLv3 to the project and all its downstream users (regardless of whether those users ultimately
|
||||
operate under AGPLv3 or a Commercial License).</li>
|
||||
<li>You also understand and agree that your contribution may be included in distributions of Cherry Studio
|
||||
offered under our commercial license.</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
<section class="mb-8">
|
||||
<h2 class="text-xl font-semibold mb-4 text-gray-900">4. Other Terms</h2>
|
||||
<ul class="list-disc pl-6 space-y-2 text-gray-700">
|
||||
<li>The specific terms and conditions of the Commercial License are governed by the formal commercial license
|
||||
agreement signed by both parties.</li>
|
||||
<li>The project maintainers reserve the right to update this licensing policy (including the definition and
|
||||
threshold for user count) as needed. Updates will be communicated through official project channels (e.g.,
|
||||
code repository, official website).</li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Github Releases Timeline</title>
|
||||
@@ -9,194 +8,201 @@
|
||||
<script src="https://unpkg.com/vue@3/dist/vue.global.prod.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/markdown-it@13.0.1/dist/markdown-it.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/@tailwindcss/typography@0.5.10/dist/typography.min.css"></script>
|
||||
</head>
|
||||
</head>
|
||||
|
||||
<body id="app">
|
||||
<body id="app">
|
||||
<div :class="isDark ? 'dark-bg' : 'bg'" class="min-h-screen">
|
||||
<div class="max-w-3xl mx-auto py-12 px-4">
|
||||
<h1 class="text-3xl font-bold mb-8" :class="isDark ? 'text-white' : 'text-gray-900'">Release Timeline</h1>
|
||||
<div class="max-w-3xl mx-auto py-12 px-4">
|
||||
<h1 class="text-3xl font-bold mb-8" :class="isDark ? 'text-white' : 'text-gray-900'">Release Timeline</h1>
|
||||
|
||||
<!-- Loading状态 -->
|
||||
<div v-if="loading" class="text-center py-8">
|
||||
<div class="inline-block animate-spin rounded-full h-8 w-8 border-4"
|
||||
:class="isDark ? 'border-gray-700 border-t-blue-500' : 'border-gray-300 border-t-blue-500'"></div>
|
||||
</div>
|
||||
|
||||
<!-- Error 状态 -->
|
||||
<div v-else-if="error" class="text-red-500 text-center py-8">{{ error }}</div>
|
||||
|
||||
<!-- Release 列表 -->
|
||||
<div v-else class="space-y-8">
|
||||
<div v-for="release in releases" :key="release.id" class="relative pl-8"
|
||||
:class="isDark ? 'border-l-2 border-gray-700' : 'border-l-2 border-gray-200'">
|
||||
<div class="absolute -left-2 top-0 w-4 h-4 rounded-full bg-green-500"></div>
|
||||
<div class="rounded-lg shadow-sm p-6 transition-shadow"
|
||||
:class="isDark ? 'bg-black hover:shadow-md hover:shadow-black' : 'bg-white hover:shadow-md'">
|
||||
<div class="flex items-start justify-between mb-4">
|
||||
<div>
|
||||
<h2 class="text-xl font-semibold" :class="isDark ? 'text-white' : 'text-gray-900'">
|
||||
{{ release.name || release.tag_name }}
|
||||
</h2>
|
||||
<p class="text-sm mt-1" :class="isDark ? 'text-gray-400' : 'text-gray-500'">
|
||||
{{ formatDate(release.published_at) }}
|
||||
</p>
|
||||
</div>
|
||||
<span class="inline-flex items-center px-3 py-1 rounded-full text-sm font-medium"
|
||||
:class="isDark ? 'bg-green-900 text-green-200' : 'bg-green-100 text-green-800'">
|
||||
{{ release.tag_name }}
|
||||
</span>
|
||||
</div>
|
||||
<div class="prose" :class="isDark ? 'text-gray-300 dark-prose' : 'text-gray-600'"
|
||||
v-html="renderMarkdown(release.body)"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- Loading状态 -->
|
||||
<div v-if="loading" class="text-center py-8">
|
||||
<div
|
||||
class="inline-block animate-spin rounded-full h-8 w-8 border-4"
|
||||
:class="isDark ? 'border-gray-700 border-t-blue-500' : 'border-gray-300 border-t-blue-500'"></div>
|
||||
</div>
|
||||
|
||||
<!-- Error 状态 -->
|
||||
<div v-else-if="error" class="text-red-500 text-center py-8">{{ error }}</div>
|
||||
|
||||
<!-- Release 列表 -->
|
||||
<div v-else class="space-y-8">
|
||||
<div
|
||||
v-for="release in releases"
|
||||
:key="release.id"
|
||||
class="relative pl-8"
|
||||
:class="isDark ? 'border-l-2 border-gray-700' : 'border-l-2 border-gray-200'">
|
||||
<div class="absolute -left-2 top-0 w-4 h-4 rounded-full bg-green-500"></div>
|
||||
<div
|
||||
class="rounded-lg shadow-sm p-6 transition-shadow"
|
||||
:class="isDark ? 'bg-black hover:shadow-md hover:shadow-black' : 'bg-white hover:shadow-md'">
|
||||
<div class="flex items-start justify-between mb-4">
|
||||
<div>
|
||||
<h2 class="text-xl font-semibold" :class="isDark ? 'text-white' : 'text-gray-900'">
|
||||
{{ release.name || release.tag_name }}
|
||||
</h2>
|
||||
<p class="text-sm mt-1" :class="isDark ? 'text-gray-400' : 'text-gray-500'">
|
||||
{{ formatDate(release.published_at) }}
|
||||
</p>
|
||||
</div>
|
||||
<span
|
||||
class="inline-flex items-center px-3 py-1 rounded-full text-sm font-medium"
|
||||
:class="isDark ? 'bg-green-900 text-green-200' : 'bg-green-100 text-green-800'">
|
||||
{{ release.tag_name }}
|
||||
</span>
|
||||
</div>
|
||||
<div
|
||||
class="prose"
|
||||
:class="isDark ? 'text-gray-300 dark-prose' : 'text-gray-600'"
|
||||
v-html="renderMarkdown(release.body)"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const md = window.markdownit({
|
||||
breaks: true,
|
||||
linkify: true
|
||||
})
|
||||
const md = window.markdownit({
|
||||
breaks: true,
|
||||
linkify: true
|
||||
})
|
||||
|
||||
const { createApp } = Vue
|
||||
const { createApp } = Vue
|
||||
|
||||
createApp({
|
||||
data() {
|
||||
return {
|
||||
releases: [],
|
||||
loading: true,
|
||||
error: null,
|
||||
isDark: false
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
async fetchReleases() {
|
||||
try {
|
||||
this.loading = true
|
||||
this.error = null
|
||||
const response = await fetch('https://api.github.com/repos/kangfenmao/cherry-studio/releases')
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch releases')
|
||||
}
|
||||
this.releases = await response.json()
|
||||
} catch (err) {
|
||||
this.error = 'Error loading releases: ' + err.message
|
||||
} finally {
|
||||
this.loading = false
|
||||
}
|
||||
},
|
||||
formatDate(dateString) {
|
||||
return new Date(dateString).toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'long',
|
||||
day: 'numeric'
|
||||
})
|
||||
},
|
||||
renderMarkdown(content) {
|
||||
if (!content) return ''
|
||||
return md.render(content)
|
||||
},
|
||||
initTheme() {
|
||||
// Read the theme setting from the URL query parameter
const url = new URL(window.location.href)
|
||||
const theme = url.searchParams.get('theme')
|
||||
this.isDark = theme === 'dark'
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
this.initTheme()
|
||||
this.fetchReleases()
|
||||
createApp({
|
||||
data() {
|
||||
return {
|
||||
releases: [],
|
||||
loading: true,
|
||||
error: null,
|
||||
isDark: false
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
async fetchReleases() {
|
||||
try {
|
||||
this.loading = true
|
||||
this.error = null
|
||||
const response = await fetch('https://api.github.com/repos/kangfenmao/cherry-studio/releases')
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch releases')
|
||||
}
|
||||
this.releases = await response.json()
|
||||
} catch (err) {
|
||||
this.error = 'Error loading releases: ' + err.message
|
||||
} finally {
|
||||
this.loading = false
|
||||
}
|
||||
}).mount('#app')
|
||||
},
|
||||
formatDate(dateString) {
|
||||
return new Date(dateString).toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'long',
|
||||
day: 'numeric'
|
||||
})
|
||||
},
|
||||
renderMarkdown(content) {
|
||||
if (!content) return ''
|
||||
return md.render(content)
|
||||
},
|
||||
initTheme() {
|
||||
// Read the theme setting from the URL query parameter
const url = new URL(window.location.href)
|
||||
const theme = url.searchParams.get('theme')
|
||||
this.isDark = theme === 'dark'
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
this.initTheme()
|
||||
this.fetchReleases()
|
||||
}
|
||||
}).mount('#app')
|
||||
</script>
|
||||
|
||||
<style>
|
||||
/* Base Markdown styles */
.prose {
|
||||
line-height: 1.6;
|
||||
}
|
||||
/* Base Markdown styles */
.prose {
|
||||
line-height: 1.6;
|
||||
}
|
||||
|
||||
.prose h1 {
|
||||
font-size: 1.5em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
.prose h1 {
|
||||
font-size: 1.5em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
.prose h2 {
|
||||
font-size: 1.3em;
|
||||
margin: 0.8em 0;
|
||||
}
|
||||
.prose h2 {
|
||||
font-size: 1.3em;
|
||||
margin: 0.8em 0;
|
||||
}
|
||||
|
||||
.prose h3 {
|
||||
font-size: 1.1em;
|
||||
margin: 0.6em 0;
|
||||
}
|
||||
.prose h3 {
|
||||
font-size: 1.1em;
|
||||
margin: 0.6em 0;
|
||||
}
|
||||
|
||||
.prose ul {
|
||||
list-style-type: disc;
|
||||
margin-left: 1.5em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
.prose ul {
|
||||
list-style-type: disc;
|
||||
margin-left: 1.5em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.prose ol {
|
||||
list-style-type: decimal;
|
||||
margin-left: 1.5em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
.prose ol {
|
||||
list-style-type: decimal;
|
||||
margin-left: 1.5em;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.prose code {
|
||||
padding: 0.2em 0.4em;
|
||||
border-radius: 0.2em;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
.prose code {
|
||||
padding: 0.2em 0.4em;
|
||||
border-radius: 0.2em;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
.dark .prose code {
|
||||
background-color: #1f2937;
|
||||
}
|
||||
.dark .prose code {
|
||||
background-color: #1f2937;
|
||||
}
|
||||
|
||||
.prose code {
|
||||
background-color: #f3f4f6;
|
||||
}
|
||||
.prose code {
|
||||
background-color: #f3f4f6;
|
||||
}
|
||||
|
||||
.prose pre code {
|
||||
display: block;
|
||||
padding: 1em;
|
||||
overflow-x: auto;
|
||||
}
|
||||
.prose pre code {
|
||||
display: block;
|
||||
padding: 1em;
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
.prose a {
|
||||
color: #3b82f6;
|
||||
text-decoration: underline;
|
||||
}
|
||||
.prose a {
|
||||
color: #3b82f6;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.dark .prose a {
|
||||
color: #60a5fa;
|
||||
}
|
||||
.dark .prose a {
|
||||
color: #60a5fa;
|
||||
}
|
||||
|
||||
.prose blockquote {
|
||||
border-left: 4px solid #e5e7eb;
|
||||
padding-left: 1em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
.prose blockquote {
|
||||
border-left: 4px solid #e5e7eb;
|
||||
padding-left: 1em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
.dark .prose blockquote {
|
||||
border-left-color: #374151;
|
||||
color: #9ca3af;
|
||||
}
|
||||
.dark .prose blockquote {
|
||||
border-left-color: #374151;
|
||||
color: #9ca3af;
|
||||
}
|
||||
|
||||
.dark .prose {
|
||||
color: #e5e7eb;
|
||||
}
|
||||
.dark .prose {
|
||||
color: #e5e7eb;
|
||||
}
|
||||
|
||||
.dark-bg {
|
||||
background-color: #151515;
|
||||
}
|
||||
.dark-bg {
|
||||
background-color: #151515;
|
||||
}
|
||||
|
||||
.bg {
|
||||
background-color: #f2f2f2;
|
||||
}
|
||||
.bg {
|
||||
background-color: #f2f2f2;
|
||||
}
|
||||
</style>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -1,68 +0,0 @@
|
||||
<head>
|
||||
<style>
|
||||
body {
|
||||
margin: 0;
|
||||
}
|
||||
</style>
|
||||
<script src="https://unpkg.com/3d-force-graph"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="3d-graph"></div>
|
||||
<script src="./js/bridge.js"></script>
|
||||
<script type="module">
|
||||
import { getQueryParam } from './js/utils.js'
|
||||
|
||||
const apiUrl = getQueryParam('apiUrl')
|
||||
const modelId = getQueryParam('modelId')
|
||||
const jsonUrl = `${apiUrl}/v1/global_graph/${modelId}`
|
||||
|
||||
const infoCard = document.createElement('div')
|
||||
infoCard.style.position = 'fixed'
|
||||
infoCard.style.backgroundColor = 'rgba(255, 255, 255, 0.9)'
|
||||
infoCard.style.padding = '8px'
|
||||
infoCard.style.borderRadius = '4px'
|
||||
infoCard.style.boxShadow = '0 2px 5px rgba(0,0,0,0.2)'
|
||||
infoCard.style.fontSize = '12px'
|
||||
infoCard.style.maxWidth = '200px'
|
||||
infoCard.style.display = 'none'
|
||||
infoCard.style.zIndex = '1000'
|
||||
document.body.appendChild(infoCard)
|
||||
|
||||
document.addEventListener('mousemove', (event) => {
|
||||
infoCard.style.left = `${event.clientX + 10}px`
|
||||
infoCard.style.top = `${event.clientY + 10}px`
|
||||
})
|
||||
|
||||
const elem = document.getElementById('3d-graph')
|
||||
const Graph = ForceGraph3D()(elem)
|
||||
.jsonUrl(jsonUrl)
|
||||
.nodeAutoColorBy((node) => node.properties.type || 'default')
|
||||
.nodeVal((node) => node.properties.degree)
|
||||
.linkWidth((link) => link.properties.weight)
|
||||
.onNodeHover((node) => {
|
||||
if (node) {
|
||||
infoCard.innerHTML = `
|
||||
<div style="font-weight: bold; margin-bottom: 4px; color: #333;">
|
||||
${node.properties.title}
|
||||
</div>
|
||||
<div style="color: #666;">
|
||||
${node.properties.description}
|
||||
</div>`
|
||||
infoCard.style.display = 'block'
|
||||
} else {
|
||||
infoCard.style.display = 'none'
|
||||
}
|
||||
})
|
||||
.onNodeClick((node) => {
|
||||
const url = `${apiUrl}/v1/references/${modelId}/entities/${node.properties.human_readable_id}`
|
||||
window.api.minApp({
|
||||
url,
|
||||
windowOptions: {
|
||||
title: node.properties.title,
|
||||
width: 500,
|
||||
height: 800
|
||||
}
|
||||
})
|
||||
})
|
||||
</script>
|
||||
</body>
|
||||
@@ -6,8 +6,8 @@ const AdmZip = require('adm-zip')
|
||||
const { downloadWithRedirects } = require('./download')
|
||||
|
||||
// Base URL for downloading bun binaries
|
||||
const BUN_RELEASE_BASE_URL = 'https://github.com/oven-sh/bun/releases/download'
|
||||
const DEFAULT_BUN_VERSION = '1.2.5' // Default fallback version
|
||||
const BUN_RELEASE_BASE_URL = 'https://gitcode.com/CherryHQ/bun/releases/download'
|
||||
const DEFAULT_BUN_VERSION = '1.2.9' // Default fallback version
|
||||
|
||||
// Mapping of platform+arch to binary package name
|
||||
const BUN_PACKAGES = {
|
||||
|
||||
@@ -7,8 +7,8 @@ const AdmZip = require('adm-zip')
|
||||
const { downloadWithRedirects } = require('./download')
|
||||
|
||||
// Base URL for downloading uv binaries
|
||||
const UV_RELEASE_BASE_URL = 'https://github.com/astral-sh/uv/releases/download'
|
||||
const DEFAULT_UV_VERSION = '0.6.6'
|
||||
const UV_RELEASE_BASE_URL = 'https://gitcode.com/CherryHQ/uv/releases/download'
|
||||
const DEFAULT_UV_VERSION = '0.6.14'
|
||||
|
||||
// Mapping of platform+arch to binary package name
|
||||
const UV_PACKAGES = {
|
||||
|
||||
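The bun and uv install scripts above switch their release base URLs from GitHub to a GitCode mirror and bump the default fallback versions. As a rough sketch of how such a script typically assembles the final download URL from these constants (the helper name and the archive naming scheme below are assumptions, not taken from the diff):

```ts
// Sketch only: assembling a release download URL from the constants above.
// The helper name and the archive naming scheme are assumptions.
const BUN_RELEASE_BASE_URL = 'https://gitcode.com/CherryHQ/bun/releases/download'

function getBunDownloadUrl(version: string, packageName: string): string {
  // e.g. .../releases/download/bun-v1.2.9/bun-darwin-aarch64.zip (illustrative)
  return `${BUN_RELEASE_BASE_URL}/bun-v${version}/${packageName}.zip`
}
```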
@@ -1,10 +1,8 @@
|
||||
const { Arch } = require('electron-builder')
|
||||
const { default: removeLocales } = require('./remove-locales')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
exports.default = async function (context) {
|
||||
await removeLocales(context)
|
||||
const platform = context.packager.platform.name
|
||||
const arch = context.arch
|
||||
|
||||
@@ -18,28 +16,48 @@ exports.default = async function (context) {
|
||||
'node_modules'
|
||||
)
|
||||
|
||||
removeDifferentArchNodeFiles(node_modules_path, '@libsql', arch === Arch.arm64 ? ['darwin-arm64'] : ['darwin-x64'])
|
||||
keepPackageNodeFiles(node_modules_path, '@libsql', arch === Arch.arm64 ? ['darwin-arm64'] : ['darwin-x64'])
|
||||
}
|
||||
|
||||
if (platform === 'linux') {
|
||||
const node_modules_path = path.join(context.appOutDir, 'resources', 'app.asar.unpacked', 'node_modules')
|
||||
const _arch = arch === Arch.arm64 ? ['linux-arm64-gnu', 'linux-arm64-musl'] : ['linux-x64-gnu', 'linux-x64-musl']
|
||||
removeDifferentArchNodeFiles(node_modules_path, '@libsql', _arch)
|
||||
keepPackageNodeFiles(node_modules_path, '@libsql', _arch)
|
||||
}
|
||||
|
||||
if (platform === 'windows') {
|
||||
const node_modules_path = path.join(context.appOutDir, 'resources', 'app.asar.unpacked', 'node_modules')
|
||||
removeDifferentArchNodeFiles(node_modules_path, '@libsql', ['win32-x64-msvc'])
|
||||
if (arch === Arch.arm64) {
|
||||
keepPackageNodeFiles(node_modules_path, '@strongtz', ['win32-arm64-msvc'])
|
||||
keepPackageNodeFiles(node_modules_path, '@libsql', ['win32-arm64-msvc'])
|
||||
}
|
||||
if (arch === Arch.x64) {
|
||||
keepPackageNodeFiles(node_modules_path, '@strongtz', ['win32-x64-msvc'])
|
||||
keepPackageNodeFiles(node_modules_path, '@libsql', ['win32-x64-msvc'])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function removeDifferentArchNodeFiles(nodeModulesPath, packageName, arch) {
|
||||
/**
 * Keep only the node_modules files for the specified architectures
 * @param {*} nodeModulesPath
 * @param {*} packageName
 * @param {*} arch
 * @returns
 */
function keepPackageNodeFiles(nodeModulesPath, packageName, arch) {
const modulePath = path.join(nodeModulesPath, packageName)
|
||||
|
||||
if (!fs.existsSync(modulePath)) {
|
||||
console.log(`[After Pack] Directory does not exist: ${modulePath}`)
|
||||
return
|
||||
}
|
||||
|
||||
const dirs = fs.readdirSync(modulePath)
|
||||
dirs
|
||||
.filter((dir) => !arch.includes(dir))
|
||||
.forEach((dir) => {
|
||||
fs.rmSync(path.join(modulePath, dir), { recursive: true, force: true })
|
||||
console.log(`Removed dir: ${dir}`, arch)
|
||||
console.log(`[After Pack] Removed dir: ${dir}`, arch)
|
||||
})
|
||||
}
|
||||
|
||||
scripts/artifact-build-completed.js (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
const fs = require('fs')
|
||||
|
||||
exports.default = function (buildResult) {
|
||||
try {
|
||||
console.log('[artifact build completed] rename artifact file...')
|
||||
if (!buildResult.file.includes(' ')) {
|
||||
return
|
||||
}
|
||||
|
||||
let oldFilePath = buildResult.file
|
||||
if (oldFilePath.includes('-portable') && !oldFilePath.includes('-x64') && !oldFilePath.includes('-arm64')) {
|
||||
console.log('[artifact build completed] delete portable file:', oldFilePath)
|
||||
fs.unlinkSync(oldFilePath)
|
||||
return
|
||||
}
|
||||
const newfilePath = oldFilePath.replace(/ /g, '-')
|
||||
fs.renameSync(oldFilePath, newfilePath)
|
||||
buildResult.file = newfilePath
|
||||
console.log(`[artifact build completed] rename file ${oldFilePath} to ${newfilePath} `)
|
||||
} catch (error) {
|
||||
console.error('Error renaming file:', error)
|
||||
}
|
||||
}
|
||||
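artifact-build-completed.js is written as an electron-builder hook: it deletes the arch-less portable artifact and renames any artifact whose file name contains spaces. A minimal sketch of how such a hook might be referenced from the builder configuration, assuming a JS/TS config file is used (the project may wire it through electron-builder.yml instead):

```ts
// Hypothetical electron-builder wiring for the hook above; the surrounding
// config shape is an assumption, only the script path comes from the diff.
const builderConfig = {
  artifactBuildCompleted: 'scripts/artifact-build-completed.js'
}

export default builderConfig
```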
@@ -33,6 +33,10 @@ async function downloadNpm(platform) {
|
||||
'@libsql/win32-x64-msvc',
|
||||
'https://registry.npmjs.org/@libsql/win32-x64-msvc/-/win32-x64-msvc-0.4.7.tgz'
|
||||
)
|
||||
downloadNpmPackage(
|
||||
'@strongtz/win32-arm64-msvc',
|
||||
'https://registry.npmjs.org/@strongtz/win32-arm64-msvc/-/win32-arm64-msvc-0.4.7.tgz'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
exports.default = async function (context) {
|
||||
const platform = context.packager.platform.name
|
||||
|
||||
// Determine the locales directory locations based on the platform
let resourceDirs = []
|
||||
if (platform === 'mac') {
|
||||
// Locale file locations on macOS
resourceDirs = [
|
||||
path.join(context.appOutDir, 'Cherry Studio.app', 'Contents', 'Resources'),
|
||||
path.join(
|
||||
context.appOutDir,
|
||||
'Cherry Studio.app',
|
||||
'Contents',
|
||||
'Frameworks',
|
||||
'Electron Framework.framework',
|
||||
'Resources'
|
||||
)
|
||||
]
|
||||
} else {
|
||||
// Locale file locations on Windows and Linux
resourceDirs = [path.join(context.appOutDir, 'locales')]
|
||||
}
|
||||
|
||||
// Process each resource directory
for (const resourceDir of resourceDirs) {
|
||||
if (!fs.existsSync(resourceDir)) {
|
||||
console.log(`Resource directory not found: ${resourceDir}, skipping...`)
|
||||
continue
|
||||
}
|
||||
|
||||
// Read all files and directories
const items = fs.readdirSync(resourceDir)
|
||||
|
||||
// Iterate and remove unneeded locale files
for (const item of items) {
|
||||
if (platform === 'mac') {
|
||||
// On macOS, check .lproj directories
if (item.endsWith('.lproj') && !item.match(/^(en|zh|ru)/)) {
|
||||
const dirPath = path.join(resourceDir, item)
|
||||
fs.rmSync(dirPath, { recursive: true, force: true })
|
||||
console.log(`Removed locale directory: ${item} from ${resourceDir}`)
|
||||
}
|
||||
} else {
|
||||
// On other platforms, handle .pak files
if (!item.match(/^(en|zh|ru)/)) {
|
||||
const filePath = path.join(resourceDir, item)
|
||||
fs.unlinkSync(filePath)
|
||||
console.log(`Removed locale file: ${item} from ${resourceDir}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Locale cleanup completed!')
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
// replaceSpaces.js
|
||||
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
const directory = 'dist'
|
||||
|
||||
// Replace spaces in file names
function replaceFileNames() {
|
||||
fs.readdir(directory, (err, files) => {
|
||||
if (err) throw err
|
||||
|
||||
files.forEach((file) => {
|
||||
const oldPath = path.join(directory, file)
|
||||
const newPath = path.join(directory, file.replace(/ /g, '-'))
|
||||
|
||||
fs.stat(oldPath, (err, stats) => {
|
||||
if (err) throw err
|
||||
|
||||
if (stats.isFile() && oldPath !== newPath) {
|
||||
fs.rename(oldPath, newPath, (err) => {
|
||||
if (err) throw err
|
||||
console.log(`Renamed: ${oldPath} -> ${newPath}`)
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function replaceYmlContent() {
|
||||
fs.readdir(directory, (err, files) => {
|
||||
if (err) throw err
|
||||
|
||||
files.forEach((file) => {
|
||||
if (path.extname(file).toLowerCase() === '.yml') {
|
||||
const filePath = path.join(directory, file)
|
||||
|
||||
fs.readFile(filePath, 'utf8', (err, data) => {
|
||||
if (err) throw err
|
||||
|
||||
// Replace the content
const newContent = data.replace(/Cherry Studio-/g, 'Cherry-Studio-')
|
||||
|
||||
// Write the file back
fs.writeFile(filePath, newContent, 'utf8', (err) => {
|
||||
if (err) throw err
|
||||
console.log(`Updated content in: ${filePath}`)
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// Run both operations
replaceFileNames()
|
||||
replaceYmlContent()
|
||||
@@ -22,7 +22,8 @@ function downloadNpmPackage(packageName, url) {
|
||||
console.log(`Extracting ${filename}...`)
|
||||
execSync(`tar -xvf ${filename}`)
|
||||
execSync(`rm -rf ${filename}`)
|
||||
execSync(`mv package ${targetDir}`)
|
||||
execSync(`mkdir -p ${targetDir}`)
|
||||
execSync(`mv package/* ${targetDir}/`)
|
||||
} catch (error) {
|
||||
console.error(`Error processing ${packageName}: ${error.message}`)
|
||||
if (fs.existsSync(filename)) {
|
||||
|
||||
@@ -2,3 +2,4 @@ export const isMac = process.platform === 'darwin'
|
||||
export const isWin = process.platform === 'win32'
|
||||
export const isLinux = process.platform === 'linux'
|
||||
export const isDev = process.env.NODE_ENV === 'development'
|
||||
export const isPortable = isWin && 'PORTABLE_EXECUTABLE_DIR' in process.env
|
||||
|
||||
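The new isPortable constant flags the portable Windows build via the PORTABLE_EXECUTABLE_DIR environment variable that electron-builder's portable target sets. A minimal sketch of how such a flag could feed a setUserDataDir-style helper; the function and the 'data' folder name are illustrative, not the project's actual implementation:

```ts
import path from 'node:path'

import { app } from 'electron'

import { isPortable } from '@main/constant'

// Illustrative only: keep user data next to the portable executable so the
// portable build leaves no traces elsewhere. The 'data' folder name is an assumption.
export function setUserDataDirSketch() {
  if (isPortable) {
    const portableDir = process.env.PORTABLE_EXECUTABLE_DIR as string
    app.setPath('userData', path.join(portableDir, 'data'))
  }
}
```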
@@ -11,6 +11,7 @@ export default class VoyageEmbeddings extends BaseEmbeddings {
|
||||
if (!this.configuration.outputDimension) {
|
||||
throw new Error('You need to pass in the optional dimensions parameter for this model')
|
||||
}
|
||||
console.log('VoyageEmbeddings', this.configuration)
|
||||
this.model = new _VoyageEmbeddings(this.configuration)
|
||||
}
|
||||
override async getDimensions(): Promise<number> {
|
||||
|
||||
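The added guard makes outputDimension mandatory before the underlying Voyage client is constructed. A minimal sketch of a configuration object that would satisfy it; apart from outputDimension, the field names and values are assumptions:

```ts
// Illustrative only: outputDimension is the field the new guard checks;
// the other fields and values are assumptions.
const voyageConfiguration = {
  apiKey: process.env.VOYAGE_API_KEY,
  outputDimension: 1024
}
```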
@@ -1,14 +1,23 @@
|
||||
import { electronApp, optimizer } from '@electron-toolkit/utils'
|
||||
import { replaceDevtoolsFont } from '@main/utils/windowUtil'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { app, ipcMain } from 'electron'
|
||||
import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer'
|
||||
import Logger from 'electron-log'
|
||||
|
||||
import { registerIpc } from './ipc'
|
||||
import { configManager } from './services/ConfigManager'
|
||||
import { CHERRY_STUDIO_PROTOCOL, handleProtocolUrl, registerProtocolClient } from './services/ProtocolClient'
|
||||
import mcpService from './services/MCPService'
|
||||
import {
|
||||
CHERRY_STUDIO_PROTOCOL,
|
||||
handleProtocolUrl,
|
||||
registerProtocolClient,
|
||||
setupAppImageDeepLink
|
||||
} from './services/ProtocolClient'
|
||||
import { registerShortcuts } from './services/ShortcutService'
|
||||
import { TrayService } from './services/TrayService'
|
||||
import { windowService } from './services/WindowService'
|
||||
import { setUserDataDir } from './utils/file'
|
||||
|
||||
// Check for single instance lock
|
||||
if (!app.requestSingleInstanceLock()) {
|
||||
@@ -47,22 +56,23 @@ if (!app.requestSingleInstanceLock()) {
|
||||
|
||||
replaceDevtoolsFont(mainWindow)
|
||||
|
||||
setUserDataDir()
|
||||
|
||||
// Setup deep link for AppImage on Linux
|
||||
await setupAppImageDeepLink()
|
||||
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
installExtension([REDUX_DEVTOOLS, REACT_DEVELOPER_TOOLS])
|
||||
.then((name) => console.log(`Added Extension: ${name}`))
|
||||
.catch((err) => console.log('An error occurred: ', err))
|
||||
}
|
||||
ipcMain.handle('system:getDeviceType', () => {
|
||||
ipcMain.handle(IpcChannel.System_GetDeviceType, () => {
|
||||
return process.platform === 'darwin' ? 'mac' : process.platform === 'win32' ? 'windows' : 'linux'
|
||||
})
|
||||
})
|
||||
|
||||
registerProtocolClient(app)
|
||||
|
||||
// macOS specific: handle protocol when app is already running
|
||||
app.on('open-url', (event, url) => {
|
||||
event.preventDefault()
|
||||
handleProtocolUrl(url)
|
||||
ipcMain.handle(IpcChannel.System_GetHostname, () => {
|
||||
return require('os').hostname()
|
||||
})
|
||||
})
|
||||
|
||||
registerProtocolClient(app)
|
||||
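These handlers are part of the migration from string channel names to the shared IpcChannel enum, which keeps the main and renderer processes in sync at compile time. A hedged sketch of the matching call from the renderer/preload side (the real app goes through a preload bridge rather than calling ipcRenderer directly):

```ts
import { ipcRenderer } from 'electron'

import { IpcChannel } from '@shared/IpcChannel'

// Illustrative only: invoke the same channels the main process registers above.
async function readSystemInfoSketch() {
  const deviceType: 'mac' | 'windows' | 'linux' = await ipcRenderer.invoke(IpcChannel.System_GetDeviceType)
  const hostname: string = await ipcRenderer.invoke(IpcChannel.System_GetHostname)
  return { deviceType, hostname }
}
```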
@@ -91,6 +101,15 @@ if (!app.requestSingleInstanceLock()) {
|
||||
app.isQuitting = true
|
||||
})
|
||||
|
||||
app.on('will-quit', async () => {
|
||||
// event.preventDefault()
|
||||
try {
|
||||
await mcpService.cleanup()
|
||||
} catch (error) {
|
||||
Logger.error('Error cleaning up MCP service:', error)
|
||||
}
|
||||
})
|
||||
|
||||
// In this file you can include the rest of your app's specific main process
// code. You can also put them in separate files and require them here.
}
|
||||
|
||||
src/main/integration/nutstore/sso/lib/index.d.mts (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
interface CreateOAuthUrlArgs {
|
||||
app: string;
|
||||
}
|
||||
declare function createOAuthUrl({ app }: CreateOAuthUrlArgs): Promise<string>;
|
||||
declare function _dont_use_in_prod_createOAuthUrl({ app, }: CreateOAuthUrlArgs): Promise<string>;
|
||||
|
||||
interface DecryptSecretArgs {
|
||||
app: string;
|
||||
s: string;
|
||||
}
|
||||
declare function decryptSecret({ app, s }: DecryptSecretArgs): Promise<string>;
|
||||
declare function _dont_use_in_prod_decryptSecret({ app, s, }: DecryptSecretArgs): Promise<string>;
|
||||
|
||||
export { type CreateOAuthUrlArgs, type DecryptSecretArgs, _dont_use_in_prod_createOAuthUrl, _dont_use_in_prod_decryptSecret, createOAuthUrl, decryptSecret };
|
||||
@@ -1,8 +0,0 @@
|
||||
declare function decrypt(app: string, s: string): string;
|
||||
|
||||
interface Secret {
|
||||
app: string;
|
||||
}
|
||||
declare function createOAuthUrl(secret: Secret): string;
|
||||
|
||||
export { type Secret, createOAuthUrl, decrypt };
|
||||
src/main/integration/nutstore/sso/lib/index.mjs (new file, 1 line)
src/main/ipc.ts (267 lines changed)
@@ -1,9 +1,11 @@
|
||||
import fs from 'node:fs'
|
||||
import { arch } from 'node:os'
|
||||
|
||||
import { isMac, isWin } from '@main/constant'
|
||||
import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { Shortcut, ThemeMode } from '@types'
|
||||
import { BrowserWindow, ipcMain, session, shell } from 'electron'
|
||||
import { BrowserWindow, ipcMain, nativeTheme, session, shell } from 'electron'
|
||||
import log from 'electron-log'
|
||||
|
||||
import { titleBarOverlayDark, titleBarOverlayLight } from './config'
|
||||
@@ -20,12 +22,14 @@ import mcpService from './services/MCPService'
|
||||
import * as NutstoreService from './services/NutstoreService'
|
||||
import ObsidianVaultService from './services/ObsidianVaultService'
|
||||
import { ProxyConfig, proxyManager } from './services/ProxyManager'
|
||||
import { searchService } from './services/SearchService'
|
||||
import { registerShortcuts, unregisterAllShortcuts } from './services/ShortcutService'
|
||||
import { TrayService } from './services/TrayService'
|
||||
import { setOpenLinkExternal } from './services/WebviewService'
|
||||
import { windowService } from './services/WindowService'
|
||||
import { getResourcePath } from './utils'
|
||||
import { decrypt, encrypt } from './utils/aes'
|
||||
import { getFilesDir } from './utils/file'
|
||||
import { getConfigDir, getFilesDir } from './utils/file'
|
||||
import { compress, decompress } from './utils/zip'
|
||||
|
||||
const fileManager = new FileStorage()
|
||||
@@ -36,17 +40,20 @@ const obsidianVaultService = new ObsidianVaultService()
|
||||
export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
const appUpdater = new AppUpdater(mainWindow)
|
||||
|
||||
ipcMain.handle('app:info', () => ({
|
||||
ipcMain.handle(IpcChannel.App_Info, () => ({
|
||||
version: app.getVersion(),
|
||||
isPackaged: app.isPackaged,
|
||||
appPath: app.getAppPath(),
|
||||
filesPath: getFilesDir(),
|
||||
configPath: getConfigDir(),
|
||||
appDataPath: app.getPath('userData'),
|
||||
resourcesPath: getResourcePath(),
|
||||
logsPath: log.transports.file.getFile().path
|
||||
logsPath: log.transports.file.getFile().path,
|
||||
arch: arch(),
|
||||
isPortable: isWin && 'PORTABLE_EXECUTABLE_DIR' in process.env
|
||||
}))
|
||||
|
||||
ipcMain.handle('app:proxy', async (_, proxy: string) => {
|
||||
ipcMain.handle(IpcChannel.App_Proxy, async (_, proxy: string) => {
|
||||
let proxyConfig: ProxyConfig
|
||||
|
||||
if (proxy === 'system') {
|
||||
@@ -60,19 +67,19 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
await proxyManager.configureProxy(proxyConfig)
|
||||
})
|
||||
|
||||
ipcMain.handle('app:reload', () => mainWindow.reload())
|
||||
ipcMain.handle('open:website', (_, url: string) => shell.openExternal(url))
|
||||
ipcMain.handle(IpcChannel.App_Reload, () => mainWindow.reload())
|
||||
ipcMain.handle(IpcChannel.Open_Website, (_, url: string) => shell.openExternal(url))
|
||||
|
||||
// Update
|
||||
ipcMain.handle('app:show-update-dialog', () => appUpdater.showUpdateDialog(mainWindow))
|
||||
ipcMain.handle(IpcChannel.App_ShowUpdateDialog, () => appUpdater.showUpdateDialog(mainWindow))
|
||||
|
||||
// language
|
||||
ipcMain.handle('app:set-language', (_, language) => {
|
||||
ipcMain.handle(IpcChannel.App_SetLanguage, (_, language) => {
|
||||
configManager.setLanguage(language)
|
||||
})
|
||||
|
||||
// launch on boot
|
||||
ipcMain.handle('app:set-launch-on-boot', (_, openAtLogin: boolean) => {
|
||||
ipcMain.handle(IpcChannel.App_SetLaunchOnBoot, (_, openAtLogin: boolean) => {
|
||||
// Set login item settings for windows and mac
|
||||
// linux is not supported because it requires more file operations
|
||||
if (isWin || isMac) {
|
||||
@@ -81,52 +88,77 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
})
|
||||
|
||||
// launch to tray
|
||||
ipcMain.handle('app:set-launch-to-tray', (_, isActive: boolean) => {
|
||||
ipcMain.handle(IpcChannel.App_SetLaunchToTray, (_, isActive: boolean) => {
|
||||
configManager.setLaunchToTray(isActive)
|
||||
})
|
||||
|
||||
// tray
|
||||
ipcMain.handle('app:set-tray', (_, isActive: boolean) => {
|
||||
ipcMain.handle(IpcChannel.App_SetTray, (_, isActive: boolean) => {
|
||||
configManager.setTray(isActive)
|
||||
})
|
||||
|
||||
// to tray on close
|
||||
ipcMain.handle('app:set-tray-on-close', (_, isActive: boolean) => {
|
||||
ipcMain.handle(IpcChannel.App_SetTrayOnClose, (_, isActive: boolean) => {
|
||||
configManager.setTrayOnClose(isActive)
|
||||
})
|
||||
|
||||
ipcMain.handle('app:restart-tray', () => TrayService.getInstance().restartTray())
|
||||
// auto update
|
||||
ipcMain.handle(IpcChannel.App_SetAutoUpdate, (_, isActive: boolean) => {
|
||||
appUpdater.setAutoUpdate(isActive)
|
||||
configManager.setAutoUpdate(isActive)
|
||||
})
|
||||
|
||||
ipcMain.handle('config:set', (_, key: string, value: any) => {
|
||||
ipcMain.handle(IpcChannel.App_RestartTray, () => TrayService.getInstance().restartTray())
|
||||
|
||||
ipcMain.handle(IpcChannel.Config_Set, (_, key: string, value: any) => {
|
||||
configManager.set(key, value)
|
||||
})
|
||||
|
||||
ipcMain.handle('config:get', (_, key: string) => {
|
||||
ipcMain.handle(IpcChannel.Config_Get, (_, key: string) => {
|
||||
return configManager.get(key)
|
||||
})
|
||||
|
||||
// theme
|
||||
ipcMain.handle('app:set-theme', (event, theme: ThemeMode) => {
|
||||
if (theme === configManager.getTheme()) return
|
||||
ipcMain.handle(IpcChannel.App_SetTheme, (_, theme: ThemeMode) => {
|
||||
const notifyThemeChange = () => {
|
||||
const windows = BrowserWindow.getAllWindows()
|
||||
windows.forEach((win) =>
|
||||
win.webContents.send(IpcChannel.ThemeChange, nativeTheme.shouldUseDarkColors ? ThemeMode.dark : ThemeMode.light)
|
||||
)
|
||||
}
|
||||
|
||||
if (theme === ThemeMode.auto) {
|
||||
nativeTheme.themeSource = 'system'
|
||||
nativeTheme.on('updated', notifyThemeChange)
|
||||
} else {
|
||||
nativeTheme.themeSource = theme
|
||||
nativeTheme.removeAllListeners('updated')
|
||||
}
|
||||
|
||||
mainWindow?.setTitleBarOverlay &&
|
||||
mainWindow.setTitleBarOverlay(nativeTheme.shouldUseDarkColors ? titleBarOverlayDark : titleBarOverlayLight)
|
||||
configManager.setTheme(theme)
|
||||
notifyThemeChange()
|
||||
})
|
||||
|
||||
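The reworked App_SetTheme handler now drives Electron's nativeTheme and broadcasts IpcChannel.ThemeChange to every window whenever the effective theme changes, including while following the system in auto mode. A hedged sketch of the renderer-side listener; the attribute it toggles is illustrative:

```ts
import { ipcRenderer } from 'electron'

import { IpcChannel } from '@shared/IpcChannel'
import { ThemeMode } from '@types'

// Illustrative listener; in the app this would sit behind the preload bridge,
// and the 'theme-mode' attribute name is an assumption.
ipcRenderer.on(IpcChannel.ThemeChange, (_event, theme: ThemeMode) => {
  document.body.setAttribute('theme-mode', theme === ThemeMode.dark ? 'dark' : 'light')
})
```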
// should sync theme change to all windows
|
||||
// custom css
|
||||
ipcMain.handle(IpcChannel.App_SetCustomCss, (event, css: string) => {
|
||||
if (css === configManager.getCustomCss()) return
|
||||
configManager.setCustomCss(css)
|
||||
|
||||
// Broadcast to all windows including the mini window
|
||||
const senderWindowId = event.sender.id
|
||||
const windows = BrowserWindow.getAllWindows()
|
||||
// Broadcast the change to the other windows
windows.forEach((win) => {
|
||||
if (win.webContents.id !== senderWindowId) {
|
||||
win.webContents.send('theme:change', theme)
|
||||
win.webContents.send('custom-css:update', css)
|
||||
}
|
||||
})
|
||||
|
||||
mainWindow?.setTitleBarOverlay &&
|
||||
mainWindow.setTitleBarOverlay(theme === 'dark' ? titleBarOverlayDark : titleBarOverlayLight)
|
||||
})
|
||||
|
||||
// clear cache
|
||||
ipcMain.handle('app:clear-cache', async () => {
|
||||
ipcMain.handle(IpcChannel.App_ClearCache, async () => {
|
||||
const sessions = [session.defaultSession, session.fromPartition('persist:webview')]
|
||||
|
||||
try {
|
||||
@@ -148,71 +180,56 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
})
|
||||
|
||||
// check for update
|
||||
ipcMain.handle('app:check-for-update', async () => {
|
||||
const update = await appUpdater.autoUpdater.checkForUpdates()
|
||||
return {
|
||||
currentVersion: appUpdater.autoUpdater.currentVersion,
|
||||
updateInfo: update?.updateInfo
|
||||
}
|
||||
ipcMain.handle(IpcChannel.App_CheckForUpdate, async () => {
|
||||
await appUpdater.checkForUpdates()
|
||||
})
|
||||
|
||||
// zip
|
||||
ipcMain.handle('zip:compress', (_, text: string) => compress(text))
|
||||
ipcMain.handle('zip:decompress', (_, text: Buffer) => decompress(text))
|
||||
ipcMain.handle(IpcChannel.Zip_Compress, (_, text: string) => compress(text))
|
||||
ipcMain.handle(IpcChannel.Zip_Decompress, (_, text: Buffer) => decompress(text))
|
||||
|
||||
// backup
|
||||
ipcMain.handle('backup:backup', backupManager.backup)
|
||||
ipcMain.handle('backup:restore', backupManager.restore)
|
||||
ipcMain.handle('backup:backupToWebdav', backupManager.backupToWebdav)
|
||||
ipcMain.handle('backup:restoreFromWebdav', backupManager.restoreFromWebdav)
|
||||
ipcMain.handle('backup:listWebdavFiles', backupManager.listWebdavFiles)
|
||||
ipcMain.handle('backup:checkConnection', backupManager.checkConnection)
|
||||
ipcMain.handle('backup:createDirectory', backupManager.createDirectory)
|
||||
ipcMain.handle(IpcChannel.Backup_Backup, backupManager.backup)
|
||||
ipcMain.handle(IpcChannel.Backup_Restore, backupManager.restore)
|
||||
ipcMain.handle(IpcChannel.Backup_BackupToWebdav, backupManager.backupToWebdav)
|
||||
ipcMain.handle(IpcChannel.Backup_RestoreFromWebdav, backupManager.restoreFromWebdav)
|
||||
ipcMain.handle(IpcChannel.Backup_ListWebdavFiles, backupManager.listWebdavFiles)
|
||||
ipcMain.handle(IpcChannel.Backup_CheckConnection, backupManager.checkConnection)
|
||||
ipcMain.handle(IpcChannel.Backup_CreateDirectory, backupManager.createDirectory)
|
||||
ipcMain.handle(IpcChannel.Backup_DeleteWebdavFile, backupManager.deleteWebdavFile)
|
||||
|
||||
// file
|
||||
ipcMain.handle('file:open', fileManager.open)
|
||||
ipcMain.handle('file:openPath', fileManager.openPath)
|
||||
ipcMain.handle('file:save', fileManager.save)
|
||||
ipcMain.handle('file:select', fileManager.selectFile)
|
||||
ipcMain.handle('file:upload', fileManager.uploadFile)
|
||||
ipcMain.handle('file:clear', fileManager.clear)
|
||||
ipcMain.handle('file:read', fileManager.readFile)
|
||||
ipcMain.handle('file:delete', fileManager.deleteFile)
|
||||
ipcMain.handle('file:get', fileManager.getFile)
|
||||
ipcMain.handle('file:selectFolder', fileManager.selectFolder)
|
||||
ipcMain.handle('file:create', fileManager.createTempFile)
|
||||
ipcMain.handle('file:write', fileManager.writeFile)
|
||||
ipcMain.handle('file:saveImage', fileManager.saveImage)
|
||||
ipcMain.handle('file:base64Image', fileManager.base64Image)
|
||||
ipcMain.handle('file:download', fileManager.downloadFile)
|
||||
ipcMain.handle('file:copy', fileManager.copyFile)
|
||||
ipcMain.handle('file:binaryFile', fileManager.binaryFile)
|
||||
ipcMain.handle(IpcChannel.File_Open, fileManager.open)
|
||||
ipcMain.handle(IpcChannel.File_OpenPath, fileManager.openPath)
|
||||
ipcMain.handle(IpcChannel.File_Save, fileManager.save)
|
||||
ipcMain.handle(IpcChannel.File_Select, fileManager.selectFile)
|
||||
ipcMain.handle(IpcChannel.File_Upload, fileManager.uploadFile)
|
||||
ipcMain.handle(IpcChannel.File_Clear, fileManager.clear)
|
||||
ipcMain.handle(IpcChannel.File_Read, fileManager.readFile)
|
||||
ipcMain.handle(IpcChannel.File_Delete, fileManager.deleteFile)
|
||||
ipcMain.handle(IpcChannel.File_Get, fileManager.getFile)
|
||||
ipcMain.handle(IpcChannel.File_SelectFolder, fileManager.selectFolder)
|
||||
ipcMain.handle(IpcChannel.File_Create, fileManager.createTempFile)
|
||||
ipcMain.handle(IpcChannel.File_Write, fileManager.writeFile)
|
||||
ipcMain.handle(IpcChannel.File_SaveImage, fileManager.saveImage)
|
||||
ipcMain.handle(IpcChannel.File_Base64Image, fileManager.base64Image)
|
||||
ipcMain.handle(IpcChannel.File_Download, fileManager.downloadFile)
|
||||
ipcMain.handle(IpcChannel.File_Copy, fileManager.copyFile)
|
||||
ipcMain.handle(IpcChannel.File_BinaryFile, fileManager.binaryFile)
|
||||
|
||||
// fs
|
||||
ipcMain.handle('fs:read', FileService.readFile)
|
||||
|
||||
// minapp
|
||||
ipcMain.handle('minapp', (_, args) => {
|
||||
windowService.createMinappWindow({
|
||||
url: args.url,
|
||||
parent: mainWindow,
|
||||
windowOptions: {
|
||||
...mainWindow.getBounds(),
|
||||
...args.windowOptions
|
||||
}
|
||||
})
|
||||
})
|
||||
ipcMain.handle(IpcChannel.Fs_Read, FileService.readFile)
|
||||
|
||||
// export
|
||||
ipcMain.handle('export:word', exportService.exportToWord)
|
||||
ipcMain.handle(IpcChannel.Export_Word, exportService.exportToWord)
|
||||
|
||||
// open path
|
||||
ipcMain.handle('open:path', async (_, path: string) => {
|
||||
ipcMain.handle(IpcChannel.Open_Path, async (_, path: string) => {
|
||||
await shell.openPath(path)
|
||||
})
|
||||
|
||||
// shortcuts
|
||||
ipcMain.handle('shortcuts:update', (_, shortcuts: Shortcut[]) => {
|
||||
ipcMain.handle(IpcChannel.Shortcuts_Update, (_, shortcuts: Shortcut[]) => {
|
||||
configManager.setShortcuts(shortcuts)
|
||||
// Refresh shortcuts registration
|
||||
if (mainWindow) {
|
||||
@@ -222,20 +239,20 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
})
|
||||
|
||||
// knowledge base
|
||||
ipcMain.handle('knowledge-base:create', KnowledgeService.create)
|
||||
ipcMain.handle('knowledge-base:reset', KnowledgeService.reset)
|
||||
ipcMain.handle('knowledge-base:delete', KnowledgeService.delete)
|
||||
ipcMain.handle('knowledge-base:add', KnowledgeService.add)
|
||||
ipcMain.handle('knowledge-base:remove', KnowledgeService.remove)
|
||||
ipcMain.handle('knowledge-base:search', KnowledgeService.search)
|
||||
ipcMain.handle('knowledge-base:rerank', KnowledgeService.rerank)
|
||||
ipcMain.handle(IpcChannel.KnowledgeBase_Create, KnowledgeService.create)
|
||||
ipcMain.handle(IpcChannel.KnowledgeBase_Reset, KnowledgeService.reset)
|
||||
ipcMain.handle(IpcChannel.KnowledgeBase_Delete, KnowledgeService.delete)
|
||||
ipcMain.handle(IpcChannel.KnowledgeBase_Add, KnowledgeService.add)
|
||||
ipcMain.handle(IpcChannel.KnowledgeBase_Remove, KnowledgeService.remove)
|
||||
ipcMain.handle(IpcChannel.KnowledgeBase_Search, KnowledgeService.search)
|
||||
ipcMain.handle(IpcChannel.KnowledgeBase_Rerank, KnowledgeService.rerank)
|
||||
|
||||
// window
|
||||
ipcMain.handle('window:set-minimum-size', (_, width: number, height: number) => {
|
||||
ipcMain.handle(IpcChannel.Windows_SetMinimumSize, (_, width: number, height: number) => {
|
||||
mainWindow?.setMinimumSize(width, height)
|
||||
})
|
||||
|
||||
ipcMain.handle('window:reset-minimum-size', () => {
|
||||
ipcMain.handle(IpcChannel.Windows_ResetMinimumSize, () => {
|
||||
mainWindow?.setMinimumSize(1080, 600)
|
||||
const [width, height] = mainWindow?.getSize() ?? [1080, 600]
|
||||
if (width < 1080) {
|
||||
@@ -244,59 +261,81 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
})
|
||||
|
||||
// gemini
|
||||
ipcMain.handle('gemini:upload-file', GeminiService.uploadFile)
|
||||
ipcMain.handle('gemini:base64-file', GeminiService.base64File)
|
||||
ipcMain.handle('gemini:retrieve-file', GeminiService.retrieveFile)
|
||||
ipcMain.handle('gemini:list-files', GeminiService.listFiles)
|
||||
ipcMain.handle('gemini:delete-file', GeminiService.deleteFile)
|
||||
ipcMain.handle(IpcChannel.Gemini_UploadFile, GeminiService.uploadFile)
|
||||
ipcMain.handle(IpcChannel.Gemini_Base64File, GeminiService.base64File)
|
||||
ipcMain.handle(IpcChannel.Gemini_RetrieveFile, GeminiService.retrieveFile)
|
||||
ipcMain.handle(IpcChannel.Gemini_ListFiles, GeminiService.listFiles)
|
||||
ipcMain.handle(IpcChannel.Gemini_DeleteFile, GeminiService.deleteFile)
|
||||
|
||||
// mini window
|
||||
ipcMain.handle('miniwindow:show', () => windowService.showMiniWindow())
|
||||
ipcMain.handle('miniwindow:hide', () => windowService.hideMiniWindow())
|
||||
ipcMain.handle('miniwindow:close', () => windowService.closeMiniWindow())
|
||||
ipcMain.handle('miniwindow:toggle', () => windowService.toggleMiniWindow())
|
||||
ipcMain.handle('miniwindow:set-pin', (_, isPinned) => windowService.setPinMiniWindow(isPinned))
|
||||
ipcMain.handle(IpcChannel.MiniWindow_Show, () => windowService.showMiniWindow())
|
||||
ipcMain.handle(IpcChannel.MiniWindow_Hide, () => windowService.hideMiniWindow())
|
||||
ipcMain.handle(IpcChannel.MiniWindow_Close, () => windowService.closeMiniWindow())
|
||||
ipcMain.handle(IpcChannel.MiniWindow_Toggle, () => windowService.toggleMiniWindow())
|
||||
ipcMain.handle(IpcChannel.MiniWindow_SetPin, (_, isPinned) => windowService.setPinMiniWindow(isPinned))
|
||||
|
||||
// aes
|
||||
ipcMain.handle('aes:encrypt', (_, text: string, secretKey: string, iv: string) => encrypt(text, secretKey, iv))
|
||||
ipcMain.handle('aes:decrypt', (_, encryptedData: string, iv: string, secretKey: string) =>
|
||||
ipcMain.handle(IpcChannel.Aes_Encrypt, (_, text: string, secretKey: string, iv: string) =>
|
||||
encrypt(text, secretKey, iv)
|
||||
)
|
||||
ipcMain.handle(IpcChannel.Aes_Decrypt, (_, encryptedData: string, iv: string, secretKey: string) =>
|
||||
decrypt(encryptedData, iv, secretKey)
|
||||
)
|
||||
|
||||
// Register MCP handlers
|
||||
ipcMain.handle('mcp:remove-server', mcpService.removeServer)
|
||||
ipcMain.handle('mcp:restart-server', mcpService.restartServer)
|
||||
ipcMain.handle('mcp:stop-server', mcpService.stopServer)
|
||||
ipcMain.handle('mcp:list-tools', mcpService.listTools)
|
||||
ipcMain.handle('mcp:call-tool', mcpService.callTool)
|
||||
ipcMain.handle('mcp:get-install-info', mcpService.getInstallInfo)
|
||||
ipcMain.handle(IpcChannel.Mcp_RemoveServer, mcpService.removeServer)
|
||||
ipcMain.handle(IpcChannel.Mcp_RestartServer, mcpService.restartServer)
|
||||
ipcMain.handle(IpcChannel.Mcp_StopServer, mcpService.stopServer)
|
||||
ipcMain.handle(IpcChannel.Mcp_ListTools, mcpService.listTools)
|
||||
ipcMain.handle(IpcChannel.Mcp_CallTool, mcpService.callTool)
|
||||
ipcMain.handle(IpcChannel.Mcp_ListPrompts, mcpService.listPrompts)
|
||||
ipcMain.handle(IpcChannel.Mcp_GetPrompt, mcpService.getPrompt)
|
||||
ipcMain.handle(IpcChannel.Mcp_ListResources, mcpService.listResources)
|
||||
ipcMain.handle(IpcChannel.Mcp_GetResource, mcpService.getResource)
|
||||
ipcMain.handle(IpcChannel.Mcp_GetInstallInfo, mcpService.getInstallInfo)
|
||||
|
||||
ipcMain.handle('app:is-binary-exist', (_, name: string) => isBinaryExists(name))
|
||||
ipcMain.handle('app:get-binary-path', (_, name: string) => getBinaryPath(name))
|
||||
ipcMain.handle('app:install-uv-binary', () => runInstallScript('install-uv.js'))
|
||||
ipcMain.handle('app:install-bun-binary', () => runInstallScript('install-bun.js'))
|
||||
ipcMain.handle(IpcChannel.App_IsBinaryExist, (_, name: string) => isBinaryExists(name))
|
||||
ipcMain.handle(IpcChannel.App_GetBinaryPath, (_, name: string) => getBinaryPath(name))
|
||||
ipcMain.handle(IpcChannel.App_InstallUvBinary, () => runInstallScript('install-uv.js'))
|
||||
ipcMain.handle(IpcChannel.App_InstallBunBinary, () => runInstallScript('install-bun.js'))
|
||||
|
||||
//copilot
|
||||
ipcMain.handle('copilot:get-auth-message', CopilotService.getAuthMessage)
|
||||
ipcMain.handle('copilot:get-copilot-token', CopilotService.getCopilotToken)
|
||||
ipcMain.handle('copilot:save-copilot-token', CopilotService.saveCopilotToken)
|
||||
ipcMain.handle('copilot:get-token', CopilotService.getToken)
|
||||
ipcMain.handle('copilot:logout', CopilotService.logout)
|
||||
ipcMain.handle('copilot:get-user', CopilotService.getUser)
|
||||
ipcMain.handle(IpcChannel.Copilot_GetAuthMessage, CopilotService.getAuthMessage)
|
||||
ipcMain.handle(IpcChannel.Copilot_GetCopilotToken, CopilotService.getCopilotToken)
|
||||
ipcMain.handle(IpcChannel.Copilot_SaveCopilotToken, CopilotService.saveCopilotToken)
|
||||
ipcMain.handle(IpcChannel.Copilot_GetToken, CopilotService.getToken)
|
||||
ipcMain.handle(IpcChannel.Copilot_Logout, CopilotService.logout)
|
||||
ipcMain.handle(IpcChannel.Copilot_GetUser, CopilotService.getUser)
|
||||
|
||||
// Obsidian service
|
||||
ipcMain.handle('obsidian:get-vaults', () => {
|
||||
ipcMain.handle(IpcChannel.Obsidian_GetVaults, () => {
|
||||
return obsidianVaultService.getVaults()
|
||||
})
|
||||
|
||||
ipcMain.handle('obsidian:get-files', (_event, vaultName) => {
|
||||
ipcMain.handle(IpcChannel.Obsidian_GetFiles, (_event, vaultName) => {
|
||||
return obsidianVaultService.getFilesByVaultName(vaultName)
|
||||
})
|
||||
|
||||
// nutstore
|
||||
ipcMain.handle('nutstore:get-sso-url', NutstoreService.getNutstoreSSOUrl)
|
||||
ipcMain.handle('nutstore:decrypt-token', (_, token: string) => NutstoreService.decryptToken(token))
|
||||
ipcMain.handle('nutstore:get-directory-contents', (_, token: string, path: string) =>
|
||||
ipcMain.handle(IpcChannel.Nutstore_GetSsoUrl, NutstoreService.getNutstoreSSOUrl)
|
||||
ipcMain.handle(IpcChannel.Nutstore_DecryptToken, (_, token: string) => NutstoreService.decryptToken(token))
|
||||
ipcMain.handle(IpcChannel.Nutstore_GetDirectoryContents, (_, token: string, path: string) =>
|
||||
NutstoreService.getDirectoryContents(token, path)
|
||||
)
|
||||
|
||||
// search window
|
||||
ipcMain.handle(IpcChannel.SearchWindow_Open, async (_, uid: string) => {
|
||||
await searchService.openSearchWindow(uid)
|
||||
})
|
||||
ipcMain.handle(IpcChannel.SearchWindow_Close, async (_, uid: string) => {
|
||||
await searchService.closeSearchWindow(uid)
|
||||
})
|
||||
ipcMain.handle(IpcChannel.SearchWindow_OpenUrl, async (_, uid: string, url: string) => {
|
||||
return await searchService.openUrlInSearchWindow(uid, url)
|
||||
})
|
||||
|
||||
// webview
|
||||
ipcMain.handle(IpcChannel.Webview_SetOpenLinkExternal, (_, webviewId: number, isExternal: boolean) =>
|
||||
setOpenLinkExternal(webviewId, isExternal)
|
||||
)
|
||||
}
|
||||
|
||||
src/main/mcpServers/brave-search.ts (new file, 374 lines)
@@ -0,0 +1,374 @@
|
||||
// Brave Search MCP Server
|
||||
// port https://github.com/modelcontextprotocol/servers/blob/main/src/brave-search/index.ts
|
||||
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||
import { CallToolRequestSchema, ListToolsRequestSchema, Tool } from '@modelcontextprotocol/sdk/types.js'
|
||||
|
||||
const WEB_SEARCH_TOOL: Tool = {
|
||||
name: 'brave_web_search',
|
||||
description:
|
||||
'Performs a web search using the Brave Search API, ideal for general queries, news, articles, and online content. ' +
|
||||
'Use this for broad information gathering, recent events, or when you need diverse web sources. ' +
|
||||
'Supports pagination, content filtering, and freshness controls. ' +
|
||||
'Maximum 20 results per request, with offset for pagination. ',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
query: {
|
||||
type: 'string',
|
||||
description: 'Search query (max 400 chars, 50 words)'
|
||||
},
|
||||
count: {
|
||||
type: 'number',
|
||||
description: 'Number of results (1-20, default 10)',
|
||||
default: 10
|
||||
},
|
||||
offset: {
|
||||
type: 'number',
|
||||
description: 'Pagination offset (max 9, default 0)',
|
||||
default: 0
|
||||
}
|
||||
},
|
||||
required: ['query']
|
||||
}
|
||||
}
|
||||
|
||||
const LOCAL_SEARCH_TOOL: Tool = {
|
||||
name: 'brave_local_search',
|
||||
description:
|
||||
"Searches for local businesses and places using Brave's Local Search API. " +
|
||||
'Best for queries related to physical locations, businesses, restaurants, services, etc. ' +
|
||||
'Returns detailed information including:\n' +
|
||||
'- Business names and addresses\n' +
|
||||
'- Ratings and review counts\n' +
|
||||
'- Phone numbers and opening hours\n' +
|
||||
"Use this when the query implies 'near me' or mentions specific locations. " +
|
||||
'Automatically falls back to web search if no local results are found.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
query: {
|
||||
type: 'string',
|
||||
description: "Local search query (e.g. 'pizza near Central Park')"
|
||||
},
|
||||
count: {
|
||||
type: 'number',
|
||||
description: 'Number of results (1-20, default 5)',
|
||||
default: 5
|
||||
}
|
||||
},
|
||||
required: ['query']
|
||||
}
|
||||
}
|
||||
|
||||
const RATE_LIMIT = {
|
||||
perSecond: 1,
|
||||
perMonth: 15000
|
||||
}
|
||||
|
||||
const requestCount = {
|
||||
second: 0,
|
||||
month: 0,
|
||||
lastReset: Date.now()
|
||||
}
|
||||
|
||||
function checkRateLimit() {
|
||||
const now = Date.now()
|
||||
if (now - requestCount.lastReset > 1000) {
|
||||
requestCount.second = 0
|
||||
requestCount.lastReset = now
|
||||
}
|
||||
if (requestCount.second >= RATE_LIMIT.perSecond || requestCount.month >= RATE_LIMIT.perMonth) {
|
||||
throw new Error('Rate limit exceeded')
|
||||
}
|
||||
requestCount.second++
|
||||
requestCount.month++
|
||||
}
|
||||
|
||||
interface BraveWeb {
|
||||
web?: {
|
||||
results?: Array<{
|
||||
title: string
|
||||
description: string
|
||||
url: string
|
||||
language?: string
|
||||
published?: string
|
||||
rank?: number
|
||||
}>
|
||||
}
|
||||
locations?: {
|
||||
results?: Array<{
|
||||
id: string // Required by API
|
||||
title?: string
|
||||
}>
|
||||
}
|
||||
}
|
||||
|
||||
interface BraveLocation {
|
||||
id: string
|
||||
name: string
|
||||
address: {
|
||||
streetAddress?: string
|
||||
addressLocality?: string
|
||||
addressRegion?: string
|
||||
postalCode?: string
|
||||
}
|
||||
coordinates?: {
|
||||
latitude: number
|
||||
longitude: number
|
||||
}
|
||||
phone?: string
|
||||
rating?: {
|
||||
ratingValue?: number
|
||||
ratingCount?: number
|
||||
}
|
||||
openingHours?: string[]
|
||||
priceRange?: string
|
||||
}
|
||||
|
||||
interface BravePoiResponse {
|
||||
results: BraveLocation[]
|
||||
}
|
||||
|
||||
interface BraveDescription {
|
||||
descriptions: { [id: string]: string }
|
||||
}
|
||||
|
||||
function isBraveWebSearchArgs(args: unknown): args is { query: string; count?: number } {
|
||||
return (
|
||||
typeof args === 'object' &&
|
||||
args !== null &&
|
||||
'query' in args &&
|
||||
typeof (args as { query: string }).query === 'string'
|
||||
)
|
||||
}
|
||||
|
||||
function isBraveLocalSearchArgs(args: unknown): args is { query: string; count?: number } {
|
||||
return (
|
||||
typeof args === 'object' &&
|
||||
args !== null &&
|
||||
'query' in args &&
|
||||
typeof (args as { query: string }).query === 'string'
|
||||
)
|
||||
}
|
||||
|
||||
async function performWebSearch(apiKey: string, query: string, count: number = 10, offset: number = 0) {
|
||||
checkRateLimit()
|
||||
const url = new URL('https://api.search.brave.com/res/v1/web/search')
|
||||
url.searchParams.set('q', query)
|
||||
url.searchParams.set('count', Math.min(count, 20).toString()) // API limit
|
||||
url.searchParams.set('offset', offset.toString())
|
||||
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'X-Subscription-Token': apiKey
|
||||
}
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Brave API error: ${response.status} ${response.statusText}\n${await response.text()}`)
|
||||
}
|
||||
|
||||
const data = (await response.json()) as BraveWeb
|
||||
|
||||
// Extract just web results
|
||||
const results = (data.web?.results || []).map((result) => ({
|
||||
title: result.title || '',
|
||||
description: result.description || '',
|
||||
url: result.url || ''
|
||||
}))
|
||||
|
||||
return results.map((r) => `Title: ${r.title}\nDescription: ${r.description}\nURL: ${r.url}`).join('\n\n')
|
||||
}
|
||||
|
||||
async function performLocalSearch(apiKey: string, query: string, count: number = 5) {
|
||||
checkRateLimit()
|
||||
// Initial search to get location IDs
|
||||
const webUrl = new URL('https://api.search.brave.com/res/v1/web/search')
|
||||
webUrl.searchParams.set('q', query)
|
||||
webUrl.searchParams.set('search_lang', 'en')
|
||||
webUrl.searchParams.set('result_filter', 'locations')
|
||||
webUrl.searchParams.set('count', Math.min(count, 20).toString())
|
||||
|
||||
const webResponse = await fetch(webUrl, {
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'X-Subscription-Token': apiKey
|
||||
}
|
||||
})
|
||||
|
||||
if (!webResponse.ok) {
|
||||
throw new Error(`Brave API error: ${webResponse.status} ${webResponse.statusText}\n${await webResponse.text()}`)
|
||||
}
|
||||
|
||||
const webData = (await webResponse.json()) as BraveWeb
|
||||
const locationIds =
|
||||
webData.locations?.results?.filter((r): r is { id: string; title?: string } => r.id != null).map((r) => r.id) || []
|
||||
|
||||
if (locationIds.length === 0) {
|
||||
return performWebSearch(apiKey, query, count) // Fallback to web search
|
||||
}
|
||||
|
||||
// Get POI details and descriptions in parallel
|
||||
const [poisData, descriptionsData] = await Promise.all([
|
||||
getPoisData(apiKey, locationIds),
|
||||
getDescriptionsData(apiKey, locationIds)
|
||||
])
|
||||
|
||||
return formatLocalResults(poisData, descriptionsData)
|
||||
}
|
||||
|
||||
async function getPoisData(apiKey: string, ids: string[]): Promise<BravePoiResponse> {
|
||||
checkRateLimit()
|
||||
const url = new URL('https://api.search.brave.com/res/v1/local/pois')
|
||||
ids.filter(Boolean).forEach((id) => url.searchParams.append('ids', id))
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'X-Subscription-Token': apiKey
|
||||
}
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Brave API error: ${response.status} ${response.statusText}\n${await response.text()}`)
|
||||
}
|
||||
|
||||
const poisResponse = (await response.json()) as BravePoiResponse
|
||||
return poisResponse
|
||||
}
|
||||
|
||||
async function getDescriptionsData(apiKey: string, ids: string[]): Promise<BraveDescription> {
|
||||
checkRateLimit()
|
||||
const url = new URL('https://api.search.brave.com/res/v1/local/descriptions')
|
||||
ids.filter(Boolean).forEach((id) => url.searchParams.append('ids', id))
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Accept-Encoding': 'gzip',
|
||||
'X-Subscription-Token': apiKey
|
||||
}
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Brave API error: ${response.status} ${response.statusText}\n${await response.text()}`)
|
||||
}
|
||||
|
||||
const descriptionsData = (await response.json()) as BraveDescription
|
||||
return descriptionsData
|
||||
}
|
||||
|
||||
function formatLocalResults(poisData: BravePoiResponse, descData: BraveDescription): string {
|
||||
return (
|
||||
(poisData.results || [])
|
||||
.map((poi) => {
|
||||
const address =
|
||||
[
|
||||
poi.address?.streetAddress ?? '',
|
||||
poi.address?.addressLocality ?? '',
|
||||
poi.address?.addressRegion ?? '',
|
||||
poi.address?.postalCode ?? ''
|
||||
]
|
||||
.filter((part) => part !== '')
|
||||
.join(', ') || 'N/A'
|
||||
|
||||
return `Name: ${poi.name}
|
||||
Address: ${address}
|
||||
Phone: ${poi.phone || 'N/A'}
|
||||
Rating: ${poi.rating?.ratingValue ?? 'N/A'} (${poi.rating?.ratingCount ?? 0} reviews)
|
||||
Price Range: ${poi.priceRange || 'N/A'}
|
||||
Hours: ${(poi.openingHours || []).join(', ') || 'N/A'}
|
||||
Description: ${descData.descriptions[poi.id] || 'No description available'}
|
||||
`
|
||||
})
|
||||
.join('\n---\n') || 'No local results found'
|
||||
)
|
||||
}
|
||||
|
||||
class BraveSearchServer {
|
||||
public server: Server
|
||||
private apiKey: string
|
||||
|
||||
constructor(apiKey: string) {
|
||||
if (!apiKey) {
|
||||
throw new Error('BRAVE_API_KEY is required for Brave Search MCP server')
|
||||
}
|
||||
this.apiKey = apiKey
|
||||
this.server = new Server(
|
||||
{
|
||||
name: 'brave-search-server',
|
||||
version: '0.1.0'
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {}
|
||||
}
|
||||
}
|
||||
)
|
||||
this.initialize()
|
||||
}
|
||||
|
||||
initialize() {
|
||||
// Tool handlers
|
||||
this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: [WEB_SEARCH_TOOL, LOCAL_SEARCH_TOOL]
|
||||
}))
|
||||
|
||||
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
try {
|
||||
const { name, arguments: args } = request.params
|
||||
|
||||
if (!args) {
|
||||
throw new Error('No arguments provided')
|
||||
}
|
||||
|
||||
switch (name) {
|
||||
case 'brave_web_search': {
|
||||
if (!isBraveWebSearchArgs(args)) {
|
||||
throw new Error('Invalid arguments for brave_web_search')
|
||||
}
|
||||
const { query, count = 10 } = args
|
||||
const results = await performWebSearch(this.apiKey, query, count)
|
||||
return {
|
||||
content: [{ type: 'text', text: results }],
|
||||
isError: false
|
||||
}
|
||||
}
|
||||
|
||||
case 'brave_local_search': {
|
||||
if (!isBraveLocalSearchArgs(args)) {
|
||||
throw new Error('Invalid arguments for brave_local_search')
|
||||
}
|
||||
const { query, count = 5 } = args
|
||||
const results = await performLocalSearch(this.apiKey, query, count)
|
||||
return {
|
||||
content: [{ type: 'text', text: results }],
|
||||
isError: false
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
return {
|
||||
content: [{ type: 'text', text: `Unknown tool: ${name}` }],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: `Error: ${error instanceof Error ? error.message : String(error)}`
|
||||
}
|
||||
],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export default BraveSearchServer
|
||||
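BraveSearchServer exposes a ready-built MCP Server instance, so the app can run it in-process instead of spawning an external command. A hedged sketch of attaching it to the SDK's in-memory transport pair; whether MCPService wires it exactly this way, and the import path used here, are assumptions:

```ts
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js'

import BraveSearchServer from './mcpServers/brave-search'

// Sketch only: connect the in-process server to a linked client/server transport pair.
async function startBraveSearchSketch(apiKey: string) {
  const braveServer = new BraveSearchServer(apiKey)
  const [clientTransport, serverTransport] = InMemoryTransport.createLinkedPair()
  await braveServer.server.connect(serverTransport)
  return clientTransport // hand this to an MCP client elsewhere
}
```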
src/main/mcpServers/dify-knowledge.ts (new file, 263 lines)
@@ -0,0 +1,263 @@
|
||||
// inspired by https://dify.ai/blog/turn-your-dify-app-into-an-mcp-server
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||
import { CallToolRequestSchema, ListToolsRequestSchema, ToolSchema } from '@modelcontextprotocol/sdk/types.js'
|
||||
import { z } from 'zod'
|
||||
import { zodToJsonSchema } from 'zod-to-json-schema'
|
||||
|
||||
interface DifyKnowledgeServerConfig {
|
||||
difyKey: string
|
||||
apiHost: string
|
||||
}
|
||||
|
||||
interface DifyListKnowledgeResponse {
|
||||
id: string
|
||||
name: string
|
||||
description: string
|
||||
}
|
||||
|
||||
interface DifySearchKnowledgeResponse {
|
||||
query: {
|
||||
content: string
|
||||
}
|
||||
records: Array<{
|
||||
segment: {
|
||||
id: string
|
||||
position: number
|
||||
document_id: string
|
||||
content: string
|
||||
keywords: string[]
|
||||
document?: {
|
||||
id: string
|
||||
data_source_type: string
|
||||
name: string
|
||||
}
|
||||
}
|
||||
score: number
|
||||
}>
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const ToolInputSchema = ToolSchema.shape.inputSchema
|
||||
type ToolInput = z.infer<typeof ToolInputSchema>
|
||||
|
||||
const SearchKnowledgeArgsSchema = z.object({
|
||||
id: z.string().describe('Knowledge ID'),
|
||||
query: z.string().describe('Query string'),
|
||||
topK: z.number().optional().describe('Number of top results to return')
|
||||
})
|
||||
|
||||
type McpResponse = {
|
||||
content: Array<{ type: 'text'; text: string }>
|
||||
isError?: boolean
|
||||
}
|
||||
|
||||
class DifyKnowledgeServer {
|
||||
public server: Server
|
||||
private config: DifyKnowledgeServerConfig
|
||||
|
||||
constructor(difyKey: string, args: string[]) {
|
||||
console.log('DifyKnowledgeServer args', args)
|
||||
if (args.length === 0) {
|
||||
throw new Error('DifyKnowledgeServer requires at least one argument')
|
||||
}
|
||||
this.config = {
|
||||
difyKey: difyKey,
|
||||
apiHost: args[0]
|
||||
}
|
||||
this.server = new Server(
|
||||
{
|
||||
name: '@cherry/dify-knowledge-server',
|
||||
version: '0.1.0'
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {}
|
||||
}
|
||||
}
|
||||
)
|
||||
this.initialize()
|
||||
}
|
||||
|
||||
initialize() {
|
||||
this.server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: 'list_knowledges',
|
||||
description: 'List all knowledge bases',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {},
|
||||
required: []
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'search_knowledge',
|
||||
description: 'Search knowledge by id and query',
|
||||
inputSchema: zodToJsonSchema(SearchKnowledgeArgsSchema) as ToolInput
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
try {
|
||||
const { name, arguments: args } = request.params
|
||||
switch (name) {
|
||||
case 'list_knowledges': {
|
||||
return await this.performListKnowledges(this.config.difyKey, this.config.apiHost)
|
||||
}
|
||||
case 'search_knowledge': {
|
||||
const parsed = SearchKnowledgeArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
const errorDetails = JSON.stringify(parsed.error.format(), null, 2)
|
||||
throw new Error(`Invalid arguments:\n${errorDetails}`)
|
||||
}
|
||||
|
||||
console.log('DifyKnowledgeServer search_knowledge parsed', parsed.data)
|
||||
return await this.performSearchKnowledge(
|
||||
parsed.data.id,
|
||||
parsed.data.query,
|
||||
parsed.data.topK || 6,
|
||||
this.config.difyKey,
|
||||
this.config.apiHost
|
||||
)
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unknown tool: ${name}`)
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
return {
|
||||
content: [{ type: 'text', text: `Error: ${errorMessage}` }],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
private async performListKnowledges(difyKey: string, apiHost: string): Promise<McpResponse> {
|
||||
try {
|
||||
const url = `${apiHost.replace(/\/$/, '')}/datasets`
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${difyKey}`
|
||||
}
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
throw new Error(`API request failed with status ${response.status}: ${errorText}`)
|
||||
}
|
||||
|
||||
const apiResponse = await response.json()
|
||||
|
||||
const knowledges: DifyListKnowledgeResponse[] =
|
||||
apiResponse?.data?.map((item: any) => ({
|
||||
id: item.id,
|
||||
name: item.name,
|
||||
description: item.description || ''
|
||||
})) || []
|
||||
|
||||
const listText =
|
||||
knowledges.length > 0
|
||||
? knowledges.map((k) => `- **${k.name}** (ID: ${k.id})\n ${k.description || 'No Description'}`).join('\n')
|
||||
: '- No knowledge bases found.'
|
||||
|
||||
const formattedText = `### Available knowledge bases:\n\n${listText}`
|
||||
|
||||
return {
|
||||
content: [{ type: 'text', text: formattedText }]
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error while listing knowledge bases:', error)
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
// Return an MCP response that carries the error message
|
||||
return {
|
||||
content: [{ type: 'text', text: `Error accessing knowledge bases: ${errorMessage}` }],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async performSearchKnowledge(
|
||||
id: string,
|
||||
query: string,
|
||||
topK: number,
|
||||
difyKey: string,
|
||||
apiHost: string
|
||||
): Promise<McpResponse> {
|
||||
try {
|
||||
const url = `${apiHost.replace(/\/$/, '')}/datasets/${id}/retrieve`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${difyKey}`,
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
query: query,
|
||||
retrieval_model: {
|
||||
top_k: topK,
|
||||
// the Dify API returns an error if these fields are not set
|
||||
reranking_enable: null,
|
||||
score_threshold_enabled: null
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
throw new Error(`API request failed with status ${response.status}: ${errorText}`)
|
||||
}
|
||||
|
||||
const searchResponse: DifySearchKnowledgeResponse = await response.json()
|
||||
|
||||
if (!searchResponse || !Array.isArray(searchResponse.records)) {
|
||||
throw new Error(`Invalid response format received from the Dify API: ${JSON.stringify(searchResponse)}`)
|
||||
}
|
||||
|
||||
const header = `### Query: ${query}\n\n`
|
||||
let body: string
|
||||
|
||||
if (searchResponse.records.length === 0) {
|
||||
body = 'No results found.'
|
||||
} else {
|
||||
const resultsText = searchResponse.records
|
||||
.map((record, index) => {
|
||||
const docName = record.segment.document?.name || 'Unknown Document'
|
||||
const content = record.segment.content.trim()
|
||||
const score = record.score
|
||||
const keywords = record.segment.keywords || []
|
||||
|
||||
let resultEntry = `#### ${index + 1}. ${docName} (Relevance Score: ${(score * 100).toFixed(1)}%)`
|
||||
resultEntry += `\n${content}`
|
||||
if (keywords.length > 0) {
|
||||
resultEntry += `\n*Keywords: ${keywords.join(', ')}*`
|
||||
}
|
||||
return resultEntry
|
||||
})
|
||||
.join('\n\n')
|
||||
|
||||
body = `Found ${searchResponse.records.length} results:\n\n${resultsText}`
|
||||
}
|
||||
|
||||
const formattedText = header + body
|
||||
|
||||
return {
|
||||
content: [{ type: 'text', text: formattedText }]
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error while searching knowledge base:', error)
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
return {
|
||||
content: [{ type: 'text', text: `Error searching knowledge base: ${errorMessage}` }],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default DifyKnowledgeServer
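// Illustrative construction sketch (hedged, hypothetical helper not referenced
// elsewhere): the server takes the Dify API key plus an args array whose first
// element is the API host; the host below assumes the Dify cloud endpoint and is
// only a placeholder.
export function __difyKnowledgeExample(): Server {
  return new DifyKnowledgeServer(process.env.DIFY_KEY ?? '', ['https://api.dify.ai/v1']).server
}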
|
||||
37
src/main/mcpServers/factory.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||
import Logger from 'electron-log'
|
||||
|
||||
import BraveSearchServer from './brave-search'
|
||||
import DifyKnowledgeServer from './dify-knowledge'
|
||||
import FetchServer from './fetch'
|
||||
import FileSystemServer from './filesystem'
|
||||
import MemoryServer from './memory'
|
||||
import ThinkingServer from './sequentialthinking'
|
||||
|
||||
export function createInMemoryMCPServer(name: string, args: string[] = [], envs: Record<string, string> = {}): Server {
|
||||
Logger.info(`[MCP] Creating in-memory MCP server: ${name} with args: ${args} and envs: ${JSON.stringify(envs)}`)
|
||||
switch (name) {
|
||||
case '@cherry/memory': {
|
||||
const envPath = envs.MEMORY_FILE_PATH
|
||||
return new MemoryServer(envPath).server
|
||||
}
|
||||
case '@cherry/sequentialthinking': {
|
||||
return new ThinkingServer().server
|
||||
}
|
||||
case '@cherry/brave-search': {
|
||||
return new BraveSearchServer(envs.BRAVE_API_KEY).server
|
||||
}
|
||||
case '@cherry/fetch': {
|
||||
return new FetchServer().server
|
||||
}
|
||||
case '@cherry/filesystem': {
|
||||
return new FileSystemServer(args).server
|
||||
}
|
||||
case '@cherry/dify-knowledge': {
|
||||
const difyKey = envs.DIFY_KEY
|
||||
return new DifyKnowledgeServer(difyKey, args).server
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unknown in-memory MCP server: ${name}`)
|
||||
}
|
||||
}
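// Illustrative usage sketch (hedged, hypothetical helper not referenced elsewhere):
// each built-in server is selected by name; '@cherry/filesystem' takes its allowed
// directories via args, while key-based servers read their credentials from envs.
// The directory below is a placeholder.
export function __factoryExample(): Server {
  return createInMemoryMCPServer('@cherry/filesystem', ['/tmp/cherry-sandbox'], {})
}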
|
||||
236
src/main/mcpServers/fetch.ts
Normal file
@@ -0,0 +1,236 @@
|
||||
// port https://github.com/zcaceres/fetch-mcp/blob/main/src/index.ts
|
||||
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||
import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'
|
||||
import { JSDOM } from 'jsdom'
|
||||
import TurndownService from 'turndown'
|
||||
import { z } from 'zod'
|
||||
|
||||
export const RequestPayloadSchema = z.object({
|
||||
url: z.string().url(),
|
||||
headers: z.record(z.string()).optional()
|
||||
})
|
||||
|
||||
export type RequestPayload = z.infer<typeof RequestPayloadSchema>
|
||||
|
||||
export class Fetcher {
|
||||
private static async _fetch({ url, headers }: RequestPayload): Promise<Response> {
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
'User-Agent':
|
||||
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
...headers
|
||||
}
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error: ${response.status}`)
|
||||
}
|
||||
return response
|
||||
} catch (e: unknown) {
|
||||
if (e instanceof Error) {
|
||||
throw new Error(`Failed to fetch ${url}: ${e.message}`)
|
||||
} else {
|
||||
throw new Error(`Failed to fetch ${url}: Unknown error`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static async html(requestPayload: RequestPayload) {
|
||||
try {
|
||||
const response = await this._fetch(requestPayload)
|
||||
const html = await response.text()
|
||||
return { content: [{ type: 'text', text: html }], isError: false }
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{ type: 'text', text: (error as Error).message }],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static async json(requestPayload: RequestPayload) {
|
||||
try {
|
||||
const response = await this._fetch(requestPayload)
|
||||
const json = await response.json()
|
||||
return {
|
||||
content: [{ type: 'text', text: JSON.stringify(json) }],
|
||||
isError: false
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{ type: 'text', text: (error as Error).message }],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static async txt(requestPayload: RequestPayload) {
|
||||
try {
|
||||
const response = await this._fetch(requestPayload)
|
||||
const html = await response.text()
|
||||
|
||||
const dom = new JSDOM(html)
|
||||
const document = dom.window.document
|
||||
|
||||
const scripts = document.getElementsByTagName('script')
|
||||
const styles = document.getElementsByTagName('style')
|
||||
Array.from(scripts).forEach((script: any) => script.remove())
|
||||
Array.from(styles).forEach((style: any) => style.remove())
|
||||
|
||||
const text = document.body.textContent || ''
|
||||
|
||||
const normalizedText = text.replace(/\s+/g, ' ').trim()
|
||||
|
||||
return {
|
||||
content: [{ type: 'text', text: normalizedText }],
|
||||
isError: false
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{ type: 'text', text: (error as Error).message }],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static async markdown(requestPayload: RequestPayload) {
|
||||
try {
|
||||
const response = await this._fetch(requestPayload)
|
||||
const html = await response.text()
|
||||
const turndownService = new TurndownService()
|
||||
const markdown = turndownService.turndown(html)
|
||||
return { content: [{ type: 'text', text: markdown }], isError: false }
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [{ type: 'text', text: (error as Error).message }],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const server = new Server(
|
||||
{
|
||||
name: 'zcaceres/fetch',
|
||||
version: '0.1.0'
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
resources: {},
|
||||
tools: {}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: 'fetch_html',
|
||||
description: 'Fetch a website and return the content as HTML',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
url: {
|
||||
type: 'string',
|
||||
description: 'URL of the website to fetch'
|
||||
},
|
||||
headers: {
|
||||
type: 'object',
|
||||
description: 'Optional headers to include in the request'
|
||||
}
|
||||
},
|
||||
required: ['url']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'fetch_markdown',
|
||||
description: 'Fetch a website and return the content as Markdown',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
url: {
|
||||
type: 'string',
|
||||
description: 'URL of the website to fetch'
|
||||
},
|
||||
headers: {
|
||||
type: 'object',
|
||||
description: 'Optional headers to include in the request'
|
||||
}
|
||||
},
|
||||
required: ['url']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'fetch_txt',
|
||||
description: 'Fetch a website, return the content as plain text (no HTML)',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
url: {
|
||||
type: 'string',
|
||||
description: 'URL of the website to fetch'
|
||||
},
|
||||
headers: {
|
||||
type: 'object',
|
||||
description: 'Optional headers to include in the request'
|
||||
}
|
||||
},
|
||||
required: ['url']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'fetch_json',
|
||||
description: 'Fetch a JSON file from a URL',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
url: {
|
||||
type: 'string',
|
||||
description: 'URL of the JSON to fetch'
|
||||
},
|
||||
headers: {
|
||||
type: 'object',
|
||||
description: 'Optional headers to include in the request'
|
||||
}
|
||||
},
|
||||
required: ['url']
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
const { arguments: args } = request.params
|
||||
|
||||
const validatedArgs = RequestPayloadSchema.parse(args)
|
||||
|
||||
if (request.params.name === 'fetch_html') {
|
||||
const fetchResult = await Fetcher.html(validatedArgs)
|
||||
return fetchResult
|
||||
}
|
||||
if (request.params.name === 'fetch_json') {
|
||||
const fetchResult = await Fetcher.json(validatedArgs)
|
||||
return fetchResult
|
||||
}
|
||||
if (request.params.name === 'fetch_txt') {
|
||||
const fetchResult = await Fetcher.txt(validatedArgs)
|
||||
return fetchResult
|
||||
}
|
||||
if (request.params.name === 'fetch_markdown') {
|
||||
const fetchResult = await Fetcher.markdown(validatedArgs)
|
||||
return fetchResult
|
||||
}
|
||||
throw new Error('Tool not found')
|
||||
})
|
||||
|
||||
class FetchServer {
|
||||
public server: Server
|
||||
constructor() {
|
||||
this.server = server
|
||||
}
|
||||
}
|
||||
export default FetchServer
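// Illustrative usage sketch (hedged, hypothetical helper not referenced elsewhere):
// the Fetcher helpers can be called directly, without going through the MCP server;
// the URL below is a placeholder.
export async function __fetchMarkdownExample(): Promise<string> {
  const result = await Fetcher.markdown({ url: 'https://example.com' })
  // On failure isError is true and content[0].text carries the error message.
  return result.content[0].text
}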
|
||||
655
src/main/mcpServers/filesystem.ts
Normal file
@@ -0,0 +1,655 @@
|
||||
// port https://github.com/modelcontextprotocol/servers/blob/main/src/filesystem/index.ts
|
||||
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||
import { CallToolRequestSchema, ListToolsRequestSchema, ToolSchema } from '@modelcontextprotocol/sdk/types.js'
|
||||
import { createTwoFilesPatch } from 'diff'
|
||||
import fs from 'fs/promises'
|
||||
import { minimatch } from 'minimatch'
|
||||
import os from 'os'
|
||||
import path from 'path'
|
||||
import { z } from 'zod'
|
||||
import { zodToJsonSchema } from 'zod-to-json-schema'
|
||||
|
||||
// Normalize all paths consistently
|
||||
function normalizePath(p: string): string {
|
||||
return path.normalize(p)
|
||||
}
|
||||
|
||||
function expandHome(filepath: string): string {
|
||||
if (filepath.startsWith('~/') || filepath === '~') {
|
||||
return path.join(os.homedir(), filepath.slice(1))
|
||||
}
|
||||
return filepath
|
||||
}
|
||||
|
||||
// Security utilities
|
||||
async function validatePath(allowedDirectories: string[], requestedPath: string): Promise<string> {
|
||||
const expandedPath = expandHome(requestedPath)
|
||||
const absolute = path.isAbsolute(expandedPath)
|
||||
? path.resolve(expandedPath)
|
||||
: path.resolve(process.cwd(), expandedPath)
|
||||
|
||||
const normalizedRequested = normalizePath(absolute)
|
||||
|
||||
// Check if path is within allowed directories
|
||||
const isAllowed = allowedDirectories.some((dir) => normalizedRequested.startsWith(dir))
|
||||
if (!isAllowed) {
|
||||
throw new Error(
|
||||
`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`
|
||||
)
|
||||
}
|
||||
|
||||
// Handle symlinks by checking their real path
|
||||
try {
|
||||
const realPath = await fs.realpath(absolute)
|
||||
const normalizedReal = normalizePath(realPath)
|
||||
const isRealPathAllowed = allowedDirectories.some((dir) => normalizedReal.startsWith(dir))
|
||||
if (!isRealPathAllowed) {
|
||||
throw new Error('Access denied - symlink target outside allowed directories')
|
||||
}
|
||||
return realPath
|
||||
} catch (error) {
|
||||
// For new files that don't exist yet, verify parent directory
|
||||
const parentDir = path.dirname(absolute)
|
||||
try {
|
||||
const realParentPath = await fs.realpath(parentDir)
|
||||
const normalizedParent = normalizePath(realParentPath)
|
||||
const isParentAllowed = allowedDirectories.some((dir) => normalizedParent.startsWith(dir))
|
||||
if (!isParentAllowed) {
|
||||
throw new Error('Access denied - parent directory outside allowed directories')
|
||||
}
|
||||
return absolute
|
||||
} catch {
|
||||
throw new Error(`Parent directory does not exist: ${parentDir}`)
|
||||
}
|
||||
}
|
||||
}
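// Illustrative usage sketch (hedged, hypothetical helper not referenced elsewhere):
// validatePath expands '~', resolves relative paths and symlinks, then rejects
// anything outside the allow-list. The directories below are placeholders.
export async function __validatePathExample(): Promise<void> {
  const allowed = ['/tmp/workspace'].map((dir) => normalizePath(path.resolve(expandHome(dir))))
  await validatePath(allowed, '/tmp/workspace/notes.txt') // resolves to a real, allowed path
  await validatePath(allowed, '/etc/passwd').catch((e) => console.error(e.message)) // Access denied
}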
|
||||
|
||||
// Schema definitions
|
||||
const ReadFileArgsSchema = z.object({
|
||||
path: z.string()
|
||||
})
|
||||
|
||||
const ReadMultipleFilesArgsSchema = z.object({
|
||||
paths: z.array(z.string())
|
||||
})
|
||||
|
||||
const WriteFileArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
content: z.string()
|
||||
})
|
||||
|
||||
const EditOperation = z.object({
|
||||
oldText: z.string().describe('Text to search for - must match exactly'),
|
||||
newText: z.string().describe('Text to replace with')
|
||||
})
|
||||
|
||||
const EditFileArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
edits: z.array(EditOperation),
|
||||
dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format')
|
||||
})
|
||||
|
||||
const CreateDirectoryArgsSchema = z.object({
|
||||
path: z.string()
|
||||
})
|
||||
|
||||
const ListDirectoryArgsSchema = z.object({
|
||||
path: z.string()
|
||||
})
|
||||
|
||||
const DirectoryTreeArgsSchema = z.object({
|
||||
path: z.string()
|
||||
})
|
||||
|
||||
const MoveFileArgsSchema = z.object({
|
||||
source: z.string(),
|
||||
destination: z.string()
|
||||
})
|
||||
|
||||
const SearchFilesArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
pattern: z.string(),
|
||||
excludePatterns: z.array(z.string()).optional().default([])
|
||||
})
|
||||
|
||||
const GetFileInfoArgsSchema = z.object({
|
||||
path: z.string()
|
||||
})
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const ToolInputSchema = ToolSchema.shape.inputSchema
|
||||
type ToolInput = z.infer<typeof ToolInputSchema>
|
||||
|
||||
interface FileInfo {
|
||||
size: number
|
||||
created: Date
|
||||
modified: Date
|
||||
accessed: Date
|
||||
isDirectory: boolean
|
||||
isFile: boolean
|
||||
permissions: string
|
||||
}
|
||||
|
||||
// Tool implementations
|
||||
async function getFileStats(filePath: string): Promise<FileInfo> {
|
||||
const stats = await fs.stat(filePath)
|
||||
return {
|
||||
size: stats.size,
|
||||
created: stats.birthtime,
|
||||
modified: stats.mtime,
|
||||
accessed: stats.atime,
|
||||
isDirectory: stats.isDirectory(),
|
||||
isFile: stats.isFile(),
|
||||
permissions: stats.mode.toString(8).slice(-3)
|
||||
}
|
||||
}
|
||||
|
||||
async function searchFiles(
|
||||
allowedDirectories: string[],
|
||||
rootPath: string,
|
||||
pattern: string,
|
||||
excludePatterns: string[] = []
|
||||
): Promise<string[]> {
|
||||
const results: string[] = []
|
||||
|
||||
async function search(currentPath: string) {
|
||||
const entries = await fs.readdir(currentPath, { withFileTypes: true })
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(currentPath, entry.name)
|
||||
|
||||
try {
|
||||
// Validate each path before processing
|
||||
await validatePath(allowedDirectories, fullPath)
|
||||
|
||||
// Check if path matches any exclude pattern
|
||||
const relativePath = path.relative(rootPath, fullPath)
|
||||
const shouldExclude = excludePatterns.some((pattern) => {
|
||||
const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`
|
||||
return minimatch(relativePath, globPattern, { dot: true })
|
||||
})
|
||||
|
||||
if (shouldExclude) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
|
||||
results.push(fullPath)
|
||||
}
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await search(fullPath)
|
||||
}
|
||||
} catch (error) {
|
||||
// Skip invalid paths during search
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await search(rootPath)
|
||||
return results
|
||||
}
|
||||
|
||||
// file editing and diffing utilities
|
||||
function normalizeLineEndings(text: string): string {
|
||||
return text.replace(/\r\n/g, '\n')
|
||||
}
|
||||
|
||||
function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
|
||||
// Ensure consistent line endings for diff
|
||||
const normalizedOriginal = normalizeLineEndings(originalContent)
|
||||
const normalizedNew = normalizeLineEndings(newContent)
|
||||
|
||||
return createTwoFilesPatch(filepath, filepath, normalizedOriginal, normalizedNew, 'original', 'modified')
|
||||
}
|
||||
|
||||
async function applyFileEdits(
|
||||
filePath: string,
|
||||
edits: Array<{ oldText: string; newText: string }>,
|
||||
dryRun = false
|
||||
): Promise<string> {
|
||||
// Read file content and normalize line endings
|
||||
const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'))
|
||||
|
||||
// Apply edits sequentially
|
||||
let modifiedContent = content
|
||||
for (const edit of edits) {
|
||||
const normalizedOld = normalizeLineEndings(edit.oldText)
|
||||
const normalizedNew = normalizeLineEndings(edit.newText)
|
||||
|
||||
// If exact match exists, use it
|
||||
if (modifiedContent.includes(normalizedOld)) {
|
||||
modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew)
|
||||
continue
|
||||
}
|
||||
|
||||
// Otherwise, try line-by-line matching with flexibility for whitespace
|
||||
const oldLines = normalizedOld.split('\n')
|
||||
const contentLines = modifiedContent.split('\n')
|
||||
let matchFound = false
|
||||
|
||||
for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
|
||||
const potentialMatch = contentLines.slice(i, i + oldLines.length)
|
||||
|
||||
// Compare lines with normalized whitespace
|
||||
const isMatch = oldLines.every((oldLine, j) => {
|
||||
const contentLine = potentialMatch[j]
|
||||
return oldLine.trim() === contentLine.trim()
|
||||
})
|
||||
|
||||
if (isMatch) {
|
||||
// Preserve original indentation of first line
|
||||
const originalIndent = contentLines[i].match(/^\s*/)?.[0] || ''
|
||||
const newLines = normalizedNew.split('\n').map((line, j) => {
|
||||
if (j === 0) return originalIndent + line.trimStart()
|
||||
// For subsequent lines, try to preserve relative indentation
|
||||
const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || ''
|
||||
const newIndent = line.match(/^\s*/)?.[0] || ''
|
||||
if (oldIndent && newIndent) {
|
||||
const relativeIndent = newIndent.length - oldIndent.length
|
||||
return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart()
|
||||
}
|
||||
return line
|
||||
})
|
||||
|
||||
contentLines.splice(i, oldLines.length, ...newLines)
|
||||
modifiedContent = contentLines.join('\n')
|
||||
matchFound = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (!matchFound) {
|
||||
throw new Error(`Could not find exact match for edit:\n${edit.oldText}`)
|
||||
}
|
||||
}
|
||||
|
||||
// Create unified diff
|
||||
const diff = createUnifiedDiff(content, modifiedContent, filePath)
|
||||
|
||||
// Format diff with appropriate number of backticks
|
||||
let numBackticks = 3
|
||||
while (diff.includes('`'.repeat(numBackticks))) {
|
||||
numBackticks++
|
||||
}
|
||||
const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`
|
||||
|
||||
if (!dryRun) {
|
||||
await fs.writeFile(filePath, modifiedContent, 'utf-8')
|
||||
}
|
||||
|
||||
return formattedDiff
|
||||
}
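// Illustrative usage sketch (hedged, hypothetical helper not referenced elsewhere):
// with dryRun set to true, applyFileEdits only computes and returns the unified
// diff and leaves the file untouched; the path and edit text below are placeholders.
export async function __applyFileEditsPreview(): Promise<string> {
  return applyFileEdits(
    '/tmp/workspace/config.json',
    [{ oldText: '"debug": false', newText: '"debug": true' }],
    true // dry run: nothing is written
  )
}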
|
||||
|
||||
class FileSystemServer {
|
||||
public server: Server
|
||||
private allowedDirectories: string[]
|
||||
constructor(allowedDirs: string[]) {
|
||||
if (!Array.isArray(allowedDirs) || allowedDirs.length === 0) {
|
||||
throw new Error('No allowed directories provided, please specify at least one directory in args')
|
||||
}
|
||||
|
||||
this.allowedDirectories = allowedDirs.map((dir) => normalizePath(path.resolve(expandHome(dir))))
|
||||
|
||||
// Validate that all directories exist and are accessible
|
||||
this.validateDirs().catch((error) => {
|
||||
console.error('Error validating allowed directories:', error)
|
||||
throw new Error(`Error validating allowed directories: ${error}`)
|
||||
})
|
||||
|
||||
this.server = new Server(
|
||||
{
|
||||
name: 'secure-filesystem-server',
|
||||
version: '0.2.0'
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {}
|
||||
}
|
||||
}
|
||||
)
|
||||
this.initialize()
|
||||
}
|
||||
|
||||
async validateDirs() {
|
||||
// Validate that all directories exist and are accessible
|
||||
await Promise.all(
|
||||
this.allowedDirectories.map(async (dir) => {
|
||||
try {
|
||||
const stats = await fs.stat(expandHome(dir))
|
||||
if (!stats.isDirectory()) {
|
||||
console.error(`Error: ${dir} is not a directory`)
|
||||
throw new Error(`Error: ${dir} is not a directory`)
|
||||
}
|
||||
} catch (error: any) {
|
||||
console.error(`Error accessing directory ${dir}:`, error)
|
||||
throw new Error(`Error accessing directory ${dir}: ${error instanceof Error ? error.message : String(error)}`)
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
initialize() {
|
||||
// Tool handlers
|
||||
this.server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: 'read_file',
|
||||
description:
|
||||
'Read the complete contents of a file from the file system. ' +
|
||||
'Handles various text encodings and provides detailed error messages ' +
|
||||
'if the file cannot be read. Use this tool when you need to examine ' +
|
||||
'the contents of a single file. Only works within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(ReadFileArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'read_multiple_files',
|
||||
description:
|
||||
'Read the contents of multiple files simultaneously. This is more ' +
|
||||
'efficient than reading files one by one when you need to analyze ' +
|
||||
"or compare multiple files. Each file's content is returned with its " +
|
||||
"path as a reference. Failed reads for individual files won't stop " +
|
||||
'the entire operation. Only works within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(ReadMultipleFilesArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'write_file',
|
||||
description:
|
||||
'Create a new file or completely overwrite an existing file with new content. ' +
|
||||
'Use with caution as it will overwrite existing files without warning. ' +
|
||||
'Handles text content with proper encoding. Only works within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(WriteFileArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'edit_file',
|
||||
description:
|
||||
'Make line-based edits to a text file. Each edit replaces exact line sequences ' +
|
||||
'with new content. Returns a git-style diff showing the changes made. ' +
|
||||
'Only works within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'create_directory',
|
||||
description:
|
||||
'Create a new directory or ensure a directory exists. Can create multiple ' +
|
||||
'nested directories in one operation. If the directory already exists, ' +
|
||||
'this operation will succeed silently. Perfect for setting up directory ' +
|
||||
'structures for projects or ensuring required paths exist. Only works within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(CreateDirectoryArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'list_directory',
|
||||
description:
|
||||
'Get a detailed listing of all files and directories in a specified path. ' +
|
||||
'Results clearly distinguish between files and directories with [FILE] and [DIR] ' +
|
||||
'prefixes. This tool is essential for understanding directory structure and ' +
|
||||
'finding specific files within a directory. Only works within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(ListDirectoryArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'directory_tree',
|
||||
description:
|
||||
'Get a recursive tree view of files and directories as a JSON structure. ' +
|
||||
"Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " +
|
||||
'Files have no children array, while directories always have a children array (which may be empty). ' +
|
||||
'The output is formatted with 2-space indentation for readability. Only works within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(DirectoryTreeArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'move_file',
|
||||
description:
|
||||
'Move or rename files and directories. Can move files between directories ' +
|
||||
'and rename them in a single operation. If the destination exists, the ' +
|
||||
'operation will fail. Works across different directories and can be used ' +
|
||||
'for simple renaming within the same directory. Both source and destination must be within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(MoveFileArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'search_files',
|
||||
description:
|
||||
'Recursively search for files and directories matching a pattern. ' +
|
||||
'Searches through all subdirectories from the starting path. The search ' +
|
||||
'is case-insensitive and matches partial names. Returns full paths to all ' +
|
||||
"matching items. Great for finding files when you don't know their exact location. " +
|
||||
'Only searches within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(SearchFilesArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'get_file_info',
|
||||
description:
|
||||
'Retrieve detailed metadata about a file or directory. Returns comprehensive ' +
|
||||
'information including size, creation time, last modified time, permissions, ' +
|
||||
'and type. This tool is perfect for understanding file characteristics ' +
|
||||
'without reading the actual content. Only works within allowed directories.',
|
||||
inputSchema: zodToJsonSchema(GetFileInfoArgsSchema) as ToolInput
|
||||
},
|
||||
{
|
||||
name: 'list_allowed_directories',
|
||||
description:
|
||||
'Returns the list of directories that this server is allowed to access. ' +
|
||||
'Use this to understand which directories are available before trying to access files.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {},
|
||||
required: []
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
try {
|
||||
const { name, arguments: args } = request.params
|
||||
|
||||
switch (name) {
|
||||
case 'read_file': {
|
||||
const parsed = ReadFileArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for read_file: ${parsed.error}`)
|
||||
}
|
||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
||||
const content = await fs.readFile(validPath, 'utf-8')
|
||||
return {
|
||||
content: [{ type: 'text', text: content }]
|
||||
}
|
||||
}
|
||||
|
||||
case 'read_multiple_files': {
|
||||
const parsed = ReadMultipleFilesArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for read_multiple_files: ${parsed.error}`)
|
||||
}
|
||||
const results = await Promise.all(
|
||||
parsed.data.paths.map(async (filePath: string) => {
|
||||
try {
|
||||
const validPath = await validatePath(this.allowedDirectories, filePath)
|
||||
const content = await fs.readFile(validPath, 'utf-8')
|
||||
return `${filePath}:\n${content}\n`
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
return `${filePath}: Error - ${errorMessage}`
|
||||
}
|
||||
})
|
||||
)
|
||||
return {
|
||||
content: [{ type: 'text', text: results.join('\n---\n') }]
|
||||
}
|
||||
}
|
||||
|
||||
case 'write_file': {
|
||||
const parsed = WriteFileArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for write_file: ${parsed.error}`)
|
||||
}
|
||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
||||
await fs.writeFile(validPath, parsed.data.content, 'utf-8')
|
||||
return {
|
||||
content: [{ type: 'text', text: `Successfully wrote to ${parsed.data.path}` }]
|
||||
}
|
||||
}
|
||||
|
||||
case 'edit_file': {
|
||||
const parsed = EditFileArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for edit_file: ${parsed.error}`)
|
||||
}
|
||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
||||
const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun)
|
||||
return {
|
||||
content: [{ type: 'text', text: result }]
|
||||
}
|
||||
}
|
||||
|
||||
case 'create_directory': {
|
||||
const parsed = CreateDirectoryArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for create_directory: ${parsed.error}`)
|
||||
}
|
||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
||||
await fs.mkdir(validPath, { recursive: true })
|
||||
return {
|
||||
content: [{ type: 'text', text: `Successfully created directory ${parsed.data.path}` }]
|
||||
}
|
||||
}
|
||||
|
||||
case 'list_directory': {
|
||||
const parsed = ListDirectoryArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for list_directory: ${parsed.error}`)
|
||||
}
|
||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
||||
const entries = await fs.readdir(validPath, { withFileTypes: true })
|
||||
const formatted = entries
|
||||
.map((entry) => `${entry.isDirectory() ? '[DIR]' : '[FILE]'} ${entry.name}`)
|
||||
.join('\n')
|
||||
return {
|
||||
content: [{ type: 'text', text: formatted }]
|
||||
}
|
||||
}
|
||||
|
||||
case 'directory_tree': {
|
||||
const parsed = DirectoryTreeArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`)
|
||||
}
|
||||
|
||||
interface TreeEntry {
|
||||
name: string
|
||||
type: 'file' | 'directory'
|
||||
children?: TreeEntry[]
|
||||
}
|
||||
|
||||
async function buildTree(allowedDirectories: string[], currentPath: string): Promise<TreeEntry[]> {
|
||||
const validPath = await validatePath(allowedDirectories, currentPath)
|
||||
const entries = await fs.readdir(validPath, { withFileTypes: true })
|
||||
const result: TreeEntry[] = []
|
||||
|
||||
for (const entry of entries) {
|
||||
const entryData: TreeEntry = {
|
||||
name: entry.name,
|
||||
type: entry.isDirectory() ? 'directory' : 'file'
|
||||
}
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
const subPath = path.join(currentPath, entry.name)
|
||||
entryData.children = await buildTree(allowedDirectories, subPath)
|
||||
}
|
||||
|
||||
result.push(entryData)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
const treeData = await buildTree(this.allowedDirectories, parsed.data.path)
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: JSON.stringify(treeData, null, 2)
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
case 'move_file': {
|
||||
const parsed = MoveFileArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for move_file: ${parsed.error}`)
|
||||
}
|
||||
const validSourcePath = await validatePath(this.allowedDirectories, parsed.data.source)
|
||||
const validDestPath = await validatePath(this.allowedDirectories, parsed.data.destination)
|
||||
await fs.rename(validSourcePath, validDestPath)
|
||||
return {
|
||||
content: [
|
||||
{ type: 'text', text: `Successfully moved ${parsed.data.source} to ${parsed.data.destination}` }
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
case 'search_files': {
|
||||
const parsed = SearchFilesArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for search_files: ${parsed.error}`)
|
||||
}
|
||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
||||
const results = await searchFiles(
|
||||
this.allowedDirectories,
|
||||
validPath,
|
||||
parsed.data.pattern,
|
||||
parsed.data.excludePatterns
|
||||
)
|
||||
return {
|
||||
content: [{ type: 'text', text: results.length > 0 ? results.join('\n') : 'No matches found' }]
|
||||
}
|
||||
}
|
||||
|
||||
case 'get_file_info': {
|
||||
const parsed = GetFileInfoArgsSchema.safeParse(args)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for get_file_info: ${parsed.error}`)
|
||||
}
|
||||
const validPath = await validatePath(this.allowedDirectories, parsed.data.path)
|
||||
const info = await getFileStats(validPath)
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: Object.entries(info)
|
||||
.map(([key, value]) => `${key}: ${value}`)
|
||||
.join('\n')
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
case 'list_allowed_directories': {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: `Allowed directories:\n${this.allowedDirectories.join('\n')}`
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown tool: ${name}`)
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
return {
|
||||
content: [{ type: 'text', text: `Error: ${errorMessage}` }],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export default FileSystemServer
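// Illustrative construction sketch (hedged, hypothetical helper not referenced
// elsewhere): the server only ever touches the directories passed to its
// constructor; everything else is rejected by validatePath. The path below is a
// placeholder.
export function __fileSystemServerExample(): Server {
  return new FileSystemServer(['/tmp/workspace']).server
}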
|
||||
700
src/main/mcpServers/memory.ts
Normal file
@@ -0,0 +1,700 @@
|
||||
import { getConfigDir } from '@main/utils/file'
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||
import { CallToolRequestSchema, ErrorCode, ListToolsRequestSchema, McpError } from '@modelcontextprotocol/sdk/types.js'
|
||||
import { Mutex } from 'async-mutex' // Mutex used to serialize writes to the memory file
|
||||
import { promises as fs } from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
// Define memory file path
|
||||
const defaultMemoryPath = path.join(getConfigDir(), 'memory.json')
|
||||
|
||||
// Interfaces remain the same
|
||||
interface Entity {
|
||||
name: string
|
||||
entityType: string
|
||||
observations: string[]
|
||||
}
|
||||
|
||||
interface Relation {
|
||||
from: string
|
||||
to: string
|
||||
relationType: string
|
||||
}
|
||||
|
||||
// Structure for storing the graph in memory and in the file
|
||||
interface KnowledgeGraph {
|
||||
entities: Entity[]
|
||||
relations: Relation[]
|
||||
}
|
||||
|
||||
// The KnowledgeGraphManager class contains all operations to interact with the knowledge graph
|
||||
class KnowledgeGraphManager {
|
||||
private memoryPath: string
|
||||
private entities: Map<string, Entity> // Use Map for efficient entity lookup
|
||||
private relations: Set<string> // Store stringified relations for easy Set operations
|
||||
private fileMutex: Mutex // Mutex for file writing
|
||||
|
||||
private constructor(memoryPath: string) {
|
||||
this.memoryPath = memoryPath
|
||||
this.entities = new Map<string, Entity>()
|
||||
this.relations = new Set<string>()
|
||||
this.fileMutex = new Mutex()
|
||||
}
|
||||
|
||||
// Static async factory method for initialization
|
||||
public static async create(memoryPath: string): Promise<KnowledgeGraphManager> {
|
||||
const manager = new KnowledgeGraphManager(memoryPath)
|
||||
await manager._ensureMemoryPathExists()
|
||||
await manager._loadGraphFromDisk()
|
||||
return manager
|
||||
}
|
||||
|
||||
private async _ensureMemoryPathExists(): Promise<void> {
|
||||
try {
|
||||
const directory = path.dirname(this.memoryPath)
|
||||
await fs.mkdir(directory, { recursive: true })
|
||||
try {
|
||||
await fs.access(this.memoryPath)
|
||||
} catch (error) {
|
||||
// File doesn't exist, create an empty file with initial structure
|
||||
await fs.writeFile(this.memoryPath, JSON.stringify({ entities: [], relations: [] }, null, 2))
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to ensure memory path exists:', error)
|
||||
// Propagate the error or handle it more gracefully depending on requirements
|
||||
throw new McpError(
|
||||
ErrorCode.InternalError,
|
||||
`Failed to ensure memory path: ${error instanceof Error ? error.message : String(error)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Load graph from disk into memory (called once during initialization)
|
||||
private async _loadGraphFromDisk(): Promise<void> {
|
||||
try {
|
||||
const data = await fs.readFile(this.memoryPath, 'utf-8')
|
||||
// Handle empty file case
|
||||
if (data.trim() === '') {
|
||||
this.entities = new Map()
|
||||
this.relations = new Set()
|
||||
// Optionally write the initial empty structure back
|
||||
await this._persistGraph()
|
||||
return
|
||||
}
|
||||
const graph: KnowledgeGraph = JSON.parse(data)
|
||||
this.entities.clear()
|
||||
this.relations.clear()
|
||||
graph.entities.forEach((entity) => this.entities.set(entity.name, entity))
|
||||
graph.relations.forEach((relation) => this.relations.add(this._serializeRelation(relation)))
|
||||
} catch (error) {
|
||||
if (error instanceof Error && 'code' in error && (error as any).code === 'ENOENT') {
|
||||
// File doesn't exist (should have been created by _ensureMemoryPathExists, but handle defensively)
|
||||
this.entities = new Map()
|
||||
this.relations = new Set()
|
||||
await this._persistGraph() // Create the file with empty structure
|
||||
} else if (error instanceof SyntaxError) {
|
||||
console.error('Failed to parse memory.json, initializing with empty graph:', error)
|
||||
// If JSON is invalid, start fresh and overwrite the corrupted file
|
||||
this.entities = new Map()
|
||||
this.relations = new Set()
|
||||
await this._persistGraph()
|
||||
} else {
|
||||
console.error('Failed to load knowledge graph from disk:', error)
|
||||
throw new McpError(
|
||||
ErrorCode.InternalError,
|
||||
`Failed to load graph: ${error instanceof Error ? error.message : String(error)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Persist the current in-memory graph to disk using a mutex
|
||||
private async _persistGraph(): Promise<void> {
|
||||
const release = await this.fileMutex.acquire()
|
||||
try {
|
||||
const graphData: KnowledgeGraph = {
|
||||
entities: Array.from(this.entities.values()),
|
||||
relations: Array.from(this.relations).map((rStr) => this._deserializeRelation(rStr))
|
||||
}
|
||||
await fs.writeFile(this.memoryPath, JSON.stringify(graphData, null, 2))
|
||||
} catch (error) {
|
||||
console.error('Failed to save knowledge graph:', error)
|
||||
// Decide how to handle write errors - potentially retry or notify
|
||||
throw new McpError(
|
||||
ErrorCode.InternalError,
|
||||
`Failed to save graph: ${error instanceof Error ? error.message : String(error)}`
|
||||
)
|
||||
} finally {
|
||||
release()
|
||||
}
|
||||
}
|
||||
|
||||
// Helper to consistently serialize relations for Set storage
|
||||
private _serializeRelation(relation: Relation): string {
|
||||
// Simple serialization, ensure order doesn't matter if properties are consistent
|
||||
return JSON.stringify({ from: relation.from, to: relation.to, relationType: relation.relationType })
|
||||
}
|
||||
|
||||
// Helper to deserialize relations from Set storage
|
||||
private _deserializeRelation(relationStr: string): Relation {
|
||||
return JSON.parse(relationStr) as Relation
|
||||
}
|
||||
|
||||
async createEntities(entities: Entity[]): Promise<Entity[]> {
|
||||
const newEntities: Entity[] = []
|
||||
entities.forEach((entity) => {
|
||||
if (!this.entities.has(entity.name)) {
|
||||
// Ensure observations is always an array
|
||||
const newEntity = { ...entity, observations: Array.isArray(entity.observations) ? entity.observations : [] }
|
||||
this.entities.set(entity.name, newEntity)
|
||||
newEntities.push(newEntity)
|
||||
}
|
||||
})
|
||||
if (newEntities.length > 0) {
|
||||
await this._persistGraph()
|
||||
}
|
||||
return newEntities
|
||||
}
|
||||
|
||||
async createRelations(relations: Relation[]): Promise<Relation[]> {
|
||||
const newRelations: Relation[] = []
|
||||
relations.forEach((relation) => {
|
||||
// Ensure related entities exist before creating a relation
|
||||
if (!this.entities.has(relation.from) || !this.entities.has(relation.to)) {
|
||||
console.warn(`Skipping relation creation: Entity not found for relation ${relation.from} -> ${relation.to}`)
|
||||
return // Skip this relation
|
||||
}
|
||||
const relationStr = this._serializeRelation(relation)
|
||||
if (!this.relations.has(relationStr)) {
|
||||
this.relations.add(relationStr)
|
||||
newRelations.push(relation)
|
||||
}
|
||||
})
|
||||
if (newRelations.length > 0) {
|
||||
await this._persistGraph()
|
||||
}
|
||||
return newRelations
|
||||
}
|
||||
|
||||
async addObservations(
|
||||
observations: { entityName: string; contents: string[] }[]
|
||||
): Promise<{ entityName: string; addedObservations: string[] }[]> {
|
||||
const results: { entityName: string; addedObservations: string[] }[] = []
|
||||
let changed = false
|
||||
observations.forEach((o) => {
|
||||
const entity = this.entities.get(o.entityName)
|
||||
if (!entity) {
|
||||
// Option 1: Throw error
|
||||
throw new McpError(ErrorCode.InvalidParams, `Entity with name ${o.entityName} not found`)
|
||||
// Option 2: Skip and warn
|
||||
// console.warn(`Entity with name ${o.entityName} not found when adding observations. Skipping.`);
|
||||
// return;
|
||||
}
|
||||
// Ensure observations array exists
|
||||
if (!Array.isArray(entity.observations)) {
|
||||
entity.observations = []
|
||||
}
|
||||
const newObservations = o.contents.filter((content) => !entity.observations.includes(content))
|
||||
if (newObservations.length > 0) {
|
||||
entity.observations.push(...newObservations)
|
||||
results.push({ entityName: o.entityName, addedObservations: newObservations })
|
||||
changed = true
|
||||
} else {
|
||||
// Still include in results even if nothing was added, to confirm processing
|
||||
results.push({ entityName: o.entityName, addedObservations: [] })
|
||||
}
|
||||
})
|
||||
if (changed) {
|
||||
await this._persistGraph()
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
async deleteEntities(entityNames: string[]): Promise<void> {
|
||||
let changed = false
|
||||
const namesToDelete = new Set(entityNames)
|
||||
|
||||
// Delete entities
|
||||
namesToDelete.forEach((name) => {
|
||||
if (this.entities.delete(name)) {
|
||||
changed = true
|
||||
}
|
||||
})
|
||||
|
||||
// Delete relations involving deleted entities
|
||||
const relationsToDelete = new Set<string>()
|
||||
this.relations.forEach((relStr) => {
|
||||
const rel = this._deserializeRelation(relStr)
|
||||
if (namesToDelete.has(rel.from) || namesToDelete.has(rel.to)) {
|
||||
relationsToDelete.add(relStr)
|
||||
}
|
||||
})
|
||||
|
||||
relationsToDelete.forEach((relStr) => {
|
||||
if (this.relations.delete(relStr)) {
|
||||
changed = true
|
||||
}
|
||||
})
|
||||
|
||||
if (changed) {
|
||||
await this._persistGraph()
|
||||
}
|
||||
}
|
||||
|
||||
async deleteObservations(deletions: { entityName: string; observations: string[] }[]): Promise<void> {
|
||||
let changed = false
|
||||
deletions.forEach((d) => {
|
||||
const entity = this.entities.get(d.entityName)
|
||||
if (entity && Array.isArray(entity.observations)) {
|
||||
const initialLength = entity.observations.length
|
||||
const observationsToDelete = new Set(d.observations)
|
||||
entity.observations = entity.observations.filter((o) => !observationsToDelete.has(o))
|
||||
if (entity.observations.length !== initialLength) {
|
||||
changed = true
|
||||
}
|
||||
}
|
||||
})
|
||||
if (changed) {
|
||||
await this._persistGraph()
|
||||
}
|
||||
}
|
||||
|
||||
async deleteRelations(relations: Relation[]): Promise<void> {
|
||||
let changed = false
|
||||
relations.forEach((rel) => {
|
||||
const relStr = this._serializeRelation(rel)
|
||||
if (this.relations.delete(relStr)) {
|
||||
changed = true
|
||||
}
|
||||
})
|
||||
if (changed) {
|
||||
await this._persistGraph()
|
||||
}
|
||||
}
|
||||
|
||||
// Read the current state from memory
|
||||
async readGraph(): Promise<KnowledgeGraph> {
|
||||
// Return a deep copy to prevent external modification of the internal state
|
||||
return JSON.parse(
|
||||
JSON.stringify({
|
||||
entities: Array.from(this.entities.values()),
|
||||
relations: Array.from(this.relations).map((rStr) => this._deserializeRelation(rStr))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
// Search operates on the in-memory graph
|
||||
async searchNodes(query: string): Promise<KnowledgeGraph> {
|
||||
const lowerCaseQuery = query.toLowerCase()
|
||||
const filteredEntities = Array.from(this.entities.values()).filter(
|
||||
(e) =>
|
||||
e.name.toLowerCase().includes(lowerCaseQuery) ||
|
||||
e.entityType.toLowerCase().includes(lowerCaseQuery) ||
|
||||
(Array.isArray(e.observations) && e.observations.some((o) => o.toLowerCase().includes(lowerCaseQuery)))
|
||||
)
|
||||
|
||||
const filteredEntityNames = new Set(filteredEntities.map((e) => e.name))
|
||||
|
||||
const filteredRelations = Array.from(this.relations)
|
||||
.map((rStr) => this._deserializeRelation(rStr))
|
||||
.filter((r) => filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to))
|
||||
|
||||
return {
|
||||
entities: filteredEntities,
|
||||
relations: filteredRelations
|
||||
}
|
||||
}
|
||||
|
||||
// Open operates on the in-memory graph
|
||||
async openNodes(names: string[]): Promise<KnowledgeGraph> {
|
||||
const nameSet = new Set(names)
|
||||
const filteredEntities = Array.from(this.entities.values()).filter((e) => nameSet.has(e.name))
|
||||
const filteredEntityNames = new Set(filteredEntities.map((e) => e.name))
|
||||
|
||||
const filteredRelations = Array.from(this.relations)
|
||||
.map((rStr) => this._deserializeRelation(rStr))
|
||||
.filter((r) => filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to))
|
||||
|
||||
return {
|
||||
entities: filteredEntities,
|
||||
relations: filteredRelations
|
||||
}
|
||||
}
|
||||
}
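// Illustrative usage sketch (hedged, hypothetical helper not referenced elsewhere):
// the manager must be created through its async factory so the backing file exists
// and is loaded before any mutation; the entities below are illustrative data.
export async function __knowledgeGraphExample(): Promise<KnowledgeGraph> {
  const manager = await KnowledgeGraphManager.create(defaultMemoryPath)
  await manager.createEntities([
    { name: 'Ada Lovelace', entityType: 'person', observations: ['wrote the first published algorithm'] },
    { name: 'Analytical Engine', entityType: 'machine', observations: [] }
  ])
  await manager.createRelations([{ from: 'Ada Lovelace', to: 'Analytical Engine', relationType: 'programmed' }])
  return manager.readGraph()
}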
|
||||
|
||||
class MemoryServer {
|
||||
public server: Server
|
||||
// Hold the manager instance, initialized asynchronously
|
||||
private knowledgeGraphManager: KnowledgeGraphManager | null = null
|
||||
private initializationPromise: Promise<void> // To track initialization
|
||||
|
||||
constructor(envPath: string = '') {
|
||||
const memoryPath = envPath
|
||||
? path.isAbsolute(envPath)
|
||||
? envPath
|
||||
: path.resolve(envPath) // Use path.resolve for relative paths based on CWD
|
||||
: defaultMemoryPath
|
||||
|
||||
this.server = new Server(
|
||||
{
|
||||
name: 'memory-server',
|
||||
version: '1.1.0' // Incremented version for changes
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {}
|
||||
}
|
||||
}
|
||||
)
|
||||
// Start initialization, but don't block constructor
|
||||
this.initializationPromise = this._initializeManager(memoryPath)
|
||||
this.setupRequestHandlers() // Setup handlers immediately
|
||||
}
|
||||
|
||||
// Private async method to handle manager initialization
|
||||
private async _initializeManager(memoryPath: string): Promise<void> {
|
||||
try {
|
||||
this.knowledgeGraphManager = await KnowledgeGraphManager.create(memoryPath)
|
||||
console.log('KnowledgeGraphManager initialized successfully.')
|
||||
} catch (error) {
|
||||
console.error('Failed to initialize KnowledgeGraphManager:', error)
|
||||
// Server might be unusable, consider how to handle this state
|
||||
// Maybe set a flag and return errors for all tool calls?
|
||||
this.knowledgeGraphManager = null // Ensure it's null if init fails
|
||||
}
|
||||
}
|
||||
|
||||
// Ensures the manager is initialized before handling tool calls
|
||||
private async _getManager(): Promise<KnowledgeGraphManager> {
|
||||
await this.initializationPromise // Wait for initialization to complete
|
||||
if (!this.knowledgeGraphManager) {
|
||||
throw new McpError(ErrorCode.InternalError, 'Memory server failed to initialize. Cannot process requests.')
|
||||
}
|
||||
return this.knowledgeGraphManager
|
||||
}
|
||||
|
||||
// Setup handlers (can be called from constructor)
|
||||
setupRequestHandlers() {
|
||||
// ListTools remains largely the same, descriptions might be updated if needed
|
||||
this.server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
// Ensure manager is ready before listing tools that depend on it
|
||||
// Although ListTools itself doesn't *call* the manager, it implies the
|
||||
// manager is ready to handle calls for those tools.
|
||||
try {
|
||||
await this._getManager() // Wait for initialization before confirming tools are available
|
||||
} catch (error) {
|
||||
// If manager failed to init, maybe return an empty tool list or throw?
|
||||
console.error('Cannot list tools, manager initialization failed:', error)
|
||||
return { tools: [] } // Return empty list if server is not ready
|
||||
}
|
||||
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: 'create_entities',
|
||||
description: 'Create multiple new entities in the knowledge graph. Skips existing entities.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
entities: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: { type: 'string', description: 'The name of the entity' },
|
||||
entityType: { type: 'string', description: 'The type of the entity' },
|
||||
observations: {
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
description: 'An array of observation contents associated with the entity',
|
||||
default: [] // Add default empty array
|
||||
}
|
||||
},
|
||||
required: ['name', 'entityType'] // Observations are optional now on creation
|
||||
}
|
||||
}
|
||||
},
|
||||
required: ['entities']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'create_relations',
|
||||
description:
|
||||
'Create multiple new relations between EXISTING entities. Skips existing relations or relations with non-existent entities.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
relations: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
from: { type: 'string', description: 'The name of the entity where the relation starts' },
|
||||
to: { type: 'string', description: 'The name of the entity where the relation ends' },
|
||||
relationType: { type: 'string', description: 'The type of the relation' }
|
||||
},
|
||||
required: ['from', 'to', 'relationType']
|
||||
}
|
||||
}
|
||||
},
|
||||
required: ['relations']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'add_observations',
|
||||
description: 'Add new observations to existing entities. Skips duplicate observations.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
observations: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
entityName: { type: 'string', description: 'The name of the entity to add the observations to' },
|
||||
contents: {
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
description: 'An array of observation contents to add'
|
||||
}
|
||||
},
|
||||
required: ['entityName', 'contents']
|
||||
}
|
||||
}
|
||||
},
|
||||
required: ['observations']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'delete_entities',
|
||||
description: 'Delete multiple entities and their associated relations.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
entityNames: {
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
description: 'An array of entity names to delete'
|
||||
}
|
||||
},
|
||||
required: ['entityNames']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'delete_observations',
|
||||
description: 'Delete specific observations from entities.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
deletions: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
entityName: { type: 'string', description: 'The name of the entity containing the observations' },
|
||||
observations: {
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
description: 'An array of observations to delete'
|
||||
}
|
||||
},
|
||||
required: ['entityName', 'observations']
|
||||
}
|
||||
}
|
||||
},
|
||||
required: ['deletions']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'delete_relations',
|
||||
description: 'Delete multiple specific relations.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
relations: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
from: { type: 'string', description: 'The name of the entity where the relation starts' },
|
||||
to: { type: 'string', description: 'The name of the entity where the relation ends' },
|
||||
relationType: { type: 'string', description: 'The type of the relation' }
|
||||
},
|
||||
required: ['from', 'to', 'relationType']
|
||||
},
|
||||
description: 'An array of relations to delete'
|
||||
}
|
||||
},
|
||||
required: ['relations']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'read_graph',
|
||||
description: 'Read the entire knowledge graph from memory.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {}
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'search_nodes',
|
||||
description: 'Search nodes (entities and relations) in memory based on a query.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
query: {
|
||||
type: 'string',
|
||||
description: 'The search query to match against entity names, types, and observation content'
|
||||
}
|
||||
},
|
||||
required: ['query']
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'open_nodes',
|
||||
description: 'Retrieve specific entities and their connecting relations from memory by name.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
names: {
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
description: 'An array of entity names to retrieve'
|
||||
}
|
||||
},
|
||||
required: ['names']
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
// CallTool handler needs to await the manager and the async methods
|
||||
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
const manager = await this._getManager() // Ensure manager is ready
|
||||
const { name, arguments: args } = request.params
|
||||
|
||||
if (!args) {
|
||||
// Use McpError for standard errors
|
||||
throw new McpError(ErrorCode.InvalidParams, `No arguments provided for tool: ${name}`)
|
||||
}
|
||||
|
||||
try {
|
||||
switch (name) {
|
||||
case 'create_entities':
|
||||
// Validate args structure if necessary, though SDK might do basic validation
|
||||
if (!args.entities || !Array.isArray(args.entities)) {
|
||||
throw new McpError(
|
||||
ErrorCode.InvalidParams,
|
||||
`Invalid arguments for ${name}: 'entities' array is required.`
|
||||
)
|
||||
}
|
||||
return {
|
||||
content: [
|
||||
{ type: 'text', text: JSON.stringify(await manager.createEntities(args.entities as Entity[]), null, 2) }
|
||||
]
|
||||
}
|
||||
case 'create_relations':
|
||||
if (!args.relations || !Array.isArray(args.relations)) {
|
||||
throw new McpError(
|
||||
ErrorCode.InvalidParams,
|
||||
`Invalid arguments for ${name}: 'relations' array is required.`
|
||||
)
|
||||
}
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: JSON.stringify(await manager.createRelations(args.relations as Relation[]), null, 2)
|
||||
}
|
||||
]
|
||||
}
|
||||
case 'add_observations':
|
||||
if (!args.observations || !Array.isArray(args.observations)) {
|
||||
throw new McpError(
|
||||
ErrorCode.InvalidParams,
|
||||
`Invalid arguments for ${name}: 'observations' array is required.`
|
||||
)
|
||||
}
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: JSON.stringify(
|
||||
await manager.addObservations(args.observations as { entityName: string; contents: string[] }[]),
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
]
|
||||
}
|
||||
case 'delete_entities':
|
||||
if (!args.entityNames || !Array.isArray(args.entityNames)) {
|
||||
throw new McpError(
|
||||
ErrorCode.InvalidParams,
|
||||
`Invalid arguments for ${name}: 'entityNames' array is required.`
|
||||
)
|
||||
}
|
||||
await manager.deleteEntities(args.entityNames as string[])
|
||||
return { content: [{ type: 'text', text: 'Entities deleted successfully' }] }
|
||||
case 'delete_observations':
|
||||
if (!args.deletions || !Array.isArray(args.deletions)) {
|
||||
throw new McpError(
|
||||
ErrorCode.InvalidParams,
|
||||
`Invalid arguments for ${name}: 'deletions' array is required.`
|
||||
)
|
||||
}
|
||||
await manager.deleteObservations(args.deletions as { entityName: string; observations: string[] }[])
|
||||
return { content: [{ type: 'text', text: 'Observations deleted successfully' }] }
|
||||
case 'delete_relations':
|
||||
if (!args.relations || !Array.isArray(args.relations)) {
|
||||
throw new McpError(
|
||||
ErrorCode.InvalidParams,
|
||||
`Invalid arguments for ${name}: 'relations' array is required.`
|
||||
)
|
||||
}
|
||||
await manager.deleteRelations(args.relations as Relation[])
|
||||
return { content: [{ type: 'text', text: 'Relations deleted successfully' }] }
|
||||
case 'read_graph':
|
||||
// No arguments expected or needed for read_graph based on original schema
|
||||
return {
|
||||
content: [{ type: 'text', text: JSON.stringify(await manager.readGraph(), null, 2) }]
|
||||
}
|
||||
case 'search_nodes':
|
||||
if (typeof args.query !== 'string') {
|
||||
throw new McpError(ErrorCode.InvalidParams, `Invalid arguments for ${name}: 'query' string is required.`)
|
||||
}
|
||||
return {
|
||||
content: [
|
||||
{ type: 'text', text: JSON.stringify(await manager.searchNodes(args.query as string), null, 2) }
|
||||
]
|
||||
}
|
||||
case 'open_nodes':
|
||||
if (!args.names || !Array.isArray(args.names)) {
|
||||
throw new McpError(ErrorCode.InvalidParams, `Invalid arguments for ${name}: 'names' array is required.`)
|
||||
}
|
||||
return {
|
||||
content: [
|
||||
{ type: 'text', text: JSON.stringify(await manager.openNodes(args.names as string[]), null, 2) }
|
||||
]
|
||||
}
|
||||
default:
|
||||
throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`)
|
||||
}
|
||||
} catch (error) {
|
||||
// Catch errors from manager methods (like entity not found) or other issues
|
||||
if (error instanceof McpError) {
|
||||
throw error // Re-throw McpErrors directly
|
||||
}
|
||||
console.error(`Error executing tool ${name}:`, error)
|
||||
// Throw a generic internal error for unexpected issues
|
||||
throw new McpError(
|
||||
ErrorCode.InternalError,
|
||||
`Error executing tool ${name}: ${error instanceof Error ? error.message : String(error)}`
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export default MemoryServer
|
||||
289
src/main/mcpServers/sequentialthinking.ts
Normal file
@@ -0,0 +1,289 @@
|
||||
// Sequential Thinking MCP Server
|
||||
// port https://github.com/modelcontextprotocol/servers/blob/main/src/sequentialthinking/index.ts
|
||||
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||
import { CallToolRequestSchema, ListToolsRequestSchema, Tool } from '@modelcontextprotocol/sdk/types.js'
|
||||
// Fixed chalk import for ESM
|
||||
import chalk from 'chalk'
|
||||
|
||||
interface ThoughtData {
|
||||
thought: string
|
||||
thoughtNumber: number
|
||||
totalThoughts: number
|
||||
isRevision?: boolean
|
||||
revisesThought?: number
|
||||
branchFromThought?: number
|
||||
branchId?: string
|
||||
needsMoreThoughts?: boolean
|
||||
nextThoughtNeeded: boolean
|
||||
}
|
||||
|
||||
class SequentialThinkingServer {
|
||||
private thoughtHistory: ThoughtData[] = []
|
||||
private branches: Record<string, ThoughtData[]> = {}
|
||||
|
||||
private validateThoughtData(input: unknown): ThoughtData {
|
||||
const data = input as Record<string, unknown>
|
||||
|
||||
if (!data.thought || typeof data.thought !== 'string') {
|
||||
throw new Error('Invalid thought: must be a string')
|
||||
}
|
||||
if (!data.thoughtNumber || typeof data.thoughtNumber !== 'number') {
|
||||
throw new Error('Invalid thoughtNumber: must be a number')
|
||||
}
|
||||
if (!data.totalThoughts || typeof data.totalThoughts !== 'number') {
|
||||
throw new Error('Invalid totalThoughts: must be a number')
|
||||
}
|
||||
if (typeof data.nextThoughtNeeded !== 'boolean') {
|
||||
throw new Error('Invalid nextThoughtNeeded: must be a boolean')
|
||||
}
|
||||
|
||||
return {
|
||||
thought: data.thought,
|
||||
thoughtNumber: data.thoughtNumber,
|
||||
totalThoughts: data.totalThoughts,
|
||||
nextThoughtNeeded: data.nextThoughtNeeded,
|
||||
isRevision: data.isRevision as boolean | undefined,
|
||||
revisesThought: data.revisesThought as number | undefined,
|
||||
branchFromThought: data.branchFromThought as number | undefined,
|
||||
branchId: data.branchId as string | undefined,
|
||||
needsMoreThoughts: data.needsMoreThoughts as boolean | undefined
|
||||
}
|
||||
}
|
||||
|
||||
private formatThought(thoughtData: ThoughtData): string {
|
||||
const { thoughtNumber, totalThoughts, thought, isRevision, revisesThought, branchFromThought, branchId } =
|
||||
thoughtData
|
||||
|
||||
let prefix = ''
|
||||
let context = ''
|
||||
|
||||
if (isRevision) {
|
||||
prefix = chalk.yellow('🔄 Revision')
|
||||
context = ` (revising thought ${revisesThought})`
|
||||
} else if (branchFromThought) {
|
||||
prefix = chalk.green('🌿 Branch')
|
||||
context = ` (from thought ${branchFromThought}, ID: ${branchId})`
|
||||
} else {
|
||||
prefix = chalk.blue('💭 Thought')
|
||||
context = ''
|
||||
}
|
||||
|
||||
const header = `${prefix} ${thoughtNumber}/${totalThoughts}${context}`
|
||||
const border = '─'.repeat(Math.max(header.length, thought.length) + 4)
|
||||
|
||||
return `
|
||||
┌${border}┐
|
||||
│ ${header} │
|
||||
├${border}┤
|
||||
│ ${thought.padEnd(border.length - 2)} │
|
||||
└${border}┘`
|
||||
}
|
||||
|
||||
public processThought(input: unknown): { content: Array<{ type: string; text: string }>; isError?: boolean } {
|
||||
try {
|
||||
const validatedInput = this.validateThoughtData(input)
|
||||
|
||||
if (validatedInput.thoughtNumber > validatedInput.totalThoughts) {
|
||||
validatedInput.totalThoughts = validatedInput.thoughtNumber
|
||||
}
|
||||
|
||||
this.thoughtHistory.push(validatedInput)
|
||||
|
||||
if (validatedInput.branchFromThought && validatedInput.branchId) {
|
||||
if (!this.branches[validatedInput.branchId]) {
|
||||
this.branches[validatedInput.branchId] = []
|
||||
}
|
||||
this.branches[validatedInput.branchId].push(validatedInput)
|
||||
}
|
||||
|
||||
const formattedThought = this.formatThought(validatedInput)
|
||||
console.error(formattedThought)
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: JSON.stringify(
|
||||
{
|
||||
thoughtNumber: validatedInput.thoughtNumber,
|
||||
totalThoughts: validatedInput.totalThoughts,
|
||||
nextThoughtNeeded: validatedInput.nextThoughtNeeded,
|
||||
branches: Object.keys(this.branches),
|
||||
thoughtHistoryLength: this.thoughtHistory.length
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
]
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: JSON.stringify(
|
||||
{
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
status: 'failed'
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
],
|
||||
isError: true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const SEQUENTIAL_THINKING_TOOL: Tool = {
|
||||
name: 'sequentialthinking',
|
||||
description: `A detailed tool for dynamic and reflective problem-solving through thoughts.
|
||||
This tool helps analyze problems through a flexible thinking process that can adapt and evolve.
|
||||
Each thought can build on, question, or revise previous insights as understanding deepens.
|
||||
|
||||
When to use this tool:
|
||||
- Breaking down complex problems into steps
|
||||
- Planning and design with room for revision
|
||||
- Analysis that might need course correction
|
||||
- Problems where the full scope might not be clear initially
|
||||
- Problems that require a multi-step solution
|
||||
- Tasks that need to maintain context over multiple steps
|
||||
- Situations where irrelevant information needs to be filtered out
|
||||
|
||||
Key features:
|
||||
- You can adjust total_thoughts up or down as you progress
|
||||
- You can question or revise previous thoughts
|
||||
- You can add more thoughts even after reaching what seemed like the end
|
||||
- You can express uncertainty and explore alternative approaches
|
||||
- Not every thought needs to build linearly - you can branch or backtrack
|
||||
- Generates a solution hypothesis
|
||||
- Verifies the hypothesis based on the Chain of Thought steps
|
||||
- Repeats the process until satisfied
|
||||
- Provides a correct answer
|
||||
|
||||
Parameters explained:
|
||||
- thought: Your current thinking step, which can include:
|
||||
* Regular analytical steps
|
||||
* Revisions of previous thoughts
|
||||
* Questions about previous decisions
|
||||
* Realizations about needing more analysis
|
||||
* Changes in approach
|
||||
* Hypothesis generation
|
||||
* Hypothesis verification
|
||||
- next_thought_needed: True if you need more thinking, even if at what seemed like the end
|
||||
- thought_number: Current number in sequence (can go beyond initial total if needed)
|
||||
- total_thoughts: Current estimate of thoughts needed (can be adjusted up/down)
|
||||
- is_revision: A boolean indicating if this thought revises previous thinking
|
||||
- revises_thought: If is_revision is true, which thought number is being reconsidered
|
||||
- branch_from_thought: If branching, which thought number is the branching point
|
||||
- branch_id: Identifier for the current branch (if any)
|
||||
- needs_more_thoughts: If reaching end but realizing more thoughts needed
|
||||
|
||||
You should:
|
||||
1. Start with an initial estimate of needed thoughts, but be ready to adjust
|
||||
2. Feel free to question or revise previous thoughts
|
||||
3. Don't hesitate to add more thoughts if needed, even at the "end"
|
||||
4. Express uncertainty when present
|
||||
5. Mark thoughts that revise previous thinking or branch into new paths
|
||||
6. Ignore information that is irrelevant to the current step
|
||||
7. Generate a solution hypothesis when appropriate
|
||||
8. Verify the hypothesis based on the Chain of Thought steps
|
||||
9. Repeat the process until satisfied with the solution
|
||||
10. Provide a single, ideally correct answer as the final output
|
||||
11. Only set next_thought_needed to false when truly done and a satisfactory answer is reached`,
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
thought: {
|
||||
type: 'string',
|
||||
description: 'Your current thinking step'
|
||||
},
|
||||
nextThoughtNeeded: {
|
||||
type: 'boolean',
|
||||
description: 'Whether another thought step is needed'
|
||||
},
|
||||
thoughtNumber: {
|
||||
type: 'integer',
|
||||
description: 'Current thought number',
|
||||
minimum: 1
|
||||
},
|
||||
totalThoughts: {
|
||||
type: 'integer',
|
||||
description: 'Estimated total thoughts needed',
|
||||
minimum: 1
|
||||
},
|
||||
isRevision: {
|
||||
type: 'boolean',
|
||||
description: 'Whether this revises previous thinking'
|
||||
},
|
||||
revisesThought: {
|
||||
type: 'integer',
|
||||
description: 'Which thought is being reconsidered',
|
||||
minimum: 1
|
||||
},
|
||||
branchFromThought: {
|
||||
type: 'integer',
|
||||
description: 'Branching point thought number',
|
||||
minimum: 1
|
||||
},
|
||||
branchId: {
|
||||
type: 'string',
|
||||
description: 'Branch identifier'
|
||||
},
|
||||
needsMoreThoughts: {
|
||||
type: 'boolean',
|
||||
description: 'If more thoughts are needed'
|
||||
}
|
||||
},
|
||||
required: ['thought', 'nextThoughtNeeded', 'thoughtNumber', 'totalThoughts']
|
||||
}
|
||||
}
|
||||
|
||||
class ThinkingServer {
|
||||
public server: Server
|
||||
private thinkingServer: SequentialThinkingServer
|
||||
|
||||
constructor() {
|
||||
this.thinkingServer = new SequentialThinkingServer()
|
||||
this.server = new Server(
|
||||
{
|
||||
name: 'sequential-thinking-server',
|
||||
version: '0.2.0'
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {}
|
||||
}
|
||||
}
|
||||
)
|
||||
this.initialize()
|
||||
}
|
||||
|
||||
initialize() {
|
||||
this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
|
||||
tools: [SEQUENTIAL_THINKING_TOOL]
|
||||
}))
|
||||
|
||||
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
||||
if (request.params.name === 'sequentialthinking') {
|
||||
return this.thinkingServer.processThought(request.params.arguments)
|
||||
}
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: `Unknown tool: ${request.params.name}`
|
||||
}
|
||||
],
|
||||
isError: true
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export default ThinkingServer
|
||||
@@ -3,14 +3,60 @@ import { KnowledgeBaseParams } from '@types'
|
||||
|
||||
export default abstract class BaseReranker {
|
||||
protected base: KnowledgeBaseParams
|
||||
|
||||
constructor(base: KnowledgeBaseParams) {
|
||||
if (!base.rerankModel) {
|
||||
throw new Error('Rerank model is required')
|
||||
}
|
||||
this.base = base
|
||||
}
|
||||
|
||||
abstract rerank(query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]>
|
||||
|
||||
/**
|
||||
* Get Rerank Request Url
|
||||
*/
|
||||
protected getRerankUrl() {
|
||||
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
|
||||
? this.base.rerankBaseURL.slice(0, -1)
|
||||
: this.base.rerankBaseURL
|
||||
// 必须携带/v1,否则会404
|
||||
if (baseURL && !baseURL.endsWith('/v1')) {
|
||||
baseURL = `${baseURL}/v1`
|
||||
}
|
||||
|
||||
return `${baseURL}/rerank`
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Rerank Result
|
||||
* @param searchResults
|
||||
* @param rerankResults
|
||||
* @protected
|
||||
*/
|
||||
protected getRerankResult(
|
||||
searchResults: ExtractChunkData[],
|
||||
rerankResults: Array<{
|
||||
index: number
|
||||
relevance_score: number
|
||||
}>
|
||||
) {
|
||||
const resultMap = new Map(rerankResults.map((result) => [result.index, result.relevance_score || 0]))
|
||||
|
||||
return searchResults
|
||||
.map((doc: ExtractChunkData, index: number) => {
|
||||
const score = resultMap.get(index)
|
||||
if (score === undefined) return undefined
|
||||
|
||||
return {
|
||||
...doc,
|
||||
score
|
||||
}
|
||||
})
|
||||
.filter((doc): doc is ExtractChunkData => doc !== undefined)
|
||||
.sort((a, b) => b.score - a.score)
|
||||
}
|
||||
|
||||
public defaultHeaders() {
|
||||
return {
|
||||
Authorization: `Bearer ${this.base.rerankApiKey}`,
|
||||
@@ -18,7 +64,7 @@ export default abstract class BaseReranker {
|
||||
}
|
||||
}
|
||||
|
||||
public formatErrorMessage(url: string, error: any, requestBody: any) {
|
||||
protected formatErrorMessage(url: string, error: any, requestBody: any) {
|
||||
const errorDetails = {
|
||||
url: url,
|
||||
message: error.message,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
|
||||
import AxiosProxy from '@main/services/AxiosProxy'
|
||||
import { KnowledgeBaseParams } from '@types'
|
||||
import axios from 'axios'
|
||||
|
||||
import BaseReranker from './BaseReranker'
|
||||
|
||||
@@ -10,16 +10,7 @@ export default class JinaReranker extends BaseReranker {
|
||||
}
|
||||
|
||||
public rerank = async (query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> => {
|
||||
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
|
||||
? this.base.rerankBaseURL.slice(0, -1)
|
||||
: this.base.rerankBaseURL
|
||||
|
||||
// 必须携带/v1,否则会404
|
||||
if (baseURL && !baseURL.endsWith('/v1')) {
|
||||
baseURL = `${baseURL}/v1`
|
||||
}
|
||||
|
||||
const url = `${baseURL}/rerank`
|
||||
const url = this.getRerankUrl()
|
||||
|
||||
const requestBody = {
|
||||
model: this.base.rerankModel,
|
||||
@@ -29,26 +20,12 @@ export default class JinaReranker extends BaseReranker {
|
||||
}
|
||||
|
||||
try {
|
||||
const { data } = await axios.post(url, requestBody, { headers: this.defaultHeaders() })
|
||||
const { data } = await AxiosProxy.axios.post(url, requestBody, { headers: this.defaultHeaders() })
|
||||
|
||||
const rerankResults = data.results
|
||||
console.log(rerankResults)
|
||||
const resultMap = new Map(rerankResults.map((result: any) => [result.index, result.relevance_score || 0]))
|
||||
return searchResults
|
||||
.map((doc: ExtractChunkData, index: number) => {
|
||||
const score = resultMap.get(index)
|
||||
if (score === undefined) return undefined
|
||||
|
||||
return {
|
||||
...doc,
|
||||
score
|
||||
}
|
||||
})
|
||||
.filter((doc): doc is ExtractChunkData => doc !== undefined)
|
||||
.sort((a, b) => b.score - a.score)
|
||||
return this.getRerankResult(searchResults, rerankResults)
|
||||
} catch (error: any) {
|
||||
const errorDetails = this.formatErrorMessage(url, error, requestBody)
|
||||
|
||||
console.error('Jina Reranker API Error:', errorDetails)
|
||||
throw new Error(`重排序请求失败: ${error.message}\n请求详情: ${errorDetails}`)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
|
||||
import axiosProxy from '@main/services/AxiosProxy'
|
||||
import { KnowledgeBaseParams } from '@types'
|
||||
import axios from 'axios'
|
||||
|
||||
import BaseReranker from './BaseReranker'
|
||||
|
||||
@@ -10,16 +10,7 @@ export default class SiliconFlowReranker extends BaseReranker {
|
||||
}
|
||||
|
||||
public rerank = async (query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> => {
|
||||
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
|
||||
? this.base.rerankBaseURL.slice(0, -1)
|
||||
: this.base.rerankBaseURL
|
||||
|
||||
// 必须携带/v1,否则会404
|
||||
if (baseURL && !baseURL.endsWith('/v1')) {
|
||||
baseURL = `${baseURL}/v1`
|
||||
}
|
||||
|
||||
const url = `${baseURL}/rerank`
|
||||
const url = this.getRerankUrl()
|
||||
|
||||
const requestBody = {
|
||||
model: this.base.rerankModel,
|
||||
@@ -31,23 +22,10 @@ export default class SiliconFlowReranker extends BaseReranker {
|
||||
}
|
||||
|
||||
try {
|
||||
const { data } = await axios.post(url, requestBody, { headers: this.defaultHeaders() })
|
||||
const { data } = await axiosProxy.axios.post(url, requestBody, { headers: this.defaultHeaders() })
|
||||
|
||||
const rerankResults = data.results
|
||||
const resultMap = new Map(rerankResults.map((result: any) => [result.index, result.relevance_score || 0]))
|
||||
|
||||
return searchResults
|
||||
.map((doc: ExtractChunkData, index: number) => {
|
||||
const score = resultMap.get(index)
|
||||
if (score === undefined) return undefined
|
||||
|
||||
return {
|
||||
...doc,
|
||||
score
|
||||
}
|
||||
})
|
||||
.filter((doc): doc is ExtractChunkData => doc !== undefined)
|
||||
.sort((a, b) => b.score - a.score)
|
||||
return this.getRerankResult(searchResults, rerankResults)
|
||||
} catch (error: any) {
|
||||
const errorDetails = this.formatErrorMessage(url, error, requestBody)
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
|
||||
import axiosProxy from '@main/services/AxiosProxy'
|
||||
import { KnowledgeBaseParams } from '@types'
|
||||
import axios from 'axios'
|
||||
|
||||
import BaseReranker from './BaseReranker'
|
||||
|
||||
@@ -10,15 +10,7 @@ export default class VoyageReranker extends BaseReranker {
|
||||
}
|
||||
|
||||
public rerank = async (query: string, searchResults: ExtractChunkData[]): Promise<ExtractChunkData[]> => {
|
||||
let baseURL = this.base?.rerankBaseURL?.endsWith('/')
|
||||
? this.base.rerankBaseURL.slice(0, -1)
|
||||
: this.base.rerankBaseURL
|
||||
|
||||
if (baseURL && !baseURL.endsWith('/v1')) {
|
||||
baseURL = `${baseURL}/v1`
|
||||
}
|
||||
|
||||
const url = `${baseURL}/rerank`
|
||||
const url = this.getRerankUrl()
|
||||
|
||||
const requestBody = {
|
||||
model: this.base.rerankModel,
|
||||
@@ -30,28 +22,14 @@ export default class VoyageReranker extends BaseReranker {
|
||||
}
|
||||
|
||||
try {
|
||||
const { data } = await axios.post(url, requestBody, {
|
||||
const { data } = await axiosProxy.axios.post(url, requestBody, {
|
||||
headers: {
|
||||
...this.defaultHeaders()
|
||||
}
|
||||
})
|
||||
|
||||
const rerankResults = data.data
|
||||
|
||||
const resultMap = new Map(rerankResults.map((result: any) => [result.index, result.relevance_score || 0]))
|
||||
|
||||
return searchResults
|
||||
.map((doc: ExtractChunkData, index: number) => {
|
||||
const score = resultMap.get(index)
|
||||
if (score === undefined) return undefined
|
||||
|
||||
return {
|
||||
...doc,
|
||||
score
|
||||
}
|
||||
})
|
||||
.filter((doc): doc is ExtractChunkData => doc !== undefined)
|
||||
.sort((a, b) => b.score - a.score)
|
||||
return this.getRerankResult(searchResults, rerankResults)
|
||||
} catch (error: any) {
|
||||
const errorDetails = this.formatErrorMessage(url, error, requestBody)
|
||||
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import { isWin } from '@main/constant'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { UpdateInfo } from 'builder-util-runtime'
|
||||
import { app, BrowserWindow, dialog } from 'electron'
|
||||
import logger from 'electron-log'
|
||||
import { AppUpdater as _AppUpdater, autoUpdater } from 'electron-updater'
|
||||
|
||||
import icon from '../../../build/icon.png?asset'
|
||||
import { configManager } from './ConfigManager'
|
||||
|
||||
export default class AppUpdater {
|
||||
autoUpdater: _AppUpdater = autoUpdater
|
||||
@@ -14,7 +17,8 @@ export default class AppUpdater {
|
||||
|
||||
autoUpdater.logger = logger
|
||||
autoUpdater.forceDevUpdateConfig = !app.isPackaged
|
||||
autoUpdater.autoDownload = true
|
||||
autoUpdater.autoDownload = configManager.getAutoUpdate()
|
||||
autoUpdater.autoInstallOnAppQuit = configManager.getAutoUpdate()
|
||||
|
||||
// 检测下载错误
|
||||
autoUpdater.on('error', (error) => {
|
||||
@@ -24,27 +28,27 @@ export default class AppUpdater {
|
||||
stack: error.stack,
|
||||
time: new Date().toISOString()
|
||||
})
|
||||
mainWindow.webContents.send('update-error', error)
|
||||
mainWindow.webContents.send(IpcChannel.UpdateError, error)
|
||||
})
|
||||
|
||||
autoUpdater.on('update-available', (releaseInfo: UpdateInfo) => {
|
||||
logger.info('检测到新版本', releaseInfo)
|
||||
mainWindow.webContents.send('update-available', releaseInfo)
|
||||
mainWindow.webContents.send(IpcChannel.UpdateAvailable, releaseInfo)
|
||||
})
|
||||
|
||||
// 检测到不需要更新时
|
||||
autoUpdater.on('update-not-available', () => {
|
||||
mainWindow.webContents.send('update-not-available')
|
||||
mainWindow.webContents.send(IpcChannel.UpdateNotAvailable)
|
||||
})
|
||||
|
||||
// 更新下载进度
|
||||
autoUpdater.on('download-progress', (progress) => {
|
||||
mainWindow.webContents.send('download-progress', progress)
|
||||
mainWindow.webContents.send(IpcChannel.DownloadProgress, progress)
|
||||
})
|
||||
|
||||
// 当需要更新的内容下载完成后
|
||||
autoUpdater.on('update-downloaded', (releaseInfo: UpdateInfo) => {
|
||||
mainWindow.webContents.send('update-downloaded', releaseInfo)
|
||||
mainWindow.webContents.send(IpcChannel.UpdateDownloaded, releaseInfo)
|
||||
this.releaseInfo = releaseInfo
|
||||
logger.info('下载完成', releaseInfo)
|
||||
})
|
||||
@@ -52,6 +56,40 @@ export default class AppUpdater {
|
||||
this.autoUpdater = autoUpdater
|
||||
}
|
||||
|
||||
public setAutoUpdate(isActive: boolean) {
|
||||
autoUpdater.autoDownload = isActive
|
||||
autoUpdater.autoInstallOnAppQuit = isActive
|
||||
}
|
||||
|
||||
public async checkForUpdates() {
|
||||
if (isWin && 'PORTABLE_EXECUTABLE_DIR' in process.env) {
|
||||
return {
|
||||
currentVersion: app.getVersion(),
|
||||
updateInfo: null
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const update = await this.autoUpdater.checkForUpdates()
|
||||
if (update?.isUpdateAvailable && !this.autoUpdater.autoDownload) {
|
||||
// 如果 autoDownload 为 false,则需要再调用下面的函数触发下
|
||||
// do not use await, because it will block the return of this function
|
||||
this.autoUpdater.downloadUpdate()
|
||||
}
|
||||
|
||||
return {
|
||||
currentVersion: this.autoUpdater.currentVersion,
|
||||
updateInfo: update?.updateInfo
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to check for update:', error)
|
||||
return {
|
||||
currentVersion: app.getVersion(),
|
||||
updateInfo: null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async showUpdateDialog(mainWindow: BrowserWindow) {
|
||||
if (!this.releaseInfo) {
|
||||
return
|
||||
@@ -73,7 +111,7 @@ export default class AppUpdater {
|
||||
app.isQuitting = true
|
||||
setImmediate(() => autoUpdater.quitAndInstall())
|
||||
} else {
|
||||
mainWindow.webContents.send('update-downloaded-cancelled')
|
||||
mainWindow.webContents.send(IpcChannel.UpdateDownloadedCancelled)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
29
src/main/services/AxiosProxy.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { AxiosInstance, default as axios_ } from 'axios'
|
||||
import { ProxyAgent } from 'proxy-agent'
|
||||
|
||||
import { proxyManager } from './ProxyManager'
|
||||
|
||||
class AxiosProxy {
|
||||
private cacheAxios: AxiosInstance | null = null
|
||||
private proxyAgent: ProxyAgent | null = null
|
||||
|
||||
get axios(): AxiosInstance {
|
||||
const currentProxyAgent = proxyManager.getProxyAgent()
|
||||
|
||||
// 如果代理发生变化或尚未初始化,则重新创建 axios 实例
|
||||
if (this.cacheAxios === null || (currentProxyAgent !== null && this.proxyAgent !== currentProxyAgent)) {
|
||||
this.proxyAgent = currentProxyAgent
|
||||
|
||||
// 创建带有代理配置的 axios 实例
|
||||
this.cacheAxios = axios_.create({
|
||||
proxy: false,
|
||||
httpAgent: currentProxyAgent || undefined,
|
||||
httpsAgent: currentProxyAgent || undefined
|
||||
})
|
||||
}
|
||||
|
||||
return this.cacheAxios
|
||||
}
|
||||
}
|
||||
|
||||
export default new AxiosProxy()
|
||||
@@ -1,8 +1,10 @@
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { WebDavConfig } from '@types'
|
||||
import AdmZip from 'adm-zip'
|
||||
import archiver from 'archiver'
|
||||
import { exec } from 'child_process'
|
||||
import { app } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
import extract from 'extract-zip'
|
||||
import * as fs from 'fs-extra'
|
||||
import * as path from 'path'
|
||||
import { createClient, CreateDirectoryOptions, FileStat } from 'webdav'
|
||||
@@ -21,6 +23,7 @@ class BackupManager {
|
||||
this.backupToWebdav = this.backupToWebdav.bind(this)
|
||||
this.restoreFromWebdav = this.restoreFromWebdav.bind(this)
|
||||
this.listWebdavFiles = this.listWebdavFiles.bind(this)
|
||||
this.deleteWebdavFile = this.deleteWebdavFile.bind(this)
|
||||
}
|
||||
|
||||
private async setWritableRecursive(dirPath: string): Promise<void> {
|
||||
@@ -79,7 +82,7 @@ class BackupManager {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
|
||||
const onProgress = (processData: { stage: string; progress: number; total: number }) => {
|
||||
mainWindow?.webContents.send('backup-progress', processData)
|
||||
mainWindow?.webContents.send(IpcChannel.BackupProgress, processData)
|
||||
Logger.log('[BackupManager] backup progress', processData)
|
||||
}
|
||||
|
||||
@@ -89,6 +92,7 @@ class BackupManager {
|
||||
|
||||
// 使用流的方式写入 data.json
|
||||
const tempDataPath = path.join(this.tempDir, 'data.json')
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const writeStream = fs.createWriteStream(tempDataPath)
|
||||
writeStream.write(data)
|
||||
@@ -97,6 +101,7 @@ class BackupManager {
|
||||
writeStream.on('finish', () => resolve())
|
||||
writeStream.on('error', (error) => reject(error))
|
||||
})
|
||||
|
||||
onProgress({ stage: 'writing_data', progress: 20, total: 100 })
|
||||
|
||||
// 复制 Data 目录到临时目录
|
||||
@@ -110,18 +115,92 @@ class BackupManager {
|
||||
// 使用流式复制
|
||||
await this.copyDirWithProgress(sourcePath, tempDataDir, (size) => {
|
||||
copiedSize += size
|
||||
const progress = Math.min(80, 20 + Math.floor((copiedSize / totalSize) * 60))
|
||||
const progress = Math.min(50, Math.floor((copiedSize / totalSize) * 50))
|
||||
onProgress({ stage: 'copying_files', progress, total: 100 })
|
||||
})
|
||||
|
||||
await this.setWritableRecursive(tempDataDir)
|
||||
onProgress({ stage: 'compressing', progress: 80, total: 100 })
|
||||
onProgress({ stage: 'preparing_compression', progress: 50, total: 100 })
|
||||
|
||||
// 使用 adm-zip 创建压缩文件
|
||||
const zip = new AdmZip()
|
||||
zip.addLocalFolder(this.tempDir)
|
||||
// 创建输出文件流
|
||||
const backupedFilePath = path.join(destinationPath, fileName)
|
||||
zip.writeZip(backupedFilePath)
|
||||
const output = fs.createWriteStream(backupedFilePath)
|
||||
|
||||
// 创建 archiver 实例,启用 ZIP64 支持
|
||||
const archive = archiver('zip', {
|
||||
zlib: { level: 1 }, // 使用最低压缩级别以提高速度
|
||||
zip64: true // 启用 ZIP64 支持以处理大文件
|
||||
})
|
||||
|
||||
let lastProgress = 50
|
||||
let totalEntries = 0
|
||||
let processedEntries = 0
|
||||
let totalBytes = 0
|
||||
let processedBytes = 0
|
||||
|
||||
// 首先计算总文件数和总大小
|
||||
const calculateTotals = async (dirPath: string) => {
|
||||
const items = await fs.readdir(dirPath, { withFileTypes: true })
|
||||
for (const item of items) {
|
||||
const fullPath = path.join(dirPath, item.name)
|
||||
if (item.isDirectory()) {
|
||||
await calculateTotals(fullPath)
|
||||
} else {
|
||||
totalEntries++
|
||||
const stats = await fs.stat(fullPath)
|
||||
totalBytes += stats.size
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await calculateTotals(this.tempDir)
|
||||
|
||||
// 监听文件添加事件
|
||||
archive.on('entry', () => {
|
||||
processedEntries++
|
||||
if (totalEntries > 0) {
|
||||
const progressPercent = Math.min(55, 50 + Math.floor((processedEntries / totalEntries) * 5))
|
||||
if (progressPercent > lastProgress) {
|
||||
lastProgress = progressPercent
|
||||
onProgress({ stage: 'compressing', progress: progressPercent, total: 100 })
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// 监听数据写入事件
|
||||
archive.on('data', (chunk) => {
|
||||
processedBytes += chunk.length
|
||||
if (totalBytes > 0) {
|
||||
const progressPercent = Math.min(99, 55 + Math.floor((processedBytes / totalBytes) * 44))
|
||||
if (progressPercent > lastProgress) {
|
||||
lastProgress = progressPercent
|
||||
onProgress({ stage: 'compressing', progress: progressPercent, total: 100 })
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// 使用 Promise 等待压缩完成
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
output.on('close', () => {
|
||||
onProgress({ stage: 'compressing', progress: 100, total: 100 })
|
||||
resolve()
|
||||
})
|
||||
archive.on('error', reject)
|
||||
archive.on('warning', (err: any) => {
|
||||
if (err.code !== 'ENOENT') {
|
||||
Logger.warn('[BackupManager] Archive warning:', err)
|
||||
}
|
||||
})
|
||||
|
||||
// 将输出流连接到压缩器
|
||||
archive.pipe(output)
|
||||
|
||||
// 添加整个临时目录到压缩文件
|
||||
archive.directory(this.tempDir, false)
|
||||
|
||||
// 完成压缩
|
||||
archive.finalize()
|
||||
})
|
||||
|
||||
// 清理临时目录
|
||||
await fs.remove(this.tempDir)
|
||||
@@ -131,6 +210,8 @@ class BackupManager {
|
||||
return backupedFilePath
|
||||
} catch (error) {
|
||||
Logger.error('[BackupManager] Backup failed:', error)
|
||||
// 确保清理临时目录
|
||||
await fs.remove(this.tempDir).catch(() => {})
|
||||
throw error
|
||||
}
|
||||
}
|
||||
@@ -139,7 +220,7 @@ class BackupManager {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
|
||||
const onProgress = (processData: { stage: string; progress: number; total: number }) => {
|
||||
mainWindow?.webContents.send('restore-progress', processData)
|
||||
mainWindow?.webContents.send(IpcChannel.RestoreProgress, processData)
|
||||
Logger.log('[BackupManager] restore progress', processData)
|
||||
}
|
||||
|
||||
@@ -149,16 +230,22 @@ class BackupManager {
|
||||
onProgress({ stage: 'preparing', progress: 0, total: 100 })
|
||||
|
||||
Logger.log('[backup] step 1: unzip backup file', this.tempDir)
|
||||
// 使用 adm-zip 解压
|
||||
const zip = new AdmZip(backupPath)
|
||||
zip.extractAllTo(this.tempDir, true) // true 表示覆盖已存在的文件
|
||||
onProgress({ stage: 'extracting', progress: 20, total: 100 })
|
||||
|
||||
// 使用 extract-zip 解压
|
||||
await extract(backupPath, {
|
||||
dir: this.tempDir,
|
||||
onEntry: () => {
|
||||
// 这里可以处理进度,但 extract-zip 不提供总条目数信息
|
||||
onProgress({ stage: 'extracting', progress: 15, total: 100 })
|
||||
}
|
||||
})
|
||||
onProgress({ stage: 'extracting', progress: 25, total: 100 })
|
||||
|
||||
Logger.log('[backup] step 2: read data.json')
|
||||
// 读取 data.json
|
||||
const dataPath = path.join(this.tempDir, 'data.json')
|
||||
const data = await fs.readFile(dataPath, 'utf-8')
|
||||
onProgress({ stage: 'reading_data', progress: 40, total: 100 })
|
||||
onProgress({ stage: 'reading_data', progress: 35, total: 100 })
|
||||
|
||||
Logger.log('[backup] step 3: restore Data directory')
|
||||
// 恢复 Data 目录
|
||||
@@ -175,7 +262,7 @@ class BackupManager {
|
||||
// 使用流式复制
|
||||
await this.copyDirWithProgress(sourcePath, destPath, (size) => {
|
||||
copiedSize += size
|
||||
const progress = Math.min(90, 40 + Math.floor((copiedSize / totalSize) * 50))
|
||||
const progress = Math.min(85, 35 + Math.floor((copiedSize / totalSize) * 50))
|
||||
onProgress({ stage: 'copying_files', progress, total: 100 })
|
||||
})
|
||||
|
||||
@@ -308,6 +395,16 @@ class BackupManager {
|
||||
const webdavClient = new WebDav(webdavConfig)
|
||||
return await webdavClient.createDirectory(path, options)
|
||||
}
|
||||
|
||||
async deleteWebdavFile(_: Electron.IpcMainInvokeEvent, fileName: string, webdavConfig: WebDavConfig) {
|
||||
try {
|
||||
const webdavClient = new WebDav(webdavConfig)
|
||||
return await webdavClient.deleteFile(fileName)
|
||||
} catch (error: any) {
|
||||
Logger.error('Failed to delete WebDAV file:', error)
|
||||
throw new Error(error.message || 'Failed to delete backup file')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default BackupManager
|
||||
|
||||
@@ -1,10 +1,24 @@
|
||||
import { ZOOM_SHORTCUTS } from '@shared/config/constant'
|
||||
import { defaultLanguage, ZOOM_SHORTCUTS } from '@shared/config/constant'
|
||||
import { LanguageVarious, Shortcut, ThemeMode } from '@types'
|
||||
import { app } from 'electron'
|
||||
import Store from 'electron-store'
|
||||
|
||||
import { locales } from '../utils/locales'
|
||||
|
||||
enum ConfigKeys {
|
||||
Language = 'language',
|
||||
Theme = 'theme',
|
||||
LaunchToTray = 'launchToTray',
|
||||
Tray = 'tray',
|
||||
TrayOnClose = 'trayOnClose',
|
||||
ZoomFactor = 'ZoomFactor',
|
||||
Shortcuts = 'shortcuts',
|
||||
ClickTrayToShowQuickAssistant = 'clickTrayToShowQuickAssistant',
|
||||
EnableQuickAssistant = 'enableQuickAssistant',
|
||||
AutoUpdate = 'autoUpdate',
|
||||
EnableDataCollection = 'enableDataCollection'
|
||||
}
|
||||
|
||||
export class ConfigManager {
|
||||
private store: Store
|
||||
private subscribers: Map<string, Array<(newValue: any) => void>> = new Map()
|
||||
@@ -14,54 +28,62 @@ export class ConfigManager {
|
||||
}
|
||||
|
||||
getLanguage(): LanguageVarious {
|
||||
const locale = Object.keys(locales).includes(app.getLocale()) ? app.getLocale() : 'en-US'
|
||||
return this.store.get('language', locale) as LanguageVarious
|
||||
const locale = Object.keys(locales).includes(app.getLocale()) ? app.getLocale() : defaultLanguage
|
||||
return this.get(ConfigKeys.Language, locale) as LanguageVarious
|
||||
}
|
||||
|
||||
setLanguage(theme: LanguageVarious) {
|
||||
this.store.set('language', theme)
|
||||
this.set(ConfigKeys.Language, theme)
|
||||
}
|
||||
|
||||
getTheme(): ThemeMode {
|
||||
return this.store.get('theme', ThemeMode.light) as ThemeMode
|
||||
return this.get(ConfigKeys.Theme, ThemeMode.auto)
|
||||
}
|
||||
|
||||
setTheme(theme: ThemeMode) {
|
||||
this.store.set('theme', theme)
|
||||
this.set(ConfigKeys.Theme, theme)
|
||||
}
|
||||
|
||||
getCustomCss(): string {
|
||||
return this.store.get('customCss', '') as string
|
||||
}
|
||||
|
||||
setCustomCss(css: string) {
|
||||
this.store.set('customCss', css)
|
||||
}
|
||||
|
||||
getLaunchToTray(): boolean {
|
||||
return !!this.store.get('launchToTray', false)
|
||||
return !!this.get(ConfigKeys.LaunchToTray, false)
|
||||
}
|
||||
|
||||
setLaunchToTray(value: boolean) {
|
||||
this.store.set('launchToTray', value)
|
||||
this.set(ConfigKeys.LaunchToTray, value)
|
||||
}
|
||||
|
||||
getTray(): boolean {
|
||||
return !!this.store.get('tray', true)
|
||||
return !!this.get(ConfigKeys.Tray, true)
|
||||
}
|
||||
|
||||
setTray(value: boolean) {
|
||||
this.store.set('tray', value)
|
||||
this.notifySubscribers('tray', value)
|
||||
this.set(ConfigKeys.Tray, value)
|
||||
this.notifySubscribers(ConfigKeys.Tray, value)
|
||||
}
|
||||
|
||||
getTrayOnClose(): boolean {
|
||||
return !!this.store.get('trayOnClose', true)
|
||||
return !!this.get(ConfigKeys.TrayOnClose, true)
|
||||
}
|
||||
|
||||
setTrayOnClose(value: boolean) {
|
||||
this.store.set('trayOnClose', value)
|
||||
this.set(ConfigKeys.TrayOnClose, value)
|
||||
}
|
||||
|
||||
getZoomFactor(): number {
|
||||
return this.store.get('zoomFactor', 1) as number
|
||||
return this.get<number>(ConfigKeys.ZoomFactor, 1)
|
||||
}
|
||||
|
||||
setZoomFactor(factor: number) {
|
||||
this.store.set('zoomFactor', factor)
|
||||
this.notifySubscribers('zoomFactor', factor)
|
||||
this.set(ConfigKeys.ZoomFactor, factor)
|
||||
this.notifySubscribers(ConfigKeys.ZoomFactor, factor)
|
||||
}
|
||||
|
||||
subscribe<T>(key: string, callback: (newValue: T) => void) {
|
||||
@@ -89,39 +111,55 @@ export class ConfigManager {
|
||||
}
|
||||
|
||||
getShortcuts() {
|
||||
return this.store.get('shortcuts', ZOOM_SHORTCUTS) as Shortcut[] | []
|
||||
return this.get(ConfigKeys.Shortcuts, ZOOM_SHORTCUTS) as Shortcut[] | []
|
||||
}
|
||||
|
||||
setShortcuts(shortcuts: Shortcut[]) {
|
||||
this.store.set(
|
||||
'shortcuts',
|
||||
this.set(
|
||||
ConfigKeys.Shortcuts,
|
||||
shortcuts.filter((shortcut) => shortcut.system)
|
||||
)
|
||||
this.notifySubscribers('shortcuts', shortcuts)
|
||||
this.notifySubscribers(ConfigKeys.Shortcuts, shortcuts)
|
||||
}
|
||||
|
||||
getClickTrayToShowQuickAssistant(): boolean {
|
||||
return this.store.get('clickTrayToShowQuickAssistant', false) as boolean
|
||||
return this.get<boolean>(ConfigKeys.ClickTrayToShowQuickAssistant, false)
|
||||
}
|
||||
|
||||
setClickTrayToShowQuickAssistant(value: boolean) {
|
||||
this.store.set('clickTrayToShowQuickAssistant', value)
|
||||
this.set(ConfigKeys.ClickTrayToShowQuickAssistant, value)
|
||||
}
|
||||
|
||||
getEnableQuickAssistant(): boolean {
|
||||
return this.store.get('enableQuickAssistant', false) as boolean
|
||||
return this.get(ConfigKeys.EnableQuickAssistant, false)
|
||||
}
|
||||
|
||||
setEnableQuickAssistant(value: boolean) {
|
||||
this.store.set('enableQuickAssistant', value)
|
||||
this.set(ConfigKeys.EnableQuickAssistant, value)
|
||||
}
|
||||
|
||||
set(key: string, value: any) {
|
||||
getAutoUpdate(): boolean {
|
||||
return this.get<boolean>(ConfigKeys.AutoUpdate, true)
|
||||
}
|
||||
|
||||
setAutoUpdate(value: boolean) {
|
||||
this.set(ConfigKeys.AutoUpdate, value)
|
||||
}
|
||||
|
||||
getEnableDataCollection(): boolean {
|
||||
return this.get<boolean>(ConfigKeys.EnableDataCollection, true)
|
||||
}
|
||||
|
||||
setEnableDataCollection(value: boolean) {
|
||||
this.set(ConfigKeys.EnableDataCollection, value)
|
||||
}
|
||||
|
||||
set(key: string, value: unknown) {
|
||||
this.store.set(key, value)
|
||||
}
|
||||
|
||||
get(key: string) {
|
||||
return this.store.get(key)
|
||||
get<T>(key: string, defaultValue?: T) {
|
||||
return this.store.get(key, defaultValue) as T
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import axios, { AxiosRequestConfig } from 'axios'
|
||||
import { AxiosRequestConfig } from 'axios'
|
||||
import { app, safeStorage } from 'electron'
|
||||
import fs from 'fs/promises'
|
||||
import path from 'path'
|
||||
|
||||
import aoxisProxy from './AxiosProxy'
|
||||
|
||||
// 配置常量,集中管理
|
||||
const CONFIG = {
|
||||
GITHUB_CLIENT_ID: 'Iv1.b507a08c87ecfe98',
|
||||
@@ -93,7 +95,7 @@ class CopilotService {
|
||||
}
|
||||
}
|
||||
|
||||
const response = await axios.get(CONFIG.API_URLS.GITHUB_USER, config)
|
||||
const response = await aoxisProxy.axios.get(CONFIG.API_URLS.GITHUB_USER, config)
|
||||
return {
|
||||
login: response.data.login,
|
||||
avatar: response.data.avatar_url
|
||||
@@ -114,7 +116,7 @@ class CopilotService {
|
||||
try {
|
||||
this.updateHeaders(headers)
|
||||
|
||||
const response = await axios.post<AuthResponse>(
|
||||
const response = await aoxisProxy.axios.post<AuthResponse>(
|
||||
CONFIG.API_URLS.GITHUB_DEVICE_CODE,
|
||||
{
|
||||
client_id: CONFIG.GITHUB_CLIENT_ID,
|
||||
@@ -146,7 +148,7 @@ class CopilotService {
|
||||
await this.delay(currentDelay)
|
||||
|
||||
try {
|
||||
const response = await axios.post<TokenResponse>(
|
||||
const response = await aoxisProxy.axios.post<TokenResponse>(
|
||||
CONFIG.API_URLS.GITHUB_ACCESS_TOKEN,
|
||||
{
|
||||
client_id: CONFIG.GITHUB_CLIENT_ID,
|
||||
@@ -208,7 +210,7 @@ class CopilotService {
|
||||
}
|
||||
}
|
||||
|
||||
const response = await axios.get<CopilotTokenResponse>(CONFIG.API_URLS.COPILOT_TOKEN, config)
|
||||
const response = await aoxisProxy.axios.get<CopilotTokenResponse>(CONFIG.API_URLS.COPILOT_TOKEN, config)
|
||||
|
||||
return response.data
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { getFilesDir, getFileType, getTempDir } from '@main/utils/file'
|
||||
import { documentExts, imageExts } from '@shared/config/constant'
|
||||
import { documentExts, imageExts, MB } from '@shared/config/constant'
|
||||
import { FileType } from '@types'
|
||||
import * as crypto from 'crypto'
|
||||
import {
|
||||
@@ -122,7 +122,7 @@ class FileStorage {
|
||||
private async compressImage(sourcePath: string, destPath: string): Promise<void> {
|
||||
try {
|
||||
const stats = fs.statSync(sourcePath)
|
||||
const fileSizeInMB = stats.size / (1024 * 1024)
|
||||
const fileSizeInMB = stats.size / MB
|
||||
|
||||
// 如果图片大于1MB才进行压缩
|
||||
if (fileSizeInMB > 1) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { FileMetadataResponse, FileState, GoogleAIFileManager } from '@google/generative-ai/server'
|
||||
import { File, FileState, GoogleGenAI, Pager } from '@google/genai'
|
||||
import { FileType } from '@types'
|
||||
import fs from 'fs'
|
||||
|
||||
@@ -8,11 +8,15 @@ export class GeminiService {
|
||||
private static readonly FILE_LIST_CACHE_KEY = 'gemini_file_list'
|
||||
private static readonly CACHE_DURATION = 3000
|
||||
|
||||
static async uploadFile(_: Electron.IpcMainInvokeEvent, file: FileType, apiKey: string) {
|
||||
const fileManager = new GoogleAIFileManager(apiKey)
|
||||
const uploadResult = await fileManager.uploadFile(file.path, {
|
||||
mimeType: 'application/pdf',
|
||||
displayName: file.origin_name
|
||||
static async uploadFile(_: Electron.IpcMainInvokeEvent, file: FileType, apiKey: string): Promise<File> {
|
||||
const sdk = new GoogleGenAI({ vertexai: false, apiKey })
|
||||
const uploadResult = await sdk.files.upload({
|
||||
file: file.path,
|
||||
config: {
|
||||
mimeType: 'application/pdf',
|
||||
name: file.id,
|
||||
displayName: file.origin_name
|
||||
}
|
||||
})
|
||||
return uploadResult
|
||||
}
|
||||
@@ -24,40 +28,42 @@ export class GeminiService {
|
||||
}
|
||||
}
|
||||
|
||||
static async retrieveFile(
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
file: FileType,
|
||||
apiKey: string
|
||||
): Promise<FileMetadataResponse | undefined> {
|
||||
const fileManager = new GoogleAIFileManager(apiKey)
|
||||
|
||||
static async retrieveFile(_: Electron.IpcMainInvokeEvent, file: FileType, apiKey: string): Promise<File | undefined> {
|
||||
const sdk = new GoogleGenAI({ vertexai: false, apiKey })
|
||||
const cachedResponse = CacheService.get<any>(GeminiService.FILE_LIST_CACHE_KEY)
|
||||
if (cachedResponse) {
|
||||
return GeminiService.processResponse(cachedResponse, file)
|
||||
}
|
||||
|
||||
const response = await fileManager.listFiles()
|
||||
const response = await sdk.files.list()
|
||||
CacheService.set(GeminiService.FILE_LIST_CACHE_KEY, response, GeminiService.CACHE_DURATION)
|
||||
|
||||
return GeminiService.processResponse(response, file)
|
||||
}
|
||||
|
||||
private static processResponse(response: any, file: FileType) {
|
||||
if (response.files) {
|
||||
return response.files
|
||||
.filter((file) => file.state === FileState.ACTIVE)
|
||||
.find((i) => i.displayName === file.origin_name && Number(i.sizeBytes) === file.size)
|
||||
private static async processResponse(response: Pager<File>, file: FileType) {
|
||||
for await (const f of response) {
|
||||
if (f.state === FileState.ACTIVE) {
|
||||
if (f.displayName === file.origin_name && Number(f.sizeBytes) === file.size) {
|
||||
return f
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
|
||||
|
||||
static async listFiles(_: Electron.IpcMainInvokeEvent, apiKey: string) {
|
||||
const fileManager = new GoogleAIFileManager(apiKey)
|
||||
return await fileManager.listFiles()
|
||||
static async listFiles(_: Electron.IpcMainInvokeEvent, apiKey: string): Promise<File[]> {
|
||||
const sdk = new GoogleGenAI({ vertexai: false, apiKey })
|
||||
const files: File[] = []
|
||||
for await (const f of await sdk.files.list()) {
|
||||
files.push(f)
|
||||
}
|
||||
return files
|
||||
}
|
||||
|
||||
static async deleteFile(_: Electron.IpcMainInvokeEvent, apiKey: string, fileId: string) {
|
||||
const fileManager = new GoogleAIFileManager(apiKey)
|
||||
await fileManager.deleteFile(fileId)
|
||||
static async deleteFile(_: Electron.IpcMainInvokeEvent, fileId: string, apiKey: string) {
|
||||
const sdk = new GoogleGenAI({ vertexai: false, apiKey })
|
||||
await sdk.files.delete({ name: fileId })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,7 +26,9 @@ import { addFileLoader } from '@main/loader'
|
||||
import Reranker from '@main/reranker/Reranker'
|
||||
import { windowService } from '@main/services/WindowService'
|
||||
import { getAllFiles } from '@main/utils/file'
|
||||
import { MB } from '@shared/config/constant'
|
||||
import type { LoaderReturn } from '@shared/config/types'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { FileType, KnowledgeBaseParams, KnowledgeItem } from '@types'
|
||||
import { app } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
@@ -91,7 +93,7 @@ class KnowledgeService {
|
||||
private workload = 0
|
||||
private processingItemCount = 0
|
||||
private knowledgeItemProcessingQueueMappingPromise: Map<LoaderTaskOfSet, () => void> = new Map()
|
||||
private static MAXIMUM_WORKLOAD = 1024 * 1024 * 80
|
||||
private static MAXIMUM_WORKLOAD = 80 * MB
|
||||
private static MAXIMUM_PROCESSING_ITEM_COUNT = 30
|
||||
private static ERROR_LOADER_RETURN: LoaderReturn = { entriesAdded: 0, uniqueId: '', uniqueIds: [''], loaderType: '' }
|
||||
|
||||
@@ -194,7 +196,7 @@ class KnowledgeService {
|
||||
|
||||
const sendDirectoryProcessingPercent = (totalFiles: number, processedFiles: number) => {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
mainWindow?.webContents.send('directory-processing-percent', {
|
||||
mainWindow?.webContents.send(IpcChannel.DirectoryProcessingPercent, {
|
||||
itemId: item.id,
|
||||
percent: (processedFiles / totalFiles) * 100
|
||||
})
|
||||
@@ -270,7 +272,7 @@ class KnowledgeService {
|
||||
return KnowledgeService.ERROR_LOADER_RETURN
|
||||
})
|
||||
},
|
||||
evaluateTaskWorkload: { workload: 1024 * 1024 * 2 }
|
||||
evaluateTaskWorkload: { workload: 2 * MB }
|
||||
}
|
||||
],
|
||||
loaderDoneReturn: null
|
||||
@@ -309,7 +311,7 @@ class KnowledgeService {
|
||||
Logger.error(err)
|
||||
return KnowledgeService.ERROR_LOADER_RETURN
|
||||
}),
|
||||
evaluateTaskWorkload: { workload: 1024 * 1024 * 20 }
|
||||
evaluateTaskWorkload: { workload: 20 * MB }
|
||||
}
|
||||
],
|
||||
loaderDoneReturn: null
|
||||
|
||||
@@ -1,18 +1,72 @@
|
||||
import crypto from 'node:crypto'
|
||||
import fs from 'node:fs'
|
||||
import os from 'node:os'
|
||||
import path from 'node:path'
|
||||
|
||||
import { isLinux, isMac, isWin } from '@main/constant'
|
||||
import { createInMemoryMCPServer } from '@main/mcpServers/factory'
|
||||
import { makeSureDirExists } from '@main/utils'
|
||||
import { getBinaryName, getBinaryPath } from '@main/utils/process'
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js'
|
||||
import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js'
|
||||
import { SSEClientTransport, SSEClientTransportOptions } from '@modelcontextprotocol/sdk/client/sse.js'
|
||||
import { getDefaultEnvironment, StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js'
|
||||
import {
|
||||
StreamableHTTPClientTransport,
|
||||
type StreamableHTTPClientTransportOptions
|
||||
} from '@modelcontextprotocol/sdk/client/streamableHttp'
|
||||
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory'
|
||||
import { nanoid } from '@reduxjs/toolkit'
|
||||
import { MCPServer, MCPTool } from '@types'
|
||||
import {
|
||||
GetMCPPromptResponse,
|
||||
GetResourceResponse,
|
||||
MCPCallToolResponse,
|
||||
MCPPrompt,
|
||||
MCPResource,
|
||||
MCPServer,
|
||||
MCPTool
|
||||
} from '@types'
|
||||
import { app } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
import { EventEmitter } from 'events'
|
||||
import { memoize } from 'lodash'
|
||||
|
||||
import { CacheService } from './CacheService'
|
||||
import { CallBackServer } from './mcp/oauth/callback'
|
||||
import { McpOAuthClientProvider } from './mcp/oauth/provider'
|
||||
|
||||
// Generic type for caching wrapped functions
|
||||
type CachedFunction<T extends unknown[], R> = (...args: T) => Promise<R>
|
||||
|
||||
/**
|
||||
* Higher-order function to add caching capability to any async function
|
||||
* @param fn The original function to be wrapped with caching
|
||||
* @param getCacheKey Function to generate a cache key from the function arguments
|
||||
* @param ttl Time to live for the cache entry in milliseconds
|
||||
* @param logPrefix Prefix for log messages
|
||||
* @returns The wrapped function with caching capability
|
||||
*/
|
||||
function withCache<T extends unknown[], R>(
|
||||
fn: (...args: T) => Promise<R>,
|
||||
getCacheKey: (...args: T) => string,
|
||||
ttl: number,
|
||||
logPrefix: string
|
||||
): CachedFunction<T, R> {
|
||||
return async (...args: T): Promise<R> => {
|
||||
const cacheKey = getCacheKey(...args)
|
||||
|
||||
if (CacheService.has(cacheKey)) {
|
||||
Logger.info(`${logPrefix} loaded from cache`)
|
||||
const cachedData = CacheService.get<R>(cacheKey)
|
||||
if (cachedData) {
|
||||
return cachedData
|
||||
}
|
||||
}
|
||||
|
||||
const result = await fn(...args)
|
||||
CacheService.set(cacheKey, result, ttl)
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
class McpService {
|
||||
private clients: Map<string, Client> = new Map()
|
||||
@@ -32,10 +86,15 @@ class McpService {
|
||||
this.initClient = this.initClient.bind(this)
|
||||
this.listTools = this.listTools.bind(this)
|
||||
this.callTool = this.callTool.bind(this)
|
||||
this.listPrompts = this.listPrompts.bind(this)
|
||||
this.getPrompt = this.getPrompt.bind(this)
|
||||
this.listResources = this.listResources.bind(this)
|
||||
this.getResource = this.getResource.bind(this)
|
||||
this.closeClient = this.closeClient.bind(this)
|
||||
this.removeServer = this.removeServer.bind(this)
|
||||
this.restartServer = this.restartServer.bind(this)
|
||||
this.stopServer = this.stopServer.bind(this)
|
||||
this.cleanup = this.cleanup.bind(this)
|
||||
}
|
||||
|
||||
async initClient(server: MCPServer): Promise<Client> {
|
||||
@@ -44,33 +103,80 @@ class McpService {
|
||||
// Check if we already have a client for this server configuration
|
||||
const existingClient = this.clients.get(serverKey)
|
||||
if (existingClient) {
|
||||
// Check if the existing client is still connected
|
||||
const pingResult = await existingClient.ping()
|
||||
Logger.info(`[MCP] Ping result for ${server.name}:`, pingResult)
|
||||
// If the ping fails, remove the client from the cache
|
||||
// and create a new one
|
||||
if (!pingResult) {
|
||||
try {
|
||||
// Check if the existing client is still connected
|
||||
const pingResult = await existingClient.ping()
|
||||
Logger.info(`[MCP] Ping result for ${server.name}:`, pingResult)
|
||||
// If the ping fails, remove the client from the cache
|
||||
// and create a new one
|
||||
if (!pingResult) {
|
||||
this.clients.delete(serverKey)
|
||||
} else {
|
||||
return existingClient
|
||||
}
|
||||
} catch (error) {
|
||||
Logger.error(`[MCP] Error pinging server ${server.name}:`, error)
|
||||
this.clients.delete(serverKey)
|
||||
} else {
|
||||
return existingClient
|
||||
}
|
||||
}
|
||||
|
||||
// Create new client instance for each connection
|
||||
const client = new Client({ name: 'Cherry Studio', version: app.getVersion() }, { capabilities: {} })
|
||||
|
||||
const args = [...(server.args || [])]
|
||||
|
||||
let transport: StdioClientTransport | SSEClientTransport
|
||||
// let transport: StdioClientTransport | SSEClientTransport | InMemoryTransport | StreamableHTTPClientTransport
|
||||
const authProvider = new McpOAuthClientProvider({
|
||||
serverUrlHash: crypto
|
||||
.createHash('md5')
|
||||
.update(server.baseUrl || '')
|
||||
.digest('hex')
|
||||
})
|
||||
|
||||
try {
|
||||
const initTransport = async (): Promise<
|
||||
StdioClientTransport | SSEClientTransport | InMemoryTransport | StreamableHTTPClientTransport
|
||||
> => {
|
||||
// Create appropriate transport based on configuration
|
||||
if (server.baseUrl) {
|
||||
transport = new SSEClientTransport(new URL(server.baseUrl))
|
||||
if (server.type === 'inMemory') {
|
||||
Logger.info(`[MCP] Using in-memory transport for server: ${server.name}`)
|
||||
const [clientTransport, serverTransport] = InMemoryTransport.createLinkedPair()
|
||||
// start the in-memory server with the given name and environment variables
|
||||
const inMemoryServer = createInMemoryMCPServer(server.name, args, server.env || {})
|
||||
try {
|
||||
await inMemoryServer.connect(serverTransport)
|
||||
Logger.info(`[MCP] In-memory server started: ${server.name}`)
|
||||
} catch (error: Error | any) {
|
||||
Logger.error(`[MCP] Error starting in-memory server: ${error}`)
|
||||
throw new Error(`Failed to start in-memory server: ${error.message}`)
|
||||
}
|
||||
// set the client transport to the client
|
||||
return clientTransport
|
||||
} else if (server.baseUrl) {
|
||||
if (server.type === 'streamableHttp') {
|
||||
const options: StreamableHTTPClientTransportOptions = {
|
||||
requestInit: {
|
||||
headers: server.headers || {}
|
||||
},
|
||||
authProvider
|
||||
}
|
||||
return new StreamableHTTPClientTransport(new URL(server.baseUrl!), options)
|
||||
} else if (server.type === 'sse') {
|
||||
const options: SSEClientTransportOptions = {
|
||||
eventSourceInit: {
|
||||
fetch: (url, init) => fetch(url, { ...init, headers: server.headers || {} })
|
||||
},
|
||||
requestInit: {
|
||||
headers: server.headers || {}
|
||||
},
|
||||
authProvider
|
||||
}
|
||||
return new SSEClientTransport(new URL(server.baseUrl!), options)
|
||||
} else {
|
||||
throw new Error('Invalid server type')
|
||||
}
|
||||
} else if (server.command) {
|
||||
let cmd = server.command
|
||||
|
||||
if (server.command === 'npx' || server.command === 'bun' || server.command === 'bunx') {
|
||||
if (server.command === 'npx') {
|
||||
cmd = await getBinaryPath('bun')
|
||||
Logger.info(`[MCP] Using command: ${cmd}`)
|
||||
|
||||
@@ -90,10 +196,10 @@ class McpService {
|
||||
}
|
||||
|
||||
// if the server name is mcp-auto-install, use the mcp-registry.json file in the bin directory
|
||||
if (server.name === 'mcp-auto-install') {
|
||||
if (server.name.includes('mcp-auto-install')) {
|
||||
const binPath = await getBinaryPath()
|
||||
makeSureDirExists(binPath)
|
||||
server.env.MCP_REGISTRY_PATH = path.join(binPath, 'mcp-registry.json')
|
||||
server.env.MCP_REGISTRY_PATH = path.join(binPath, '..', 'config', 'mcp-registry.json')
|
||||
}
|
||||
}
|
||||
} else if (server.command === 'uvx' || server.command === 'uv') {
|
||||
@@ -110,20 +216,82 @@ class McpService {
|
||||
Logger.info(`[MCP] Starting server with command: ${cmd} ${args ? args.join(' ') : ''}`)
|
||||
// Logger.info(`[MCP] Environment variables for server:`, server.env)
|
||||
|
||||
transport = new StdioClientTransport({
|
||||
const stdioTransport = new StdioClientTransport({
|
||||
command: cmd,
|
||||
args,
|
||||
env: {
|
||||
...getDefaultEnvironment(),
|
||||
PATH: this.getEnhancedPath(process.env.PATH || ''),
|
||||
PATH: await this.getEnhancedPath(process.env.PATH || ''),
|
||||
...server.env
|
||||
}
|
||||
},
|
||||
stderr: 'pipe'
|
||||
})
|
||||
stdioTransport.stderr?.on('data', (data) =>
|
||||
Logger.info(`[MCP] Stdio stderr for server: ${server.name} `, data.toString())
|
||||
)
|
||||
return stdioTransport
|
||||
} else {
|
||||
throw new Error('Either baseUrl or command must be provided')
|
||||
}
|
||||
}
|
||||
|
||||
await client.connect(transport)
|
||||
const handleAuth = async (client: Client, transport: SSEClientTransport | StreamableHTTPClientTransport) => {
|
||||
Logger.info(`[MCP] Starting OAuth flow for server: ${server.name}`)
|
||||
// Create an event emitter for the OAuth callback
|
||||
const events = new EventEmitter()
|
||||
|
||||
// Create a callback server
|
||||
const callbackServer = new CallBackServer({
|
||||
port: authProvider.config.callbackPort,
|
||||
path: authProvider.config.callbackPath || '/oauth/callback',
|
||||
events
|
||||
})
|
||||
|
||||
// Set a timeout to close the callback server
|
||||
const timeoutId = setTimeout(() => {
|
||||
Logger.warn(`[MCP] OAuth flow timed out for server: ${server.name}`)
|
||||
callbackServer.close()
|
||||
}, 300000) // 5 minutes timeout
|
||||
|
||||
try {
|
||||
// Wait for the authorization code
|
||||
const authCode = await callbackServer.waitForAuthCode()
|
||||
Logger.info(`[MCP] Received auth code: ${authCode}`)
|
||||
|
||||
// Complete the OAuth flow
|
||||
await transport.finishAuth(authCode)
|
||||
|
||||
Logger.info(`[MCP] OAuth flow completed for server: ${server.name}`)
|
||||
|
||||
const newTransport = await initTransport()
|
||||
// Try to connect again
|
||||
await client.connect(newTransport)
|
||||
|
||||
Logger.info(`[MCP] Successfully authenticated with server: ${server.name}`)
|
||||
} catch (oauthError) {
|
||||
Logger.error(`[MCP] OAuth authentication failed for server ${server.name}:`, oauthError)
|
||||
throw new Error(
|
||||
`OAuth authentication failed: ${oauthError instanceof Error ? oauthError.message : String(oauthError)}`
|
||||
)
|
||||
} finally {
|
||||
// Clear the timeout and close the callback server
|
||||
clearTimeout(timeoutId)
|
||||
callbackServer.close()
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const transport = await initTransport()
|
||||
try {
|
||||
await client.connect(transport)
|
||||
} catch (error: Error | any) {
|
||||
if (error instanceof Error && (error.name === 'UnauthorizedError' || error.message.includes('Unauthorized'))) {
|
||||
Logger.info(`[MCP] Authentication required for server: ${server.name}`)
|
||||
await handleAuth(client, transport as SSEClientTransport | StreamableHTTPClientTransport)
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Store the new client in the cache
|
||||
this.clients.set(serverKey, client)
|
||||
@@ -132,7 +300,7 @@ class McpService {
|
||||
return client
|
||||
} catch (error: any) {
|
||||
Logger.error(`[MCP] Error activating server ${server.name}:`, error)
|
||||
throw error
|
||||
throw new Error(`[MCP] Error activating server ${server.name}: ${error.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -171,31 +339,50 @@ class McpService {
|
||||
await this.initClient(server)
|
||||
}
|
||||
|
||||
async listTools(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
|
||||
const client = await this.initClient(server)
|
||||
const serverKey = this.getServerKey(server)
|
||||
const cacheKey = `mcp:list_tool:${serverKey}`
|
||||
if (CacheService.has(cacheKey)) {
|
||||
Logger.info(`[MCP] Tools from ${server.name} loaded from cache`)
|
||||
const cachedTools = CacheService.get<MCPTool[]>(cacheKey)
|
||||
if (cachedTools && cachedTools.length > 0) {
|
||||
return cachedTools
|
||||
async cleanup() {
|
||||
for (const [key] of this.clients) {
|
||||
try {
|
||||
await this.closeClient(key)
|
||||
} catch (error) {
|
||||
Logger.error(`[MCP] Failed to close client: ${error}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async listToolsImpl(server: MCPServer): Promise<MCPTool[]> {
|
||||
Logger.info(`[MCP] Listing tools for server: ${server.name}`)
|
||||
const { tools } = await client.listTools()
|
||||
const serverTools: MCPTool[] = []
|
||||
tools.map((tool: any) => {
|
||||
const serverTool: MCPTool = {
|
||||
...tool,
|
||||
id: `f${nanoid()}`,
|
||||
serverId: server.id,
|
||||
serverName: server.name
|
||||
}
|
||||
serverTools.push(serverTool)
|
||||
})
|
||||
CacheService.set(cacheKey, serverTools, 5 * 60 * 1000)
|
||||
return serverTools
|
||||
const client = await this.initClient(server)
|
||||
try {
|
||||
const { tools } = await client.listTools()
|
||||
const serverTools: MCPTool[] = []
|
||||
tools.map((tool: any) => {
|
||||
const serverTool: MCPTool = {
|
||||
...tool,
|
||||
id: `f${nanoid()}`,
|
||||
serverId: server.id,
|
||||
serverName: server.name
|
||||
}
|
||||
serverTools.push(serverTool)
|
||||
})
|
||||
return serverTools
|
||||
} catch (error) {
|
||||
Logger.error(`[MCP] Failed to list tools for server: ${server.name}`, error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
async listTools(_: Electron.IpcMainInvokeEvent, server: MCPServer) {
|
||||
const cachedListTools = withCache<[MCPServer], MCPTool[]>(
|
||||
this.listToolsImpl.bind(this),
|
||||
(server) => {
|
||||
const serverKey = this.getServerKey(server)
|
||||
return `mcp:list_tool:${serverKey}`
|
||||
},
|
||||
5 * 60 * 1000, // 5 minutes TTL
|
||||
`[MCP] Tools from ${server.name}`
|
||||
)
|
||||
|
||||
return cachedListTools(server)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -204,12 +391,21 @@ class McpService {
|
||||
public async callTool(
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
{ server, name, args }: { server: MCPServer; name: string; args: any }
|
||||
): Promise<any> {
|
||||
): Promise<MCPCallToolResponse> {
|
||||
try {
|
||||
Logger.info('[MCP] Calling:', server.name, name, args)
|
||||
if (typeof args === 'string') {
|
||||
try {
|
||||
args = JSON.parse(args)
|
||||
} catch (e) {
|
||||
Logger.error('[MCP] args parse error', args)
|
||||
}
|
||||
}
|
||||
const client = await this.initClient(server)
|
||||
const result = await client.callTool({ name, arguments: args })
|
||||
return result
|
||||
const result = await client.callTool({ name, arguments: args }, undefined, {
|
||||
timeout: server.timeout ? server.timeout * 1000 : 60000 // Default timeout of 1 minute
|
||||
})
|
||||
return result as MCPCallToolResponse
|
||||
} catch (error) {
|
||||
Logger.error(`[MCP] Error calling tool ${name} on ${server.name}:`, error)
|
||||
throw error
|
||||
@@ -225,13 +421,260 @@ class McpService {
|
||||
return { dir, uvPath, bunPath }
|
||||
}
|
||||
|
||||
/**
|
||||
* List prompts available on an MCP server
|
||||
*/
|
||||
private async listPromptsImpl(server: MCPServer): Promise<MCPPrompt[]> {
|
||||
Logger.info(`[MCP] Listing prompts for server: ${server.name}`)
|
||||
const client = await this.initClient(server)
|
||||
try {
|
||||
const { prompts } = await client.listPrompts()
|
||||
const serverPrompts = prompts.map((prompt: any) => ({
|
||||
...prompt,
|
||||
id: `p${nanoid()}`,
|
||||
serverId: server.id,
|
||||
serverName: server.name
|
||||
}))
|
||||
return serverPrompts
|
||||
} catch (error) {
|
||||
Logger.error(`[MCP] Failed to list prompts for server: ${server.name}`, error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List prompts available on an MCP server with caching
|
||||
*/
|
||||
public async listPrompts(_: Electron.IpcMainInvokeEvent, server: MCPServer): Promise<MCPPrompt[]> {
|
||||
const cachedListPrompts = withCache<[MCPServer], MCPPrompt[]>(
|
||||
this.listPromptsImpl.bind(this),
|
||||
(server) => {
|
||||
const serverKey = this.getServerKey(server)
|
||||
return `mcp:list_prompts:${serverKey}`
|
||||
},
|
||||
60 * 60 * 1000, // 60 minutes TTL
|
||||
`[MCP] Prompts from ${server.name}`
|
||||
)
|
||||
return cachedListPrompts(server)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific prompt from an MCP server (implementation)
|
||||
*/
|
||||
private async getPromptImpl(
|
||||
server: MCPServer,
|
||||
name: string,
|
||||
args?: Record<string, any>
|
||||
): Promise<GetMCPPromptResponse> {
|
||||
Logger.info(`[MCP] Getting prompt ${name} from server: ${server.name}`)
|
||||
const client = await this.initClient(server)
|
||||
return await client.getPrompt({ name, arguments: args })
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific prompt from an MCP server with caching
|
||||
*/
|
||||
public async getPrompt(
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
{ server, name, args }: { server: MCPServer; name: string; args?: Record<string, any> }
|
||||
): Promise<GetMCPPromptResponse> {
|
||||
const cachedGetPrompt = withCache<[MCPServer, string, Record<string, any> | undefined], GetMCPPromptResponse>(
|
||||
this.getPromptImpl.bind(this),
|
||||
(server, name, args) => {
|
||||
const serverKey = this.getServerKey(server)
|
||||
const argsKey = args ? JSON.stringify(args) : 'no-args'
|
||||
return `mcp:get_prompt:${serverKey}:${name}:${argsKey}`
|
||||
},
|
||||
30 * 60 * 1000, // 30 minutes TTL
|
||||
`[MCP] Prompt ${name} from ${server.name}`
|
||||
)
|
||||
return await cachedGetPrompt(server, name, args)
|
||||
}
|
||||
|
||||
/**
|
||||
* List resources available on an MCP server (implementation)
|
||||
*/
|
||||
private async listResourcesImpl(server: MCPServer): Promise<MCPResource[]> {
|
||||
Logger.info(`[MCP] Listing resources for server: ${server.name}`)
|
||||
const client = await this.initClient(server)
|
||||
try {
|
||||
const result = await client.listResources()
|
||||
const resources = result.resources || []
|
||||
const serverResources = (Array.isArray(resources) ? resources : []).map((resource: any) => ({
|
||||
...resource,
|
||||
serverId: server.id,
|
||||
serverName: server.name
|
||||
}))
|
||||
return serverResources
|
||||
} catch (error) {
|
||||
Logger.error(`[MCP] Failed to list resources for server: ${server.name}`, error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List resources available on an MCP server with caching
|
||||
*/
|
||||
public async listResources(_: Electron.IpcMainInvokeEvent, server: MCPServer): Promise<MCPResource[]> {
|
||||
const cachedListResources = withCache<[MCPServer], MCPResource[]>(
|
||||
this.listResourcesImpl.bind(this),
|
||||
(server) => {
|
||||
const serverKey = this.getServerKey(server)
|
||||
return `mcp:list_resources:${serverKey}`
|
||||
},
|
||||
60 * 60 * 1000, // 60 minutes TTL
|
||||
`[MCP] Resources from ${server.name}`
|
||||
)
|
||||
return cachedListResources(server)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific resource from an MCP server (implementation)
|
||||
*/
|
||||
private async getResourceImpl(server: MCPServer, uri: string): Promise<GetResourceResponse> {
|
||||
Logger.info(`[MCP] Getting resource ${uri} from server: ${server.name}`)
|
||||
const client = await this.initClient(server)
|
||||
try {
|
||||
const result = await client.readResource({ uri: uri })
|
||||
const contents: MCPResource[] = []
|
||||
if (result.contents && result.contents.length > 0) {
|
||||
result.contents.forEach((content: any) => {
|
||||
contents.push({
|
||||
...content,
|
||||
serverId: server.id,
|
||||
serverName: server.name
|
||||
})
|
||||
})
|
||||
}
|
||||
return {
|
||||
contents: contents
|
||||
}
|
||||
} catch (error: Error | any) {
|
||||
Logger.error(`[MCP] Failed to get resource ${uri} from server: ${server.name}`, error)
|
||||
throw new Error(`Failed to get resource ${uri} from server: ${server.name}: ${error.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific resource from an MCP server with caching
|
||||
*/
|
||||
public async getResource(
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
{ server, uri }: { server: MCPServer; uri: string }
|
||||
): Promise<GetResourceResponse> {
|
||||
const cachedGetResource = withCache<[MCPServer, string], GetResourceResponse>(
|
||||
this.getResourceImpl.bind(this),
|
||||
(server, uri) => {
|
||||
const serverKey = this.getServerKey(server)
|
||||
return `mcp:get_resource:${serverKey}:${uri}`
|
||||
},
|
||||
30 * 60 * 1000, // 30 minutes TTL
|
||||
`[MCP] Resource ${uri} from ${server.name}`
|
||||
)
|
||||
return await cachedGetResource(server, uri)
|
||||
}
|
||||
|
||||
private findPowerShellExecutable() {
|
||||
const psPath = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe' // Standard WinPS path
|
||||
const pwshPath = 'C:\\Program Files\\PowerShell\\7\\pwsh.exe'
|
||||
|
||||
if (fs.existsSync(psPath)) {
|
||||
return psPath
|
||||
}
|
||||
if (fs.existsSync(pwshPath)) {
|
||||
return pwshPath
|
||||
}
|
||||
return 'powershell.exe'
|
||||
}
|
||||
|
||||
private getSystemPath = memoize(async (): Promise<string> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let command: string
|
||||
let shell: string
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
shell = this.findPowerShellExecutable()
|
||||
command = '$env:PATH'
|
||||
} else {
|
||||
// 尝试获取当前用户的默认 shell
|
||||
|
||||
let userShell = process.env.SHELL
|
||||
if (!userShell) {
|
||||
if (fs.existsSync('/bin/zsh')) {
|
||||
userShell = '/bin/zsh'
|
||||
} else if (fs.existsSync('/bin/bash')) {
|
||||
userShell = '/bin/bash'
|
||||
} else if (fs.existsSync('/bin/fish')) {
|
||||
userShell = '/bin/fish'
|
||||
} else {
|
||||
userShell = '/bin/sh'
|
||||
}
|
||||
}
|
||||
shell = userShell
|
||||
|
||||
// 根据不同的 shell 构建不同的命令
|
||||
if (userShell.includes('zsh')) {
|
||||
command =
|
||||
'source /etc/zshenv 2>/dev/null || true; source ~/.zshenv 2>/dev/null || true; source /etc/zprofile 2>/dev/null || true; source ~/.zprofile 2>/dev/null || true; source /etc/zshrc 2>/dev/null || true; source ~/.zshrc 2>/dev/null || true; source /etc/zlogin 2>/dev/null || true; source ~/.zlogin 2>/dev/null || true; echo $PATH'
|
||||
} else if (userShell.includes('bash')) {
|
||||
command =
|
||||
'source /etc/profile 2>/dev/null || true; source ~/.bash_profile 2>/dev/null || true; source ~/.bash_login 2>/dev/null || true; source ~/.profile 2>/dev/null || true; source ~/.bashrc 2>/dev/null || true; echo $PATH'
|
||||
} else if (userShell.includes('fish')) {
|
||||
command =
|
||||
'source /etc/fish/config.fish 2>/dev/null || true; source ~/.config/fish/config.fish 2>/dev/null || true; source ~/.config/fish/config.local.fish 2>/dev/null || true; echo $PATH'
|
||||
} else {
|
||||
// Default to zsh
|
||||
shell = '/bin/zsh'
|
||||
command =
|
||||
'source /etc/zshenv 2>/dev/null || true; source ~/.zshenv 2>/dev/null || true; source /etc/zprofile 2>/dev/null || true; source ~/.zprofile 2>/dev/null || true; source /etc/zshrc 2>/dev/null || true; source ~/.zshrc 2>/dev/null || true; source /etc/zlogin 2>/dev/null || true; source ~/.zlogin 2>/dev/null || true; echo $PATH'
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Using shell: ${shell} with command: ${command}`)
|
||||
const child = require('child_process').spawn(shell, ['-c', command], {
|
||||
env: { ...process.env },
|
||||
cwd: app.getPath('home')
|
||||
})
|
||||
|
||||
let path = ''
|
||||
child.stdout.on('data', (data: Buffer) => {
|
||||
path += data.toString()
|
||||
})
|
||||
|
||||
child.stderr.on('data', (data: Buffer) => {
|
||||
console.error('Error getting PATH:', data.toString())
|
||||
})
|
||||
|
||||
child.on('error', (error: Error) => {
|
||||
reject(new Error(`Failed to get system PATH, ${error.message}`))
|
||||
})
|
||||
|
||||
child.on('close', (code: number) => {
|
||||
if (code === 0) {
|
||||
const trimmedPath = path.trim()
|
||||
resolve(trimmedPath)
|
||||
} else {
|
||||
reject(new Error(`Failed to get system PATH, exit code: ${code}`))
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Get enhanced PATH including common tool locations
|
||||
*/
|
||||
private getEnhancedPath(originalPath: string): string {
|
||||
private async getEnhancedPath(originalPath: string): Promise<string> {
|
||||
let systemPath = ''
|
||||
try {
|
||||
systemPath = await this.getSystemPath()
|
||||
} catch (error) {
|
||||
Logger.error('[MCP] Failed to get system PATH:', error)
|
||||
}
|
||||
// Split the original PATH into an array by the separator
|
||||
const pathSeparator = process.platform === 'win32' ? ';' : ':'
|
||||
const existingPaths = new Set(originalPath.split(pathSeparator).filter(Boolean))
|
||||
const existingPaths = new Set(
|
||||
[...systemPath.split(pathSeparator), ...originalPath.split(pathSeparator)].filter(Boolean)
|
||||
)
|
||||
const homeDir = process.env.HOME || process.env.USERPROFILE || ''
|
||||
|
||||
// Define the new paths to add
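// Illustrative sketch (not part of the diff) of how the merge could finish; the candidate
// directories below are assumptions, only the Set/join pattern mirrors the code above.
// const candidatePaths = [path.join(homeDir, '.local', 'bin'), '/usr/local/bin', '/opt/homebrew/bin']
// candidatePaths.forEach((p) => existingPaths.add(p))
// return [...existingPaths].join(pathSeparator)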
|
||||
@@ -290,4 +733,5 @@ class McpService {
|
||||
}
|
||||
}
|
||||
|
||||
export default new McpService()
|
||||
const mcpService = new McpService()
|
||||
export default mcpService
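// Illustrative sketch (not part of the diff) of the withCache pattern used above;
// `loadServerBanner` and `someServer` are hypothetical names, not real APIs.
// const loadServerBanner = async (server: MCPServer): Promise<string> => `Connected to ${server.name}`
// const cachedBanner = withCache<[MCPServer], string>(
//   loadServerBanner,
//   (server) => `mcp:banner:${server.id}`,
//   5 * 60 * 1000, // 5 minutes TTL, same as listTools
//   '[MCP] Banner'
// )
// await cachedBanner(someServer) // first call runs loadServerBanner; repeats within the TTL hit the cache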
|
||||
|
||||
@@ -5,6 +5,8 @@ import { XMLParser } from 'fast-xml-parser'
|
||||
import { isNil, partial } from 'lodash'
|
||||
import { type FileStat } from 'webdav'
|
||||
|
||||
import { createOAuthUrl, decryptSecret } from '../integration/nutstore/sso/lib/index.mjs'
|
||||
|
||||
interface OAuthResponse {
|
||||
username: string
|
||||
userid: string
|
||||
@@ -30,18 +32,18 @@ interface WebDAVResponse {
|
||||
}
|
||||
|
||||
export async function getNutstoreSSOUrl() {
|
||||
const { createOAuthUrl } = await import('../integration/nutstore/sso/lib')
|
||||
|
||||
const url = createOAuthUrl({
|
||||
const url = await createOAuthUrl({
|
||||
app: 'cherrystudio'
|
||||
})
|
||||
return url
|
||||
}
|
||||
|
||||
export async function decryptToken(token: string) {
|
||||
const { decrypt } = await import('../integration/nutstore/sso/lib')
|
||||
try {
|
||||
const decrypted = decrypt('cherrystudio', token)
|
||||
const decrypted = await decryptSecret({
|
||||
app: 'cherrystudio',
|
||||
s: token
|
||||
})
|
||||
return JSON.parse(decrypted) as OAuthResponse
|
||||
} catch (error) {
|
||||
console.error('Decryption failed:', error)
|
||||
|
||||
@@ -1,3 +1,12 @@
|
||||
import { exec } from 'node:child_process'
|
||||
import fs from 'node:fs/promises'
|
||||
import path from 'node:path'
|
||||
import { promisify } from 'node:util'
|
||||
|
||||
import { app } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
|
||||
import { handleMcpProtocolUrl } from './urlschema/mcp-install'
|
||||
import { windowService } from './WindowService'
|
||||
|
||||
export const CHERRY_STUDIO_PROTOCOL = 'cherrystudio'
|
||||
@@ -22,6 +31,12 @@ export function handleProtocolUrl(url: string) {
|
||||
const urlObj = new URL(url)
|
||||
const params = new URLSearchParams(urlObj.search)
|
||||
|
||||
switch (urlObj.hostname.toLowerCase()) {
|
||||
case 'mcp':
|
||||
handleMcpProtocolUrl(urlObj)
|
||||
return
|
||||
}
|
||||
|
||||
// You can send the data to your renderer process
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
|
||||
@@ -32,3 +47,78 @@ export function handleProtocolUrl(url: string) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const execAsync = promisify(exec)
|
||||
|
||||
const DESKTOP_FILE_NAME = 'cherrystudio-url-handler.desktop'
|
||||
|
||||
/**
|
||||
* Sets up deep linking for the AppImage build on Linux by creating a .desktop file.
|
||||
* This allows the OS to open cherrystudio:// URLs with this App.
|
||||
*/
|
||||
export async function setupAppImageDeepLink(): Promise<void> {
|
||||
// Only run on Linux and when packaged as an AppImage
|
||||
if (process.platform !== 'linux' || !process.env.APPIMAGE) {
|
||||
return
|
||||
}
|
||||
|
||||
Logger.info('AppImage environment detected on Linux, setting up deep link.')
|
||||
|
||||
try {
|
||||
const appPath = app.getPath('exe')
|
||||
if (!appPath) {
|
||||
Logger.error('Could not determine App path.')
|
||||
return
|
||||
}
|
||||
|
||||
const homeDir = app.getPath('home')
|
||||
const applicationsDir = path.join(homeDir, '.local', 'share', 'applications')
|
||||
const desktopFilePath = path.join(applicationsDir, DESKTOP_FILE_NAME)
|
||||
|
||||
// Ensure the applications directory exists
|
||||
await fs.mkdir(applicationsDir, { recursive: true })
|
||||
|
||||
// Content of the .desktop file
|
||||
// %U allows passing the URL to the application
|
||||
// NoDisplay=true hides it from the regular application menu
|
||||
const desktopFileContent = `[Desktop Entry]
|
||||
Name=Cherry Studio
|
||||
Exec=${escapePathForExec(appPath)} %U
|
||||
Terminal=false
|
||||
Type=Application
|
||||
MimeType=x-scheme-handler/${CHERRY_STUDIO_PROTOCOL};
|
||||
NoDisplay=true
|
||||
`
|
||||
|
||||
// Write the .desktop file (overwrite if exists)
|
||||
await fs.writeFile(desktopFilePath, desktopFileContent, 'utf-8')
|
||||
Logger.info(`Created/Updated desktop file: ${desktopFilePath}`)
|
||||
|
||||
// Update the desktop database
|
||||
// It's important to update the database for the changes to take effect
|
||||
try {
|
||||
const { stdout, stderr } = await execAsync(`update-desktop-database ${escapePathForExec(applicationsDir)}`)
|
||||
if (stderr) {
|
||||
Logger.warn(`update-desktop-database stderr: ${stderr}`)
|
||||
}
|
||||
Logger.info(`update-desktop-database stdout: ${stdout}`)
|
||||
Logger.info('Desktop database updated successfully.')
|
||||
} catch (updateError) {
|
||||
Logger.error('Failed to update desktop database:', updateError)
|
||||
// Continue even if update fails, as the file is still created.
|
||||
}
|
||||
} catch (error) {
|
||||
// Log the error but don't prevent the app from starting
|
||||
Logger.error('Failed to setup AppImage deep link:', error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Escapes a path for safe use within the Exec field of a .desktop file
|
||||
* and for shell commands. Handles spaces and potentially other special characters
|
||||
* by quoting.
|
||||
*/
|
||||
function escapePathForExec(filePath: string): string {
|
||||
// Simple quoting for paths with spaces.
|
||||
return `'${filePath.replace(/'/g, "'\\''")}'`
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { ProxyConfig as _ProxyConfig, session } from 'electron'
|
||||
import { socksDispatcher } from 'fetch-socks'
|
||||
import { getSystemProxy } from 'os-proxy-config'
|
||||
import { ProxyAgent as GeneralProxyAgent } from 'proxy-agent'
|
||||
import { ProxyAgent, setGlobalDispatcher } from 'undici'
|
||||
|
||||
@@ -70,15 +71,14 @@ export class ProxyManager {
|
||||
|
||||
private async setSystemProxy(): Promise<void> {
|
||||
try {
|
||||
await this.setSessionsProxy({ mode: 'system' })
|
||||
const proxyString = await session.defaultSession.resolveProxy('https://dummy.com')
|
||||
const [protocol, address] = proxyString.split(';')[0].split(' ')
|
||||
const url = protocol === 'PROXY' ? `http://${address}` : null
|
||||
if (url && url !== this.config.url) {
|
||||
this.config.url = url.toLowerCase()
|
||||
this.setEnvironment(this.config.url)
|
||||
this.proxyAgent = new GeneralProxyAgent()
|
||||
const currentProxy = await getSystemProxy()
|
||||
if (!currentProxy || currentProxy.proxyUrl === this.config.url) {
|
||||
return
|
||||
}
|
||||
await this.setSessionsProxy({ mode: 'system' })
|
||||
this.config.url = currentProxy.proxyUrl.toLowerCase()
|
||||
this.setEnvironment(this.config.url)
|
||||
this.proxyAgent = new GeneralProxyAgent()
|
||||
} catch (error) {
|
||||
console.error('Failed to set system proxy:', error)
|
||||
throw error
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { ipcMain } from 'electron'
|
||||
import { EventEmitter } from 'events'
|
||||
|
||||
@@ -10,6 +11,8 @@ export class ReduxService extends EventEmitter {
|
||||
private stateCache: any = {}
|
||||
private isReady = false
|
||||
|
||||
private readonly STATUS_CHANGE_EVENT = 'statusChange'
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
this.setupIpcHandlers()
|
||||
@@ -17,15 +20,15 @@ export class ReduxService extends EventEmitter {
|
||||
|
||||
private setupIpcHandlers() {
|
||||
// Listen for the store-ready event
|
||||
ipcMain.handle('redux-store-ready', () => {
|
||||
ipcMain.handle(IpcChannel.ReduxStoreReady, () => {
|
||||
this.isReady = true
|
||||
this.emit('ready')
|
||||
})
|
||||
|
||||
// Listen for store state changes
|
||||
ipcMain.on('redux-state-change', (_, newState) => {
|
||||
ipcMain.on(IpcChannel.ReduxStateChange, (_, newState) => {
|
||||
this.stateCache = newState
|
||||
this.emit('stateChange', newState)
|
||||
this.emit(this.STATUS_CHANGE_EVENT, newState)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -122,19 +125,23 @@ export class ReduxService extends EventEmitter {
|
||||
await this.waitForStoreReady(mainWindow.webContents)
|
||||
|
||||
// Set up the subscription in the renderer process
|
||||
await mainWindow.webContents.executeJavaScript(`
|
||||
await mainWindow.webContents.executeJavaScript(
|
||||
`
|
||||
if (!window._storeSubscriptions) {
|
||||
window._storeSubscriptions = new Set();
|
||||
|
||||
// Set up a global state-change listener
|
||||
const unsubscribe = window.store.subscribe(() => {
|
||||
const state = window.store.getState();
|
||||
window.electron.ipcRenderer.send('redux-state-change', state);
|
||||
window.electron.ipcRenderer.send('` +
|
||||
IpcChannel.ReduxStateChange +
|
||||
`', state);
|
||||
});
|
||||
|
||||
window._storeSubscriptions.add(unsubscribe);
|
||||
}
|
||||
`)
|
||||
`
|
||||
)
|
||||
|
||||
// 在主进程中处理回调
|
||||
const handler = async () => {
|
||||
@@ -146,9 +153,9 @@ export class ReduxService extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
this.on('stateChange', handler)
|
||||
this.on(this.STATUS_CHANGE_EVENT, handler)
|
||||
return () => {
|
||||
this.off('stateChange', handler)
|
||||
this.off(this.STATUS_CHANGE_EVENT, handler)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -180,41 +187,41 @@ export class ReduxService extends EventEmitter {
|
||||
export const reduxService = new ReduxService()
|
||||
|
||||
/** example
|
||||
async function example() {
|
||||
try {
|
||||
// Read state
|
||||
const settings = await reduxService.select('state.settings')
|
||||
console.log('settings', settings)
|
||||
async function example() {
|
||||
try {
|
||||
// Read state
|
||||
const settings = await reduxService.select('state.settings')
|
||||
console.log('settings', settings)
|
||||
|
||||
// Dispatch an action
|
||||
await reduxService.dispatch({
|
||||
type: 'settings/updateApiKey',
|
||||
payload: 'new-api-key'
|
||||
})
|
||||
// Dispatch an action
|
||||
await reduxService.dispatch({
|
||||
type: 'settings/updateApiKey',
|
||||
payload: 'new-api-key'
|
||||
})
|
||||
|
||||
// Subscribe to state changes
|
||||
const unsubscribe = await reduxService.subscribe('state.settings.apiKey', (newValue) => {
|
||||
console.log('API key changed:', newValue)
|
||||
})
|
||||
// Subscribe to state changes
|
||||
const unsubscribe = await reduxService.subscribe('state.settings.apiKey', (newValue) => {
|
||||
console.log('API key changed:', newValue)
|
||||
})
|
||||
|
||||
// Dispatch actions in batch
|
||||
await reduxService.batch([
|
||||
{ type: 'action1', payload: 'data1' },
|
||||
{ type: 'action2', payload: 'data2' }
|
||||
])
|
||||
// Dispatch actions in batch
|
||||
await reduxService.batch([
|
||||
{ type: 'action1', payload: 'data1' },
|
||||
{ type: 'action2', payload: 'data2' }
|
||||
])
|
||||
|
||||
// The sync method may return stale data, but it responds faster
|
||||
const apiKey = reduxService.selectSync('state.settings.apiKey')
|
||||
console.log('apiKey', apiKey)
|
||||
// The sync method may return stale data, but it responds faster
|
||||
const apiKey = reduxService.selectSync('state.settings.apiKey')
|
||||
console.log('apiKey', apiKey)
|
||||
|
||||
// The async method guarantees the data is up to date
|
||||
const apiKey1 = await reduxService.select('state.settings.apiKey')
|
||||
console.log('apiKey1', apiKey1)
|
||||
// The async method guarantees the data is up to date
|
||||
const apiKey1 = await reduxService.select('state.settings.apiKey')
|
||||
console.log('apiKey1', apiKey1)
|
||||
|
||||
// Unsubscribe
|
||||
unsubscribe()
|
||||
} catch (error) {
|
||||
console.error('Error:', error)
|
||||
}
|
||||
}
|
||||
*/
|
||||
// Unsubscribe
|
||||
unsubscribe()
|
||||
} catch (error) {
|
||||
console.error('Error:', error)
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
src/main/services/SearchService.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
import { is } from '@electron-toolkit/utils'
|
||||
import { BrowserWindow } from 'electron'
|
||||
|
||||
export class SearchService {
|
||||
private static instance: SearchService | null = null
|
||||
private searchWindows: Record<string, BrowserWindow> = {}
|
||||
public static getInstance(): SearchService {
|
||||
if (!SearchService.instance) {
|
||||
SearchService.instance = new SearchService()
|
||||
}
|
||||
return SearchService.instance
|
||||
}
|
||||
|
||||
constructor() {
|
||||
// Initialize the service
|
||||
}
|
||||
|
||||
private async createNewSearchWindow(uid: string): Promise<BrowserWindow> {
|
||||
const newWindow = new BrowserWindow({
|
||||
width: 800,
|
||||
height: 600,
|
||||
show: false,
|
||||
webPreferences: {
|
||||
nodeIntegration: true,
|
||||
contextIsolation: false,
|
||||
devTools: is.dev
|
||||
}
|
||||
})
|
||||
newWindow.webContents.session.webRequest.onBeforeSendHeaders({ urls: ['*://*/*'] }, (details, callback) => {
|
||||
const headers = {
|
||||
...details.requestHeaders,
|
||||
'User-Agent':
|
||||
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
|
||||
}
|
||||
callback({ requestHeaders: headers })
|
||||
})
|
||||
this.searchWindows[uid] = newWindow
|
||||
newWindow.on('closed', () => {
|
||||
delete this.searchWindows[uid]
|
||||
})
|
||||
return newWindow
|
||||
}
|
||||
|
||||
public async openSearchWindow(uid: string): Promise<void> {
|
||||
await this.createNewSearchWindow(uid)
|
||||
}
|
||||
|
||||
public async closeSearchWindow(uid: string): Promise<void> {
|
||||
const window = this.searchWindows[uid]
|
||||
if (window) {
|
||||
window.close()
|
||||
delete this.searchWindows[uid]
|
||||
}
|
||||
}
|
||||
|
||||
public async openUrlInSearchWindow(uid: string, url: string): Promise<any> {
|
||||
let window = this.searchWindows[uid]
|
||||
if (window) {
|
||||
await window.loadURL(url)
|
||||
} else {
|
||||
window = await this.createNewSearchWindow(uid)
|
||||
await window.loadURL(url)
|
||||
}
|
||||
|
||||
// Get the page content after loading the URL
|
||||
// Wait for the page to fully load before getting the content
|
||||
await new Promise<void>((resolve) => {
|
||||
const loadTimeout = setTimeout(() => resolve(), 10000) // 10 second timeout
|
||||
window.webContents.once('did-finish-load', () => {
|
||||
clearTimeout(loadTimeout)
|
||||
// Small delay to ensure JavaScript has executed
|
||||
setTimeout(resolve, 500)
|
||||
})
|
||||
})
|
||||
|
||||
// Get the page content after ensuring it's fully loaded
|
||||
const content = await window.webContents.executeJavaScript('document.documentElement.outerHTML')
|
||||
return content
|
||||
}
|
||||
}
|
||||
|
||||
export const searchService = SearchService.getInstance()
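// Usage sketch (illustrative, not part of the diff); 'search-1' and the URL are placeholders:
// const html = await searchService.openUrlInSearchWindow('search-1', 'https://example.com/?q=cherry')
// // html now holds the fully rendered document.documentElement.outerHTML
// await searchService.closeSearchWindow('search-1')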
|
||||
@@ -26,6 +26,7 @@ export default class WebDav {
|
||||
this.putFileContents = this.putFileContents.bind(this)
|
||||
this.getFileContents = this.getFileContents.bind(this)
|
||||
this.createDirectory = this.createDirectory.bind(this)
|
||||
this.deleteFile = this.deleteFile.bind(this)
|
||||
}
|
||||
|
||||
public putFileContents = async (
|
||||
@@ -98,4 +99,19 @@ export default class WebDav {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
public deleteFile = async (filename: string) => {
|
||||
if (!this.instance) {
|
||||
throw new Error('WebDAV client not initialized')
|
||||
}
|
||||
|
||||
const remoteFilePath = `${this.webdavPath}/${filename}`
|
||||
|
||||
try {
|
||||
return await this.instance.deleteFile(remoteFilePath)
|
||||
} catch (error) {
|
||||
Logger.error('[WebDAV] Error deleting file on WebDAV:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
src/main/services/WebviewService.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
|
||||
import { session, shell, webContents } from 'electron'
|
||||
|
||||
/**
|
||||
* Initialize the user agent of the webview session:
|
||||
* remove the CherryStudio and Electron tokens from the user agent
|
||||
*/
|
||||
export function initSessionUserAgent() {
|
||||
const wvSession = session.fromPartition('persist:webview')
|
||||
const newChromeVersion = '135.0.7049.96'
|
||||
const originUA = wvSession.getUserAgent()
|
||||
const newUA = originUA
|
||||
.replace(/CherryStudio\/\S+\s/, '')
|
||||
.replace(/Electron\/\S+\s/, '')
|
||||
.replace(/Chrome\/\d+\.\d+\.\d+\.\d+/, `Chrome/${newChromeVersion}`)
|
||||
|
||||
wvSession.setUserAgent(newUA)
|
||||
}
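// Example transformation (illustrative; the original UA string is an assumption):
//   before: 'Mozilla/5.0 ... Chrome/130.0.0.0 Electron/34.0.0 CherryStudio/1.2.10 Safari/537.36'
//   after:  'Mozilla/5.0 ... Chrome/135.0.7049.96 Safari/537.36'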
|
||||
|
||||
/**
|
||||
* WebviewService handles the behavior of links opened from webview elements
|
||||
* It controls whether links should be opened within the application or in an external browser
|
||||
*/
|
||||
export function setOpenLinkExternal(webviewId: number, isExternal: boolean) {
|
||||
const webview = webContents.fromId(webviewId)
|
||||
if (!webview) return
|
||||
|
||||
webview.setWindowOpenHandler(({ url }) => {
|
||||
if (isExternal) {
|
||||
shell.openExternal(url)
|
||||
return { action: 'deny' }
|
||||
} else {
|
||||
return { action: 'allow' }
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,7 +1,9 @@
|
||||
import { is } from '@electron-toolkit/utils'
|
||||
import { isDev, isLinux, isMac, isWin } from '@main/constant'
|
||||
import { getFilesDir } from '@main/utils/file'
|
||||
import { app, BrowserWindow, ipcMain, Menu, MenuItem, shell } from 'electron'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { ThemeMode } from '@types'
|
||||
import { app, BrowserWindow, ipcMain, Menu, MenuItem, nativeTheme, shell } from 'electron'
|
||||
import Logger from 'electron-log'
|
||||
import windowStateKeeper from 'electron-window-state'
|
||||
import { join } from 'path'
|
||||
@@ -10,13 +12,13 @@ import icon from '../../../build/icon.png?asset'
|
||||
import { titleBarOverlayDark, titleBarOverlayLight } from '../config'
|
||||
import { locales } from '../utils/locales'
|
||||
import { configManager } from './ConfigManager'
|
||||
import { initSessionUserAgent } from './WebviewService'
|
||||
|
||||
export class WindowService {
|
||||
private static instance: WindowService | null = null
|
||||
private mainWindow: BrowserWindow | null = null
|
||||
private miniWindow: BrowserWindow | null = null
|
||||
private isPinnedMiniWindow: boolean = false
|
||||
private wasFullScreen: boolean = false
|
||||
//hacky-fix: store the focused status of mainWindow before miniWindow shows
|
||||
//to restore the focus status when miniWindow hides
|
||||
private wasMainWindowFocused: boolean = false
|
||||
@@ -40,10 +42,17 @@ export class WindowService {
|
||||
|
||||
const mainWindowState = windowStateKeeper({
|
||||
defaultWidth: 1080,
|
||||
defaultHeight: 670
|
||||
defaultHeight: 670,
|
||||
fullScreen: false,
|
||||
maximize: false
|
||||
})
|
||||
|
||||
const theme = configManager.getTheme()
|
||||
if (theme === ThemeMode.auto) {
|
||||
nativeTheme.themeSource = 'system'
|
||||
} else {
|
||||
nativeTheme.themeSource = theme
|
||||
}
|
||||
|
||||
this.mainWindow = new BrowserWindow({
|
||||
x: mainWindowState.x,
|
||||
@@ -52,14 +61,15 @@ export class WindowService {
|
||||
height: mainWindowState.height,
|
||||
minWidth: 1080,
|
||||
minHeight: 600,
|
||||
show: false, // do not show initially
|
||||
show: false,
|
||||
autoHideMenuBar: true,
|
||||
transparent: isMac,
|
||||
vibrancy: 'sidebar',
|
||||
visualEffectState: 'active',
|
||||
titleBarStyle: isLinux ? 'default' : 'hidden',
|
||||
titleBarOverlay: theme === 'dark' ? titleBarOverlayDark : titleBarOverlayLight,
|
||||
backgroundColor: isMac ? undefined : theme === 'dark' ? '#181818' : '#FFFFFF',
|
||||
titleBarOverlay: nativeTheme.shouldUseDarkColors ? titleBarOverlayDark : titleBarOverlayLight,
|
||||
backgroundColor: isMac ? undefined : nativeTheme.shouldUseDarkColors ? '#181818' : '#FFFFFF',
|
||||
darkTheme: nativeTheme.shouldUseDarkColors,
|
||||
trafficLightPosition: { x: 8, y: 12 },
|
||||
...(isLinux ? { icon } : {}),
|
||||
webPreferences: {
|
||||
@@ -79,42 +89,16 @@ export class WindowService {
|
||||
this.miniWindow = this.createMiniWindow(true)
|
||||
}
|
||||
|
||||
// init the MinApp webviews' user agent
|
||||
initSessionUserAgent()
|
||||
|
||||
return this.mainWindow
|
||||
}
|
||||
|
||||
public createMinappWindow({
|
||||
url,
|
||||
parent,
|
||||
windowOptions
|
||||
}: {
|
||||
url: string
|
||||
parent?: BrowserWindow
|
||||
windowOptions?: Electron.BrowserWindowConstructorOptions
|
||||
}): BrowserWindow {
|
||||
const width = windowOptions?.width || 1000
|
||||
const height = windowOptions?.height || 680
|
||||
|
||||
const minappWindow = new BrowserWindow({
|
||||
width,
|
||||
height,
|
||||
autoHideMenuBar: true,
|
||||
title: 'Cherry Studio',
|
||||
...windowOptions,
|
||||
parent,
|
||||
webPreferences: {
|
||||
preload: join(__dirname, '../preload/minapp.js'),
|
||||
sandbox: false,
|
||||
contextIsolation: false
|
||||
}
|
||||
})
|
||||
|
||||
minappWindow.loadURL(url)
|
||||
return minappWindow
|
||||
}
|
||||
|
||||
private setupMainWindow(mainWindow: BrowserWindow, mainWindowState: any) {
|
||||
mainWindowState.manage(mainWindow)
|
||||
|
||||
this.setupMaximize(mainWindow, mainWindowState.isMaximized)
|
||||
this.setupContextMenu(mainWindow)
|
||||
this.setupWindowEvents(mainWindow)
|
||||
this.setupWebContentsHandlers(mainWindow)
|
||||
@@ -122,6 +106,17 @@ export class WindowService {
|
||||
this.loadMainWindowContent(mainWindow)
|
||||
}
|
||||
|
||||
private setupMaximize(mainWindow: BrowserWindow, isMaximized: boolean) {
|
||||
if (isMaximized) {
|
||||
// If launched to tray, delay maximizing until the window is shown; otherwise the restored window is not the maximized one from before the restart
|
||||
configManager.getLaunchToTray()
|
||||
? mainWindow.once('show', () => {
|
||||
mainWindow.maximize()
|
||||
})
|
||||
: mainWindow.maximize()
|
||||
}
|
||||
}
|
||||
|
||||
private setupContextMenu(mainWindow: BrowserWindow) {
|
||||
if (!this.contextMenu) {
|
||||
const locale = locales[configManager.getLanguage()]
|
||||
@@ -167,13 +162,11 @@ export class WindowService {
|
||||
|
||||
// Handle fullscreen-related events
|
||||
mainWindow.on('enter-full-screen', () => {
|
||||
this.wasFullScreen = true
|
||||
mainWindow.webContents.send('fullscreen-status-changed', true)
|
||||
mainWindow.webContents.send(IpcChannel.FullscreenStatusChanged, true)
|
||||
})
|
||||
|
||||
mainWindow.on('leave-full-screen', () => {
|
||||
this.wasFullScreen = false
|
||||
mainWindow.webContents.send('fullscreen-status-changed', false)
|
||||
mainWindow.webContents.send(IpcChannel.FullscreenStatusChanged, false)
|
||||
})
|
||||
|
||||
// set the zoom factor again when the window is going to resize
|
||||
@@ -222,9 +215,11 @@ export class WindowService {
|
||||
|
||||
const oauthProviderUrls = [
|
||||
'https://account.siliconflow.cn/oauth',
|
||||
'https://cloud.siliconflow.cn/bills',
|
||||
'https://cloud.siliconflow.cn/expensebill',
|
||||
'https://aihubmix.com/token',
|
||||
'https://aihubmix.com/topup'
|
||||
'https://aihubmix.com/topup',
|
||||
'https://aihubmix.com/statistics'
|
||||
]
|
||||
|
||||
if (oauthProviderUrls.some((link) => url.startsWith(link))) {
|
||||
@@ -274,6 +269,7 @@ export class WindowService {
|
||||
private loadMainWindowContent(mainWindow: BrowserWindow) {
|
||||
if (is.dev && process.env['ELECTRON_RENDERER_URL']) {
|
||||
mainWindow.loadURL(process.env['ELECTRON_RENDERER_URL'])
|
||||
// mainWindow.webContents.openDevTools()
|
||||
} else {
|
||||
mainWindow.loadFile(join(__dirname, '../renderer/index.html'))
|
||||
}
|
||||
@@ -303,23 +299,20 @@ export class WindowService {
|
||||
}
|
||||
}
|
||||
|
||||
// Below this point: the "tray enabled + minimize to tray on close" case
|
||||
// On Windows or Linux, quit the app if it is in fullscreen
|
||||
if (this.wasFullScreen) {
|
||||
if (isWin || isLinux) {
|
||||
return app.quit()
|
||||
} else {
|
||||
event.preventDefault()
|
||||
mainWindow.setFullScreen(false)
|
||||
return
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Below this point:
|
||||
* win/linux: the "tray enabled + minimize to tray on close" case
|
||||
* mac: every case reaches here, so macOS needs separate handling
|
||||
*/
|
||||
|
||||
event.preventDefault()
|
||||
|
||||
mainWindow.hide()
|
||||
|
||||
//for mac users, should hide dock icon if close to tray
|
||||
app.dock?.hide()
|
||||
if (isMac && isTrayOnClose) {
|
||||
app.dock?.hide()
|
||||
}
|
||||
})
|
||||
|
||||
mainWindow.on('closed', () => {
|
||||
@@ -343,13 +336,38 @@ export class WindowService {
|
||||
this.mainWindow.restore()
|
||||
return
|
||||
}
|
||||
//[macOS] Known Issue
|
||||
// setVisibleOnAllWorkspaces true/false will NOT bring window to current desktop in Mac (works fine with Windows)
|
||||
// AppleScript may be a solution, but it's not worth
|
||||
this.mainWindow.setVisibleOnAllWorkspaces(true)
|
||||
|
||||
/**
|
||||
* About setVisibleOnAllWorkspaces
|
||||
*
|
||||
* [macOS] Known Issue
|
||||
* setVisibleOnAllWorkspaces true/false will NOT bring window to current desktop in Mac (works fine with Windows)
|
||||
* AppleScript may be a solution, but it's not worth it
|
||||
*
|
||||
* [Linux] Known Issue
|
||||
* setVisibleOnAllWorkspaces 在 Linux 环境下(特别是 KDE Wayland)会导致窗口进入"假弹出"状态
|
||||
* 因此在 Linux 环境下不执行这两行代码
|
||||
*/
|
||||
if (!isLinux) {
|
||||
this.mainWindow.setVisibleOnAllWorkspaces(true)
|
||||
}
|
||||
|
||||
/**
|
||||
* [macOS] After being closed in fullscreen, the fullscreen behavior will become strange when window shows again
|
||||
* So we need to set it to FALSE explicitly.
|
||||
* although other platforms don't have the issue, it's good practice to do so anyway
|
||||
*
|
||||
* Check if window is visible to prevent interrupting fullscreen state when clicking dock icon
|
||||
*/
|
||||
if (this.mainWindow.isFullScreen() && !this.mainWindow.isVisible()) {
|
||||
this.mainWindow.setFullScreen(false)
|
||||
}
|
||||
|
||||
this.mainWindow.show()
|
||||
this.mainWindow.focus()
|
||||
this.mainWindow.setVisibleOnAllWorkspaces(false)
|
||||
if (!isLinux) {
|
||||
this.mainWindow.setVisibleOnAllWorkspaces(false)
|
||||
}
|
||||
} else {
|
||||
this.mainWindow = this.createMainWindow()
|
||||
}
|
||||
@@ -357,7 +375,9 @@ export class WindowService {
|
||||
|
||||
public toggleMainWindow() {
|
||||
// should not toggle main window when in full screen
|
||||
if (this.wasFullScreen) {
|
||||
// but if the main window is close to tray when it's in full screen, we can show it again
|
||||
// (it's a bug in macos, because we can close the window when it's in full screen, and the state will be remained)
|
||||
if (this.mainWindow?.isFullScreen() && this.mainWindow?.isVisible()) {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -411,7 +431,8 @@ export class WindowService {
|
||||
//miniWindow should show in current desktop
|
||||
this.miniWindow?.setVisibleOnAllWorkspaces(true, { visibleOnFullScreen: true })
|
||||
//make miniWindow always on top of fullscreen apps with level set
|
||||
this.miniWindow.setAlwaysOnTop(true, 'screen-saver', 1)
|
||||
//[mac] level higher than 'floating' will cover the pinyin input method
|
||||
this.miniWindow.setAlwaysOnTop(true, 'floating')
|
||||
|
||||
this.miniWindow.on('ready-to-show', () => {
|
||||
if (isPreload) {
|
||||
@@ -434,14 +455,14 @@ export class WindowService {
|
||||
})
|
||||
|
||||
this.miniWindow.on('hide', () => {
|
||||
this.miniWindow?.webContents.send('hide-mini-window')
|
||||
this.miniWindow?.webContents.send(IpcChannel.HideMiniWindow)
|
||||
})
|
||||
|
||||
this.miniWindow.on('show', () => {
|
||||
this.miniWindow?.webContents.send('show-mini-window')
|
||||
this.miniWindow?.webContents.send(IpcChannel.ShowMiniWindow)
|
||||
})
|
||||
|
||||
ipcMain.on('miniwindow-reload', () => {
|
||||
ipcMain.on(IpcChannel.MiniWindowReload, () => {
|
||||
this.miniWindow?.reload()
|
||||
})
|
||||
|
||||
@@ -547,7 +568,7 @@ export class WindowService {
|
||||
// Hide the window when clicking elsewhere
|
||||
this.selectionMenuWindow.on('blur', () => {
|
||||
this.selectionMenuWindow?.hide()
|
||||
this.miniWindow?.webContents.send('selection-action', {
|
||||
this.miniWindow?.webContents.send(IpcChannel.SelectionAction, {
|
||||
action: 'home',
|
||||
selectedText: this.lastSelectedText
|
||||
})
|
||||
@@ -565,12 +586,12 @@ export class WindowService {
|
||||
private setupSelectionMenuEvents() {
|
||||
if (!this.selectionMenuWindow) return
|
||||
|
||||
ipcMain.removeHandler('selection-menu:action')
|
||||
ipcMain.handle('selection-menu:action', (_, action) => {
|
||||
ipcMain.removeHandler(IpcChannel.SelectionMenu_Action)
|
||||
ipcMain.handle(IpcChannel.SelectionMenu_Action, (_, action) => {
|
||||
this.selectionMenuWindow?.hide()
|
||||
this.showMiniWindow()
|
||||
setTimeout(() => {
|
||||
this.miniWindow?.webContents.send('selection-action', {
|
||||
this.miniWindow?.webContents.send(IpcChannel.SelectionAction, {
|
||||
action,
|
||||
selectedText: this.lastSelectedText
|
||||
})
|
||||
|
||||
src/main/services/mcp/oauth/callback.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
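// Usage sketch (illustrative, mirroring how McpService drives the OAuth flow earlier in this diff):
// const events = new EventEmitter()
// const callbackServer = new CallBackServer({ port: 12346, path: '/oauth/callback', events })
// const code = await callbackServer.waitForAuthCode() // resolves once the browser redirect reaches the server
// await transport.finishAuth(code) // `transport` is the MCP SDK transport, as in McpService
// callbackServer.close()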
|
||||
import Logger from 'electron-log'
|
||||
import EventEmitter from 'events'
|
||||
import http from 'http'
|
||||
import { URL } from 'url'
|
||||
|
||||
import { OAuthCallbackServerOptions } from './types'
|
||||
|
||||
export class CallBackServer {
|
||||
private server: Promise<http.Server>
|
||||
private events: EventEmitter
|
||||
|
||||
constructor(options: OAuthCallbackServerOptions) {
|
||||
const { port, path, events } = options
|
||||
this.events = events
|
||||
this.server = this.initialize(port, path)
|
||||
}
|
||||
|
||||
initialize(port: number, path: string): Promise<http.Server> {
|
||||
const server = http.createServer((req, res) => {
|
||||
// Only handle requests to the callback path
|
||||
if (req.url?.startsWith(path)) {
|
||||
try {
|
||||
// Parse the URL to extract the authorization code
|
||||
const url = new URL(req.url, `http://127.0.0.1:${port}`)
|
||||
const code = url.searchParams.get('code')
|
||||
if (code) {
|
||||
// Emit the code event
|
||||
this.events.emit('auth-code-received', code)
|
||||
}
|
||||
} catch (error) {
|
||||
Logger.error('Error processing OAuth callback:', error)
|
||||
res.writeHead(500, { 'Content-Type': 'text/plain' })
|
||||
res.end('Internal Server Error')
|
||||
}
|
||||
} else {
|
||||
// Not a callback request
|
||||
res.writeHead(404, { 'Content-Type': 'text/plain' })
|
||||
res.end('Not Found')
|
||||
}
|
||||
})
|
||||
|
||||
// Handle server errors
|
||||
server.on('error', (error) => {
|
||||
Logger.error('OAuth callback server error:', error)
|
||||
})
|
||||
|
||||
const runningServer = new Promise<http.Server>((resolve, reject) => {
|
||||
server.listen(port, () => {
|
||||
Logger.info(`OAuth callback server listening on port ${port}`)
|
||||
resolve(server)
|
||||
})
|
||||
|
||||
server.on('error', (error) => {
|
||||
reject(error)
|
||||
})
|
||||
})
|
||||
return runningServer
|
||||
}
|
||||
|
||||
get getServer(): Promise<http.Server> {
|
||||
return this.server
|
||||
}
|
||||
|
||||
async close() {
|
||||
const server = await this.server
|
||||
server.close()
|
||||
}
|
||||
|
||||
async waitForAuthCode(): Promise<string> {
|
||||
return new Promise((resolve) => {
|
||||
this.events.once('auth-code-received', (code) => {
|
||||
resolve(code)
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
src/main/services/mcp/oauth/provider.ts (new file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
import path from 'node:path'
|
||||
|
||||
import { getConfigDir } from '@main/utils/file'
|
||||
import { OAuthClientProvider } from '@modelcontextprotocol/sdk/client/auth'
|
||||
import { OAuthClientInformation, OAuthClientInformationFull, OAuthTokens } from '@modelcontextprotocol/sdk/shared/auth'
|
||||
import Logger from 'electron-log'
|
||||
import open from 'open'
|
||||
|
||||
import { JsonFileStorage } from './storage'
|
||||
import { OAuthProviderOptions } from './types'
|
||||
|
||||
export class McpOAuthClientProvider implements OAuthClientProvider {
|
||||
private storage: JsonFileStorage
|
||||
public readonly config: Required<OAuthProviderOptions>
|
||||
|
||||
constructor(options: OAuthProviderOptions) {
|
||||
const configDir = path.join(getConfigDir(), 'mcp', 'oauth')
|
||||
this.config = {
|
||||
serverUrlHash: options.serverUrlHash,
|
||||
callbackPort: options.callbackPort || 12346,
|
||||
callbackPath: options.callbackPath || '/oauth/callback',
|
||||
configDir: options.configDir || configDir,
|
||||
clientName: options.clientName || 'Cherry Studio',
|
||||
clientUri: options.clientUri || 'https://github.com/CherryHQ/cherry-studio'
|
||||
}
|
||||
this.storage = new JsonFileStorage(this.config.serverUrlHash, this.config.configDir)
|
||||
}
|
||||
|
||||
get redirectUrl(): string {
|
||||
return `http://127.0.0.1:${this.config.callbackPort}${this.config.callbackPath}`
|
||||
}
|
||||
|
||||
get clientMetadata() {
|
||||
return {
|
||||
redirect_uris: [this.redirectUrl],
|
||||
token_endpoint_auth_method: 'none',
|
||||
grant_types: ['authorization_code', 'refresh_token'],
|
||||
response_types: ['code'],
|
||||
client_name: this.config.clientName,
|
||||
client_uri: this.config.clientUri
|
||||
}
|
||||
}
|
||||
|
||||
async clientInformation(): Promise<OAuthClientInformation | undefined> {
|
||||
return this.storage.getClientInformation()
|
||||
}
|
||||
|
||||
async saveClientInformation(info: OAuthClientInformationFull): Promise<void> {
|
||||
await this.storage.saveClientInformation(info)
|
||||
}
|
||||
|
||||
async tokens(): Promise<OAuthTokens | undefined> {
|
||||
return this.storage.getTokens()
|
||||
}
|
||||
|
||||
async saveTokens(tokens: OAuthTokens): Promise<void> {
|
||||
await this.storage.saveTokens(tokens)
|
||||
}
|
||||
|
||||
async redirectToAuthorization(authorizationUrl: URL): Promise<void> {
|
||||
try {
|
||||
// Open the browser to the authorization URL
|
||||
await open(authorizationUrl.toString())
|
||||
Logger.info('Browser opened automatically.')
|
||||
} catch (error) {
|
||||
Logger.error('Could not open browser automatically.')
|
||||
throw error // Let caller handle the error
|
||||
}
|
||||
}
|
||||
|
||||
async saveCodeVerifier(codeVerifier: string): Promise<void> {
|
||||
await this.storage.saveCodeVerifier(codeVerifier)
|
||||
}
|
||||
|
||||
async codeVerifier(): Promise<string> {
|
||||
return this.storage.getCodeVerifier()
|
||||
}
|
||||
}
|
||||
src/main/services/mcp/oauth/storage.ts (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
import {
|
||||
OAuthClientInformation,
|
||||
OAuthClientInformationFull,
|
||||
OAuthTokens
|
||||
} from '@modelcontextprotocol/sdk/shared/auth.js'
|
||||
import Logger from 'electron-log'
|
||||
import fs from 'fs/promises'
|
||||
import path from 'path'
|
||||
|
||||
import { IOAuthStorage, OAuthStorageData, OAuthStorageSchema } from './types'
|
||||
|
||||
export class JsonFileStorage implements IOAuthStorage {
|
||||
private readonly filePath: string
|
||||
private cache: OAuthStorageData | null = null
|
||||
|
||||
constructor(
|
||||
readonly serverUrlHash: string,
|
||||
configDir: string
|
||||
) {
|
||||
this.filePath = path.join(configDir, `${serverUrlHash}_oauth.json`)
|
||||
}
|
||||
|
||||
private async readStorage(): Promise<OAuthStorageData> {
|
||||
if (this.cache) {
|
||||
return this.cache
|
||||
}
|
||||
|
||||
try {
|
||||
const data = await fs.readFile(this.filePath, 'utf-8')
|
||||
const parsed = JSON.parse(data)
|
||||
const validated = OAuthStorageSchema.parse(parsed)
|
||||
this.cache = validated
|
||||
return validated
|
||||
} catch (error) {
|
||||
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
|
||||
// File doesn't exist, return initial state
|
||||
const initial: OAuthStorageData = { lastUpdated: Date.now() }
|
||||
await this.writeStorage(initial)
|
||||
return initial
|
||||
}
|
||||
Logger.error('Error reading OAuth storage:', error)
|
||||
throw new Error(`Failed to read OAuth storage: ${error instanceof Error ? error.message : String(error)}`)
|
||||
}
|
||||
}
|
||||
|
||||
private async writeStorage(data: OAuthStorageData): Promise<void> {
|
||||
try {
|
||||
// Ensure directory exists
|
||||
await fs.mkdir(path.dirname(this.filePath), { recursive: true })
|
||||
|
||||
// Update timestamp
|
||||
data.lastUpdated = Date.now()
|
||||
|
||||
// Write file atomically
|
||||
const tempPath = `${this.filePath}.tmp`
|
||||
await fs.writeFile(tempPath, JSON.stringify(data, null, 2))
|
||||
await fs.rename(tempPath, this.filePath)
|
||||
|
||||
// Update cache
|
||||
this.cache = data
|
||||
} catch (error) {
|
||||
Logger.error('Error writing OAuth storage:', error)
|
||||
throw new Error(`Failed to write OAuth storage: ${error instanceof Error ? error.message : String(error)}`)
|
||||
}
|
||||
}
|
||||
|
||||
async getClientInformation(): Promise<OAuthClientInformation | undefined> {
|
||||
const data = await this.readStorage()
|
||||
return data.clientInfo
|
||||
}
|
||||
|
||||
async saveClientInformation(info: OAuthClientInformationFull): Promise<void> {
|
||||
const data = await this.readStorage()
|
||||
await this.writeStorage({
|
||||
...data,
|
||||
clientInfo: info
|
||||
})
|
||||
}
|
||||
|
||||
async getTokens(): Promise<OAuthTokens | undefined> {
|
||||
const data = await this.readStorage()
|
||||
return data.tokens
|
||||
}
|
||||
|
||||
async saveTokens(tokens: OAuthTokens): Promise<void> {
|
||||
const data = await this.readStorage()
|
||||
await this.writeStorage({
|
||||
...data,
|
||||
tokens
|
||||
})
|
||||
}
|
||||
|
||||
async getCodeVerifier(): Promise<string> {
|
||||
const data = await this.readStorage()
|
||||
if (!data.codeVerifier) {
|
||||
throw new Error('No code verifier saved for session')
|
||||
}
|
||||
return data.codeVerifier
|
||||
}
|
||||
|
||||
async saveCodeVerifier(codeVerifier: string): Promise<void> {
|
||||
const data = await this.readStorage()
|
||||
await this.writeStorage({
|
||||
...data,
|
||||
codeVerifier
|
||||
})
|
||||
}
|
||||
|
||||
async clear(): Promise<void> {
|
||||
try {
|
||||
await fs.unlink(this.filePath)
|
||||
this.cache = null
|
||||
} catch (error) {
|
||||
if (error instanceof Error && 'code' in error && error.code !== 'ENOENT') {
|
||||
Logger.error('Error clearing OAuth storage:', error)
|
||||
throw new Error(`Failed to clear OAuth storage: ${error instanceof Error ? error.message : String(error)}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
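For reference, the persisted file is a single JSON document matching OAuthStorageData. An illustrative example (not taken from the diff, field values are made up) of what `${serverUrlHash}_oauth.json` might hold after a completed flow:

  // Illustrative only; the exact token/client fields come from the MCP SDK's OAuth types.
  const example: OAuthStorageData = {
    clientInfo: { client_id: 'example-client-id' },          // from dynamic client registration
    tokens: {
      access_token: 'example-access-token',
      token_type: 'Bearer',
      refresh_token: 'example-refresh-token'
    },
    codeVerifier: 'example-pkce-code-verifier',
    lastUpdated: 1714000000000                                // Date.now() at last write
  }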
61  src/main/services/mcp/oauth/types.ts  Normal file
@@ -0,0 +1,61 @@
import {
  OAuthClientInformation,
  OAuthClientInformationFull,
  OAuthTokens
} from '@modelcontextprotocol/sdk/shared/auth.js'
import EventEmitter from 'events'
import { z } from 'zod'

export interface OAuthStorageData {
  clientInfo?: OAuthClientInformation
  tokens?: OAuthTokens
  codeVerifier?: string
  lastUpdated: number
}

export const OAuthStorageSchema = z.object({
  clientInfo: z.any().optional(),
  tokens: z.any().optional(),
  codeVerifier: z.string().optional(),
  lastUpdated: z.number()
})

export interface IOAuthStorage {
  getClientInformation(): Promise<OAuthClientInformation | undefined>
  saveClientInformation(info: OAuthClientInformationFull): Promise<void>
  getTokens(): Promise<OAuthTokens | undefined>
  saveTokens(tokens: OAuthTokens): Promise<void>
  getCodeVerifier(): Promise<string>
  saveCodeVerifier(codeVerifier: string): Promise<void>
  clear(): Promise<void>
}

/**
 * OAuth callback server setup options
 */
export interface OAuthCallbackServerOptions {
  /** Port for the callback server */
  port: number
  /** Path for the callback endpoint */
  path: string
  /** Event emitter to signal when auth code is received */
  events: EventEmitter
}

/**
 * Options for creating an OAuth client provider
 */
export interface OAuthProviderOptions {
  /** Hash of the server URL to connect to */
  serverUrlHash: string
  /** Port for the OAuth callback server */
  callbackPort?: number
  /** Path for the OAuth callback endpoint */
  callbackPath?: string
  /** Directory to store OAuth credentials */
  configDir?: string
  /** Client name to use for OAuth registration */
  clientName?: string
  /** Client URI to use for OAuth registration */
  clientUri?: string
}
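A quick sketch (not part of the diff) of what OAuthStorageSchema enforces: only the envelope is validated, while clientInfo and tokens pass through as z.any():

  // Only lastUpdated is required; unknown client/token shapes are accepted as-is.
  const ok = OAuthStorageSchema.safeParse({ lastUpdated: Date.now() })
  console.log(ok.success) // true

  const bad = OAuthStorageSchema.safeParse({ codeVerifier: 'v' }) // missing lastUpdated
  console.log(bad.success) // false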
76  src/main/services/urlschema/mcp-install.ts  Normal file
@@ -0,0 +1,76 @@
import { nanoid } from '@reduxjs/toolkit'
import { IpcChannel } from '@shared/IpcChannel'
import { MCPServer } from '@types'
import Logger from 'electron-log'

import { windowService } from '../WindowService'

function installMCPServer(server: MCPServer) {
  const mainWindow = windowService.getMainWindow()

  if (!server.id) {
    server.id = nanoid()
  }

  if (mainWindow && !mainWindow.isDestroyed()) {
    mainWindow.webContents.send(IpcChannel.Mcp_AddServer, server)
  }
}

function installMCPServers(servers: Record<string, MCPServer>) {
  for (const name in servers) {
    const server = servers[name]
    if (!server.name) {
      server.name = name
    }
    installMCPServer(server)
  }
}

export function handleMcpProtocolUrl(url: URL) {
  const params = new URLSearchParams(url.search)
  switch (url.pathname) {
    case '/install': {
      // jsonConfig example:
      // {
      //   "mcpServers": {
      //     "everything": {
      //       "command": "npx",
      //       "args": [
      //         "-y",
      //         "@modelcontextprotocol/server-everything"
      //       ]
      //     }
      //   }
      // }
      // cherrystudio://mcp/install?servers={base64Encode(JSON.stringify(jsonConfig))}
      const data = params.get('servers')
      if (data) {
        const stringify = Buffer.from(data, 'base64').toString('utf8')
        Logger.info('install MCP servers from urlschema: ', stringify)
        const jsonConfig = JSON.parse(stringify)
        Logger.info('install MCP servers from urlschema: ', jsonConfig)

        // supports {mcpServers: {name: server}}, a bare [server, ...] array, and a single {server}
        if (jsonConfig.mcpServers) {
          installMCPServers(jsonConfig.mcpServers)
        } else if (Array.isArray(jsonConfig)) {
          for (const server of jsonConfig) {
            installMCPServer(server)
          }
        } else {
          installMCPServer(jsonConfig)
        }
      }

      const mainWindow = windowService.getMainWindow()
      if (mainWindow && !mainWindow.isDestroyed()) {
        mainWindow.webContents.executeJavaScript("window.navigate('/settings/mcp')")
      }
      break
    }
    default:
      console.error(`Unknown MCP protocol URL: ${url}`)
      break
  }
}
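A sketch of the producing side of that deep link, following the comment in the handler above. The encodeURIComponent step is an illustration: it keeps the '+' and '=' characters of the base64 payload intact so URLSearchParams returns the original string.

  const jsonConfig = {
    mcpServers: {
      everything: {
        command: 'npx',
        args: ['-y', '@modelcontextprotocol/server-everything']
      }
    }
  }

  const encoded = Buffer.from(JSON.stringify(jsonConfig)).toString('base64')
  const installUrl = `cherrystudio://mcp/install?servers=${encodeURIComponent(encoded)}`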
@@ -1,6 +1,8 @@
import * as fs from 'node:fs'
import os from 'node:os'
import path from 'node:path'

import { isMac } from '@main/constant'
import { audioExts, documentExts, imageExts, textExts, videoExts } from '@shared/config/constant'
import { FileType, FileTypes } from '@types'
import { app } from 'electron'
@@ -74,3 +76,20 @@ export function getTempDir() {
export function getFilesDir() {
  return path.join(app.getPath('userData'), 'Data', 'Files')
}

export function getConfigDir() {
  return path.join(os.homedir(), '.cherrystudio', 'config')
}

export function getAppConfigDir(name: string) {
  return path.join(getConfigDir(), name)
}

export function setUserDataDir() {
  if (!isMac) {
    const dir = path.join(path.dirname(app.getPath('exe')), 'data')
    if (fs.existsSync(dir) && fs.statSync(dir).isDirectory()) {
      app.setPath('userData', dir)
    }
  }
}
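Illustrative resolution of these helpers on a machine whose home directory is /home/alice (platform-dependent, not from the diff):

  getConfigDir()          // /home/alice/.cherrystudio/config
  getAppConfigDir('mcp')  // /home/alice/.cherrystudio/config/mcp
  // McpOAuthClientProvider defaults its storage directory to <configDir>/mcp/oauth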
181  src/preload/index.d.ts  vendored
@@ -1,181 +0,0 @@
import { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import { ElectronAPI } from '@electron-toolkit/preload'
import type { FileMetadataResponse, ListFilesResponse, UploadFileResponse } from '@google/generative-ai/server'
import type { MCPServer, MCPTool } from '@renderer/types'
import { AppInfo, FileType, KnowledgeBaseParams, KnowledgeItem, LanguageVarious, WebDavConfig } from '@renderer/types'
import type { LoaderReturn } from '@shared/config/types'
import type { OpenDialogOptions } from 'electron'
import type { UpdateInfo } from 'electron-updater'

interface BackupFile {
  fileName: string
  modifiedTime: string
  size: number
}

declare global {
  interface Window {
    electron: ElectronAPI
    api: {
      getAppInfo: () => Promise<AppInfo>
      checkForUpdate: () => Promise<{ currentVersion: string; updateInfo: UpdateInfo | null }>
      showUpdateDialog: () => Promise<void>
      openWebsite: (url: string) => void
      setProxy: (proxy: string | undefined) => void
      setLanguage: (theme: LanguageVarious) => void
      setLaunchOnBoot: (isActive: boolean) => void
      setLaunchToTray: (isActive: boolean) => void
      setTray: (isActive: boolean) => void
      setTrayOnClose: (isActive: boolean) => void
      restartTray: () => void
      setTheme: (theme: 'light' | 'dark') => void
      minApp: (options: { url: string; windowOptions?: Electron.BrowserWindowConstructorOptions }) => void
      reload: () => void
      clearCache: () => Promise<{ success: boolean; error?: string }>
      system: {
        getDeviceType: () => Promise<'mac' | 'windows' | 'linux'>
      }
      zip: {
        compress: (text: string) => Promise<Buffer>
        decompress: (text: Buffer) => Promise<string>
      }
      backup: {
        backup: (fileName: string, data: string, destinationPath?: string) => Promise<Readable>
        restore: (backupPath: string) => Promise<string>
        backupToWebdav: (data: string, webdavConfig: WebDavConfig) => Promise<boolean>
        restoreFromWebdav: (webdavConfig: WebDavConfig) => Promise<string>
        listWebdavFiles: (webdavConfig: WebDavConfig) => Promise<BackupFile[]>
        checkConnection: (webdavConfig: WebDavConfig) => Promise<boolean>
        createDirectory: (webdavConfig: WebDavConfig, path: string, options?: CreateDirectoryOptions) => Promise<void>
      }
      file: {
        select: (options?: OpenDialogOptions) => Promise<FileType[] | null>
        upload: (file: FileType) => Promise<FileType>
        delete: (fileId: string) => Promise<void>
        read: (fileId: string) => Promise<string>
        clear: () => Promise<void>
        get: (filePath: string) => Promise<FileType | null>
        selectFolder: () => Promise<string | null>
        create: (fileName: string) => Promise<string>
        write: (filePath: string, data: Uint8Array | string) => Promise<void>
        open: (options?: OpenDialogOptions) => Promise<{ fileName: string; filePath: string; content: Buffer } | null>
        openPath: (path: string) => Promise<void>
        save: (
          path: string,
          content: string | NodeJS.ArrayBufferView,
          options?: SaveDialogOptions
        ) => Promise<string | null>
        saveImage: (name: string, data: string) => void
        base64Image: (fileId: string) => Promise<{ mime: string; base64: string; data: string }>
        download: (url: string) => Promise<FileType | null>
        copy: (fileId: string, destPath: string) => Promise<void>
        binaryFile: (fileId: string) => Promise<{ data: Buffer; mime: string }>
      }
      fs: {
        read: (path: string) => Promise<string>
      }
      export: {
        toWord: (markdown: string, fileName: string) => Promise<void>
      }
      openPath: (path: string) => Promise<void>
      shortcuts: {
        update: (shortcuts: Shortcut[]) => Promise<void>
      }
      knowledgeBase: {
        create: (base: KnowledgeBaseParams) => Promise<void>
        reset: (base: KnowledgeBaseParams) => Promise<void>
        delete: (id: string) => Promise<void>
        add: ({
          base,
          item,
          forceReload = false
        }: {
          base: KnowledgeBaseParams
          item: KnowledgeItem
          forceReload?: boolean
        }) => Promise<LoaderReturn>
        remove: ({
          uniqueId,
          uniqueIds,
          base
        }: {
          uniqueId: string
          uniqueIds: string[]
          base: KnowledgeBaseParams
        }) => Promise<void>
        search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) => Promise<ExtractChunkData[]>
        rerank: ({
          search,
          base,
          results
        }: {
          search: string
          base: KnowledgeBaseParams
          results: ExtractChunkData[]
        }) => Promise<ExtractChunkData[]>
      }
      window: {
        setMinimumSize: (width: number, height: number) => Promise<void>
        resetMinimumSize: () => Promise<void>
      }
      gemini: {
        uploadFile: (file: FileType, apiKey: string) => Promise<UploadFileResponse>
        retrieveFile: (file: FileType, apiKey: string) => Promise<FileMetadataResponse | undefined>
        base64File: (file: FileType) => Promise<{ data: string; mimeType: string }>
        listFiles: (apiKey: string) => Promise<ListFilesResponse>
        deleteFile: (apiKey: string, fileId: string) => Promise<void>
      }
      selectionMenu: {
        action: (action: string) => Promise<void>
      }
      config: {
        set: (key: string, value: any) => Promise<void>
        get: (key: string) => Promise<any>
      }
      miniWindow: {
        show: () => Promise<void>
        hide: () => Promise<void>
        close: () => Promise<void>
        toggle: () => Promise<void>
        setPin: (isPinned: boolean) => Promise<void>
      }
      aes: {
        encrypt: (text: string, secretKey: string, iv: string) => Promise<{ iv: string; encryptedData: string }>
        decrypt: (encryptedData: string, iv: string, secretKey: string) => Promise<string>
      }
      shell: {
        openExternal: (url: string, options?: OpenExternalOptions) => Promise<void>
      }
      mcp: {
        removeServer: (server: MCPServer) => Promise<void>
        restartServer: (server: MCPServer) => Promise<void>
        stopServer: (server: MCPServer) => Promise<void>
        listTools: (server: MCPServer) => Promise<MCPTool[]>
        callTool: ({ server, name, args }: { server: MCPServer; name: string; args: any }) => Promise<any>
        getInstallInfo: () => Promise<{ dir: string; uvPath: string; bunPath: string }>
      }
      copilot: {
        getAuthMessage: (
          headers?: Record<string, string>
        ) => Promise<{ device_code: string; user_code: string; verification_uri: string }>
        getCopilotToken: (device_code: string, headers?: Record<string, string>) => Promise<{ access_token: string }>
        saveCopilotToken: (access_token: string) => Promise<void>
        getToken: (headers?: Record<string, string>) => Promise<{ token: string }>
        logout: () => Promise<void>
        getUser: (token: string) => Promise<{ login: string; avatar: string }>
      }
      isBinaryExist: (name: string) => Promise<boolean>
      getBinaryPath: (name: string) => Promise<string>
      installUVBinary: () => Promise<void>
      installBunBinary: () => Promise<void>
      protocol: {
        onReceiveData: (callback: (data: { url: string; params: any }) => void) => () => void
      }
      nutstore: {
        getSSOUrl: () => Promise<string>
        decryptToken: (token: string) => Promise<{ username: string; access_token: string }>
        getDirectoryContents: (token: string, path: string) => Promise<any>
      }
    }
  }
}
@@ -1,79 +1,87 @@
import type { ExtractChunkData } from '@cherrystudio/embedjs-interfaces'
import { electronAPI } from '@electron-toolkit/preload'
import { IpcChannel } from '@shared/IpcChannel'
import { FileType, KnowledgeBaseParams, KnowledgeItem, MCPServer, Shortcut, WebDavConfig } from '@types'
import { contextBridge, ipcRenderer, OpenDialogOptions, shell } from 'electron'
import { CreateDirectoryOptions } from 'webdav'

// Custom APIs for renderer
const api = {
  getAppInfo: () => ipcRenderer.invoke('app:info'),
  reload: () => ipcRenderer.invoke('app:reload'),
  setProxy: (proxy: string) => ipcRenderer.invoke('app:proxy', proxy),
  checkForUpdate: () => ipcRenderer.invoke('app:check-for-update'),
  showUpdateDialog: () => ipcRenderer.invoke('app:show-update-dialog'),
  setLanguage: (lang: string) => ipcRenderer.invoke('app:set-language', lang),
  setLaunchOnBoot: (isActive: boolean) => ipcRenderer.invoke('app:set-launch-on-boot', isActive),
  setLaunchToTray: (isActive: boolean) => ipcRenderer.invoke('app:set-launch-to-tray', isActive),
  setTray: (isActive: boolean) => ipcRenderer.invoke('app:set-tray', isActive),
  setTrayOnClose: (isActive: boolean) => ipcRenderer.invoke('app:set-tray-on-close', isActive),
  restartTray: () => ipcRenderer.invoke('app:restart-tray'),
  setTheme: (theme: 'light' | 'dark') => ipcRenderer.invoke('app:set-theme', theme),
  openWebsite: (url: string) => ipcRenderer.invoke('open:website', url),
  minApp: (url: string) => ipcRenderer.invoke('minapp', url),
  clearCache: () => ipcRenderer.invoke('app:clear-cache'),
  getAppInfo: () => ipcRenderer.invoke(IpcChannel.App_Info),
  reload: () => ipcRenderer.invoke(IpcChannel.App_Reload),
  setProxy: (proxy: string | undefined) => ipcRenderer.invoke(IpcChannel.App_Proxy, proxy),
  checkForUpdate: () => ipcRenderer.invoke(IpcChannel.App_CheckForUpdate),
  showUpdateDialog: () => ipcRenderer.invoke(IpcChannel.App_ShowUpdateDialog),
  setLanguage: (lang: string) => ipcRenderer.invoke(IpcChannel.App_SetLanguage, lang),
  setLaunchOnBoot: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetLaunchOnBoot, isActive),
  setLaunchToTray: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetLaunchToTray, isActive),
  setTray: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetTray, isActive),
  setTrayOnClose: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetTrayOnClose, isActive),
  restartTray: () => ipcRenderer.invoke(IpcChannel.App_RestartTray),
  setTheme: (theme: 'light' | 'dark' | 'auto') => ipcRenderer.invoke(IpcChannel.App_SetTheme, theme),
  setCustomCss: (css: string) => ipcRenderer.invoke(IpcChannel.App_SetCustomCss, css),
  setAutoUpdate: (isActive: boolean) => ipcRenderer.invoke(IpcChannel.App_SetAutoUpdate, isActive),
  openWebsite: (url: string) => ipcRenderer.invoke(IpcChannel.Open_Website, url),
  clearCache: () => ipcRenderer.invoke(IpcChannel.App_ClearCache),
  system: {
    getDeviceType: () => ipcRenderer.invoke('system:getDeviceType')
    getDeviceType: () => ipcRenderer.invoke(IpcChannel.System_GetDeviceType),
    getHostname: () => ipcRenderer.invoke(IpcChannel.System_GetHostname)
  },
  zip: {
    compress: (text: string) => ipcRenderer.invoke('zip:compress', text),
    decompress: (text: Buffer) => ipcRenderer.invoke('zip:decompress', text)
    compress: (text: string) => ipcRenderer.invoke(IpcChannel.Zip_Compress, text),
    decompress: (text: Buffer) => ipcRenderer.invoke(IpcChannel.Zip_Decompress, text)
  },
  backup: {
    backup: (fileName: string, data: string, destinationPath?: string) =>
      ipcRenderer.invoke('backup:backup', fileName, data, destinationPath),
    restore: (backupPath: string) => ipcRenderer.invoke('backup:restore', backupPath),
      ipcRenderer.invoke(IpcChannel.Backup_Backup, fileName, data, destinationPath),
    restore: (backupPath: string) => ipcRenderer.invoke(IpcChannel.Backup_Restore, backupPath),
    backupToWebdav: (data: string, webdavConfig: WebDavConfig) =>
      ipcRenderer.invoke('backup:backupToWebdav', data, webdavConfig),
    restoreFromWebdav: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:restoreFromWebdav', webdavConfig),
    listWebdavFiles: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:listWebdavFiles', webdavConfig),
    checkConnection: (webdavConfig: WebDavConfig) => ipcRenderer.invoke('backup:checkConnection', webdavConfig),
      ipcRenderer.invoke(IpcChannel.Backup_BackupToWebdav, data, webdavConfig),
    restoreFromWebdav: (webdavConfig: WebDavConfig) =>
      ipcRenderer.invoke(IpcChannel.Backup_RestoreFromWebdav, webdavConfig),
    listWebdavFiles: (webdavConfig: WebDavConfig) =>
      ipcRenderer.invoke(IpcChannel.Backup_ListWebdavFiles, webdavConfig),
    checkConnection: (webdavConfig: WebDavConfig) =>
      ipcRenderer.invoke(IpcChannel.Backup_CheckConnection, webdavConfig),
    createDirectory: (webdavConfig: WebDavConfig, path: string, options?: CreateDirectoryOptions) =>
      ipcRenderer.invoke('backup:createDirectory', webdavConfig, path, options)
      ipcRenderer.invoke(IpcChannel.Backup_CreateDirectory, webdavConfig, path, options),
    deleteWebdavFile: (fileName: string, webdavConfig: WebDavConfig) =>
      ipcRenderer.invoke(IpcChannel.Backup_DeleteWebdavFile, fileName, webdavConfig)
  },
  file: {
    select: (options?: OpenDialogOptions) => ipcRenderer.invoke('file:select', options),
    upload: (filePath: string) => ipcRenderer.invoke('file:upload', filePath),
    delete: (fileId: string) => ipcRenderer.invoke('file:delete', fileId),
    read: (fileId: string) => ipcRenderer.invoke('file:read', fileId),
    clear: () => ipcRenderer.invoke('file:clear'),
    get: (filePath: string) => ipcRenderer.invoke('file:get', filePath),
    create: (fileName: string) => ipcRenderer.invoke('file:create', fileName),
    write: (filePath: string, data: Uint8Array | string) => ipcRenderer.invoke('file:write', filePath, data),
    open: (options?: { decompress: boolean }) => ipcRenderer.invoke('file:open', options),
    openPath: (path: string) => ipcRenderer.invoke('file:openPath', path),
    save: (path: string, content: string, options?: { compress: boolean }) =>
      ipcRenderer.invoke('file:save', path, content, options),
    selectFolder: () => ipcRenderer.invoke('file:selectFolder'),
    saveImage: (name: string, data: string) => ipcRenderer.invoke('file:saveImage', name, data),
    base64Image: (fileId: string) => ipcRenderer.invoke('file:base64Image', fileId),
    download: (url: string) => ipcRenderer.invoke('file:download', url),
    copy: (fileId: string, destPath: string) => ipcRenderer.invoke('file:copy', fileId, destPath),
    binaryFile: (fileId: string) => ipcRenderer.invoke('file:binaryFile', fileId)
    select: (options?: OpenDialogOptions) => ipcRenderer.invoke(IpcChannel.File_Select, options),
    upload: (file: FileType) => ipcRenderer.invoke(IpcChannel.File_Upload, file),
    delete: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_Delete, fileId),
    read: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_Read, fileId),
    clear: () => ipcRenderer.invoke(IpcChannel.File_Clear),
    get: (filePath: string) => ipcRenderer.invoke(IpcChannel.File_Get, filePath),
    create: (fileName: string) => ipcRenderer.invoke(IpcChannel.File_Create, fileName),
    write: (filePath: string, data: Uint8Array | string) => ipcRenderer.invoke(IpcChannel.File_Write, filePath, data),
    open: (options?: OpenDialogOptions) => ipcRenderer.invoke(IpcChannel.File_Open, options),
    openPath: (path: string) => ipcRenderer.invoke(IpcChannel.File_OpenPath, path),
    save: (path: string, content: string | NodeJS.ArrayBufferView, options?: any) =>
      ipcRenderer.invoke(IpcChannel.File_Save, path, content, options),
    selectFolder: () => ipcRenderer.invoke(IpcChannel.File_SelectFolder),
    saveImage: (name: string, data: string) => ipcRenderer.invoke(IpcChannel.File_SaveImage, name, data),
    base64Image: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_Base64Image, fileId),
    download: (url: string) => ipcRenderer.invoke(IpcChannel.File_Download, url),
    copy: (fileId: string, destPath: string) => ipcRenderer.invoke(IpcChannel.File_Copy, fileId, destPath),
    binaryFile: (fileId: string) => ipcRenderer.invoke(IpcChannel.File_BinaryFile, fileId)
  },
  fs: {
    read: (path: string) => ipcRenderer.invoke('fs:read', path)
    read: (path: string) => ipcRenderer.invoke(IpcChannel.Fs_Read, path)
  },
  export: {
    toWord: (markdown: string, fileName: string) => ipcRenderer.invoke('export:word', markdown, fileName)
    toWord: (markdown: string, fileName: string) => ipcRenderer.invoke(IpcChannel.Export_Word, markdown, fileName)
  },
  openPath: (path: string) => ipcRenderer.invoke('open:path', path),
  openPath: (path: string) => ipcRenderer.invoke(IpcChannel.Open_Path, path),
  shortcuts: {
    update: (shortcuts: Shortcut[]) => ipcRenderer.invoke('shortcuts:update', shortcuts)
    update: (shortcuts: Shortcut[]) => ipcRenderer.invoke(IpcChannel.Shortcuts_Update, shortcuts)
  },
  knowledgeBase: {
    create: (base: KnowledgeBaseParams) => ipcRenderer.invoke('knowledge-base:create', base),
    reset: (base: KnowledgeBaseParams) => ipcRenderer.invoke('knowledge-base:reset', base),
    delete: (id: string) => ipcRenderer.invoke('knowledge-base:delete', id),
    create: (base: KnowledgeBaseParams) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Create, base),
    reset: (base: KnowledgeBaseParams) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Reset, base),
    delete: (id: string) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Delete, id),
    add: ({
      base,
      item,
@@ -82,71 +90,80 @@ const api = {
      base: KnowledgeBaseParams
      item: KnowledgeItem
      forceReload?: boolean
    }) => ipcRenderer.invoke('knowledge-base:add', { base, item, forceReload }),
    }) => ipcRenderer.invoke(IpcChannel.KnowledgeBase_Add, { base, item, forceReload }),
    remove: ({ uniqueId, uniqueIds, base }: { uniqueId: string; uniqueIds: string[]; base: KnowledgeBaseParams }) =>
      ipcRenderer.invoke('knowledge-base:remove', { uniqueId, uniqueIds, base }),
      ipcRenderer.invoke(IpcChannel.KnowledgeBase_Remove, { uniqueId, uniqueIds, base }),
    search: ({ search, base }: { search: string; base: KnowledgeBaseParams }) =>
      ipcRenderer.invoke('knowledge-base:search', { search, base }),
      ipcRenderer.invoke(IpcChannel.KnowledgeBase_Search, { search, base }),
    rerank: ({ search, base, results }: { search: string; base: KnowledgeBaseParams; results: ExtractChunkData[] }) =>
      ipcRenderer.invoke('knowledge-base:rerank', { search, base, results })
      ipcRenderer.invoke(IpcChannel.KnowledgeBase_Rerank, { search, base, results })
  },
  window: {
    setMinimumSize: (width: number, height: number) => ipcRenderer.invoke('window:set-minimum-size', width, height),
    resetMinimumSize: () => ipcRenderer.invoke('window:reset-minimum-size')
    setMinimumSize: (width: number, height: number) =>
      ipcRenderer.invoke(IpcChannel.Windows_SetMinimumSize, width, height),
    resetMinimumSize: () => ipcRenderer.invoke(IpcChannel.Windows_ResetMinimumSize)
  },
  gemini: {
    uploadFile: (file: FileType, apiKey: string) => ipcRenderer.invoke('gemini:upload-file', file, apiKey),
    base64File: (file: FileType) => ipcRenderer.invoke('gemini:base64-file', file),
    retrieveFile: (file: FileType, apiKey: string) => ipcRenderer.invoke('gemini:retrieve-file', file, apiKey),
    listFiles: (apiKey: string) => ipcRenderer.invoke('gemini:list-files', apiKey),
    deleteFile: (apiKey: string, fileId: string) => ipcRenderer.invoke('gemini:delete-file', apiKey, fileId)
    uploadFile: (file: FileType, apiKey: string) => ipcRenderer.invoke(IpcChannel.Gemini_UploadFile, file, apiKey),
    base64File: (file: FileType) => ipcRenderer.invoke(IpcChannel.Gemini_Base64File, file),
    retrieveFile: (file: FileType, apiKey: string) => ipcRenderer.invoke(IpcChannel.Gemini_RetrieveFile, file, apiKey),
    listFiles: (apiKey: string) => ipcRenderer.invoke(IpcChannel.Gemini_ListFiles, apiKey),
    deleteFile: (fileId: string, apiKey: string) => ipcRenderer.invoke(IpcChannel.Gemini_DeleteFile, fileId, apiKey)
  },
  selectionMenu: {
    action: (action: string) => ipcRenderer.invoke('selection-menu:action', action)
    action: (action: string) => ipcRenderer.invoke(IpcChannel.SelectionMenu_Action, action)
  },
  config: {
    set: (key: string, value: any) => ipcRenderer.invoke('config:set', key, value),
    get: (key: string) => ipcRenderer.invoke('config:get', key)
    set: (key: string, value: any) => ipcRenderer.invoke(IpcChannel.Config_Set, key, value),
    get: (key: string) => ipcRenderer.invoke(IpcChannel.Config_Get, key)
  },
  miniWindow: {
    show: () => ipcRenderer.invoke('miniwindow:show'),
    hide: () => ipcRenderer.invoke('miniwindow:hide'),
    close: () => ipcRenderer.invoke('miniwindow:close'),
    toggle: () => ipcRenderer.invoke('miniwindow:toggle'),
    setPin: (isPinned: boolean) => ipcRenderer.invoke('miniwindow:set-pin', isPinned)
    show: () => ipcRenderer.invoke(IpcChannel.MiniWindow_Show),
    hide: () => ipcRenderer.invoke(IpcChannel.MiniWindow_Hide),
    close: () => ipcRenderer.invoke(IpcChannel.MiniWindow_Close),
    toggle: () => ipcRenderer.invoke(IpcChannel.MiniWindow_Toggle),
    setPin: (isPinned: boolean) => ipcRenderer.invoke(IpcChannel.MiniWindow_SetPin, isPinned)
  },
  aes: {
    encrypt: (text: string, secretKey: string, iv: string) => ipcRenderer.invoke('aes:encrypt', text, secretKey, iv),
    encrypt: (text: string, secretKey: string, iv: string) =>
      ipcRenderer.invoke(IpcChannel.Aes_Encrypt, text, secretKey, iv),
    decrypt: (encryptedData: string, iv: string, secretKey: string) =>
      ipcRenderer.invoke('aes:decrypt', encryptedData, iv, secretKey)
      ipcRenderer.invoke(IpcChannel.Aes_Decrypt, encryptedData, iv, secretKey)
  },
  mcp: {
    removeServer: (server: MCPServer) => ipcRenderer.invoke('mcp:remove-server', server),
    restartServer: (server: MCPServer) => ipcRenderer.invoke('mcp:restart-server', server),
    stopServer: (server: MCPServer) => ipcRenderer.invoke('mcp:stop-server', server),
    listTools: (server: MCPServer) => ipcRenderer.invoke('mcp:list-tools', server),
    removeServer: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_RemoveServer, server),
    restartServer: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_RestartServer, server),
    stopServer: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_StopServer, server),
    listTools: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_ListTools, server),
    callTool: ({ server, name, args }: { server: MCPServer; name: string; args: any }) =>
      ipcRenderer.invoke('mcp:call-tool', { server, name, args }),
    getInstallInfo: () => ipcRenderer.invoke('mcp:get-install-info')
      ipcRenderer.invoke(IpcChannel.Mcp_CallTool, { server, name, args }),
    listPrompts: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_ListPrompts, server),
    getPrompt: ({ server, name, args }: { server: MCPServer; name: string; args?: Record<string, any> }) =>
      ipcRenderer.invoke(IpcChannel.Mcp_GetPrompt, { server, name, args }),
    listResources: (server: MCPServer) => ipcRenderer.invoke(IpcChannel.Mcp_ListResources, server),
    getResource: ({ server, uri }: { server: MCPServer; uri: string }) =>
      ipcRenderer.invoke(IpcChannel.Mcp_GetResource, { server, uri }),
    getInstallInfo: () => ipcRenderer.invoke(IpcChannel.Mcp_GetInstallInfo)
  },
  shell: {
    openExternal: shell.openExternal
    openExternal: (url: string, options?: Electron.OpenExternalOptions) => shell.openExternal(url, options)
  },
  copilot: {
    getAuthMessage: (headers?: Record<string, string>) => ipcRenderer.invoke('copilot:get-auth-message', headers),
    getAuthMessage: (headers?: Record<string, string>) =>
      ipcRenderer.invoke(IpcChannel.Copilot_GetAuthMessage, headers),
    getCopilotToken: (device_code: string, headers?: Record<string, string>) =>
      ipcRenderer.invoke('copilot:get-copilot-token', device_code, headers),
    saveCopilotToken: (access_token: string) => ipcRenderer.invoke('copilot:save-copilot-token', access_token),
    getToken: (headers?: Record<string, string>) => ipcRenderer.invoke('copilot:get-token', headers),
    logout: () => ipcRenderer.invoke('copilot:logout'),
    getUser: (token: string) => ipcRenderer.invoke('copilot:get-user', token)
      ipcRenderer.invoke(IpcChannel.Copilot_GetCopilotToken, device_code, headers),
    saveCopilotToken: (access_token: string) => ipcRenderer.invoke(IpcChannel.Copilot_SaveCopilotToken, access_token),
    getToken: (headers?: Record<string, string>) => ipcRenderer.invoke(IpcChannel.Copilot_GetToken, headers),
    logout: () => ipcRenderer.invoke(IpcChannel.Copilot_Logout),
    getUser: (token: string) => ipcRenderer.invoke(IpcChannel.Copilot_GetUser, token)
  },

  // Binary related APIs
  isBinaryExist: (name: string) => ipcRenderer.invoke('app:is-binary-exist', name),
  getBinaryPath: (name: string) => ipcRenderer.invoke('app:get-binary-path', name),
  installUVBinary: () => ipcRenderer.invoke('app:install-uv-binary'),
  installBunBinary: () => ipcRenderer.invoke('app:install-bun-binary'),
  isBinaryExist: (name: string) => ipcRenderer.invoke(IpcChannel.App_IsBinaryExist, name),
  getBinaryPath: (name: string) => ipcRenderer.invoke(IpcChannel.App_GetBinaryPath, name),
  installUVBinary: () => ipcRenderer.invoke(IpcChannel.App_InstallUvBinary),
  installBunBinary: () => ipcRenderer.invoke(IpcChannel.App_InstallBunBinary),
  protocol: {
    onReceiveData: (callback: (data: { url: string; params: any }) => void) => {
      const listener = (_event: Electron.IpcRendererEvent, data: { url: string; params: any }) => {
@@ -159,10 +176,19 @@ const api = {
    }
  },
  nutstore: {
    getSSOUrl: () => ipcRenderer.invoke('nutstore:get-sso-url'),
    decryptToken: (token: string) => ipcRenderer.invoke('nutstore:decrypt-token', token),
    getSSOUrl: () => ipcRenderer.invoke(IpcChannel.Nutstore_GetSsoUrl),
    decryptToken: (token: string) => ipcRenderer.invoke(IpcChannel.Nutstore_DecryptToken, token),
    getDirectoryContents: (token: string, path: string) =>
      ipcRenderer.invoke('nutstore:get-directory-contents', token, path)
      ipcRenderer.invoke(IpcChannel.Nutstore_GetDirectoryContents, token, path)
  },
  searchService: {
    openSearchWindow: (uid: string) => ipcRenderer.invoke(IpcChannel.SearchWindow_Open, uid),
    closeSearchWindow: (uid: string) => ipcRenderer.invoke(IpcChannel.SearchWindow_Close, uid),
    openUrlInSearchWindow: (uid: string, url: string) => ipcRenderer.invoke(IpcChannel.SearchWindow_OpenUrl, uid, url)
  },
  webview: {
    setOpenLinkExternal: (webviewId: number, isExternal: boolean) =>
      ipcRenderer.invoke(IpcChannel.Webview_SetOpenLinkExternal, webviewId, isExternal)
  }
}

@@ -174,9 +200,9 @@ if (process.contextIsolated) {
    contextBridge.exposeInMainWorld('electron', electronAPI)
    contextBridge.exposeInMainWorld('api', api)
    contextBridge.exposeInMainWorld('obsidian', {
      getVaults: () => ipcRenderer.invoke('obsidian:get-vaults'),
      getFolders: (vaultName: string) => ipcRenderer.invoke('obsidian:get-files', vaultName),
      getFiles: (vaultName: string) => ipcRenderer.invoke('obsidian:get-files', vaultName)
      getVaults: () => ipcRenderer.invoke(IpcChannel.Obsidian_GetVaults),
      getFolders: (vaultName: string) => ipcRenderer.invoke(IpcChannel.Obsidian_GetFiles, vaultName),
      getFiles: (vaultName: string) => ipcRenderer.invoke(IpcChannel.Obsidian_GetFiles, vaultName)
    })
  } catch (error) {
    console.error(error)
@@ -187,3 +213,5 @@ if (process.contextIsolated) {
  // @ts-ignore (define in dts)
  window.api = api
}

export type WindowApiType = typeof api
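The bulk of this change replaces raw channel strings with members of the shared IpcChannel enum, so the preload bridge and the main-process handlers reference the same constants instead of hand-typed strings. A hypothetical sketch of the main-process side, whose actual registration lives in the main-process services and is not part of this diff:

  // Hypothetical main-process counterpart, assuming handlers are registered with
  // ipcMain.handle against the same IpcChannel constants used by the preload bridge above.
  import { IpcChannel } from '@shared/IpcChannel'
  import { app, ipcMain } from 'electron'

  ipcMain.handle(IpcChannel.App_Info, () => ({
    version: app.getVersion() // illustrative payload; the real AppInfo shape is defined in the app's types
  }))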
11  src/preload/preload.d.ts  vendored  Normal file
@@ -0,0 +1,11 @@
import { ElectronAPI } from '@electron-toolkit/preload'

import type { WindowApiType } from './index'

/** you don't need to declare this in your code, it's automatically generated */
declare global {
  interface Window {
    electron: ElectronAPI
    api: WindowApiType
  }
}
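With this declaration the renderer sees window.api typed directly from the preload implementation (WindowApiType = typeof api), replacing the hand-maintained index.d.ts above. A hypothetical renderer snippet:

  import type { MCPServer } from '@renderer/types'

  async function example(server: MCPServer) {
    const info = await window.api.getAppInfo()          // typed from the preload api object
    const tools = await window.api.mcp.listTools(server)
    console.log(info, tools.length)
  }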