Compare commits

7 Commits: fix/thinki...feat/backu

| Author | SHA1 | Date |
| --- | --- | --- |
|  | c319a4f4d0 |  |
|  | 980f20fcca |  |
|  | f846a27418 |  |
|  | d74d66dcbf |  |
|  | a2c1011c55 |  |
|  | 06cba0e3bf |  |
|  | ae392eb2ef |  |
3 changes: .github/CODEOWNERS (vendored)

@@ -1,5 +1,4 @@
/src/renderer/src/store/ @0xfullex
/src/renderer/src/databases/ @0xfullex
/src/main/services/ConfigManager.ts @0xfullex
/packages/shared/IpcChannel.ts @0xfullex
/src/main/ipc.ts @0xfullex
/src/main/ipc.ts @0xfullex
2 changes: .github/ISSUE_TEMPLATE/0_bug_report.yml (vendored)

@@ -1,4 +1,4 @@
name: 🐛 Bug Report
name: 🐛 Bug Report (English)
description: Create a report to help us improve
title: '[Bug]: '
labels: ['BUG']
2 changes: .github/ISSUE_TEMPLATE/1_feature_request.yml (vendored)

@@ -1,4 +1,4 @@
name: 💡 Feature Request
name: 💡 Feature Request (English)
description: Suggest an idea for this project
title: '[Feature]: '
labels: ['feature']
2 changes: .github/ISSUE_TEMPLATE/3_others.yml (vendored)

@@ -1,4 +1,4 @@
name: 🤔 Other Questions
name: 🤔 Other Questions (English)
description: Submit questions that don't fit into bug reports or feature requests
title: '[Other]: '
body:
12 changes: .github/pull_request_template.md (vendored)

@@ -3,18 +3,6 @@
1. Consider creating this PR as draft: https://github.com/CherryHQ/cherry-studio/blob/main/CONTRIBUTING.md
-->

<!--

⚠️ Important: Redux/IndexedDB Data-Changing Feature PRs Temporarily On Hold ⚠️

Please note: For our current development cycle, we are not accepting feature Pull Requests that introduce changes to Redux data models or IndexedDB schemas.

While we value your contributions, PRs of this nature will be blocked without merge. We welcome all other contributions (bug fixes, perf enhancements, docs, etc.). Thank you!

Once version 2.0.0 is released, we will resume reviewing feature PRs.

-->

### What this PR does

Before this PR:
89 changes: .github/workflows/auto-i18n.yml (vendored)

@@ -1,21 +1,19 @@
name: Auto I18N Weekly
name: Auto I18N

env:
TRANSLATION_API_KEY: ${{ secrets.TRANSLATE_API_KEY }}
TRANSLATION_MODEL: ${{ vars.AUTO_I18N_MODEL || 'deepseek/deepseek-v3.1'}}
TRANSLATION_BASE_URL: ${{ vars.AUTO_I18N_BASE_URL || 'https://api.ppinfra.com/openai'}}
TRANSLATION_BASE_LOCALE: ${{ vars.AUTO_I18N_BASE_LOCALE || 'en-us'}}

on:
schedule:
# Runs at 00:00 UTC every Sunday.
# This corresponds to 08:00 AM UTC+8 (Beijing time) every Sunday.
- cron: "0 0 * * 0"
pull_request:
types: [opened, synchronize, reopened]
workflow_dispatch:

jobs:
auto-i18n:
runs-on: ubuntu-latest
if: github.event_name == 'workflow_dispatch' || github.event.pull_request.head.repo.full_name == 'CherryHQ/cherry-studio'
name: Auto I18N
permissions:
contents: write

@@ -25,69 +23,44 @@ jobs:
- name: 🐈⬛ Checkout
uses: actions/checkout@v5
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.ref }}

- name: 📦 Setting Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@v5
with:
node-version: 22
node-version: 20

- name: 📦 Install corepack
run: corepack enable && corepack prepare yarn@4.9.1 --activate

- name: 📂 Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT

- name: 💾 Cache yarn dependencies
uses: actions/cache@v4
with:
path: |
${{ steps.yarn-cache-dir-path.outputs.dir }}
node_modules
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-

- name: 📦 Install dependencies
- name: 📦 Install dependencies in isolated directory
run: |
yarn install
# 在临时目录安装依赖
mkdir -p /tmp/translation-deps
cd /tmp/translation-deps
echo '{"dependencies": {"@cherrystudio/openai": "^6.5.0", "cli-progress": "^3.12.0", "tsx": "^4.20.3", "@biomejs/biome": "2.2.4"}}' > package.json
npm install --no-package-lock

# 设置 NODE_PATH 让项目能找到这些依赖
echo "NODE_PATH=/tmp/translation-deps/node_modules" >> $GITHUB_ENV

- name: 🏃♀️ Translate
run: yarn sync:i18n && yarn auto:i18n
run: npx tsx scripts/sync-i18n.ts && npx tsx scripts/auto-translate-i18n.ts

- name: 🔍 Format
run: yarn format
run: cd /tmp/translation-deps && npx biome format --config-path /home/runner/work/cherry-studio/cherry-studio/biome.jsonc --write /home/runner/work/cherry-studio/cherry-studio/src/renderer/src/i18n/

- name: 🔍 Check for changes
id: git_status
- name: 🔄 Commit changes
run: |
# Check if there are any uncommitted changes
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add .
git reset -- package.json yarn.lock # 不提交 package.json 和 yarn.lock 的更改
git diff --exit-code --quiet || echo "::set-output name=has_changes::true"
git status --porcelain
if git diff --cached --quiet; then
echo "No changes to commit"
else
git commit -m "fix(i18n): Auto update translations for PR #${{ github.event.pull_request.number }}"
fi

- name: 📅 Set current date for PR title
id: set_date
run: echo "CURRENT_DATE=$(date +'%b %d, %Y')" >> $GITHUB_ENV # e.g., "Jun 06, 2024"

- name: 🚀 Create Pull Request if changes exist
if: steps.git_status.outputs.has_changes == 'true'
uses: peter-evans/create-pull-request@v6
- name: 🚀 Push changes
uses: ad-m/github-push-action@master
with:
token: ${{ secrets.GITHUB_TOKEN }} # Use the built-in GITHUB_TOKEN for bot actions
commit-message: "feat(bot): Weekly automated script run"
title: "🤖 Weekly Automated Update: ${{ env.CURRENT_DATE }}"
body: |
This PR includes changes generated by the weekly auto i18n.
Review the changes before merging.

---
_Generated by the automated weekly workflow_
branch: "auto-i18n-weekly-${{ github.run_id }}" # Unique branch name
base: "main" # Or 'develop', set your base branch
delete-branch: true # Delete the branch after merging or closing the PR

- name: 📢 Notify if no changes
if: steps.git_status.outputs.has_changes != 'true'
run: echo "Bot script ran, but no changes were detected. No PR created."
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: ${{ github.event.pull_request.head.ref }}
10 changes: .github/workflows/github-issue-tracker.yml (vendored)

@@ -5,7 +5,7 @@ on:
types: [opened]
schedule:
# Run every day at 8:30 Beijing Time (00:30 UTC)
- cron: "30 0 * * *"
- cron: '30 0 * * *'
workflow_dispatch:

jobs:

@@ -54,9 +54,9 @@ jobs:

- name: Setup Node.js
if: steps.check_time.outputs.should_delay == 'false'
uses: actions/setup-node@v6
uses: actions/setup-node@v4
with:
node-version: 22
node-version: '20'

- name: Process issue with Claude
if: steps.check_time.outputs.should_delay == 'false'

@@ -121,9 +121,9 @@ jobs:
uses: actions/checkout@v4

- name: Setup Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@v4
with:
node-version: 22
node-version: '20'

- name: Process pending issues with Claude
uses: anthropics/claude-code-action@main
10 changes: .github/workflows/issue-management.yml (vendored)

@@ -21,7 +21,7 @@ jobs:
contents: none
steps:
- name: Close needs-more-info issues
uses: actions/stale@v10
uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
only-labels: 'needs-more-info'

@@ -29,10 +29,8 @@ jobs:
days-before-close: 0 # Close immediately after stale
stale-issue-label: 'inactive'
close-issue-label: 'closed:no-response'
exempt-all-milestones: true
exempt-all-assignees: true
stale-issue-message: |
This issue has been labeled as needing more information and has been inactive for ${{ env.daysBeforeStale }} days.
This issue has been labeled as needing more information and has been inactive for ${{ env.daysBeforeStale }} days.
It will be closed now due to lack of additional information.

该问题被标记为"需要更多信息"且已经 ${{ env.daysBeforeStale }} 天没有任何活动,将立即关闭。

@@ -42,14 +40,12 @@ jobs:
days-before-pr-close: -1

- name: Close inactive issues
uses: actions/stale@v10
uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: ${{ env.daysBeforeStale }}
days-before-close: ${{ env.daysBeforeClose }}
stale-issue-label: 'inactive'
exempt-all-milestones: true
exempt-all-assignees: true
stale-issue-message: |
This issue has been inactive for a prolonged period and will be closed automatically in ${{ env.daysBeforeClose }} days.
该问题已长时间处于闲置状态,${{ env.daysBeforeClose }} 天后将自动关闭。
10 changes: .github/workflows/nightly-build.yml (vendored)

@@ -3,7 +3,7 @@ name: Nightly Build
on:
workflow_dispatch:
schedule:
- cron: "0 17 * * *" # 1:00 BJ Time
- cron: '0 17 * * *' # 1:00 BJ Time

permissions:
contents: write

@@ -56,9 +56,9 @@ jobs:
ref: main

- name: Install Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@v5
with:
node-version: 22
node-version: 20

- name: macos-latest dependencies fix
if: matrix.os == 'macos-latest'

@@ -66,7 +66,7 @@ jobs:
brew install python-setuptools

- name: Install corepack
run: corepack enable && corepack prepare yarn@4.9.1 --activate
run: corepack enable && corepack prepare yarn@4.6.0 --activate

- name: Get yarn cache directory path
id: yarn-cache-dir-path

@@ -208,7 +208,7 @@ jobs:
echo "总计: $(find renamed-artifacts -type f | wc -l) 个文件"

- name: Upload artifacts
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: cherry-studio-nightly-${{ steps.date.outputs.date }}-${{ matrix.os }}
path: renamed-artifacts/*
6 changes: .github/workflows/pr-ci.yml (vendored)

@@ -24,12 +24,12 @@ jobs:
uses: actions/checkout@v5

- name: Install Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@v5
with:
node-version: 22
node-version: 20

- name: Install corepack
run: corepack enable && corepack prepare yarn@4.9.1 --activate
run: corepack enable && corepack prepare yarn@4.6.0 --activate

- name: Get yarn cache directory path
id: yarn-cache-dir-path
12 changes: .github/workflows/release.yml (vendored)

@@ -4,9 +4,9 @@ on:
workflow_dispatch:
inputs:
tag:
description: "Release tag (e.g. v1.0.0)"
description: 'Release tag (e.g. v1.0.0)'
required: true
default: "v1.0.0"
default: 'v1.0.0'
push:
tags:
- v*.*.*

@@ -47,9 +47,9 @@ jobs:
npm version "$VERSION" --no-git-tag-version --allow-same-version

- name: Install Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@v5
with:
node-version: 22
node-version: 20

- name: macos-latest dependencies fix
if: matrix.os == 'macos-latest'

@@ -57,7 +57,7 @@ jobs:
brew install python-setuptools

- name: Install corepack
run: corepack enable && corepack prepare yarn@4.9.1 --activate
run: corepack enable && corepack prepare yarn@4.6.0 --activate

- name: Get yarn cache directory path
id: yarn-cache-dir-path

@@ -127,5 +127,5 @@ jobs:
allowUpdates: true
makeLatest: false
tag: ${{ steps.get-tag.outputs.tag }}
artifacts: "dist/*.exe,dist/*.zip,dist/*.dmg,dist/*.AppImage,dist/*.snap,dist/*.deb,dist/*.rpm,dist/*.tar.gz,dist/latest*.yml,dist/rc*.yml,dist/beta*.yml,dist/*.blockmap"
artifacts: 'dist/*.exe,dist/*.zip,dist/*.dmg,dist/*.AppImage,dist/*.snap,dist/*.deb,dist/*.rpm,dist/*.tar.gz,dist/latest*.yml,dist/rc*.yml,dist/beta*.yml,dist/*.blockmap'
token: ${{ secrets.GITHUB_TOKEN }}
@@ -140,7 +140,7 @@
"typescript/await-thenable": "warn",
// "typescript/ban-ts-comment": "error",
"typescript/no-array-constructor": "error",
"typescript/consistent-type-imports": "error",
// "typescript/consistent-type-imports": "error",
"typescript/no-array-delete": "warn",
"typescript/no-base-to-string": "warn",
"typescript/no-duplicate-enum-values": "error",
13 changes: .yarn/patches/@ai-sdk-google-npm-2.0.20-b9102f9d54.patch (vendored, Normal file)

@@ -0,0 +1,13 @@
diff --git a/dist/index.mjs b/dist/index.mjs
index 69ab1599c76801dc1167551b6fa283dded123466..f0af43bba7ad1196fe05338817e65b4ebda40955 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -477,7 +477,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId?.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
@@ -1,26 +0,0 @@
diff --git a/dist/index.js b/dist/index.js
index 4cc66d83af1cef39f6447dc62e680251e05ddf9f..eb9819cb674c1808845ceb29936196c4bb355172 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -471,7 +471,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
diff --git a/dist/index.mjs b/dist/index.mjs
index a032505ec54e132dc386dde001dc51f710f84c83..5efada51b9a8b56e3f01b35e734908ebe3c37043 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -477,7 +477,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
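Both forms of the @ai-sdk/google patch above change the same bundled helper. The following standalone TypeScript sketch only restates the patched function and shows how it treats a few illustrative model IDs (the example IDs are made up; nothing beyond the function body comes from the diff):

```typescript
// Standalone restatement of the patched helper from @ai-sdk/google's dist bundle.
function getModelPath(modelId?: string): string {
  return modelId?.includes("models/") ? modelId : `models/${modelId}`;
}

// A bare model ID still gets the "models/" prefix:
console.log(getModelPath("gemini-1.5-pro"));        // "models/gemini-1.5-pro"
// An ID that already contains "models/" passes through unchanged:
console.log(getModelPath("models/gemini-1.5-pro")); // "models/gemini-1.5-pro"
// Unlike the original check on any "/", other slash-containing IDs are now prefixed too:
console.log(getModelPath("publishers/google/gemini-1.5-pro")); // "models/publishers/google/gemini-1.5-pro"
```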
@@ -1,131 +0,0 @@
diff --git a/dist/index.mjs b/dist/index.mjs
index b3f018730a93639aad7c203f15fb1aeb766c73f4..ade2a43d66e9184799d072153df61ef7be4ea110 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -296,7 +296,14 @@ var HuggingFaceResponsesLanguageModel = class {
metadata: huggingfaceOptions == null ? void 0 : huggingfaceOptions.metadata,
instructions: huggingfaceOptions == null ? void 0 : huggingfaceOptions.instructions,
...preparedTools && { tools: preparedTools },
- ...preparedToolChoice && { tool_choice: preparedToolChoice }
+ ...preparedToolChoice && { tool_choice: preparedToolChoice },
+ ...(huggingfaceOptions?.reasoningEffort != null && {
+ reasoning: {
+ ...(huggingfaceOptions?.reasoningEffort != null && {
+ effort: huggingfaceOptions.reasoningEffort,
+ }),
+ },
+ }),
};
return { args: baseArgs, warnings };
}
@@ -365,6 +372,20 @@ var HuggingFaceResponsesLanguageModel = class {
}
break;
}
+ case 'reasoning': {
+ for (const contentPart of part.content) {
+ content.push({
+ type: 'reasoning',
+ text: contentPart.text,
+ providerMetadata: {
+ huggingface: {
+ itemId: part.id,
+ },
+ },
+ });
+ }
+ break;
+ }
case "mcp_call": {
content.push({
type: "tool-call",
@@ -519,6 +540,11 @@ var HuggingFaceResponsesLanguageModel = class {
id: value.item.call_id,
toolName: value.item.name
});
+ } else if (value.item.type === 'reasoning') {
+ controller.enqueue({
+ type: 'reasoning-start',
+ id: value.item.id,
+ });
}
return;
}
@@ -570,6 +596,22 @@ var HuggingFaceResponsesLanguageModel = class {
});
return;
}
+ if (isReasoningDeltaChunk(value)) {
+ controller.enqueue({
+ type: 'reasoning-delta',
+ id: value.item_id,
+ delta: value.delta,
+ });
+ return;
+ }
+
+ if (isReasoningEndChunk(value)) {
+ controller.enqueue({
+ type: 'reasoning-end',
+ id: value.item_id,
+ });
+ return;
+ }
},
flush(controller) {
controller.enqueue({
@@ -593,7 +635,8 @@ var HuggingFaceResponsesLanguageModel = class {
var huggingfaceResponsesProviderOptionsSchema = z2.object({
metadata: z2.record(z2.string(), z2.string()).optional(),
instructions: z2.string().optional(),
- strictJsonSchema: z2.boolean().optional()
+ strictJsonSchema: z2.boolean().optional(),
+ reasoningEffort: z2.string().optional(),
});
var huggingfaceResponsesResponseSchema = z2.object({
id: z2.string(),
@@ -727,12 +770,31 @@ var responseCreatedChunkSchema = z2.object({
model: z2.string()
})
});
+var reasoningTextDeltaChunkSchema = z2.object({
+ type: z2.literal('response.reasoning_text.delta'),
+ item_id: z2.string(),
+ output_index: z2.number(),
+ content_index: z2.number(),
+ delta: z2.string(),
+ sequence_number: z2.number(),
+});
+
+var reasoningTextEndChunkSchema = z2.object({
+ type: z2.literal('response.reasoning_text.done'),
+ item_id: z2.string(),
+ output_index: z2.number(),
+ content_index: z2.number(),
+ text: z2.string(),
+ sequence_number: z2.number(),
+});
var huggingfaceResponsesChunkSchema = z2.union([
responseOutputItemAddedSchema,
responseOutputItemDoneSchema,
textDeltaChunkSchema,
responseCompletedChunkSchema,
responseCreatedChunkSchema,
+ reasoningTextDeltaChunkSchema,
+ reasoningTextEndChunkSchema,
z2.object({ type: z2.string() }).loose()
// fallback for unknown chunks
]);
@@ -751,6 +813,12 @@ function isResponseCompletedChunk(chunk) {
function isResponseCreatedChunk(chunk) {
return chunk.type === "response.created";
}
+function isReasoningDeltaChunk(chunk) {
+ return chunk.type === 'response.reasoning_text.delta';
+}
+function isReasoningEndChunk(chunk) {
+ return chunk.type === 'response.reasoning_text.done';
+}

// src/huggingface-provider.ts
function createHuggingFace(options = {}) {
@@ -1,76 +0,0 @@
diff --git a/dist/index.js b/dist/index.js
index cc6652c4e7f32878a64a2614115bf7eeb3b7c890..76e989017549c89b45d633525efb1f318026d9b2 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -274,6 +274,7 @@ var openaiChatResponseSchema = (0, import_provider_utils3.lazyValidator)(
message: import_v42.z.object({
role: import_v42.z.literal("assistant").nullish(),
content: import_v42.z.string().nullish(),
+ reasoning_content: import_v42.z.string().nullish(),
tool_calls: import_v42.z.array(
import_v42.z.object({
id: import_v42.z.string().nullish(),
@@ -340,6 +341,7 @@ var openaiChatChunkSchema = (0, import_provider_utils3.lazyValidator)(
delta: import_v42.z.object({
role: import_v42.z.enum(["assistant"]).nullish(),
content: import_v42.z.string().nullish(),
+ reasoning_content: import_v42.z.string().nullish(),
tool_calls: import_v42.z.array(
import_v42.z.object({
index: import_v42.z.number(),
@@ -785,6 +787,14 @@ var OpenAIChatLanguageModel = class {
if (text != null && text.length > 0) {
content.push({ type: "text", text });
}
+ const reasoning =
+ choice.message.reasoning_content;
+ if (reasoning != null && reasoning.length > 0) {
+ content.push({
+ type: 'reasoning',
+ text: reasoning,
+ });
+ }
for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
content.push({
type: "tool-call",
@@ -866,6 +876,7 @@ var OpenAIChatLanguageModel = class {
};
let isFirstChunk = true;
let isActiveText = false;
+ let isActiveReasoning = false;
const providerMetadata = { openai: {} };
return {
stream: response.pipeThrough(
@@ -920,6 +931,22 @@ var OpenAIChatLanguageModel = class {
return;
}
const delta = choice.delta;
+ const reasoningContent = delta.reasoning_content;
+ if (reasoningContent) {
+ if (!isActiveReasoning) {
+ controller.enqueue({
+ type: 'reasoning-start',
+ id: 'reasoning-0',
+ });
+ isActiveReasoning = true;
+ }
+
+ controller.enqueue({
+ type: 'reasoning-delta',
+ id: 'reasoning-0',
+ delta: reasoningContent,
+ });
+ }
if (delta.content != null) {
if (!isActiveText) {
controller.enqueue({ type: "text-start", id: "0" });
@@ -1032,6 +1059,9 @@ var OpenAIChatLanguageModel = class {
}
},
flush(controller) {
+ if (isActiveReasoning) {
+ controller.enqueue({ type: 'reasoning-end', id: 'reasoning-0' });
+ }
if (isActiveText) {
controller.enqueue({ type: "text-end", id: "0" });
}
@@ -1,24 +1,24 @@
diff --git a/sdk.mjs b/sdk.mjs
index 10162e5b1624f8ce667768943347a6e41089ad2f..32568ae08946590e382270c88d85fba81187568e 100755
index 461e9a2ba246778261108a682762ffcf26f7224e..44bd667d9f591969d36a105ba5eb8b478c738dd8 100644
--- a/sdk.mjs
+++ b/sdk.mjs
@@ -6213,7 +6213,7 @@ function createAbortController(maxListeners = DEFAULT_MAX_LISTENERS) {
@@ -6215,7 +6215,7 @@ function createAbortController(maxListeners = DEFAULT_MAX_LISTENERS) {
}

// ../src/transport/ProcessTransport.ts
-import { spawn } from "child_process";
+import { fork } from "child_process";
import { createInterface } from "readline";

// ../src/utils/fsOperations.ts
@@ -6487,14 +6487,11 @@ class ProcessTransport {
@@ -6473,14 +6473,11 @@ class ProcessTransport {
const errorMessage = isNativeBinary(pathToClaudeCodeExecutable) ? `Claude Code native binary not found at ${pathToClaudeCodeExecutable}. Please ensure Claude Code is installed via native installer or specify a valid path with options.pathToClaudeCodeExecutable.` : `Claude Code executable not found at ${pathToClaudeCodeExecutable}. Is options.pathToClaudeCodeExecutable set?`;
throw new ReferenceError(errorMessage);
}
- const isNative = isNativeBinary(pathToClaudeCodeExecutable);
- const spawnCommand = isNative ? pathToClaudeCodeExecutable : executable;
- const spawnArgs = isNative ? [...executableArgs, ...args] : [...executableArgs, pathToClaudeCodeExecutable, ...args];
- this.logForDebugging(isNative ? `Spawning Claude Code native binary: ${spawnCommand} ${spawnArgs.join(" ")}` : `Spawning Claude Code process: ${spawnCommand} ${spawnArgs.join(" ")}`);
- const spawnArgs = isNative ? args : [...executableArgs, pathToClaudeCodeExecutable, ...args];
- this.logForDebugging(isNative ? `Spawning Claude Code native binary: ${pathToClaudeCodeExecutable} ${args.join(" ")}` : `Spawning Claude Code process: ${executable} ${[...executableArgs, pathToClaudeCodeExecutable, ...args].join(" ")}`);
+ this.logForDebugging(`Forking Claude Code Node.js process: ${pathToClaudeCodeExecutable} ${args.join(" ")}`);
const stderrMode = env.DEBUG || stderr ? "pipe" : "ignore";
- this.child = spawn(spawnCommand, spawnArgs, {
71 changes: .yarn/patches/@langchain-core-npm-0.3.44-41d5c3cb0a.patch (vendored, Normal file)

@@ -0,0 +1,71 @@
diff --git a/dist/utils/tiktoken.cjs b/dist/utils/tiktoken.cjs
index 973b0d0e75aeaf8de579419af31b879b32975413..f23c7caa8b9dc8bd404132725346a4786f6b278b 100644
--- a/dist/utils/tiktoken.cjs
+++ b/dist/utils/tiktoken.cjs
@@ -1,25 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.encodingForModel = exports.getEncoding = void 0;
-const lite_1 = require("js-tiktoken/lite");
const async_caller_js_1 = require("./async_caller.cjs");
const cache = {};
const caller = /* #__PURE__ */ new async_caller_js_1.AsyncCaller({});
async function getEncoding(encoding) {
- if (!(encoding in cache)) {
- cache[encoding] = caller
- .fetch(`https://tiktoken.pages.dev/js/${encoding}.json`)
- .then((res) => res.json())
- .then((data) => new lite_1.Tiktoken(data))
- .catch((e) => {
- delete cache[encoding];
- throw e;
- });
- }
- return await cache[encoding];
+ throw new Error("TikToken Not implemented");
}
exports.getEncoding = getEncoding;
async function encodingForModel(model) {
- return getEncoding((0, lite_1.getEncodingNameForModel)(model));
+ throw new Error("TikToken Not implemented");
}
exports.encodingForModel = encodingForModel;
diff --git a/dist/utils/tiktoken.js b/dist/utils/tiktoken.js
index 8e41ee6f00f2f9c7fa2c59fa2b2f4297634b97aa..aa5f314a6349ad0d1c5aea8631a56aad099176e0 100644
--- a/dist/utils/tiktoken.js
+++ b/dist/utils/tiktoken.js
@@ -1,20 +1,9 @@
-import { Tiktoken, getEncodingNameForModel, } from "js-tiktoken/lite";
import { AsyncCaller } from "./async_caller.js";
const cache = {};
const caller = /* #__PURE__ */ new AsyncCaller({});
export async function getEncoding(encoding) {
- if (!(encoding in cache)) {
- cache[encoding] = caller
- .fetch(`https://tiktoken.pages.dev/js/${encoding}.json`)
- .then((res) => res.json())
- .then((data) => new Tiktoken(data))
- .catch((e) => {
- delete cache[encoding];
- throw e;
- });
- }
- return await cache[encoding];
+ throw new Error("TikToken Not implemented");
}
export async function encodingForModel(model) {
- return getEncoding(getEncodingNameForModel(model));
+ throw new Error("TikToken Not implemented");
}
diff --git a/package.json b/package.json
index 36072aecf700fca1bc49832a19be832eca726103..90b8922fba1c3d1b26f78477c891b07816d6238a 100644
--- a/package.json
+++ b/package.json
@@ -37,7 +37,6 @@
"ansi-styles": "^5.0.0",
"camelcase": "6",
"decamelize": "1.2.0",
- "js-tiktoken": "^1.0.12",
"langsmith": ">=0.2.8 <0.4.0",
"mustache": "^4.2.0",
"p-queue": "^6.6.2",
@@ -1,68 +0,0 @@
diff --git a/dist/utils/tiktoken.cjs b/dist/utils/tiktoken.cjs
index c5b41f121d2e3d24c3a4969e31fa1acffdcad3b9..ec724489dcae79ee6c61acf2d4d84bd19daef036 100644
--- a/dist/utils/tiktoken.cjs
+++ b/dist/utils/tiktoken.cjs
@@ -1,6 +1,5 @@
const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
const require_utils_async_caller = require('./async_caller.cjs');
-const js_tiktoken_lite = require_rolldown_runtime.__toESM(require("js-tiktoken/lite"));

//#region src/utils/tiktoken.ts
var tiktoken_exports = {};
@@ -11,14 +10,10 @@ require_rolldown_runtime.__export(tiktoken_exports, {
const cache = {};
const caller = /* @__PURE__ */ new require_utils_async_caller.AsyncCaller({});
async function getEncoding(encoding) {
- if (!(encoding in cache)) cache[encoding] = caller.fetch(`https://tiktoken.pages.dev/js/${encoding}.json`).then((res) => res.json()).then((data) => new js_tiktoken_lite.Tiktoken(data)).catch((e) => {
- delete cache[encoding];
- throw e;
- });
- return await cache[encoding];
+ throw new Error("TikToken Not implemented");
}
async function encodingForModel(model) {
- return getEncoding((0, js_tiktoken_lite.getEncodingNameForModel)(model));
+ throw new Error("TikToken Not implemented");
}

//#endregion
diff --git a/dist/utils/tiktoken.js b/dist/utils/tiktoken.js
index 641acca03cb92f04a6fa5c9c31f1880ce635572e..707389970ad957aa0ff20ef37fa8dd2875be737c 100644
--- a/dist/utils/tiktoken.js
+++ b/dist/utils/tiktoken.js
@@ -1,6 +1,5 @@
import { __export } from "../_virtual/rolldown_runtime.js";
import { AsyncCaller } from "./async_caller.js";
-import { Tiktoken, getEncodingNameForModel } from "js-tiktoken/lite";

//#region src/utils/tiktoken.ts
var tiktoken_exports = {};
@@ -11,14 +10,10 @@ __export(tiktoken_exports, {
const cache = {};
const caller = /* @__PURE__ */ new AsyncCaller({});
async function getEncoding(encoding) {
- if (!(encoding in cache)) cache[encoding] = caller.fetch(`https://tiktoken.pages.dev/js/${encoding}.json`).then((res) => res.json()).then((data) => new Tiktoken(data)).catch((e) => {
- delete cache[encoding];
- throw e;
- });
- return await cache[encoding];
+ throw new Error("TikToken Not implemented");
}
async function encodingForModel(model) {
- return getEncoding(getEncodingNameForModel(model));
+ throw new Error("TikToken Not implemented");
}

//#endregion
diff --git a/package.json b/package.json
index a24f8fc61de58526051999260f2ebee5f136354b..e885359e8966e7730c51772533ce37e01edb3046 100644
--- a/package.json
+++ b/package.json
@@ -20,7 +20,6 @@
"ansi-styles": "^5.0.0",
"camelcase": "6",
"decamelize": "1.2.0",
- "js-tiktoken": "^1.0.12",
"langsmith": "^0.3.64",
"mustache": "^4.2.0",
"p-queue": "^6.6.2",
19 changes: .yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch (vendored, Normal file)

@@ -0,0 +1,19 @@
diff --git a/dist/embeddings.js b/dist/embeddings.js
index 1f8154be3e9c22442a915eb4b85fa6d2a21b0d0c..dc13ef4a30e6c282824a5357bcee9bd0ae222aab 100644
--- a/dist/embeddings.js
+++ b/dist/embeddings.js
@@ -214,10 +214,12 @@ export class OpenAIEmbeddings extends Embeddings {
* @returns Promise that resolves to an embedding for the document.
*/
async embedQuery(text) {
+ const isBaiduCloud = this.clientConfig.baseURL.includes('baidubce.com')
+ const input = this.stripNewLines ? text.replace(/\n/g, ' ') : text
const params = {
model: this.model,
- input: this.stripNewLines ? text.replace(/\n/g, " ") : text,
- };
+ input: isBaiduCloud ? [input] : input
+ }
if (this.dimensions) {
params.dimensions = this.dimensions;
}
@@ -1,17 +0,0 @@
diff --git a/dist/embeddings.js b/dist/embeddings.js
index 6f4b928d3e4717309382e1b5c2e31ab5bc6c5af0..bc79429c88a6d27d4997a2740c4d8ae0707f5991 100644
--- a/dist/embeddings.js
+++ b/dist/embeddings.js
@@ -94,9 +94,11 @@ var OpenAIEmbeddings = class extends Embeddings {
* @returns Promise that resolves to an embedding for the document.
*/
async embedQuery(text) {
+ const isBaiduCloud = this.clientConfig.baseURL.includes('baidubce.com');
+ const input = this.stripNewLines ? text.replace(/\n/g, " ") : text
const params = {
model: this.model,
- input: this.stripNewLines ? text.replace(/\n/g, " ") : text
+ input: isBaiduCloud ? [input] : input
};
if (this.dimensions) params.dimensions = this.dimensions;
if (this.encodingFormat) params.encoding_format = this.encodingFormat;
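Both @langchain/openai patches above make the same change to `embedQuery`: when the configured base URL points at `baidubce.com`, the single query string is wrapped in an array. A minimal sketch of that parameter shaping, with the class fields flattened into plain arguments for illustration (the example URLs and model names below are hypothetical, not taken from the diff):

```typescript
// Illustrative only: mirrors the parameter construction the patch adds to embedQuery.
function buildEmbeddingParams(model: string, text: string, baseURL: string, stripNewLines = true) {
  const isBaiduCloud = baseURL.includes('baidubce.com')
  const input = stripNewLines ? text.replace(/\n/g, ' ') : text
  // For baidubce.com hosts the patch sends `input` as a one-element array;
  // for every other host the plain string is kept, as before.
  return { model, input: isBaiduCloud ? [input] : input }
}

console.log(buildEmbeddingParams('embedding-v1', 'hello\nworld', 'https://qianfan.baidubce.com/v2'))
// { model: 'embedding-v1', input: [ 'hello world' ] }
console.log(buildEmbeddingParams('text-embedding-3-small', 'hello', 'https://api.openai.com/v1'))
// { model: 'text-embedding-3-small', input: 'hello' }
```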
@@ -7,11 +7,11 @@ This file provides guidance to AI coding assistants when working with code in th
- **Keep it clear**: Write code that is easy to read, maintain, and explain.
- **Match the house style**: Reuse existing patterns, naming, and conventions.
- **Search smart**: Prefer `ast-grep` for semantic queries; fall back to `rg`/`grep` when needed.
- **Build with HeroUI**: Use HeroUI for every new UI component; never add `antd` or `styled-components`.
- **Log centrally**: Route all logging through `loggerService` with the right context—no `console.log`.
- **Research via subagent**: Lean on `subagent` for external docs, APIs, news, and references.
- **Always propose before executing**: Before making any changes, clearly explain your planned approach and wait for explicit user approval to ensure alignment and prevent unwanted modifications.
- **Write conventional commits with emoji**: Commit small, focused changes using emoji-prefixed Conventional Commit messages (e.g., `✨ feat:`, `🐛 fix:`, `♻️ refactor:`, `📝 docs:`).
- **Seek review**: Ask a human developer to review substantial changes before merging.
- **Commit in rhythm**: Keep commits small, conventional, and emoji-tagged.

## Development Commands

@@ -40,6 +40,7 @@ This file provides guidance to AI coding assistants when working with code in th
- **Services** (`src/main/services/`): MCPService, KnowledgeService, WindowService, etc.
- **Build System**: Electron-Vite with experimental rolldown-vite, yarn workspaces.
- **State Management**: Redux Toolkit (`src/renderer/src/store/`) for predictable state.
- **UI Components**: HeroUI (`@heroui/*`) for all new UI elements.

### Logging
```typescript
```
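The `### Logging` example above is truncated in this view. As a rough, hypothetical illustration of the "log centrally" rule it accompanies (the `@logger` import path and `withContext` API are assumptions, not shown in this diff):

```typescript
// Hypothetical sketch: the real loggerService API is not visible in this diff.
import { loggerService } from '@logger' // assumed import alias

const logger = loggerService.withContext('KnowledgeService') // assumed context API

export function rebuildIndex(id: string): void {
  // Route through the central logger instead of console.log so levels and
  // module context stay consistent.
  logger.info('rebuilding knowledge index', { id })
}
```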
@@ -65,28 +65,7 @@ The Test Plan aims to provide users with a more stable application experience an
### Other Suggestions

- **Contact Developers**: Before submitting a PR, you can contact the developers first to discuss or get help.

## Important Contribution Guidelines & Focus Areas

Please review the following critical information before submitting your Pull Request:

### Temporary Restriction on Data-Changing Feature PRs 🚫

**Currently, we are NOT accepting feature Pull Requests that introduce changes to our Redux data models or IndexedDB schemas.**

Our core team is currently focused on significant architectural updates that involve these data structures. To ensure stability and focus during this period, contributions of this nature will be temporarily managed internally.

* **PRs that require changes to Redux state shape or IndexedDB schemas will be closed.**
* **This restriction is temporary and will be lifted with the release of `v2.0.0`.** You can track the progress of `v2.0.0` and its related discussions on issue [#10162](https://github.com/CherryHQ/cherry-studio/pull/10162).

We highly encourage contributions for:
* Bug fixes 🐞
* Performance improvements 🚀
* Documentation updates 📚
* Features that **do not** alter Redux data models or IndexedDB schemas (e.g., UI enhancements, new components, minor refactors). ✨

We appreciate your understanding and continued support during this important development phase. Thank you!

- **Become a Core Developer**: If you contribute to the project consistently, congratulations, you can become a core developer and gain project membership status. Please check our [Membership Guide](https://github.com/CherryHQ/community/blob/main/docs/membership.en.md).

## Contact Us
@@ -37,7 +37,7 @@
<p align="center">English | <a href="./docs/README.zh.md">中文</a> | <a href="https://cherry-ai.com">Official Site</a> | <a href="https://docs.cherry-ai.com/cherry-studio-wen-dang/en-us">Documents</a> | <a href="./docs/dev.md">Development</a> | <a href="https://github.com/CherryHQ/cherry-studio/issues">Feedback</a><br></p>

<div align="center">

[![][deepwiki-shield]][deepwiki-link]
[![][twitter-shield]][twitter-link]
[![][discord-shield]][discord-link]

@@ -45,7 +45,7 @@

</div>
<div align="center">

[![][github-release-shield]][github-release-link]
[![][github-nightly-shield]][github-nightly-link]
[![][github-contributors-shield]][github-contributors-link]

@@ -248,10 +248,10 @@ The Enterprise Edition addresses core challenges in team collaboration by centra

| Feature | Community Edition | Enterprise Edition |
| :---------------- | :----------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------- |
| **Open Source** | ✅ Yes | ⭕️ Partially released to customers |
| **Open Source** | ✅ Yes | ⭕️ Partially released to customers |
| **Cost** | Free for Personal Use / Commercial License | Buyout / Subscription Fee |
| **Admin Backend** | — | ● Centralized **Model** Access<br>● **Employee** Management<br>● Shared **Knowledge Base**<br>● **Access** Control<br>● **Data** Backup |
| **Server** | — | ✅ Dedicated Private Deployment |
| **Server** | — | ✅ Dedicated Private Deployment |

## Get the Enterprise Edition
@@ -21,11 +21,7 @@
"quoteStyle": "single"
}
},
"files": {
"ignoreUnknown": false,
"includes": ["**", "!**/.claude/**"],
"maxSize": 2097152
},
"files": { "ignoreUnknown": false },
"formatter": {
"attributePosition": "auto",
"bracketSameLine": false,
@@ -69,28 +69,7 @@ git commit --signoff -m "Your commit message"
### 其他建议

- **联系开发者**:在提交 PR 之前,您可以先和开发者进行联系,共同探讨或者获取帮助。

## 重要贡献指南与关注点

在提交 Pull Request 之前,请务必阅读以下关键信息:

### 🚫 暂时限制涉及数据更改的功能性 PR

**目前,我们不接受涉及 Redux 数据模型或 IndexedDB schema 变更的功能性 Pull Request。**

我们的核心团队目前正专注于涉及这些数据结构的关键架构更新和基础工作。为确保在此期间的稳定性与专注,此类贡献将暂时由内部进行管理。

* **需要更改 Redux 状态结构或 IndexedDB schema 的 PR 将会被关闭。**
* **此限制是临时性的,并将在 `v2.0.0` 版本发布后解除。** 您可以通过 Issue [#10162](https://github.com/CherryHQ/cherry-studio/pull/10162) 跟踪 `v2.0.0` 的进展及相关讨论。

我们非常鼓励以下类型的贡献:
* 错误修复 🐞
* 性能改进 🚀
* 文档更新 📚
* 不改变 Redux 数据模型或 IndexedDB schema 的功能(例如,UI 增强、新组件、小型重构)。✨

感谢您在此重要开发阶段的理解与持续支持。谢谢!

- **成为核心开发者**:如果您能够稳定为项目贡献,恭喜您可以成为项目核心开发者,获取到项目成员身份。请查看我们的[成员指南](https://github.com/CherryHQ/community/blob/main/membership.md)

## 联系我们
@@ -18,13 +18,13 @@ yarn

### Setup Node.js

Download and install [Node.js v22.x.x](https://nodejs.org/en/download)
Download and install [Node.js v20.x.x](https://nodejs.org/en/download)

### Setup Yarn

```bash
corepack enable
corepack prepare yarn@4.9.1 --activate
corepack prepare yarn@4.6.0 --activate
```

### Install Dependencies
@@ -11,8 +11,6 @@ The Test Plan is divided into the RC channel and the Beta channel, with the foll

Users can enable the "Test Plan" and select the version channel in the software's `Settings` > `About`. Please note that the versions in the "Test Plan" cannot guarantee data consistency, so be sure to back up your data before using them.

After enabling the RC channel or Beta channel, if a stable version is released, users will still be upgraded to the stable version.

Users are welcome to submit issues or provide feedback through other channels for any bugs encountered during testing. Your feedback is very important to us.

## Developer Guide

@@ -11,8 +11,6 @@

用户可以在软件的`设置`-`关于`中,开启“测试计划”并选择版本通道。请注意“测试计划”的版本无法保证数据的一致性,请使用前一定要备份数据。

用户选择RC版通道或Beta版通道后,若发布了正式版,仍旧会升级到正式版。

用户在测试过程中发现的BUG,欢迎提交issue或通过其他渠道反馈。用户的反馈对我们非常重要。

## 开发者指南
@@ -21,8 +21,6 @@ files:
- "**/*"
- "!**/{.vscode,.yarn,.yarn-lock,.github,.cursorrules,.prettierrc}"
- "!electron.vite.config.{js,ts,mjs,cjs}}"
- "!.*"
- "!components.json"
- "!**/{.eslintignore,.eslintrc.js,.eslintrc.json,.eslintcache,root.eslint.config.js,eslint.config.js,.eslintrc.cjs,.prettierignore,.prettierrc.yaml,eslint.config.mjs,dev-app-update.yml,CHANGELOG.md,README.md,biome.jsonc}"
- "!**/{.env,.env.*,.npmrc,pnpm-lock.yaml}"
- "!**/{tsconfig.json,tsconfig.tsbuildinfo,tsconfig.node.json,tsconfig.web.json}"

@@ -66,12 +64,6 @@ asarUnpack:
- resources/**
- "**/*.{metal,exp,lib}"
- "node_modules/@img/sharp-libvips-*/**"

# copy from node_modules/claude-code-plugins/plugins to resources/data/claude-code-pluginso
extraResources:
- from: "./node_modules/claude-code-plugins/plugins/"
to: "claude-code-plugins"

win:
executableName: Cherry Studio
artifactName: ${productName}-${version}-${arch}-setup.${ext}

@@ -135,59 +127,60 @@ artifactBuildCompleted: scripts/artifact-build-completed.js
releaseInfo:
releaseNotes: |
<!--LANG:en-->
What's New in v1.7.0-beta.4

Major Changes:
- UI Framework Upgrade: Improved performance and user experience with new design system
- App Menu i18n: Menu now supports multiple languages and syncs with app language settings
What's New in v1.7.0-beta.2

New Features:
- AWS Bedrock API Key: Support Bedrock API key authentication with Extended Thinking (reasoning) capability
- SophNet Provider: Added support for SophNet LLM provider
- Auto Session Rename: Agent sessions automatically rename based on conversation topics
- TopP Parameter: Added TopP parameter support for more precise model control
- Reasoning Effort Control: Quick access to reasoning effort settings in input bar
- Session Settings: Manage session-specific settings and model configurations independently
- Notes Full-Text Search: Search across all notes with match highlighting
- Built-in DiDi MCP Server: Integration with DiDi ride-hailing services (China only)
- Intel OV OCR: Hardware-accelerated OCR using Intel NPU
- Auto-start API Server: Automatically starts when agents exist

Improvements:
- Topics & Sessions: Enhanced UI with better styling and smoother interactions
- Quick Panel: Improved option visibility and control
- Painting Models: Smarter model initialization with better defaults
- System Shutdown: Better handling of shutdown events to prevent data loss
- Smaller Package Size: Optimized build configuration for faster downloads
- Agent model selection now requires explicit user choice
- Added Mistral AI provider support
- Added NewAPI generic provider support
- Improved navbar layout consistency across different modes
- Enhanced chat component responsiveness
- Better code block display on small screens
- Updated OVMS to 2025.3 official release
- Added Greek language support

Bug Fixes:
- Fixed Perplexity provider support and API host formatting
- Fixed CherryAI provider support and API host formatting
- Fixed i18n translations for painting image size options
- Fixed agent session message token usage tracking
- Fixed prompt stream handling on completion or error
- Fixed message API initialization issues
- Fixed GitHub Copilot gpt-5-codex streaming issues
- Fixed assistant creation failures
- Fixed translate auto-copy functionality
- Fixed miniapps external link opening
- Fixed message layout and overflow issues
- Fixed API key parsing to preserve spaces
- Fixed agent display in different navbar layouts

<!--LANG:zh-CN-->
v1.7.0-beta.4 新特性

重大变更:
- UI 框架升级:采用新设计系统,提升性能和用户体验
- 应用菜单国际化:菜单支持多语言,并自动同步应用语言设置
v1.7.0-beta.2 新特性

新功能:
- AWS Bedrock API 密钥:支持 Bedrock API 密钥身份验证,并支持扩展思考(推理)能力
- SophNet 提供商:添加 SophNet LLM 提供商支持
- 自动会话重命名:Agent 会话根据对话主题自动重命名
- TopP 参数:添加 TopP 参数支持,更精确控制模型输出
- 会话设置:独立管理会话特定的设置和模型配置
- 笔记全文搜索:跨所有笔记搜索并高亮匹配内容
- 内置滴滴 MCP 服务器:集成滴滴打车服务(仅限中国地区)
- Intel OV OCR:使用 Intel NPU 的硬件加速 OCR
- 自动启动 API 服务器:当存在 Agent 时自动启动

改进:
- 主题和会话:增强 UI,改进样式和交互体验
- 快速面板:改进选项可见性和控制
- 绘图模型:更智能的模型初始化和更好的默认值
- 系统关机:更好地处理关机事件,防止数据丢失
- 更小的安装包:优化构建配置,下载更快
- Agent 模型选择现在需要用户显式选择
- 添加 Mistral AI 提供商支持
- 添加 NewAPI 通用提供商支持
- 改进不同模式下的导航栏布局一致性
- 增强聊天组件响应式设计
- 优化小屏幕代码块显示
- 更新 OVMS 至 2025.3 正式版
- 添加希腊语支持

问题修复:
- 修复 Perplexity 提供商支持和 API 主机格式化
- 修复 CherryAI 提供商支持和 API 主机格式化
- 修复绘图图像大小选项的 i18n 翻译
- 修复 Agent 会话消息的 token 使用量跟踪
- 修复完成或错误时的提示流处理
- 修复消息 API 初始化问题
- 修复 GitHub Copilot gpt-5-codex 流式传输问题
- 修复助手创建失败
- 修复翻译自动复制功能
- 修复小程序外部链接打开
- 修复消息布局和溢出问题
- 修复 API 密钥解析以保留空格
- 修复不同导航栏布局中的 Agent 显示
<!--LANG:END-->
46 changes: package.json

@@ -1,6 +1,6 @@
{
"name": "CherryStudio",
"version": "1.7.0-beta.3",
"version": "1.7.0-beta.2",
"private": true,
"description": "A powerful AI assistant for producer.",
"main": "./out/main/index.js",

@@ -78,25 +78,20 @@
"release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core npm publish --access public"
},
"dependencies": {
"@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.25#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.25-08bbabb5d3.patch",
"@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.1#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.1-d937b73fed.patch",
"@libsql/client": "0.14.0",
"@libsql/win32-x64-msvc": "^0.4.7",
"@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
"@paymoapp/electron-shutdown-handler": "^1.1.2",
"@strongtz/win32-arm64-msvc": "^0.4.7",
"express": "^5.1.0",
"font-list": "^2.0.0",
"graceful-fs": "^4.2.11",
"gray-matter": "^4.0.3",
"js-yaml": "^4.1.0",
"jsdom": "26.1.0",
"node-stream-zip": "^1.15.0",
"officeparser": "^4.2.0",
"os-proxy-config": "^1.1.2",
"qrcode.react": "^4.2.0",
"selection-hook": "^1.0.12",
"sharp": "^0.34.3",
"socket.io": "^4.8.1",
"swagger-jsdoc": "^6.2.8",
"swagger-ui-express": "^5.0.1",
"tesseract.js": "patch:tesseract.js@npm%3A6.0.1#~/.yarn/patches/tesseract.js-npm-6.0.1-2562a7e46d.patch",

@@ -106,17 +101,16 @@
"@agentic/exa": "^7.3.3",
"@agentic/searxng": "^7.3.3",
"@agentic/tavily": "^7.3.3",
"@ai-sdk/amazon-bedrock": "^3.0.42",
"@ai-sdk/google-vertex": "^3.0.48",
"@ai-sdk/huggingface": "patch:@ai-sdk/huggingface@npm%3A0.0.4#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.4-8080836bc1.patch",
"@ai-sdk/amazon-bedrock": "^3.0.35",
"@ai-sdk/google-vertex": "^3.0.40",
"@ai-sdk/mistral": "^2.0.19",
"@ai-sdk/perplexity": "^2.0.13",
"@ant-design/v5-patch-for-react-19": "^1.0.3",
"@anthropic-ai/sdk": "^0.41.0",
"@anthropic-ai/vertex-sdk": "patch:@anthropic-ai/vertex-sdk@npm%3A0.11.4#~/.yarn/patches/@anthropic-ai-vertex-sdk-npm-0.11.4-c19cb41edb.patch",
"@aws-sdk/client-bedrock": "^3.910.0",
"@aws-sdk/client-bedrock-runtime": "^3.910.0",
"@aws-sdk/client-s3": "^3.910.0",
"@aws-sdk/client-bedrock": "^3.840.0",
"@aws-sdk/client-bedrock-runtime": "^3.840.0",
"@aws-sdk/client-s3": "^3.840.0",
"@biomejs/biome": "2.2.4",
"@cherrystudio/ai-core": "workspace:^1.0.0-alpha.18",
"@cherrystudio/embedjs": "^0.1.31",

@@ -147,15 +141,14 @@
"@eslint/js": "^9.22.0",
"@google/genai": "patch:@google/genai@npm%3A1.0.1#~/.yarn/patches/@google-genai-npm-1.0.1-e26f0f9af7.patch",
"@hello-pangea/dnd": "^18.0.1",
"@heroui/react": "^2.8.3",
"@kangfenmao/keyv-storage": "^0.1.0",
"@langchain/community": "^1.0.0",
"@langchain/core": "patch:@langchain/core@npm%3A1.0.2#~/.yarn/patches/@langchain-core-npm-1.0.2-183ef83fe4.patch",
"@langchain/openai": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
"@langchain/community": "^0.3.50",
"@mistralai/mistralai": "^1.7.5",
"@modelcontextprotocol/sdk": "^1.17.5",
"@mozilla/readability": "^0.6.0",
"@notionhq/client": "^2.2.15",
"@openrouter/ai-sdk-provider": "^1.2.0",
"@openrouter/ai-sdk-provider": "^1.1.2",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/core": "2.0.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.200.0",

@@ -201,7 +194,6 @@
"@types/fs-extra": "^11",
"@types/he": "^1",
"@types/html-to-text": "^9",
"@types/js-yaml": "^4.0.9",
"@types/lodash": "^4.17.5",
"@types/markdown-it": "^14",
"@types/md5": "^2.3.5",

@@ -231,7 +223,7 @@
"@viz-js/lang-dot": "^1.0.5",
"@viz-js/viz": "^3.14.0",
"@xyflow/react": "^12.4.4",
"ai": "^5.0.76",
"ai": "^5.0.68",
"antd": "patch:antd@npm%3A5.27.0#~/.yarn/patches/antd-npm-5.27.0-aa91c36546.patch",
"archiver": "^7.0.1",
"async-mutex": "^0.5.0",

@@ -241,7 +233,6 @@
"check-disk-space": "3.4.0",
"cheerio": "^1.1.2",
"chokidar": "^4.0.3",
"claude-code-plugins": "1.0.1",
"cli-progress": "^3.12.0",
"clsx": "^2.1.1",
"code-inspector-plugin": "^0.20.14",

@@ -348,7 +339,6 @@
"striptags": "^3.2.0",
"styled-components": "^6.1.11",
"swr": "^2.3.6",
"tailwind-merge": "^3.3.1",
"tailwindcss": "^4.1.13",
"tar": "^7.4.3",
"tiny-pinyin": "^1.3.2",

@@ -374,11 +364,12 @@
"zod": "^4.1.5"
},
"resolutions": {
"@smithy/types": "4.7.1",
"@codemirror/language": "6.11.3",
"@codemirror/lint": "6.8.5",
"@codemirror/view": "6.38.1",
"@langchain/core@npm:^0.3.26": "patch:@langchain/core@npm%3A1.0.2#~/.yarn/patches/@langchain-core-npm-1.0.2-183ef83fe4.patch",
"@langchain/core@npm:^0.3.26": "patch:@langchain/core@npm%3A0.3.44#~/.yarn/patches/@langchain-core-npm-0.3.44-41d5c3cb0a.patch",
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
"@langchain/openai@npm:>=0.1.0 <0.4.0": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
"app-builder-lib@npm:26.0.13": "patch:app-builder-lib@npm%3A26.0.13#~/.yarn/patches/app-builder-lib-npm-26.0.13-a064c9e1d0.patch",
"app-builder-lib@npm:26.0.15": "patch:app-builder-lib@npm%3A26.0.15#~/.yarn/patches/app-builder-lib-npm-26.0.15-360e5b0476.patch",
"atomically@npm:^1.7.0": "patch:atomically@npm%3A1.7.0#~/.yarn/patches/atomically-npm-1.7.0-e742e5293b.patch",

@@ -394,18 +385,13 @@
"undici": "6.21.2",
"vite": "npm:rolldown-vite@7.1.5",
"tesseract.js@npm:*": "patch:tesseract.js@npm%3A6.0.1#~/.yarn/patches/tesseract.js-npm-6.0.1-2562a7e46d.patch",
"@ai-sdk/google@npm:2.0.23": "patch:@ai-sdk/google@npm%3A2.0.23#~/.yarn/patches/@ai-sdk-google-npm-2.0.23-81682e07b0.patch",
"@ai-sdk/openai@npm:^2.0.52": "patch:@ai-sdk/openai@npm%3A2.0.52#~/.yarn/patches/@ai-sdk-openai-npm-2.0.52-b36d949c76.patch",
"@ai-sdk/google@npm:2.0.20": "patch:@ai-sdk/google@npm%3A2.0.20#~/.yarn/patches/@ai-sdk-google-npm-2.0.20-b9102f9d54.patch",
"@img/sharp-darwin-arm64": "0.34.3",
"@img/sharp-darwin-x64": "0.34.3",
"@img/sharp-linux-arm": "0.34.3",
"@img/sharp-linux-arm64": "0.34.3",
"@img/sharp-linux-x64": "0.34.3",
"@img/sharp-win32-x64": "0.34.3",
"openai@npm:5.12.2": "npm:@cherrystudio/openai@6.5.0",
"@langchain/openai@npm:>=0.1.0 <0.6.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
"@langchain/openai@npm:>=0.2.0 <0.7.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch"
"@img/sharp-win32-x64": "0.34.3"
},
"packageManager": "yarn@4.9.1",
"lint-staged": {
@@ -36,10 +36,10 @@
"ai": "^5.0.26"
},
"dependencies": {
"@ai-sdk/anthropic": "^2.0.32",
"@ai-sdk/azure": "^2.0.53",
"@ai-sdk/anthropic": "^2.0.27",
"@ai-sdk/azure": "^2.0.49",
"@ai-sdk/deepseek": "^1.0.23",
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.52#~/.yarn/patches/@ai-sdk-openai-npm-2.0.52-b36d949c76.patch",
"@ai-sdk/openai": "^2.0.48",
"@ai-sdk/openai-compatible": "^1.0.22",
"@ai-sdk/provider": "^2.0.0",
"@ai-sdk/provider-utils": "^3.0.12",
@@ -2,7 +2,7 @@
* 中间件管理器
* 专注于 AI SDK 中间件的管理,与插件系统分离
*/
import type { LanguageModelV2Middleware } from '@ai-sdk/provider'
import { LanguageModelV2Middleware } from '@ai-sdk/provider'

/**
* 创建中间件列表

@@ -1,7 +1,7 @@
/**
* 中间件系统类型定义
*/
import type { LanguageModelV2Middleware } from '@ai-sdk/provider'
import { LanguageModelV2Middleware } from '@ai-sdk/provider'

/**
* 具名中间件接口

@@ -2,7 +2,7 @@
* 模型包装工具函数
* 用于将中间件应用到LanguageModel上
*/
import type { LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
import { LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
import { wrapLanguageModel } from 'ai'

/**

@@ -5,7 +5,7 @@
* 集成了来自 ModelCreator 的特殊处理逻辑
*/

import type { EmbeddingModelV2, ImageModelV2, LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
import { EmbeddingModelV2, ImageModelV2, LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'

import { wrapModelWithMiddlewares } from '../middleware/wrapper'
import { DEFAULT_SEPARATOR, globalRegistryManagement } from '../providers/RegistryManagement'

@@ -1,7 +1,7 @@
/**
* Creation 模块类型定义
*/
import type { LanguageModelV2Middleware } from '@ai-sdk/provider'
import { LanguageModelV2Middleware } from '@ai-sdk/provider'

import type { ProviderId, ProviderSettingsMap } from '../providers/types'

@@ -1,4 +1,4 @@
import type { ExtractProviderOptions, ProviderOptionsMap, TypedProviderOptions } from './types'
import { ExtractProviderOptions, ProviderOptionsMap, TypedProviderOptions } from './types'

/**
* 创建特定供应商的选项

@@ -10,7 +10,7 @@ import type { AiRequestContext } from '../../types'
import { StreamEventManager } from './StreamEventManager'
import { type TagConfig, TagExtractor } from './tagExtraction'
import { ToolExecutor } from './ToolExecutor'
import type { PromptToolUseConfig, ToolUseResult } from './type'
import { PromptToolUseConfig, ToolUseResult } from './type'

/**
* 工具使用标签配置

@@ -1,6 +1,6 @@
import type { ToolSet } from 'ai'
import { ToolSet } from 'ai'

import type { AiRequestContext } from '../..'
import { AiRequestContext } from '../..'

/**
* 解析结果类型

@@ -1,11 +1,10 @@
import type { anthropic } from '@ai-sdk/anthropic'
import type { google } from '@ai-sdk/google'
import type { openai } from '@ai-sdk/openai'
import type { InferToolInput, InferToolOutput } from 'ai'
import { type Tool } from 'ai'
import { anthropic } from '@ai-sdk/anthropic'
import { google } from '@ai-sdk/google'
import { openai } from '@ai-sdk/openai'
import { InferToolInput, InferToolOutput, type Tool } from 'ai'

import type { ProviderOptionsMap } from '../../../options/types'
import type { OpenRouterSearchConfig } from './openrouter'
import { ProviderOptionsMap } from '../../../options/types'
import { OpenRouterSearchConfig } from './openrouter'

/**
* 从 AI SDK 的工具函数中提取参数类型,以确保类型安全。

@@ -9,8 +9,7 @@ import { openai } from '@ai-sdk/openai'
import { createOpenRouterOptions, createXaiOptions, mergeProviderOptions } from '../../../options'
import { definePlugin } from '../../'
import type { AiRequestContext } from '../../types'
import type { WebSearchPluginConfig } from './helper'
import { DEFAULT_WEB_SEARCH_CONFIG } from './helper'
import { DEFAULT_WEB_SEARCH_CONFIG, WebSearchPluginConfig } from './helper'

/**
* 网络搜索插件

@@ -1,4 +1,4 @@
import type { AiPlugin, AiRequestContext } from './types'
import { AiPlugin, AiRequestContext } from './types'

/**
* 插件管理器

@@ -5,7 +5,7 @@
* 例如: aihubmix:anthropic:claude-3.5-sonnet
*/

import type { ProviderV2 } from '@ai-sdk/provider'
import { ProviderV2 } from '@ai-sdk/provider'
import { customProvider } from 'ai'
|
||||
|
||||
import { globalRegistryManagement } from './RegistryManagement'
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
* 基于 AI SDK 原生的 createProviderRegistry
|
||||
*/
|
||||
|
||||
import type { EmbeddingModelV2, ImageModelV2, LanguageModelV2, ProviderV2 } from '@ai-sdk/provider'
|
||||
import { EmbeddingModelV2, ImageModelV2, LanguageModelV2, ProviderV2 } from '@ai-sdk/provider'
|
||||
import { createProviderRegistry, type ProviderRegistryProvider } from 'ai'
|
||||
|
||||
type PROVIDERS = Record<string, ProviderV2>
|
||||
|
||||
@@ -7,14 +7,12 @@ import { createAzure } from '@ai-sdk/azure'
|
||||
import { type AzureOpenAIProviderSettings } from '@ai-sdk/azure'
|
||||
import { createDeepSeek } from '@ai-sdk/deepseek'
|
||||
import { createGoogleGenerativeAI } from '@ai-sdk/google'
|
||||
import { createHuggingFace } from '@ai-sdk/huggingface'
|
||||
import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai'
|
||||
import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
|
||||
import type { LanguageModelV2 } from '@ai-sdk/provider'
|
||||
import { LanguageModelV2 } from '@ai-sdk/provider'
|
||||
import { createXai } from '@ai-sdk/xai'
|
||||
import { createOpenRouter } from '@openrouter/ai-sdk-provider'
|
||||
import type { Provider } from 'ai'
|
||||
import { customProvider } from 'ai'
|
||||
import { customProvider, Provider } from 'ai'
|
||||
import * as z from 'zod'
|
||||
|
||||
/**
|
||||
@@ -30,8 +28,7 @@ export const baseProviderIds = [
|
||||
'azure',
|
||||
'azure-responses',
|
||||
'deepseek',
|
||||
'openrouter',
|
||||
'huggingface'
|
||||
'openrouter'
|
||||
] as const
|
||||
|
||||
/**
|
||||
@@ -135,12 +132,6 @@ export const baseProviders = [
|
||||
name: 'OpenRouter',
|
||||
creator: createOpenRouter,
|
||||
supportsImageGeneration: true
|
||||
},
|
||||
{
|
||||
id: 'huggingface',
|
||||
name: 'HuggingFace',
|
||||
creator: createHuggingFace,
|
||||
supportsImageGeneration: true
|
||||
}
|
||||
] as const satisfies BaseProvider[]
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import { type DeepSeekProviderSettings } from '@ai-sdk/deepseek'
|
||||
import { type GoogleGenerativeAIProviderSettings } from '@ai-sdk/google'
|
||||
import { type OpenAIProviderSettings } from '@ai-sdk/openai'
|
||||
import { type OpenAICompatibleProviderSettings } from '@ai-sdk/openai-compatible'
|
||||
import type {
|
||||
import {
|
||||
EmbeddingModelV2 as EmbeddingModel,
|
||||
ImageModelV2 as ImageModel,
|
||||
LanguageModelV2 as LanguageModel,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { ImageModelV2 } from '@ai-sdk/provider'
|
||||
import { ImageModelV2 } from '@ai-sdk/provider'
|
||||
import { experimental_generateImage as aiGenerateImage, NoImageGeneratedError } from 'ai'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
* 运行时执行器
|
||||
* 专注于插件化的AI调用处理
|
||||
*/
|
||||
import type { ImageModelV2, LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
|
||||
import type { LanguageModel } from 'ai'
|
||||
import { ImageModelV2, LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
|
||||
import {
|
||||
experimental_generateImage as _generateImage,
|
||||
generateObject as _generateObject,
|
||||
generateText as _generateText,
|
||||
LanguageModel,
|
||||
streamObject as _streamObject,
|
||||
streamText as _streamText
|
||||
} from 'ai'
|
||||
|
||||
@@ -11,7 +11,7 @@ export type { RuntimeConfig } from './types'
|
||||
|
||||
// === 便捷工厂函数 ===
|
||||
|
||||
import type { LanguageModelV2Middleware } from '@ai-sdk/provider'
|
||||
import { LanguageModelV2Middleware } from '@ai-sdk/provider'
|
||||
|
||||
import { type AiPlugin } from '../plugins'
|
||||
import { type ProviderId, type ProviderSettingsMap } from '../providers/types'
|
||||
|
||||
@@ -1,13 +1,6 @@
|
||||
/* eslint-disable @eslint-react/naming-convention/context-name */
|
||||
import type { ImageModelV2 } from '@ai-sdk/provider'
|
||||
import type {
|
||||
experimental_generateImage,
|
||||
generateObject,
|
||||
generateText,
|
||||
LanguageModel,
|
||||
streamObject,
|
||||
streamText
|
||||
} from 'ai'
|
||||
import { ImageModelV2 } from '@ai-sdk/provider'
|
||||
import { experimental_generateImage, generateObject, generateText, LanguageModel, streamObject, streamText } from 'ai'
|
||||
|
||||
import { type AiPlugin, createContext, PluginManager } from '../plugins'
|
||||
import { type ProviderId } from '../providers/types'
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
/**
|
||||
* Runtime 层类型定义
|
||||
*/
|
||||
import type { ImageModelV2 } from '@ai-sdk/provider'
|
||||
import type { experimental_generateImage, generateObject, generateText, streamObject, streamText } from 'ai'
|
||||
import { ImageModelV2 } from '@ai-sdk/provider'
|
||||
import { experimental_generateImage, generateObject, generateText, streamObject, streamText } from 'ai'
|
||||
|
||||
import { type ModelConfig } from '../models/types'
|
||||
import { type AiPlugin } from '../plugins'
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import type { Node } from '@tiptap/core'
|
||||
import { Extension } from '@tiptap/core'
|
||||
import { Extension, Node } from '@tiptap/core'
|
||||
|
||||
import type { TableCellOptions } from '../cell/index.js'
|
||||
import { TableCell } from '../cell/index.js'
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { SpanKind, SpanStatusCode } from '@opentelemetry/api'
|
||||
import type { ReadableSpan } from '@opentelemetry/sdk-trace-base'
|
||||
import { ReadableSpan } from '@opentelemetry/sdk-trace-base'
|
||||
|
||||
import type { SpanEntity } from '../types/config'
|
||||
import { SpanEntity } from '../types/config'
|
||||
|
||||
/**
|
||||
* convert ReadableSpan to SpanEntity
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { ReadableSpan } from '@opentelemetry/sdk-trace-base'
|
||||
import { ReadableSpan } from '@opentelemetry/sdk-trace-base'
|
||||
|
||||
export interface TraceCache {
|
||||
createSpan: (span: ReadableSpan) => void
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import type { ExportResult } from '@opentelemetry/core'
|
||||
import { ExportResultCode } from '@opentelemetry/core'
|
||||
import type { ReadableSpan, SpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
|
||||
import { ReadableSpan, SpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
|
||||
export type SaveFunction = (spans: ReadableSpan[]) => Promise<void>
|
||||
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import type { Context } from '@opentelemetry/api'
|
||||
import { trace } from '@opentelemetry/api'
|
||||
import type { BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base'
|
||||
import { Context, trace } from '@opentelemetry/api'
|
||||
import { BatchSpanProcessor, BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
|
||||
import type { TraceCache } from '../core/traceCache'
|
||||
import { TraceCache } from '../core/traceCache'
|
||||
|
||||
export class CacheBatchSpanProcessor extends BatchSpanProcessor {
|
||||
private cache: TraceCache
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import type { Context } from '@opentelemetry/api'
|
||||
import type { BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base'
|
||||
import type { EventEmitter } from 'stream'
|
||||
import { Context } from '@opentelemetry/api'
|
||||
import { BatchSpanProcessor, BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
import { EventEmitter } from 'stream'
|
||||
|
||||
import { convertSpanToSpanEntity } from '../core/spanConvert'
|
||||
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import type { Context } from '@opentelemetry/api'
|
||||
import { trace } from '@opentelemetry/api'
|
||||
import type { BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base'
|
||||
import { Context, trace } from '@opentelemetry/api'
|
||||
import { BatchSpanProcessor, BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
|
||||
export type SpanFunction = (span: ReadableSpan) => void
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { Link } from '@opentelemetry/api'
|
||||
import type { TimedEvent } from '@opentelemetry/sdk-trace-base'
|
||||
import { Link } from '@opentelemetry/api'
|
||||
import { TimedEvent } from '@opentelemetry/sdk-trace-base'
|
||||
|
||||
export type AttributeValue =
|
||||
| string
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
import type { Tracer } from '@opentelemetry/api'
|
||||
import { trace } from '@opentelemetry/api'
|
||||
import { trace, Tracer } from '@opentelemetry/api'
|
||||
import { AsyncLocalStorageContextManager } from '@opentelemetry/context-async-hooks'
|
||||
import { W3CTraceContextPropagator } from '@opentelemetry/core'
|
||||
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'
|
||||
import type { SpanProcessor } from '@opentelemetry/sdk-trace-base'
|
||||
import { BatchSpanProcessor, ConsoleSpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
import { BatchSpanProcessor, ConsoleSpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base'
|
||||
import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node'
|
||||
|
||||
import type { TraceConfig } from '../trace-core/types/config'
|
||||
import { defaultConfig } from '../trace-core/types/config'
|
||||
import { defaultConfig, TraceConfig } from '../trace-core/types/config'
|
||||
|
||||
export class NodeTracer {
|
||||
private static provider: NodeTracerProvider
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import type { Context, ContextManager } from '@opentelemetry/api'
|
||||
import { ROOT_CONTEXT } from '@opentelemetry/api'
|
||||
import { Context, ContextManager, ROOT_CONTEXT } from '@opentelemetry/api'
|
||||
|
||||
export class TopicContextManager implements ContextManager {
|
||||
private topicContextStack: Map<string, Context[]>
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import type { Context } from '@opentelemetry/api'
|
||||
import { context } from '@opentelemetry/api'
|
||||
import { Context, context } from '@opentelemetry/api'
|
||||
|
||||
const originalPromise = globalThis.Promise
|
||||
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { W3CTraceContextPropagator } from '@opentelemetry/core'
|
||||
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'
|
||||
import type { SpanProcessor } from '@opentelemetry/sdk-trace-base'
|
||||
import { BatchSpanProcessor, ConsoleSpanExporter } from '@opentelemetry/sdk-trace-base'
|
||||
import { BatchSpanProcessor, ConsoleSpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base'
|
||||
import { WebTracerProvider } from '@opentelemetry/sdk-trace-web'
|
||||
|
||||
import type { TraceConfig } from '../trace-core/types/config'
|
||||
import { defaultConfig } from '../trace-core/types/config'
|
||||
import { defaultConfig, TraceConfig } from '../trace-core/types/config'
|
||||
import { TopicContextManager } from './TopicContextManager'
|
||||
|
||||
export const contextManager = new TopicContextManager()
|
||||
|
||||
@@ -96,10 +96,6 @@ export enum IpcChannel {
AgentMessage_PersistExchange = 'agent-message:persist-exchange',
AgentMessage_GetHistory = 'agent-message:get-history',

AgentToolPermission_Request = 'agent-tool-permission:request',
AgentToolPermission_Response = 'agent-tool-permission:response',
AgentToolPermission_Result = 'agent-tool-permission:result',

//copilot
Copilot_GetAuthMessage = 'copilot:get-auth-message',
Copilot_GetCopilotToken = 'copilot:get-copilot-token',
@@ -322,7 +318,6 @@ export enum IpcChannel {
ApiServer_Stop = 'api-server:stop',
ApiServer_Restart = 'api-server:restart',
ApiServer_GetStatus = 'api-server:get-status',
ApiServer_Ready = 'api-server:ready',
// NOTE: This API is not used.
ApiServer_GetConfig = 'api-server:get-config',
@@ -355,21 +350,5 @@ export enum IpcChannel {
Ovms_StopOVMS = 'ovms:stop-ovms',

// CherryAI
Cherryai_GetSignature = 'cherryai:get-signature',

// Claude Code Plugins
ClaudeCodePlugin_ListAvailable = 'claudeCodePlugin:list-available',
ClaudeCodePlugin_Install = 'claudeCodePlugin:install',
ClaudeCodePlugin_Uninstall = 'claudeCodePlugin:uninstall',
ClaudeCodePlugin_ListInstalled = 'claudeCodePlugin:list-installed',
ClaudeCodePlugin_InvalidateCache = 'claudeCodePlugin:invalidate-cache',
ClaudeCodePlugin_ReadContent = 'claudeCodePlugin:read-content',
ClaudeCodePlugin_WriteContent = 'claudeCodePlugin:write-content',

// WebSocket
WebSocket_Start = 'webSocket:start',
WebSocket_Stop = 'webSocket:stop',
WebSocket_Status = 'webSocket:status',
WebSocket_SendFile = 'webSocket:send-file',
WebSocket_GetAllCandidates = 'webSocket:get-all-candidates'
Cherryai_GetSignature = 'cherryai:get-signature'
}
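The enum above only names channels; the handlers themselves appear later in src/main/ipc.ts. As a rough sketch of how one channel is wired end to end (only the `ipcMain.handle` / `ipcRenderer.invoke` pairing and the channel name come from this compare view; in a real app the two halves live in separate files for the main and preload processes):

```ts
import { ipcMain, ipcRenderer } from 'electron'
import { IpcChannel } from '@shared/IpcChannel'

// Main process: register a handler keyed by the enum value.
ipcMain.handle(IpcChannel.ClaudeCodePlugin_ListAvailable, async () => {
  // ...collect plugin metadata here...
  return { success: true, data: [] }
})

// Preload / renderer side: invoke the same channel and await the result envelope.
export async function listAvailablePlugins() {
  return ipcRenderer.invoke(IpcChannel.ClaudeCodePlugin_ListAvailable)
}
```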
@@ -9,9 +9,9 @@
|
||||
*/
|
||||
|
||||
import Anthropic from '@anthropic-ai/sdk'
|
||||
import type { TextBlockParam } from '@anthropic-ai/sdk/resources'
|
||||
import { TextBlockParam } from '@anthropic-ai/sdk/resources'
|
||||
import { loggerService } from '@logger'
|
||||
import type { Provider } from '@types'
|
||||
import { Provider } from '@types'
|
||||
import type { ModelMessage } from 'ai'
|
||||
|
||||
const logger = loggerService.withContext('anthropic-sdk')
|
||||
|
||||
@@ -470,6 +470,3 @@ export const MACOS_TERMINALS_WITH_COMMANDS: TerminalConfigWithCommand[] = [
|
||||
})
|
||||
}
|
||||
]
|
||||
|
||||
// resources/scripts should be maintained manually
|
||||
export const HOME_CHERRY_DIR = '.cherrystudio'
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { ProcessingStatus } from '@types'
|
||||
import { ProcessingStatus } from '@types'
|
||||
|
||||
export type LoaderReturn = {
|
||||
entriesAdded: number
|
||||
@@ -31,16 +31,3 @@ export type WebviewKeyEvent = {
|
||||
shift: boolean
|
||||
alt: boolean
|
||||
}
|
||||
|
||||
export interface WebSocketStatusResponse {
|
||||
isRunning: boolean
|
||||
port?: number
|
||||
ip?: string
|
||||
clientConnected: boolean
|
||||
}
|
||||
|
||||
export interface WebSocketCandidatesResponse {
|
||||
host: string
|
||||
interface: string
|
||||
priority: number
|
||||
}
|
||||
|
||||
@@ -18,10 +18,8 @@ import { sortedObjectByKeys } from './sort'
// ========== SCRIPT CONFIGURATION AREA - MODIFY SETTINGS HERE ==========
const SCRIPT_CONFIG = {
// 🔧 Concurrency Control Configuration
MAX_CONCURRENT_TRANSLATIONS: process.env.TRANSLATION_MAX_CONCURRENT_REQUESTS
? parseInt(process.env.TRANSLATION_MAX_CONCURRENT_REQUESTS)
: 5, // Max concurrent requests (Make sure the concurrency level does not exceed your provider's limits.)
TRANSLATION_DELAY_MS: process.env.TRANSLATION_DELAY_MS ? parseInt(process.env.TRANSLATION_DELAY_MS) : 500, // Delay between requests to avoid rate limiting (Recommended: 100-500ms, Range: 0-5000ms)
MAX_CONCURRENT_TRANSLATIONS: 5, // Max concurrent requests (Make sure the concurrency level does not exceed your provider's limits.)
TRANSLATION_DELAY_MS: 100, // Delay between requests to avoid rate limiting (Recommended: 100-500ms, Range: 0-5000ms)

// 🔑 API Configuration
API_KEY: process.env.TRANSLATION_API_KEY || '', // API key from environment variable
@@ -58,7 +56,7 @@ Performance Optimization Recommendations:
- For unstable services: MAX_CONCURRENT_TRANSLATIONS=2, TRANSLATION_DELAY_MS=500

Environment Variables:
- TRANSLATION_BASE_LOCALE: Base locale for translation (default: 'en-us')
- BASE_LOCALE: Base locale for translation (default: 'en-us')
- TRANSLATION_BASE_URL: Custom API endpoint URL
- TRANSLATION_MODEL: Custom translation model name
*/
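The configuration above parses the environment variables inline with a ternary and `parseInt`. A small helper in the same spirit (a sketch only; the helper name and the concurrency upper bound are assumptions, while the variable names and the 0-5000 ms delay range come from the comments above):

```ts
// Read an integer environment variable with a default and an allowed range.
function envInt(name: string, fallback: number, min: number, max: number): number {
  const raw = process.env[name]
  const parsed = raw !== undefined ? Number.parseInt(raw, 10) : Number.NaN
  if (Number.isNaN(parsed)) return fallback
  return Math.min(max, Math.max(min, parsed))
}

const MAX_CONCURRENT_TRANSLATIONS = envInt('TRANSLATION_MAX_CONCURRENT_REQUESTS', 5, 1, 20) // upper bound assumed
const TRANSLATION_DELAY_MS = envInt('TRANSLATION_DELAY_MS', 500, 0, 5000)
```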
@@ -260,7 +258,7 @@ const main = async () => {
|
||||
|
||||
const localesDir = path.join(__dirname, '../src/renderer/src/i18n/locales')
|
||||
const translateDir = path.join(__dirname, '../src/renderer/src/i18n/translate')
|
||||
const baseLocale = process.env.TRANSLATION_BASE_LOCALE ?? 'en-us'
|
||||
const baseLocale = process.env.BASE_LOCALE ?? 'en-us'
|
||||
const baseFileName = `${baseLocale}.json`
|
||||
const baseLocalePath = path.join(__dirname, '../src/renderer/src/i18n/locales', baseFileName)
|
||||
if (!fs.existsSync(baseLocalePath)) {
|
||||
@@ -286,7 +284,6 @@ const main = async () => {
|
||||
const translateFiles = getFiles(translateDir)
|
||||
const files = [...localeFiles, ...translateFiles]
|
||||
|
||||
console.info(`📂 Base Locale: ${baseLocale}`)
|
||||
console.info('📂 Files to translate:')
|
||||
files.forEach((filePath) => {
|
||||
const filename = path.basename(filePath, '.json')
|
||||
|
||||
@@ -5,7 +5,7 @@ import { sortedObjectByKeys } from './sort'
|
||||
|
||||
const localesDir = path.join(__dirname, '../src/renderer/src/i18n/locales')
|
||||
const translateDir = path.join(__dirname, '../src/renderer/src/i18n/translate')
|
||||
const baseLocale = process.env.TRANSLATION_BASE_LOCALE ?? 'en-us'
|
||||
const baseLocale = process.env.BASE_LOCALE ?? 'zh-cn'
|
||||
const baseFileName = `${baseLocale}.json`
|
||||
const baseFilePath = path.join(localesDir, baseFileName)
|
||||
|
||||
@@ -13,45 +13,45 @@ type I18NValue = string | { [key: string]: I18NValue }
|
||||
type I18N = { [key: string]: I18NValue }
|
||||
|
||||
/**
|
||||
* Recursively sync target object to match template object structure
|
||||
* 1. Add keys that exist in template but missing in target (with '[to be translated]')
|
||||
* 2. Remove keys that exist in target but not in template
|
||||
* 3. Recursively sync nested objects
|
||||
* 递归同步 target 对象,使其与 template 对象保持一致
|
||||
* 1. 如果 template 中存在 target 中缺少的 key,则添加('[to be translated]')
|
||||
* 2. 如果 target 中存在 template 中不存在的 key,则删除
|
||||
* 3. 对于子对象,递归同步
|
||||
*
|
||||
* @param target Target object (language object to be updated)
|
||||
* @param template Base locale object (Chinese)
|
||||
* @returns Returns whether target was updated
|
||||
* @param target 目标对象(需要更新的语言对象)
|
||||
* @param template 主模板对象(中文)
|
||||
* @returns 返回是否对 target 进行了更新
|
||||
*/
|
||||
function syncRecursively(target: I18N, template: I18N): void {
|
||||
// Add keys that exist in template but missing in target
|
||||
// 添加 template 中存在但 target 中缺少的 key
|
||||
for (const key in template) {
|
||||
if (!(key in target)) {
|
||||
target[key] =
|
||||
typeof template[key] === 'object' && template[key] !== null ? {} : `[to be translated]:${template[key]}`
|
||||
console.log(`Added new property: ${key}`)
|
||||
console.log(`添加新属性:${key}`)
|
||||
}
|
||||
if (typeof template[key] === 'object' && template[key] !== null) {
|
||||
if (typeof target[key] !== 'object' || target[key] === null) {
|
||||
target[key] = {}
|
||||
}
|
||||
// Recursively sync nested objects
|
||||
// 递归同步子对象
|
||||
syncRecursively(target[key], template[key])
|
||||
}
|
||||
}
|
||||
|
||||
// Remove keys that exist in target but not in template
|
||||
// 删除 target 中存在但 template 中没有的 key
|
||||
for (const targetKey in target) {
|
||||
if (!(targetKey in template)) {
|
||||
console.log(`Removed excess property: ${targetKey}`)
|
||||
console.log(`移除多余属性:${targetKey}`)
|
||||
delete target[targetKey]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check JSON object for duplicate keys and collect all duplicates
|
||||
* @param obj Object to check
|
||||
* @returns Returns array of duplicate keys (empty array if no duplicates)
|
||||
* 检查 JSON 对象中是否存在重复键,并收集所有重复键
|
||||
* @param obj 要检查的对象
|
||||
* @returns 返回重复键的数组(若无重复则返回空数组)
|
||||
*/
|
||||
function checkDuplicateKeys(obj: I18N): string[] {
|
||||
const keys = new Set<string>()
|
||||
@@ -62,7 +62,7 @@ function checkDuplicateKeys(obj: I18N): string[] {
|
||||
const fullPath = path ? `${path}.${key}` : key
|
||||
|
||||
if (keys.has(fullPath)) {
|
||||
// When duplicate key found, add to array (avoid duplicate additions)
|
||||
// 发现重复键时,添加到数组中(避免重复添加)
|
||||
if (!duplicateKeys.includes(fullPath)) {
|
||||
duplicateKeys.push(fullPath)
|
||||
}
|
||||
@@ -70,7 +70,7 @@ function checkDuplicateKeys(obj: I18N): string[] {
|
||||
keys.add(fullPath)
|
||||
}
|
||||
|
||||
// Recursively check nested objects
|
||||
// 递归检查子对象
|
||||
if (typeof obj[key] === 'object' && obj[key] !== null) {
|
||||
checkObject(obj[key], fullPath)
|
||||
}
|
||||
@@ -83,7 +83,7 @@ function checkDuplicateKeys(obj: I18N): string[] {
|
||||
|
||||
function syncTranslations() {
|
||||
if (!fs.existsSync(baseFilePath)) {
|
||||
console.error(`Base locale file ${baseFileName} does not exist, please check path or filename`)
|
||||
console.error(`主模板文件 ${baseFileName} 不存在,请检查路径或文件名`)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -92,24 +92,24 @@ function syncTranslations() {
|
||||
try {
|
||||
baseJson = JSON.parse(baseContent)
|
||||
} catch (error) {
|
||||
console.error(`Error parsing ${baseFileName}. ${error}`)
|
||||
console.error(`解析 ${baseFileName} 出错。${error}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if base locale has duplicate keys
|
||||
// 检查主模板是否存在重复键
|
||||
const duplicateKeys = checkDuplicateKeys(baseJson)
|
||||
if (duplicateKeys.length > 0) {
|
||||
throw new Error(`Base locale file ${baseFileName} has the following duplicate keys:\n${duplicateKeys.join('\n')}`)
|
||||
throw new Error(`主模板文件 ${baseFileName} 存在以下重复键:\n${duplicateKeys.join('\n')}`)
|
||||
}
|
||||
|
||||
// Sort base locale
|
||||
// 为主模板排序
|
||||
const sortedJson = sortedObjectByKeys(baseJson)
|
||||
if (JSON.stringify(baseJson) !== JSON.stringify(sortedJson)) {
|
||||
try {
|
||||
fs.writeFileSync(baseFilePath, JSON.stringify(sortedJson, null, 2) + '\n', 'utf-8')
|
||||
console.log(`Base locale has been sorted`)
|
||||
console.log(`主模板已排序`)
|
||||
} catch (error) {
|
||||
console.error(`Error writing ${baseFilePath}.`, error)
|
||||
console.error(`写入 ${baseFilePath} 出错。`, error)
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -124,7 +124,7 @@ function syncTranslations() {
|
||||
.map((filename) => path.join(translateDir, filename))
|
||||
const files = [...localeFiles, ...translateFiles]
|
||||
|
||||
// Sync keys
|
||||
// 同步键
|
||||
for (const filePath of files) {
|
||||
const filename = path.basename(filePath)
|
||||
let targetJson: I18N = {}
|
||||
@@ -132,7 +132,7 @@ function syncTranslations() {
|
||||
const fileContent = fs.readFileSync(filePath, 'utf-8')
|
||||
targetJson = JSON.parse(fileContent)
|
||||
} catch (error) {
|
||||
console.error(`Error parsing ${filename}, skipping this file.`, error)
|
||||
console.error(`解析 ${filename} 出错,跳过此文件。`, error)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -142,9 +142,9 @@ function syncTranslations() {
|
||||
|
||||
try {
|
||||
fs.writeFileSync(filePath, JSON.stringify(sortedJson, null, 2) + '\n', 'utf-8')
|
||||
console.log(`File ${filename} has been sorted and synced to match base locale content`)
|
||||
console.log(`文件 ${filename} 已排序并同步更新为主模板的内容`)
|
||||
} catch (error) {
|
||||
console.error(`Error writing ${filename}. ${error}`)
|
||||
console.error(`写入 ${filename} 出错。${error}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { ApiServerConfig } from '@types'
|
||||
import { ApiServerConfig } from '@types'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
import { loggerService } from '../services/LoggerService'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import crypto from 'crypto'
|
||||
import type { NextFunction, Request, Response } from 'express'
|
||||
import { NextFunction, Request, Response } from 'express'
|
||||
|
||||
import { config } from '../config'
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { NextFunction, Request, Response } from 'express'
|
||||
import { NextFunction, Request, Response } from 'express'
|
||||
|
||||
import { loggerService } from '../../services/LoggerService'
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { Express } from 'express'
|
||||
import { Express } from 'express'
|
||||
import swaggerJSDoc from 'swagger-jsdoc'
|
||||
import swaggerUi from 'swagger-ui-express'
|
||||
|
||||
@@ -171,7 +171,7 @@ const swaggerOptions: swaggerJSDoc.Options = {
|
||||
}
|
||||
]
|
||||
},
|
||||
apis: ['./src/main/apiServer/routes/**/*.ts', './src/main/apiServer/app.ts']
|
||||
apis: ['./src/main/apiServer/routes/*.ts', './src/main/apiServer/app.ts']
|
||||
}
|
||||
|
||||
export function setupOpenAPIDocumentation(app: Express) {
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import { loggerService } from '@logger'
|
||||
import { AgentModelValidationError, agentService, sessionService } from '@main/services/agents'
|
||||
import type { ListAgentsResponse } from '@types'
|
||||
import { type ReplaceAgentRequest, type UpdateAgentRequest } from '@types'
|
||||
import type { Request, Response } from 'express'
|
||||
import { ListAgentsResponse, type ReplaceAgentRequest, type UpdateAgentRequest } from '@types'
|
||||
import { Request, Response } from 'express'
|
||||
|
||||
import type { ValidationRequest } from '../validators/zodValidator'
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import { loggerService } from '@logger'
|
||||
import { MESSAGE_STREAM_TIMEOUT_MS } from '@main/apiServer/config/timeouts'
|
||||
import { createStreamAbortController, STREAM_TIMEOUT_REASON } from '@main/apiServer/utils/createStreamAbortController'
|
||||
import { agentService, sessionMessageService, sessionService } from '@main/services/agents'
|
||||
import type { Request, Response } from 'express'
|
||||
import { Request, Response } from 'express'
|
||||
|
||||
const logger = loggerService.withContext('ApiServerMessagesHandlers')
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import { loggerService } from '@logger'
|
||||
import { AgentModelValidationError, sessionMessageService, sessionService } from '@main/services/agents'
|
||||
import type { ListAgentSessionsResponse, UpdateSessionResponse } from '@types'
|
||||
import { type ReplaceSessionRequest } from '@types'
|
||||
import type { Request, Response } from 'express'
|
||||
import { ListAgentSessionsResponse, type ReplaceSessionRequest, UpdateSessionResponse } from '@types'
|
||||
import { Request, Response } from 'express'
|
||||
|
||||
import type { ValidationRequest } from '../validators/zodValidator'
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { Request, Response } from 'express'
|
||||
import { Request, Response } from 'express'
|
||||
|
||||
import { agentService } from '../../../../services/agents'
|
||||
import { loggerService } from '../../../../services/LoggerService'
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import type { NextFunction, Request, Response } from 'express'
|
||||
import type { ZodType } from 'zod'
|
||||
import { ZodError } from 'zod'
|
||||
import { NextFunction, Request, Response } from 'express'
|
||||
import { ZodError, ZodType } from 'zod'
|
||||
|
||||
export interface ValidationRequest extends Request {
|
||||
validatedBody?: any
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import type { ChatCompletionCreateParams } from '@cherrystudio/openai/resources'
|
||||
import type { Request, Response } from 'express'
|
||||
import express from 'express'
|
||||
import { ChatCompletionCreateParams } from '@cherrystudio/openai/resources'
|
||||
import express, { Request, Response } from 'express'
|
||||
|
||||
import { loggerService } from '../../services/LoggerService'
|
||||
import {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import type { Request, Response } from 'express'
|
||||
import express from 'express'
|
||||
import express, { Request, Response } from 'express'
|
||||
|
||||
import { loggerService } from '../../services/LoggerService'
|
||||
import { mcpApiService } from '../services/mcp'
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import type { MessageCreateParams } from '@anthropic-ai/sdk/resources'
|
||||
import { MessageCreateParams } from '@anthropic-ai/sdk/resources'
|
||||
import { loggerService } from '@logger'
|
||||
import type { Provider } from '@types'
|
||||
import type { Request, Response } from 'express'
|
||||
import express from 'express'
|
||||
import { Provider } from '@types'
|
||||
import express, { Request, Response } from 'express'
|
||||
|
||||
import { messagesService } from '../services/messages'
|
||||
import { getProviderById, validateModelId } from '../utils'
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import type { ApiModelsResponse } from '@types'
|
||||
import { ApiModelsFilterSchema } from '@types'
|
||||
import type { Request, Response } from 'express'
|
||||
import express from 'express'
|
||||
import { ApiModelsFilterSchema, ApiModelsResponse } from '@types'
|
||||
import express, { Request, Response } from 'express'
|
||||
|
||||
import { loggerService } from '../../services/LoggerService'
|
||||
import { modelsService } from '../services/models'
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
import { createServer } from 'node:http'
|
||||
|
||||
import { loggerService } from '@logger'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
|
||||
import { agentService } from '../services/agents'
|
||||
import { windowService } from '../services/WindowService'
|
||||
import { app } from './app'
|
||||
import { config } from './config'
|
||||
|
||||
@@ -45,13 +43,6 @@ export class ApiServer {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.server!.listen(port, host, () => {
|
||||
logger.info('API server started', { host, port })
|
||||
|
||||
// Notify renderer that API server is ready
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
if (mainWindow && !mainWindow.isDestroyed()) {
|
||||
mainWindow.webContents.send(IpcChannel.ApiServer_Ready)
|
||||
}
|
||||
|
||||
resolve()
|
||||
})
|
||||
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import OpenAI from '@cherrystudio/openai'
|
||||
import type { ChatCompletionCreateParams, ChatCompletionCreateParamsStreaming } from '@cherrystudio/openai/resources'
|
||||
import type { Provider } from '@types'
|
||||
import { ChatCompletionCreateParams, ChatCompletionCreateParamsStreaming } from '@cherrystudio/openai/resources'
|
||||
import { Provider } from '@types'
|
||||
|
||||
import { loggerService } from '../../services/LoggerService'
|
||||
import type { ModelValidationError } from '../utils'
|
||||
import { validateModelId } from '../utils'
|
||||
import { ModelValidationError, validateModelId } from '../utils'
|
||||
|
||||
const logger = loggerService.withContext('ChatCompletionService')
|
||||
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
import mcpService from '@main/services/MCPService'
|
||||
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp'
|
||||
import type { JSONRPCMessage, MessageExtraInfo } from '@modelcontextprotocol/sdk/types'
|
||||
import { isJSONRPCRequest, JSONRPCMessageSchema } from '@modelcontextprotocol/sdk/types'
|
||||
import type { MCPServer } from '@types'
|
||||
import {
|
||||
isJSONRPCRequest,
|
||||
JSONRPCMessage,
|
||||
JSONRPCMessageSchema,
|
||||
MessageExtraInfo
|
||||
} from '@modelcontextprotocol/sdk/types'
|
||||
import { MCPServer } from '@types'
|
||||
import { randomUUID } from 'crypto'
|
||||
import { EventEmitter } from 'events'
|
||||
import type { Request, Response } from 'express'
|
||||
import type { IncomingMessage, ServerResponse } from 'http'
|
||||
import { Request, Response } from 'express'
|
||||
import { IncomingMessage, ServerResponse } from 'http'
|
||||
|
||||
import { loggerService } from '../../services/LoggerService'
|
||||
import { getMcpServerById, getMCPServersFromRedux } from '../utils/mcp'
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import type Anthropic from '@anthropic-ai/sdk'
|
||||
import type { MessageCreateParams, MessageStreamEvent } from '@anthropic-ai/sdk/resources'
|
||||
import Anthropic from '@anthropic-ai/sdk'
|
||||
import { MessageCreateParams, MessageStreamEvent } from '@anthropic-ai/sdk/resources'
|
||||
import { loggerService } from '@logger'
|
||||
import anthropicService from '@main/services/AnthropicService'
|
||||
import { buildClaudeCodeSystemMessage, getSdkClient } from '@shared/anthropic'
|
||||
import type { Provider } from '@types'
|
||||
import type { Response } from 'express'
|
||||
import { Provider } from '@types'
|
||||
import { Response } from 'express'
|
||||
|
||||
const logger = loggerService.withContext('MessagesService')
|
||||
const EXCLUDED_FORWARD_HEADERS: ReadonlySet<string> = new Set([
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { isEmpty } from 'lodash'
|
||||
|
||||
import type { ApiModel, ApiModelsFilter, ApiModelsResponse } from '../../../renderer/src/types/apiModels'
|
||||
import { ApiModel, ApiModelsFilter, ApiModelsResponse } from '../../../renderer/src/types/apiModels'
|
||||
import { loggerService } from '../../services/LoggerService'
|
||||
import {
|
||||
getAvailableProviders,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { CacheService } from '@main/services/CacheService'
|
||||
import { loggerService } from '@main/services/LoggerService'
|
||||
import { reduxService } from '@main/services/ReduxService'
|
||||
import type { ApiModel, Model, Provider } from '@types'
|
||||
import { ApiModel, Model, Provider } from '@types'
|
||||
|
||||
const logger = loggerService.withContext('ApiServerUtils')
|
||||
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import { CacheService } from '@main/services/CacheService'
|
||||
import mcpService from '@main/services/MCPService'
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||
import type { ListToolsResult } from '@modelcontextprotocol/sdk/types.js'
|
||||
import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'
|
||||
import type { MCPServer } from '@types'
|
||||
import { CallToolRequestSchema, ListToolsRequestSchema, ListToolsResult } from '@modelcontextprotocol/sdk/types.js'
|
||||
import { MCPServer } from '@types'
|
||||
|
||||
import { loggerService } from '../../services/LoggerService'
|
||||
import { reduxService } from '../../services/ReduxService'
|
||||
|
||||
@@ -21,7 +21,6 @@ import { appMenuService } from './services/AppMenuService'
|
||||
import { configManager } from './services/ConfigManager'
|
||||
import mcpService from './services/MCPService'
|
||||
import { nodeTraceService } from './services/NodeTraceService'
|
||||
import powerMonitorService from './services/PowerMonitorService'
|
||||
import {
|
||||
CHERRY_STUDIO_PROTOCOL,
|
||||
handleProtocolUrl,
|
||||
@@ -31,7 +30,6 @@ import {
|
||||
import selectionService, { initSelectionService } from './services/SelectionService'
|
||||
import { registerShortcuts } from './services/ShortcutService'
|
||||
import { TrayService } from './services/TrayService'
|
||||
import { versionService } from './services/VersionService'
|
||||
import { windowService } from './services/WindowService'
|
||||
import { initWebviewHotkeys } from './services/WebviewService'
|
||||
|
||||
@@ -112,10 +110,6 @@ if (!app.requestSingleInstanceLock()) {
|
||||
// Some APIs can only be used after this event occurs.
|
||||
|
||||
app.whenReady().then(async () => {
|
||||
// Record current version for tracking
|
||||
// A preparation for v2 data refactoring
|
||||
versionService.recordCurrentVersion()
|
||||
|
||||
initWebviewHotkeys()
|
||||
// Set app user model id for windows
|
||||
electronApp.setAppUserModelId(import.meta.env.VITE_MAIN_BUNDLE_ID || 'com.kangfenmao.CherryStudio')
|
||||
@@ -133,7 +127,6 @@ if (!app.requestSingleInstanceLock()) {
|
||||
appMenuService?.setupApplicationMenu()
|
||||
|
||||
nodeTraceService.init()
|
||||
powerMonitorService.init()
|
||||
|
||||
app.on('activate', function () {
|
||||
const mainWindow = windowService.getMainWindow()
|
||||
|
||||
159
src/main/ipc.ts
@@ -8,12 +8,10 @@ import { generateSignature } from '@main/integration/cherryai'
|
||||
import anthropicService from '@main/services/AnthropicService'
|
||||
import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
|
||||
import { handleZoomFactor } from '@main/utils/zoom'
|
||||
import type { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
|
||||
import type { UpgradeChannel } from '@shared/config/constant'
|
||||
import { MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH } from '@shared/config/constant'
|
||||
import { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
|
||||
import { MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH, UpgradeChannel } from '@shared/config/constant'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import type { PluginError } from '@types'
|
||||
import type {
|
||||
import {
|
||||
AgentPersistedMessage,
|
||||
FileMetadata,
|
||||
Notification,
|
||||
@@ -24,12 +22,10 @@ import type {
|
||||
ThemeMode
|
||||
} from '@types'
|
||||
import checkDiskSpace from 'check-disk-space'
|
||||
import type { ProxyConfig } from 'electron'
|
||||
import { BrowserWindow, dialog, ipcMain, session, shell, systemPreferences, webContents } from 'electron'
|
||||
import { BrowserWindow, dialog, ipcMain, ProxyConfig, session, shell, systemPreferences, webContents } from 'electron'
|
||||
import fontList from 'font-list'
|
||||
|
||||
import { agentMessageRepository } from './services/agents/database'
|
||||
import { PluginService } from './services/agents/plugins/PluginService'
|
||||
import { apiServerService } from './services/ApiServerService'
|
||||
import appService from './services/AppService'
|
||||
import AppUpdater from './services/AppUpdater'
|
||||
@@ -50,7 +46,6 @@ import * as NutstoreService from './services/NutstoreService'
|
||||
import ObsidianVaultService from './services/ObsidianVaultService'
|
||||
import { ocrService } from './services/ocr/OcrService'
|
||||
import OvmsManager from './services/OvmsManager'
|
||||
import powerMonitorService from './services/PowerMonitorService'
|
||||
import { proxyManager } from './services/ProxyManager'
|
||||
import { pythonService } from './services/PythonService'
|
||||
import { FileServiceManager } from './services/remotefile/FileServiceManager'
|
||||
@@ -73,7 +68,6 @@ import {
|
||||
import storeSyncService from './services/StoreSyncService'
|
||||
import { themeService } from './services/ThemeService'
|
||||
import VertexAIService from './services/VertexAIService'
|
||||
import WebSocketService from './services/WebSocketService'
|
||||
import { setOpenLinkExternal } from './services/WebviewService'
|
||||
import { windowService } from './services/WindowService'
|
||||
import { calculateDirectorySize, getResourcePath } from './utils'
|
||||
@@ -99,34 +93,13 @@ const vertexAIService = VertexAIService.getInstance()
|
||||
const memoryService = MemoryService.getInstance()
|
||||
const dxtService = new DxtService()
|
||||
const ovmsManager = new OvmsManager()
|
||||
const pluginService = PluginService.getInstance()
|
||||
|
||||
function normalizeError(error: unknown): Error {
|
||||
return error instanceof Error ? error : new Error(String(error))
|
||||
}
|
||||
|
||||
function extractPluginError(error: unknown): PluginError | null {
|
||||
if (error && typeof error === 'object' && 'type' in error && typeof (error as { type: unknown }).type === 'string') {
|
||||
return error as PluginError
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
const appUpdater = new AppUpdater()
|
||||
const notificationService = new NotificationService()
|
||||
|
||||
// Register shutdown handlers
|
||||
powerMonitorService.registerShutdownHandler(() => {
|
||||
appUpdater.setAutoUpdate(false)
|
||||
})
|
||||
|
||||
powerMonitorService.registerShutdownHandler(() => {
|
||||
const mw = windowService.getMainWindow()
|
||||
if (mw && !mw.isDestroyed()) {
|
||||
mw.webContents.send(IpcChannel.App_SaveData)
|
||||
}
|
||||
})
|
||||
// Initialize Python service with main window
|
||||
pythonService.setMainWindow(mainWindow)
|
||||
|
||||
const checkMainWindow = () => {
|
||||
if (!mainWindow || mainWindow.isDestroyed()) {
|
||||
@@ -917,124 +890,4 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
|
||||
// CherryAI
|
||||
ipcMain.handle(IpcChannel.Cherryai_GetSignature, (_, params) => generateSignature(params))
|
||||
|
||||
// Claude Code Plugins
|
||||
ipcMain.handle(IpcChannel.ClaudeCodePlugin_ListAvailable, async () => {
|
||||
try {
|
||||
const data = await pluginService.listAvailable()
|
||||
return { success: true, data }
|
||||
} catch (error) {
|
||||
const pluginError = extractPluginError(error)
|
||||
if (pluginError) {
|
||||
logger.error('Failed to list available plugins', pluginError)
|
||||
return { success: false, error: pluginError }
|
||||
}
|
||||
|
||||
const err = normalizeError(error)
|
||||
logger.error('Failed to list available plugins', err)
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
type: 'TRANSACTION_FAILED',
|
||||
operation: 'list-available',
|
||||
reason: err.message
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.ClaudeCodePlugin_Install, async (_, options) => {
|
||||
try {
|
||||
const data = await pluginService.install(options)
|
||||
return { success: true, data }
|
||||
} catch (error) {
|
||||
logger.error('Failed to install plugin', { options, error })
|
||||
return { success: false, error }
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.ClaudeCodePlugin_Uninstall, async (_, options) => {
|
||||
try {
|
||||
await pluginService.uninstall(options)
|
||||
return { success: true, data: undefined }
|
||||
} catch (error) {
|
||||
logger.error('Failed to uninstall plugin', { options, error })
|
||||
return { success: false, error }
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.ClaudeCodePlugin_ListInstalled, async (_, agentId: string) => {
|
||||
try {
|
||||
const data = await pluginService.listInstalled(agentId)
|
||||
return { success: true, data }
|
||||
} catch (error) {
|
||||
const pluginError = extractPluginError(error)
|
||||
if (pluginError) {
|
||||
logger.error('Failed to list installed plugins', { agentId, error: pluginError })
|
||||
return { success: false, error: pluginError }
|
||||
}
|
||||
|
||||
const err = normalizeError(error)
|
||||
logger.error('Failed to list installed plugins', { agentId, error: err })
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
type: 'TRANSACTION_FAILED',
|
||||
operation: 'list-installed',
|
||||
reason: err.message
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.ClaudeCodePlugin_InvalidateCache, async () => {
|
||||
try {
|
||||
pluginService.invalidateCache()
|
||||
return { success: true, data: undefined }
|
||||
} catch (error) {
|
||||
const pluginError = extractPluginError(error)
|
||||
if (pluginError) {
|
||||
logger.error('Failed to invalidate plugin cache', pluginError)
|
||||
return { success: false, error: pluginError }
|
||||
}
|
||||
|
||||
const err = normalizeError(error)
|
||||
logger.error('Failed to invalidate plugin cache', err)
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
type: 'TRANSACTION_FAILED',
|
||||
operation: 'invalidate-cache',
|
||||
reason: err.message
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.ClaudeCodePlugin_ReadContent, async (_, sourcePath: string) => {
|
||||
try {
|
||||
const data = await pluginService.readContent(sourcePath)
|
||||
return { success: true, data }
|
||||
} catch (error) {
|
||||
logger.error('Failed to read plugin content', { sourcePath, error })
|
||||
return { success: false, error }
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.ClaudeCodePlugin_WriteContent, async (_, options) => {
|
||||
try {
|
||||
await pluginService.writeContent(options.agentId, options.filename, options.type, options.content)
|
||||
return { success: true, data: undefined }
|
||||
} catch (error) {
|
||||
logger.error('Failed to write plugin content', { options, error })
|
||||
return { success: false, error }
|
||||
}
|
||||
})
|
||||
|
||||
// WebSocket
|
||||
ipcMain.handle(IpcChannel.WebSocket_Start, WebSocketService.start)
|
||||
ipcMain.handle(IpcChannel.WebSocket_Stop, WebSocketService.stop)
|
||||
ipcMain.handle(IpcChannel.WebSocket_Status, WebSocketService.getStatus)
|
||||
ipcMain.handle(IpcChannel.WebSocket_SendFile, WebSocketService.sendFile)
|
||||
ipcMain.handle(IpcChannel.WebSocket_GetAllCandidates, WebSocketService.getAllCandidates)
|
||||
}
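The handlers above resolve to `{ success: true, data }` or `{ success: false, error }` envelopes instead of throwing across the IPC boundary. A hypothetical renderer-side wrapper that unwraps that envelope might look like this (the `api.invoke` bridge is assumed and not defined in this diff; the channel string, the `agentId` argument, and the envelope shape are taken from the handlers above):

```ts
// Hypothetical preload bridge exposing ipcRenderer.invoke; not part of the diff.
declare const api: { invoke: (channel: string, ...args: unknown[]) => Promise<any> }

type IpcResult<T> = { success: true; data: T } | { success: false; error: unknown }

export async function listInstalledPlugins(agentId: string): Promise<unknown> {
  const result: IpcResult<unknown> = await api.invoke('claudeCodePlugin:list-installed', agentId)
  if (!result.success) {
    // The error may be a structured PluginError or a normalized Error message.
    throw new Error(`Failed to list installed plugins: ${JSON.stringify(result.error)}`)
  }
  return result.data
}
```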
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
|
||||
import { TraceMethod } from '@mcp-trace/trace-core'
|
||||
import type { ApiClient } from '@types'
|
||||
import { ApiClient } from '@types'
|
||||
|
||||
import EmbeddingsFactory from './EmbeddingsFactory'
|
||||
|
||||
|
||||
@@ -1,15 +1,15 @@
import type { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
import { OllamaEmbeddings } from '@cherrystudio/embedjs-ollama'
import { OpenAiEmbeddings } from '@cherrystudio/embedjs-openai'
import type { ApiClient } from '@types'
import { net } from 'electron'
import { AzureOpenAiEmbeddings } from '@cherrystudio/embedjs-openai/src/azure-openai-embeddings'
import { ApiClient } from '@types'

import { VoyageEmbeddings } from './VoyageEmbeddings'

export default class EmbeddingsFactory {
static create({ embedApiClient, dimensions }: { embedApiClient: ApiClient; dimensions?: number }): BaseEmbeddings {
const batchSize = 10
const { model, provider, apiKey, baseURL } = embedApiClient
const { model, provider, apiKey, apiVersion, baseURL } = embedApiClient
if (provider === 'voyageai') {
return new VoyageEmbeddings({
modelName: model,
@@ -38,13 +38,22 @@ export default class EmbeddingsFactory {
}
})
}
// NOTE: Azure OpenAI also goes through OpenAiEmbeddings; the baseURL is https://xxxx.openai.azure.com/openai/v1
if (apiVersion !== undefined) {
return new AzureOpenAiEmbeddings({
azureOpenAIApiKey: apiKey,
azureOpenAIApiVersion: apiVersion,
azureOpenAIApiDeploymentName: model,
azureOpenAIEndpoint: baseURL,
dimensions,
batchSize
})
}
return new OpenAiEmbeddings({
model,
apiKey,
dimensions,
batchSize,
configuration: { baseURL, fetch: net.fetch as typeof fetch }
configuration: { baseURL }
})
}
}
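A rough usage sketch for the factory above (all concrete values are placeholders; only the `create({ embedApiClient, dimensions })` shape, the destructured fields, and the `apiVersion`-based Azure branch come from the diff):

```ts
import type { ApiClient } from '@types'

import EmbeddingsFactory from './EmbeddingsFactory'

// Placeholder client config. When apiVersion is present, create() takes the
// AzureOpenAiEmbeddings branch; without it, the plain OpenAiEmbeddings path
// with `configuration: { baseURL }` is used instead.
const embedApiClient = {
  provider: 'azure-openai',          // placeholder provider id
  model: 'text-embedding-3-small',   // on Azure this maps to the deployment name
  apiKey: process.env.AZURE_OPENAI_API_KEY ?? '',
  apiVersion: '2024-02-01',          // placeholder API version
  baseURL: 'https://example.openai.azure.com'
} as ApiClient

const embeddings = EmbeddingsFactory.create({ embedApiClient, dimensions: 1536 })
```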
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import type { RAGApplication } from '@cherrystudio/embedjs'
|
||||
import { JsonLoader, LocalPathLoader, TextLoader } from '@cherrystudio/embedjs'
|
||||
import { JsonLoader, LocalPathLoader, RAGApplication, TextLoader } from '@cherrystudio/embedjs'
|
||||
import type { AddLoaderReturn } from '@cherrystudio/embedjs-interfaces'
|
||||
import { WebLoader } from '@cherrystudio/embedjs-loader-web'
|
||||
import { loggerService } from '@logger'
|
||||
import { readTextFileWithAutoEncoding } from '@main/utils/file'
|
||||
import type { LoaderReturn } from '@shared/config/types'
|
||||
import type { FileMetadata, KnowledgeBaseParams } from '@types'
|
||||
import { LoaderReturn } from '@shared/config/types'
|
||||
import { FileMetadata, KnowledgeBaseParams } from '@types'
|
||||
|
||||
import { DraftsExportLoader } from './draftsExportLoader'
|
||||
import { EpubLoader } from './epubLoader'
|
||||
|
||||
@@ -3,8 +3,7 @@ import { cleanString } from '@cherrystudio/embedjs-utils'
|
||||
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'
|
||||
import { loggerService } from '@logger'
|
||||
import md5 from 'md5'
|
||||
import type { OfficeParserConfig } from 'officeparser'
|
||||
import { parseOfficeAsync } from 'officeparser'
|
||||
import { OfficeParserConfig, parseOfficeAsync } from 'officeparser'
|
||||
|
||||
const logger = loggerService.withContext('OdLoader')
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import path from 'node:path'
|
||||
import { loggerService } from '@logger'
|
||||
import { windowService } from '@main/services/WindowService'
|
||||
import { getFileExt, getTempDir } from '@main/utils/file'
|
||||
import type { FileMetadata, PreprocessProvider } from '@types'
|
||||
import { FileMetadata, PreprocessProvider } from '@types'
|
||||
import { PDFDocument } from 'pdf-lib'
|
||||
|
||||
const logger = loggerService.withContext('BasePreprocessProvider')
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { FileMetadata, PreprocessProvider } from '@types'
|
||||
import { FileMetadata, PreprocessProvider } from '@types'
|
||||
|
||||
import BasePreprocessProvider from './BasePreprocessProvider'
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import path from 'node:path'
|
||||
|
||||
import { loggerService } from '@logger'
|
||||
import { fileStorage } from '@main/services/FileStorage'
|
||||
import type { FileMetadata, PreprocessProvider } from '@types'
|
||||
import { FileMetadata, PreprocessProvider } from '@types'
|
||||
import AdmZip from 'adm-zip'
|
||||
import { net } from 'electron'
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import path from 'node:path'
|
||||
|
||||
import { loggerService } from '@logger'
|
||||
import { fileStorage } from '@main/services/FileStorage'
|
||||
import type { FileMetadata, PreprocessProvider } from '@types'
|
||||
import { FileMetadata, PreprocessProvider } from '@types'
|
||||
import AdmZip from 'adm-zip'
|
||||
import { net } from 'electron'
|
||||
|
||||
|
||||