Compare commits
60 Commits
feat/termi...fix/thinki
| Author | SHA1 | Date |
|---|---|---|
|  | 49320936f8 |  |
|  | e268e69597 |  |
|  | 10e78ac60e |  |
|  | 44b2b859da |  |
|  | bfef0c5580 |  |
|  | 1e8055031a |  |
|  | 8e33ff8d90 |  |
|  | a619000340 |  |
|  | 78278ce96d |  |
|  | 76483d828e |  |
|  | 816a92c609 |  |
|  | 83e4d4363f |  |
|  | 1103449a4f |  |
|  | 56c7a7f066 |  |
|  | caa59c4c50 |  |
|  | 2546dfbe5d |  |
|  | 5fea202a7d |  |
|  | 7dce1d776b |  |
|  | 346af4d338 |  |
|  | abd5d3b96f |  |
|  | 49bd298d37 |  |
|  | 714a28ac29 |  |
|  | 0cf81c04c8 |  |
|  | 4186e9c990 |  |
|  | d8f68a6056 |  |
|  | 11bf50e722 |  |
|  | 32a84311aa |  |
|  | 6eaa2b2461 |  |
|  | 9f00f00546 |  |
|  | bd94d23343 |  |
|  | 5f1c14e2c0 |  |
|  | cdc12d5092 |  |
|  | e5967fd874 |  |
|  | e2f1d80697 |  |
|  | 28bc89ac7c |  |
|  | dc06c103e0 |  |
|  | 1f0381aebe |  |
|  | fb02a61a48 |  |
|  | 562fbb3ff7 |  |
|  | 1018ad87b8 |  |
|  | 82ca35fc29 |  |
|  | fe53b0914a |  |
|  | 67a379641f |  |
|  | 9dbc6fbf67 |  |
|  | 8da43ab794 |  |
|  | 2a06c606e1 |  |
|  | b6dcf2f5fa |  |
|  | 68e0d8b0f1 |  |
|  | 7f1c234ac1 |  |
|  | c1fd23742f |  |
|  | d792bf7fe0 |  |
|  | f8a599322f |  |
|  | aa810a7ead |  |
|  | b586e1796e |  |
|  | fa2ec69fa9 |  |
|  | dd8690b592 |  |
|  | 09e6b9741e |  |
|  | 0767952a6f |  |
|  | 72299f833a |  |
|  | 7badaf02b9 |  |
.github/ISSUE_TEMPLATE/0_bug_report.yml (2 changes, vendored)

```diff
@@ -1,4 +1,4 @@
-name: 🐛 Bug Report (English)
+name: 🐛 Bug Report
 description: Create a report to help us improve
 title: '[Bug]: '
 labels: ['BUG']
```

.github/ISSUE_TEMPLATE/1_feature_request.yml (2 changes, vendored)

```diff
@@ -1,4 +1,4 @@
-name: 💡 Feature Request (English)
+name: 💡 Feature Request
 description: Suggest an idea for this project
 title: '[Feature]: '
 labels: ['feature']
```

.github/ISSUE_TEMPLATE/3_others.yml (2 changes, vendored)

```diff
@@ -1,4 +1,4 @@
-name: 🤔 Other Questions (English)
+name: 🤔 Other Questions
 description: Submit questions that don't fit into bug reports or feature requests
 title: '[Other]: '
 body:
```
.github/workflows/auto-i18n.yml (89 changes, vendored)

```diff
@@ -1,4 +1,4 @@
-name: Auto I18N
+name: Auto I18N Weekly

 env:
   TRANSLATION_API_KEY: ${{ secrets.TRANSLATE_API_KEY }}
@@ -7,14 +7,15 @@ env:
   TRANSLATION_BASE_LOCALE: ${{ vars.AUTO_I18N_BASE_LOCALE || 'en-us'}}

 on:
-  pull_request:
-    types: [opened, synchronize, reopened]
+  schedule:
+    # Runs at 00:00 UTC every Sunday.
+    # This corresponds to 08:00 AM UTC+8 (Beijing time) every Sunday.
+    - cron: "0 0 * * 0"
   workflow_dispatch:

 jobs:
   auto-i18n:
     runs-on: ubuntu-latest
-    if: github.event_name == 'workflow_dispatch' || github.event.pull_request.head.repo.full_name == 'CherryHQ/cherry-studio'
     name: Auto I18N
     permissions:
       contents: write
@@ -24,45 +25,69 @@ jobs:
       - name: 🐈‍⬛ Checkout
         uses: actions/checkout@v5
         with:
-          ref: ${{ github.event.pull_request.head.ref }}
           fetch-depth: 0

       - name: 📦 Setting Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
-          node-version: 20
+          package-manager-cache: false
+          node-version: 22

-      - name: 📦 Install dependencies in isolated directory
+      - name: 📦 Install corepack
+        run: corepack enable && corepack prepare yarn@4.9.1 --activate
+
+      - name: 📂 Get yarn cache directory path
+        id: yarn-cache-dir-path
+        run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
+
+      - name: 💾 Cache yarn dependencies
+        uses: actions/cache@v4
+        with:
+          path: |
+            ${{ steps.yarn-cache-dir-path.outputs.dir }}
+            node_modules
+          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-yarn-
+
+      - name: 📦 Install dependencies
         run: |
-          # 在临时目录安装依赖
-          mkdir -p /tmp/translation-deps
-          cd /tmp/translation-deps
-          echo '{"dependencies": {"@cherrystudio/openai": "^6.5.0", "cli-progress": "^3.12.0", "tsx": "^4.20.3", "@biomejs/biome": "2.2.4"}}' > package.json
-          npm install --no-package-lock
-
-          # 设置 NODE_PATH 让项目能找到这些依赖
-          echo "NODE_PATH=/tmp/translation-deps/node_modules" >> $GITHUB_ENV
+          yarn install

       - name: 🏃‍♀️ Translate
-        run: npx tsx scripts/sync-i18n.ts && npx tsx scripts/auto-translate-i18n.ts
+        run: yarn sync:i18n && yarn auto:i18n

       - name: 🔍 Format
-        run: cd /tmp/translation-deps && npx biome format --config-path /home/runner/work/cherry-studio/cherry-studio/biome.jsonc --write /home/runner/work/cherry-studio/cherry-studio/src/renderer/src/i18n/
+        run: yarn format

-      - name: 🔄 Commit changes
+      - name: 🔍 Check for changes
+        id: git_status
         run: |
           git config --local user.email "action@github.com"
           git config --local user.name "GitHub Action"
           git add .
-          # Check if there are any uncommitted changes
-          git reset -- package.json yarn.lock # 不提交 package.json 和 yarn.lock 的更改
-          if git diff --cached --quiet; then
-            echo "No changes to commit"
-          else
-            git commit -m "fix(i18n): Auto update translations for PR #${{ github.event.pull_request.number }}"
-          fi
+          git diff --exit-code --quiet || echo "::set-output name=has_changes::true"
+          git status --porcelain

-      - name: 🚀 Push changes
-        uses: ad-m/github-push-action@master
+      - name: 📅 Set current date for PR title
+        id: set_date
+        run: echo "CURRENT_DATE=$(date +'%b %d, %Y')" >> $GITHUB_ENV # e.g., "Jun 06, 2024"
+
+      - name: 🚀 Create Pull Request if changes exist
+        if: steps.git_status.outputs.has_changes == 'true'
+        uses: peter-evans/create-pull-request@v6
         with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          branch: ${{ github.event.pull_request.head.ref }}
+          token: ${{ secrets.GITHUB_TOKEN }} # Use the built-in GITHUB_TOKEN for bot actions
+          commit-message: "feat(bot): Weekly automated script run"
+          title: "🤖 Weekly Automated Update: ${{ env.CURRENT_DATE }}"
+          body: |
+            This PR includes changes generated by the weekly auto i18n.
+            Review the changes before merging.
+
+            ---
+            _Generated by the automated weekly workflow_
+          branch: "auto-i18n-weekly-${{ github.run_id }}" # Unique branch name
+          base: "main" # Or 'develop', set your base branch
+          delete-branch: true # Delete the branch after merging or closing the PR
+
+      - name: 📢 Notify if no changes
+        if: steps.git_status.outputs.has_changes != 'true'
+        run: echo "Bot script ran, but no changes were detected. No PR created."
```
.github/workflows/github-issue-tracker.yml (10 changes, vendored)

```diff
@@ -5,7 +5,7 @@ on:
     types: [opened]
   schedule:
     # Run every day at 8:30 Beijing Time (00:30 UTC)
-    - cron: '30 0 * * *'
+    - cron: "30 0 * * *"
   workflow_dispatch:

 jobs:
@@ -54,9 +54,9 @@ jobs:

       - name: Setup Node.js
         if: steps.check_time.outputs.should_delay == 'false'
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
-          node-version: '20'
+          node-version: 22

       - name: Process issue with Claude
         if: steps.check_time.outputs.should_delay == 'false'
@@ -121,9 +121,9 @@ jobs:
         uses: actions/checkout@v4

       - name: Setup Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
-          node-version: '20'
+          node-version: 22

       - name: Process pending issues with Claude
         uses: anthropics/claude-code-action@main
```

.github/workflows/issue-management.yml (4 changes, vendored)

```diff
@@ -21,7 +21,7 @@ jobs:
       contents: none
     steps:
       - name: Close needs-more-info issues
-        uses: actions/stale@v9
+        uses: actions/stale@v10
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           only-labels: 'needs-more-info'
@@ -42,7 +42,7 @@ jobs:
          days-before-pr-close: -1

      - name: Close inactive issues
-        uses: actions/stale@v9
+        uses: actions/stale@v10
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: ${{ env.daysBeforeStale }}
```

.github/workflows/nightly-build.yml (10 changes, vendored)

```diff
@@ -3,7 +3,7 @@ name: Nightly Build
 on:
   workflow_dispatch:
   schedule:
-    - cron: '0 17 * * *' # 1:00 BJ Time
+    - cron: "0 17 * * *" # 1:00 BJ Time

 permissions:
   contents: write
@@ -56,9 +56,9 @@ jobs:
           ref: main

       - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
-          node-version: 20
+          node-version: 22

       - name: macos-latest dependencies fix
         if: matrix.os == 'macos-latest'
@@ -66,7 +66,7 @@ jobs:
           brew install python-setuptools

       - name: Install corepack
-        run: corepack enable && corepack prepare yarn@4.6.0 --activate
+        run: corepack enable && corepack prepare yarn@4.9.1 --activate

       - name: Get yarn cache directory path
         id: yarn-cache-dir-path
@@ -208,7 +208,7 @@ jobs:
           echo "总计: $(find renamed-artifacts -type f | wc -l) 个文件"

       - name: Upload artifacts
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: cherry-studio-nightly-${{ steps.date.outputs.date }}-${{ matrix.os }}
           path: renamed-artifacts/*
```
.github/workflows/pr-ci.yml (6 changes, vendored)

```diff
@@ -24,12 +24,12 @@ jobs:
         uses: actions/checkout@v5

       - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
-          node-version: 20
+          node-version: 22

       - name: Install corepack
-        run: corepack enable && corepack prepare yarn@4.6.0 --activate
+        run: corepack enable && corepack prepare yarn@4.9.1 --activate

       - name: Get yarn cache directory path
         id: yarn-cache-dir-path
```

.github/workflows/release.yml (12 changes, vendored)

```diff
@@ -4,9 +4,9 @@ on:
   workflow_dispatch:
     inputs:
       tag:
-        description: 'Release tag (e.g. v1.0.0)'
+        description: "Release tag (e.g. v1.0.0)"
         required: true
-        default: 'v1.0.0'
+        default: "v1.0.0"
   push:
     tags:
       - v*.*.*
@@ -47,9 +47,9 @@ jobs:
           npm version "$VERSION" --no-git-tag-version --allow-same-version

       - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
-          node-version: 20
+          node-version: 22

       - name: macos-latest dependencies fix
         if: matrix.os == 'macos-latest'
@@ -57,7 +57,7 @@ jobs:
           brew install python-setuptools

       - name: Install corepack
-        run: corepack enable && corepack prepare yarn@4.6.0 --activate
+        run: corepack enable && corepack prepare yarn@4.9.1 --activate

       - name: Get yarn cache directory path
         id: yarn-cache-dir-path
@@ -127,5 +127,5 @@ jobs:
           allowUpdates: true
           makeLatest: false
           tag: ${{ steps.get-tag.outputs.tag }}
-          artifacts: 'dist/*.exe,dist/*.zip,dist/*.dmg,dist/*.AppImage,dist/*.snap,dist/*.deb,dist/*.rpm,dist/*.tar.gz,dist/latest*.yml,dist/rc*.yml,dist/beta*.yml,dist/*.blockmap'
+          artifacts: "dist/*.exe,dist/*.zip,dist/*.dmg,dist/*.AppImage,dist/*.snap,dist/*.deb,dist/*.rpm,dist/*.tar.gz,dist/latest*.yml,dist/rc*.yml,dist/beta*.yml,dist/*.blockmap"
           token: ${{ secrets.GITHUB_TOKEN }}
```
```diff
@@ -140,7 +140,7 @@
     "typescript/await-thenable": "warn",
     // "typescript/ban-ts-comment": "error",
     "typescript/no-array-constructor": "error",
-    // "typescript/consistent-type-imports": "error",
+    "typescript/consistent-type-imports": "error",
     "typescript/no-array-delete": "warn",
     "typescript/no-base-to-string": "warn",
     "typescript/no-duplicate-enum-values": "error",
```
@@ -1,71 +0,0 @@ (patch file deleted; its former contents follow)

```diff
diff --git a/dist/utils/tiktoken.cjs b/dist/utils/tiktoken.cjs
index 973b0d0e75aeaf8de579419af31b879b32975413..f23c7caa8b9dc8bd404132725346a4786f6b278b 100644
--- a/dist/utils/tiktoken.cjs
+++ b/dist/utils/tiktoken.cjs
@@ -1,25 +1,14 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.encodingForModel = exports.getEncoding = void 0;
-const lite_1 = require("js-tiktoken/lite");
 const async_caller_js_1 = require("./async_caller.cjs");
 const cache = {};
 const caller = /* #__PURE__ */ new async_caller_js_1.AsyncCaller({});
 async function getEncoding(encoding) {
-    if (!(encoding in cache)) {
-        cache[encoding] = caller
-            .fetch(`https://tiktoken.pages.dev/js/${encoding}.json`)
-            .then((res) => res.json())
-            .then((data) => new lite_1.Tiktoken(data))
-            .catch((e) => {
-            delete cache[encoding];
-            throw e;
-        });
-    }
-    return await cache[encoding];
+    throw new Error("TikToken Not implemented");
 }
 exports.getEncoding = getEncoding;
 async function encodingForModel(model) {
-    return getEncoding((0, lite_1.getEncodingNameForModel)(model));
+    throw new Error("TikToken Not implemented");
 }
 exports.encodingForModel = encodingForModel;
diff --git a/dist/utils/tiktoken.js b/dist/utils/tiktoken.js
index 8e41ee6f00f2f9c7fa2c59fa2b2f4297634b97aa..aa5f314a6349ad0d1c5aea8631a56aad099176e0 100644
--- a/dist/utils/tiktoken.js
+++ b/dist/utils/tiktoken.js
@@ -1,20 +1,9 @@
-import { Tiktoken, getEncodingNameForModel, } from "js-tiktoken/lite";
 import { AsyncCaller } from "./async_caller.js";
 const cache = {};
 const caller = /* #__PURE__ */ new AsyncCaller({});
 export async function getEncoding(encoding) {
-    if (!(encoding in cache)) {
-        cache[encoding] = caller
-            .fetch(`https://tiktoken.pages.dev/js/${encoding}.json`)
-            .then((res) => res.json())
-            .then((data) => new Tiktoken(data))
-            .catch((e) => {
-            delete cache[encoding];
-            throw e;
-        });
-    }
-    return await cache[encoding];
+    throw new Error("TikToken Not implemented");
 }
 export async function encodingForModel(model) {
-    return getEncoding(getEncodingNameForModel(model));
+    throw new Error("TikToken Not implemented");
 }
diff --git a/package.json b/package.json
index 36072aecf700fca1bc49832a19be832eca726103..90b8922fba1c3d1b26f78477c891b07816d6238a 100644
--- a/package.json
+++ b/package.json
@@ -37,7 +37,6 @@
     "ansi-styles": "^5.0.0",
     "camelcase": "6",
     "decamelize": "1.2.0",
-    "js-tiktoken": "^1.0.12",
     "langsmith": ">=0.2.8 <0.4.0",
     "mustache": "^4.2.0",
     "p-queue": "^6.6.2",
```
.yarn/patches/@langchain-core-npm-1.0.2-183ef83fe4.patch (68 changes, vendored, new file)

@@ -0,0 +1,68 @@

```diff
diff --git a/dist/utils/tiktoken.cjs b/dist/utils/tiktoken.cjs
index c5b41f121d2e3d24c3a4969e31fa1acffdcad3b9..ec724489dcae79ee6c61acf2d4d84bd19daef036 100644
--- a/dist/utils/tiktoken.cjs
+++ b/dist/utils/tiktoken.cjs
@@ -1,6 +1,5 @@
 const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
 const require_utils_async_caller = require('./async_caller.cjs');
-const js_tiktoken_lite = require_rolldown_runtime.__toESM(require("js-tiktoken/lite"));

 //#region src/utils/tiktoken.ts
 var tiktoken_exports = {};
@@ -11,14 +10,10 @@ require_rolldown_runtime.__export(tiktoken_exports, {
 const cache = {};
 const caller = /* @__PURE__ */ new require_utils_async_caller.AsyncCaller({});
 async function getEncoding(encoding) {
-	if (!(encoding in cache)) cache[encoding] = caller.fetch(`https://tiktoken.pages.dev/js/${encoding}.json`).then((res) => res.json()).then((data) => new js_tiktoken_lite.Tiktoken(data)).catch((e) => {
-		delete cache[encoding];
-		throw e;
-	});
-	return await cache[encoding];
+	throw new Error("TikToken Not implemented");
 }
 async function encodingForModel(model) {
-	return getEncoding((0, js_tiktoken_lite.getEncodingNameForModel)(model));
+	throw new Error("TikToken Not implemented");
 }

 //#endregion
diff --git a/dist/utils/tiktoken.js b/dist/utils/tiktoken.js
index 641acca03cb92f04a6fa5c9c31f1880ce635572e..707389970ad957aa0ff20ef37fa8dd2875be737c 100644
--- a/dist/utils/tiktoken.js
+++ b/dist/utils/tiktoken.js
@@ -1,6 +1,5 @@
 import { __export } from "../_virtual/rolldown_runtime.js";
 import { AsyncCaller } from "./async_caller.js";
-import { Tiktoken, getEncodingNameForModel } from "js-tiktoken/lite";

 //#region src/utils/tiktoken.ts
 var tiktoken_exports = {};
@@ -11,14 +10,10 @@ __export(tiktoken_exports, {
 const cache = {};
 const caller = /* @__PURE__ */ new AsyncCaller({});
 async function getEncoding(encoding) {
-	if (!(encoding in cache)) cache[encoding] = caller.fetch(`https://tiktoken.pages.dev/js/${encoding}.json`).then((res) => res.json()).then((data) => new Tiktoken(data)).catch((e) => {
-		delete cache[encoding];
-		throw e;
-	});
-	return await cache[encoding];
+	throw new Error("TikToken Not implemented");
 }
 async function encodingForModel(model) {
-	return getEncoding(getEncodingNameForModel(model));
+	throw new Error("TikToken Not implemented");
 }

 //#endregion
diff --git a/package.json b/package.json
index a24f8fc61de58526051999260f2ebee5f136354b..e885359e8966e7730c51772533ce37e01edb3046 100644
--- a/package.json
+++ b/package.json
@@ -20,7 +20,6 @@
     "ansi-styles": "^5.0.0",
     "camelcase": "6",
     "decamelize": "1.2.0",
-    "js-tiktoken": "^1.0.12",
     "langsmith": "^0.3.64",
     "mustache": "^4.2.0",
     "p-queue": "^6.6.2",
```
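The new @langchain/core patch removes the `js-tiktoken` dependency and makes both `getEncoding` and `encodingForModel` throw instead of fetching encodings from `tiktoken.pages.dev` at runtime. Any caller that previously relied on LangChain's built-in token counting therefore needs its own guard. The sketch below is one minimal way to do that, assuming the `@langchain/core/utils/tiktoken` entry point; the roughly 4-characters-per-token fallback ratio is an assumed heuristic, not something defined by this patch.

```typescript
// Sketch: guard a call into the patched module. Both exported functions now
// throw "TikToken Not implemented", so we fall back to a rough estimate
// instead of a network fetch. The 4 chars/token ratio is an assumption.
import { encodingForModel } from '@langchain/core/utils/tiktoken'

export async function countTokensSafely(
  model: Parameters<typeof encodingForModel>[0],
  text: string
): Promise<number> {
  try {
    const encoding = await encodingForModel(model)
    return encoding.encode(text).length
  } catch {
    // Patched build: no encoding data available, estimate instead.
    return Math.ceil(text.length / 4)
  }
}
```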
@@ -1,19 +0,0 @@ (patch file deleted; its former contents follow)

```diff
diff --git a/dist/embeddings.js b/dist/embeddings.js
index 1f8154be3e9c22442a915eb4b85fa6d2a21b0d0c..dc13ef4a30e6c282824a5357bcee9bd0ae222aab 100644
--- a/dist/embeddings.js
+++ b/dist/embeddings.js
@@ -214,10 +214,12 @@ export class OpenAIEmbeddings extends Embeddings {
      * @returns Promise that resolves to an embedding for the document.
      */
     async embedQuery(text) {
+        const isBaiduCloud = this.clientConfig.baseURL.includes('baidubce.com')
+        const input = this.stripNewLines ? text.replace(/\n/g, ' ') : text
         const params = {
             model: this.model,
-            input: this.stripNewLines ? text.replace(/\n/g, " ") : text,
-        };
+            input: isBaiduCloud ? [input] : input
+        }
         if (this.dimensions) {
             params.dimensions = this.dimensions;
         }
```
.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch (17 changes, vendored, new file)

@@ -0,0 +1,17 @@

```diff
diff --git a/dist/embeddings.js b/dist/embeddings.js
index 6f4b928d3e4717309382e1b5c2e31ab5bc6c5af0..bc79429c88a6d27d4997a2740c4d8ae0707f5991 100644
--- a/dist/embeddings.js
+++ b/dist/embeddings.js
@@ -94,9 +94,11 @@ var OpenAIEmbeddings = class extends Embeddings {
      * @returns Promise that resolves to an embedding for the document.
      */
     async embedQuery(text) {
+        const isBaiduCloud = this.clientConfig.baseURL.includes('baidubce.com');
+        const input = this.stripNewLines ? text.replace(/\n/g, " ") : text
         const params = {
             model: this.model,
-            input: this.stripNewLines ? text.replace(/\n/g, " ") : text
+            input: isBaiduCloud ? [input] : input
         };
         if (this.dimensions) params.dimensions = this.dimensions;
         if (this.encodingFormat) params.encoding_format = this.encodingFormat;
```
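Both the removed 0.3.16 patch and this new 1.0.0 patch carry the same workaround forward: when the configured base URL points at Baidu's cloud (`baidubce.com`), `OpenAIEmbeddings.embedQuery` sends `input` as a single-element array rather than a bare string, which that OpenAI-compatible endpoint is assumed to require. A standalone sketch of the same guard, with illustrative names that are not part of the repository:

```typescript
// Hypothetical helper mirroring the behavior the patch adds to embedQuery():
// Baidu's endpoint is assumed to reject a bare string for `input`, while
// other OpenAI-compatible providers accept either form.
type EmbeddingInput = string | string[]

export function normalizeEmbeddingInput(
  baseURL: string,
  text: string,
  stripNewLines = true
): EmbeddingInput {
  const cleaned = stripNewLines ? text.replace(/\n/g, ' ') : text
  const isBaiduCloud = baseURL.includes('baidubce.com')
  return isBaiduCloud ? [cleaned] : cleaned
}

// normalizeEmbeddingInput('https://qianfan.baidubce.com/v2', 'hello\nworld')
// -> ['hello world']; any other base URL -> 'hello world'
```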
````diff
@@ -7,7 +7,6 @@ This file provides guidance to AI coding assistants when working with code in th
 - **Keep it clear**: Write code that is easy to read, maintain, and explain.
 - **Match the house style**: Reuse existing patterns, naming, and conventions.
 - **Search smart**: Prefer `ast-grep` for semantic queries; fall back to `rg`/`grep` when needed.
-- **Build with HeroUI**: Use HeroUI for every new UI component; never add `antd` or `styled-components`.
 - **Log centrally**: Route all logging through `loggerService` with the right context—no `console.log`.
 - **Research via subagent**: Lean on `subagent` for external docs, APIs, news, and references.
 - **Always propose before executing**: Before making any changes, clearly explain your planned approach and wait for explicit user approval to ensure alignment and prevent unwanted modifications.
@@ -41,7 +40,6 @@ This file provides guidance to AI coding assistants when working with code in th
 - **Services** (`src/main/services/`): MCPService, KnowledgeService, WindowService, etc.
 - **Build System**: Electron-Vite with experimental rolldown-vite, yarn workspaces.
 - **State Management**: Redux Toolkit (`src/renderer/src/store/`) for predictable state.
-- **UI Components**: HeroUI (`@heroui/*`) for all new UI elements.

 ### Logging
 ```typescript
````

```diff
@@ -77,7 +77,7 @@ Please review the following critical information before submitting your Pull Req
 Our core team is currently focused on significant architectural updates that involve these data structures. To ensure stability and focus during this period, contributions of this nature will be temporarily managed internally.

 * **PRs that require changes to Redux state shape or IndexedDB schemas will be closed.**
-* **This restriction is temporary and will be lifted with the release of `v2.0.0`.** You can track the progress of `v2.0.0` and its related discussions on issue [#10162](https://github.com/YOUR_ORG/YOUR_REPO/issues/10162) (please replace with your actual repo link).
+* **This restriction is temporary and will be lifted with the release of `v2.0.0`.** You can track the progress of `v2.0.0` and its related discussions on issue [#10162](https://github.com/CherryHQ/cherry-studio/pull/10162).

 We highly encourage contributions for:
 * Bug fixes 🐞
```

```diff
@@ -23,7 +23,7 @@
   },
   "files": {
     "ignoreUnknown": false,
-    "includes": ["**"],
+    "includes": ["**", "!**/.claude/**"],
     "maxSize": 2097152
   },
   "formatter": {
```

```diff
@@ -81,7 +81,7 @@ git commit --signoff -m "Your commit message"
 我们的核心团队目前正专注于涉及这些数据结构的关键架构更新和基础工作。为确保在此期间的稳定性与专注,此类贡献将暂时由内部进行管理。

 * **需要更改 Redux 状态结构或 IndexedDB schema 的 PR 将会被关闭。**
-* **此限制是临时性的,并将在 `v2.0.0` 版本发布后解除。** 您可以通过 Issue [#10162](https://github.com/YOUR_ORG/YOUR_REPO/issues/10162) (请替换为您的实际仓库链接) 跟踪 `v2.0.0` 的进展及相关讨论。
+* **此限制是临时性的,并将在 `v2.0.0` 版本发布后解除。** 您可以通过 Issue [#10162](https://github.com/CherryHQ/cherry-studio/pull/10162) 跟踪 `v2.0.0` 的进展及相关讨论。

 我们非常鼓励以下类型的贡献:
 * 错误修复 🐞
```
````diff
@@ -18,13 +18,13 @@ yarn

 ### Setup Node.js

-Download and install [Node.js v20.x.x](https://nodejs.org/en/download)
+Download and install [Node.js v22.x.x](https://nodejs.org/en/download)

 ### Setup Yarn

 ```bash
 corepack enable
-corepack prepare yarn@4.6.0 --activate
+corepack prepare yarn@4.9.1 --activate
 ```

 ### Install Dependencies
````

```diff
@@ -11,6 +11,8 @@ The Test Plan is divided into the RC channel and the Beta channel, with the foll
 Users can enable the "Test Plan" and select the version channel in the software's `Settings` > `About`. Please note that the versions in the "Test Plan" cannot guarantee data consistency, so be sure to back up your data before using them.

+After enabling the RC channel or Beta channel, if a stable version is released, users will still be upgraded to the stable version.
+
 Users are welcome to submit issues or provide feedback through other channels for any bugs encountered during testing. Your feedback is very important to us.

 ## Developer Guide
```

```diff
@@ -11,6 +11,8 @@
 用户可以在软件的`设置`-`关于`中,开启“测试计划”并选择版本通道。请注意“测试计划”的版本无法保证数据的一致性,请使用前一定要备份数据。

+用户选择RC版通道或Beta版通道后,若发布了正式版,仍旧会升级到正式版。
+
 用户在测试过程中发现的BUG,欢迎提交issue或通过其他渠道反馈。用户的反馈对我们非常重要。

 ## 开发者指南
```
```diff
@@ -21,6 +21,8 @@ files:
   - "**/*"
   - "!**/{.vscode,.yarn,.yarn-lock,.github,.cursorrules,.prettierrc}"
   - "!electron.vite.config.{js,ts,mjs,cjs}}"
+  - "!.*"
+  - "!components.json"
   - "!**/{.eslintignore,.eslintrc.js,.eslintrc.json,.eslintcache,root.eslint.config.js,eslint.config.js,.eslintrc.cjs,.prettierignore,.prettierrc.yaml,eslint.config.mjs,dev-app-update.yml,CHANGELOG.md,README.md,biome.jsonc}"
   - "!**/{.env,.env.*,.npmrc,pnpm-lock.yaml}"
   - "!**/{tsconfig.json,tsconfig.tsbuildinfo,tsconfig.node.json,tsconfig.web.json}"
@@ -133,60 +135,59 @@ artifactBuildCompleted: scripts/artifact-build-completed.js
 releaseInfo:
   releaseNotes: |
     <!--LANG:en-->
-    What's New in v1.7.0-beta.2
+    What's New in v1.7.0-beta.4

     Major Changes:
     - UI Framework Upgrade: Improved performance and user experience with new design system
     - App Menu i18n: Menu now supports multiple languages and syncs with app language settings

     New Features:
     - Session Settings: Manage session-specific settings and model configurations independently
     - Notes Full-Text Search: Search across all notes with match highlighting
     - Built-in DiDi MCP Server: Integration with DiDi ride-hailing services (China only)
     - Intel OV OCR: Hardware-accelerated OCR using Intel NPU
     - Auto-start API Server: Automatically starts when agents exist
     - AWS Bedrock API Key: Support Bedrock API key authentication with Extended Thinking (reasoning) capability
     - SophNet Provider: Added support for SophNet LLM provider
     - Auto Session Rename: Agent sessions automatically rename based on conversation topics
     - TopP Parameter: Added TopP parameter support for more precise model control
-    - Reasoning Effort Control: Quick access to reasoning effort settings in input bar

     Improvements:
     - Agent model selection now requires explicit user choice
     - Added Mistral AI provider support
     - Added NewAPI generic provider support
     - Improved navbar layout consistency across different modes
     - Enhanced chat component responsiveness
     - Better code block display on small screens
     - Updated OVMS to 2025.3 official release
     - Added Greek language support
     - Topics & Sessions: Enhanced UI with better styling and smoother interactions
     - Quick Panel: Improved option visibility and control
     - Painting Models: Smarter model initialization with better defaults
     - System Shutdown: Better handling of shutdown events to prevent data loss
     - Smaller Package Size: Optimized build configuration for faster downloads

     Bug Fixes:
     - Fixed GitHub Copilot gpt-5-codex streaming issues
     - Fixed assistant creation failures
     - Fixed translate auto-copy functionality
     - Fixed miniapps external link opening
     - Fixed message layout and overflow issues
     - Fixed API key parsing to preserve spaces
     - Fixed agent display in different navbar layouts
     - Fixed Perplexity provider support and API host formatting
     - Fixed CherryAI provider support and API host formatting
     - Fixed i18n translations for painting image size options
     - Fixed agent session message token usage tracking
     - Fixed prompt stream handling on completion or error
     - Fixed message API initialization issues

     <!--LANG:zh-CN-->
-    v1.7.0-beta.2 新特性
+    v1.7.0-beta.4 新特性

     重大变更:
     - UI 框架升级:采用新设计系统,提升性能和用户体验
     - 应用菜单国际化:菜单支持多语言,并自动同步应用语言设置

     新功能:
     - 会话设置:独立管理会话特定的设置和模型配置
     - 笔记全文搜索:跨所有笔记搜索并高亮匹配内容
     - 内置滴滴 MCP 服务器:集成滴滴打车服务(仅限中国地区)
     - Intel OV OCR:使用 Intel NPU 的硬件加速 OCR
     - 自动启动 API 服务器:当存在 Agent 时自动启动
     - AWS Bedrock API 密钥:支持 Bedrock API 密钥身份验证,并支持扩展思考(推理)能力
     - SophNet 提供商:添加 SophNet LLM 提供商支持
     - 自动会话重命名:Agent 会话根据对话主题自动重命名
     - TopP 参数:添加 TopP 参数支持,更精确控制模型输出

     改进:
     - Agent 模型选择现在需要用户显式选择
     - 添加 Mistral AI 提供商支持
     - 添加 NewAPI 通用提供商支持
     - 改进不同模式下的导航栏布局一致性
     - 增强聊天组件响应式设计
     - 优化小屏幕代码块显示
     - 更新 OVMS 至 2025.3 正式版
     - 添加希腊语支持
     - 主题和会话:增强 UI,改进样式和交互体验
     - 快速面板:改进选项可见性和控制
     - 绘图模型:更智能的模型初始化和更好的默认值
     - 系统关机:更好地处理关机事件,防止数据丢失
     - 更小的安装包:优化构建配置,下载更快

     问题修复:
     - 修复 GitHub Copilot gpt-5-codex 流式传输问题
     - 修复助手创建失败
     - 修复翻译自动复制功能
     - 修复小程序外部链接打开
     - 修复消息布局和溢出问题
     - 修复 API 密钥解析以保留空格
     - 修复不同导航栏布局中的 Agent 显示
     - 修复 Perplexity 提供商支持和 API 主机格式化
     - 修复 CherryAI 提供商支持和 API 主机格式化
     - 修复绘图图像大小选项的 i18n 翻译
     - 修复 Agent 会话消息的 token 使用量跟踪
     - 修复完成或错误时的提示流处理
     - 修复消息 API 初始化问题
     <!--LANG:END-->
```
package.json (27 changes)

```diff
@@ -1,6 +1,6 @@
 {
   "name": "CherryStudio",
-  "version": "1.7.0-beta.2",
+  "version": "1.7.0-beta.3",
   "private": true,
   "description": "A powerful AI assistant for producer.",
   "main": "./out/main/index.js",
@@ -82,6 +82,7 @@
     "@libsql/client": "0.14.0",
     "@libsql/win32-x64-msvc": "^0.4.7",
     "@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
+    "@paymoapp/electron-shutdown-handler": "^1.1.2",
     "@strongtz/win32-arm64-msvc": "^0.4.7",
     "express": "^5.1.0",
     "font-list": "^2.0.0",
@@ -92,8 +93,10 @@
     "node-stream-zip": "^1.15.0",
     "officeparser": "^4.2.0",
     "os-proxy-config": "^1.1.2",
+    "qrcode.react": "^4.2.0",
     "selection-hook": "^1.0.12",
     "sharp": "^0.34.3",
+    "socket.io": "^4.8.1",
     "swagger-jsdoc": "^6.2.8",
     "swagger-ui-express": "^5.0.1",
     "tesseract.js": "patch:tesseract.js@npm%3A6.0.1#~/.yarn/patches/tesseract.js-npm-6.0.1-2562a7e46d.patch",
@@ -111,9 +114,9 @@
     "@ant-design/v5-patch-for-react-19": "^1.0.3",
     "@anthropic-ai/sdk": "^0.41.0",
     "@anthropic-ai/vertex-sdk": "patch:@anthropic-ai/vertex-sdk@npm%3A0.11.4#~/.yarn/patches/@anthropic-ai-vertex-sdk-npm-0.11.4-c19cb41edb.patch",
-    "@aws-sdk/client-bedrock": "^3.840.0",
-    "@aws-sdk/client-bedrock-runtime": "^3.840.0",
-    "@aws-sdk/client-s3": "^3.840.0",
+    "@aws-sdk/client-bedrock": "^3.910.0",
+    "@aws-sdk/client-bedrock-runtime": "^3.910.0",
+    "@aws-sdk/client-s3": "^3.910.0",
     "@biomejs/biome": "2.2.4",
     "@cherrystudio/ai-core": "workspace:^1.0.0-alpha.18",
     "@cherrystudio/embedjs": "^0.1.31",
@@ -144,9 +147,10 @@
     "@eslint/js": "^9.22.0",
     "@google/genai": "patch:@google/genai@npm%3A1.0.1#~/.yarn/patches/@google-genai-npm-1.0.1-e26f0f9af7.patch",
     "@hello-pangea/dnd": "^18.0.1",
     "@heroui/react": "^2.8.3",
     "@kangfenmao/keyv-storage": "^0.1.0",
-    "@langchain/community": "^0.3.50",
+    "@langchain/community": "^1.0.0",
+    "@langchain/core": "patch:@langchain/core@npm%3A1.0.2#~/.yarn/patches/@langchain-core-npm-1.0.2-183ef83fe4.patch",
+    "@langchain/openai": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
     "@mistralai/mistralai": "^1.7.5",
     "@modelcontextprotocol/sdk": "^1.17.5",
     "@mozilla/readability": "^0.6.0",
@@ -344,6 +348,7 @@
     "striptags": "^3.2.0",
     "styled-components": "^6.1.11",
     "swr": "^2.3.6",
+    "tailwind-merge": "^3.3.1",
     "tailwindcss": "^4.1.13",
     "tar": "^7.4.3",
     "tiny-pinyin": "^1.3.2",
@@ -369,12 +374,11 @@
     "zod": "^4.1.5"
   },
   "resolutions": {
+    "@smithy/types": "4.7.1",
     "@codemirror/language": "6.11.3",
     "@codemirror/lint": "6.8.5",
     "@codemirror/view": "6.38.1",
-    "@langchain/core@npm:^0.3.26": "patch:@langchain/core@npm%3A0.3.44#~/.yarn/patches/@langchain-core-npm-0.3.44-41d5c3cb0a.patch",
-    "@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
-    "@langchain/openai@npm:>=0.1.0 <0.4.0": "patch:@langchain/openai@npm%3A0.3.16#~/.yarn/patches/@langchain-openai-npm-0.3.16-e525b59526.patch",
+    "@langchain/core@npm:^0.3.26": "patch:@langchain/core@npm%3A1.0.2#~/.yarn/patches/@langchain-core-npm-1.0.2-183ef83fe4.patch",
     "app-builder-lib@npm:26.0.13": "patch:app-builder-lib@npm%3A26.0.13#~/.yarn/patches/app-builder-lib-npm-26.0.13-a064c9e1d0.patch",
     "app-builder-lib@npm:26.0.15": "patch:app-builder-lib@npm%3A26.0.15#~/.yarn/patches/app-builder-lib-npm-26.0.15-360e5b0476.patch",
     "atomically@npm:^1.7.0": "patch:atomically@npm%3A1.7.0#~/.yarn/patches/atomically-npm-1.7.0-e742e5293b.patch",
@@ -398,7 +402,10 @@
     "@img/sharp-linux-arm64": "0.34.3",
     "@img/sharp-linux-x64": "0.34.3",
     "@img/sharp-win32-x64": "0.34.3",
-    "openai@npm:5.12.2": "npm:@cherrystudio/openai@6.5.0"
+    "openai@npm:5.12.2": "npm:@cherrystudio/openai@6.5.0",
+    "@langchain/openai@npm:>=0.1.0 <0.6.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
+    "@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
+    "@langchain/openai@npm:>=0.2.0 <0.7.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch"
   },
   "packageManager": "yarn@4.9.1",
   "lint-staged": {
```
```diff
@@ -2,7 +2,7 @@
  * 中间件管理器
  * 专注于 AI SDK 中间件的管理,与插件系统分离
  */
-import { LanguageModelV2Middleware } from '@ai-sdk/provider'
+import type { LanguageModelV2Middleware } from '@ai-sdk/provider'

 /**
  * 创建中间件列表
```
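This hunk and the ones that follow apply the same mechanical change across the aiCore, trace, and apiServer sources: imports that are only used in type positions are switched to `import type` (or split into a type-only import plus a separate value import). Type-only imports are erased at compile time, so they keep purely type-level dependencies out of the emitted bundles and satisfy the `typescript/consistent-type-imports` rule enabled in the lint config above. A minimal sketch of the pattern, assuming the AI SDK's `wrapLanguageModel({ model, middleware })` signature used by the wrapper module in these diffs:

```typescript
// Type-only imports are erased from the compiled output; value imports remain.
import type { LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
import { wrapLanguageModel } from 'ai'

// Both V2 types appear only in type positions, so marking them `import type`
// changes nothing at runtime but keeps the emitted module free of an
// unnecessary runtime dependency edge.
export function withMiddlewares(
  model: LanguageModelV2,
  middlewares: LanguageModelV2Middleware[]
): LanguageModelV2 {
  return middlewares.length > 0 ? wrapLanguageModel({ model, middleware: middlewares }) : model
}
```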
```diff
@@ -1,7 +1,7 @@
 /**
  * 中间件系统类型定义
  */
-import { LanguageModelV2Middleware } from '@ai-sdk/provider'
+import type { LanguageModelV2Middleware } from '@ai-sdk/provider'

 /**
  * 具名中间件接口
```

```diff
@@ -2,7 +2,7 @@
  * 模型包装工具函数
  * 用于将中间件应用到LanguageModel上
  */
-import { LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
+import type { LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
 import { wrapLanguageModel } from 'ai'

 /**
```

```diff
@@ -5,7 +5,7 @@
  * 集成了来自 ModelCreator 的特殊处理逻辑
  */

-import { EmbeddingModelV2, ImageModelV2, LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
+import type { EmbeddingModelV2, ImageModelV2, LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'

 import { wrapModelWithMiddlewares } from '../middleware/wrapper'
 import { DEFAULT_SEPARATOR, globalRegistryManagement } from '../providers/RegistryManagement'
```

```diff
@@ -1,7 +1,7 @@
 /**
  * Creation 模块类型定义
  */
-import { LanguageModelV2Middleware } from '@ai-sdk/provider'
+import type { LanguageModelV2Middleware } from '@ai-sdk/provider'

 import type { ProviderId, ProviderSettingsMap } from '../providers/types'

```

```diff
@@ -1,4 +1,4 @@
-import { ExtractProviderOptions, ProviderOptionsMap, TypedProviderOptions } from './types'
+import type { ExtractProviderOptions, ProviderOptionsMap, TypedProviderOptions } from './types'

 /**
  * 创建特定供应商的选项
```

```diff
@@ -10,7 +10,7 @@ import type { AiRequestContext } from '../../types'
 import { StreamEventManager } from './StreamEventManager'
 import { type TagConfig, TagExtractor } from './tagExtraction'
 import { ToolExecutor } from './ToolExecutor'
-import { PromptToolUseConfig, ToolUseResult } from './type'
+import type { PromptToolUseConfig, ToolUseResult } from './type'

 /**
  * 工具使用标签配置
```

```diff
@@ -1,6 +1,6 @@
-import { ToolSet } from 'ai'
+import type { ToolSet } from 'ai'

-import { AiRequestContext } from '../..'
+import type { AiRequestContext } from '../..'

 /**
  * 解析结果类型
```

```diff
@@ -1,10 +1,11 @@
-import { anthropic } from '@ai-sdk/anthropic'
-import { google } from '@ai-sdk/google'
-import { openai } from '@ai-sdk/openai'
-import { InferToolInput, InferToolOutput, type Tool } from 'ai'
+import type { anthropic } from '@ai-sdk/anthropic'
+import type { google } from '@ai-sdk/google'
+import type { openai } from '@ai-sdk/openai'
+import type { InferToolInput, InferToolOutput } from 'ai'
+import { type Tool } from 'ai'

-import { ProviderOptionsMap } from '../../../options/types'
-import { OpenRouterSearchConfig } from './openrouter'
+import type { ProviderOptionsMap } from '../../../options/types'
+import type { OpenRouterSearchConfig } from './openrouter'

 /**
  * 从 AI SDK 的工具函数中提取参数类型,以确保类型安全。
```

```diff
@@ -9,7 +9,8 @@ import { openai } from '@ai-sdk/openai'
 import { createOpenRouterOptions, createXaiOptions, mergeProviderOptions } from '../../../options'
 import { definePlugin } from '../../'
 import type { AiRequestContext } from '../../types'
-import { DEFAULT_WEB_SEARCH_CONFIG, WebSearchPluginConfig } from './helper'
+import type { WebSearchPluginConfig } from './helper'
+import { DEFAULT_WEB_SEARCH_CONFIG } from './helper'

 /**
  * 网络搜索插件
```

```diff
@@ -1,4 +1,4 @@
-import { AiPlugin, AiRequestContext } from './types'
+import type { AiPlugin, AiRequestContext } from './types'

 /**
  * 插件管理器
```

```diff
@@ -5,7 +5,7 @@
  * 例如: aihubmix:anthropic:claude-3.5-sonnet
  */

-import { ProviderV2 } from '@ai-sdk/provider'
+import type { ProviderV2 } from '@ai-sdk/provider'
 import { customProvider } from 'ai'

 import { globalRegistryManagement } from './RegistryManagement'
```

```diff
@@ -4,7 +4,7 @@
  * 基于 AI SDK 原生的 createProviderRegistry
  */

-import { EmbeddingModelV2, ImageModelV2, LanguageModelV2, ProviderV2 } from '@ai-sdk/provider'
+import type { EmbeddingModelV2, ImageModelV2, LanguageModelV2, ProviderV2 } from '@ai-sdk/provider'
 import { createProviderRegistry, type ProviderRegistryProvider } from 'ai'

 type PROVIDERS = Record<string, ProviderV2>
```

```diff
@@ -10,10 +10,11 @@ import { createGoogleGenerativeAI } from '@ai-sdk/google'
 import { createHuggingFace } from '@ai-sdk/huggingface'
 import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai'
 import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
-import { LanguageModelV2 } from '@ai-sdk/provider'
+import type { LanguageModelV2 } from '@ai-sdk/provider'
 import { createXai } from '@ai-sdk/xai'
 import { createOpenRouter } from '@openrouter/ai-sdk-provider'
-import { customProvider, Provider } from 'ai'
+import type { Provider } from 'ai'
+import { customProvider } from 'ai'
 import * as z from 'zod'

 /**
```

```diff
@@ -4,7 +4,7 @@ import { type DeepSeekProviderSettings } from '@ai-sdk/deepseek'
 import { type GoogleGenerativeAIProviderSettings } from '@ai-sdk/google'
 import { type OpenAIProviderSettings } from '@ai-sdk/openai'
 import { type OpenAICompatibleProviderSettings } from '@ai-sdk/openai-compatible'
-import {
+import type {
   EmbeddingModelV2 as EmbeddingModel,
   ImageModelV2 as ImageModel,
   LanguageModelV2 as LanguageModel,
```

```diff
@@ -1,4 +1,4 @@
-import { ImageModelV2 } from '@ai-sdk/provider'
+import type { ImageModelV2 } from '@ai-sdk/provider'
 import { experimental_generateImage as aiGenerateImage, NoImageGeneratedError } from 'ai'
 import { beforeEach, describe, expect, it, vi } from 'vitest'

```
```diff
@@ -2,12 +2,12 @@
  * 运行时执行器
  * 专注于插件化的AI调用处理
  */
-import { ImageModelV2, LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
+import type { ImageModelV2, LanguageModelV2, LanguageModelV2Middleware } from '@ai-sdk/provider'
+import type { LanguageModel } from 'ai'
 import {
   experimental_generateImage as _generateImage,
   generateObject as _generateObject,
   generateText as _generateText,
-  LanguageModel,
   streamObject as _streamObject,
   streamText as _streamText
 } from 'ai'
@@ -11,7 +11,7 @@ export type { RuntimeConfig } from './types'

 // === 便捷工厂函数 ===

-import { LanguageModelV2Middleware } from '@ai-sdk/provider'
+import type { LanguageModelV2Middleware } from '@ai-sdk/provider'

 import { type AiPlugin } from '../plugins'
 import { type ProviderId, type ProviderSettingsMap } from '../providers/types'
```

```diff
@@ -1,6 +1,13 @@
 /* eslint-disable @eslint-react/naming-convention/context-name */
-import { ImageModelV2 } from '@ai-sdk/provider'
-import { experimental_generateImage, generateObject, generateText, LanguageModel, streamObject, streamText } from 'ai'
+import type { ImageModelV2 } from '@ai-sdk/provider'
+import type {
+  experimental_generateImage,
+  generateObject,
+  generateText,
+  LanguageModel,
+  streamObject,
+  streamText
+} from 'ai'

 import { type AiPlugin, createContext, PluginManager } from '../plugins'
 import { type ProviderId } from '../providers/types'
```

```diff
@@ -1,8 +1,8 @@
 /**
  * Runtime 层类型定义
  */
-import { ImageModelV2 } from '@ai-sdk/provider'
-import { experimental_generateImage, generateObject, generateText, streamObject, streamText } from 'ai'
+import type { ImageModelV2 } from '@ai-sdk/provider'
+import type { experimental_generateImage, generateObject, generateText, streamObject, streamText } from 'ai'

 import { type ModelConfig } from '../models/types'
 import { type AiPlugin } from '../plugins'
```

```diff
@@ -1,4 +1,5 @@
-import { Extension, Node } from '@tiptap/core'
+import type { Node } from '@tiptap/core'
+import { Extension } from '@tiptap/core'

 import type { TableCellOptions } from '../cell/index.js'
 import { TableCell } from '../cell/index.js'
```

```diff
@@ -1,7 +1,7 @@
 import { SpanKind, SpanStatusCode } from '@opentelemetry/api'
-import { ReadableSpan } from '@opentelemetry/sdk-trace-base'
+import type { ReadableSpan } from '@opentelemetry/sdk-trace-base'

-import { SpanEntity } from '../types/config'
+import type { SpanEntity } from '../types/config'

 /**
  * convert ReadableSpan to SpanEntity
```

```diff
@@ -1,4 +1,4 @@
-import { ReadableSpan } from '@opentelemetry/sdk-trace-base'
+import type { ReadableSpan } from '@opentelemetry/sdk-trace-base'

 export interface TraceCache {
   createSpan: (span: ReadableSpan) => void
```

```diff
@@ -1,5 +1,6 @@
-import { ExportResult, ExportResultCode } from '@opentelemetry/core'
-import { ReadableSpan, SpanExporter } from '@opentelemetry/sdk-trace-base'
+import type { ExportResult } from '@opentelemetry/core'
+import { ExportResultCode } from '@opentelemetry/core'
+import type { ReadableSpan, SpanExporter } from '@opentelemetry/sdk-trace-base'

 export type SaveFunction = (spans: ReadableSpan[]) => Promise<void>

```

```diff
@@ -1,7 +1,9 @@
-import { Context, trace } from '@opentelemetry/api'
-import { BatchSpanProcessor, BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
+import type { Context } from '@opentelemetry/api'
+import { trace } from '@opentelemetry/api'
+import type { BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
+import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base'

-import { TraceCache } from '../core/traceCache'
+import type { TraceCache } from '../core/traceCache'

 export class CacheBatchSpanProcessor extends BatchSpanProcessor {
   private cache: TraceCache
```

```diff
@@ -1,6 +1,7 @@
-import { Context } from '@opentelemetry/api'
-import { BatchSpanProcessor, BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
-import { EventEmitter } from 'stream'
+import type { Context } from '@opentelemetry/api'
+import type { BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
+import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base'
+import type { EventEmitter } from 'stream'

 import { convertSpanToSpanEntity } from '../core/spanConvert'

```

```diff
@@ -1,5 +1,7 @@
-import { Context, trace } from '@opentelemetry/api'
-import { BatchSpanProcessor, BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
+import type { Context } from '@opentelemetry/api'
+import { trace } from '@opentelemetry/api'
+import type { BufferConfig, ReadableSpan, Span, SpanExporter } from '@opentelemetry/sdk-trace-base'
+import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base'

 export type SpanFunction = (span: ReadableSpan) => void

```

```diff
@@ -1,5 +1,5 @@
-import { Link } from '@opentelemetry/api'
-import { TimedEvent } from '@opentelemetry/sdk-trace-base'
+import type { Link } from '@opentelemetry/api'
+import type { TimedEvent } from '@opentelemetry/sdk-trace-base'

 export type AttributeValue =
   | string
```

```diff
@@ -1,11 +1,14 @@
-import { trace, Tracer } from '@opentelemetry/api'
+import type { Tracer } from '@opentelemetry/api'
+import { trace } from '@opentelemetry/api'
 import { AsyncLocalStorageContextManager } from '@opentelemetry/context-async-hooks'
 import { W3CTraceContextPropagator } from '@opentelemetry/core'
 import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'
-import { BatchSpanProcessor, ConsoleSpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base'
+import type { SpanProcessor } from '@opentelemetry/sdk-trace-base'
+import { BatchSpanProcessor, ConsoleSpanExporter } from '@opentelemetry/sdk-trace-base'
 import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node'

-import { defaultConfig, TraceConfig } from '../trace-core/types/config'
+import type { TraceConfig } from '../trace-core/types/config'
+import { defaultConfig } from '../trace-core/types/config'

 export class NodeTracer {
   private static provider: NodeTracerProvider
```

```diff
@@ -1,4 +1,5 @@
-import { Context, ContextManager, ROOT_CONTEXT } from '@opentelemetry/api'
+import type { Context, ContextManager } from '@opentelemetry/api'
+import { ROOT_CONTEXT } from '@opentelemetry/api'

 export class TopicContextManager implements ContextManager {
   private topicContextStack: Map<string, Context[]>
```

```diff
@@ -1,4 +1,5 @@
-import { Context, context } from '@opentelemetry/api'
+import type { Context } from '@opentelemetry/api'
+import { context } from '@opentelemetry/api'

 const originalPromise = globalThis.Promise

```

```diff
@@ -1,9 +1,11 @@
 import { W3CTraceContextPropagator } from '@opentelemetry/core'
 import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'
-import { BatchSpanProcessor, ConsoleSpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base'
+import type { SpanProcessor } from '@opentelemetry/sdk-trace-base'
+import { BatchSpanProcessor, ConsoleSpanExporter } from '@opentelemetry/sdk-trace-base'
 import { WebTracerProvider } from '@opentelemetry/sdk-trace-web'

-import { defaultConfig, TraceConfig } from '../trace-core/types/config'
+import type { TraceConfig } from '../trace-core/types/config'
+import { defaultConfig } from '../trace-core/types/config'
 import { TopicContextManager } from './TopicContextManager'

 export const contextManager = new TopicContextManager()
```
```diff
@@ -322,6 +322,7 @@ export enum IpcChannel {
   ApiServer_Stop = 'api-server:stop',
   ApiServer_Restart = 'api-server:restart',
   ApiServer_GetStatus = 'api-server:get-status',
+  ApiServer_Ready = 'api-server:ready',
   // NOTE: This api is not be used.
   ApiServer_GetConfig = 'api-server:get-config',

@@ -363,5 +364,12 @@
   ClaudeCodePlugin_ListInstalled = 'claudeCodePlugin:list-installed',
   ClaudeCodePlugin_InvalidateCache = 'claudeCodePlugin:invalidate-cache',
   ClaudeCodePlugin_ReadContent = 'claudeCodePlugin:read-content',
-  ClaudeCodePlugin_WriteContent = 'claudeCodePlugin:write-content'
+  ClaudeCodePlugin_WriteContent = 'claudeCodePlugin:write-content',
+
+  // WebSocket
+  WebSocket_Start = 'webSocket:start',
+  WebSocket_Stop = 'webSocket:stop',
+  WebSocket_Status = 'webSocket:status',
+  WebSocket_SendFile = 'webSocket:send-file',
+  WebSocket_GetAllCandidates = 'webSocket:get-all-candidates'
 }
```

```diff
@@ -9,9 +9,9 @@
  */

 import Anthropic from '@anthropic-ai/sdk'
-import { TextBlockParam } from '@anthropic-ai/sdk/resources'
+import type { TextBlockParam } from '@anthropic-ai/sdk/resources'
 import { loggerService } from '@logger'
-import { Provider } from '@types'
+import type { Provider } from '@types'
 import type { ModelMessage } from 'ai'

 const logger = loggerService.withContext('anthropic-sdk')
```

```diff
@@ -470,3 +470,6 @@ export const MACOS_TERMINALS_WITH_COMMANDS: TerminalConfigWithCommand[] = [
     })
   }
 ]
+
+// resources/scripts should be maintained manually
+export const HOME_CHERRY_DIR = '.cherrystudio'
```

```diff
@@ -1,4 +1,4 @@
-import { ProcessingStatus } from '@types'
+import type { ProcessingStatus } from '@types'

 export type LoaderReturn = {
   entriesAdded: number
@@ -31,3 +31,16 @@
   shift: boolean
   alt: boolean
 }
+
+export interface WebSocketStatusResponse {
+  isRunning: boolean
+  port?: number
+  ip?: string
+  clientConnected: boolean
+}
+
+export interface WebSocketCandidatesResponse {
+  host: string
+  interface: string
+  priority: number
+}
```
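The new `WebSocket_*` IPC channels and the `WebSocketStatusResponse` / `WebSocketCandidatesResponse` shapes suggest a main-process WebSocket server that the renderer can query over IPC. The sketch below shows one way a renderer-side helper could use them; it is a hypothetical wiring that assumes the main process registers an `ipcMain.handle()` handler for `IpcChannel.WebSocket_Status`, and the import path for the response type is only illustrative.

```typescript
// Hypothetical renderer-side helper; only the channel name and response shape
// come from this diff, the handler registration and paths are assumptions.
import { ipcRenderer } from 'electron'

import { IpcChannel } from '@shared/IpcChannel'
import type { WebSocketStatusResponse } from '@shared/config/types' // assumed location of the type

export function getWebSocketStatus(): Promise<WebSocketStatusResponse> {
  // Resolves with { isRunning, port?, ip?, clientConnected } per the new interface.
  return ipcRenderer.invoke(IpcChannel.WebSocket_Status) as Promise<WebSocketStatusResponse>
}
```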
```diff
@@ -18,8 +18,10 @@ import { sortedObjectByKeys } from './sort'
 // ========== SCRIPT CONFIGURATION AREA - MODIFY SETTINGS HERE ==========
 const SCRIPT_CONFIG = {
   // 🔧 Concurrency Control Configuration
-  MAX_CONCURRENT_TRANSLATIONS: 5, // Max concurrent requests (Make sure the concurrency level does not exceed your provider's limits.)
-  TRANSLATION_DELAY_MS: 100, // Delay between requests to avoid rate limiting (Recommended: 100-500ms, Range: 0-5000ms)
+  MAX_CONCURRENT_TRANSLATIONS: process.env.TRANSLATION_MAX_CONCURRENT_REQUESTS
+    ? parseInt(process.env.TRANSLATION_MAX_CONCURRENT_REQUESTS)
+    : 5, // Max concurrent requests (Make sure the concurrency level does not exceed your provider's limits.)
+  TRANSLATION_DELAY_MS: process.env.TRANSLATION_DELAY_MS ? parseInt(process.env.TRANSLATION_DELAY_MS) : 500, // Delay between requests to avoid rate limiting (Recommended: 100-500ms, Range: 0-5000ms)

   // 🔑 API Configuration
   API_KEY: process.env.TRANSLATION_API_KEY || '', // API key from environment variable
```
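With this change the translation script's concurrency and delay come from `TRANSLATION_MAX_CONCURRENT_REQUESTS` and `TRANSLATION_DELAY_MS` (falling back to 5 and 500 ms), so the auto-i18n workflow above can tune them per run, for example with `TRANSLATION_DELAY_MS=200 yarn auto:i18n`. One thing the diff does not guard against is a malformed value, since `parseInt` returns `NaN`; the sketch below is a slightly stricter variant of the same idea, not code from the repository, and the clamping bounds are assumptions based on the ranges mentioned in the script's own comments.

```typescript
// Illustrative hardening of the env-driven config: fall back on NaN and clamp
// to sane bounds. The helper and the bounds (1-50 concurrent, 0-5000 ms) are
// assumptions, not part of scripts/auto-translate-i18n.ts.
function readIntEnv(name: string, fallback: number, min: number, max: number): number {
  const raw = process.env[name]
  const parsed = raw ? parseInt(raw, 10) : NaN
  if (Number.isNaN(parsed)) return fallback
  return Math.min(max, Math.max(min, parsed))
}

const MAX_CONCURRENT_TRANSLATIONS = readIntEnv('TRANSLATION_MAX_CONCURRENT_REQUESTS', 5, 1, 50)
const TRANSLATION_DELAY_MS = readIntEnv('TRANSLATION_DELAY_MS', 500, 0, 5000)
```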
@@ -1,4 +1,4 @@
|
||||
import { ApiServerConfig } from '@types'
|
||||
import type { ApiServerConfig } from '@types'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
import { loggerService } from '../services/LoggerService'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import crypto from 'crypto'
|
||||
import { NextFunction, Request, Response } from 'express'
|
||||
import type { NextFunction, Request, Response } from 'express'
|
||||
|
||||
import { config } from '../config'
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { NextFunction, Request, Response } from 'express'
|
||||
import type { NextFunction, Request, Response } from 'express'
|
||||
|
||||
import { loggerService } from '../../services/LoggerService'
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Express } from 'express'
|
||||
import type { Express } from 'express'
|
||||
import swaggerJSDoc from 'swagger-jsdoc'
|
||||
import swaggerUi from 'swagger-ui-express'
|
||||
|
||||
@@ -171,7 +171,7 @@ const swaggerOptions: swaggerJSDoc.Options = {
|
||||
}
|
||||
]
|
||||
},
|
||||
apis: ['./src/main/apiServer/routes/*.ts', './src/main/apiServer/app.ts']
|
||||
apis: ['./src/main/apiServer/routes/**/*.ts', './src/main/apiServer/app.ts']
|
||||
}
|
||||
|
||||
export function setupOpenAPIDocumentation(app: Express) {
|
||||
|
||||
@@ -1,7 +1,8 @@
import { loggerService } from '@logger'
import { AgentModelValidationError, agentService, sessionService } from '@main/services/agents'
import { ListAgentsResponse, type ReplaceAgentRequest, type UpdateAgentRequest } from '@types'
import { Request, Response } from 'express'
import type { ListAgentsResponse } from '@types'
import { type ReplaceAgentRequest, type UpdateAgentRequest } from '@types'
import type { Request, Response } from 'express'

import type { ValidationRequest } from '../validators/zodValidator'

@@ -2,7 +2,7 @@ import { loggerService } from '@logger'
import { MESSAGE_STREAM_TIMEOUT_MS } from '@main/apiServer/config/timeouts'
import { createStreamAbortController, STREAM_TIMEOUT_REASON } from '@main/apiServer/utils/createStreamAbortController'
import { agentService, sessionMessageService, sessionService } from '@main/services/agents'
import { Request, Response } from 'express'
import type { Request, Response } from 'express'

const logger = loggerService.withContext('ApiServerMessagesHandlers')

@@ -1,7 +1,8 @@
import { loggerService } from '@logger'
import { AgentModelValidationError, sessionMessageService, sessionService } from '@main/services/agents'
import { ListAgentSessionsResponse, type ReplaceSessionRequest, UpdateSessionResponse } from '@types'
import { Request, Response } from 'express'
import type { ListAgentSessionsResponse, UpdateSessionResponse } from '@types'
import { type ReplaceSessionRequest } from '@types'
import type { Request, Response } from 'express'

import type { ValidationRequest } from '../validators/zodValidator'
@@ -1,4 +1,4 @@
import { Request, Response } from 'express'
import type { Request, Response } from 'express'

import { agentService } from '../../../../services/agents'
import { loggerService } from '../../../../services/LoggerService'

@@ -1,5 +1,6 @@
import { NextFunction, Request, Response } from 'express'
import { ZodError, ZodType } from 'zod'
import type { NextFunction, Request, Response } from 'express'
import type { ZodType } from 'zod'
import { ZodError } from 'zod'

export interface ValidationRequest extends Request {
validatedBody?: any
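This hunk shows why the conversion cannot be purely mechanical: `ZodType` appears only in type positions, but `ZodError` is matched with `instanceof` at runtime, so it has to stay a value import. A rough sketch of a body-validation middleware in that shape; this is illustrative, not the repository's actual validator:

```ts
import type { NextFunction, Request, Response } from 'express'
import type { ZodType } from 'zod'
import { ZodError } from 'zod'

interface ValidationRequest extends Request {
  validatedBody?: unknown
}

// Illustrative middleware: parse the body with the given schema and surface Zod issues as HTTP 400s.
export const validateBody =
  (schema: ZodType) => (req: ValidationRequest, res: Response, next: NextFunction) => {
    try {
      req.validatedBody = schema.parse(req.body)
      next()
    } catch (error) {
      if (error instanceof ZodError) {
        // ZodError is needed as a runtime value here, hence the non-type import above.
        res.status(400).json({ error: 'Invalid request body', issues: error.issues })
        return
      }
      next(error)
    }
  }
```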
@@ -1,5 +1,6 @@
import { ChatCompletionCreateParams } from '@cherrystudio/openai/resources'
import express, { Request, Response } from 'express'
import type { ChatCompletionCreateParams } from '@cherrystudio/openai/resources'
import type { Request, Response } from 'express'
import express from 'express'

import { loggerService } from '../../services/LoggerService'
import {

@@ -1,4 +1,5 @@
import express, { Request, Response } from 'express'
import type { Request, Response } from 'express'
import express from 'express'

import { loggerService } from '../../services/LoggerService'
import { mcpApiService } from '../services/mcp'

@@ -1,7 +1,8 @@
import { MessageCreateParams } from '@anthropic-ai/sdk/resources'
import type { MessageCreateParams } from '@anthropic-ai/sdk/resources'
import { loggerService } from '@logger'
import { Provider } from '@types'
import express, { Request, Response } from 'express'
import type { Provider } from '@types'
import type { Request, Response } from 'express'
import express from 'express'

import { messagesService } from '../services/messages'
import { getProviderById, validateModelId } from '../utils'

@@ -1,5 +1,7 @@
import { ApiModelsFilterSchema, ApiModelsResponse } from '@types'
import express, { Request, Response } from 'express'
import type { ApiModelsResponse } from '@types'
import { ApiModelsFilterSchema } from '@types'
import type { Request, Response } from 'express'
import express from 'express'

import { loggerService } from '../../services/LoggerService'
import { modelsService } from '../services/models'
@@ -1,8 +1,10 @@
import { createServer } from 'node:http'

import { loggerService } from '@logger'
import { IpcChannel } from '@shared/IpcChannel'

import { agentService } from '../services/agents'
import { windowService } from '../services/WindowService'
import { app } from './app'
import { config } from './config'

@@ -43,6 +45,13 @@ export class ApiServer {
return new Promise((resolve, reject) => {
this.server!.listen(port, host, () => {
logger.info('API server started', { host, port })

// Notify renderer that API server is ready
const mainWindow = windowService.getMainWindow()
if (mainWindow && !mainWindow.isDestroyed()) {
mainWindow.webContents.send(IpcChannel.ApiServer_Ready)
}

resolve()
})
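The new `IpcChannel.ApiServer_Ready` message gives the renderer a hook to refresh its API-server status once the HTTP listener is actually up. The renderer side is not shown in this diff; a hedged sketch of a preload-style subscription might look like the following, where the channel comes from the hunk above and everything around it is an assumption:

```ts
import { IpcChannel } from '@shared/IpcChannel'
import { ipcRenderer } from 'electron'

// Sketch only: subscribe to the main process's "API server ready" notification.
export function onApiServerReady(callback: () => void): () => void {
  const listener = () => callback()
  ipcRenderer.on(IpcChannel.ApiServer_Ready, listener)
  // Return an unsubscribe function so UI components can clean up on unmount.
  return () => ipcRenderer.removeListener(IpcChannel.ApiServer_Ready, listener)
}
```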
@@ -1,9 +1,10 @@
import OpenAI from '@cherrystudio/openai'
import { ChatCompletionCreateParams, ChatCompletionCreateParamsStreaming } from '@cherrystudio/openai/resources'
import { Provider } from '@types'
import type { ChatCompletionCreateParams, ChatCompletionCreateParamsStreaming } from '@cherrystudio/openai/resources'
import type { Provider } from '@types'

import { loggerService } from '../../services/LoggerService'
import { ModelValidationError, validateModelId } from '../utils'
import type { ModelValidationError } from '../utils'
import { validateModelId } from '../utils'

const logger = loggerService.withContext('ChatCompletionService')

@@ -1,16 +1,12 @@
import mcpService from '@main/services/MCPService'
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp'
import {
isJSONRPCRequest,
JSONRPCMessage,
JSONRPCMessageSchema,
MessageExtraInfo
} from '@modelcontextprotocol/sdk/types'
import { MCPServer } from '@types'
import type { JSONRPCMessage, MessageExtraInfo } from '@modelcontextprotocol/sdk/types'
import { isJSONRPCRequest, JSONRPCMessageSchema } from '@modelcontextprotocol/sdk/types'
import type { MCPServer } from '@types'
import { randomUUID } from 'crypto'
import { EventEmitter } from 'events'
import { Request, Response } from 'express'
import { IncomingMessage, ServerResponse } from 'http'
import type { Request, Response } from 'express'
import type { IncomingMessage, ServerResponse } from 'http'

import { loggerService } from '../../services/LoggerService'
import { getMcpServerById, getMCPServersFromRedux } from '../utils/mcp'
@@ -1,10 +1,10 @@
import Anthropic from '@anthropic-ai/sdk'
import { MessageCreateParams, MessageStreamEvent } from '@anthropic-ai/sdk/resources'
import type Anthropic from '@anthropic-ai/sdk'
import type { MessageCreateParams, MessageStreamEvent } from '@anthropic-ai/sdk/resources'
import { loggerService } from '@logger'
import anthropicService from '@main/services/AnthropicService'
import { buildClaudeCodeSystemMessage, getSdkClient } from '@shared/anthropic'
import { Provider } from '@types'
import { Response } from 'express'
import type { Provider } from '@types'
import type { Response } from 'express'

const logger = loggerService.withContext('MessagesService')
const EXCLUDED_FORWARD_HEADERS: ReadonlySet<string> = new Set([

@@ -1,6 +1,6 @@
import { isEmpty } from 'lodash'

import { ApiModel, ApiModelsFilter, ApiModelsResponse } from '../../../renderer/src/types/apiModels'
import type { ApiModel, ApiModelsFilter, ApiModelsResponse } from '../../../renderer/src/types/apiModels'
import { loggerService } from '../../services/LoggerService'
import {
getAvailableProviders,

@@ -1,7 +1,7 @@
import { CacheService } from '@main/services/CacheService'
import { loggerService } from '@main/services/LoggerService'
import { reduxService } from '@main/services/ReduxService'
import { ApiModel, Model, Provider } from '@types'
import type { ApiModel, Model, Provider } from '@types'

const logger = loggerService.withContext('ApiServerUtils')
@@ -1,8 +1,9 @@
import { CacheService } from '@main/services/CacheService'
import mcpService from '@main/services/MCPService'
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
import { CallToolRequestSchema, ListToolsRequestSchema, ListToolsResult } from '@modelcontextprotocol/sdk/types.js'
import { MCPServer } from '@types'
import type { ListToolsResult } from '@modelcontextprotocol/sdk/types.js'
import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'
import type { MCPServer } from '@types'

import { loggerService } from '../../services/LoggerService'
import { reduxService } from '../../services/ReduxService'

@@ -21,6 +21,7 @@ import { appMenuService } from './services/AppMenuService'
import { configManager } from './services/ConfigManager'
import mcpService from './services/MCPService'
import { nodeTraceService } from './services/NodeTraceService'
import powerMonitorService from './services/PowerMonitorService'
import {
CHERRY_STUDIO_PROTOCOL,
handleProtocolUrl,

@@ -30,6 +31,7 @@ import {
import selectionService, { initSelectionService } from './services/SelectionService'
import { registerShortcuts } from './services/ShortcutService'
import { TrayService } from './services/TrayService'
import { versionService } from './services/VersionService'
import { windowService } from './services/WindowService'
import { initWebviewHotkeys } from './services/WebviewService'

@@ -110,6 +112,10 @@ if (!app.requestSingleInstanceLock()) {
// Some APIs can only be used after this event occurs.

app.whenReady().then(async () => {
// Record current version for tracking
// A preparation for v2 data refactoring
versionService.recordCurrentVersion()

initWebviewHotkeys()
// Set app user model id for windows
electronApp.setAppUserModelId(import.meta.env.VITE_MAIN_BUNDLE_ID || 'com.kangfenmao.CherryStudio')

@@ -127,6 +133,7 @@ if (!app.requestSingleInstanceLock()) {
appMenuService?.setupApplicationMenu()

nodeTraceService.init()
powerMonitorService.init()

app.on('activate', function () {
const mainWindow = windowService.getMainWindow()
@@ -8,11 +8,12 @@ import { generateSignature } from '@main/integration/cherryai'
import anthropicService from '@main/services/AnthropicService'
import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
import { handleZoomFactor } from '@main/utils/zoom'
import { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
import { MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH, UpgradeChannel } from '@shared/config/constant'
import type { SpanEntity, TokenUsage } from '@mcp-trace/trace-core'
import type { UpgradeChannel } from '@shared/config/constant'
import { MIN_WINDOW_HEIGHT, MIN_WINDOW_WIDTH } from '@shared/config/constant'
import { IpcChannel } from '@shared/IpcChannel'
import type { PluginError } from '@types'
import {
import type {
AgentPersistedMessage,
FileMetadata,
Notification,

@@ -23,10 +24,12 @@ import {
ThemeMode
} from '@types'
import checkDiskSpace from 'check-disk-space'
import { BrowserWindow, dialog, ipcMain, ProxyConfig, session, shell, systemPreferences, webContents } from 'electron'
import type { ProxyConfig } from 'electron'
import { BrowserWindow, dialog, ipcMain, session, shell, systemPreferences, webContents } from 'electron'
import fontList from 'font-list'

import { agentMessageRepository } from './services/agents/database'
import { PluginService } from './services/agents/plugins/PluginService'
import { apiServerService } from './services/ApiServerService'
import appService from './services/AppService'
import AppUpdater from './services/AppUpdater'

@@ -47,7 +50,7 @@ import * as NutstoreService from './services/NutstoreService'
import ObsidianVaultService from './services/ObsidianVaultService'
import { ocrService } from './services/ocr/OcrService'
import OvmsManager from './services/OvmsManager'
import { PluginService } from './services/PluginService'
import powerMonitorService from './services/PowerMonitorService'
import { proxyManager } from './services/ProxyManager'
import { pythonService } from './services/PythonService'
import { FileServiceManager } from './services/remotefile/FileServiceManager'

@@ -70,6 +73,7 @@ import {
import storeSyncService from './services/StoreSyncService'
import { themeService } from './services/ThemeService'
import VertexAIService from './services/VertexAIService'
import WebSocketService from './services/WebSocketService'
import { setOpenLinkExternal } from './services/WebviewService'
import { windowService } from './services/WindowService'
import { calculateDirectorySize, getResourcePath } from './utils'
@@ -112,8 +116,17 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
const appUpdater = new AppUpdater()
const notificationService = new NotificationService()

// Initialize Python service with main window
pythonService.setMainWindow(mainWindow)
// Register shutdown handlers
powerMonitorService.registerShutdownHandler(() => {
appUpdater.setAutoUpdate(false)
})

powerMonitorService.registerShutdownHandler(() => {
const mw = windowService.getMainWindow()
if (mw && !mw.isDestroyed()) {
mw.webContents.send(IpcChannel.App_SaveData)
}
})

const checkMainWindow = () => {
if (!mainWindow || mainWindow.isDestroyed()) {

@@ -1017,4 +1030,11 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
return { success: false, error }
}
})

// WebSocket
ipcMain.handle(IpcChannel.WebSocket_Start, WebSocketService.start)
ipcMain.handle(IpcChannel.WebSocket_Stop, WebSocketService.stop)
ipcMain.handle(IpcChannel.WebSocket_Status, WebSocketService.getStatus)
ipcMain.handle(IpcChannel.WebSocket_SendFile, WebSocketService.sendFile)
ipcMain.handle(IpcChannel.WebSocket_GetAllCandidates, WebSocketService.getAllCandidates)
}
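Each `ipcMain.handle` registration pairs with an `ipcRenderer.invoke` call on the same channel. A hedged sketch of what the renderer/preload side of the new WebSocket channels could look like; the channel names come from the hunk above, while the argument and result shapes are not shown in this diff and are left as `unknown`:

```ts
import { IpcChannel } from '@shared/IpcChannel'
import { ipcRenderer } from 'electron'

// invoke() resolves with whatever the corresponding ipcMain.handle handler returns.
export const webSocketApi = {
  start: (): Promise<unknown> => ipcRenderer.invoke(IpcChannel.WebSocket_Start),
  stop: (): Promise<unknown> => ipcRenderer.invoke(IpcChannel.WebSocket_Stop),
  getStatus: (): Promise<unknown> => ipcRenderer.invoke(IpcChannel.WebSocket_Status),
  // sendFile's payload shape is not part of this diff; it is passed through untouched.
  sendFile: (payload: unknown): Promise<unknown> => ipcRenderer.invoke(IpcChannel.WebSocket_SendFile, payload),
  getAllCandidates: (): Promise<unknown> => ipcRenderer.invoke(IpcChannel.WebSocket_GetAllCandidates)
}
```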
@@ -1,6 +1,6 @@
import type { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
import { TraceMethod } from '@mcp-trace/trace-core'
import { ApiClient } from '@types'
import type { ApiClient } from '@types'

import EmbeddingsFactory from './EmbeddingsFactory'

@@ -1,7 +1,8 @@
import type { BaseEmbeddings } from '@cherrystudio/embedjs-interfaces'
import { OllamaEmbeddings } from '@cherrystudio/embedjs-ollama'
import { OpenAiEmbeddings } from '@cherrystudio/embedjs-openai'
import { ApiClient } from '@types'
import type { ApiClient } from '@types'
import { net } from 'electron'

import { VoyageEmbeddings } from './VoyageEmbeddings'

@@ -43,7 +44,7 @@ export default class EmbeddingsFactory {
apiKey,
dimensions,
batchSize,
configuration: { baseURL }
configuration: { baseURL, fetch: net.fetch as typeof fetch }
})
}
}
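Passing `fetch: net.fetch` makes the OpenAI embeddings client send its HTTP requests through Electron's networking stack (Chromium's network service) instead of Node's global fetch, so the app's proxy, certificate, and session configuration apply; the `as typeof fetch` cast papers over the small type differences between the two signatures. A standalone illustration of the substitution, with a placeholder endpoint and key:

```ts
import { net } from 'electron'

// Main-process only. Unlike globalThis.fetch, net.fetch honors Electron's
// session and proxy settings, which is the point of the change above.
async function probe(baseURL: string): Promise<number> {
  const response = await net.fetch(`${baseURL}/models`, {
    headers: { Authorization: 'Bearer <api-key placeholder>' }
  })
  return response.status
}
```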
@@ -1,10 +1,11 @@
import { JsonLoader, LocalPathLoader, RAGApplication, TextLoader } from '@cherrystudio/embedjs'
import type { RAGApplication } from '@cherrystudio/embedjs'
import { JsonLoader, LocalPathLoader, TextLoader } from '@cherrystudio/embedjs'
import type { AddLoaderReturn } from '@cherrystudio/embedjs-interfaces'
import { WebLoader } from '@cherrystudio/embedjs-loader-web'
import { loggerService } from '@logger'
import { readTextFileWithAutoEncoding } from '@main/utils/file'
import { LoaderReturn } from '@shared/config/types'
import { FileMetadata, KnowledgeBaseParams } from '@types'
import type { LoaderReturn } from '@shared/config/types'
import type { FileMetadata, KnowledgeBaseParams } from '@types'

import { DraftsExportLoader } from './draftsExportLoader'
import { EpubLoader } from './epubLoader'

@@ -3,7 +3,8 @@ import { cleanString } from '@cherrystudio/embedjs-utils'
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'
import { loggerService } from '@logger'
import md5 from 'md5'
import { OfficeParserConfig, parseOfficeAsync } from 'officeparser'
import type { OfficeParserConfig } from 'officeparser'
import { parseOfficeAsync } from 'officeparser'

const logger = loggerService.withContext('OdLoader')

@@ -4,7 +4,7 @@ import path from 'node:path'
import { loggerService } from '@logger'
import { windowService } from '@main/services/WindowService'
import { getFileExt, getTempDir } from '@main/utils/file'
import { FileMetadata, PreprocessProvider } from '@types'
import type { FileMetadata, PreprocessProvider } from '@types'
import { PDFDocument } from 'pdf-lib'

const logger = loggerService.withContext('BasePreprocessProvider')

@@ -1,4 +1,4 @@
import { FileMetadata, PreprocessProvider } from '@types'
import type { FileMetadata, PreprocessProvider } from '@types'

import BasePreprocessProvider from './BasePreprocessProvider'
@@ -3,7 +3,7 @@ import path from 'node:path'

import { loggerService } from '@logger'
import { fileStorage } from '@main/services/FileStorage'
import { FileMetadata, PreprocessProvider } from '@types'
import type { FileMetadata, PreprocessProvider } from '@types'
import AdmZip from 'adm-zip'
import { net } from 'electron'

@@ -3,7 +3,7 @@ import path from 'node:path'

import { loggerService } from '@logger'
import { fileStorage } from '@main/services/FileStorage'
import { FileMetadata, PreprocessProvider } from '@types'
import type { FileMetadata, PreprocessProvider } from '@types'
import AdmZip from 'adm-zip'
import { net } from 'electron'

@@ -4,11 +4,12 @@ import { loggerService } from '@logger'
import { fileStorage } from '@main/services/FileStorage'
import { MistralClientManager } from '@main/services/MistralClientManager'
import { MistralService } from '@main/services/remotefile/MistralService'
import { Mistral } from '@mistralai/mistralai'
import { DocumentURLChunk } from '@mistralai/mistralai/models/components/documenturlchunk'
import { ImageURLChunk } from '@mistralai/mistralai/models/components/imageurlchunk'
import { OCRResponse } from '@mistralai/mistralai/models/components/ocrresponse'
import { FileMetadata, FileTypes, PreprocessProvider, Provider } from '@types'
import type { Mistral } from '@mistralai/mistralai'
import type { DocumentURLChunk } from '@mistralai/mistralai/models/components/documenturlchunk'
import type { ImageURLChunk } from '@mistralai/mistralai/models/components/imageurlchunk'
import type { OCRResponse } from '@mistralai/mistralai/models/components/ocrresponse'
import type { FileMetadata, PreprocessProvider, Provider } from '@types'
import { FileTypes } from '@types'
import path from 'path'

import BasePreprocessProvider from './BasePreprocessProvider'
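Note the same value/type split seen earlier with `ZodError`: `FileTypes` is kept as a regular import because it is referenced at runtime (it appears to be an enum rather than a purely structural type), while interfaces such as `FileMetadata` and `PreprocessProvider` can safely become type-only imports.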
@@ -3,7 +3,7 @@ import path from 'node:path'

import { loggerService } from '@logger'
import { fileStorage } from '@main/services/FileStorage'
import { FileMetadata, PreprocessProvider } from '@types'
import type { FileMetadata, PreprocessProvider } from '@types'
import AdmZip from 'adm-zip'
import { net } from 'electron'
import FormData from 'form-data'

@@ -1,6 +1,6 @@
import { FileMetadata, PreprocessProvider as Provider } from '@types'
import type { FileMetadata, PreprocessProvider as Provider } from '@types'

import BasePreprocessProvider from './BasePreprocessProvider'
import type BasePreprocessProvider from './BasePreprocessProvider'
import PreprocessProviderFactory from './PreprocessProviderFactory'

export default class PreprocessProvider {

@@ -1,6 +1,6 @@
import { PreprocessProvider } from '@types'
import type { PreprocessProvider } from '@types'

import BasePreprocessProvider from './BasePreprocessProvider'
import type BasePreprocessProvider from './BasePreprocessProvider'
import DefaultPreprocessProvider from './DefaultPreprocessProvider'
import Doc2xPreprocessProvider from './Doc2xPreprocessProvider'
import MineruPreprocessProvider from './MineruPreprocessProvider'
@@ -1,7 +1,7 @@
import { DEFAULT_DOCUMENT_COUNT, DEFAULT_RELEVANT_SCORE } from '@main/utils/knowledge'
import { KnowledgeBaseParams, KnowledgeSearchResult } from '@types'
import type { KnowledgeBaseParams, KnowledgeSearchResult } from '@types'

import { MultiModalDocument, RerankStrategy } from './strategies/RerankStrategy'
import type { MultiModalDocument, RerankStrategy } from './strategies/RerankStrategy'
import { StrategyFactory } from './strategies/StrategyFactory'

export default abstract class BaseReranker {

@@ -1,4 +1,4 @@
import { KnowledgeBaseParams, KnowledgeSearchResult } from '@types'
import type { KnowledgeBaseParams, KnowledgeSearchResult } from '@types'
import { net } from 'electron'

import BaseReranker from './BaseReranker'

@@ -1,4 +1,4 @@
import { KnowledgeBaseParams, KnowledgeSearchResult } from '@types'
import type { KnowledgeBaseParams, KnowledgeSearchResult } from '@types'

import GeneralReranker from './GeneralReranker'

@@ -1,4 +1,4 @@
import { MultiModalDocument, RerankStrategy } from './RerankStrategy'
import type { MultiModalDocument, RerankStrategy } from './RerankStrategy'
export class BailianStrategy implements RerankStrategy {
buildUrl(): string {
return 'https://dashscope.aliyuncs.com/api/v1/services/rerank/text-rerank/text-rerank'

@@ -1,4 +1,4 @@
import { MultiModalDocument, RerankStrategy } from './RerankStrategy'
import type { MultiModalDocument, RerankStrategy } from './RerankStrategy'
export class DefaultStrategy implements RerankStrategy {
buildUrl(baseURL?: string): string {
if (baseURL && baseURL.endsWith('/')) {

@@ -1,4 +1,4 @@
import { MultiModalDocument, RerankStrategy } from './RerankStrategy'
import type { MultiModalDocument, RerankStrategy } from './RerankStrategy'
export class JinaStrategy implements RerankStrategy {
buildUrl(baseURL?: string): string {
if (baseURL && baseURL.endsWith('/')) {
Some files were not shown because too many files have changed in this diff.