Compare commits
28 Commits
copilot/fi ... copilot/fi

| Author | SHA1 | Date |
|---|---|---|
| | 3093a9e5d0 | |
| | 3274723b1e | |
| | 5c724a03a6 | |
| | a95e776699 | |
| | be99f4df71 | |
| | 0f1a487bb0 | |
| | 2df8bb58df | |
| | 62976f6fe0 | |
| | 77529b3cd3 | |
| | c8e9a10190 | |
| | 0e011ff35f | |
| | 40a64a7c92 | |
| | dc9503ef8b | |
| | f2c8484c48 | |
| | a9c9224835 | |
| | 43223fd1f5 | |
| | 4bac843b37 | |
| | 34723934f4 | |
| | 096c36caf8 | |
| | 139950e193 | |
| | 31eec403f7 | |
| | 7fd4837a47 | |
| | 90b0c8b4a6 | |
| | 556353e910 | |
| | 11fb730b4d | |
| | 2511113b62 | |
| | a29b2bb3d6 | |
| | d2be450906 | |
@@ -1,26 +0,0 @@
diff --git a/dist/index.js b/dist/index.js
index ff305b112779b718f21a636a27b1196125a332d9..cf32ff5086d4d9e56f8fe90c98724559083bafc3 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -471,7 +471,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
diff --git a/dist/index.mjs b/dist/index.mjs
index 57659290f1cec74878a385626ad75b2a4d5cd3fc..d04e5927ec3725b6ffdb80868bfa1b5a48849537 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -477,7 +477,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
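The removed patch above (and its replacement below) changes how `getModelPath` decides whether a model ID is already a full resource path. A minimal sketch of the patched behaviour follows; the TypeScript annotations and the third example ID are illustrative only and are not part of the patch:

```ts
// Logic copied from the patched dist output above; types are illustrative.
function getModelPath(modelId: string): string {
  return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

getModelPath("gemini-2.5-pro");        // "models/gemini-2.5-pro"  (unchanged)
getModelPath("models/gemini-2.5-pro"); // "models/gemini-2.5-pro"  (unchanged)
// IDs that merely contain a "/" are now prefixed too; the old check
// (modelId.includes("/")) would have returned them untouched.
getModelPath("my-org/custom-gemini");  // "models/my-org/custom-gemini" (hypothetical ID)
```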
152  .yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch  vendored  Normal file
@@ -0,0 +1,152 @@
diff --git a/dist/index.js b/dist/index.js
index c2ef089c42e13a8ee4a833899a415564130e5d79..75efa7baafb0f019fb44dd50dec1641eee8879e7 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -471,7 +471,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
diff --git a/dist/index.mjs b/dist/index.mjs
index d75c0cc13c41192408c1f3f2d29d76a7bffa6268..ada730b8cb97d9b7d4cb32883a1d1ff416404d9b 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -477,7 +477,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {

// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

// src/google-generative-ai-options.ts
diff --git a/dist/internal/index.js b/dist/internal/index.js
index 277cac8dc734bea2fb4f3e9a225986b402b24f48..bb704cd79e602eb8b0cee1889e42497d59ccdb7a 100644
--- a/dist/internal/index.js
+++ b/dist/internal/index.js
@@ -432,7 +432,15 @@ function prepareTools({
var _a;
tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
const toolWarnings = [];
- const isGemini2 = modelId.includes("gemini-2");
+ // These changes could be safely removed when @ai-sdk/google v3 released.
+ const isLatest = (
+ [
+ 'gemini-flash-latest',
+ 'gemini-flash-lite-latest',
+ 'gemini-pro-latest',
+ ]
+ ).some(id => id === modelId);
+ const isGemini2OrNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || isLatest;
const supportsDynamicRetrieval = modelId.includes("gemini-1.5-flash") && !modelId.includes("-8b");
const supportsFileSearch = modelId.includes("gemini-2.5");
if (tools == null) {
@@ -458,7 +466,7 @@ function prepareTools({
providerDefinedTools.forEach((tool) => {
switch (tool.id) {
case "google.google_search":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ googleSearch: {} });
} else if (supportsDynamicRetrieval) {
googleTools2.push({
@@ -474,7 +482,7 @@ function prepareTools({
}
break;
case "google.url_context":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ urlContext: {} });
} else {
toolWarnings.push({
@@ -485,7 +493,7 @@ function prepareTools({
}
break;
case "google.code_execution":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ codeExecution: {} });
} else {
toolWarnings.push({
@@ -507,7 +515,7 @@ function prepareTools({
}
break;
case "google.vertex_rag_store":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({
retrieval: {
vertex_rag_store: {
diff --git a/dist/internal/index.mjs b/dist/internal/index.mjs
index 03b7cc591be9b58bcc2e775a96740d9f98862a10..347d2c12e1cee79f0f8bb258f3844fb0522a6485 100644
--- a/dist/internal/index.mjs
+++ b/dist/internal/index.mjs
@@ -424,7 +424,15 @@ function prepareTools({
var _a;
tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
const toolWarnings = [];
- const isGemini2 = modelId.includes("gemini-2");
+ // These changes could be safely removed when @ai-sdk/google v3 released.
+ const isLatest = (
+ [
+ 'gemini-flash-latest',
+ 'gemini-flash-lite-latest',
+ 'gemini-pro-latest',
+ ]
+ ).some(id => id === modelId);
+ const isGemini2OrNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || isLatest;
const supportsDynamicRetrieval = modelId.includes("gemini-1.5-flash") && !modelId.includes("-8b");
const supportsFileSearch = modelId.includes("gemini-2.5");
if (tools == null) {
@@ -450,7 +458,7 @@ function prepareTools({
providerDefinedTools.forEach((tool) => {
switch (tool.id) {
case "google.google_search":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ googleSearch: {} });
} else if (supportsDynamicRetrieval) {
googleTools2.push({
@@ -466,7 +474,7 @@ function prepareTools({
}
break;
case "google.url_context":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ urlContext: {} });
} else {
toolWarnings.push({
@@ -477,7 +485,7 @@ function prepareTools({
}
break;
case "google.code_execution":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({ codeExecution: {} });
} else {
toolWarnings.push({
@@ -499,7 +507,7 @@ function prepareTools({
}
break;
case "google.vertex_rag_store":
- if (isGemini2) {
+ if (isGemini2OrNewer) {
googleTools2.push({
retrieval: {
vertex_rag_store: {
@@ -1434,9 +1442,7 @@ var googleTools = {
vertexRagStore
};
export {
- GoogleGenerativeAILanguageModel,
getGroundingMetadataSchema,
- getUrlContextMetadataSchema,
- googleTools
+ getUrlContextMetadataSchema, GoogleGenerativeAILanguageModel, googleTools
};
//# sourceMappingURL=index.mjs.map
\ No newline at end of file
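The second half of the new patch widens the old `isGemini2` gate so that Gemini 3 IDs and the `-latest` aliases can use the provider-defined Google tools. A minimal sketch of the patched check; the logic is copied from the patch, while the types and example IDs are illustrative only:

```ts
// Mirrors the patched prepareTools() check; not part of the patch itself.
const LATEST_ALIASES = ['gemini-flash-latest', 'gemini-flash-lite-latest', 'gemini-pro-latest'];

function isGemini2OrNewer(modelId: string): boolean {
  const isLatest = LATEST_ALIASES.some((id) => id === modelId);
  return modelId.includes('gemini-2') || modelId.includes('gemini-3') || isLatest;
}

isGemini2OrNewer('gemini-2.5-flash');    // true  (as before)
isGemini2OrNewer('gemini-flash-latest'); // true  (newly allowed alias)
isGemini2OrNewer('gemini-3-pro');        // true  (hypothetical future ID, newly allowed)
isGemini2OrNewer('gemini-1.5-pro');      // false (still handled by the older branches)
```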
26  package.json
@@ -1,6 +1,6 @@
{
"name": "CherryStudio",
"version": "1.6.5",
"version": "1.7.0-rc.1",
"private": true,
"description": "A powerful AI assistant for producer.",
"main": "./out/main/index.js",
@@ -74,9 +74,10 @@
"format:check": "biome format && biome lint",
"prepare": "git config blame.ignoreRevsFile .git-blame-ignore-revs && husky",
"claude": "dotenv -e .env -- claude",
"release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
"release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
"release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core npm publish --access public"
"release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --preid alpha --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
"release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --preid beta --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
"release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --access public",
"release:ai-sdk-provider": "yarn workspace @cherrystudio/ai-sdk-provider version patch --immediate && yarn workspace @cherrystudio/ai-sdk-provider build && yarn workspace @cherrystudio/ai-sdk-provider npm publish --access public"
},
"dependencies": {
"@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.30#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.30-b50a299674.patch",
@@ -108,11 +109,14 @@
"@agentic/searxng": "^7.3.3",
"@agentic/tavily": "^7.3.3",
"@ai-sdk/amazon-bedrock": "^3.0.53",
"@ai-sdk/anthropic": "^2.0.44",
"@ai-sdk/cerebras": "^1.0.31",
"@ai-sdk/gateway": "^2.0.9",
"@ai-sdk/google-vertex": "^3.0.62",
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch",
"@ai-sdk/google-vertex": "^3.0.68",
"@ai-sdk/huggingface": "patch:@ai-sdk/huggingface@npm%3A0.0.8#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.8-d4d0aaac93.patch",
"@ai-sdk/mistral": "^2.0.23",
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
"@ai-sdk/perplexity": "^2.0.17",
"@ant-design/v5-patch-for-react-19": "^1.0.3",
"@anthropic-ai/sdk": "^0.41.0",
@@ -121,7 +125,7 @@
"@aws-sdk/client-bedrock-runtime": "^3.910.0",
"@aws-sdk/client-s3": "^3.910.0",
"@biomejs/biome": "2.2.4",
"@cherrystudio/ai-core": "workspace:^1.0.0-alpha.18",
"@cherrystudio/ai-core": "workspace:^1.0.9",
"@cherrystudio/embedjs": "^0.1.31",
"@cherrystudio/embedjs-libsql": "^0.1.31",
"@cherrystudio/embedjs-loader-csv": "^0.1.31",
@@ -135,7 +139,7 @@
"@cherrystudio/embedjs-ollama": "^0.1.31",
"@cherrystudio/embedjs-openai": "^0.1.31",
"@cherrystudio/extension-table-plus": "workspace:^",
"@cherrystudio/openai": "^6.5.0",
"@cherrystudio/openai": "^6.9.0",
"@dnd-kit/core": "^6.3.1",
"@dnd-kit/modifiers": "^9.0.0",
"@dnd-kit/sortable": "^10.0.0",
@@ -165,7 +169,7 @@
"@opentelemetry/sdk-trace-base": "^2.0.0",
"@opentelemetry/sdk-trace-node": "^2.0.0",
"@opentelemetry/sdk-trace-web": "^2.0.0",
"@opeoginni/github-copilot-openai-compatible": "0.1.19",
"@opeoginni/github-copilot-openai-compatible": "0.1.21",
"@playwright/test": "^1.52.0",
"@radix-ui/react-context-menu": "^2.2.16",
"@reduxjs/toolkit": "^2.2.5",
@@ -211,8 +215,8 @@
"@types/mime-types": "^3",
"@types/node": "^22.17.1",
"@types/pako": "^1.0.2",
"@types/react": "^19.0.12",
"@types/react-dom": "^19.0.4",
"@types/react": "^19.2.6",
"@types/react-dom": "^19.2.3",
"@types/react-infinite-scroll-component": "^5.0.0",
"@types/react-transition-group": "^4.4.12",
"@types/react-window": "^1",
@@ -408,7 +412,7 @@
"@langchain/openai@npm:>=0.2.0 <0.7.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
"@ai-sdk/openai@npm:2.0.64": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
"@ai-sdk/google@npm:2.0.31": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch"
"@ai-sdk/google@npm:2.0.36": "patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch"
},
"packageManager": "yarn@4.9.1",
"lint-staged": {
@@ -1,6 +1,6 @@
{
"name": "@cherrystudio/ai-sdk-provider",
"version": "0.1.0",
"version": "0.1.2",
"description": "Cherry Studio AI SDK provider bundle with CherryIN routing.",
"keywords": [
"ai-sdk",

@@ -71,7 +71,7 @@ Cherry Studio AI Core 是一个基于 Vercel AI SDK 的统一 AI Provider 接口
## 安装

```bash
npm install @cherrystudio/ai-core ai
npm install @cherrystudio/ai-core ai @ai-sdk/google @ai-sdk/openai
```

### React Native

@@ -1,6 +1,6 @@
{
"name": "@cherrystudio/ai-core",
"version": "1.0.1",
"version": "1.0.9",
"description": "Cherry Studio AI Core - Unified AI Provider Interface Based on Vercel AI SDK",
"main": "dist/index.js",
"module": "dist/index.mjs",
@@ -33,19 +33,19 @@
},
"homepage": "https://github.com/CherryHQ/cherry-studio#readme",
"peerDependencies": {
"@ai-sdk/google": "^2.0.36",
"@ai-sdk/openai": "^2.0.64",
"@cherrystudio/ai-sdk-provider": "^0.1.2",
"ai": "^5.0.26"
},
"dependencies": {
"@ai-sdk/anthropic": "^2.0.43",
"@ai-sdk/azure": "^2.0.66",
"@ai-sdk/deepseek": "^1.0.27",
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch",
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
"@ai-sdk/openai-compatible": "^1.0.26",
"@ai-sdk/provider": "^2.0.0",
"@ai-sdk/provider-utils": "^3.0.16",
"@ai-sdk/xai": "^2.0.31",
"@cherrystudio/ai-sdk-provider": "workspace:*",
"zod": "^4.1.5"
},
"devDependencies": {

@@ -4,12 +4,7 @@
*/
export const BUILT_IN_PLUGIN_PREFIX = 'built-in:'

export { googleToolsPlugin } from './googleToolsPlugin'
export { createLoggingPlugin } from './logging'
export { createPromptToolUsePlugin } from './toolUsePlugin/promptToolUsePlugin'
export type {
PromptToolUseConfig,
ToolUseRequestContext,
ToolUseResult
} from './toolUsePlugin/type'
export { webSearchPlugin, type WebSearchPluginConfig } from './webSearchPlugin'
export * from './googleToolsPlugin'
export * from './toolUsePlugin/promptToolUsePlugin'
export * from './toolUsePlugin/type'
export * from './webSearchPlugin'

@@ -32,7 +32,7 @@ export const webSearchPlugin = (config: WebSearchPluginConfig = DEFAULT_WEB_SEAR
})

// 导出类型定义供开发者使用
export type { WebSearchPluginConfig, WebSearchToolOutputSchema } from './helper'
export * from './helper'

// 默认导出
export default webSearchPlugin

@@ -44,7 +44,7 @@ export {
// ==================== 基础数据和类型 ====================

// 基础Provider数据源
export { baseProviderIds, baseProviders } from './schemas'
export { baseProviderIds, baseProviders, isBaseProvider } from './schemas'

// 类型定义和Schema
export type {

@@ -7,7 +7,6 @@ import { createAzure } from '@ai-sdk/azure'
import { type AzureOpenAIProviderSettings } from '@ai-sdk/azure'
import { createDeepSeek } from '@ai-sdk/deepseek'
import { createGoogleGenerativeAI } from '@ai-sdk/google'
import { createHuggingFace } from '@ai-sdk/huggingface'
import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai'
import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
import type { LanguageModelV2 } from '@ai-sdk/provider'
@@ -33,8 +32,7 @@ export const baseProviderIds = [
'deepseek',
'openrouter',
'cherryin',
'cherryin-chat',
'huggingface'
'cherryin-chat'
] as const

/**
@@ -158,12 +156,6 @@ export const baseProviders = [
})
},
supportsImageGeneration: true
},
{
id: 'huggingface',
name: 'HuggingFace',
creator: createHuggingFace,
supportsImageGeneration: true
}
] as const satisfies BaseProvider[]

@@ -41,6 +41,7 @@ export enum IpcChannel {
App_SetFullScreen = 'app:set-full-screen',
App_IsFullScreen = 'app:is-full-screen',
App_GetSystemFonts = 'app:get-system-fonts',
APP_CrashRenderProcess = 'app:crash-render-process',

App_MacIsProcessTrusted = 'app:mac-is-process-trusted',
App_MacRequestProcessTrust = 'app:mac-request-process-trust',
@@ -195,6 +196,9 @@ export enum IpcChannel {
File_ValidateNotesDirectory = 'file:validateNotesDirectory',
File_StartWatcher = 'file:startWatcher',
File_StopWatcher = 'file:stopWatcher',
File_PauseWatcher = 'file:pauseWatcher',
File_ResumeWatcher = 'file:resumeWatcher',
File_BatchUploadMarkdown = 'file:batchUploadMarkdown',
File_ShowInFolder = 'file:showInFolder',

// file service

@@ -199,7 +199,7 @@ export enum FeedUrl {

export enum UpdateConfigUrl {
GITHUB = 'https://raw.githubusercontent.com/CherryHQ/cherry-studio/refs/heads/x-files/app-upgrade-config/app-upgrade-config.json',
GITCODE = 'https://raw.gitcode.com/CherryHQ/cherry-studio/raw/x-files/app-upgrade-config/app-upgrade-config.json'
GITCODE = 'https://raw.gitcode.com/CherryHQ/cherry-studio/raw/x-files%2Fapp-upgrade-config/app-upgrade-config.json'
}

export enum UpgradeChannel {

@@ -10,7 +10,7 @@ export type LoaderReturn = {
messageSource?: 'preprocess' | 'embedding' | 'validation'
}

export type FileChangeEventType = 'add' | 'change' | 'unlink' | 'addDir' | 'unlinkDir'
export type FileChangeEventType = 'add' | 'change' | 'unlink' | 'addDir' | 'unlinkDir' | 'refresh'

export type FileChangeEvent = {
eventType: FileChangeEventType

@@ -8,7 +8,7 @@ import '@main/config'
import { loggerService } from '@logger'
import { electronApp, optimizer } from '@electron-toolkit/utils'
import { replaceDevtoolsFont } from '@main/utils/windowUtil'
import { app } from 'electron'
import { app, crashReporter } from 'electron'
import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer'
import { isDev, isLinux, isWin } from './constant'

@@ -37,6 +37,14 @@ import { initWebviewHotkeys } from './services/WebviewService'

const logger = loggerService.withContext('MainEntry')

// enable local crash reports
crashReporter.start({
companyName: 'CherryHQ',
productName: 'CherryStudio',
submitURL: '',
uploadToServer: false
})

/**
* Disable hardware acceleration if setting is enabled
*/

@@ -557,6 +557,9 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ipcMain.handle(IpcChannel.File_ValidateNotesDirectory, fileManager.validateNotesDirectory.bind(fileManager))
ipcMain.handle(IpcChannel.File_StartWatcher, fileManager.startFileWatcher.bind(fileManager))
ipcMain.handle(IpcChannel.File_StopWatcher, fileManager.stopFileWatcher.bind(fileManager))
ipcMain.handle(IpcChannel.File_PauseWatcher, fileManager.pauseFileWatcher.bind(fileManager))
ipcMain.handle(IpcChannel.File_ResumeWatcher, fileManager.resumeFileWatcher.bind(fileManager))
ipcMain.handle(IpcChannel.File_BatchUploadMarkdown, fileManager.batchUploadMarkdownFiles.bind(fileManager))
ipcMain.handle(IpcChannel.File_ShowInFolder, fileManager.showInFolder.bind(fileManager))

// file service
@@ -1038,4 +1041,8 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ipcMain.handle(IpcChannel.WebSocket_Status, WebSocketService.getStatus)
ipcMain.handle(IpcChannel.WebSocket_SendFile, WebSocketService.sendFile)
ipcMain.handle(IpcChannel.WebSocket_GetAllCandidates, WebSocketService.getAllCandidates)

ipcMain.handle(IpcChannel.APP_CrashRenderProcess, () => {
mainWindow.webContents.forcefullyCrashRenderer()
})
}
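The new watcher and batch-upload channels registered above are exposed to the renderer later in this diff (see the preload changes). A rough renderer-side usage sketch follows; the `window.api` name is an assumption for illustration and is not shown in this diff:

```ts
// Hypothetical renderer-side flow built on the channels added above.
async function importMarkdownNotes(filePaths: string[], targetPath: string) {
  // Pause the watcher so the batch write does not flood change events.
  await window.api.pauseFileWatcher()
  try {
    // The main process filters to .md/.markdown, creates folders, and copies files.
    const { fileCount, folderCount, skippedFiles } = await window.api.batchUploadMarkdown(filePaths, targetPath)
    console.log(`Imported ${fileCount} files into ${folderCount} folders, skipped ${skippedFiles}`)
  } finally {
    // Resuming emits a synthetic 'refresh' event so the notes tree reloads.
    await window.api.resumeFileWatcher()
  }
}
```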
@@ -21,6 +21,7 @@ type ApiResponse<T> = {
|
||||
type BatchUploadResponse = {
|
||||
batch_id: string
|
||||
file_urls: string[]
|
||||
headers?: Record<string, string>[]
|
||||
}
|
||||
|
||||
type ExtractProgress = {
|
||||
@@ -55,7 +56,7 @@ type QuotaResponse = {
|
||||
export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
constructor(provider: PreprocessProvider, userId?: string) {
|
||||
super(provider, userId)
|
||||
// todo:免费期结束后删除
|
||||
// TODO: remove after free period ends
|
||||
this.provider.apiKey = this.provider.apiKey || import.meta.env.MAIN_VITE_MINERU_API_KEY
|
||||
}
|
||||
|
||||
@@ -68,21 +69,21 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
logger.info(`MinerU preprocess processing started: ${filePath}`)
|
||||
await this.validateFile(filePath)
|
||||
|
||||
// 1. 获取上传URL并上传文件
|
||||
// 1. Get upload URL and upload file
|
||||
const batchId = await this.uploadFile(file)
|
||||
logger.info(`MinerU file upload completed: batch_id=${batchId}`)
|
||||
|
||||
// 2. 等待处理完成并获取结果
|
||||
// 2. Wait for completion and fetch results
|
||||
const extractResult = await this.waitForCompletion(sourceId, batchId, file.origin_name)
|
||||
logger.info(`MinerU processing completed for batch: ${batchId}`)
|
||||
|
||||
// 3. 下载并解压文件
|
||||
// 3. Download and extract output
|
||||
const { path: outputPath } = await this.downloadAndExtractFile(extractResult.full_zip_url!, file)
|
||||
|
||||
// 4. check quota
|
||||
const quota = await this.checkQuota()
|
||||
|
||||
// 5. 创建处理后的文件信息
|
||||
// 5. Create processed file metadata
|
||||
return {
|
||||
processedFile: this.createProcessedFileInfo(file, outputPath),
|
||||
quota
|
||||
@@ -115,23 +116,48 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
}
|
||||
|
||||
private async validateFile(filePath: string): Promise<void> {
|
||||
// Phase 1: check file size (without loading into memory)
|
||||
logger.info(`Validating PDF file: ${filePath}`)
|
||||
const stats = await fs.promises.stat(filePath)
|
||||
const fileSizeBytes = stats.size
|
||||
|
||||
// Ensure file size is under 200MB
|
||||
if (fileSizeBytes >= 200 * 1024 * 1024) {
|
||||
const fileSizeMB = Math.round(fileSizeBytes / (1024 * 1024))
|
||||
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 200MB`)
|
||||
}
|
||||
|
||||
// Phase 2: check page count (requires reading file with error handling)
|
||||
const pdfBuffer = await fs.promises.readFile(filePath)
|
||||
|
||||
const doc = await this.readPdf(pdfBuffer)
|
||||
try {
|
||||
const doc = await this.readPdf(pdfBuffer)
|
||||
|
||||
// 文件页数小于600页
|
||||
if (doc.numPages >= 600) {
|
||||
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 600 pages`)
|
||||
}
|
||||
// 文件大小小于200MB
|
||||
if (pdfBuffer.length >= 200 * 1024 * 1024) {
|
||||
const fileSizeMB = Math.round(pdfBuffer.length / (1024 * 1024))
|
||||
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 200MB`)
|
||||
// Ensure page count is under 600 pages
|
||||
if (doc.numPages >= 600) {
|
||||
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 600 pages`)
|
||||
}
|
||||
|
||||
logger.info(`PDF validation passed: ${doc.numPages} pages, ${Math.round(fileSizeBytes / (1024 * 1024))}MB`)
|
||||
} catch (error: any) {
|
||||
// If the page limit is exceeded, rethrow immediately
|
||||
if (error.message.includes('exceeds the limit')) {
|
||||
throw error
|
||||
}
|
||||
|
||||
// If PDF parsing fails, log a detailed warning but continue processing
|
||||
logger.warn(
|
||||
`Failed to parse PDF structure (file may be corrupted or use non-standard format). ` +
|
||||
`Skipping page count validation. Will attempt to process with MinerU API. ` +
|
||||
`Error details: ${error.message}. ` +
|
||||
`Suggestion: If processing fails, try repairing the PDF using tools like Adobe Acrobat or online PDF repair services.`
|
||||
)
|
||||
// Do not throw; continue processing
|
||||
}
|
||||
}
|
||||
|
||||
private createProcessedFileInfo(file: FileMetadata, outputPath: string): FileMetadata {
|
||||
// 查找解压后的主要文件
|
||||
// Locate the main extracted file
|
||||
let finalPath = ''
|
||||
let finalName = file.origin_name.replace('.pdf', '.md')
|
||||
|
||||
@@ -143,14 +169,14 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
const originalMdPath = path.join(outputPath, mdFile)
|
||||
const newMdPath = path.join(outputPath, finalName)
|
||||
|
||||
// 重命名文件为原始文件名
|
||||
// Rename the file to match the original name
|
||||
try {
|
||||
fs.renameSync(originalMdPath, newMdPath)
|
||||
finalPath = newMdPath
|
||||
logger.info(`Renamed markdown file from ${mdFile} to ${finalName}`)
|
||||
} catch (renameError) {
|
||||
logger.warn(`Failed to rename file ${mdFile} to ${finalName}: ${renameError}`)
|
||||
// 如果重命名失败,使用原文件
|
||||
// If renaming fails, fall back to the original file
|
||||
finalPath = originalMdPath
|
||||
finalName = mdFile
|
||||
}
|
||||
@@ -178,7 +204,7 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
logger.info(`Downloading MinerU result to: ${zipPath}`)
|
||||
|
||||
try {
|
||||
// 下载ZIP文件
|
||||
// Download the ZIP file
|
||||
const response = await net.fetch(zipUrl, { method: 'GET' })
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP ${response.status}: ${response.statusText}`)
|
||||
@@ -187,17 +213,17 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
fs.writeFileSync(zipPath, Buffer.from(arrayBuffer))
|
||||
logger.info(`Downloaded ZIP file: ${zipPath}`)
|
||||
|
||||
// 确保提取目录存在
|
||||
// Ensure the extraction directory exists
|
||||
if (!fs.existsSync(extractPath)) {
|
||||
fs.mkdirSync(extractPath, { recursive: true })
|
||||
}
|
||||
|
||||
// 解压文件
|
||||
// Extract the ZIP contents
|
||||
const zip = new AdmZip(zipPath)
|
||||
zip.extractAllTo(extractPath, true)
|
||||
logger.info(`Extracted files to: ${extractPath}`)
|
||||
|
||||
// 删除临时ZIP文件
|
||||
// Remove the temporary ZIP file
|
||||
fs.unlinkSync(zipPath)
|
||||
|
||||
return { path: extractPath }
|
||||
@@ -209,11 +235,11 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
|
||||
private async uploadFile(file: FileMetadata): Promise<string> {
|
||||
try {
|
||||
// 步骤1: 获取上传URL
|
||||
const { batchId, fileUrls } = await this.getBatchUploadUrls(file)
|
||||
// 步骤2: 上传文件到获取的URL
|
||||
// Step 1: obtain the upload URL
|
||||
const { batchId, fileUrls, uploadHeaders } = await this.getBatchUploadUrls(file)
|
||||
// Step 2: upload the file to the obtained URL
|
||||
const filePath = fileStorage.getFilePathById(file)
|
||||
await this.putFileToUrl(filePath, fileUrls[0])
|
||||
await this.putFileToUrl(filePath, fileUrls[0], file.origin_name, uploadHeaders?.[0])
|
||||
logger.info(`File uploaded successfully: ${filePath}`, { batchId, fileUrls })
|
||||
|
||||
return batchId
|
||||
@@ -223,7 +249,9 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
}
|
||||
}
|
||||
|
||||
private async getBatchUploadUrls(file: FileMetadata): Promise<{ batchId: string; fileUrls: string[] }> {
|
||||
private async getBatchUploadUrls(
|
||||
file: FileMetadata
|
||||
): Promise<{ batchId: string; fileUrls: string[]; uploadHeaders?: Record<string, string>[] }> {
|
||||
const endpoint = `${this.provider.apiHost}/api/v4/file-urls/batch`
|
||||
|
||||
const payload = {
|
||||
@@ -254,10 +282,11 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
if (response.ok) {
|
||||
const data: ApiResponse<BatchUploadResponse> = await response.json()
|
||||
if (data.code === 0 && data.data) {
|
||||
const { batch_id, file_urls } = data.data
|
||||
const { batch_id, file_urls, headers: uploadHeaders } = data.data
|
||||
return {
|
||||
batchId: batch_id,
|
||||
fileUrls: file_urls
|
||||
fileUrls: file_urls,
|
||||
uploadHeaders
|
||||
}
|
||||
} else {
|
||||
throw new Error(`API returned error: ${data.msg || JSON.stringify(data)}`)
|
||||
@@ -271,18 +300,28 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
}
|
||||
}
|
||||
|
||||
private async putFileToUrl(filePath: string, uploadUrl: string): Promise<void> {
|
||||
private async putFileToUrl(
|
||||
filePath: string,
|
||||
uploadUrl: string,
|
||||
fileName?: string,
|
||||
headers?: Record<string, string>
|
||||
): Promise<void> {
|
||||
try {
|
||||
const fileBuffer = await fs.promises.readFile(filePath)
|
||||
const fileSize = fileBuffer.byteLength
|
||||
const displayName = fileName ?? path.basename(filePath)
|
||||
|
||||
logger.info(`Uploading file to MinerU OSS: ${displayName} (${fileSize} bytes)`)
|
||||
|
||||
// https://mineru.net/apiManage/docs
|
||||
const response = await net.fetch(uploadUrl, {
|
||||
method: 'PUT',
|
||||
body: fileBuffer
|
||||
headers,
|
||||
body: new Uint8Array(fileBuffer)
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
// 克隆 response 以避免消费 body stream
|
||||
// Clone the response to avoid consuming the body stream
|
||||
const responseClone = response.clone()
|
||||
|
||||
try {
|
||||
@@ -353,20 +392,20 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
try {
|
||||
const result = await this.getExtractResults(batchId)
|
||||
|
||||
// 查找对应文件的处理结果
|
||||
// Find the corresponding file result
|
||||
const fileResult = result.extract_result.find((item) => item.file_name === fileName)
|
||||
if (!fileResult) {
|
||||
throw new Error(`File ${fileName} not found in batch results`)
|
||||
}
|
||||
|
||||
// 检查处理状态
|
||||
// Check the processing state
|
||||
if (fileResult.state === 'done' && fileResult.full_zip_url) {
|
||||
logger.info(`Processing completed for file: ${fileName}`)
|
||||
return fileResult
|
||||
} else if (fileResult.state === 'failed') {
|
||||
throw new Error(`Processing failed for file: ${fileName}, error: ${fileResult.err_msg}`)
|
||||
} else if (fileResult.state === 'running') {
|
||||
// 发送进度更新
|
||||
// Send progress updates
|
||||
if (fileResult.extract_progress) {
|
||||
const progress = Math.round(
|
||||
(fileResult.extract_progress.extracted_pages / fileResult.extract_progress.total_pages) * 100
|
||||
@@ -374,7 +413,7 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
await this.sendPreprocessProgress(sourceId, progress)
|
||||
logger.info(`File ${fileName} processing progress: ${progress}%`)
|
||||
} else {
|
||||
// 如果没有具体进度信息,发送一个通用进度
|
||||
// If no detailed progress information is available, send a generic update
|
||||
await this.sendPreprocessProgress(sourceId, 50)
|
||||
logger.info(`File ${fileName} is still processing...`)
|
||||
}
|
||||
|
||||
@@ -53,18 +53,43 @@ export default class OpenMineruPreprocessProvider extends BasePreprocessProvider
|
||||
}
|
||||
|
||||
private async validateFile(filePath: string): Promise<void> {
|
||||
// 第一阶段:检查文件大小(无需读取文件到内存)
|
||||
logger.info(`Validating PDF file: ${filePath}`)
|
||||
const stats = await fs.promises.stat(filePath)
|
||||
const fileSizeBytes = stats.size
|
||||
|
||||
// File size must be less than 200MB
|
||||
if (fileSizeBytes >= 200 * 1024 * 1024) {
|
||||
const fileSizeMB = Math.round(fileSizeBytes / (1024 * 1024))
|
||||
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 200MB`)
|
||||
}
|
||||
|
||||
// 第二阶段:检查页数(需要读取文件,带错误处理)
|
||||
const pdfBuffer = await fs.promises.readFile(filePath)
|
||||
|
||||
const doc = await this.readPdf(pdfBuffer)
|
||||
try {
|
||||
const doc = await this.readPdf(pdfBuffer)
|
||||
|
||||
// File page count must be less than 600 pages
|
||||
if (doc.numPages >= 600) {
|
||||
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 600 pages`)
|
||||
}
|
||||
// File size must be less than 200MB
|
||||
if (pdfBuffer.length >= 200 * 1024 * 1024) {
|
||||
const fileSizeMB = Math.round(pdfBuffer.length / (1024 * 1024))
|
||||
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 200MB`)
|
||||
// File page count must be less than 600 pages
|
||||
if (doc.numPages >= 600) {
|
||||
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 600 pages`)
|
||||
}
|
||||
|
||||
logger.info(`PDF validation passed: ${doc.numPages} pages, ${Math.round(fileSizeBytes / (1024 * 1024))}MB`)
|
||||
} catch (error: any) {
|
||||
// 如果是页数超限错误,直接抛出
|
||||
if (error.message.includes('exceeds the limit')) {
|
||||
throw error
|
||||
}
|
||||
|
||||
// PDF 解析失败,记录详细警告但允许继续处理
|
||||
logger.warn(
|
||||
`Failed to parse PDF structure (file may be corrupted or use non-standard format). ` +
|
||||
`Skipping page count validation. Will attempt to process with MinerU API. ` +
|
||||
`Error details: ${error.message}. ` +
|
||||
`Suggestion: If processing fails, try repairing the PDF using tools like Adobe Acrobat or online PDF repair services.`
|
||||
)
|
||||
// 不抛出错误,允许继续处理
|
||||
}
|
||||
}
|
||||
|
||||
@@ -72,8 +97,8 @@ export default class OpenMineruPreprocessProvider extends BasePreprocessProvider
|
||||
// Find the main file after extraction
|
||||
let finalPath = ''
|
||||
let finalName = file.origin_name.replace('.pdf', '.md')
|
||||
// Find the corresponding folder by file name
|
||||
outputPath = path.join(outputPath, `${file.origin_name.replace('.pdf', '')}`)
|
||||
// Find the corresponding folder by file id
|
||||
outputPath = path.join(outputPath, file.id)
|
||||
try {
|
||||
const files = fs.readdirSync(outputPath)
|
||||
|
||||
@@ -125,7 +150,7 @@ export default class OpenMineruPreprocessProvider extends BasePreprocessProvider
|
||||
formData.append('return_md', 'true')
|
||||
formData.append('response_format_zip', 'true')
|
||||
formData.append('files', fileBuffer, {
|
||||
filename: file.origin_name
|
||||
filename: file.name
|
||||
})
|
||||
|
||||
while (retries < maxRetries) {
|
||||
@@ -139,7 +164,7 @@ export default class OpenMineruPreprocessProvider extends BasePreprocessProvider
|
||||
...(this.provider.apiKey ? { Authorization: `Bearer ${this.provider.apiKey}` } : {}),
|
||||
...formData.getHeaders()
|
||||
},
|
||||
body: formData.getBuffer()
|
||||
body: new Uint8Array(formData.getBuffer())
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
|
||||
@@ -1605,6 +1605,164 @@ class FileStorage {
|
||||
logger.error('Failed to show item in folder:', error as Error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch upload markdown files from native File objects
|
||||
* This handles all I/O operations in the Main process to avoid blocking Renderer
|
||||
*/
|
||||
public batchUploadMarkdownFiles = async (
|
||||
_: Electron.IpcMainInvokeEvent,
|
||||
filePaths: string[],
|
||||
targetPath: string
|
||||
): Promise<{
|
||||
fileCount: number
|
||||
folderCount: number
|
||||
skippedFiles: number
|
||||
}> => {
|
||||
try {
|
||||
logger.info('Starting batch upload', { fileCount: filePaths.length, targetPath })
|
||||
|
||||
const basePath = path.resolve(targetPath)
|
||||
const MARKDOWN_EXTS = ['.md', '.markdown']
|
||||
|
||||
// Filter markdown files
|
||||
const markdownFiles = filePaths.filter((filePath) => {
|
||||
const ext = path.extname(filePath).toLowerCase()
|
||||
return MARKDOWN_EXTS.includes(ext)
|
||||
})
|
||||
|
||||
const skippedFiles = filePaths.length - markdownFiles.length
|
||||
|
||||
if (markdownFiles.length === 0) {
|
||||
return { fileCount: 0, folderCount: 0, skippedFiles }
|
||||
}
|
||||
|
||||
// Collect unique folders needed
|
||||
const foldersSet = new Set<string>()
|
||||
const fileOperations: Array<{ sourcePath: string; targetPath: string }> = []
|
||||
|
||||
for (const filePath of markdownFiles) {
|
||||
try {
|
||||
// Get relative path if file is from a directory upload
|
||||
const fileName = path.basename(filePath)
|
||||
const relativePath = path.dirname(filePath)
|
||||
|
||||
// Determine target directory structure
|
||||
let targetDir = basePath
|
||||
const folderParts: string[] = []
|
||||
|
||||
// Extract folder structure from file path for nested uploads
|
||||
// This is a simplified version - in real scenario we'd need the original directory structure
|
||||
if (relativePath && relativePath !== '.') {
|
||||
const parts = relativePath.split(path.sep)
|
||||
// Get the last few parts that represent the folder structure within upload
|
||||
const relevantParts = parts.slice(Math.max(0, parts.length - 3))
|
||||
folderParts.push(...relevantParts)
|
||||
}
|
||||
|
||||
// Build target directory path
|
||||
for (const part of folderParts) {
|
||||
targetDir = path.join(targetDir, part)
|
||||
foldersSet.add(targetDir)
|
||||
}
|
||||
|
||||
// Determine final file name
|
||||
const nameWithoutExt = fileName.endsWith('.md')
|
||||
? fileName.slice(0, -3)
|
||||
: fileName.endsWith('.markdown')
|
||||
? fileName.slice(0, -9)
|
||||
: fileName
|
||||
|
||||
const { safeName } = await this.fileNameGuard(_, targetDir, nameWithoutExt, true)
|
||||
const finalPath = path.join(targetDir, safeName + '.md')
|
||||
|
||||
fileOperations.push({ sourcePath: filePath, targetPath: finalPath })
|
||||
} catch (error) {
|
||||
logger.error('Failed to prepare file operation:', error as Error, { filePath })
|
||||
}
|
||||
}
|
||||
|
||||
// Create folders in order (shallow to deep)
|
||||
const sortedFolders = Array.from(foldersSet).sort((a, b) => a.length - b.length)
|
||||
for (const folder of sortedFolders) {
|
||||
try {
|
||||
if (!fs.existsSync(folder)) {
|
||||
await fs.promises.mkdir(folder, { recursive: true })
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug('Folder already exists or creation failed', { folder, error: (error as Error).message })
|
||||
}
|
||||
}
|
||||
|
||||
// Process files in batches
|
||||
const BATCH_SIZE = 10 // Higher batch size since we're in Main process
|
||||
let successCount = 0
|
||||
|
||||
for (let i = 0; i < fileOperations.length; i += BATCH_SIZE) {
|
||||
const batch = fileOperations.slice(i, i + BATCH_SIZE)
|
||||
|
||||
const results = await Promise.allSettled(
|
||||
batch.map(async (op) => {
|
||||
// Read from source and write to target in Main process
|
||||
const content = await fs.promises.readFile(op.sourcePath, 'utf-8')
|
||||
await fs.promises.writeFile(op.targetPath, content, 'utf-8')
|
||||
return true
|
||||
})
|
||||
)
|
||||
|
||||
results.forEach((result, index) => {
|
||||
if (result.status === 'fulfilled') {
|
||||
successCount++
|
||||
} else {
|
||||
logger.error('Failed to upload file:', result.reason, {
|
||||
file: batch[index].sourcePath
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
logger.info('Batch upload completed', {
|
||||
successCount,
|
||||
folderCount: foldersSet.size,
|
||||
skippedFiles
|
||||
})
|
||||
|
||||
return {
|
||||
fileCount: successCount,
|
||||
folderCount: foldersSet.size,
|
||||
skippedFiles
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Batch upload failed:', error as Error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pause file watcher to prevent events during batch operations
|
||||
*/
|
||||
public pauseFileWatcher = async (): Promise<void> => {
|
||||
if (this.watcher) {
|
||||
logger.debug('Pausing file watcher')
|
||||
// Chokidar doesn't have pause, so we temporarily set a flag
|
||||
// We'll handle this by clearing the debounce timer
|
||||
if (this.debounceTimer) {
|
||||
clearTimeout(this.debounceTimer)
|
||||
this.debounceTimer = undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resume file watcher and trigger a refresh
|
||||
*/
|
||||
public resumeFileWatcher = async (): Promise<void> => {
|
||||
if (this.watcher && this.currentWatchPath) {
|
||||
logger.debug('Resuming file watcher')
|
||||
// Send a synthetic refresh event to trigger tree reload
|
||||
this.notifyChange('refresh', this.currentWatchPath)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const fileStorage = new FileStorage()
|
||||
|
||||
@@ -25,7 +25,7 @@ describe('stripLocalCommandTags', () => {
|
||||
|
||||
describe('Claude → AiSDK transform', () => {
|
||||
it('handles tool call streaming lifecycle', () => {
|
||||
const state = new ClaudeStreamState()
|
||||
const state = new ClaudeStreamState({ agentSessionId: baseStreamMetadata.session_id })
|
||||
const parts: ReturnType<typeof transformSDKMessageToStreamParts>[number][] = []
|
||||
|
||||
const messages: SDKMessage[] = [
|
||||
@@ -182,14 +182,14 @@ describe('Claude → AiSDK transform', () => {
|
||||
(typeof parts)[number],
|
||||
{ type: 'tool-result' }
|
||||
>
|
||||
expect(toolResult.toolCallId).toBe('tool-1')
|
||||
expect(toolResult.toolCallId).toBe('session-123:tool-1')
|
||||
expect(toolResult.toolName).toBe('Bash')
|
||||
expect(toolResult.input).toEqual({ command: 'ls' })
|
||||
expect(toolResult.output).toBe('ok')
|
||||
})
|
||||
|
||||
it('handles streaming text completion', () => {
|
||||
const state = new ClaudeStreamState()
|
||||
const state = new ClaudeStreamState({ agentSessionId: baseStreamMetadata.session_id })
|
||||
const parts: ReturnType<typeof transformSDKMessageToStreamParts>[number][] = []
|
||||
|
||||
const messages: SDKMessage[] = [
|
||||
|
||||
@@ -10,8 +10,21 @@
|
||||
* Every Claude turn gets its own instance. `resetStep` should be invoked once the finish event has
|
||||
* been emitted to avoid leaking state into the next turn.
|
||||
*/
|
||||
import { loggerService } from '@logger'
|
||||
import type { FinishReason, LanguageModelUsage, ProviderMetadata } from 'ai'
|
||||
|
||||
/**
|
||||
* Builds a namespaced tool call ID by combining session ID with raw tool call ID.
|
||||
* This ensures tool calls from different sessions don't conflict even if they have
|
||||
* the same raw ID from the SDK.
|
||||
*
|
||||
* @param sessionId - The agent session ID
|
||||
* @param rawToolCallId - The raw tool call ID from SDK (e.g., "WebFetch_0")
|
||||
*/
|
||||
export function buildNamespacedToolCallId(sessionId: string, rawToolCallId: string): string {
|
||||
return `${sessionId}:${rawToolCallId}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Shared fields for every block that Claude can stream (text, reasoning, tool).
|
||||
*/
|
||||
@@ -34,6 +47,7 @@ type ReasoningBlockState = BaseBlockState & {
|
||||
type ToolBlockState = BaseBlockState & {
|
||||
kind: 'tool'
|
||||
toolCallId: string
|
||||
rawToolCallId: string
|
||||
toolName: string
|
||||
inputBuffer: string
|
||||
providerMetadata?: ProviderMetadata
|
||||
@@ -48,12 +62,17 @@ type PendingUsageState = {
|
||||
}
|
||||
|
||||
type PendingToolCall = {
|
||||
rawToolCallId: string
|
||||
toolCallId: string
|
||||
toolName: string
|
||||
input: unknown
|
||||
providerMetadata?: ProviderMetadata
|
||||
}
|
||||
|
||||
type ClaudeStreamStateOptions = {
|
||||
agentSessionId: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Tracks the lifecycle of Claude streaming blocks (text, thinking, tool calls)
|
||||
* across individual websocket events. The transformer relies on this class to
|
||||
@@ -61,12 +80,20 @@ type PendingToolCall = {
|
||||
* usage/finish metadata once Anthropic closes a message.
|
||||
*/
|
||||
export class ClaudeStreamState {
|
||||
private logger
|
||||
private readonly agentSessionId: string
|
||||
private blocksByIndex = new Map<number, BlockState>()
|
||||
private toolIndexById = new Map<string, number>()
|
||||
private toolIndexByNamespacedId = new Map<string, number>()
|
||||
private pendingUsage: PendingUsageState = {}
|
||||
private pendingToolCalls = new Map<string, PendingToolCall>()
|
||||
private stepActive = false
|
||||
|
||||
constructor(options: ClaudeStreamStateOptions) {
|
||||
this.logger = loggerService.withContext('ClaudeStreamState')
|
||||
this.agentSessionId = options.agentSessionId
|
||||
this.logger.silly('ClaudeStreamState', options)
|
||||
}
|
||||
|
||||
/** Marks the beginning of a new AiSDK step. */
|
||||
beginStep(): void {
|
||||
this.stepActive = true
|
||||
@@ -104,19 +131,21 @@ export class ClaudeStreamState {
|
||||
/** Caches tool metadata so subsequent input deltas and results can find it. */
|
||||
openToolBlock(
|
||||
index: number,
|
||||
params: { toolCallId: string; toolName: string; providerMetadata?: ProviderMetadata }
|
||||
params: { rawToolCallId: string; toolName: string; providerMetadata?: ProviderMetadata }
|
||||
): ToolBlockState {
|
||||
const toolCallId = buildNamespacedToolCallId(this.agentSessionId, params.rawToolCallId)
|
||||
const block: ToolBlockState = {
|
||||
kind: 'tool',
|
||||
id: params.toolCallId,
|
||||
id: toolCallId,
|
||||
index,
|
||||
toolCallId: params.toolCallId,
|
||||
toolCallId,
|
||||
rawToolCallId: params.rawToolCallId,
|
||||
toolName: params.toolName,
|
||||
inputBuffer: '',
|
||||
providerMetadata: params.providerMetadata
|
||||
}
|
||||
this.blocksByIndex.set(index, block)
|
||||
this.toolIndexById.set(params.toolCallId, index)
|
||||
this.toolIndexByNamespacedId.set(toolCallId, index)
|
||||
return block
|
||||
}
|
||||
|
||||
@@ -125,13 +154,17 @@ export class ClaudeStreamState {
|
||||
}
|
||||
|
||||
getToolBlockById(toolCallId: string): ToolBlockState | undefined {
|
||||
const index = this.toolIndexById.get(toolCallId)
|
||||
const index = this.toolIndexByNamespacedId.get(toolCallId)
|
||||
if (index === undefined) return undefined
|
||||
const block = this.blocksByIndex.get(index)
|
||||
if (!block || block.kind !== 'tool') return undefined
|
||||
return block
|
||||
}
|
||||
|
||||
getToolBlockByRawId(rawToolCallId: string): ToolBlockState | undefined {
|
||||
return this.getToolBlockById(buildNamespacedToolCallId(this.agentSessionId, rawToolCallId))
|
||||
}
|
||||
|
||||
/** Appends streamed text to a text block, returning the updated state when present. */
|
||||
appendTextDelta(index: number, text: string): TextBlockState | undefined {
|
||||
const block = this.blocksByIndex.get(index)
|
||||
@@ -158,10 +191,12 @@ export class ClaudeStreamState {
|
||||
|
||||
/** Records a tool call to be consumed once its result arrives from the user. */
|
||||
registerToolCall(
|
||||
toolCallId: string,
|
||||
rawToolCallId: string,
|
||||
payload: { toolName: string; input: unknown; providerMetadata?: ProviderMetadata }
|
||||
): void {
|
||||
this.pendingToolCalls.set(toolCallId, {
|
||||
const toolCallId = buildNamespacedToolCallId(this.agentSessionId, rawToolCallId)
|
||||
this.pendingToolCalls.set(rawToolCallId, {
|
||||
rawToolCallId,
|
||||
toolCallId,
|
||||
toolName: payload.toolName,
|
||||
input: payload.input,
|
||||
@@ -170,10 +205,10 @@ export class ClaudeStreamState {
|
||||
}
|
||||
|
||||
/** Retrieves and clears the buffered tool call metadata for the given id. */
|
||||
consumePendingToolCall(toolCallId: string): PendingToolCall | undefined {
|
||||
const entry = this.pendingToolCalls.get(toolCallId)
|
||||
consumePendingToolCall(rawToolCallId: string): PendingToolCall | undefined {
|
||||
const entry = this.pendingToolCalls.get(rawToolCallId)
|
||||
if (entry) {
|
||||
this.pendingToolCalls.delete(toolCallId)
|
||||
this.pendingToolCalls.delete(rawToolCallId)
|
||||
}
|
||||
return entry
|
||||
}
|
||||
@@ -183,12 +218,12 @@ export class ClaudeStreamState {
|
||||
* completion so that downstream tool results can reference the original call.
|
||||
*/
|
||||
completeToolBlock(toolCallId: string, input: unknown, providerMetadata?: ProviderMetadata): void {
|
||||
const block = this.getToolBlockByRawId(toolCallId)
|
||||
this.registerToolCall(toolCallId, {
|
||||
toolName: this.getToolBlockById(toolCallId)?.toolName ?? 'unknown',
|
||||
toolName: block?.toolName ?? 'unknown',
|
||||
input,
|
||||
providerMetadata
|
||||
})
|
||||
const block = this.getToolBlockById(toolCallId)
|
||||
if (block) {
|
||||
block.resolvedInput = input
|
||||
}
|
||||
@@ -200,7 +235,7 @@ export class ClaudeStreamState {
|
||||
if (!block) return undefined
|
||||
this.blocksByIndex.delete(index)
|
||||
if (block.kind === 'tool') {
|
||||
this.toolIndexById.delete(block.toolCallId)
|
||||
this.toolIndexByNamespacedId.delete(block.toolCallId)
|
||||
}
|
||||
return block
|
||||
}
|
||||
@@ -227,7 +262,7 @@ export class ClaudeStreamState {
|
||||
/** Drops cached block metadata for the currently active message. */
|
||||
resetBlocks(): void {
|
||||
this.blocksByIndex.clear()
|
||||
this.toolIndexById.clear()
|
||||
this.toolIndexByNamespacedId.clear()
|
||||
}
|
||||
|
||||
/** Resets the entire step lifecycle after emitting a terminal frame. */
|
||||
@@ -236,6 +271,10 @@ export class ClaudeStreamState {
|
||||
this.resetPendingUsage()
|
||||
this.stepActive = false
|
||||
}
|
||||
|
||||
getNamespacedToolCallId(rawToolCallId: string): string {
|
||||
return buildNamespacedToolCallId(this.agentSessionId, rawToolCallId)
|
||||
}
|
||||
}
|
||||
|
||||
export type { PendingToolCall }
|
||||
|
||||
@@ -13,6 +13,7 @@ import { app } from 'electron'
|
||||
import type { GetAgentSessionResponse } from '../..'
|
||||
import type { AgentServiceInterface, AgentStream, AgentStreamEvent } from '../../interfaces/AgentStreamInterface'
|
||||
import { sessionService } from '../SessionService'
|
||||
import { buildNamespacedToolCallId } from './claude-stream-state'
|
||||
import { promptForToolApproval } from './tool-permissions'
|
||||
import { ClaudeStreamState, transformSDKMessageToStreamParts } from './transform'
|
||||
|
||||
@@ -150,7 +151,10 @@ class ClaudeCodeService implements AgentServiceInterface {
|
||||
return { behavior: 'allow', updatedInput: input }
|
||||
}
|
||||
|
||||
return promptForToolApproval(toolName, input, options)
|
||||
return promptForToolApproval(toolName, input, {
|
||||
...options,
|
||||
toolCallId: buildNamespacedToolCallId(session.id, options.toolUseID)
|
||||
})
|
||||
}
|
||||
|
||||
// Build SDK options from parameters
|
||||
@@ -346,7 +350,7 @@ class ClaudeCodeService implements AgentServiceInterface {
|
||||
const jsonOutput: SDKMessage[] = []
|
||||
let hasCompleted = false
|
||||
const startTime = Date.now()
|
||||
const streamState = new ClaudeStreamState()
|
||||
const streamState = new ClaudeStreamState({ agentSessionId: sessionId })
|
||||
|
||||
try {
|
||||
for await (const message of query({ prompt: promptStream, options })) {
|
||||
|
||||
@@ -37,6 +37,7 @@ type RendererPermissionRequestPayload = {
|
||||
requestId: string
|
||||
toolName: string
|
||||
toolId: string
|
||||
toolCallId: string
|
||||
description?: string
|
||||
requiresPermissions: boolean
|
||||
input: Record<string, unknown>
|
||||
@@ -206,10 +207,19 @@ const ensureIpcHandlersRegistered = () => {
|
||||
})
|
||||
}
|
||||
|
||||
type PromptForToolApprovalOptions = {
|
||||
signal: AbortSignal
|
||||
suggestions?: PermissionUpdate[]
|
||||
|
||||
// NOTICE: This ID is namespaced with session ID, not the raw SDK tool call ID.
|
||||
// Format: `${sessionId}:${rawToolCallId}`, e.g., `session_123:WebFetch_0`
|
||||
toolCallId: string
|
||||
}
|
||||
|
||||
export async function promptForToolApproval(
|
||||
toolName: string,
|
||||
input: Record<string, unknown>,
|
||||
options?: { signal: AbortSignal; suggestions?: PermissionUpdate[] }
|
||||
options: PromptForToolApprovalOptions
|
||||
): Promise<PermissionResult> {
|
||||
if (shouldAutoApproveTools) {
|
||||
logger.debug('promptForToolApproval auto-approving tool for test', {
|
||||
@@ -245,6 +255,7 @@ export async function promptForToolApproval(
|
||||
logger.info('Requesting user approval for tool usage', {
|
||||
requestId,
|
||||
toolName,
|
||||
toolCallId: options.toolCallId,
|
||||
description: toolMetadata?.description
|
||||
})
|
||||
|
||||
@@ -252,6 +263,7 @@ export async function promptForToolApproval(
|
||||
requestId,
|
||||
toolName,
|
||||
toolId: toolMetadata?.id ?? toolName,
|
||||
toolCallId: options.toolCallId,
|
||||
description: toolMetadata?.description,
|
||||
requiresPermissions: toolMetadata?.requirePermissions ?? false,
|
||||
input: sanitizedInput,
|
||||
@@ -266,6 +278,7 @@ export async function promptForToolApproval(
|
||||
logger.debug('Registering tool permission request', {
|
||||
requestId,
|
||||
toolName,
|
||||
toolCallId: options.toolCallId,
|
||||
requiresPermissions: requestPayload.requiresPermissions,
|
||||
timeoutMs: TOOL_APPROVAL_TIMEOUT_MS,
|
||||
suggestionCount: sanitizedSuggestions.length
|
||||
@@ -273,7 +286,11 @@ export async function promptForToolApproval(
|
||||
|
||||
return new Promise<PermissionResult>((resolve) => {
|
||||
const timeout = setTimeout(() => {
|
||||
logger.info('User tool permission request timed out', { requestId, toolName })
|
||||
logger.info('User tool permission request timed out', {
|
||||
requestId,
|
||||
toolName,
|
||||
toolCallId: options.toolCallId
|
||||
})
|
||||
finalizeRequest(requestId, { behavior: 'deny', message: 'Timed out waiting for approval' }, 'timeout')
|
||||
}, TOOL_APPROVAL_TIMEOUT_MS)
|
||||
|
||||
@@ -287,7 +304,11 @@ export async function promptForToolApproval(
|
||||
|
||||
if (options?.signal) {
|
||||
const abortListener = () => {
|
||||
logger.info('Tool permission request aborted before user responded', { requestId, toolName })
|
||||
logger.info('Tool permission request aborted before user responded', {
|
||||
requestId,
|
||||
toolName,
|
||||
toolCallId: options.toolCallId
|
||||
})
|
||||
finalizeRequest(requestId, defaultDenyUpdate, 'aborted')
|
||||
}
|
||||
|
||||
|
||||
@@ -243,9 +243,10 @@ function handleAssistantToolUse(
|
||||
state: ClaudeStreamState,
|
||||
chunks: AgentStreamPart[]
|
||||
): void {
|
||||
const toolCallId = state.getNamespacedToolCallId(block.id)
|
||||
chunks.push({
|
||||
type: 'tool-call',
|
||||
toolCallId: block.id,
|
||||
toolCallId,
|
||||
toolName: block.name,
|
||||
input: block.input,
|
||||
providerExecuted: true,
|
||||
@@ -331,10 +332,11 @@ function handleUserMessage(
|
||||
if (block.type === 'tool_result') {
|
||||
const toolResult = block as ToolResultContent
|
||||
const pendingCall = state.consumePendingToolCall(toolResult.tool_use_id)
|
||||
const toolCallId = pendingCall?.toolCallId ?? state.getNamespacedToolCallId(toolResult.tool_use_id)
|
||||
if (toolResult.is_error) {
|
||||
chunks.push({
|
||||
type: 'tool-error',
|
||||
toolCallId: toolResult.tool_use_id,
|
||||
toolCallId,
|
||||
toolName: pendingCall?.toolName ?? 'unknown',
|
||||
input: pendingCall?.input,
|
||||
error: toolResult.content,
|
||||
@@ -343,7 +345,7 @@ function handleUserMessage(
|
||||
} else {
|
||||
chunks.push({
|
||||
type: 'tool-result',
|
||||
toolCallId: toolResult.tool_use_id,
|
||||
toolCallId,
|
||||
toolName: pendingCall?.toolName ?? 'unknown',
|
||||
input: pendingCall?.input,
|
||||
output: toolResult.content,
|
||||
@@ -514,7 +516,7 @@ function handleContentBlockStart(
|
||||
}
|
||||
case 'tool_use': {
|
||||
const block = state.openToolBlock(index, {
|
||||
toolCallId: contentBlock.id,
|
||||
rawToolCallId: contentBlock.id,
|
||||
toolName: contentBlock.name,
|
||||
providerMetadata
|
||||
})
|
||||
|
||||
@@ -111,6 +111,7 @@ const api = {
  setFullScreen: (value: boolean): Promise<void> => ipcRenderer.invoke(IpcChannel.App_SetFullScreen, value),
  isFullScreen: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.App_IsFullScreen),
  getSystemFonts: (): Promise<string[]> => ipcRenderer.invoke(IpcChannel.App_GetSystemFonts),
  mockCrashRenderProcess: () => ipcRenderer.invoke(IpcChannel.APP_CrashRenderProcess),
  mac: {
    isProcessTrusted: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.App_MacIsProcessTrusted),
    requestProcessTrust: (): Promise<boolean> => ipcRenderer.invoke(IpcChannel.App_MacRequestProcessTrust)

@@ -219,6 +220,10 @@ const api = {
  startFileWatcher: (dirPath: string, config?: any) =>
    ipcRenderer.invoke(IpcChannel.File_StartWatcher, dirPath, config),
  stopFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_StopWatcher),
  pauseFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_PauseWatcher),
  resumeFileWatcher: () => ipcRenderer.invoke(IpcChannel.File_ResumeWatcher),
  batchUploadMarkdown: (filePaths: string[], targetPath: string) =>
    ipcRenderer.invoke(IpcChannel.File_BatchUploadMarkdown, filePaths, targetPath),
  onFileChange: (callback: (data: FileChangeEvent) => void) => {
    const listener = (_event: Electron.IpcRendererEvent, data: any) => {
      if (data && typeof data === 'object') {
@@ -1,5 +1,6 @@
import { loggerService } from '@logger'
import {
  getModelSupportedVerbosity,
  isFunctionCallingModel,
  isNotSupportTemperatureAndTopP,
  isOpenAIModel,

@@ -242,12 +243,18 @@ export abstract class BaseApiClient<
    return serviceTierSetting
  }

  protected getVerbosity(): OpenAIVerbosity {
  protected getVerbosity(model?: Model): OpenAIVerbosity {
    try {
      const state = window.store?.getState()
      const verbosity = state?.settings?.openAI?.verbosity

      if (verbosity && ['low', 'medium', 'high'].includes(verbosity)) {
        // If model is provided, check if the verbosity is supported by the model
        if (model) {
          const supportedVerbosity = getModelSupportedVerbosity(model)
          // Use user's verbosity if supported, otherwise use the first supported option
          return supportedVerbosity.includes(verbosity) ? verbosity : supportedVerbosity[0]
        }
        return verbosity
      }
    } catch (error) {
@@ -35,6 +35,7 @@ import {
  isSupportedThinkingTokenModel,
  isSupportedThinkingTokenQwenModel,
  isSupportedThinkingTokenZhipuModel,
  isSupportVerbosityModel,
  isVisionModel,
  MODEL_SUPPORTED_REASONING_EFFORT,
  ZHIPU_RESULT_TOKENS

@@ -733,6 +734,13 @@ export class OpenAIAPIClient extends OpenAIBaseClient<
  ...modalities,
  // groq uses a different service tier configuration that does not match the OpenAI interface types
  service_tier: this.getServiceTier(model) as OpenAIServiceTier,
  ...(isSupportVerbosityModel(model)
    ? {
        text: {
          verbosity: this.getVerbosity(model)
        }
      }
    : {}),
  ...this.getProviderSpecificParameters(assistant, model),
  ...reasoningEffort,
  ...getOpenAIWebSearchParams(model, enableWebSearch),
@@ -48,9 +48,8 @@ export abstract class OpenAIBaseClient<
  }

  // Only applies to OpenAI
  override getBaseURL(): string {
    const host = this.provider.apiHost
    return formatApiHost(host)
  override getBaseURL(isSupportedAPIVerion: boolean = true): string {
    return formatApiHost(this.provider.apiHost, isSupportedAPIVerion)
  }

  override async generateImage({

@@ -144,6 +143,11 @@ export abstract class OpenAIBaseClient<
  }

  let apiKeyForSdkInstance = this.apiKey
  let baseURLForSdkInstance = this.getBaseURL()
  let headersForSdkInstance = {
    ...this.defaultHeaders(),
    ...this.provider.extra_headers
  }

  if (this.provider.id === 'copilot') {
    const defaultHeaders = store.getState().copilot.defaultHeaders

@@ -151,6 +155,11 @@ export abstract class OpenAIBaseClient<
    // this.provider.apiKey must not be modified
    // this.provider.apiKey = token
    apiKeyForSdkInstance = token
    baseURLForSdkInstance = this.getBaseURL(false)
    headersForSdkInstance = {
      ...headersForSdkInstance,
      ...COPILOT_DEFAULT_HEADERS
    }
  }

  if (this.provider.id === 'azure-openai' || this.provider.type === 'azure-openai') {

@@ -164,12 +173,8 @@ export abstract class OpenAIBaseClient<
  this.sdkInstance = new OpenAI({
    dangerouslyAllowBrowser: true,
    apiKey: apiKeyForSdkInstance,
    baseURL: this.getBaseURL(),
    defaultHeaders: {
      ...this.defaultHeaders(),
      ...this.provider.extra_headers,
      ...(this.provider.id === 'copilot' ? COPILOT_DEFAULT_HEADERS : {})
    }
    baseURL: baseURLForSdkInstance,
    defaultHeaders: headersForSdkInstance
  }) as TSdkInstance
  }
  return this.sdkInstance
@@ -297,7 +297,31 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<

  private convertResponseToMessageContent(response: OpenAI.Responses.Response): ResponseInput {
    const content: OpenAI.Responses.ResponseInput = []
    content.push(...response.output)
    response.output.forEach((item) => {
      if (item.type !== 'apply_patch_call' && item.type !== 'apply_patch_call_output') {
        content.push(item)
      } else if (item.type === 'apply_patch_call') {
        if (item.operation !== undefined) {
          const applyPatchToolCall: OpenAI.Responses.ResponseInputItem.ApplyPatchCall = {
            ...item,
            operation: item.operation
          }
          content.push(applyPatchToolCall)
        } else {
          logger.warn('Undefined tool call operation for ApplyPatchToolCall.')
        }
      } else if (item.type === 'apply_patch_call_output') {
        if (item.output !== undefined) {
          const applyPatchToolCallOutput: OpenAI.Responses.ResponseInputItem.ApplyPatchCallOutput = {
            ...item,
            output: item.output === null ? undefined : item.output
          }
          content.push(applyPatchToolCallOutput)
        } else {
          logger.warn('Undefined tool call operation for ApplyPatchToolCall.')
        }
      }
    })
    return content
  }

@@ -496,7 +520,7 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<
  ...(isSupportVerbosityModel(model)
    ? {
        text: {
          verbosity: this.getVerbosity()
          verbosity: this.getVerbosity(model)
        }
      }
    : {}),
13  src/renderer/src/aiCore/prepareParams/header.ts  Normal file
@@ -0,0 +1,13 @@
import { isClaude45ReasoningModel } from '@renderer/config/models'
import type { Assistant, Model } from '@renderer/types'
import { isToolUseModeFunction } from '@renderer/utils/assistant'

const INTERLEAVED_THINKING_HEADER = 'interleaved-thinking-2025-05-14'

export function addAnthropicHeaders(assistant: Assistant, model: Model): string[] {
  const anthropicHeaders: string[] = []
  if (isClaude45ReasoningModel(model) && isToolUseModeFunction(assistant)) {
    anthropicHeaders.push(INTERLEAVED_THINKING_HEADER)
  }
  return anthropicHeaders
}
@@ -7,10 +7,12 @@ import { anthropic } from '@ai-sdk/anthropic'
import { google } from '@ai-sdk/google'
import { vertexAnthropic } from '@ai-sdk/google-vertex/anthropic/edge'
import { vertex } from '@ai-sdk/google-vertex/edge'
import { combineHeaders } from '@ai-sdk/provider-utils'
import type { WebSearchPluginConfig } from '@cherrystudio/ai-core/built-in/plugins'
import { isBaseProvider } from '@cherrystudio/ai-core/core/providers/schemas'
import { loggerService } from '@logger'
import {
  isAnthropicModel,
  isGenerateImageModel,
  isOpenRouterBuiltInWebSearchModel,
  isReasoningModel,

@@ -19,6 +21,8 @@ import {
  isSupportedThinkingTokenModel,
  isWebSearchModel
} from '@renderer/config/models'
import { isAwsBedrockProvider } from '@renderer/config/providers'
import { isVertexProvider } from '@renderer/hooks/useVertexAI'
import { getAssistantSettings, getDefaultModel } from '@renderer/services/AssistantService'
import store from '@renderer/store'
import type { CherryWebSearchConfig } from '@renderer/store/websearch'

@@ -34,6 +38,7 @@ import { setupToolsConfig } from '../utils/mcp'
import { buildProviderOptions } from '../utils/options'
import { getAnthropicThinkingBudget } from '../utils/reasoning'
import { buildProviderBuiltinWebSearchConfig } from '../utils/websearch'
import { addAnthropicHeaders } from './header'
import { supportsTopP } from './modelCapabilities'
import { getTemperature, getTopP } from './modelParameters'

@@ -172,13 +177,21 @@ export async function buildStreamTextParams(
    }
  }

  let headers: Record<string, string | undefined> = options.requestOptions?.headers ?? {}

  // https://docs.claude.com/en/docs/build-with-claude/extended-thinking#interleaved-thinking
  if (!isVertexProvider(provider) && !isAwsBedrockProvider(provider) && isAnthropicModel(model)) {
    const newBetaHeaders = { 'anthropic-beta': addAnthropicHeaders(assistant, model).join(',') }
    headers = combineHeaders(headers, newBetaHeaders)
  }

  // Build the base parameters
  const params: StreamTextParams = {
    messages: sdkMessages,
    maxOutputTokens: maxTokens,
    temperature: getTemperature(assistant, model),
    abortSignal: options.requestOptions?.signal,
    headers: options.requestOptions?.headers,
    headers,
    providerOptions,
    stopWhen: stepCountIs(20),
    maxRetries: 0
@@ -1,5 +1,12 @@
import { baseProviderIdSchema, customProviderIdSchema } from '@cherrystudio/ai-core/provider'
import { isOpenAIModel, isQwenMTModel, isSupportFlexServiceTierModel } from '@renderer/config/models'
import { loggerService } from '@logger'
import {
  getModelSupportedVerbosity,
  isOpenAIModel,
  isQwenMTModel,
  isSupportFlexServiceTierModel,
  isSupportVerbosityModel
} from '@renderer/config/models'
import { isSupportServiceTierProvider } from '@renderer/config/providers'
import { mapLanguageToQwenMTModel } from '@renderer/config/translate'
import type { Assistant, Model, Provider } from '@renderer/types'

@@ -26,6 +33,8 @@ import {
} from './reasoning'
import { getWebSearchParams } from './websearch'

const logger = loggerService.withContext('aiCore.utils.options')

// copy from BaseApiClient.ts
const getServiceTier = (model: Model, provider: Provider) => {
  const serviceTierSetting = provider.serviceTier

@@ -70,6 +79,7 @@ export function buildProviderOptions(
    enableGenerateImage: boolean
  }
): Record<string, any> {
  logger.debug('buildProviderOptions', { assistant, model, actualProvider, capabilities })
  const rawProviderId = getAiSdkProviderId(actualProvider)
  // Build provider-specific options
  let providerSpecificOptions: Record<string, any> = {}

@@ -89,9 +99,6 @@ export function buildProviderOptions(
        serviceTier: serviceTierSetting
      }
      break
    case 'huggingface':
      providerSpecificOptions = buildOpenAIProviderOptions(assistant, model, capabilities)
      break
    case 'anthropic':
      providerSpecificOptions = buildAnthropicProviderOptions(assistant, model, capabilities)
      break

@@ -134,6 +141,9 @@ export function buildProviderOptions(
    case 'bedrock':
      providerSpecificOptions = buildBedrockProviderOptions(assistant, model, capabilities)
      break
    case 'huggingface':
      providerSpecificOptions = buildOpenAIProviderOptions(assistant, model, capabilities)
      break
    default:
      // For other providers, use the generic build logic
      providerSpecificOptions = {

@@ -152,13 +162,17 @@ export function buildProviderOptions(
    ...getCustomParameters(assistant)
  }

  const rawProviderKey =
  let rawProviderKey =
    {
      'google-vertex': 'google',
      'google-vertex-anthropic': 'anthropic',
      'ai-gateway': 'gateway'
    }[rawProviderId] || rawProviderId

  if (rawProviderKey === 'cherryin') {
    rawProviderKey = { gemini: 'google' }[actualProvider.type] || actualProvider.type
  }

  // Return the format required by the AI Core SDK: { 'providerId': providerOptions }
  return {
    [rawProviderKey]: providerSpecificOptions

@@ -187,6 +201,23 @@ function buildOpenAIProviderOptions(
      ...reasoningParams
    }
  }

  if (isSupportVerbosityModel(model)) {
    const state = window.store?.getState()
    const userVerbosity = state?.settings?.openAI?.verbosity

    if (userVerbosity && ['low', 'medium', 'high'].includes(userVerbosity)) {
      const supportedVerbosity = getModelSupportedVerbosity(model)
      // Use user's verbosity if supported, otherwise use the first supported option
      const verbosity = supportedVerbosity.includes(userVerbosity) ? userVerbosity : supportedVerbosity[0]

      providerOptions = {
        ...providerOptions,
        textVerbosity: verbosity
      }
    }
  }

  return providerOptions
}
@@ -1,3 +1,7 @@
import type { BedrockProviderOptions } from '@ai-sdk/amazon-bedrock'
import type { AnthropicProviderOptions } from '@ai-sdk/anthropic'
import type { GoogleGenerativeAIProviderOptions } from '@ai-sdk/google'
import type { XaiProviderOptions } from '@ai-sdk/xai'
import { loggerService } from '@logger'
import { DEFAULT_MAX_TOKENS } from '@renderer/config/constant'
import {

@@ -7,6 +11,7 @@ import {
  isDeepSeekHybridInferenceModel,
  isDoubaoSeedAfter251015,
  isDoubaoThinkingAutoModel,
  isGPT51SeriesModel,
  isGrok4FastReasoningModel,
  isGrokReasoningModel,
  isOpenAIDeepResearchModel,

@@ -56,13 +61,20 @@ export function getReasoningEffort(assistant: Assistant, model: Model): Reasonin
  }
  const reasoningEffort = assistant?.settings?.reasoning_effort

  if (!reasoningEffort) {
  // Handle undefined and 'none' reasoningEffort.
  // TODO: They should be separated.
  if (!reasoningEffort || reasoningEffort === 'none') {
    // openrouter: use reasoning
    if (model.provider === SystemProviderIds.openrouter) {
      // Don't disable reasoning for Gemini models that support thinking tokens
      if (isSupportedThinkingTokenGeminiModel(model) && !GEMINI_FLASH_MODEL_REGEX.test(model.id)) {
        return {}
      }
      // 'none' is not an available value for effort for now.
      // I think they should resolve this issue soon, so I'll just go ahead and use this value.
      if (isGPT51SeriesModel(model) && reasoningEffort === 'none') {
        return { reasoning: { effort: 'none' } }
      }
      // Don't disable reasoning for models that require it
      if (
        isGrokReasoningModel(model) ||

@@ -117,6 +129,13 @@ export function getReasoningEffort(assistant: Assistant, model: Model): Reasonin
    return { thinking: { type: 'disabled' } }
  }

  // Specially for GPT-5.1. Suppose this is an OpenAI-compatible provider
  if (isGPT51SeriesModel(model) && reasoningEffort === 'none') {
    return {
      reasoningEffort: 'none'
    }
  }

  return {}
}

@@ -371,7 +390,7 @@ export function getOpenAIReasoningParams(assistant: Assistant, model: Model): Re

export function getAnthropicThinkingBudget(assistant: Assistant, model: Model): number {
  const { maxTokens, reasoning_effort: reasoningEffort } = getAssistantSettings(assistant)
  if (reasoningEffort === undefined) {
  if (reasoningEffort === undefined || reasoningEffort === 'none') {
    return 0
  }
  const effortRatio = EFFORT_RATIO[reasoningEffort]

@@ -393,14 +412,17 @@ export function getAnthropicThinkingBudget(assistant: Assistant, model: Model):
 * Get Anthropic reasoning parameters
 * Logic extracted from AnthropicAPIClient
 */
export function getAnthropicReasoningParams(assistant: Assistant, model: Model): Record<string, any> {
export function getAnthropicReasoningParams(
  assistant: Assistant,
  model: Model
): Pick<AnthropicProviderOptions, 'thinking'> {
  if (!isReasoningModel(model)) {
    return {}
  }

  const reasoningEffort = assistant?.settings?.reasoning_effort

  if (reasoningEffort === undefined) {
  if (reasoningEffort === undefined || reasoningEffort === 'none') {
    return {
      thinking: {
        type: 'disabled'

@@ -429,7 +451,10 @@ export function getAnthropicReasoningParams(assistant: Assistant, model: Model):
 * Note: parameters such as thinkingBudget used by the Gemini/GCP endpoints should be passed in camelCase,
 * while Google's official OpenAI-compatible endpoint uses snake_case (thinking_budget)
 */
export function getGeminiReasoningParams(assistant: Assistant, model: Model): Record<string, any> {
export function getGeminiReasoningParams(
  assistant: Assistant,
  model: Model
): Pick<GoogleGenerativeAIProviderOptions, 'thinkingConfig'> {
  if (!isReasoningModel(model)) {
    return {}
  }

@@ -438,7 +463,7 @@ export function getGeminiReasoningParams(assistant: Assistant, model: Model): Re

  // Gemini reasoning parameters
  if (isSupportedThinkingTokenGeminiModel(model)) {
    if (reasoningEffort === undefined) {
    if (reasoningEffort === undefined || reasoningEffort === 'none') {
      return {
        thinkingConfig: {
          includeThoughts: false,

@@ -478,27 +503,35 @@ export function getGeminiReasoningParams(assistant: Assistant, model: Model): Re
 * @param model - The model being used
 * @returns XAI-specific reasoning parameters
 */
export function getXAIReasoningParams(assistant: Assistant, model: Model): Record<string, any> {
export function getXAIReasoningParams(assistant: Assistant, model: Model): Pick<XaiProviderOptions, 'reasoningEffort'> {
  if (!isSupportedReasoningEffortGrokModel(model)) {
    return {}
  }

  const { reasoning_effort: reasoningEffort } = getAssistantSettings(assistant)

  if (!reasoningEffort) {
  if (!reasoningEffort || reasoningEffort === 'none') {
    return {}
  }

  // For XAI provider Grok models, use reasoningEffort parameter directly
  return {
    reasoningEffort
  switch (reasoningEffort) {
    case 'auto':
    case 'minimal':
    case 'medium':
      return { reasoningEffort: 'low' }
    case 'low':
    case 'high':
      return { reasoningEffort }
  }
}

/**
 * Get Bedrock reasoning parameters
 */
export function getBedrockReasoningParams(assistant: Assistant, model: Model): Record<string, any> {
export function getBedrockReasoningParams(
  assistant: Assistant,
  model: Model
): Pick<BedrockProviderOptions, 'reasoningConfig'> {
  if (!isReasoningModel(model)) {
    return {}
  }

@@ -509,6 +542,14 @@ export function getBedrockReasoningParams(assistant: Assistant, model: Model): R
    return {}
  }

  if (reasoningEffort === 'none') {
    return {
      reasoningConfig: {
        type: 'disabled'
      }
    }
  }

  // Only apply thinking budget for Claude reasoning models
  if (!isSupportedThinkingTokenClaudeModel(model)) {
    return {}
BIN  src/renderer/src/assets/images/models/gpt-5.1-chat.png  (new binary file, not shown; 19 KiB)
BIN  src/renderer/src/assets/images/models/gpt-5.1-codex-mini.png  (new binary file, not shown; 21 KiB)
BIN  src/renderer/src/assets/images/models/gpt-5.1-codex.png  (new binary file, not shown; 20 KiB)
BIN  src/renderer/src/assets/images/models/gpt-5.1.png  (new binary file, not shown; 18 KiB)
@@ -1,5 +1,4 @@
import { loggerService } from '@logger'
import ClaudeIcon from '@renderer/assets/images/models/claude.png'
import { ErrorBoundary } from '@renderer/components/ErrorBoundary'
import { TopView } from '@renderer/components/TopView'
import { permissionModeCards } from '@renderer/config/agent'

@@ -9,7 +8,6 @@ import SelectAgentBaseModelButton from '@renderer/pages/home/components/SelectAg
import type {
  AddAgentForm,
  AgentEntity,
  AgentType,
  ApiModel,
  BaseAgentForm,
  PermissionMode,

@@ -17,30 +15,22 @@ import type {
  UpdateAgentForm
} from '@renderer/types'
import { AgentConfigurationSchema, isAgentType } from '@renderer/types'
import { Avatar, Button, Input, Modal, Select } from 'antd'
import { Button, Input, Modal, Select } from 'antd'
import { AlertTriangleIcon } from 'lucide-react'
import type { ChangeEvent, FormEvent } from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import styled from 'styled-components'

import type { BaseOption } from './shared'

const { TextArea } = Input

const logger = loggerService.withContext('AddAgentPopup')

interface AgentTypeOption extends BaseOption {
  type: 'type'
  key: AgentEntity['type']
  name: AgentEntity['name']
}

type AgentWithTools = AgentEntity & { tools?: Tool[] }

const buildAgentForm = (existing?: AgentWithTools): BaseAgentForm => ({
  type: existing?.type ?? 'claude-code',
  name: existing?.name ?? 'Claude Code',
  name: existing?.name ?? 'Agent',
  description: existing?.description,
  instructions: existing?.instructions,
  model: existing?.model ?? '',

@@ -100,54 +90,6 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
    })
  }, [])

  // add supported agents type here.
  const agentConfig = useMemo(
    () =>
      [
        {
          type: 'type',
          key: 'claude-code',
          label: 'Claude Code',
          name: 'Claude Code',
          avatar: ClaudeIcon
        }
      ] as const satisfies AgentTypeOption[],
    []
  )

  const agentOptions = useMemo(
    () =>
      agentConfig.map((option) => ({
        value: option.key,
        label: (
          <OptionWrapper>
            <Avatar src={option.avatar} size={24} />
            <span>{option.label}</span>
          </OptionWrapper>
        )
      })),
    [agentConfig]
  )

  const onAgentTypeChange = useCallback(
    (value: AgentType) => {
      const prevConfig = agentConfig.find((config) => config.key === form.type)
      let newName: string | undefined = form.name
      if (prevConfig && prevConfig.name === form.name) {
        const newConfig = agentConfig.find((config) => config.key === value)
        if (newConfig) {
          newName = newConfig.name
        }
      }
      setForm((prev) => ({
        ...prev,
        type: value,
        name: newName
      }))
    },
    [agentConfig, form.name, form.type]
  )

  const onNameChange = useCallback((e: ChangeEvent<HTMLInputElement>) => {
    setForm((prev) => ({
      ...prev,

@@ -155,12 +97,12 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
    }))
  }, [])

  const onDescChange = useCallback((e: ChangeEvent<HTMLTextAreaElement>) => {
    setForm((prev) => ({
      ...prev,
      description: e.target.value
    }))
  }, [])
  // const onDescChange = useCallback((e: ChangeEvent<HTMLTextAreaElement>) => {
  //   setForm((prev) => ({
  //     ...prev,
  //     description: e.target.value
  //   }))
  // }, [])

  const onInstChange = useCallback((e: ChangeEvent<HTMLTextAreaElement>) => {
    setForm((prev) => ({

@@ -334,16 +276,6 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
    <StyledForm onSubmit={onSubmit}>
      <FormContent>
        <FormRow>
          <FormItem style={{ flex: 1 }}>
            <Label>{t('agent.type.label')}</Label>
            <Select
              value={form.type}
              onChange={onAgentTypeChange}
              options={agentOptions}
              disabled={isEditing(agent)}
              style={{ width: '100%' }}
            />
          </FormItem>
          <FormItem style={{ flex: 1 }}>
            <Label>
              {t('common.name')} <RequiredMark>*</RequiredMark>

@@ -363,7 +295,7 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
            avatarSize={24}
            iconSize={16}
            buttonStyle={{
              padding: '8px 12px',
              padding: '3px 8px',
              width: '100%',
              border: '1px solid var(--color-border)',
              borderRadius: 6,

@@ -382,7 +314,6 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
            onChange={onPermissionModeChange}
            style={{ width: '100%' }}
            placeholder={t('agent.settings.tooling.permissionMode.placeholder', 'Select permission mode')}
            dropdownStyle={{ minWidth: '500px' }}
            optionLabelProp="label">
            {permissionModeCards.map((item) => (
              <Select.Option key={item.mode} value={item.mode} label={t(item.titleKey, item.titleFallback)}>

@@ -438,10 +369,10 @@ const PopupContainer: React.FC<Props> = ({ agent, afterSubmit, resolve }) => {
          <TextArea rows={3} value={form.instructions ?? ''} onChange={onInstChange} />
        </FormItem>

        <FormItem>
        {/* <FormItem>
          <Label>{t('common.description')}</Label>
          <TextArea rows={2} value={form.description ?? ''} onChange={onDescChange} />
        </FormItem>
          <TextArea rows={1} value={form.description ?? ''} onChange={onDescChange} />
        </FormItem> */}
      </FormContent>

      <FormFooter>

@@ -575,14 +506,7 @@ const FormFooter = styled.div`
  display: flex;
  justify-content: flex-end;
  gap: 8px;
  padding-top: 16px;
  border-top: 1px solid var(--color-border);
`

const OptionWrapper = styled.div`
  display: flex;
  align-items: center;
  gap: 8px;
  padding: 10px;
`

const PermissionOptionWrapper = styled.div`
@@ -140,11 +140,11 @@ describe('DynamicVirtualList', () => {
    // Should call isSticky function during rendering
    expect(isSticky).toHaveBeenCalled()

    // Should apply sticky styles to sticky items
    // Sticky items within visible range should have proper z-index but may be absolute until scrolled
    const stickyItem = document.querySelector('[data-index="0"]') as HTMLElement
    expect(stickyItem).toBeInTheDocument()
    expect(stickyItem).toHaveStyle('position: sticky')
    expect(stickyItem).toHaveStyle('z-index: 1')
    // When sticky item is in visible range, it gets z-index but may not be sticky yet
    expect(stickyItem).toHaveStyle('z-index: 999')
  })

  it('should apply absolute positioning to non-sticky items', () => {

@@ -24,7 +24,7 @@ exports[`DynamicVirtualList > basic rendering > snapshot test 1`] = `
  >
    <div
      data-index="0"
      style="position: absolute; top: 0px; left: 0px; transform: translateY(0px); width: 100%;"
      style="position: absolute; top: 0px; left: 0px; z-index: 0; pointer-events: auto; transform: translateY(0px); width: 100%;"
    >
      <div
        data-testid="item-0"

@@ -34,7 +34,7 @@ exports[`DynamicVirtualList > basic rendering > snapshot test 1`] = `
    </div>
    <div
      data-index="1"
      style="position: absolute; top: 0px; left: 0px; transform: translateY(50px); width: 100%;"
      style="position: absolute; top: 0px; left: 0px; z-index: 0; pointer-events: auto; transform: translateY(50px); width: 100%;"
    >
      <div
        data-testid="item-1"

@@ -44,7 +44,7 @@ exports[`DynamicVirtualList > basic rendering > snapshot test 1`] = `
    </div>
    <div
      data-index="2"
      style="position: absolute; top: 0px; left: 0px; transform: translateY(100px); width: 100%;"
      style="position: absolute; top: 0px; left: 0px; z-index: 0; pointer-events: auto; transform: translateY(100px); width: 100%;"
    >
      <div
        data-testid="item-2"
@@ -62,6 +62,12 @@ export interface DynamicVirtualListProps<T> extends InheritedVirtualizerOptions
   */
  isSticky?: (index: number) => boolean

  /**
   * Get the depth/level of an item for hierarchical sticky positioning
   * Used with isSticky to determine ancestor relationships
   */
  getItemDepth?: (index: number) => number

  /**
   * Range extractor function, cannot be used with isSticky
   */

@@ -101,6 +107,7 @@ function DynamicVirtualList<T>(props: DynamicVirtualListProps<T>) {
    size,
    estimateSize,
    isSticky,
    getItemDepth,
    rangeExtractor: customRangeExtractor,
    itemContainerStyle,
    scrollerStyle,

@@ -115,7 +122,7 @@ function DynamicVirtualList<T>(props: DynamicVirtualListProps<T>) {
  const internalScrollerRef = useRef<HTMLDivElement>(null)
  const scrollerRef = internalScrollerRef

  const activeStickyIndexRef = useRef(0)
  const activeStickyIndexesRef = useRef<number[]>([])

  const stickyIndexes = useMemo(() => {
    if (!isSticky) return []

@@ -124,21 +131,54 @@ function DynamicVirtualList<T>(props: DynamicVirtualListProps<T>) {

  const internalStickyRangeExtractor = useCallback(
    (range: Range) => {
      // The active sticky index is the last one that is before or at the start of the visible range
      const newActiveStickyIndex =
        [...stickyIndexes].reverse().find((index) => range.startIndex >= index) ?? stickyIndexes[0] ?? 0
      const activeStickies: number[] = []

      if (newActiveStickyIndex !== activeStickyIndexRef.current) {
        activeStickyIndexRef.current = newActiveStickyIndex
      if (getItemDepth) {
        // With depth information, we can build a proper ancestor chain
        // Find all sticky items before the visible range
        const stickiesBeforeRange = stickyIndexes.filter((index) => index < range.startIndex)

        if (stickiesBeforeRange.length > 0) {
          // Find the depth of the first visible item (or last sticky before it)
          const firstVisibleIndex = range.startIndex
          const referenceDepth = getItemDepth(firstVisibleIndex)

          // Build ancestor chain: include all sticky parents
          const ancestorChain: number[] = []
          let minDepth = referenceDepth

          // Walk backwards from the last sticky before visible range
          for (let i = stickiesBeforeRange.length - 1; i >= 0; i--) {
            const stickyIndex = stickiesBeforeRange[i]
            const stickyDepth = getItemDepth(stickyIndex)

            // Include this sticky if it's a parent (smaller depth) of our reference
            if (stickyDepth < minDepth) {
              ancestorChain.unshift(stickyIndex)
              minDepth = stickyDepth
            }
          }

          activeStickies.push(...ancestorChain)
        }
      } else {
        // Fallback: without depth info, just use the last sticky before range
        const lastStickyBeforeRange = [...stickyIndexes].reverse().find((index) => index < range.startIndex)
        if (lastStickyBeforeRange !== undefined) {
          activeStickies.push(lastStickyBeforeRange)
        }
      }

      // Merge the active sticky index and the default range extractor
      const next = new Set([activeStickyIndexRef.current, ...defaultRangeExtractor(range)])
      // Update the ref with current active stickies
      activeStickyIndexesRef.current = activeStickies

      // Merge the active sticky indexes and the default range extractor
      const next = new Set([...activeStickyIndexesRef.current, ...defaultRangeExtractor(range)])

      // Sort the set to maintain proper order
      return [...next].sort((a, b) => a - b)
    },
    [stickyIndexes]
    [stickyIndexes, getItemDepth]
  )

  const rangeExtractor = customRangeExtractor ?? (isSticky ? internalStickyRangeExtractor : undefined)

@@ -221,14 +261,47 @@ function DynamicVirtualList<T>(props: DynamicVirtualListProps<T>) {
      }}>
      {virtualItems.map((virtualItem) => {
        const isItemSticky = stickyIndexes.includes(virtualItem.index)
        const isItemActiveSticky = isItemSticky && activeStickyIndexRef.current === virtualItem.index
        const isItemActiveSticky = isItemSticky && activeStickyIndexesRef.current.includes(virtualItem.index)

        // Calculate the sticky offset for multi-level sticky headers
        const activeStickyIndex = isItemActiveSticky ? activeStickyIndexesRef.current.indexOf(virtualItem.index) : -1

        // Calculate cumulative offset based on actual sizes of previous sticky items
        let stickyOffset = 0
        if (activeStickyIndex >= 0) {
          for (let i = 0; i < activeStickyIndex; i++) {
            const prevStickyIndex = activeStickyIndexesRef.current[i]
            stickyOffset += estimateSize(prevStickyIndex)
          }
        }

        // Check if this item is visually covered by sticky items
        // If covered, disable pointer events to prevent hover/click bleeding through
        const isCoveredBySticky = (() => {
          if (!activeStickyIndexesRef.current.length) return false
          if (isItemActiveSticky) return false // Sticky items themselves are not covered

          // Calculate if this item's visual position is under any sticky header
          const itemVisualTop = virtualItem.start
          let totalStickyHeight = 0
          for (const stickyIdx of activeStickyIndexesRef.current) {
            totalStickyHeight += estimateSize(stickyIdx)
          }

          // If item starts within the sticky area, it's covered
          return itemVisualTop < totalStickyHeight
        })()

        const style: React.CSSProperties = {
          ...itemContainerStyle,
          position: isItemActiveSticky ? 'sticky' : 'absolute',
          top: 0,
          top: isItemActiveSticky ? stickyOffset : 0,
          left: 0,
          zIndex: isItemSticky ? 1 : undefined,
          zIndex: isItemActiveSticky ? 1000 + (100 - activeStickyIndex) : isItemSticky ? 999 : 0,
          pointerEvents: isCoveredBySticky ? 'none' : 'auto',
          ...(isItemActiveSticky && {
            backgroundColor: 'var(--color-background)'
          }),
          ...(horizontal
            ? {
                transform: isItemActiveSticky ? undefined : `translateX(${virtualItem.start}px)`,
@@ -1,6 +1,12 @@
import { describe, expect, it, vi } from 'vitest'

import { isDoubaoSeedAfter251015, isDoubaoThinkingAutoModel, isLingReasoningModel } from '../models/reasoning'
import {
  isDoubaoSeedAfter251015,
  isDoubaoThinkingAutoModel,
  isGeminiReasoningModel,
  isLingReasoningModel,
  isSupportedThinkingTokenGeminiModel
} from '../models/reasoning'

vi.mock('@renderer/store', () => ({
  default: {

@@ -231,3 +237,284 @@ describe('Ling Models', () => {
    })
  })
})

describe('Gemini Models', () => {
  describe('isSupportedThinkingTokenGeminiModel', () => {
    it('should return true for gemini 2.5 models', () => {
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-2.5-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-2.5-pro', name: '', provider: '', group: '' })).toBe(true)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-2.5-flash-latest', name: '', provider: '', group: '' })).toBe(true)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-2.5-pro-latest', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return true for gemini latest models', () => {
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-flash-latest', name: '', provider: '', group: '' })).toBe(true)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-pro-latest', name: '', provider: '', group: '' })).toBe(true)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-flash-lite-latest', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return true for gemini 3 models', () => {
      // Preview versions
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-3-pro-preview', name: '', provider: '', group: '' })).toBe(true)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'google/gemini-3-pro-preview', name: '', provider: '', group: '' })).toBe(true)
      // Future stable versions
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-3-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-3-pro', name: '', provider: '', group: '' })).toBe(true)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'google/gemini-3-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'google/gemini-3-pro', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return false for image and tts models', () => {
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-2.5-flash-image', name: '', provider: '', group: '' })).toBe(false)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-2.5-flash-preview-tts', name: '', provider: '', group: '' })).toBe(false)
    })

    it('should return false for older gemini models', () => {
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-1.5-flash', name: '', provider: '', group: '' })).toBe(false)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-1.5-pro', name: '', provider: '', group: '' })).toBe(false)
      expect(isSupportedThinkingTokenGeminiModel({ id: 'gemini-1.0-pro', name: '', provider: '', group: '' })).toBe(false)
    })
  })

  describe('isGeminiReasoningModel', () => {
    it('should return true for gemini thinking models', () => {
      expect(isGeminiReasoningModel({ id: 'gemini-2.0-flash-thinking', name: '', provider: '', group: '' })).toBe(true)
      expect(isGeminiReasoningModel({ id: 'gemini-thinking-exp', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return true for supported thinking token gemini models', () => {
      expect(isGeminiReasoningModel({ id: 'gemini-2.5-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isGeminiReasoningModel({ id: 'gemini-2.5-pro', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return true for gemini-3 models', () => {
      // Preview versions
      expect(isGeminiReasoningModel({ id: 'gemini-3-pro-preview', name: '', provider: '', group: '' })).toBe(true)
      expect(isGeminiReasoningModel({ id: 'google/gemini-3-pro-preview', name: '', provider: '', group: '' })).toBe(true)
      // Future stable versions
      expect(isGeminiReasoningModel({ id: 'gemini-3-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isGeminiReasoningModel({ id: 'gemini-3-pro', name: '', provider: '', group: '' })).toBe(true)
      expect(isGeminiReasoningModel({ id: 'google/gemini-3-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isGeminiReasoningModel({ id: 'google/gemini-3-pro', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return false for older gemini models without thinking', () => {
      expect(isGeminiReasoningModel({ id: 'gemini-1.5-flash', name: '', provider: '', group: '' })).toBe(false)
      expect(isGeminiReasoningModel({ id: 'gemini-1.5-pro', name: '', provider: '', group: '' })).toBe(false)
    })

    it('should return false for undefined model', () => {
      expect(isGeminiReasoningModel(undefined)).toBe(false)
    })
  })
})
167  src/renderer/src/config/__test__/vision.test.ts  Normal file
@@ -0,0 +1,167 @@
import { describe, expect, it, vi } from 'vitest'

import { isVisionModel } from '../models/vision'

vi.mock('@renderer/store', () => ({
  default: {
    getState: () => ({
      llm: {
        settings: {}
      }
    })
  }
}))

// FIXME: Idk why it's imported. Maybe circular dependency somewhere
vi.mock('@renderer/services/AssistantService.ts', () => ({
  getDefaultAssistant: () => {
    return {
      id: 'default',
      name: 'default',
      emoji: '😀',
      prompt: '',
      topics: [],
      messages: [],
      type: 'assistant',
      regularPhrases: [],
      settings: {}
    }
  },
  getProviderByModel: () => null
}))

describe('isVisionModel', () => {
  describe('Gemini Models', () => {
    it('should return true for gemini 1.5 models', () => {
      expect(isVisionModel({ id: 'gemini-1.5-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isVisionModel({ id: 'gemini-1.5-pro', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return true for gemini 2.x models', () => {
      expect(isVisionModel({ id: 'gemini-2.0-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isVisionModel({ id: 'gemini-2.0-pro', name: '', provider: '', group: '' })).toBe(true)
      expect(isVisionModel({ id: 'gemini-2.5-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isVisionModel({ id: 'gemini-2.5-pro', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return true for gemini latest models', () => {
      expect(isVisionModel({ id: 'gemini-flash-latest', name: '', provider: '', group: '' })).toBe(true)
      expect(isVisionModel({ id: 'gemini-pro-latest', name: '', provider: '', group: '' })).toBe(true)
      expect(isVisionModel({ id: 'gemini-flash-lite-latest', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return true for gemini 3 models', () => {
      // Preview versions
      expect(isVisionModel({ id: 'gemini-3-pro-preview', name: '', provider: '', group: '' })).toBe(true)
      // Future stable versions
      expect(isVisionModel({ id: 'gemini-3-flash', name: '', provider: '', group: '' })).toBe(true)
      expect(isVisionModel({ id: 'gemini-3-pro', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return true for gemini exp models', () => {
      expect(isVisionModel({ id: 'gemini-exp-1206', name: '', provider: '', group: '' })).toBe(true)
    })

    it('should return false for gemini 1.0 models', () => {
      expect(isVisionModel({ id: 'gemini-1.0-pro', name: '', provider: '', group: '' })).toBe(false)
    })
  })
})
64  src/renderer/src/config/__test__/websearch.test.ts  Normal file
@@ -0,0 +1,64 @@
import { describe, expect, it, vi } from 'vitest'

import { GEMINI_SEARCH_REGEX } from '../models/websearch'

vi.mock('@renderer/store', () => ({
  default: {
    getState: () => ({
      llm: {
        settings: {}
      }
    })
  }
}))

// FIXME: Idk why it's imported. Maybe circular dependency somewhere
vi.mock('@renderer/services/AssistantService.ts', () => ({
  getDefaultAssistant: () => {
    return {
      id: 'default',
      name: 'default',
      emoji: '😀',
      prompt: '',
      topics: [],
      messages: [],
      type: 'assistant',
      regularPhrases: [],
      settings: {}
    }
  },
  getProviderByModel: () => null
}))

describe('Gemini Search Models', () => {
  describe('GEMINI_SEARCH_REGEX', () => {
    it('should match gemini 2.x models', () => {
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.0-flash')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.0-pro')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.5-flash')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.5-pro')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.5-flash-latest')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-2.5-pro-latest')).toBe(true)
    })

    it('should match gemini latest models', () => {
      expect(GEMINI_SEARCH_REGEX.test('gemini-flash-latest')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-pro-latest')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-flash-lite-latest')).toBe(true)
    })

    it('should match gemini 3 models', () => {
      // Preview versions
      expect(GEMINI_SEARCH_REGEX.test('gemini-3-pro-preview')).toBe(true)
      // Future stable versions
      expect(GEMINI_SEARCH_REGEX.test('gemini-3-flash')).toBe(true)
      expect(GEMINI_SEARCH_REGEX.test('gemini-3-pro')).toBe(true)
    })

    it('should not match older gemini models', () => {
      expect(GEMINI_SEARCH_REGEX.test('gemini-1.5-flash')).toBe(false)
      expect(GEMINI_SEARCH_REGEX.test('gemini-1.5-pro')).toBe(false)
      expect(GEMINI_SEARCH_REGEX.test('gemini-1.0-pro')).toBe(false)
    })
  })
})
@@ -59,6 +59,10 @@ import {
} from '@renderer/assets/images/models/gpt_dark.png'
import ChatGPTImageModelLogo from '@renderer/assets/images/models/gpt_image_1.png'
import ChatGPTo1ModelLogo from '@renderer/assets/images/models/gpt_o1.png'
import GPT51ModelLogo from '@renderer/assets/images/models/gpt-5.1.png'
import GPT51ChatModelLogo from '@renderer/assets/images/models/gpt-5.1-chat.png'
import GPT51CodexModelLogo from '@renderer/assets/images/models/gpt-5.1-codex.png'
import GPT51CodexMiniModelLogo from '@renderer/assets/images/models/gpt-5.1-codex-mini.png'
import GPT5ModelLogo from '@renderer/assets/images/models/gpt-5.png'
import GPT5ChatModelLogo from '@renderer/assets/images/models/gpt-5-chat.png'
import GPT5CodexModelLogo from '@renderer/assets/images/models/gpt-5-codex.png'

@@ -182,6 +186,10 @@ export function getModelLogoById(modelId: string): string | undefined {
  'gpt-5-nano': GPT5NanoModelLogo,
  'gpt-5-chat': GPT5ChatModelLogo,
  'gpt-5-codex': GPT5CodexModelLogo,
  'gpt-5.1-codex-mini': GPT51CodexMiniModelLogo,
  'gpt-5.1-codex': GPT51CodexModelLogo,
  'gpt-5.1-chat': GPT51ChatModelLogo,
  'gpt-5.1': GPT51ModelLogo,
  'gpt-5': GPT5ModelLogo,
  gpts: isLight ? ChatGPT4ModelLogo : ChatGPT4ModelLogoDark,
  'gpt-oss(?:-[\\w-]+)': isLight ? ChatGptModelLogo : ChatGptModelLogoDark,
@@ -8,7 +8,7 @@ import type {
import { getLowerBaseModelName, isUserSelectedModelType } from '@renderer/utils'

import { isEmbeddingModel, isRerankModel } from './embedding'
import { isGPT5SeriesModel } from './utils'
import { isGPT5ProModel, isGPT5SeriesModel, isGPT51SeriesModel } from './utils'
import { isTextToImageModel } from './vision'
import { GEMINI_FLASH_MODEL_REGEX, isOpenAIDeepResearchModel } from './websearch'

@@ -24,6 +24,9 @@ export const MODEL_SUPPORTED_REASONING_EFFORT: ReasoningEffortConfig = {
  openai_deep_research: ['medium'] as const,
  gpt5: ['minimal', 'low', 'medium', 'high'] as const,
  gpt5_codex: ['low', 'medium', 'high'] as const,
  gpt5_1: ['none', 'low', 'medium', 'high'] as const,
  gpt5_1_codex: ['none', 'medium', 'high'] as const,
  gpt5pro: ['high'] as const,
  grok: ['low', 'high'] as const,
  grok4_fast: ['auto'] as const,
  gemini: ['low', 'medium', 'high', 'auto'] as const,

@@ -41,24 +44,27 @@ export const MODEL_SUPPORTED_REASONING_EFFORT: ReasoningEffortConfig = {

// Map from model type to the supported options
export const MODEL_SUPPORTED_OPTIONS: ThinkingOptionConfig = {
  default: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.default] as const,
  default: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.default] as const,
  o: MODEL_SUPPORTED_REASONING_EFFORT.o,
  openai_deep_research: MODEL_SUPPORTED_REASONING_EFFORT.openai_deep_research,
  gpt5: [...MODEL_SUPPORTED_REASONING_EFFORT.gpt5] as const,
  gpt5pro: MODEL_SUPPORTED_REASONING_EFFORT.gpt5pro,
  gpt5_codex: MODEL_SUPPORTED_REASONING_EFFORT.gpt5_codex,
  gpt5_1: MODEL_SUPPORTED_REASONING_EFFORT.gpt5_1,
  gpt5_1_codex: MODEL_SUPPORTED_REASONING_EFFORT.gpt5_1_codex,
  grok: MODEL_SUPPORTED_REASONING_EFFORT.grok,
  grok4_fast: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.grok4_fast] as const,
  gemini: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.gemini] as const,
  grok4_fast: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.grok4_fast] as const,
  gemini: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.gemini] as const,
  gemini_pro: MODEL_SUPPORTED_REASONING_EFFORT.gemini_pro,
  qwen: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.qwen] as const,
  qwen: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.qwen] as const,
  qwen_thinking: MODEL_SUPPORTED_REASONING_EFFORT.qwen_thinking,
  doubao: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.doubao] as const,
  doubao_no_auto: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.doubao_no_auto] as const,
  doubao: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.doubao] as const,
  doubao_no_auto: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.doubao_no_auto] as const,
  doubao_after_251015: MODEL_SUPPORTED_REASONING_EFFORT.doubao_after_251015,
  hunyuan: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.hunyuan] as const,
  zhipu: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.zhipu] as const,
  hunyuan: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.hunyuan] as const,
  zhipu: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.zhipu] as const,
  perplexity: MODEL_SUPPORTED_REASONING_EFFORT.perplexity,
  deepseek_hybrid: ['off', ...MODEL_SUPPORTED_REASONING_EFFORT.deepseek_hybrid] as const
  deepseek_hybrid: ['none', ...MODEL_SUPPORTED_REASONING_EFFORT.deepseek_hybrid] as const
} as const

const withModelIdAndNameAsId = <T>(model: Model, fn: (model: Model) => T): { idResult: T; nameResult: T } => {

@@ -75,11 +81,20 @@ const _getThinkModelType = (model: Model): ThinkingModelType => {
  if (isOpenAIDeepResearchModel(model)) {
    return 'openai_deep_research'
  }
  if (isGPT5SeriesModel(model)) {
  if (isGPT51SeriesModel(model)) {
    if (modelId.includes('codex')) {
      thinkingModelType = 'gpt5_1_codex'
    } else {
      thinkingModelType = 'gpt5_1'
    }
  } else if (isGPT5SeriesModel(model)) {
    if (modelId.includes('codex')) {
      thinkingModelType = 'gpt5_codex'
    } else {
      thinkingModelType = 'gpt5'
      if (isGPT5ProModel(model)) {
        thinkingModelType = 'gpt5pro'
      }
    }
  } else if (isSupportedReasoningEffortOpenAIModel(model)) {
    thinkingModelType = 'o'

@@ -239,7 +254,7 @@ export function isGeminiReasoningModel(model?: Model): boolean {

// Regex for Gemini models that support thinking mode
export const GEMINI_THINKING_MODEL_REGEX =
  /gemini-(?:2\.5.*(?:-latest)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\w-]+)*$/i
  /gemini-(?:2\.5.*(?:-latest)?|3-(?:flash|pro)(?:-preview)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\w-]+)*$/i

export const isSupportedThinkingTokenGeminiModel = (model: Model): boolean => {
  const modelId = getLowerBaseModelName(model.id, '/')

@@ -526,7 +541,7 @@ export function isSupportedReasoningEffortOpenAIModel(model: Model): boolean {
    modelId.includes('o3') ||
    modelId.includes('o4') ||
    modelId.includes('gpt-oss') ||
    (isGPT5SeriesModel(model) && !modelId.includes('chat'))
    ((isGPT5SeriesModel(model) || isGPT51SeriesModel(model)) && !modelId.includes('chat'))
  )
}
@@ -54,7 +54,7 @@ export function isSupportedFlexServiceTier(model: Model): boolean {
|
||||
|
||||
export function isSupportVerbosityModel(model: Model): boolean {
|
||||
const modelId = getLowerBaseModelName(model.id)
|
||||
return isGPT5SeriesModel(model) && !modelId.includes('chat')
|
||||
return (isGPT5SeriesModel(model) || isGPT51SeriesModel(model)) && !modelId.includes('chat')
|
||||
}
|
||||
|
||||
export function isOpenAIChatCompletionOnlyModel(model: Model): boolean {
|
||||
@@ -227,12 +227,32 @@ export const isNotSupportSystemMessageModel = (model: Model): boolean => {

 export const isGPT5SeriesModel = (model: Model) => {
   const modelId = getLowerBaseModelName(model.id)
-  return modelId.includes('gpt-5')
+  return modelId.includes('gpt-5') && !modelId.includes('gpt-5.1')
 }

 export const isGPT5SeriesReasoningModel = (model: Model) => {
   const modelId = getLowerBaseModelName(model.id)
-  return modelId.includes('gpt-5') && !modelId.includes('chat')
+  return isGPT5SeriesModel(model) && !modelId.includes('chat')
 }

+export const isGPT51SeriesModel = (model: Model) => {
+  const modelId = getLowerBaseModelName(model.id)
+  return modelId.includes('gpt-5.1')
+}
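Since isGPT5SeriesModel now explicitly excludes gpt-5.1 ids, the two predicates partition the series. A minimal sketch with plain id strings standing in for the Model/getLowerBaseModelName pair used in the source:

```ts
// Simplified stand-ins for the predicates in the hunk above; the real versions
// take a Model and normalize the id via getLowerBaseModelName.
const isGPT5Series = (id: string) => id.includes('gpt-5') && !id.includes('gpt-5.1')
const isGPT51Series = (id: string) => id.includes('gpt-5.1')

console.log(isGPT5Series('gpt-5-codex'))    // true
console.log(isGPT5Series('gpt-5.1-codex'))  // false – excluded by the new guard
console.log(isGPT51Series('gpt-5.1-codex')) // true
console.log(isGPT51Series('gpt-5-chat'))    // false
```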
+// GPT-5 verbosity configuration
+// gpt-5-pro only supports 'high', other GPT-5 models support all levels
+export const MODEL_SUPPORTED_VERBOSITY: Record<string, ('low' | 'medium' | 'high')[]> = {
+  'gpt-5-pro': ['high'],
+  default: ['low', 'medium', 'high']
+}
+
+export const getModelSupportedVerbosity = (model: Model): ('low' | 'medium' | 'high')[] => {
+  const modelId = getLowerBaseModelName(model.id)
+  if (modelId.includes('gpt-5-pro')) {
+    return MODEL_SUPPORTED_VERBOSITY['gpt-5-pro']
+  }
+  return MODEL_SUPPORTED_VERBOSITY.default
+}
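A sketch of how the verbosity table above can be consumed when building a settings selector (hypothetical and simplified to plain strings; the real code resolves the id through getLowerBaseModelName and appears later in this diff in OpenAISettingsGroup):

```ts
type Verbosity = 'low' | 'medium' | 'high'

const MODEL_SUPPORTED_VERBOSITY: Record<string, Verbosity[]> = {
  'gpt-5-pro': ['high'],
  default: ['low', 'medium', 'high']
}

// Simplified: the real helper resolves the id via getLowerBaseModelName(model.id).
const getSupportedVerbosity = (modelId: string): Verbosity[] =>
  modelId.includes('gpt-5-pro') ? MODEL_SUPPORTED_VERBOSITY['gpt-5-pro'] : MODEL_SUPPORTED_VERBOSITY.default

const allOptions: { value: Verbosity; label: string }[] = [
  { value: 'low', label: 'Low' },
  { value: 'medium', label: 'Medium' },
  { value: 'high', label: 'High' }
]

// gpt-5-pro ends up with a single selectable option.
const visible = allOptions.filter((o) => getSupportedVerbosity('gpt-5-pro').includes(o.value))
console.log(visible) // [{ value: 'high', label: 'High' }]
```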
export const isGeminiModel = (model: Model) => {
|
||||
@@ -251,3 +271,8 @@ export const ZHIPU_RESULT_TOKENS = ['<|begin_of_box|>', '<|end_of_box|>'] as con
 export const agentModelFilter = (model: Model): boolean => {
   return !isEmbeddingModel(model) && !isRerankModel(model) && !isTextToImageModel(model)
 }
+
+export const isGPT5ProModel = (model: Model) => {
+  const modelId = getLowerBaseModelName(model.id)
+  return modelId.includes('gpt-5-pro')
+}
@@ -12,6 +12,7 @@ const visionAllowedModels = [
   'gemini-1\\.5',
   'gemini-2\\.0',
   'gemini-2\\.5',
+  'gemini-3-(?:flash|pro)(?:-preview)?',
   'gemini-(flash|pro|flash-lite)-latest',
   'gemini-exp',
   'claude-3',
@@ -64,13 +65,13 @@ const visionExcludedModels = [
   'o1-preview',
   'AIDC-AI/Marco-o1'
 ]
-export const VISION_REGEX = new RegExp(
+const VISION_REGEX = new RegExp(
   `\\b(?!(?:${visionExcludedModels.join('|')})\\b)(${visionAllowedModels.join('|')})\\b`,
   'i'
 )

 // For middleware to identify models that must use the dedicated Image API
-export const DEDICATED_IMAGE_MODELS = [
+const DEDICATED_IMAGE_MODELS = [
   'grok-2-image',
   'grok-2-image-1212',
   'grok-2-image-latest',
@@ -79,7 +80,7 @@ export const DEDICATED_IMAGE_MODELS = [
   'gpt-image-1'
 ]
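The allow/deny lists above compile into a single case-insensitive regex with a negative lookahead over the excluded ids. A reduced sketch using only entries visible in this hunk (the lists in the source are longer):

```ts
// Subset of the lists shown above – illustrative only.
const visionAllowedModels = ['gemini-2\\.5', 'gemini-3-(?:flash|pro)(?:-preview)?', 'claude-3']
const visionExcludedModels = ['o1-preview', 'AIDC-AI/Marco-o1']

// Same construction as in the hunk: word boundary, exclusion lookahead, allowed alternation.
const VISION_REGEX = new RegExp(
  `\\b(?!(?:${visionExcludedModels.join('|')})\\b)(${visionAllowedModels.join('|')})\\b`,
  'i'
)

console.log(VISION_REGEX.test('gemini-3-pro-preview')) // true – newly allowed by this change
console.log(VISION_REGEX.test('o1-preview'))           // false – excluded
```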
export const IMAGE_ENHANCEMENT_MODELS = [
|
||||
const IMAGE_ENHANCEMENT_MODELS = [
|
||||
'grok-2-image(?:-[\\w-]+)?',
|
||||
'qwen-image-edit',
|
||||
'gpt-image-1',
|
||||
@@ -90,9 +91,9 @@ export const IMAGE_ENHANCEMENT_MODELS = [
|
||||
const IMAGE_ENHANCEMENT_MODELS_REGEX = new RegExp(IMAGE_ENHANCEMENT_MODELS.join('|'), 'i')
|
||||
|
||||
// Models that should auto-enable image generation button when selected
|
||||
export const AUTO_ENABLE_IMAGE_MODELS = ['gemini-2.5-flash-image', ...DEDICATED_IMAGE_MODELS]
|
||||
const AUTO_ENABLE_IMAGE_MODELS = ['gemini-2.5-flash-image', ...DEDICATED_IMAGE_MODELS]
|
||||
|
||||
export const OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS = [
|
||||
const OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS = [
|
||||
'o3',
|
||||
'gpt-4o',
|
||||
'gpt-4o-mini',
|
||||
@@ -102,9 +103,9 @@ export const OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS = [
|
||||
'gpt-5'
|
||||
]
|
||||
|
||||
export const OPENAI_IMAGE_GENERATION_MODELS = [...OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS, 'gpt-image-1']
|
||||
const OPENAI_IMAGE_GENERATION_MODELS = [...OPENAI_TOOL_USE_IMAGE_GENERATION_MODELS, 'gpt-image-1']
|
||||
|
||||
export const GENERATE_IMAGE_MODELS = [
|
||||
const GENERATE_IMAGE_MODELS = [
|
||||
'gemini-2.0-flash-exp',
|
||||
'gemini-2.0-flash-exp-image-generation',
|
||||
'gemini-2.0-flash-preview-image-generation',
|
||||
@@ -169,22 +170,23 @@ export function isPureGenerateImageModel(model: Model): boolean {
|
||||
}
|
||||
|
||||
// Text to image models
|
||||
export const TEXT_TO_IMAGE_REGEX = /flux|diffusion|stabilityai|sd-|dall|cogview|janus|midjourney|mj-|image|gpt-image/i
|
||||
const TEXT_TO_IMAGE_REGEX = /flux|diffusion|stabilityai|sd-|dall|cogview|janus|midjourney|mj-|image|gpt-image/i
|
||||
|
||||
export function isTextToImageModel(model: Model): boolean {
|
||||
const modelId = getLowerBaseModelName(model.id)
|
||||
return TEXT_TO_IMAGE_REGEX.test(modelId)
|
||||
}
|
||||
|
||||
export function isNotSupportedImageSizeModel(model?: Model): boolean {
|
||||
if (!model) {
|
||||
return false
|
||||
}
|
||||
// It's not used now
|
||||
// export function isNotSupportedImageSizeModel(model?: Model): boolean {
|
||||
// if (!model) {
|
||||
// return false
|
||||
// }
|
||||
|
||||
const baseName = getLowerBaseModelName(model.id, '/')
|
||||
// const baseName = getLowerBaseModelName(model.id, '/')
|
||||
|
||||
return baseName.includes('grok-2-image')
|
||||
}
|
||||
// return baseName.includes('grok-2-image')
|
||||
// }
|
||||
|
||||
/**
|
||||
* 判断模型是否支持图片增强(包括编辑、增强、修复等)
|
||||
|
||||
@@ -3,7 +3,13 @@ import type { Model } from '@renderer/types'
 import { SystemProviderIds } from '@renderer/types'
 import { getLowerBaseModelName, isUserSelectedModelType } from '@renderer/utils'

-import { isGeminiProvider, isNewApiProvider, isOpenAICompatibleProvider, isOpenAIProvider } from '../providers'
+import {
+  isGeminiProvider,
+  isNewApiProvider,
+  isOpenAICompatibleProvider,
+  isOpenAIProvider,
+  isVertexAiProvider
+} from '../providers'
 import { isEmbeddingModel, isRerankModel } from './embedding'
 import { isAnthropicModel } from './utils'
 import { isPureGenerateImageModel, isTextToImageModel } from './vision'
@@ -16,7 +22,7 @@ export const CLAUDE_SUPPORTED_WEBSEARCH_REGEX = new RegExp(
 export const GEMINI_FLASH_MODEL_REGEX = new RegExp('gemini.*-flash.*$')

 export const GEMINI_SEARCH_REGEX = new RegExp(
-  'gemini-(?:2.*(?:-latest)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\\w-]+)*$',
+  'gemini-(?:2.*(?:-latest)?|3-(?:flash|pro)(?:-preview)?|flash-latest|pro-latest|flash-lite-latest)(?:-[\\w-]+)*$',
   'i'
 )
@@ -70,7 +76,7 @@ export function isWebSearchModel(model: Model): boolean {
     // Not supported on Bedrock or Vertex AI
     if (
       isAnthropicModel(model) &&
-      (provider.id === SystemProviderIds['aws-bedrock'] || provider.id === SystemProviderIds.vertexai)
+      !(provider.id === SystemProviderIds['aws-bedrock'] || provider.id === SystemProviderIds.vertexai)
     ) {
       return CLAUDE_SUPPORTED_WEBSEARCH_REGEX.test(modelId)
     }
@@ -107,7 +113,7 @@ export function isWebSearchModel(model: Model): boolean {
       }
     }

-    if (isGeminiProvider(provider) || provider.id === SystemProviderIds.vertexai) {
+    if (isGeminiProvider(provider) || isVertexAiProvider(provider)) {
       return GEMINI_SEARCH_REGEX.test(modelId)
     }
@@ -67,7 +67,7 @@ import type {
|
||||
SystemProvider,
|
||||
SystemProviderId
|
||||
} from '@renderer/types'
|
||||
import { isSystemProvider, OpenAIServiceTiers } from '@renderer/types'
|
||||
import { isSystemProvider, OpenAIServiceTiers, SystemProviderIds } from '@renderer/types'
|
||||
|
||||
import { TOKENFLUX_HOST } from './constant'
|
||||
import { glm45FlashModel, qwen38bModel, SYSTEM_MODELS } from './models'
|
||||
@@ -275,6 +275,7 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
|
||||
type: 'openai',
|
||||
apiKey: '',
|
||||
apiHost: 'https://api.qnaigc.com',
|
||||
anthropicApiHost: 'https://api.qnaigc.com',
|
||||
models: SYSTEM_MODELS.qiniu,
|
||||
isSystem: true,
|
||||
enabled: false
|
||||
@@ -665,6 +666,7 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
|
||||
type: 'openai',
|
||||
apiKey: '',
|
||||
apiHost: 'https://api.longcat.chat/openai',
|
||||
anthropicApiHost: 'https://api.longcat.chat/anthropic',
|
||||
models: SYSTEM_MODELS.longcat,
|
||||
isSystem: true,
|
||||
enabled: false
|
||||
@@ -684,7 +686,7 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
     name: 'AI Gateway',
     type: 'ai-gateway',
     apiKey: '',
-    apiHost: 'https://ai-gateway.vercel.sh/v1',
+    apiHost: 'https://ai-gateway.vercel.sh/v1/ai',
     models: [],
     isSystem: true,
     enabled: false
@@ -1519,7 +1521,10 @@ const SUPPORT_URL_CONTEXT_PROVIDER_TYPES = [
|
||||
] as const satisfies ProviderType[]
|
||||
|
||||
export const isSupportUrlContextProvider = (provider: Provider) => {
|
||||
return SUPPORT_URL_CONTEXT_PROVIDER_TYPES.some((type) => type === provider.type)
|
||||
return (
|
||||
SUPPORT_URL_CONTEXT_PROVIDER_TYPES.some((type) => type === provider.type) ||
|
||||
provider.id === SystemProviderIds.cherryin
|
||||
)
|
||||
}
|
||||
|
||||
const SUPPORT_GEMINI_NATIVE_WEB_SEARCH_PROVIDERS = ['gemini', 'vertexai'] as const satisfies SystemProviderId[]
|
||||
@@ -1566,10 +1571,18 @@ export function isGeminiProvider(provider: Provider): boolean {
|
||||
return provider.type === 'gemini'
|
||||
}
|
||||
|
||||
export function isVertexAiProvider(provider: Provider): boolean {
|
||||
return provider.type === 'vertexai'
|
||||
}
|
||||
|
||||
export function isAIGatewayProvider(provider: Provider): boolean {
|
||||
return provider.type === 'ai-gateway'
|
||||
}
|
||||
|
||||
export function isAwsBedrockProvider(provider: Provider): boolean {
|
||||
return provider.type === 'aws-bedrock'
|
||||
}
|
||||
|
||||
const NOT_SUPPORT_API_VERSION_PROVIDERS = ['github', 'copilot', 'perplexity'] as const satisfies SystemProviderId[]
|
||||
|
||||
export const isSupportAPIVersionProvider = (provider: Provider) => {
|
||||
|
||||
@@ -123,9 +123,9 @@ export function useAssistant(id: string) {
       }

       updateAssistantSettings({
-        reasoning_effort: fallbackOption === 'off' ? undefined : fallbackOption,
-        reasoning_effort_cache: fallbackOption === 'off' ? undefined : fallbackOption,
-        qwenThinkMode: fallbackOption === 'off' ? undefined : true
+        reasoning_effort: fallbackOption === 'none' ? undefined : fallbackOption,
+        reasoning_effort_cache: fallbackOption === 'none' ? undefined : fallbackOption,
+        qwenThinkMode: fallbackOption === 'none' ? undefined : true
       })
     } else {
       // For supported options, no longer update the cache.
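The fallback path above keeps the convention that a 'none' selection is persisted as undefined rather than stored literally. That mapping can be isolated as a small helper (hypothetical name; the settings shape follows the hunk):

```ts
type ThinkingOption = 'none' | 'minimal' | 'low' | 'medium' | 'high' | 'auto'

interface AssistantReasoningSettings {
  reasoning_effort?: ThinkingOption
  reasoning_effort_cache?: ThinkingOption
  qwenThinkMode?: boolean
}

// Hypothetical helper: turns a picked option into the persisted settings patch.
const toReasoningSettings = (option: ThinkingOption): AssistantReasoningSettings => ({
  reasoning_effort: option === 'none' ? undefined : option,
  reasoning_effort_cache: option === 'none' ? undefined : option,
  qwenThinkMode: option === 'none' ? undefined : true
})

console.log(toReasoningSettings('none')) // everything undefined – thinking disabled
console.log(toReasoningSettings('high')) // { reasoning_effort: 'high', reasoning_effort_cache: 'high', qwenThinkMode: true }
```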
@@ -1,7 +1,7 @@
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
|
||||
import { useTimer } from './useTimer'
|
||||
import { loggerService } from '@logger'
|
||||
import { useCallback, useLayoutEffect, useRef, useState } from 'react'
|
||||
|
||||
const logger = loggerService.withContext('useInPlaceEdit')
|
||||
export interface UseInPlaceEditOptions {
|
||||
onSave: ((value: string) => void) | ((value: string) => Promise<void>)
|
||||
onCancel?: () => void
|
||||
@@ -12,14 +12,10 @@ export interface UseInPlaceEditOptions {
|
||||
export interface UseInPlaceEditReturn {
|
||||
isEditing: boolean
|
||||
isSaving: boolean
|
||||
editValue: string
|
||||
inputRef: React.RefObject<HTMLInputElement | null>
|
||||
startEdit: (initialValue: string) => void
|
||||
saveEdit: () => void
|
||||
cancelEdit: () => void
|
||||
handleKeyDown: (e: React.KeyboardEvent) => void
|
||||
handleInputChange: (e: React.ChangeEvent<HTMLInputElement>) => void
|
||||
handleValueChange: (value: string) => void
|
||||
inputProps: React.InputHTMLAttributes<HTMLInputElement> & { ref: React.RefObject<HTMLInputElement | null> }
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -37,58 +33,55 @@ export function useInPlaceEdit(options: UseInPlaceEditOptions): UseInPlaceEditRe
|
||||
const [isSaving, setIsSaving] = useState(false)
|
||||
const [isEditing, setIsEditing] = useState(false)
|
||||
const [editValue, setEditValue] = useState('')
|
||||
const [originalValue, setOriginalValue] = useState('')
|
||||
const originalValueRef = useRef('')
|
||||
const inputRef = useRef<HTMLInputElement>(null)
|
||||
const { setTimeoutTimer } = useTimer()
|
||||
|
||||
const startEdit = useCallback(
|
||||
(initialValue: string) => {
|
||||
setIsEditing(true)
|
||||
setEditValue(initialValue)
|
||||
setOriginalValue(initialValue)
|
||||
const startEdit = useCallback((initialValue: string) => {
|
||||
setIsEditing(true)
|
||||
setEditValue(initialValue)
|
||||
originalValueRef.current = initialValue
|
||||
}, [])
|
||||
|
||||
setTimeoutTimer(
|
||||
'startEdit',
|
||||
() => {
|
||||
inputRef.current?.focus()
|
||||
if (autoSelectOnStart) {
|
||||
inputRef.current?.select()
|
||||
}
|
||||
},
|
||||
0
|
||||
)
|
||||
},
|
||||
[autoSelectOnStart, setTimeoutTimer]
|
||||
)
|
||||
useLayoutEffect(() => {
|
||||
if (isEditing) {
|
||||
inputRef.current?.focus()
|
||||
if (autoSelectOnStart) {
|
||||
inputRef.current?.select()
|
||||
}
|
||||
}
|
||||
}, [autoSelectOnStart, isEditing])
|
||||
|
||||
const saveEdit = useCallback(async () => {
|
||||
if (isSaving) return
|
||||
|
||||
const finalValue = trimOnSave ? editValue.trim() : editValue
|
||||
if (finalValue === originalValueRef.current) {
|
||||
setIsEditing(false)
|
||||
return
|
||||
}
|
||||
|
||||
setIsSaving(true)
|
||||
|
||||
try {
|
||||
const finalValue = trimOnSave ? editValue.trim() : editValue
|
||||
if (finalValue !== originalValue) {
|
||||
await onSave(finalValue)
|
||||
}
|
||||
await onSave(finalValue)
|
||||
setIsEditing(false)
|
||||
setEditValue('')
|
||||
setOriginalValue('')
|
||||
} finally {
|
||||
} catch (error) {
|
||||
logger.error('Error saving in-place edit', { error })
|
||||
setIsSaving(false)
|
||||
}
|
||||
}, [isSaving, trimOnSave, editValue, originalValue, onSave])
|
||||
}, [isSaving, trimOnSave, editValue, onSave])
|
||||
|
||||
const cancelEdit = useCallback(() => {
|
||||
setIsEditing(false)
|
||||
setEditValue('')
|
||||
setOriginalValue('')
|
||||
onCancel?.()
|
||||
}, [onCancel])
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent) => {
|
||||
if (e.key === 'Enter' && !e.nativeEvent.isComposing) {
|
||||
if (e.nativeEvent.isComposing) return
|
||||
if (e.key === 'Enter') {
|
||||
e.preventDefault()
|
||||
saveEdit()
|
||||
} else if (e.key === 'Escape') {
|
||||
@@ -104,37 +97,29 @@ export function useInPlaceEdit(options: UseInPlaceEditOptions): UseInPlaceEditRe
|
||||
setEditValue(e.target.value)
|
||||
}, [])
|
||||
|
||||
const handleValueChange = useCallback((value: string) => {
|
||||
setEditValue(value)
|
||||
}, [])
|
||||
|
||||
// Handle clicks outside the input to save
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (event: MouseEvent) => {
|
||||
if (isEditing && inputRef.current && !inputRef.current.contains(event.target as Node)) {
|
||||
saveEdit()
|
||||
}
|
||||
const handleBlur = useCallback(() => {
|
||||
// 这里的逻辑需要注意:
|
||||
// 如果点击了“取消”按钮,可能会先触发 Blur 保存。
|
||||
// 通常 InPlaceEdit 的逻辑是 Blur 即 Save。
|
||||
// 如果不想 Blur 保存,可以去掉这一行,或者判断 relatedTarget。
|
||||
if (!isSaving) {
|
||||
saveEdit()
|
||||
}
|
||||
|
||||
if (isEditing) {
|
||||
document.addEventListener('mousedown', handleClickOutside)
|
||||
return () => {
|
||||
document.removeEventListener('mousedown', handleClickOutside)
|
||||
}
|
||||
}
|
||||
return
|
||||
}, [isEditing, saveEdit])
|
||||
}, [saveEdit, isSaving])
|
||||
|
||||
return {
|
||||
isEditing,
|
||||
isSaving,
|
||||
editValue,
|
||||
inputRef,
|
||||
startEdit,
|
||||
saveEdit,
|
||||
cancelEdit,
|
||||
handleKeyDown,
|
||||
handleInputChange,
|
||||
handleValueChange
|
||||
inputProps: {
|
||||
ref: inputRef,
|
||||
value: editValue,
|
||||
onChange: handleInputChange,
|
||||
onKeyDown: handleKeyDown,
|
||||
onBlur: handleBlur,
|
||||
disabled: isSaving // 保存时禁用输入
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -311,7 +311,7 @@ export const getHttpMessageLabel = (key: string): string => {
 }

 const reasoningEffortOptionsKeyMap: Record<ThinkingOption, string> = {
-  off: 'assistants.settings.reasoning_effort.off',
+  none: 'assistants.settings.reasoning_effort.off',
   minimal: 'assistants.settings.reasoning_effort.minimal',
   high: 'assistants.settings.reasoning_effort.high',
   low: 'assistants.settings.reasoning_effort.low',
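Label resolution then stays a plain lookup into this map followed by a translation call. A sketch limited to the keys visible in the hunk (the remaining options follow the same pattern; t stands for an i18next-style translate function):

```ts
// Mirrors the map above: the renamed 'none' option reuses the existing '...off' label key.
const reasoningEffortOptionsKeyMap = {
  none: 'assistants.settings.reasoning_effort.off',
  minimal: 'assistants.settings.reasoning_effort.minimal',
  high: 'assistants.settings.reasoning_effort.high',
  low: 'assistants.settings.reasoning_effort.low'
} as const

type ThinkingOption = keyof typeof reasoningEffortOptionsKeyMap

// Assumes an i18next-style translate function is passed in.
const getThinkingOptionLabel = (t: (key: string) => string, option: ThinkingOption) =>
  t(reasoningEffortOptionsKeyMap[option])
```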
@@ -27,6 +27,9 @@
|
||||
"null_id": "Agent ID is null."
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "Enter your message here, send with {{key}} - @ select path, / select command"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "Failed to list agents."
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "New Folder",
|
||||
"untitled_note": "Untitled Note",
|
||||
"upload_failed": "Note upload failed",
|
||||
"upload_success": "Note uploaded success"
|
||||
"upload_files": "Upload Files",
|
||||
"upload_folder": "Upload Folder",
|
||||
"upload_success": "Note uploaded success",
|
||||
"uploading_files": "Uploading {{count}} files..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Assistant Response",
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
"null_id": "智能体 ID 为空。"
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "在这里输入消息,按 {{key}} 发送 - @ 选择路径, / 选择命令"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "获取智能体列表失败"
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "新文件夹",
|
||||
"untitled_note": "无标题笔记",
|
||||
"upload_failed": "笔记上传失败",
|
||||
"upload_success": "笔记上传成功"
|
||||
"upload_files": "上传文件",
|
||||
"upload_folder": "上传文件夹",
|
||||
"upload_success": "笔记上传成功",
|
||||
"uploading_files": "正在上传 {{count}} 个文件..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "助手响应",
|
||||
@@ -4478,7 +4484,7 @@
|
||||
"confirm": "确认",
|
||||
"forward": "前进",
|
||||
"multiple": "多选",
|
||||
"noResult": "[to be translated]:No results found",
|
||||
"noResult": "未找到结果",
|
||||
"page": "翻页",
|
||||
"select": "选择",
|
||||
"title": "快捷菜单"
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
"null_id": "代理程式 ID 為空。"
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "[to be translated]:Enter your message here, send with {{key}} - @ select path, / select command"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "無法列出代理程式。"
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "新資料夾",
|
||||
"untitled_note": "無標題筆記",
|
||||
"upload_failed": "筆記上傳失敗",
|
||||
"upload_success": "筆記上傳成功"
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_success": "筆記上傳成功",
|
||||
"uploading_files": "正在上傳 {{count}} 個檔案..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "助手回應",
|
||||
@@ -4478,7 +4484,7 @@
|
||||
"confirm": "確認",
|
||||
"forward": "前進",
|
||||
"multiple": "多選",
|
||||
"noResult": "[to be translated]:No results found",
|
||||
"noResult": "未找到結果",
|
||||
"page": "翻頁",
|
||||
"select": "選擇",
|
||||
"title": "快捷選單"
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
"null_id": "Agent ID ist leer."
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "[to be translated]:Enter your message here, send with {{key}} - @ select path, / select command"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "Agent-Liste abrufen fehlgeschlagen"
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "Neuer Ordner",
|
||||
"untitled_note": "Unbenannte Notiz",
|
||||
"upload_failed": "Notizen-Upload fehlgeschlagen",
|
||||
"upload_success": "Notizen erfolgreich hochgeladen"
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_success": "Notizen erfolgreich hochgeladen",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Assistenten-Antwort",
|
||||
@@ -4478,7 +4484,7 @@
|
||||
"confirm": "Bestätigen",
|
||||
"forward": "Vorwärts",
|
||||
"multiple": "Mehrfachauswahl",
|
||||
"noResult": "[to be translated]:No results found",
|
||||
"noResult": "Keine Ergebnisse gefunden",
|
||||
"page": "Seite umblättern",
|
||||
"select": "Auswählen",
|
||||
"title": "Schnellmenü"
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
"null_id": "Το ID του πράκτορα είναι null."
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "[to be translated]:Enter your message here, send with {{key}} - @ select path, / select command"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "Αποτυχία καταχώρησης πρακτόρων."
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "Νέος φάκελος",
|
||||
"untitled_note": "σημείωση χωρίς τίτλο",
|
||||
"upload_failed": "Η σημείωση δεν ανέβηκε",
|
||||
"upload_success": "Οι σημειώσεις μεταφορτώθηκαν με επιτυχία"
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_success": "Οι σημειώσεις μεταφορτώθηκαν με επιτυχία",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Απάντηση Βοηθού",
|
||||
@@ -4478,7 +4484,7 @@
|
||||
"confirm": "Επιβεβαίωση",
|
||||
"forward": "Μπρος",
|
||||
"multiple": "Πολλαπλή επιλογή",
|
||||
"noResult": "[to be translated]:No results found",
|
||||
"noResult": "Δεν βρέθηκαν αποτελέσματα",
|
||||
"page": "Σελίδα",
|
||||
"select": "Επιλογή",
|
||||
"title": "Γρήγορη Πρόσβαση"
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
"null_id": "El ID del agente es nulo."
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "[to be translated]:Enter your message here, send with {{key}} - @ select path, / select command"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "Error al listar agentes."
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "Nueva carpeta",
|
||||
"untitled_note": "Nota sin título",
|
||||
"upload_failed": "Error al cargar la nota",
|
||||
"upload_success": "Nota cargada con éxito"
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_success": "Nota cargada con éxito",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Respuesta del asistente",
|
||||
@@ -4478,7 +4484,7 @@
|
||||
"confirm": "Confirmar",
|
||||
"forward": "Adelante",
|
||||
"multiple": "Selección múltiple",
|
||||
"noResult": "[to be translated]:No results found",
|
||||
"noResult": "No se encontraron resultados",
|
||||
"page": "Página",
|
||||
"select": "Seleccionar",
|
||||
"title": "Menú de acceso rápido"
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
"null_id": "L'ID de l'agent est nul."
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "[to be translated]:Enter your message here, send with {{key}} - @ select path, / select command"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "Échec de la liste des agents."
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "nouveau dossier",
|
||||
"untitled_note": "Note sans titre",
|
||||
"upload_failed": "Échec du téléchargement de la note",
|
||||
"upload_success": "Note téléchargée avec succès"
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_success": "Note téléchargée avec succès",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Réponse de l'assistant",
|
||||
@@ -4478,7 +4484,7 @@
|
||||
"confirm": "Подтвердить",
|
||||
"forward": "Вперед",
|
||||
"multiple": "Множественный выбор",
|
||||
"noResult": "[to be translated]:No results found",
|
||||
"noResult": "Aucun résultat trouvé",
|
||||
"page": "Перелистнуть страницу",
|
||||
"select": "Выбрать",
|
||||
"title": "Быстрое меню"
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
"null_id": "エージェント ID が null です。"
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "[to be translated]:Enter your message here, send with {{key}} - @ select path, / select command"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "エージェントの一覧取得に失敗しました。"
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "新ファイル夹",
|
||||
"untitled_note": "無題のメモ",
|
||||
"upload_failed": "ノートのアップロードに失敗しました",
|
||||
"upload_success": "ノートのアップロードが成功しました"
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_success": "ノートのアップロードが成功しました",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "助手回應",
|
||||
@@ -4478,7 +4484,7 @@
|
||||
"confirm": "確認",
|
||||
"forward": "進む",
|
||||
"multiple": "複数選択",
|
||||
"noResult": "[to be translated]:No results found",
|
||||
"noResult": "結果が見つかりません",
|
||||
"page": "ページ",
|
||||
"select": "選択",
|
||||
"title": "クイックメニュー"
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
"null_id": "O ID do agente é nulo."
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "[to be translated]:Enter your message here, send with {{key}} - @ select path, / select command"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "Falha ao listar agentes."
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "Nova pasta",
|
||||
"untitled_note": "Nota sem título",
|
||||
"upload_failed": "Falha ao carregar a nota",
|
||||
"upload_success": "Nota carregada com sucesso"
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_success": "Nota carregada com sucesso",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Resposta do assistente",
|
||||
@@ -4478,7 +4484,7 @@
|
||||
"confirm": "Confirmar",
|
||||
"forward": "Avançar",
|
||||
"multiple": "Múltipla Seleção",
|
||||
"noResult": "[to be translated]:No results found",
|
||||
"noResult": "Nenhum resultado encontrado",
|
||||
"page": "Página",
|
||||
"select": "Selecionar",
|
||||
"title": "Menu de Atalho"
|
||||
|
||||
@@ -27,6 +27,9 @@
|
||||
"null_id": "ID агента равен null."
|
||||
}
|
||||
},
|
||||
"input": {
|
||||
"placeholder": "[to be translated]:Enter your message here, send with {{key}} - @ select path, / select command"
|
||||
},
|
||||
"list": {
|
||||
"error": {
|
||||
"failed": "Не удалось получить список агентов."
|
||||
@@ -2172,7 +2175,10 @@
|
||||
"untitled_folder": "Новая папка",
|
||||
"untitled_note": "Незаглавленная заметка",
|
||||
"upload_failed": "Не удалось загрузить заметку",
|
||||
"upload_success": "Заметка успешно загружена"
|
||||
"upload_files": "[to be translated]:Upload Files",
|
||||
"upload_folder": "[to be translated]:Upload Folder",
|
||||
"upload_success": "Заметка успешно загружена",
|
||||
"uploading_files": "[to be translated]:Uploading {{count}} files..."
|
||||
},
|
||||
"notification": {
|
||||
"assistant": "Ответ ассистента",
|
||||
@@ -4478,7 +4484,7 @@
|
||||
"confirm": "Подтвердить",
|
||||
"forward": "Вперед",
|
||||
"multiple": "Множественный выбор",
|
||||
"noResult": "[to be translated]:No results found",
|
||||
"noResult": "Результаты не найдены",
|
||||
"page": "Страница",
|
||||
"select": "Выбрать",
|
||||
"title": "Быстрое меню"
|
||||
|
||||
@@ -470,7 +470,7 @@ const AgentSessionInputbarInner: FC<InnerProps> = ({ assistant, agentId, session
|
||||
)
|
||||
const placeholderText = useMemo(
|
||||
() =>
|
||||
t('chat.input.placeholder', {
|
||||
t('agent.input.placeholder', {
|
||||
key: getSendMessageShortcutLabel(sendMessageShortcut)
|
||||
}),
|
||||
[sendMessageShortcut, t]
|
||||
|
||||
@@ -313,7 +313,7 @@ export const InputbarCore: FC<InputbarCoreProps> = ({
|
||||
|
||||
const isEnterPressed = event.key === 'Enter' && !event.nativeEvent.isComposing
|
||||
if (isEnterPressed) {
|
||||
if (isSendMessageKeyPressed(event, sendMessageShortcut)) {
|
||||
if (isSendMessageKeyPressed(event, sendMessageShortcut) && !cannotSend) {
|
||||
handleSendMessage()
|
||||
event.preventDefault()
|
||||
return
|
||||
@@ -359,6 +359,7 @@ export const InputbarCore: FC<InputbarCoreProps> = ({
|
||||
translate,
|
||||
handleToggleExpanded,
|
||||
sendMessageShortcut,
|
||||
cannotSend,
|
||||
handleSendMessage,
|
||||
setText,
|
||||
setTimeoutTimer,
|
||||
|
||||
@@ -36,7 +36,7 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
const { assistant, updateAssistantSettings } = useAssistant(assistantId)
|
||||
|
||||
const currentReasoningEffort = useMemo(() => {
|
||||
return assistant.settings?.reasoning_effort || 'off'
|
||||
return assistant.settings?.reasoning_effort || 'none'
|
||||
}, [assistant.settings?.reasoning_effort])
|
||||
|
||||
// 确定当前模型支持的选项类型
|
||||
@@ -46,21 +46,21 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
const supportedOptions: ThinkingOption[] = useMemo(() => {
|
||||
if (modelType === 'doubao') {
|
||||
if (isDoubaoThinkingAutoModel(model)) {
|
||||
return ['off', 'auto', 'high']
|
||||
return ['none', 'auto', 'high']
|
||||
}
|
||||
return ['off', 'high']
|
||||
return ['none', 'high']
|
||||
}
|
||||
return MODEL_SUPPORTED_OPTIONS[modelType]
|
||||
}, [model, modelType])
|
||||
|
||||
const onThinkingChange = useCallback(
|
||||
(option?: ThinkingOption) => {
|
||||
const isEnabled = option !== undefined && option !== 'off'
|
||||
const isEnabled = option !== undefined && option !== 'none'
|
||||
// 然后更新设置
|
||||
if (!isEnabled) {
|
||||
updateAssistantSettings({
|
||||
reasoning_effort: undefined,
|
||||
reasoning_effort_cache: undefined,
|
||||
reasoning_effort: option,
|
||||
reasoning_effort_cache: option,
|
||||
qwenThinkMode: false
|
||||
})
|
||||
return
|
||||
@@ -96,10 +96,10 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
}))
|
||||
}, [currentReasoningEffort, supportedOptions, onThinkingChange])
|
||||
|
||||
const isThinkingEnabled = currentReasoningEffort !== undefined && currentReasoningEffort !== 'off'
|
||||
const isThinkingEnabled = currentReasoningEffort !== undefined && currentReasoningEffort !== 'none'
|
||||
|
||||
const disableThinking = useCallback(() => {
|
||||
onThinkingChange('off')
|
||||
onThinkingChange('none')
|
||||
}, [onThinkingChange])
|
||||
|
||||
const openQuickPanel = useCallback(() => {
|
||||
@@ -116,7 +116,7 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
return
|
||||
}
|
||||
|
||||
if (isThinkingEnabled && supportedOptions.includes('off')) {
|
||||
if (isThinkingEnabled && supportedOptions.includes('none')) {
|
||||
disableThinking()
|
||||
return
|
||||
}
|
||||
@@ -146,13 +146,13 @@ const ThinkingButton: FC<Props> = ({ quickPanel, model, assistantId }): ReactEle
|
||||
<Tooltip
|
||||
placement="top"
|
||||
title={
|
||||
isThinkingEnabled && supportedOptions.includes('off')
|
||||
isThinkingEnabled && supportedOptions.includes('none')
|
||||
? t('common.close')
|
||||
: t('assistants.settings.reasoning_effort.label')
|
||||
}
|
||||
mouseLeaveDelay={0}
|
||||
arrow>
|
||||
<ActionIconButton onClick={handleOpenQuickPanel} active={currentReasoningEffort !== 'off'}>
|
||||
<ActionIconButton onClick={handleOpenQuickPanel} active={currentReasoningEffort !== 'none'}>
|
||||
{ThinkingIcon(currentReasoningEffort)}
|
||||
</ActionIconButton>
|
||||
</Tooltip>
|
||||
@@ -178,7 +178,7 @@ const ThinkingIcon = (option?: ThinkingOption) => {
|
||||
case 'auto':
|
||||
IconComponent = MdiLightbulbAutoOutline
|
||||
break
|
||||
case 'off':
|
||||
case 'none':
|
||||
IconComponent = MdiLightbulbOffOutline
|
||||
break
|
||||
default:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { isGeminiModel } from '@renderer/config/models'
|
||||
import { isAnthropicModel, isGeminiModel } from '@renderer/config/models'
|
||||
import { isSupportUrlContextProvider } from '@renderer/config/providers'
|
||||
import { defineTool, registerTool, TopicType } from '@renderer/pages/home/Inputbar/types'
|
||||
import { getProviderByModel } from '@renderer/services/AssistantService'
|
||||
@@ -10,9 +10,8 @@ const urlContextTool = defineTool({
|
||||
label: (t) => t('chat.input.url_context'),
|
||||
visibleInScopes: [TopicType.Chat],
|
||||
condition: ({ model }) => {
|
||||
if (!isGeminiModel(model)) return false
|
||||
const provider = getProviderByModel(model)
|
||||
return !!provider && isSupportUrlContextProvider(provider)
|
||||
return !!provider && isSupportUrlContextProvider(provider) && (isGeminiModel(model) || isAnthropicModel(model))
|
||||
},
|
||||
render: ({ assistant }) => <UrlContextButton assistantId={assistant.id} />
|
||||
})
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { cn } from '@renderer/utils'
|
||||
import type { CollapseProps } from 'antd'
|
||||
import { Card } from 'antd'
|
||||
import { CheckCircle, Circle, Clock, ListTodo } from 'lucide-react'
|
||||
@@ -11,23 +10,27 @@ const getStatusConfig = (status: TodoItem['status']) => {
|
||||
switch (status) {
|
||||
case 'completed':
|
||||
return {
|
||||
color: 'success' as const,
|
||||
icon: <CheckCircle className="h-3 w-3" />
|
||||
color: 'var(--color-status-success)',
|
||||
opacity: 0.6,
|
||||
icon: <CheckCircle className="h-4 w-4" strokeWidth={2.5} />
|
||||
}
|
||||
case 'in_progress':
|
||||
return {
|
||||
color: 'primary' as const,
|
||||
icon: <Clock className="h-3 w-3" />
|
||||
color: 'var(--color-primary)',
|
||||
opacity: 0.9,
|
||||
icon: <Clock className="h-4 w-4" strokeWidth={2.5} />
|
||||
}
|
||||
case 'pending':
|
||||
return {
|
||||
color: 'default' as const,
|
||||
icon: <Circle className="h-3 w-3" />
|
||||
color: 'var(--color-border)',
|
||||
opacity: 0.4,
|
||||
icon: <Circle className="h-4 w-4" strokeWidth={2.5} />
|
||||
}
|
||||
default:
|
||||
return {
|
||||
color: 'default' as const,
|
||||
icon: <Circle className="h-3 w-3" />
|
||||
color: 'var(--color-border)',
|
||||
opacity: 0.4,
|
||||
icon: <Circle className="h-4 w-4" strokeWidth={2.5} />
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -64,10 +67,8 @@ export function TodoWriteTool({
|
||||
<div className="p-2">
|
||||
<div className="flex items-center justify-center gap-3">
|
||||
<div
|
||||
className={cn(
|
||||
'flex items-center justify-center rounded-full border bg-opacity-50 p-2',
|
||||
`bg-${statusConfig.color}`
|
||||
)}>
|
||||
className="flex items-center justify-center rounded-full border p-1"
|
||||
style={{ backgroundColor: statusConfig.color, opacity: statusConfig.opacity }}>
|
||||
{statusConfig.icon}
|
||||
</div>
|
||||
<div className="min-w-0 flex-1">
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { PermissionUpdate } from '@anthropic-ai/claude-agent-sdk'
|
||||
import { loggerService } from '@logger'
|
||||
import { useAppDispatch, useAppSelector } from '@renderer/store'
|
||||
import { selectPendingPermissionByToolName, toolPermissionsActions } from '@renderer/store/toolPermissions'
|
||||
import { selectPendingPermission, toolPermissionsActions } from '@renderer/store/toolPermissions'
|
||||
import type { NormalToolResponse } from '@renderer/types'
|
||||
import { Button } from 'antd'
|
||||
import { ChevronDown, CirclePlay, CircleX } from 'lucide-react'
|
||||
@@ -17,9 +17,7 @@ interface Props {
|
||||
export function ToolPermissionRequestCard({ toolResponse }: Props) {
|
||||
const { t } = useTranslation()
|
||||
const dispatch = useAppDispatch()
|
||||
const request = useAppSelector((state) =>
|
||||
selectPendingPermissionByToolName(state.toolPermissions, toolResponse.tool.name)
|
||||
)
|
||||
const request = useAppSelector((state) => selectPendingPermission(state.toolPermissions, toolResponse.toolCallId))
|
||||
const [now, setNow] = useState(() => Date.now())
|
||||
const [showDetails, setShowDetails] = useState(false)
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import Selector from '@renderer/components/Selector'
|
||||
import {
|
||||
getModelSupportedVerbosity,
|
||||
isSupportedReasoningEffortOpenAIModel,
|
||||
isSupportFlexServiceTierModel,
|
||||
isSupportVerbosityModel
|
||||
@@ -80,20 +81,24 @@ const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, Setti
|
||||
}
|
||||
]
|
||||
|
||||
const verbosityOptions = [
|
||||
{
|
||||
value: 'low',
|
||||
label: t('settings.openai.verbosity.low')
|
||||
},
|
||||
{
|
||||
value: 'medium',
|
||||
label: t('settings.openai.verbosity.medium')
|
||||
},
|
||||
{
|
||||
value: 'high',
|
||||
label: t('settings.openai.verbosity.high')
|
||||
}
|
||||
]
|
||||
const verbosityOptions = useMemo(() => {
|
||||
const allOptions = [
|
||||
{
|
||||
value: 'low',
|
||||
label: t('settings.openai.verbosity.low')
|
||||
},
|
||||
{
|
||||
value: 'medium',
|
||||
label: t('settings.openai.verbosity.medium')
|
||||
},
|
||||
{
|
||||
value: 'high',
|
||||
label: t('settings.openai.verbosity.high')
|
||||
}
|
||||
]
|
||||
const supportedVerbosityLevels = getModelSupportedVerbosity(model)
|
||||
return allOptions.filter((option) => supportedVerbosityLevels.includes(option.value as any))
|
||||
}, [model, t])
|
||||
|
||||
const serviceTierOptions = useMemo(() => {
|
||||
let baseOptions: { value: ServiceTier; label: string }[]
|
||||
@@ -155,6 +160,15 @@ const OpenAISettingsGroup: FC<Props> = ({ model, providerId, SettingGroup, Setti
|
||||
}
|
||||
}, [provider.id, serviceTierMode, serviceTierOptions, setServiceTierMode])
|
||||
|
||||
useEffect(() => {
|
||||
if (verbosity && !verbosityOptions.some((option) => option.value === verbosity)) {
|
||||
const supportedVerbosityLevels = getModelSupportedVerbosity(model)
|
||||
// Default to the highest supported verbosity level
|
||||
const defaultVerbosity = supportedVerbosityLevels[supportedVerbosityLevels.length - 1]
|
||||
setVerbosity(defaultVerbosity)
|
||||
}
|
||||
}, [model, verbosity, verbosityOptions, setVerbosity])
|
||||
|
||||
if (!isOpenAIReasoning && !isSupportServiceTier && !isSupportVerbosity) {
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ const SessionItem: FC<SessionItemProps> = ({ session, agentId, onDelete, onPress
|
||||
const targetSession = useDeferredValue(_targetSession)
|
||||
const dispatch = useAppDispatch()
|
||||
|
||||
const { isEditing, isSaving, editValue, inputRef, startEdit, handleKeyDown, handleValueChange } = useInPlaceEdit({
|
||||
const { isEditing, isSaving, startEdit, inputProps } = useInPlaceEdit({
|
||||
onSave: async (value) => {
|
||||
if (value !== session.name) {
|
||||
await updateSession({ id: session.id, name: value })
|
||||
@@ -179,14 +179,7 @@ const SessionItem: FC<SessionItemProps> = ({ session, agentId, onDelete, onPress
|
||||
{isFulfilled && !isActive && <FulfilledIndicator />}
|
||||
<SessionNameContainer>
|
||||
{isEditing ? (
|
||||
<SessionEditInput
|
||||
ref={inputRef}
|
||||
value={editValue}
|
||||
onChange={(e: React.ChangeEvent<HTMLInputElement>) => handleValueChange(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
onClick={(e: React.MouseEvent) => e.stopPropagation()}
|
||||
style={{ opacity: isSaving ? 0.5 : 1 }}
|
||||
/>
|
||||
<SessionEditInput {...inputProps} style={{ opacity: isSaving ? 0.5 : 1 }} />
|
||||
) : (
|
||||
<>
|
||||
<SessionName>
|
||||
|
||||
@@ -79,7 +79,7 @@ export const Topics: React.FC<Props> = ({ assistant: _assistant, activeTopic, se
|
||||
const deleteTimerRef = useRef<NodeJS.Timeout>(null)
|
||||
const [editingTopicId, setEditingTopicId] = useState<string | null>(null)
|
||||
|
||||
const topicEdit = useInPlaceEdit({
|
||||
const { startEdit, isEditing, inputProps } = useInPlaceEdit({
|
||||
onSave: (name: string) => {
|
||||
const topic = assistant.topics.find((t) => t.id === editingTopicId)
|
||||
if (topic && name !== topic.name) {
|
||||
@@ -520,29 +520,23 @@ export const Topics: React.FC<Props> = ({ assistant: _assistant, activeTopic, se
|
||||
<TopicListItem
|
||||
onContextMenu={() => setTargetTopic(topic)}
|
||||
className={classNames(isActive ? 'active' : '', singlealone ? 'singlealone' : '')}
|
||||
onClick={editingTopicId === topic.id && topicEdit.isEditing ? undefined : () => onSwitchTopic(topic)}
|
||||
onClick={editingTopicId === topic.id && isEditing ? undefined : () => onSwitchTopic(topic)}
|
||||
style={{
|
||||
borderRadius,
|
||||
cursor: editingTopicId === topic.id && topicEdit.isEditing ? 'default' : 'pointer'
|
||||
cursor: editingTopicId === topic.id && isEditing ? 'default' : 'pointer'
|
||||
}}>
|
||||
{isPending(topic.id) && !isActive && <PendingIndicator />}
|
||||
{isFulfilled(topic.id) && !isActive && <FulfilledIndicator />}
|
||||
<TopicNameContainer>
|
||||
{editingTopicId === topic.id && topicEdit.isEditing ? (
|
||||
<TopicEditInput
|
||||
ref={topicEdit.inputRef}
|
||||
value={topicEdit.editValue}
|
||||
onChange={topicEdit.handleInputChange}
|
||||
onKeyDown={topicEdit.handleKeyDown}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
/>
|
||||
{editingTopicId === topic.id && isEditing ? (
|
||||
<TopicEditInput {...inputProps} onClick={(e) => e.stopPropagation()} />
|
||||
) : (
|
||||
<TopicName
|
||||
className={getTopicNameClassName()}
|
||||
title={topicName}
|
||||
onDoubleClick={() => {
|
||||
setEditingTopicId(topic.id)
|
||||
topicEdit.startEdit(topic.name)
|
||||
startEdit(topic.name)
|
||||
}}>
|
||||
{topicName}
|
||||
</TopicName>
|
||||
|
||||
@@ -295,6 +295,16 @@ const NotesPage: FC = () => {
|
||||
break
|
||||
}
|
||||
|
||||
case 'refresh': {
|
||||
// 批量操作完成后的单次刷新
|
||||
logger.debug('Received refresh event, triggering tree refresh')
|
||||
const refresh = refreshTreeRef.current
|
||||
if (refresh) {
|
||||
await refresh()
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'add':
|
||||
case 'addDir':
|
||||
case 'unlink':
|
||||
@@ -621,7 +631,27 @@ const NotesPage: FC = () => {
|
||||
throw new Error('No folder path selected')
|
||||
}
|
||||
|
||||
const result = await uploadNotes(files, targetFolderPath)
|
||||
// Validate uploadNotes function is available
|
||||
if (typeof uploadNotes !== 'function') {
|
||||
logger.error('uploadNotes function is not available', { uploadNotes })
|
||||
window.toast.error(t('notes.upload_failed'))
|
||||
return
|
||||
}
|
||||
|
||||
let result: Awaited<ReturnType<typeof uploadNotes>>
|
||||
try {
|
||||
result = await uploadNotes(files, targetFolderPath)
|
||||
} catch (uploadError) {
|
||||
logger.error('Upload operation failed:', uploadError as Error)
|
||||
throw uploadError
|
||||
}
|
||||
|
||||
// Validate result object
|
||||
if (!result || typeof result !== 'object') {
|
||||
logger.error('Invalid upload result:', { result })
|
||||
window.toast.error(t('notes.upload_failed'))
|
||||
return
|
||||
}
|
||||
|
||||
// 检查上传结果
|
||||
if (result.fileCount === 0) {
|
||||
|
||||
(File diff suppressed because it is too large)

src/renderer/src/pages/notes/components/TreeNode.tsx (new file, 498 lines)
@@ -0,0 +1,498 @@
|
||||
import HighlightText from '@renderer/components/HighlightText'
|
||||
import {
|
||||
useNotesActions,
|
||||
useNotesDrag,
|
||||
useNotesEditing,
|
||||
useNotesSearch,
|
||||
useNotesSelection,
|
||||
useNotesUI
|
||||
} from '@renderer/pages/notes/context/NotesContexts'
|
||||
import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService'
|
||||
import type { SearchMatch, SearchResult } from '@renderer/services/NotesSearchService'
|
||||
import type { NotesTreeNode } from '@renderer/types/note'
|
||||
import { Dropdown } from 'antd'
|
||||
import { ChevronDown, ChevronRight, File, FilePlus, Folder, FolderOpen } from 'lucide-react'
|
||||
import { memo, useCallback, useMemo, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import styled from 'styled-components'
|
||||
|
||||
interface TreeNodeProps {
|
||||
node: NotesTreeNode | SearchResult
|
||||
depth: number
|
||||
renderChildren?: boolean
|
||||
onHintClick?: () => void
|
||||
}
|
||||
|
||||
const TreeNode = memo<TreeNodeProps>(({ node, depth, renderChildren = true, onHintClick }) => {
|
||||
const { t } = useTranslation()
|
||||
|
||||
// Use split contexts - only subscribe to what this node needs
|
||||
const { selectedFolderId, activeNodeId } = useNotesSelection()
|
||||
const { editingNodeId, renamingNodeIds, newlyRenamedNodeIds, inPlaceEdit } = useNotesEditing()
|
||||
const { draggedNodeId, dragOverNodeId, dragPosition, onDragStart, onDragOver, onDragLeave, onDrop, onDragEnd } =
|
||||
useNotesDrag()
|
||||
const { searchKeyword, showMatches } = useNotesSearch()
|
||||
const { openDropdownKey } = useNotesUI()
|
||||
const { getMenuItems, onSelectNode, onToggleExpanded, onDropdownOpenChange } = useNotesActions()
|
||||
|
||||
const [showAllMatches, setShowAllMatches] = useState(false)
|
||||
const { isEditing: isInputEditing, inputProps } = inPlaceEdit
|
||||
|
||||
// 检查是否是 hint 节点
|
||||
const isHintNode = node.type === 'hint'
|
||||
|
||||
// 检查是否是搜索结果
|
||||
const searchResult = 'matchType' in node ? (node as SearchResult) : null
|
||||
const hasMatches = searchResult && searchResult.matches && searchResult.matches.length > 0
|
||||
|
||||
// 处理匹配项点击
|
||||
const handleMatchClick = useCallback(
|
||||
(match: SearchMatch) => {
|
||||
// 发送定位事件
|
||||
EventEmitter.emit(EVENT_NAMES.LOCATE_NOTE_LINE, {
|
||||
noteId: node.id,
|
||||
lineNumber: match.lineNumber,
|
||||
lineContent: match.lineContent
|
||||
})
|
||||
},
|
||||
[node]
|
||||
)
|
||||
|
||||
const isActive = selectedFolderId ? node.type === 'folder' && node.id === selectedFolderId : node.id === activeNodeId
|
||||
const isEditing = editingNodeId === node.id && isInputEditing
|
||||
const isRenaming = renamingNodeIds.has(node.id)
|
||||
const isNewlyRenamed = newlyRenamedNodeIds.has(node.id)
|
||||
const hasChildren = node.children && node.children.length > 0
|
||||
const isDragging = draggedNodeId === node.id
|
||||
const isDragOver = dragOverNodeId === node.id
|
||||
const isDragBefore = isDragOver && dragPosition === 'before'
|
||||
const isDragInside = isDragOver && dragPosition === 'inside'
|
||||
const isDragAfter = isDragOver && dragPosition === 'after'
|
||||
|
||||
const getNodeNameClassName = () => {
|
||||
if (isRenaming) return 'shimmer'
|
||||
if (isNewlyRenamed) return 'typing'
|
||||
return ''
|
||||
}
|
||||
|
||||
const displayName = useMemo(() => {
|
||||
if (!searchKeyword) {
|
||||
return node.name
|
||||
}
|
||||
|
||||
const name = node.name ?? ''
|
||||
if (!name) {
|
||||
return name
|
||||
}
|
||||
|
||||
const keyword = searchKeyword
|
||||
const nameLower = name.toLowerCase()
|
||||
const keywordLower = keyword.toLowerCase()
|
||||
const matchStart = nameLower.indexOf(keywordLower)
|
||||
|
||||
if (matchStart === -1) {
|
||||
return name
|
||||
}
|
||||
|
||||
const matchEnd = matchStart + keyword.length
|
||||
const beforeMatch = Math.min(2, matchStart)
|
||||
const contextStart = matchStart - beforeMatch
|
||||
const contextLength = 50
|
||||
const contextEnd = Math.min(name.length, matchEnd + contextLength)
|
||||
|
||||
const prefix = contextStart > 0 ? '...' : ''
|
||||
const suffix = contextEnd < name.length ? '...' : ''
|
||||
|
||||
return prefix + name.substring(contextStart, contextEnd) + suffix
|
||||
}, [node.name, searchKeyword])
|
||||
|
||||
// Special render for hint nodes
|
||||
if (isHintNode) {
|
||||
return (
|
||||
<div key={node.id}>
|
||||
<TreeNodeContainer active={false} depth={depth}>
|
||||
<TreeNodeContent>
|
||||
<NodeIcon>
|
||||
<FilePlus size={16} />
|
||||
</NodeIcon>
|
||||
<DropHintText onClick={onHintClick}>{t('notes.drop_markdown_hint')}</DropHintText>
|
||||
</TreeNodeContent>
|
||||
</TreeNodeContainer>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div key={node.id}>
|
||||
<Dropdown
|
||||
menu={{ items: getMenuItems(node as NotesTreeNode) }}
|
||||
trigger={['contextMenu']}
|
||||
open={openDropdownKey === node.id}
|
||||
onOpenChange={(open) => onDropdownOpenChange(open ? node.id : null)}>
|
||||
<div onContextMenu={(e) => e.stopPropagation()}>
|
||||
<TreeNodeContainer
|
||||
active={isActive}
|
||||
depth={depth}
|
||||
isDragging={isDragging}
|
||||
isDragOver={isDragOver}
|
||||
isDragBefore={isDragBefore}
|
||||
isDragInside={isDragInside}
|
||||
isDragAfter={isDragAfter}
|
||||
draggable={!isEditing}
|
||||
data-node-id={node.id}
|
||||
onDragStart={(e) => onDragStart(e, node as NotesTreeNode)}
|
||||
onDragOver={(e) => onDragOver(e, node as NotesTreeNode)}
|
||||
onDragLeave={onDragLeave}
|
||||
onDrop={(e) => onDrop(e, node as NotesTreeNode)}
|
||||
onDragEnd={onDragEnd}>
|
||||
<TreeNodeContent onClick={() => onSelectNode(node as NotesTreeNode)}>
|
||||
<NodeIndent depth={depth} />
|
||||
|
||||
{node.type === 'folder' && (
|
||||
<ExpandIcon
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onToggleExpanded(node.id)
|
||||
}}
|
||||
title={node.expanded ? t('notes.collapse') : t('notes.expand')}>
|
||||
{node.expanded ? <ChevronDown size={14} /> : <ChevronRight size={14} />}
|
||||
</ExpandIcon>
|
||||
)}
|
||||
|
||||
<NodeIcon>
|
||||
{node.type === 'folder' ? (
|
||||
node.expanded ? (
|
||||
<FolderOpen size={16} />
|
||||
) : (
|
||||
<Folder size={16} />
|
||||
)
|
||||
) : (
|
||||
<File size={16} />
|
||||
)}
|
||||
</NodeIcon>
|
||||
|
||||
{isEditing ? (
|
||||
<EditInput {...inputProps} onClick={(e) => e.stopPropagation()} autoFocus />
|
||||
) : (
|
||||
<NodeNameContainer>
|
||||
<NodeName className={getNodeNameClassName()}>
|
||||
{searchKeyword ? <HighlightText text={displayName} keyword={searchKeyword} /> : node.name}
|
||||
</NodeName>
|
||||
{searchResult && searchResult.matchType && searchResult.matchType !== 'filename' && (
|
||||
<MatchBadge matchType={searchResult.matchType}>
|
||||
{searchResult.matchType === 'both' ? t('notes.search.both') : t('notes.search.content')}
|
||||
</MatchBadge>
|
||||
)}
|
||||
</NodeNameContainer>
|
||||
)}
|
||||
</TreeNodeContent>
|
||||
</TreeNodeContainer>
|
||||
</div>
|
||||
</Dropdown>
|
||||
|
||||
{showMatches && hasMatches && (
|
||||
<SearchMatchesContainer depth={depth}>
|
||||
{(showAllMatches ? searchResult!.matches! : searchResult!.matches!.slice(0, 3)).map((match, idx) => (
|
||||
<MatchItem key={idx} onClick={() => handleMatchClick(match)}>
|
||||
<MatchLineNumber>{match.lineNumber}</MatchLineNumber>
|
||||
<MatchContext>
|
||||
<HighlightText text={match.context} keyword={searchKeyword} />
|
||||
</MatchContext>
|
||||
</MatchItem>
|
||||
))}
|
||||
{searchResult!.matches!.length > 3 && (
|
||||
<MoreMatches
|
||||
depth={depth}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
setShowAllMatches(!showAllMatches)
|
||||
}}>
|
||||
{showAllMatches ? (
|
||||
<>
|
||||
<ChevronDown size={12} style={{ marginRight: 4 }} />
|
||||
{t('notes.search.show_less')}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<ChevronRight size={12} style={{ marginRight: 4 }} />+{searchResult!.matches!.length - 3}{' '}
|
||||
{t('notes.search.more_matches')}
|
||||
</>
|
||||
)}
|
||||
</MoreMatches>
|
||||
)}
|
||||
</SearchMatchesContainer>
|
||||
)}
|
||||
|
||||
{renderChildren && node.type === 'folder' && node.expanded && hasChildren && (
|
||||
<div>
|
||||
{node.children!.map((child) => (
|
||||
<TreeNode key={child.id} node={child} depth={depth + 1} renderChildren={renderChildren} />
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
})
|
||||
|
||||
export const TreeNodeContainer = styled.div<{
|
||||
active: boolean
|
||||
depth: number
|
||||
isDragging?: boolean
|
||||
isDragOver?: boolean
|
||||
isDragBefore?: boolean
|
||||
isDragInside?: boolean
|
||||
isDragAfter?: boolean
|
||||
}>`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 4px 6px;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
margin-bottom: 2px;
|
||||
/* CRITICAL: Must have fully opaque background for sticky to work properly */
|
||||
/* Transparent/semi-transparent backgrounds will show content bleeding through when sticky */
|
||||
background-color: ${(props) => {
|
||||
if (props.isDragInside) return 'var(--color-primary-background)'
|
||||
// Use hover color for active state - it's guaranteed to be opaque
|
||||
if (props.active) return 'var(--color-hover, var(--color-background-mute))'
|
||||
return 'var(--color-background)'
|
||||
}};
|
||||
border: 0.5px solid
|
||||
${(props) => {
|
||||
if (props.isDragInside) return 'var(--color-primary)'
|
||||
if (props.active) return 'var(--color-border)'
|
||||
return 'transparent'
|
||||
}};
|
||||
opacity: ${(props) => (props.isDragging ? 0.5 : 1)};
|
||||
transition: all 0.2s ease;
|
||||
position: relative;
|
||||
|
||||
&:hover {
|
||||
background-color: var(--color-background-soft);
|
||||
|
||||
.node-actions {
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
|
||||
/* 添加拖拽指示线 */
|
||||
${(props) =>
|
||||
props.isDragBefore &&
|
||||
`
|
||||
&::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: -2px;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 2px;
|
||||
background-color: var(--color-primary);
|
||||
border-radius: 1px;
|
||||
}
|
||||
`}
|
||||
|
||||
${(props) =>
|
||||
props.isDragAfter &&
|
||||
`
|
||||
&::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
bottom: -2px;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 2px;
|
||||
background-color: var(--color-primary);
|
||||
border-radius: 1px;
|
||||
}
|
||||
`}
|
||||
`
|
||||
|
||||
export const TreeNodeContent = styled.div`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex: 1;
|
||||
min-width: 0;
|
`

export const NodeIndent = styled.div<{ depth: number }>`
  width: ${(props) => props.depth * 16}px;
  flex-shrink: 0;
`

export const ExpandIcon = styled.div`
  width: 16px;
  height: 16px;
  display: flex;
  align-items: center;
  justify-content: center;
  color: var(--color-text-2);
  margin-right: 4px;

  &:hover {
    color: var(--color-text);
  }
`

export const NodeIcon = styled.div`
  display: flex;
  align-items: center;
  justify-content: center;
  margin-right: 8px;
  color: var(--color-text-2);
  flex-shrink: 0;
`

export const NodeName = styled.div`
  flex: 1;
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
  font-size: 13px;
  color: var(--color-text);
  position: relative;
  will-change: background-position, width;

  --color-shimmer-mid: var(--color-text-1);
  --color-shimmer-end: color-mix(in srgb, var(--color-text-1) 25%, transparent);

  &.shimmer {
    background: linear-gradient(to left, var(--color-shimmer-end), var(--color-shimmer-mid), var(--color-shimmer-end));
    background-size: 200% 100%;
    background-clip: text;
    color: transparent;
    animation: shimmer 3s linear infinite;
  }

  &.typing {
    display: block;
    white-space: nowrap;
    overflow: hidden;
    animation: typewriter 0.5s steps(40, end);
  }

  @keyframes shimmer {
    0% {
      background-position: 200% 0;
    }
    100% {
      background-position: -200% 0;
    }
  }

  @keyframes typewriter {
    from {
      width: 0;
    }
    to {
      width: 100%;
    }
  }
`

export const SearchMatchesContainer = styled.div<{ depth: number }>`
  margin-left: ${(props) => props.depth * 16 + 40}px;
  margin-top: 4px;
  margin-bottom: 8px;
  padding: 6px 8px;
  background-color: var(--color-background-mute);
  border-radius: 4px;
  border-left: 2px solid var(--color-primary-soft);
`

export const NodeNameContainer = styled.div`
  display: flex;
  align-items: center;
  gap: 6px;
  flex: 1;
  min-width: 0;
`

export const MatchBadge = styled.span<{ matchType: string }>`
  display: inline-flex;
  align-items: center;
  padding: 0 4px;
  height: 16px;
  font-size: 10px;
  line-height: 1;
  border-radius: 2px;
  background-color: ${(props) =>
    props.matchType === 'both' ? 'var(--color-primary-soft)' : 'var(--color-background-mute)'};
  color: ${(props) => (props.matchType === 'both' ? 'var(--color-primary)' : 'var(--color-text-3)')};
  font-weight: 500;
  flex-shrink: 0;
`

export const MatchItem = styled.div`
  display: flex;
  gap: 8px;
  margin-bottom: 4px;
  font-size: 12px;
  padding: 4px 6px;
  margin-left: -6px;
  margin-right: -6px;
  border-radius: 3px;
  cursor: pointer;
  transition: all 0.15s ease;

  &:hover {
    background-color: var(--color-background-soft);
    transform: translateX(2px);
  }

  &:active {
    background-color: var(--color-active);
  }

  &:last-child {
    margin-bottom: 0;
  }
`

export const MatchLineNumber = styled.span`
  color: var(--color-text-3);
  font-family: monospace;
  flex-shrink: 0;
  width: 30px;
`

export const MatchContext = styled.div`
  color: var(--color-text-2);
  flex: 1;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
  font-family: monospace;
`

export const MoreMatches = styled.div<{ depth: number }>`
  margin-top: 4px;
  padding: 4px 6px;
  margin-left: -6px;
  margin-right: -6px;
  font-size: 11px;
  color: var(--color-text-3);
  border-radius: 3px;
  cursor: pointer;
  display: flex;
  align-items: center;
  transition: all 0.15s ease;

  &:hover {
    color: var(--color-text-2);
    background-color: var(--color-background-soft);
  }
`

const EditInput = styled.input`
  flex: 1;
  font-size: 13px;
`

const DropHintText = styled.div`
  color: var(--color-text-3);
  font-size: 12px;
  font-style: italic;
`

export default TreeNode
109  src/renderer/src/pages/notes/context/NotesContexts.tsx  (new file)
@@ -0,0 +1,109 @@
import type { UseInPlaceEditReturn } from '@renderer/hooks/useInPlaceEdit'
import type { NotesTreeNode } from '@renderer/types/note'
import type { MenuProps } from 'antd'
import { createContext, use } from 'react'

// ==================== 1. Actions Context (Static, rarely changes) ====================
export interface NotesActionsContextType {
  getMenuItems: (node: NotesTreeNode) => MenuProps['items']
  onSelectNode: (node: NotesTreeNode) => void
  onToggleExpanded: (nodeId: string) => void
  onDropdownOpenChange: (key: string | null) => void
}

export const NotesActionsContext = createContext<NotesActionsContextType | null>(null)

export const useNotesActions = () => {
  const context = use(NotesActionsContext)
  if (!context) {
    throw new Error('useNotesActions must be used within NotesActionsContext.Provider')
  }
  return context
}

// ==================== 2. Selection Context (Low frequency updates) ====================
export interface NotesSelectionContextType {
  selectedFolderId?: string | null
  activeNodeId?: string
}

export const NotesSelectionContext = createContext<NotesSelectionContextType | null>(null)

export const useNotesSelection = () => {
  const context = use(NotesSelectionContext)
  if (!context) {
    throw new Error('useNotesSelection must be used within NotesSelectionContext.Provider')
  }
  return context
}

// ==================== 3. Editing Context (Medium frequency updates) ====================
export interface NotesEditingContextType {
  editingNodeId: string | null
  renamingNodeIds: Set<string>
  newlyRenamedNodeIds: Set<string>
  inPlaceEdit: UseInPlaceEditReturn
}

export const NotesEditingContext = createContext<NotesEditingContextType | null>(null)

export const useNotesEditing = () => {
  const context = use(NotesEditingContext)
  if (!context) {
    throw new Error('useNotesEditing must be used within NotesEditingContext.Provider')
  }
  return context
}

// ==================== 4. Drag Context (High frequency updates) ====================
export interface NotesDragContextType {
  draggedNodeId: string | null
  dragOverNodeId: string | null
  dragPosition: 'before' | 'inside' | 'after'
  onDragStart: (e: React.DragEvent, node: NotesTreeNode) => void
  onDragOver: (e: React.DragEvent, node: NotesTreeNode) => void
  onDragLeave: () => void
  onDrop: (e: React.DragEvent, node: NotesTreeNode) => void
  onDragEnd: () => void
}

export const NotesDragContext = createContext<NotesDragContextType | null>(null)

export const useNotesDrag = () => {
  const context = use(NotesDragContext)
  if (!context) {
    throw new Error('useNotesDrag must be used within NotesDragContext.Provider')
  }
  return context
}

// ==================== 5. Search Context (Medium frequency updates) ====================
export interface NotesSearchContextType {
  searchKeyword: string
  showMatches: boolean
}

export const NotesSearchContext = createContext<NotesSearchContextType | null>(null)

export const useNotesSearch = () => {
  const context = use(NotesSearchContext)
  if (!context) {
    throw new Error('useNotesSearch must be used within NotesSearchContext.Provider')
  }
  return context
}

// ==================== 6. UI Context (Medium frequency updates) ====================
export interface NotesUIContextType {
  openDropdownKey: string | null
}

export const NotesUIContext = createContext<NotesUIContextType | null>(null)

export const useNotesUI = () => {
  const context = use(NotesUIContext)
  if (!context) {
    throw new Error('useNotesUI must be used within NotesUIContext.Provider')
  }
  return context
}
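The six contexts above are split by how often their values change, so a high-churn drag update does not re-render nodes that only consume static actions. A minimal sketch of how a tree container might compose the providers; the NotesTreeProviders wrapper and its props are illustrative assumptions, only the context and type names come from this diff:

// Hypothetical composition of the split contexts (not part of this diff).
const NotesTreeProviders = ({ actions, selection, drag, children }: {
  actions: NotesActionsContextType
  selection: NotesSelectionContextType
  drag: NotesDragContextType
  children: React.ReactNode
}) => (
  <NotesActionsContext.Provider value={actions}>
    <NotesSelectionContext.Provider value={selection}>
      <NotesDragContext.Provider value={drag}>{children}</NotesDragContext.Provider>
    </NotesSelectionContext.Provider>
  </NotesActionsContext.Provider>
)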
101  src/renderer/src/pages/notes/hooks/useNotesDragAndDrop.ts  (new file)
@@ -0,0 +1,101 @@
import type { NotesTreeNode } from '@renderer/types/note'
import { useCallback, useRef, useState } from 'react'

interface UseNotesDragAndDropProps {
  onMoveNode: (sourceNodeId: string, targetNodeId: string, position: 'before' | 'after' | 'inside') => void
}

export const useNotesDragAndDrop = ({ onMoveNode }: UseNotesDragAndDropProps) => {
  const [draggedNodeId, setDraggedNodeId] = useState<string | null>(null)
  const [dragOverNodeId, setDragOverNodeId] = useState<string | null>(null)
  const [dragPosition, setDragPosition] = useState<'before' | 'inside' | 'after'>('inside')
  const dragNodeRef = useRef<HTMLDivElement | null>(null)

  const handleDragStart = useCallback((e: React.DragEvent, node: NotesTreeNode) => {
    setDraggedNodeId(node.id)
    e.dataTransfer.effectAllowed = 'move'
    e.dataTransfer.setData('text/plain', node.id)

    dragNodeRef.current = e.currentTarget as HTMLDivElement

    // Create ghost element
    if (e.currentTarget.parentElement) {
      const rect = e.currentTarget.getBoundingClientRect()
      const ghostElement = e.currentTarget.cloneNode(true) as HTMLElement
      ghostElement.style.width = `${rect.width}px`
      ghostElement.style.opacity = '0.7'
      ghostElement.style.position = 'absolute'
      ghostElement.style.top = '-1000px'
      document.body.appendChild(ghostElement)
      e.dataTransfer.setDragImage(ghostElement, 10, 10)
      setTimeout(() => {
        document.body.removeChild(ghostElement)
      }, 0)
    }
  }, [])

  const handleDragOver = useCallback(
    (e: React.DragEvent, node: NotesTreeNode) => {
      e.preventDefault()
      e.dataTransfer.dropEffect = 'move'

      if (draggedNodeId === node.id) {
        return
      }

      setDragOverNodeId(node.id)

      const rect = (e.currentTarget as HTMLElement).getBoundingClientRect()
      const mouseY = e.clientY
      const thresholdTop = rect.top + rect.height * 0.3
      const thresholdBottom = rect.bottom - rect.height * 0.3

      if (mouseY < thresholdTop) {
        setDragPosition('before')
      } else if (mouseY > thresholdBottom) {
        setDragPosition('after')
      } else {
        setDragPosition(node.type === 'folder' ? 'inside' : 'after')
      }
    },
    [draggedNodeId]
  )

  const handleDragLeave = useCallback(() => {
    setDragOverNodeId(null)
    setDragPosition('inside')
  }, [])

  const handleDrop = useCallback(
    (e: React.DragEvent, targetNode: NotesTreeNode) => {
      e.preventDefault()
      const draggedId = e.dataTransfer.getData('text/plain')

      if (draggedId && draggedId !== targetNode.id) {
        onMoveNode(draggedId, targetNode.id, dragPosition)
      }

      setDraggedNodeId(null)
      setDragOverNodeId(null)
      setDragPosition('inside')
    },
    [onMoveNode, dragPosition]
  )

  const handleDragEnd = useCallback(() => {
    setDraggedNodeId(null)
    setDragOverNodeId(null)
    setDragPosition('inside')
  }, [])

  return {
    draggedNodeId,
    dragOverNodeId,
    dragPosition,
    handleDragStart,
    handleDragOver,
    handleDragLeave,
    handleDrop,
    handleDragEnd
  }
}
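The hook returns exactly the shape NotesDragContext expects, so the owning tree component can pipe one into the other. A minimal wiring sketch; moveNode and renderTreeNodes are placeholders that are not part of this diff:

// Hypothetical wiring; only useNotesDragAndDrop and NotesDragContext come from this diff.
const drag = useNotesDragAndDrop({ onMoveNode: moveNode })

<NotesDragContext.Provider
  value={{
    draggedNodeId: drag.draggedNodeId,
    dragOverNodeId: drag.dragOverNodeId,
    dragPosition: drag.dragPosition,
    onDragStart: drag.handleDragStart,
    onDragOver: drag.handleDragOver,
    onDragLeave: drag.handleDragLeave,
    onDrop: drag.handleDrop,
    onDragEnd: drag.handleDragEnd
  }}>
  {renderTreeNodes()}
</NotesDragContext.Provider>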
94  src/renderer/src/pages/notes/hooks/useNotesEditing.ts  (new file)
@@ -0,0 +1,94 @@
import { loggerService } from '@logger'
import { useInPlaceEdit } from '@renderer/hooks/useInPlaceEdit'
import { fetchNoteSummary } from '@renderer/services/ApiService'
import type { NotesTreeNode } from '@renderer/types/note'
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'

const logger = loggerService.withContext('UseNotesEditing')

interface UseNotesEditingProps {
  onRenameNode: (nodeId: string, newName: string) => void
}

export const useNotesEditing = ({ onRenameNode }: UseNotesEditingProps) => {
  const { t } = useTranslation()
  const [editingNodeId, setEditingNodeId] = useState<string | null>(null)
  const [renamingNodeIds, setRenamingNodeIds] = useState<Set<string>>(new Set())
  const [newlyRenamedNodeIds, setNewlyRenamedNodeIds] = useState<Set<string>>(new Set())

  const inPlaceEdit = useInPlaceEdit({
    onSave: (newName: string) => {
      if (editingNodeId && newName) {
        onRenameNode(editingNodeId, newName)
        window.toast.success(t('common.saved'))
        logger.debug(`Renamed node ${editingNodeId} to "${newName}"`)
      }
      setEditingNodeId(null)
    },
    onCancel: () => {
      setEditingNodeId(null)
    }
  })

  const handleStartEdit = useCallback(
    (node: NotesTreeNode) => {
      setEditingNodeId(node.id)
      inPlaceEdit.startEdit(node.name)
    },
    [inPlaceEdit]
  )

  const handleAutoRename = useCallback(
    async (note: NotesTreeNode) => {
      if (note.type !== 'file') return

      setRenamingNodeIds((prev) => new Set(prev).add(note.id))
      try {
        const content = await window.api.file.readExternal(note.externalPath)
        if (!content || content.trim().length === 0) {
          window.toast.warning(t('notes.auto_rename.empty_note'))
          return
        }

        const summaryText = await fetchNoteSummary({ content })
        if (summaryText) {
          onRenameNode(note.id, summaryText)
          window.toast.success(t('notes.auto_rename.success'))
        } else {
          window.toast.error(t('notes.auto_rename.failed'))
        }
      } catch (error) {
        window.toast.error(t('notes.auto_rename.failed'))
        logger.error(`Failed to auto-rename note: ${error}`)
      } finally {
        setRenamingNodeIds((prev) => {
          const next = new Set(prev)
          next.delete(note.id)
          return next
        })

        setNewlyRenamedNodeIds((prev) => new Set(prev).add(note.id))

        setTimeout(() => {
          setNewlyRenamedNodeIds((prev) => {
            const next = new Set(prev)
            next.delete(note.id)
            return next
          })
        }, 700)
      }
    },
    [onRenameNode, t]
  )

  return {
    editingNodeId,
    renamingNodeIds,
    newlyRenamedNodeIds,
    inPlaceEdit,
    handleStartEdit,
    handleAutoRename,
    setEditingNodeId
  }
}
112  src/renderer/src/pages/notes/hooks/useNotesFileUpload.ts  (new file)
@@ -0,0 +1,112 @@
import { useCallback } from 'react'

interface UseNotesFileUploadProps {
  onUploadFiles: (files: File[]) => void
  setIsDragOverSidebar: (isDragOver: boolean) => void
}

export const useNotesFileUpload = ({ onUploadFiles, setIsDragOverSidebar }: UseNotesFileUploadProps) => {
  const handleDropFiles = useCallback(
    async (e: React.DragEvent) => {
      e.preventDefault()
      setIsDragOverSidebar(false)

      // Handle folder drag-and-drop: read full file path info from dataTransfer.items
      const items = Array.from(e.dataTransfer.items)
      const files: File[] = []

      const processEntry = async (entry: FileSystemEntry, path: string = '') => {
        if (entry.isFile) {
          const fileEntry = entry as FileSystemFileEntry
          return new Promise<void>((resolve) => {
            fileEntry.file((file) => {
              // Manually set webkitRelativePath to preserve the folder structure
              Object.defineProperty(file, 'webkitRelativePath', {
                value: path + file.name,
                writable: false
              })
              files.push(file)
              resolve()
            })
          })
        } else if (entry.isDirectory) {
          const dirEntry = entry as FileSystemDirectoryEntry
          const reader = dirEntry.createReader()
          return new Promise<void>((resolve) => {
            reader.readEntries(async (entries) => {
              const promises = entries.map((subEntry) => processEntry(subEntry, path + entry.name + '/'))
              await Promise.all(promises)
              resolve()
            })
          })
        }
      }

      // If the DataTransferItem API is available (folder drag-and-drop)
      if (items.length > 0 && items[0].webkitGetAsEntry()) {
        const promises = items.map((item) => {
          const entry = item.webkitGetAsEntry()
          return entry ? processEntry(entry) : Promise.resolve()
        })

        await Promise.all(promises)

        if (files.length > 0) {
          onUploadFiles(files)
        }
      } else {
        const regularFiles = Array.from(e.dataTransfer.files)
        if (regularFiles.length > 0) {
          onUploadFiles(regularFiles)
        }
      }
    },
    [onUploadFiles, setIsDragOverSidebar]
  )

  const handleSelectFiles = useCallback(() => {
    const fileInput = document.createElement('input')
    fileInput.type = 'file'
    fileInput.multiple = true
    fileInput.accept = '.md,.markdown'
    fileInput.webkitdirectory = false

    fileInput.onchange = (e) => {
      const target = e.target as HTMLInputElement
      if (target.files && target.files.length > 0) {
        const selectedFiles = Array.from(target.files)
        onUploadFiles(selectedFiles)
      }
      fileInput.remove()
    }

    fileInput.click()
  }, [onUploadFiles])

  const handleSelectFolder = useCallback(() => {
    const folderInput = document.createElement('input')
    folderInput.type = 'file'
    // @ts-ignore - webkitdirectory is a non-standard attribute
    folderInput.webkitdirectory = true
    // @ts-ignore - directory is a non-standard attribute
    folderInput.directory = true
    folderInput.multiple = true

    folderInput.onchange = (e) => {
      const target = e.target as HTMLInputElement
      if (target.files && target.files.length > 0) {
        const selectedFiles = Array.from(target.files)
        onUploadFiles(selectedFiles)
      }
      folderInput.remove()
    }

    folderInput.click()
  }, [onUploadFiles])

  return {
    handleDropFiles,
    handleSelectFiles,
    handleSelectFolder
  }
}
263  src/renderer/src/pages/notes/hooks/useNotesMenu.tsx  (new file)
@@ -0,0 +1,263 @@
import { loggerService } from '@logger'
import { DeleteIcon } from '@renderer/components/Icons'
import SaveToKnowledgePopup from '@renderer/components/Popups/SaveToKnowledgePopup'
import { useKnowledgeBases } from '@renderer/hooks/useKnowledge'
import type { RootState } from '@renderer/store'
import type { NotesTreeNode } from '@renderer/types/note'
import { exportNote } from '@renderer/utils/export'
import type { MenuProps } from 'antd'
import type { ItemType, MenuItemType } from 'antd/es/menu/interface'
import { Edit3, FilePlus, FileSearch, Folder, FolderOpen, Sparkles, Star, StarOff, UploadIcon } from 'lucide-react'
import { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import { useSelector } from 'react-redux'

const logger = loggerService.withContext('UseNotesMenu')

interface UseNotesMenuProps {
  renamingNodeIds: Set<string>
  onCreateNote: (name: string, targetFolderId?: string) => void
  onCreateFolder: (name: string, targetFolderId?: string) => void
  onRenameNode: (nodeId: string, newName: string) => void
  onToggleStar: (nodeId: string) => void
  onDeleteNode: (nodeId: string) => void
  onSelectNode: (node: NotesTreeNode) => void
  handleStartEdit: (node: NotesTreeNode) => void
  handleAutoRename: (node: NotesTreeNode) => void
  activeNode?: NotesTreeNode | null
}

export const useNotesMenu = ({
  renamingNodeIds,
  onCreateNote,
  onCreateFolder,
  onToggleStar,
  onDeleteNode,
  onSelectNode,
  handleStartEdit,
  handleAutoRename,
  activeNode
}: UseNotesMenuProps) => {
  const { t } = useTranslation()
  const { bases } = useKnowledgeBases()
  const exportMenuOptions = useSelector((state: RootState) => state.settings.exportMenuOptions)

  const handleExportKnowledge = useCallback(
    async (note: NotesTreeNode) => {
      try {
        if (bases.length === 0) {
          window.toast.warning(t('chat.save.knowledge.empty.no_knowledge_base'))
          return
        }

        const result = await SaveToKnowledgePopup.showForNote(note)

        if (result?.success) {
          window.toast.success(t('notes.export_success', { count: result.savedCount }))
        }
      } catch (error) {
        window.toast.error(t('notes.export_failed'))
        logger.error(`Failed to export note to knowledge base: ${error}`)
      }
    },
    [bases.length, t]
  )

  const handleImageAction = useCallback(
    async (node: NotesTreeNode, platform: 'copyImage' | 'exportImage') => {
      try {
        if (activeNode?.id !== node.id) {
          onSelectNode(node)
          await new Promise((resolve) => setTimeout(resolve, 500))
        }

        await exportNote({ node, platform })
      } catch (error) {
        logger.error(`Failed to ${platform === 'copyImage' ? 'copy' : 'export'} as image:`, error as Error)
        window.toast.error(t('common.copy_failed'))
      }
    },
    [activeNode, onSelectNode, t]
  )

  const handleDeleteNodeWrapper = useCallback(
    (node: NotesTreeNode) => {
      const confirmText =
        node.type === 'folder'
          ? t('notes.delete_folder_confirm', { name: node.name })
          : t('notes.delete_note_confirm', { name: node.name })

      window.modal.confirm({
        title: t('notes.delete'),
        content: confirmText,
        centered: true,
        okButtonProps: { danger: true },
        onOk: () => {
          onDeleteNode(node.id)
        }
      })
    },
    [onDeleteNode, t]
  )

  const getMenuItems = useCallback(
    (node: NotesTreeNode) => {
      const baseMenuItems: MenuProps['items'] = []

      // only show auto rename for file for now
      if (node.type !== 'folder') {
        baseMenuItems.push({
          label: t('notes.auto_rename.label'),
          key: 'auto-rename',
          icon: <Sparkles size={14} />,
          disabled: renamingNodeIds.has(node.id),
          onClick: () => {
            handleAutoRename(node)
          }
        })
      }

      if (node.type === 'folder') {
        baseMenuItems.push(
          {
            label: t('notes.new_note'),
            key: 'new_note',
            icon: <FilePlus size={14} />,
            onClick: () => {
              onCreateNote(t('notes.untitled_note'), node.id)
            }
          },
          {
            label: t('notes.new_folder'),
            key: 'new_folder',
            icon: <Folder size={14} />,
            onClick: () => {
              onCreateFolder(t('notes.untitled_folder'), node.id)
            }
          },
          { type: 'divider' }
        )
      }

      baseMenuItems.push(
        {
          label: t('notes.rename'),
          key: 'rename',
          icon: <Edit3 size={14} />,
          onClick: () => {
            handleStartEdit(node)
          }
        },
        {
          label: t('notes.open_outside'),
          key: 'open_outside',
          icon: <FolderOpen size={14} />,
          onClick: () => {
            window.api.openPath(node.externalPath)
          }
        }
      )
      if (node.type !== 'folder') {
        baseMenuItems.push(
          {
            label: node.isStarred ? t('notes.unstar') : t('notes.star'),
            key: 'star',
            icon: node.isStarred ? <StarOff size={14} /> : <Star size={14} />,
            onClick: () => {
              onToggleStar(node.id)
            }
          },
          {
            label: t('notes.export_knowledge'),
            key: 'export_knowledge',
            icon: <FileSearch size={14} />,
            onClick: () => {
              handleExportKnowledge(node)
            }
          },
          {
            label: t('chat.topics.export.title'),
            key: 'export',
            icon: <UploadIcon size={14} />,
            children: [
              exportMenuOptions.image && {
                label: t('chat.topics.copy.image'),
                key: 'copy-image',
                onClick: () => handleImageAction(node, 'copyImage')
              },
              exportMenuOptions.image && {
                label: t('chat.topics.export.image'),
                key: 'export-image',
                onClick: () => handleImageAction(node, 'exportImage')
              },
              exportMenuOptions.markdown && {
                label: t('chat.topics.export.md.label'),
                key: 'markdown',
                onClick: () => exportNote({ node, platform: 'markdown' })
              },
              exportMenuOptions.docx && {
                label: t('chat.topics.export.word'),
                key: 'word',
                onClick: () => exportNote({ node, platform: 'docx' })
              },
              exportMenuOptions.notion && {
                label: t('chat.topics.export.notion'),
                key: 'notion',
                onClick: () => exportNote({ node, platform: 'notion' })
              },
              exportMenuOptions.yuque && {
                label: t('chat.topics.export.yuque'),
                key: 'yuque',
                onClick: () => exportNote({ node, platform: 'yuque' })
              },
              exportMenuOptions.obsidian && {
                label: t('chat.topics.export.obsidian'),
                key: 'obsidian',
                onClick: () => exportNote({ node, platform: 'obsidian' })
              },
              exportMenuOptions.joplin && {
                label: t('chat.topics.export.joplin'),
                key: 'joplin',
                onClick: () => exportNote({ node, platform: 'joplin' })
              },
              exportMenuOptions.siyuan && {
                label: t('chat.topics.export.siyuan'),
                key: 'siyuan',
                onClick: () => exportNote({ node, platform: 'siyuan' })
              }
            ].filter(Boolean) as ItemType<MenuItemType>[]
          }
        )
      }
      baseMenuItems.push(
        { type: 'divider' },
        {
          label: t('notes.delete'),
          danger: true,
          key: 'delete',
          icon: <DeleteIcon size={14} className="lucide-custom" />,
          onClick: () => {
            handleDeleteNodeWrapper(node)
          }
        }
      )

      return baseMenuItems
    },
    [
      t,
      handleStartEdit,
      onToggleStar,
      handleExportKnowledge,
      handleImageAction,
      handleDeleteNodeWrapper,
      renamingNodeIds,
      handleAutoRename,
      exportMenuOptions,
      onCreateNote,
      onCreateFolder
    ]
  )

  return { getMenuItems }
}
@@ -1,21 +1,13 @@
import { getAgentTypeAvatar } from '@renderer/config/agent'
import type { useUpdateAgent } from '@renderer/hooks/agents/useUpdateAgent'
import type { useUpdateSession } from '@renderer/hooks/agents/useUpdateSession'
import { getAgentTypeLabel } from '@renderer/i18n/label'
import type { GetAgentResponse, GetAgentSessionResponse } from '@renderer/types'
import { isAgentEntity } from '@renderer/types'
import { Avatar } from 'antd'
import type { FC } from 'react'
import { useTranslation } from 'react-i18next'

import { AccessibleDirsSetting } from './AccessibleDirsSetting'
import { AvatarSetting } from './AvatarSetting'
import { DescriptionSetting } from './DescriptionSetting'
import { ModelSetting } from './ModelSetting'
import { NameSetting } from './NameSetting'
import { SettingsContainer, SettingsItem, SettingsTitle } from './shared'

// const logger = loggerService.withContext('AgentEssentialSettings')
import { SettingsContainer } from './shared'

type EssentialSettingsProps =
  | {
@@ -30,26 +22,10 @@ type EssentialSettingsProps =
  }

const EssentialSettings: FC<EssentialSettingsProps> = ({ agentBase, update, showModelSetting = true }) => {
  const { t } = useTranslation()

  if (!agentBase) return null

  const isAgent = isAgentEntity(agentBase)

  return (
    <SettingsContainer>
      {isAgent && (
        <SettingsItem inline>
          <SettingsTitle>{t('agent.type.label')}</SettingsTitle>
          <div className="flex items-center gap-2">
            <Avatar size={24} src={getAgentTypeAvatar(agentBase.type)} className="h-6 w-6 text-lg" />
            <span>{(agentBase?.name ?? agentBase?.type) ? getAgentTypeLabel(agentBase.type) : ''}</span>
          </div>
        </SettingsItem>
      )}
      {isAgent && (
        <AvatarSetting agent={agentBase} update={update as ReturnType<typeof useUpdateAgent>['updateAgent']} />
      )}
      <NameSetting base={agentBase} update={update} />
      {showModelSetting && <ModelSetting base={agentBase} update={update} />}
      <AccessibleDirsSetting base={agentBase} update={update} />

@@ -1,6 +1,8 @@
import { EmojiAvatarWithPicker } from '@renderer/components/Avatar/EmojiAvatarWithPicker'
import type { AgentBaseWithId, UpdateAgentBaseForm, UpdateAgentFunctionUnion } from '@renderer/types'
import { AgentConfigurationSchema, isAgentEntity, isAgentType } from '@renderer/types'
import { Input } from 'antd'
import { useState } from 'react'
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'

import { SettingsItem, SettingsTitle } from './shared'
@@ -13,26 +15,61 @@ export interface NameSettingsProps {
export const NameSetting = ({ base, update }: NameSettingsProps) => {
  const { t } = useTranslation()
  const [name, setName] = useState<string | undefined>(base?.name?.trim())

  const updateName = async (name: UpdateAgentBaseForm['name']) => {
    if (!base) return
    return update({ id: base.id, name: name?.trim() })
  }

  // Avatar logic
  const isAgent = isAgentEntity(base)
  const isDefault = isAgent ? isAgentType(base.configuration?.avatar) : false
  const [emoji, setEmoji] = useState(isAgent && !isDefault ? (base.configuration?.avatar ?? '⭐️') : '⭐️')

  const updateAvatar = useCallback(
    (avatar: string) => {
      if (!isAgent || !base) return
      const parsedConfiguration = AgentConfigurationSchema.parse(base.configuration ?? {})
      const payload = {
        id: base.id,
        configuration: {
          ...parsedConfiguration,
          avatar
        }
      }
      update(payload)
    },
    [base, update, isAgent]
  )

  if (!base) return null

  return (
    <SettingsItem inline>
      <SettingsTitle>{t('common.name')}</SettingsTitle>
      <Input
        placeholder={t('common.agent_one') + t('common.name')}
        value={name}
        onChange={(e) => setName(e.target.value)}
        onBlur={() => {
          if (name !== base.name) {
            updateName(name)
          }
        }}
        className="max-w-70 flex-1"
      />
      <div className="flex max-w-70 flex-1 items-center gap-1">
        {isAgent && (
          <EmojiAvatarWithPicker
            emoji={emoji}
            onPick={(emoji: string) => {
              setEmoji(emoji)
              if (isAgent && emoji === base?.configuration?.avatar) return
              updateAvatar(emoji)
            }}
          />
        )}
        <Input
          placeholder={t('common.agent_one') + t('common.name')}
          value={name}
          onChange={(e) => setName(e.target.value)}
          onBlur={() => {
            if (name !== base.name) {
              updateName(name)
            }
          }}
          className="flex-1"
        />
      </div>
    </SettingsItem>
  )
}
|
||||
import { isEmbeddingModel, isRerankModel } from '@renderer/config/models'
|
||||
import { useTimer } from '@renderer/hooks/useTimer'
|
||||
import { SettingRow } from '@renderer/pages/settings'
|
||||
import { DEFAULT_ASSISTANT_SETTINGS } from '@renderer/services/AssistantService'
|
||||
import type { Assistant, AssistantSettingCustomParameters, AssistantSettings, Model } from '@renderer/types'
|
||||
import { modalConfirm } from '@renderer/utils'
|
||||
import { Button, Col, Divider, Input, InputNumber, Row, Select, Slider, Switch, Tooltip } from 'antd'
|
||||
@@ -31,7 +32,9 @@ const AssistantModelSettings: FC<Props> = ({ assistant, updateAssistant, updateA
|
||||
const [enableMaxTokens, setEnableMaxTokens] = useState(assistant?.settings?.enableMaxTokens ?? false)
|
||||
const [maxTokens, setMaxTokens] = useState(assistant?.settings?.maxTokens ?? 0)
|
||||
const [streamOutput, setStreamOutput] = useState(assistant?.settings?.streamOutput)
|
||||
const [toolUseMode, setToolUseMode] = useState(assistant?.settings?.toolUseMode ?? 'prompt')
|
||||
const [toolUseMode, setToolUseMode] = useState<AssistantSettings['toolUseMode']>(
|
||||
assistant?.settings?.toolUseMode ?? 'function'
|
||||
)
|
||||
const [defaultModel, setDefaultModel] = useState(assistant?.defaultModel)
|
||||
const [topP, setTopP] = useState(assistant?.settings?.topP ?? 1)
|
||||
const [enableTopP, setEnableTopP] = useState(assistant?.settings?.enableTopP ?? false)
|
||||
@@ -158,28 +161,17 @@ const AssistantModelSettings: FC<Props> = ({ assistant, updateAssistant, updateA
|
||||
}
|
||||
|
||||
const onReset = () => {
|
||||
setTemperature(DEFAULT_TEMPERATURE)
|
||||
setEnableTemperature(true)
|
||||
setContextCount(DEFAULT_CONTEXTCOUNT)
|
||||
setEnableMaxTokens(false)
|
||||
setMaxTokens(0)
|
||||
setStreamOutput(true)
|
||||
setTopP(1)
|
||||
setEnableTopP(false)
|
||||
setCustomParameters([])
|
||||
setToolUseMode('prompt')
|
||||
updateAssistantSettings({
|
||||
temperature: DEFAULT_TEMPERATURE,
|
||||
enableTemperature: true,
|
||||
contextCount: DEFAULT_CONTEXTCOUNT,
|
||||
enableMaxTokens: false,
|
||||
maxTokens: 0,
|
||||
streamOutput: true,
|
||||
topP: 1,
|
||||
enableTopP: false,
|
||||
customParameters: [],
|
||||
toolUseMode: 'prompt'
|
||||
})
|
||||
setTemperature(DEFAULT_ASSISTANT_SETTINGS.temperature)
|
||||
setEnableTemperature(DEFAULT_ASSISTANT_SETTINGS.enableTemperature ?? true)
|
||||
setContextCount(DEFAULT_ASSISTANT_SETTINGS.contextCount)
|
||||
setEnableMaxTokens(DEFAULT_ASSISTANT_SETTINGS.enableMaxTokens ?? false)
|
||||
setMaxTokens(DEFAULT_ASSISTANT_SETTINGS.maxTokens ?? 0)
|
||||
setStreamOutput(DEFAULT_ASSISTANT_SETTINGS.streamOutput)
|
||||
setTopP(DEFAULT_ASSISTANT_SETTINGS.topP)
|
||||
setEnableTopP(DEFAULT_ASSISTANT_SETTINGS.enableTopP ?? false)
|
||||
setCustomParameters(DEFAULT_ASSISTANT_SETTINGS.customParameters ?? [])
|
||||
setToolUseMode(DEFAULT_ASSISTANT_SETTINGS.toolUseMode)
|
||||
updateAssistantSettings(DEFAULT_ASSISTANT_SETTINGS)
|
||||
}
|
||||
const modelFilter = (model: Model) => !isEmbeddingModel(model) && !isRerankModel(model)
|
||||
|
||||
|
||||
@@ -109,7 +109,6 @@ const InstallNpxUv: FC<Props> = ({ mini = false }) => {
|
||||
<Container>
|
||||
<Alert
|
||||
type={isUvInstalled ? 'success' : 'warning'}
|
||||
banner
|
||||
style={{ borderRadius: 'var(--list-item-border-radius)' }}
|
||||
description={
|
||||
<VStack>
|
||||
@@ -140,7 +139,6 @@ const InstallNpxUv: FC<Props> = ({ mini = false }) => {
|
||||
/>
|
||||
<Alert
|
||||
type={isBunInstalled ? 'success' : 'warning'}
|
||||
banner
|
||||
style={{ borderRadius: 'var(--list-item-border-radius)' }}
|
||||
description={
|
||||
<VStack>
|
||||
|
||||
@@ -140,7 +140,7 @@ const MCPSettings: FC = () => {
|
||||
<Route
|
||||
path="mcp-install"
|
||||
element={
|
||||
<SettingContainer theme={theme}>
|
||||
<SettingContainer style={{ backgroundColor: 'inherit' }}>
|
||||
<InstallNpxUv />
|
||||
</SettingContainer>
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ import AiProvider from '@renderer/aiCore'
|
||||
import type { CompletionsParams } from '@renderer/aiCore/legacy/middleware/schemas'
|
||||
import type { AiSdkMiddlewareConfig } from '@renderer/aiCore/middleware/AiSdkMiddlewareBuilder'
|
||||
import { buildStreamTextParams } from '@renderer/aiCore/prepareParams'
|
||||
import { isDedicatedImageGenerationModel, isEmbeddingModel } from '@renderer/config/models'
|
||||
import { isDedicatedImageGenerationModel, isEmbeddingModel, isFunctionCallingModel } from '@renderer/config/models'
|
||||
import { getStoreSetting } from '@renderer/hooks/useSettings'
|
||||
import i18n from '@renderer/i18n'
|
||||
import store from '@renderer/store'
|
||||
@@ -18,6 +18,7 @@ import type { Message } from '@renderer/types/newMessage'
|
||||
import type { SdkModel } from '@renderer/types/sdk'
|
||||
import { removeSpecialCharactersForTopicName, uuid } from '@renderer/utils'
|
||||
import { abortCompletion, readyToAbort } from '@renderer/utils/abortController'
|
||||
import { isToolUseModeFunction } from '@renderer/utils/assistant'
|
||||
import { isAbortError } from '@renderer/utils/error'
|
||||
import { purifyMarkdownImages } from '@renderer/utils/markdown'
|
||||
import { isPromptToolUse, isSupportedToolUse } from '@renderer/utils/mcp-tools'
|
||||
@@ -126,12 +127,16 @@ export async function fetchChatCompletion({
|
||||
requestOptions: options
|
||||
})
|
||||
|
||||
// Safely fallback to prompt tool use when function calling is not supported by model.
|
||||
const usePromptToolUse =
|
||||
isPromptToolUse(assistant) || (isToolUseModeFunction(assistant) && !isFunctionCallingModel(assistant.model))
|
||||
|
||||
const middlewareConfig: AiSdkMiddlewareConfig = {
|
||||
streamOutput: assistant.settings?.streamOutput ?? true,
|
||||
onChunk: onChunkReceived,
|
||||
model: assistant.model,
|
||||
enableReasoning: capabilities.enableReasoning,
|
||||
isPromptToolUse: isPromptToolUse(assistant),
|
||||
isPromptToolUse: usePromptToolUse,
|
||||
isSupportedToolUse: isSupportedToolUse(assistant),
|
||||
isImageGenerationEndpoint: isDedicatedImageGenerationModel(assistant.model || getDefaultModel()),
|
||||
webSearchPluginConfig: webSearchPluginConfig,
|
||||
|
||||
@@ -36,9 +36,10 @@ export const DEFAULT_ASSISTANT_SETTINGS: AssistantSettings = {
|
||||
streamOutput: true,
|
||||
topP: 1,
|
||||
enableTopP: false,
|
||||
toolUseMode: 'prompt',
|
||||
// It would gracefully fallback to prompt if not supported by model.
|
||||
toolUseMode: 'function',
|
||||
customParameters: []
|
||||
}
|
||||
} as const
|
||||
|
||||
export function getDefaultAssistant(): Assistant {
|
||||
return {
|
||||
@@ -176,7 +177,7 @@ export const getAssistantSettings = (assistant: Assistant): AssistantSettings =>
|
||||
enableMaxTokens: assistant?.settings?.enableMaxTokens ?? false,
|
||||
maxTokens: getAssistantMaxTokens(),
|
||||
streamOutput: assistant?.settings?.streamOutput ?? true,
|
||||
toolUseMode: assistant?.settings?.toolUseMode ?? 'prompt',
|
||||
toolUseMode: assistant?.settings?.toolUseMode ?? 'function',
|
||||
defaultModel: assistant?.defaultModel ?? undefined,
|
||||
reasoning_effort: assistant?.settings?.reasoning_effort ?? undefined,
|
||||
customParameters: assistant?.settings?.customParameters ?? []
|
||||
|
||||
@@ -83,6 +83,68 @@ export async function renameNode(node: NotesTreeNode, newName: string): Promise<
}

export async function uploadNotes(files: File[], targetPath: string): Promise<UploadResult> {
  const basePath = normalizePath(targetPath)
  const totalFiles = files.length

  if (files.length === 0) {
    return {
      uploadedNodes: [],
      totalFiles: 0,
      skippedFiles: 0,
      fileCount: 0,
      folderCount: 0
    }
  }

  try {
    // Get file paths from File objects
    // For browser File objects from drag-and-drop, we need to use FileReader to save temporarily
    // However, for directory uploads, the files already have paths
    const filePaths: string[] = []

    for (const file of files) {
      // @ts-ignore - webkitRelativePath exists on File objects from directory uploads
      if (file.path) {
        // @ts-ignore - Electron File objects have .path property
        filePaths.push(file.path)
      } else {
        // For browser File API, we'd need to use FileReader and create temp files
        // For now, fall back to the old method for these cases
        logger.warn('File without path detected, using fallback method')
        return uploadNotesLegacy(files, targetPath)
      }
    }

    // Pause file watcher to prevent N refresh events
    await window.api.file.pauseFileWatcher()

    try {
      // Use the new optimized batch upload API that runs in Main process
      const result = await window.api.file.batchUploadMarkdown(filePaths, basePath)

      return {
        uploadedNodes: [],
        totalFiles,
        skippedFiles: result.skippedFiles,
        fileCount: result.fileCount,
        folderCount: result.folderCount
      }
    } finally {
      // Resume watcher and trigger single refresh
      await window.api.file.resumeFileWatcher()
    }
  } catch (error) {
    logger.error('Batch upload failed, falling back to legacy method:', error as Error)
    // Fall back to old method if new method fails
    return uploadNotesLegacy(files, targetPath)
  }
}

/**
 * Legacy upload method using Renderer process
 * Kept as fallback for browser File API files without paths
 */
async function uploadNotesLegacy(files: File[], targetPath: string): Promise<UploadResult> {
  const basePath = normalizePath(targetPath)
  const markdownFiles = filterMarkdown(files)
  const skippedFiles = files.length - markdownFiles.length
@@ -101,18 +163,37 @@ export async function uploadNotes(files: File[], targetPath: string): Promise<Up
  await createFolders(folders)

  let fileCount = 0
  const BATCH_SIZE = 5 // Process 5 files concurrently to balance performance and responsiveness

  for (const file of markdownFiles) {
    const { dir, name } = resolveFileTarget(file, basePath)
    const { safeName } = await window.api.file.checkFileName(dir, name, true)
    const finalPath = `${dir}/${safeName}${MARKDOWN_EXT}`
  // Process files in batches to avoid blocking the UI thread
  for (let i = 0; i < markdownFiles.length; i += BATCH_SIZE) {
    const batch = markdownFiles.slice(i, i + BATCH_SIZE)

    try {
      const content = await file.text()
      await window.api.file.write(finalPath, content)
      fileCount += 1
    } catch (error) {
      logger.error('Failed to write uploaded file:', error as Error)
    // Process current batch in parallel
    const results = await Promise.allSettled(
      batch.map(async (file) => {
        const { dir, name } = resolveFileTarget(file, basePath)
        const { safeName } = await window.api.file.checkFileName(dir, name, true)
        const finalPath = `${dir}/${safeName}${MARKDOWN_EXT}`

        const content = await file.text()
        await window.api.file.write(finalPath, content)
        return true
      })
    )

    // Count successful uploads
    results.forEach((result) => {
      if (result.status === 'fulfilled') {
        fileCount += 1
      } else {
        logger.error('Failed to write uploaded file:', result.reason)
      }
    })

    // Yield to the event loop between batches to keep UI responsive
    if (i + BATCH_SIZE < markdownFiles.length) {
      await new Promise((resolve) => setTimeout(resolve, 0))
    }
  }
@@ -67,7 +67,7 @@ const persistedReducer = persistReducer(
  {
    key: 'cherry-studio',
    storage,
    version: 174,
    version: 176,
    blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'],
    migrate
  },

@@ -2819,6 +2819,43 @@ const migrateConfig = {
      logger.error('migrate 174 error', error as Error)
      return state
    }
  },
  '175': (state: RootState) => {
    try {
      state.assistants.assistants.forEach((assistant) => {
        // @ts-ignore
        if (assistant.settings?.reasoning_effort === 'off') {
          // @ts-ignore
          assistant.settings.reasoning_effort = 'none'
        }
        // @ts-ignore
        if (assistant.settings?.reasoning_effort_cache === 'off') {
          // @ts-ignore
          assistant.settings.reasoning_effort_cache = 'none'
        }
      })
      logger.info('migrate 175 success')
      return state
    } catch (error) {
      logger.error('migrate 175 error', error as Error)
      return state
    }
  },
  '176': (state: RootState) => {
    try {
      state.llm.providers.forEach((provider) => {
        if (provider.id === SystemProviderIds.qiniu) {
          provider.anthropicApiHost = 'https://api.qnaigc.com'
        }
        if (provider.id === SystemProviderIds.longcat) {
          provider.anthropicApiHost = 'https://api.longcat.chat/anthropic'
        }
      })
      return state
    } catch (error) {
      logger.error('migrate 176 error', error as Error)
      return state
    }
  }
}
@@ -6,6 +6,7 @@ export type ToolPermissionRequestPayload = {
  requestId: string
  toolName: string
  toolId: string
  toolCallId: string
  description?: string
  requiresPermissions: boolean
  input: Record<string, unknown>
@@ -82,12 +83,12 @@ export const selectActiveToolPermission = (state: ToolPermissionsState): ToolPer
  return activeEntries[0]
}

export const selectPendingPermissionByToolName = (
export const selectPendingPermission = (
  state: ToolPermissionsState,
  toolName: string
  toolCallId: string
): ToolPermissionEntry | undefined => {
  const activeEntries = Object.values(state.requests)
    .filter((entry) => entry.toolName === toolName)
    .filter((entry) => entry.toolCallId === toolCallId)
    .filter(
      (entry) => entry.status === 'pending' || entry.status === 'submitting-allow' || entry.status === 'submitting-deny'
    )
@@ -83,7 +83,10 @@ const ThinkModelTypes = [
  'o',
  'openai_deep_research',
  'gpt5',
  'gpt5_1',
  'gpt5_codex',
  'gpt5_1_codex',
  'gpt5pro',
  'grok',
  'grok4_fast',
  'gemini',
@@ -100,7 +103,7 @@ const ThinkModelTypes = [
] as const

export type ReasoningEffortOption = NonNullable<OpenAI.ReasoningEffort> | 'auto'
export type ThinkingOption = ReasoningEffortOption | 'off'
export type ThinkingOption = ReasoningEffortOption
export type ThinkingModelType = (typeof ThinkModelTypes)[number]
export type ThinkingOptionConfig = Record<ThinkingModelType, ThinkingOption[]>
export type ReasoningEffortConfig = Record<ThinkingModelType, ReasoningEffortOption[]>
@@ -111,6 +114,7 @@ export function isThinkModelType(type: string): type is ThinkingModelType {
}

export const EFFORT_RATIO: EffortRatio = {
  none: 0.01,
  minimal: 0.05,
  low: 0.05,
  medium: 0.5,

@@ -13,7 +13,7 @@ export type NotesSortType =
export interface NotesTreeNode {
  id: string
  name: string // does not include the file extension
  type: 'folder' | 'file'
  type: 'folder' | 'file' | 'hint'
  treePath: string // relative path
  externalPath: string // absolute path
  children?: NotesTreeNode[]

@@ -126,6 +126,10 @@ export type OpenAIExtraBody = {
    source_lang: 'auto'
    target_lang: string
  }
  // for gpt-5 series models verbosity control
  text?: {
    verbosity?: 'low' | 'medium' | 'high'
  }
}
// image is for openrouter. audio is ignored for now
export type OpenAIModality = OpenAI.ChatCompletionModality | 'image'
110  yarn.lock
@@ -102,7 +102,19 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/anthropic@npm:2.0.44":
|
||||
"@ai-sdk/anthropic@npm:2.0.45":
|
||||
version: 2.0.45
|
||||
resolution: "@ai-sdk/anthropic@npm:2.0.45"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.17"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/ef0e54f032e3b8324c278f3b25d9b388308204d753404c49fd880709a796c2343aee36d335c99f50e683edd39d5b8b6f42b2e9034e1725d8e0db514e2233d104
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/anthropic@npm:^2.0.44":
|
||||
version: 2.0.44
|
||||
resolution: "@ai-sdk/anthropic@npm:2.0.44"
|
||||
dependencies:
|
||||
@@ -179,42 +191,42 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/google-vertex@npm:^3.0.62":
|
||||
version: 3.0.62
|
||||
resolution: "@ai-sdk/google-vertex@npm:3.0.62"
|
||||
"@ai-sdk/google-vertex@npm:^3.0.68":
|
||||
version: 3.0.68
|
||||
resolution: "@ai-sdk/google-vertex@npm:3.0.68"
|
||||
dependencies:
|
||||
"@ai-sdk/anthropic": "npm:2.0.44"
|
||||
"@ai-sdk/google": "npm:2.0.31"
|
||||
"@ai-sdk/anthropic": "npm:2.0.45"
|
||||
"@ai-sdk/google": "npm:2.0.36"
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.17"
|
||||
google-auth-library: "npm:^9.15.0"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/673bb51e3e0cbe5235ad5e65379b1cb8f099dbc690ab8552e208553a9f1cc6026d2588e956e73468bc6d267066be276e7a9aba98e32e905809dfbeab4ac0e352
|
||||
checksum: 10c0/6a3f4cb1e649313b46a0c349c717757071f8b012b0a28e59ab7a55fd35d9600f0043f0a4f57417c4cc49e0d3734e89a1e4fb248fc88795b5286c83395d3f617a
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/google@npm:2.0.31":
|
||||
version: 2.0.31
|
||||
resolution: "@ai-sdk/google@npm:2.0.31"
|
||||
"@ai-sdk/google@npm:2.0.36":
|
||||
version: 2.0.36
|
||||
resolution: "@ai-sdk/google@npm:2.0.36"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.17"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/d8f143f058fb62e6e67e30564ec92530d7389c22ad91b1e4bbe781c8570bf718cd417e44dcd4855e347e85c4174538a9a884eac666109e17f20d21467ab3e749
|
||||
checksum: 10c0/2c6de5e1cf0703b6b932a3f313bf4bc9439897af39c805169ab04bba397185d99b2b1306f3b817f991ca41fdced0365b072ee39e76382c045930256bce47e0e4
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch":
|
||||
version: 2.0.31
|
||||
resolution: "@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch::version=2.0.31&hash=9f3835"
|
||||
"@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch":
|
||||
version: 2.0.36
|
||||
resolution: "@ai-sdk/google@patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch::version=2.0.36&hash=2da8c3"
|
||||
dependencies:
|
||||
"@ai-sdk/provider": "npm:2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:3.0.17"
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
checksum: 10c0/dd37dfb7abf402caaae3edb2f1a8dab018fddad6ba3190376723e03a2a0c352329c8e41e60df3fb8436b717d9c2ee4b82dff091848f50d026f62565cbdb158f8
|
||||
checksum: 10c0/ce99a497360377d2917cf3a48278eb6f4337623ce3738ba743cf048c8c2a7731ec4fc27605a50e461e716ed49b3690206ca8e4078f27cb7be162b684bfc2fc22
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
@@ -1879,30 +1891,30 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@cherrystudio/ai-core@workspace:^1.0.0-alpha.18, @cherrystudio/ai-core@workspace:packages/aiCore":
|
||||
"@cherrystudio/ai-core@workspace:^1.0.9, @cherrystudio/ai-core@workspace:packages/aiCore":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@cherrystudio/ai-core@workspace:packages/aiCore"
|
||||
dependencies:
|
||||
"@ai-sdk/anthropic": "npm:^2.0.43"
|
||||
"@ai-sdk/azure": "npm:^2.0.66"
|
||||
"@ai-sdk/deepseek": "npm:^1.0.27"
|
||||
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch"
|
||||
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch"
|
||||
"@ai-sdk/openai-compatible": "npm:^1.0.26"
|
||||
"@ai-sdk/provider": "npm:^2.0.0"
|
||||
"@ai-sdk/provider-utils": "npm:^3.0.16"
|
||||
"@ai-sdk/xai": "npm:^2.0.31"
|
||||
"@cherrystudio/ai-sdk-provider": "workspace:*"
|
||||
tsdown: "npm:^0.12.9"
|
||||
typescript: "npm:^5.0.0"
|
||||
vitest: "npm:^3.2.4"
|
||||
zod: "npm:^4.1.5"
|
||||
peerDependencies:
|
||||
"@ai-sdk/google": ^2.0.36
|
||||
"@ai-sdk/openai": ^2.0.64
|
||||
"@cherrystudio/ai-sdk-provider": ^0.1.2
|
||||
ai: ^5.0.26
|
||||
languageName: unknown
|
||||
linkType: soft
|
||||
|
||||
"@cherrystudio/ai-sdk-provider@workspace:*, @cherrystudio/ai-sdk-provider@workspace:packages/ai-sdk-provider":
|
||||
"@cherrystudio/ai-sdk-provider@workspace:packages/ai-sdk-provider":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@cherrystudio/ai-sdk-provider@workspace:packages/ai-sdk-provider"
|
||||
dependencies:
|
||||
@@ -2140,9 +2152,9 @@ __metadata:
|
||||
languageName: unknown
|
||||
linkType: soft
|
||||
|
||||
"@cherrystudio/openai@npm:^6.5.0":
|
||||
version: 6.5.0
|
||||
resolution: "@cherrystudio/openai@npm:6.5.0"
|
||||
"@cherrystudio/openai@npm:^6.9.0":
|
||||
version: 6.9.0
|
||||
resolution: "@cherrystudio/openai@npm:6.9.0"
|
||||
peerDependencies:
|
||||
ws: ^8.18.0
|
||||
zod: ^3.25 || ^4.0
|
||||
@@ -2153,7 +2165,7 @@ __metadata:
|
||||
optional: true
|
||||
bin:
|
||||
openai: bin/cli
|
||||
checksum: 10c0/0f6cafb97aec17037d5ddcccc88e4b4a9c8de77a989a35bab2394b682a1a69e8a9343e8ee5eb8107d5c495970dbf3567642f154c033f7afc3bf078078666a92e
|
||||
checksum: 10c0/9c51ef33c5b9d08041a115e3d6a8158412a379998a0eae186923d5bdcc808b634c1fef4471a1d499bb8c624b04c075167bc90a1a60a805005c0657ecebbb58d0
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
@@ -5169,15 +5181,15 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@opeoginni/github-copilot-openai-compatible@npm:0.1.19":
|
||||
version: 0.1.19
|
||||
resolution: "@opeoginni/github-copilot-openai-compatible@npm:0.1.19"
|
||||
"@opeoginni/github-copilot-openai-compatible@npm:0.1.21":
|
||||
version: 0.1.21
|
||||
resolution: "@opeoginni/github-copilot-openai-compatible@npm:0.1.21"
|
||||
dependencies:
|
||||
"@ai-sdk/openai": "npm:^2.0.42"
|
||||
"@ai-sdk/openai-compatible": "npm:^1.0.19"
|
||||
"@ai-sdk/provider": "npm:^2.1.0-beta.4"
|
||||
"@ai-sdk/provider-utils": "npm:^3.0.10"
|
||||
checksum: 10c0/dfb01832d7c704b2eb080fc09d31b07fc26e5ac4e648ce219dc0d80cf044ef3cae504427781ec2ce3c5a2459c9c81d043046a255642108d5b3de0f83f4a9f20a
|
||||
checksum: 10c0/05b73d935dc7f24123330ade919698b486ac2a25a7d607c1d3789471f782ead4c803ce6ffd3d97b9ca3f1aadaf6b5c1ea52363c9d24b36894fcfc403fda9cef3
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
@@ -8732,12 +8744,12 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@types/react-dom@npm:^19.0.4":
|
||||
version: 19.1.2
|
||||
resolution: "@types/react-dom@npm:19.1.2"
|
||||
"@types/react-dom@npm:^19.2.3":
|
||||
version: 19.2.3
|
||||
resolution: "@types/react-dom@npm:19.2.3"
|
||||
peerDependencies:
|
||||
"@types/react": ^19.0.0
|
||||
checksum: 10c0/100c341cacba9ec8ae1d47ee051072a3450e9573bf8eeb7262490e341cb246ea0f95a07a1f2077e61cf92648f812a0324c602fcd811bd87b7ce41db2811510cd
|
||||
"@types/react": ^19.2.0
|
||||
checksum: 10c0/b486ebe0f4e2fb35e2e108df1d8fc0927ca5d6002d5771e8a739de11239fe62d0e207c50886185253c99eb9dedfeeb956ea7429e5ba17f6693c7acb4c02f8cd1
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
@@ -8777,12 +8789,12 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@types/react@npm:^19.0.12":
|
||||
version: 19.1.2
|
||||
resolution: "@types/react@npm:19.1.2"
|
||||
"@types/react@npm:^19.2.6":
|
||||
version: 19.2.6
|
||||
resolution: "@types/react@npm:19.2.6"
|
||||
dependencies:
|
||||
csstype: "npm:^3.0.2"
|
||||
checksum: 10c0/76ffe71395c713d4adc3c759465012d3c956db00af35ab7c6d0d91bd07b274b7ce69caa0478c0760311587bd1e38c78ffc9688ebc629f2b266682a19d8750947
|
||||
csstype: "npm:^3.2.2"
|
||||
checksum: 10c0/23b1100f88662ce9f9e4fcca3a2b4ef9fff1ecde24ede2b2dcbd07731e48d6946fd7fd156cd133f5b25321694b0569cd9b8dd30b22c4e076d1cf4c8cdd9a75cb
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
@@ -9891,11 +9903,14 @@ __metadata:
|
||||
"@agentic/searxng": "npm:^7.3.3"
|
||||
"@agentic/tavily": "npm:^7.3.3"
|
||||
"@ai-sdk/amazon-bedrock": "npm:^3.0.53"
|
||||
"@ai-sdk/anthropic": "npm:^2.0.44"
|
||||
"@ai-sdk/cerebras": "npm:^1.0.31"
|
||||
"@ai-sdk/gateway": "npm:^2.0.9"
|
||||
"@ai-sdk/google-vertex": "npm:^3.0.62"
|
||||
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.36#~/.yarn/patches/@ai-sdk-google-npm-2.0.36-6f3cc06026.patch"
|
||||
"@ai-sdk/google-vertex": "npm:^3.0.68"
|
||||
"@ai-sdk/huggingface": "patch:@ai-sdk/huggingface@npm%3A0.0.8#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.8-d4d0aaac93.patch"
|
||||
"@ai-sdk/mistral": "npm:^2.0.23"
|
||||
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch"
|
||||
"@ai-sdk/perplexity": "npm:^2.0.17"
|
||||
"@ant-design/v5-patch-for-react-19": "npm:^1.0.3"
|
||||
"@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.30#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.30-b50a299674.patch"
|
||||
@@ -9905,7 +9920,7 @@ __metadata:
|
||||
"@aws-sdk/client-bedrock-runtime": "npm:^3.910.0"
|
||||
"@aws-sdk/client-s3": "npm:^3.910.0"
|
||||
"@biomejs/biome": "npm:2.2.4"
|
||||
"@cherrystudio/ai-core": "workspace:^1.0.0-alpha.18"
|
||||
"@cherrystudio/ai-core": "workspace:^1.0.9"
|
||||
"@cherrystudio/embedjs": "npm:^0.1.31"
|
||||
"@cherrystudio/embedjs-libsql": "npm:^0.1.31"
|
||||
"@cherrystudio/embedjs-loader-csv": "npm:^0.1.31"
|
||||
@@ -9919,7 +9934,7 @@ __metadata:
|
||||
"@cherrystudio/embedjs-ollama": "npm:^0.1.31"
|
||||
"@cherrystudio/embedjs-openai": "npm:^0.1.31"
|
||||
"@cherrystudio/extension-table-plus": "workspace:^"
|
||||
"@cherrystudio/openai": "npm:^6.5.0"
|
||||
"@cherrystudio/openai": "npm:^6.9.0"
|
||||
"@dnd-kit/core": "npm:^6.3.1"
|
||||
"@dnd-kit/modifiers": "npm:^9.0.0"
|
||||
"@dnd-kit/sortable": "npm:^10.0.0"
|
||||
@@ -9952,7 +9967,7 @@ __metadata:
|
||||
"@opentelemetry/sdk-trace-base": "npm:^2.0.0"
|
||||
"@opentelemetry/sdk-trace-node": "npm:^2.0.0"
|
||||
"@opentelemetry/sdk-trace-web": "npm:^2.0.0"
|
||||
"@opeoginni/github-copilot-openai-compatible": "npm:0.1.19"
|
||||
"@opeoginni/github-copilot-openai-compatible": "npm:0.1.21"
|
||||
"@paymoapp/electron-shutdown-handler": "npm:^1.1.2"
|
||||
"@playwright/test": "npm:^1.52.0"
|
||||
"@radix-ui/react-context-menu": "npm:^2.2.16"
|
||||
@@ -10000,8 +10015,8 @@ __metadata:
|
||||
"@types/mime-types": "npm:^3"
|
||||
"@types/node": "npm:^22.17.1"
|
||||
"@types/pako": "npm:^1.0.2"
|
||||
"@types/react": "npm:^19.0.12"
|
||||
"@types/react-dom": "npm:^19.0.4"
|
||||
"@types/react": "npm:^19.2.6"
|
||||
"@types/react-dom": "npm:^19.2.3"
|
||||
"@types/react-infinite-scroll-component": "npm:^5.0.0"
|
||||
"@types/react-transition-group": "npm:^4.4.12"
|
||||
"@types/react-window": "npm:^1"
|
||||
@@ -12268,6 +12283,13 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"csstype@npm:^3.2.2":
|
||||
version: 3.2.3
|
||||
resolution: "csstype@npm:3.2.3"
|
||||
checksum: 10c0/cd29c51e70fa822f1cecd8641a1445bed7063697469d35633b516e60fe8c1bde04b08f6c5b6022136bb669b64c63d4173af54864510fbb4ee23281801841a3ce
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"csv-parse@npm:^5.6.0":
|
||||
version: 5.6.0
|
||||
resolution: "csv-parse@npm:5.6.0"