Compare commits
29 Commits
feat/model
...
copilot/fi
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c97f7b843d | ||
|
|
438b1673d5 | ||
|
|
3bd71e4618 | ||
|
|
9b8e4d1d70 | ||
|
|
cfb9ee7df3 | ||
|
|
ef7e8a7201 | ||
|
|
54503c0e62 | ||
|
|
a66c0860b2 | ||
|
|
a418b61230 | ||
|
|
cd188e128e | ||
|
|
252e30a66e | ||
|
|
1067e6fd85 | ||
|
|
24563b524c | ||
|
|
c9b1e61b8f | ||
|
|
27ccc25e20 | ||
|
|
ed6bfeca77 | ||
|
|
c2fe2160b5 | ||
|
|
b7d8dff0d3 | ||
|
|
ecc7f635b8 | ||
|
|
71f0059960 | ||
|
|
ec16657cbb | ||
|
|
d5dd8bc123 | ||
|
|
99de3eeff7 | ||
|
|
795fb715e3 | ||
|
|
d8bbd3fdb9 | ||
|
|
6d259bb5bd | ||
|
|
4be84b59bc | ||
|
|
ac9c6c204c | ||
|
|
f1bad06ae5 |
2
.github/workflows/auto-i18n.yml
vendored
2
.github/workflows/auto-i18n.yml
vendored
@@ -77,7 +77,7 @@ jobs:
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }} # Use the built-in GITHUB_TOKEN for bot actions
|
||||
commit-message: "feat(bot): Weekly automated script run"
|
||||
title: "🤖 Weekly Auto I18N Sync: ${{ env.CURRENT_DATE }}"
|
||||
title: "🤖 Weekly Automated Update: ${{ env.CURRENT_DATE }}"
|
||||
body: |
|
||||
This PR includes changes generated by the weekly auto i18n.
|
||||
Review the changes before merging.
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
diff --git a/dist/index.js b/dist/index.js
|
||||
index dc7b74ba55337c491cdf1ab3e39ca68cc4187884..ace8c90591288e42c2957e93c9bf7984f1b22444 100644
|
||||
index ff305b112779b718f21a636a27b1196125a332d9..cf32ff5086d4d9e56f8fe90c98724559083bafc3 100644
|
||||
--- a/dist/index.js
|
||||
+++ b/dist/index.js
|
||||
@@ -472,7 +472,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
|
||||
@@ -471,7 +471,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
|
||||
|
||||
// src/get-model-path.ts
|
||||
function getModelPath(modelId) {
|
||||
@@ -12,10 +12,10 @@ index dc7b74ba55337c491cdf1ab3e39ca68cc4187884..ace8c90591288e42c2957e93c9bf7984
|
||||
|
||||
// src/google-generative-ai-options.ts
|
||||
diff --git a/dist/index.mjs b/dist/index.mjs
|
||||
index 8390439c38cb7eaeb52080862cd6f4c58509e67c..a7647f2e11700dff7e1c8d4ae8f99d3637010733 100644
|
||||
index 57659290f1cec74878a385626ad75b2a4d5cd3fc..d04e5927ec3725b6ffdb80868bfa1b5a48849537 100644
|
||||
--- a/dist/index.mjs
|
||||
+++ b/dist/index.mjs
|
||||
@@ -478,7 +478,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
|
||||
@@ -477,7 +477,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
|
||||
|
||||
// src/get-model-path.ts
|
||||
function getModelPath(modelId) {
|
||||
131
.yarn/patches/@ai-sdk-huggingface-npm-0.0.8-d4d0aaac93.patch
vendored
Normal file
131
.yarn/patches/@ai-sdk-huggingface-npm-0.0.8-d4d0aaac93.patch
vendored
Normal file
@@ -0,0 +1,131 @@
|
||||
diff --git a/dist/index.mjs b/dist/index.mjs
|
||||
index b3f018730a93639aad7c203f15fb1aeb766c73f4..ade2a43d66e9184799d072153df61ef7be4ea110 100644
|
||||
--- a/dist/index.mjs
|
||||
+++ b/dist/index.mjs
|
||||
@@ -296,7 +296,14 @@ var HuggingFaceResponsesLanguageModel = class {
|
||||
metadata: huggingfaceOptions == null ? void 0 : huggingfaceOptions.metadata,
|
||||
instructions: huggingfaceOptions == null ? void 0 : huggingfaceOptions.instructions,
|
||||
...preparedTools && { tools: preparedTools },
|
||||
- ...preparedToolChoice && { tool_choice: preparedToolChoice }
|
||||
+ ...preparedToolChoice && { tool_choice: preparedToolChoice },
|
||||
+ ...(huggingfaceOptions?.reasoningEffort != null && {
|
||||
+ reasoning: {
|
||||
+ ...(huggingfaceOptions?.reasoningEffort != null && {
|
||||
+ effort: huggingfaceOptions.reasoningEffort,
|
||||
+ }),
|
||||
+ },
|
||||
+ }),
|
||||
};
|
||||
return { args: baseArgs, warnings };
|
||||
}
|
||||
@@ -365,6 +372,20 @@ var HuggingFaceResponsesLanguageModel = class {
|
||||
}
|
||||
break;
|
||||
}
|
||||
+ case 'reasoning': {
|
||||
+ for (const contentPart of part.content) {
|
||||
+ content.push({
|
||||
+ type: 'reasoning',
|
||||
+ text: contentPart.text,
|
||||
+ providerMetadata: {
|
||||
+ huggingface: {
|
||||
+ itemId: part.id,
|
||||
+ },
|
||||
+ },
|
||||
+ });
|
||||
+ }
|
||||
+ break;
|
||||
+ }
|
||||
case "mcp_call": {
|
||||
content.push({
|
||||
type: "tool-call",
|
||||
@@ -519,6 +540,11 @@ var HuggingFaceResponsesLanguageModel = class {
|
||||
id: value.item.call_id,
|
||||
toolName: value.item.name
|
||||
});
|
||||
+ } else if (value.item.type === 'reasoning') {
|
||||
+ controller.enqueue({
|
||||
+ type: 'reasoning-start',
|
||||
+ id: value.item.id,
|
||||
+ });
|
||||
}
|
||||
return;
|
||||
}
|
||||
@@ -570,6 +596,22 @@ var HuggingFaceResponsesLanguageModel = class {
|
||||
});
|
||||
return;
|
||||
}
|
||||
+ if (isReasoningDeltaChunk(value)) {
|
||||
+ controller.enqueue({
|
||||
+ type: 'reasoning-delta',
|
||||
+ id: value.item_id,
|
||||
+ delta: value.delta,
|
||||
+ });
|
||||
+ return;
|
||||
+ }
|
||||
+
|
||||
+ if (isReasoningEndChunk(value)) {
|
||||
+ controller.enqueue({
|
||||
+ type: 'reasoning-end',
|
||||
+ id: value.item_id,
|
||||
+ });
|
||||
+ return;
|
||||
+ }
|
||||
},
|
||||
flush(controller) {
|
||||
controller.enqueue({
|
||||
@@ -593,7 +635,8 @@ var HuggingFaceResponsesLanguageModel = class {
|
||||
var huggingfaceResponsesProviderOptionsSchema = z2.object({
|
||||
metadata: z2.record(z2.string(), z2.string()).optional(),
|
||||
instructions: z2.string().optional(),
|
||||
- strictJsonSchema: z2.boolean().optional()
|
||||
+ strictJsonSchema: z2.boolean().optional(),
|
||||
+ reasoningEffort: z2.string().optional(),
|
||||
});
|
||||
var huggingfaceResponsesResponseSchema = z2.object({
|
||||
id: z2.string(),
|
||||
@@ -727,12 +770,31 @@ var responseCreatedChunkSchema = z2.object({
|
||||
model: z2.string()
|
||||
})
|
||||
});
|
||||
+var reasoningTextDeltaChunkSchema = z2.object({
|
||||
+ type: z2.literal('response.reasoning_text.delta'),
|
||||
+ item_id: z2.string(),
|
||||
+ output_index: z2.number(),
|
||||
+ content_index: z2.number(),
|
||||
+ delta: z2.string(),
|
||||
+ sequence_number: z2.number(),
|
||||
+});
|
||||
+
|
||||
+var reasoningTextEndChunkSchema = z2.object({
|
||||
+ type: z2.literal('response.reasoning_text.done'),
|
||||
+ item_id: z2.string(),
|
||||
+ output_index: z2.number(),
|
||||
+ content_index: z2.number(),
|
||||
+ text: z2.string(),
|
||||
+ sequence_number: z2.number(),
|
||||
+});
|
||||
var huggingfaceResponsesChunkSchema = z2.union([
|
||||
responseOutputItemAddedSchema,
|
||||
responseOutputItemDoneSchema,
|
||||
textDeltaChunkSchema,
|
||||
responseCompletedChunkSchema,
|
||||
responseCreatedChunkSchema,
|
||||
+ reasoningTextDeltaChunkSchema,
|
||||
+ reasoningTextEndChunkSchema,
|
||||
z2.object({ type: z2.string() }).loose()
|
||||
// fallback for unknown chunks
|
||||
]);
|
||||
@@ -751,6 +813,12 @@ function isResponseCompletedChunk(chunk) {
|
||||
function isResponseCreatedChunk(chunk) {
|
||||
return chunk.type === "response.created";
|
||||
}
|
||||
+function isReasoningDeltaChunk(chunk) {
|
||||
+ return chunk.type === 'response.reasoning_text.delta';
|
||||
+}
|
||||
+function isReasoningEndChunk(chunk) {
|
||||
+ return chunk.type === 'response.reasoning_text.done';
|
||||
+}
|
||||
|
||||
// src/huggingface-provider.ts
|
||||
function createHuggingFace(options = {}) {
|
||||
@@ -1,140 +0,0 @@
|
||||
diff --git a/dist/index.js b/dist/index.js
|
||||
index 73045a7d38faafdc7f7d2cd79d7ff0e2b031056b..8d948c9ac4ea4b474db9ef3c5491961e7fcf9a07 100644
|
||||
--- a/dist/index.js
|
||||
+++ b/dist/index.js
|
||||
@@ -421,6 +421,17 @@ var OpenAICompatibleChatLanguageModel = class {
|
||||
text: reasoning
|
||||
});
|
||||
}
|
||||
+ if (choice.message.images) {
|
||||
+ for (const image of choice.message.images) {
|
||||
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
|
||||
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
|
||||
+ content.push({
|
||||
+ type: 'file',
|
||||
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
|
||||
+ data: match2 ? match2[1] : image.image_url.url,
|
||||
+ });
|
||||
+ }
|
||||
+ }
|
||||
if (choice.message.tool_calls != null) {
|
||||
for (const toolCall of choice.message.tool_calls) {
|
||||
content.push({
|
||||
@@ -598,6 +609,17 @@ var OpenAICompatibleChatLanguageModel = class {
|
||||
delta: delta.content
|
||||
});
|
||||
}
|
||||
+ if (delta.images) {
|
||||
+ for (const image of delta.images) {
|
||||
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
|
||||
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
|
||||
+ controller.enqueue({
|
||||
+ type: 'file',
|
||||
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
|
||||
+ data: match2 ? match2[1] : image.image_url.url,
|
||||
+ });
|
||||
+ }
|
||||
+ }
|
||||
if (delta.tool_calls != null) {
|
||||
for (const toolCallDelta of delta.tool_calls) {
|
||||
const index = toolCallDelta.index;
|
||||
@@ -765,6 +787,14 @@ var OpenAICompatibleChatResponseSchema = import_v43.z.object({
|
||||
arguments: import_v43.z.string()
|
||||
})
|
||||
})
|
||||
+ ).nullish(),
|
||||
+ images: import_v43.z.array(
|
||||
+ import_v43.z.object({
|
||||
+ type: import_v43.z.literal('image_url'),
|
||||
+ image_url: import_v43.z.object({
|
||||
+ url: import_v43.z.string(),
|
||||
+ })
|
||||
+ })
|
||||
).nullish()
|
||||
}),
|
||||
finish_reason: import_v43.z.string().nullish()
|
||||
@@ -795,6 +825,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_v43.z.union(
|
||||
arguments: import_v43.z.string().nullish()
|
||||
})
|
||||
})
|
||||
+ ).nullish(),
|
||||
+ images: import_v43.z.array(
|
||||
+ import_v43.z.object({
|
||||
+ type: import_v43.z.literal('image_url'),
|
||||
+ image_url: import_v43.z.object({
|
||||
+ url: import_v43.z.string(),
|
||||
+ })
|
||||
+ })
|
||||
).nullish()
|
||||
}).nullish(),
|
||||
finish_reason: import_v43.z.string().nullish()
|
||||
diff --git a/dist/index.mjs b/dist/index.mjs
|
||||
index 1c2b9560bbfbfe10cb01af080aeeed4ff59db29c..2c8ddc4fc9bfc5e7e06cfca105d197a08864c427 100644
|
||||
--- a/dist/index.mjs
|
||||
+++ b/dist/index.mjs
|
||||
@@ -405,6 +405,17 @@ var OpenAICompatibleChatLanguageModel = class {
|
||||
text: reasoning
|
||||
});
|
||||
}
|
||||
+ if (choice.message.images) {
|
||||
+ for (const image of choice.message.images) {
|
||||
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
|
||||
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
|
||||
+ content.push({
|
||||
+ type: 'file',
|
||||
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
|
||||
+ data: match2 ? match2[1] : image.image_url.url,
|
||||
+ });
|
||||
+ }
|
||||
+ }
|
||||
if (choice.message.tool_calls != null) {
|
||||
for (const toolCall of choice.message.tool_calls) {
|
||||
content.push({
|
||||
@@ -582,6 +593,17 @@ var OpenAICompatibleChatLanguageModel = class {
|
||||
delta: delta.content
|
||||
});
|
||||
}
|
||||
+ if (delta.images) {
|
||||
+ for (const image of delta.images) {
|
||||
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
|
||||
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
|
||||
+ controller.enqueue({
|
||||
+ type: 'file',
|
||||
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
|
||||
+ data: match2 ? match2[1] : image.image_url.url,
|
||||
+ });
|
||||
+ }
|
||||
+ }
|
||||
if (delta.tool_calls != null) {
|
||||
for (const toolCallDelta of delta.tool_calls) {
|
||||
const index = toolCallDelta.index;
|
||||
@@ -749,6 +771,14 @@ var OpenAICompatibleChatResponseSchema = z3.object({
|
||||
arguments: z3.string()
|
||||
})
|
||||
})
|
||||
+ ).nullish(),
|
||||
+ images: z3.array(
|
||||
+ z3.object({
|
||||
+ type: z3.literal('image_url'),
|
||||
+ image_url: z3.object({
|
||||
+ url: z3.string(),
|
||||
+ })
|
||||
+ })
|
||||
).nullish()
|
||||
}),
|
||||
finish_reason: z3.string().nullish()
|
||||
@@ -779,6 +809,14 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => z3.union([
|
||||
arguments: z3.string().nullish()
|
||||
})
|
||||
})
|
||||
+ ).nullish(),
|
||||
+ images: z3.array(
|
||||
+ z3.object({
|
||||
+ type: z3.literal('image_url'),
|
||||
+ image_url: z3.object({
|
||||
+ url: z3.string(),
|
||||
+ })
|
||||
+ })
|
||||
).nullish()
|
||||
}).nullish(),
|
||||
finish_reason: z3.string().nullish()
|
||||
@@ -1,5 +1,5 @@
|
||||
diff --git a/dist/index.js b/dist/index.js
|
||||
index 7481f3b3511078068d87d03855b568b20bb86971..8ac5ec28d2f7ad1b3b0d3f8da945c75674e59637 100644
|
||||
index 992c85ac6656e51c3471af741583533c5a7bf79f..83c05952a07aebb95fc6c62f9ddb8aa96b52ac0d 100644
|
||||
--- a/dist/index.js
|
||||
+++ b/dist/index.js
|
||||
@@ -274,6 +274,7 @@ var openaiChatResponseSchema = (0, import_provider_utils3.lazyValidator)(
|
||||
@@ -18,7 +18,7 @@ index 7481f3b3511078068d87d03855b568b20bb86971..8ac5ec28d2f7ad1b3b0d3f8da945c756
|
||||
tool_calls: import_v42.z.array(
|
||||
import_v42.z.object({
|
||||
index: import_v42.z.number(),
|
||||
@@ -795,6 +797,13 @@ var OpenAIChatLanguageModel = class {
|
||||
@@ -785,6 +787,13 @@ var OpenAIChatLanguageModel = class {
|
||||
if (text != null && text.length > 0) {
|
||||
content.push({ type: "text", text });
|
||||
}
|
||||
@@ -32,7 +32,7 @@ index 7481f3b3511078068d87d03855b568b20bb86971..8ac5ec28d2f7ad1b3b0d3f8da945c756
|
||||
for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
|
||||
content.push({
|
||||
type: "tool-call",
|
||||
@@ -876,6 +885,7 @@ var OpenAIChatLanguageModel = class {
|
||||
@@ -866,6 +875,7 @@ var OpenAIChatLanguageModel = class {
|
||||
};
|
||||
let metadataExtracted = false;
|
||||
let isActiveText = false;
|
||||
@@ -40,7 +40,7 @@ index 7481f3b3511078068d87d03855b568b20bb86971..8ac5ec28d2f7ad1b3b0d3f8da945c756
|
||||
const providerMetadata = { openai: {} };
|
||||
return {
|
||||
stream: response.pipeThrough(
|
||||
@@ -933,6 +943,21 @@ var OpenAIChatLanguageModel = class {
|
||||
@@ -923,6 +933,21 @@ var OpenAIChatLanguageModel = class {
|
||||
return;
|
||||
}
|
||||
const delta = choice.delta;
|
||||
@@ -62,7 +62,7 @@ index 7481f3b3511078068d87d03855b568b20bb86971..8ac5ec28d2f7ad1b3b0d3f8da945c756
|
||||
if (delta.content != null) {
|
||||
if (!isActiveText) {
|
||||
controller.enqueue({ type: "text-start", id: "0" });
|
||||
@@ -1045,6 +1070,9 @@ var OpenAIChatLanguageModel = class {
|
||||
@@ -1035,6 +1060,9 @@ var OpenAIChatLanguageModel = class {
|
||||
}
|
||||
},
|
||||
flush(controller) {
|
||||
29
.yarn/patches/@tiptap-extension-code-npm-3.10.7-6d3deb3e10.patch
vendored
Normal file
29
.yarn/patches/@tiptap-extension-code-npm-3.10.7-6d3deb3e10.patch
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
diff --git a/dist/index.cjs b/dist/index.cjs
|
||||
index 650402009637c04dce23b2de9baa48b69601f6e7..e4106894f67ff68b78e4e7485b7beb24570f91c0 100644
|
||||
--- a/dist/index.cjs
|
||||
+++ b/dist/index.cjs
|
||||
@@ -29,8 +29,8 @@ module.exports = __toCommonJS(index_exports);
|
||||
|
||||
// src/code.ts
|
||||
var import_core = require("@tiptap/core");
|
||||
-var inputRegex = /(^|[^`])`([^`]+)`(?!`)$/;
|
||||
-var pasteRegex = /(^|[^`])`([^`]+)`(?!`)/g;
|
||||
+var inputRegex = /(?:^|\s)(`(?!\s+`)((?:[^`]+))`(?!\s+`))$/;
|
||||
+var pasteRegex = /(?:^|\s)(`(?!\s+`)((?:[^`]+))`(?!\s+`))/g;
|
||||
var Code = import_core.Mark.create({
|
||||
name: "code",
|
||||
addOptions() {
|
||||
diff --git a/dist/index.js b/dist/index.js
|
||||
index 7f9e650a5713377d8d6a824f884bbfe6d27fe519..3736cac514b979438a808705931636ae04b06d16 100644
|
||||
--- a/dist/index.js
|
||||
+++ b/dist/index.js
|
||||
@@ -1,7 +1,7 @@
|
||||
// src/code.ts
|
||||
import { Mark, markInputRule, markPasteRule, mergeAttributes } from "@tiptap/core";
|
||||
-var inputRegex = /(^|[^`])`([^`]+)`(?!`)$/;
|
||||
-var pasteRegex = /(^|[^`])`([^`]+)`(?!`)/g;
|
||||
+var inputRegex = /(?:^|\s)(`(?!\s+`)((?:[^`]+))`(?!\s+`))$/;
|
||||
+var pasteRegex = /(?:^|\s)(`(?!\s+`)((?:[^`]+))`(?!\s+`))/g;
|
||||
var Code = Mark.create({
|
||||
name: "code",
|
||||
addOptions() {
|
||||
@@ -1,8 +1,8 @@
|
||||
diff --git a/dist/index.cjs b/dist/index.cjs
|
||||
index 8e560a4406c5cc616c11bb9fd5455ac0dcf47fa3..c7cd0d65ddc971bff71e89f610de82cfdaa5a8c7 100644
|
||||
index 506aa37711fdb8452c68c4e1364b769793e56290..a69f9cc11066f5cf224599cb7b01c7ab6d465bb1 100644
|
||||
--- a/dist/index.cjs
|
||||
+++ b/dist/index.cjs
|
||||
@@ -413,6 +413,19 @@ var DragHandlePlugin = ({
|
||||
@@ -454,6 +454,19 @@ var DragHandlePlugin = ({
|
||||
}
|
||||
return false;
|
||||
},
|
||||
@@ -23,10 +23,10 @@ index 8e560a4406c5cc616c11bb9fd5455ac0dcf47fa3..c7cd0d65ddc971bff71e89f610de82cf
|
||||
if (locked) {
|
||||
return false;
|
||||
diff --git a/dist/index.js b/dist/index.js
|
||||
index 39e4c3ef9986cd25544d9d3994cf6a9ada74b145..378d9130abbfdd0e1e4f743b5b537743c9ab07d0 100644
|
||||
index ad58ef1637a6e5544733f4002cd0cfcc8e43022a..ce03e2e2882e8d1828726dcb3de31e9cbeb83374 100644
|
||||
--- a/dist/index.js
|
||||
+++ b/dist/index.js
|
||||
@@ -387,6 +387,19 @@ var DragHandlePlugin = ({
|
||||
@@ -428,6 +428,19 @@ var DragHandlePlugin = ({
|
||||
}
|
||||
return false;
|
||||
},
|
||||
28
.yarn/patches/@tiptap-extension-table-of-contents-npm-3.10.7-4852787461.patch
vendored
Normal file
28
.yarn/patches/@tiptap-extension-table-of-contents-npm-3.10.7-4852787461.patch
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
diff --git a/dist/index.cjs b/dist/index.cjs
|
||||
index f27ba0ac6bb377fb0e394e7b656edd60dd20cfd5..6dad2fc41d1df08a608ecc73ad89efabd4ccce31 100644
|
||||
--- a/dist/index.cjs
|
||||
+++ b/dist/index.cjs
|
||||
@@ -45,6 +45,9 @@ var TableOfContentsPlugin = ({
|
||||
return new import_state.Plugin({
|
||||
key: new import_state.PluginKey("tableOfContent"),
|
||||
appendTransaction(transactions, _oldState, newState) {
|
||||
+ if (transactions.some(tr => tr.getMeta('composition'))) {
|
||||
+ return
|
||||
+ }
|
||||
const tr = newState.tr;
|
||||
let modified = false;
|
||||
if (transactions.some((transaction) => transaction.docChanged)) {
|
||||
diff --git a/dist/index.js b/dist/index.js
|
||||
index 83afa3f0b57db38a80194d991dadb4e21a8f83da..bfbc84135845a9789f419c895eb4ea735b573363 100644
|
||||
--- a/dist/index.js
|
||||
+++ b/dist/index.js
|
||||
@@ -12,6 +12,9 @@ var TableOfContentsPlugin = ({
|
||||
return new Plugin({
|
||||
key: new PluginKey("tableOfContent"),
|
||||
appendTransaction(transactions, _oldState, newState) {
|
||||
+ if (transactions.some(tr => tr.getMeta('composition'))) {
|
||||
+ return
|
||||
+ }
|
||||
const tr = newState.tr;
|
||||
let modified = false;
|
||||
if (transactions.some((transaction) => transaction.docChanged)) {
|
||||
@@ -7,7 +7,7 @@ This file provides guidance to AI coding assistants when working with code in th
|
||||
- **Keep it clear**: Write code that is easy to read, maintain, and explain.
|
||||
- **Match the house style**: Reuse existing patterns, naming, and conventions.
|
||||
- **Search smart**: Prefer `ast-grep` for semantic queries; fall back to `rg`/`grep` when needed.
|
||||
- **Build with Tailwind CSS & Shadcn UI**: Use components from `@packages/ui` (Shadcn UI + Tailwind CSS) for every new UI component; never add `antd` or `styled-components`.
|
||||
- **Build with HeroUI**: Use HeroUI for every new UI component; never add `antd` or `styled-components`.
|
||||
- **Log centrally**: Route all logging through `loggerService` with the right context—no `console.log`.
|
||||
- **Research via subagent**: Lean on `subagent` for external docs, APIs, news, and references.
|
||||
- **Always propose before executing**: Before making any changes, clearly explain your planned approach and wait for explicit user approval to ensure alignment and prevent unwanted modifications.
|
||||
@@ -90,9 +90,9 @@ This file provides guidance to AI coding assistants when working with code in th
|
||||
|
||||
### UI Design
|
||||
|
||||
The project is in the process of migrating from antd & styled-components to Tailwind CSS and Shadcn UI. Please use components from `@packages/ui` to build UI components. The use of antd and styled-components is prohibited.
|
||||
The project is in the process of migrating from antd & styled-components to HeroUI. Please use HeroUI to build UI components. The use of antd and styled-components is prohibited.
|
||||
|
||||
UI Library: `@packages/ui`
|
||||
HeroUI Docs: https://www.heroui.com/docs/guide/introduction
|
||||
|
||||
### Database Architecture
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
}
|
||||
},
|
||||
"enabled": true,
|
||||
"includes": ["**/*.json", "!*.json", "!**/package.json", "!coverage/**"]
|
||||
"includes": ["**/*.json", "!*.json", "!**/package.json"]
|
||||
},
|
||||
"css": {
|
||||
"formatter": {
|
||||
@@ -23,7 +23,7 @@
|
||||
},
|
||||
"files": {
|
||||
"ignoreUnknown": false,
|
||||
"includes": ["**", "!**/.claude/**", "!**/.vscode/**"],
|
||||
"includes": ["**", "!**/.claude/**"],
|
||||
"maxSize": 2097152
|
||||
},
|
||||
"formatter": {
|
||||
|
||||
@@ -135,66 +135,58 @@ artifactBuildCompleted: scripts/artifact-build-completed.js
|
||||
releaseInfo:
|
||||
releaseNotes: |
|
||||
<!--LANG:en-->
|
||||
What's New in v1.7.0-rc.2
|
||||
What's New in v1.7.0-rc.1
|
||||
|
||||
🎉 MAJOR NEW FEATURE: AI Agents
|
||||
- Create and manage custom AI agents with specialized tools and permissions
|
||||
- Dedicated agent sessions with persistent SQLite storage, separate from regular chats
|
||||
- Real-time tool approval system - review and approve agent actions dynamically
|
||||
- MCP (Model Context Protocol) integration for connecting external tools
|
||||
- Slash commands support for quick agent interactions
|
||||
- OpenAI-compatible REST API for agent access
|
||||
|
||||
✨ New Features:
|
||||
- AI Models: Added support for Gemini 3, Gemini 3 Pro with image preview, and GPT-5.1
|
||||
- Import: ChatGPT conversation import feature
|
||||
- Agent: Git Bash detection and requirement check for Windows agents
|
||||
- Search: Native language emoji search with CLDR data format
|
||||
- Provider: Endpoint type support for cherryin provider
|
||||
- Debug: Local crash mini dump file for better diagnostics
|
||||
|
||||
🐛 Important Bug Fixes:
|
||||
- Error Handling: Improved error display in AiSdkToChunkAdapter
|
||||
- Database: Optimized DatabaseManager and fixed libsql crash issues
|
||||
- Memory: Fixed EventEmitter memory leak in useApiServer hook
|
||||
- Messages: Fixed adjacent user messages appearing when assistant message contains error only
|
||||
- Tools: Fixed missing execution state for approved tool permissions
|
||||
- File Processing: Fixed "no such file" error for non-English filenames in open-mineru
|
||||
- PDF: Fixed mineru PDF validation and 403 errors
|
||||
- Images: Fixed base64 image save issues
|
||||
- Search: Fixed URL context and web search capability
|
||||
- Models: Added verbosity parameter support for GPT-5 models
|
||||
- UI: Improved todo tool status icon visibility and colors
|
||||
- Providers: Fixed api-host for vercel ai-gateway and gitcode update config
|
||||
- AI Providers: Added support for Hugging Face, Mistral, Perplexity, and SophNet
|
||||
- Knowledge Base: OpenMinerU document preprocessor, full-text search in notes, enhanced tool selection
|
||||
- Image & OCR: Intel OVMS painting provider and Intel OpenVINO (NPU) OCR support
|
||||
- MCP Management: Redesigned interface with dual-column layout for easier management
|
||||
- Languages: Added German language support
|
||||
|
||||
⚡ Improvements:
|
||||
- SDK: Updated Google and OpenAI SDKs with new features
|
||||
- UI: Simplified knowledge base creation modal and agent creation form
|
||||
- Tools: Replaced renderToolContent function with ToolContent component
|
||||
- Architecture: Namespace tool call IDs with session ID to prevent conflicts
|
||||
- Config: AI SDK configuration refactoring
|
||||
- Upgraded to Electron 38.7.0
|
||||
- Enhanced system shutdown handling and automatic update checks
|
||||
- Improved proxy bypass rules
|
||||
|
||||
🐛 Important Bug Fixes:
|
||||
- Fixed streaming response issues across multiple AI providers
|
||||
- Fixed session list scrolling problems
|
||||
- Fixed knowledge base deletion errors
|
||||
|
||||
<!--LANG:zh-CN-->
|
||||
v1.7.0-rc.2 新特性
|
||||
v1.7.0-rc.1 新特性
|
||||
|
||||
🎉 重大更新:AI Agent 智能体系统
|
||||
- 创建和管理专属 AI Agent,配置专用工具和权限
|
||||
- 独立的 Agent 会话,使用 SQLite 持久化存储,与普通聊天分离
|
||||
- 实时工具审批系统 - 动态审查和批准 Agent 操作
|
||||
- MCP(模型上下文协议)集成,连接外部工具
|
||||
- 支持斜杠命令快速交互
|
||||
- 兼容 OpenAI 的 REST API 访问
|
||||
|
||||
✨ 新功能:
|
||||
- AI 模型:新增 Gemini 3、Gemini 3 Pro 图像预览支持,以及 GPT-5.1
|
||||
- 导入:ChatGPT 对话导入功能
|
||||
- Agent:Windows Agent 的 Git Bash 检测和要求检查
|
||||
- 搜索:支持本地语言 emoji 搜索(CLDR 数据格式)
|
||||
- 提供商:cherryin provider 的端点类型支持
|
||||
- 调试:启用本地崩溃 mini dump 文件,方便诊断
|
||||
|
||||
🐛 重要修复:
|
||||
- 错误处理:改进 AiSdkToChunkAdapter 的错误显示
|
||||
- 数据库:优化 DatabaseManager 并修复 libsql 崩溃问题
|
||||
- 内存:修复 useApiServer hook 中的 EventEmitter 内存泄漏
|
||||
- 消息:修复当助手消息仅包含错误时相邻用户消息出现的问题
|
||||
- 工具:修复批准工具权限缺少执行状态的问题
|
||||
- 文件处理:修复 open-mineru 处理非英文文件名时的"无此文件"错误
|
||||
- PDF:修复 mineru PDF 验证和 403 错误
|
||||
- 图片:修复 base64 图片保存问题
|
||||
- 搜索:修复 URL 上下文和网络搜索功能
|
||||
- 模型:为 GPT-5 模型添加 verbosity 参数支持
|
||||
- UI:改进 todo 工具状态图标可见性和颜色
|
||||
- 提供商:修复 vercel ai-gateway 和 gitcode 更新配置的 api-host
|
||||
- AI 提供商:新增 Hugging Face、Mistral、Perplexity 和 SophNet 支持
|
||||
- 知识库:OpenMinerU 文档预处理器、笔记全文搜索、增强的工具选择
|
||||
- 图像与 OCR:Intel OVMS 绘图提供商和 Intel OpenVINO (NPU) OCR 支持
|
||||
- MCP 管理:重构管理界面,采用双列布局,更加方便管理
|
||||
- 语言:新增德语支持
|
||||
|
||||
⚡ 改进:
|
||||
- SDK:更新 Google 和 OpenAI SDK,新增功能和修复
|
||||
- UI:简化知识库创建模态框和 agent 创建表单
|
||||
- 工具:用 ToolContent 组件替换 renderToolContent 函数,提升可读性
|
||||
- 架构:用会话 ID 命名工具调用 ID 以防止冲突
|
||||
- 配置:AI SDK 配置重构
|
||||
- 升级到 Electron 38.7.0
|
||||
- 增强的系统关机处理和自动更新检查
|
||||
- 改进的代理绕过规则
|
||||
|
||||
🐛 重要修复:
|
||||
- 修复多个 AI 提供商的流式响应问题
|
||||
- 修复会话列表滚动问题
|
||||
- 修复知识库删除错误
|
||||
<!--LANG:END-->
|
||||
|
||||
@@ -134,7 +134,7 @@ export default defineConfig({
|
||||
selectionToolbar: resolve(__dirname, 'src/renderer/selectionToolbar.html'),
|
||||
selectionAction: resolve(__dirname, 'src/renderer/selectionAction.html'),
|
||||
traceWindow: resolve(__dirname, 'src/renderer/traceWindow.html'),
|
||||
migrationV2: resolve(__dirname, 'src/renderer/migrationV2.html')
|
||||
dataRefactorMigrate: resolve(__dirname, 'src/renderer/dataRefactorMigrate.html')
|
||||
},
|
||||
onwarn(warning, warn) {
|
||||
if (warning.code === 'COMMONJS_VARIABLE_IN_ESM') return
|
||||
|
||||
@@ -140,7 +140,7 @@ export default defineConfig([
|
||||
{
|
||||
// Component Rules - prevent importing antd components when migration completed
|
||||
files: ['**/*.{ts,tsx,js,jsx}'],
|
||||
ignores: [],
|
||||
ignores: ['src/renderer/src/windows/dataRefactorTest/**/*.{ts,tsx}'],
|
||||
rules: {
|
||||
// 'no-restricted-imports': [
|
||||
// 'error',
|
||||
|
||||
81
package.json
81
package.json
@@ -77,10 +77,9 @@
|
||||
"prepare": "git config blame.ignoreRevsFile .git-blame-ignore-revs && husky",
|
||||
"claude": "dotenv -e .env -- claude",
|
||||
"migrations:generate": "drizzle-kit generate --config ./migrations/sqlite-drizzle.config.ts",
|
||||
"release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --preid alpha --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
|
||||
"release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --preid beta --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
|
||||
"release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core build && yarn workspace @cherrystudio/ai-core npm publish --access public",
|
||||
"release:ai-sdk-provider": "yarn workspace @cherrystudio/ai-sdk-provider version patch --immediate && yarn workspace @cherrystudio/ai-sdk-provider build && yarn workspace @cherrystudio/ai-sdk-provider npm publish --access public"
|
||||
"release:aicore:alpha": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag alpha --access public",
|
||||
"release:aicore:beta": "yarn workspace @cherrystudio/ai-core version prerelease --immediate && yarn workspace @cherrystudio/ai-core npm publish --tag beta --access public",
|
||||
"release:aicore": "yarn workspace @cherrystudio/ai-core version patch --immediate && yarn workspace @cherrystudio/ai-core npm publish --access public"
|
||||
},
|
||||
"dependencies": {
|
||||
"@anthropic-ai/claude-agent-sdk": "patch:@anthropic-ai/claude-agent-sdk@npm%3A0.1.30#~/.yarn/patches/@anthropic-ai-claude-agent-sdk-npm-0.1.30-b50a299674.patch",
|
||||
@@ -89,7 +88,6 @@
|
||||
"@napi-rs/system-ocr": "patch:@napi-rs/system-ocr@npm%3A1.0.2#~/.yarn/patches/@napi-rs-system-ocr-npm-1.0.2-59e7a78e8b.patch",
|
||||
"@paymoapp/electron-shutdown-handler": "^1.1.2",
|
||||
"@strongtz/win32-arm64-msvc": "^0.4.7",
|
||||
"emoji-picker-element-data": "^1",
|
||||
"express": "^5.1.0",
|
||||
"font-list": "^2.0.0",
|
||||
"graceful-fs": "^4.2.11",
|
||||
@@ -103,7 +101,6 @@
|
||||
"selection-hook": "^1.0.12",
|
||||
"sharp": "^0.34.3",
|
||||
"socket.io": "^4.8.1",
|
||||
"stream-json": "^1.9.1",
|
||||
"swagger-jsdoc": "^6.2.8",
|
||||
"swagger-ui-express": "^5.0.1",
|
||||
"tesseract.js": "patch:tesseract.js@npm%3A6.0.1#~/.yarn/patches/tesseract.js-npm-6.0.1-2562a7e46d.patch",
|
||||
@@ -113,17 +110,13 @@
|
||||
"@agentic/exa": "^7.3.3",
|
||||
"@agentic/searxng": "^7.3.3",
|
||||
"@agentic/tavily": "^7.3.3",
|
||||
"@ai-sdk/amazon-bedrock": "^3.0.56",
|
||||
"@ai-sdk/anthropic": "^2.0.45",
|
||||
"@ai-sdk/amazon-bedrock": "^3.0.53",
|
||||
"@ai-sdk/cerebras": "^1.0.31",
|
||||
"@ai-sdk/gateway": "^2.0.13",
|
||||
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.40#~/.yarn/patches/@ai-sdk-google-npm-2.0.40-47e0eeee83.patch",
|
||||
"@ai-sdk/google-vertex": "^3.0.72",
|
||||
"@ai-sdk/huggingface": "^0.0.10",
|
||||
"@ai-sdk/mistral": "^2.0.24",
|
||||
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.71#~/.yarn/patches/@ai-sdk-openai-npm-2.0.71-a88ef00525.patch",
|
||||
"@ai-sdk/perplexity": "^2.0.20",
|
||||
"@ai-sdk/test-server": "^0.0.1",
|
||||
"@ai-sdk/gateway": "^2.0.9",
|
||||
"@ai-sdk/google-vertex": "^3.0.62",
|
||||
"@ai-sdk/huggingface": "patch:@ai-sdk/huggingface@npm%3A0.0.8#~/.yarn/patches/@ai-sdk-huggingface-npm-0.0.8-d4d0aaac93.patch",
|
||||
"@ai-sdk/mistral": "^2.0.23",
|
||||
"@ai-sdk/perplexity": "^2.0.17",
|
||||
"@ant-design/v5-patch-for-react-19": "^1.0.3",
|
||||
"@anthropic-ai/sdk": "^0.41.0",
|
||||
"@anthropic-ai/vertex-sdk": "patch:@anthropic-ai/vertex-sdk@npm%3A0.11.4#~/.yarn/patches/@anthropic-ai-vertex-sdk-npm-0.11.4-c19cb41edb.patch",
|
||||
@@ -131,7 +124,7 @@
|
||||
"@aws-sdk/client-bedrock-runtime": "^3.910.0",
|
||||
"@aws-sdk/client-s3": "^3.910.0",
|
||||
"@biomejs/biome": "2.2.4",
|
||||
"@cherrystudio/ai-core": "workspace:^1.0.9",
|
||||
"@cherrystudio/ai-core": "workspace:^1.0.0-alpha.18",
|
||||
"@cherrystudio/embedjs": "^0.1.31",
|
||||
"@cherrystudio/embedjs-libsql": "^0.1.31",
|
||||
"@cherrystudio/embedjs-loader-csv": "^0.1.31",
|
||||
@@ -145,7 +138,7 @@
|
||||
"@cherrystudio/embedjs-ollama": "^0.1.31",
|
||||
"@cherrystudio/embedjs-openai": "^0.1.31",
|
||||
"@cherrystudio/extension-table-plus": "workspace:^",
|
||||
"@cherrystudio/openai": "^6.9.0",
|
||||
"@cherrystudio/openai": "^6.5.0",
|
||||
"@cherrystudio/ui": "workspace:*",
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
"@dnd-kit/modifiers": "^9.0.0",
|
||||
@@ -168,14 +161,14 @@
|
||||
"@modelcontextprotocol/sdk": "^1.17.5",
|
||||
"@mozilla/readability": "^0.6.0",
|
||||
"@notionhq/client": "^2.2.15",
|
||||
"@openrouter/ai-sdk-provider": "^1.2.5",
|
||||
"@openrouter/ai-sdk-provider": "^1.2.0",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@opentelemetry/core": "2.0.0",
|
||||
"@opentelemetry/exporter-trace-otlp-http": "^0.200.0",
|
||||
"@opentelemetry/sdk-trace-base": "^2.0.0",
|
||||
"@opentelemetry/sdk-trace-node": "^2.0.0",
|
||||
"@opentelemetry/sdk-trace-web": "^2.0.0",
|
||||
"@opeoginni/github-copilot-openai-compatible": "0.1.21",
|
||||
"@opeoginni/github-copilot-openai-compatible": "0.1.19",
|
||||
"@playwright/test": "^1.52.0",
|
||||
"@radix-ui/react-context-menu": "^2.2.16",
|
||||
"@reduxjs/toolkit": "^2.2.5",
|
||||
@@ -188,22 +181,26 @@
|
||||
"@testing-library/jest-dom": "^6.6.3",
|
||||
"@testing-library/react": "^16.3.0",
|
||||
"@testing-library/user-event": "^14.6.1",
|
||||
"@tiptap/extension-collaboration": "^3.2.0",
|
||||
"@tiptap/extension-drag-handle": "patch:@tiptap/extension-drag-handle@npm%3A3.2.0#~/.yarn/patches/@tiptap-extension-drag-handle-npm-3.2.0-5a9ebff7c9.patch",
|
||||
"@tiptap/extension-drag-handle-react": "^3.2.0",
|
||||
"@tiptap/extension-image": "^3.2.0",
|
||||
"@tiptap/extension-list": "^3.2.0",
|
||||
"@tiptap/extension-mathematics": "^3.2.0",
|
||||
"@tiptap/extension-mention": "^3.2.0",
|
||||
"@tiptap/extension-node-range": "^3.2.0",
|
||||
"@tiptap/extension-table-of-contents": "^3.2.0",
|
||||
"@tiptap/extension-typography": "^3.2.0",
|
||||
"@tiptap/extension-underline": "^3.2.0",
|
||||
"@tiptap/pm": "^3.2.0",
|
||||
"@tiptap/react": "^3.2.0",
|
||||
"@tiptap/starter-kit": "^3.2.0",
|
||||
"@tiptap/suggestion": "^3.2.0",
|
||||
"@tiptap/y-tiptap": "^3.0.0",
|
||||
"@tiptap/extension-code": "patch:@tiptap/extension-code@npm%3A3.10.7#~/.yarn/patches/@tiptap-extension-code-npm-3.10.7-6d3deb3e10.patch",
|
||||
"@tiptap/extension-code-block": "^3.10.7",
|
||||
"@tiptap/extension-collaboration": "^3.10.7",
|
||||
"@tiptap/extension-drag-handle": "patch:@tiptap/extension-drag-handle@npm%3A3.10.7#~/.yarn/patches/@tiptap-extension-drag-handle-npm-3.10.7-332b0175fc.patch",
|
||||
"@tiptap/extension-drag-handle-react": "^3.10.7",
|
||||
"@tiptap/extension-image": "^3.10.7",
|
||||
"@tiptap/extension-link": "^3.10.7",
|
||||
"@tiptap/extension-list": "^3.10.7",
|
||||
"@tiptap/extension-mathematics": "^3.10.7",
|
||||
"@tiptap/extension-mention": "^3.10.7",
|
||||
"@tiptap/extension-node-range": "^3.10.7",
|
||||
"@tiptap/extension-table-of-contents": "patch:@tiptap/extension-table-of-contents@npm%3A3.10.7#~/.yarn/patches/@tiptap-extension-table-of-contents-npm-3.10.7-4852787461.patch",
|
||||
"@tiptap/extension-typography": "^3.10.7",
|
||||
"@tiptap/extension-underline": "^3.10.7",
|
||||
"@tiptap/markdown": "^3.10.7",
|
||||
"@tiptap/pm": "^3.10.7",
|
||||
"@tiptap/react": "^3.10.7",
|
||||
"@tiptap/starter-kit": "^3.10.7",
|
||||
"@tiptap/suggestion": "^3.10.7",
|
||||
"@tiptap/y-tiptap": "^3.0.1",
|
||||
"@truto/turndown-plugin-gfm": "^1.0.2",
|
||||
"@tryfabric/martian": "^1.2.4",
|
||||
"@types/cli-progress": "^3",
|
||||
@@ -226,7 +223,6 @@
|
||||
"@types/react-infinite-scroll-component": "^5.0.0",
|
||||
"@types/react-transition-group": "^4.4.12",
|
||||
"@types/react-window": "^1",
|
||||
"@types/stream-json": "^1",
|
||||
"@types/swagger-jsdoc": "^6",
|
||||
"@types/swagger-ui-express": "^4.1.8",
|
||||
"@types/tinycolor2": "^1",
|
||||
@@ -245,7 +241,7 @@
|
||||
"@viz-js/lang-dot": "^1.0.5",
|
||||
"@viz-js/viz": "^3.14.0",
|
||||
"@xyflow/react": "^12.4.4",
|
||||
"ai": "^5.0.98",
|
||||
"ai": "^5.0.90",
|
||||
"antd": "patch:antd@npm%3A5.27.0#~/.yarn/patches/antd-npm-5.27.0-aa91c36546.patch",
|
||||
"archiver": "^7.0.1",
|
||||
"async-mutex": "^0.5.0",
|
||||
@@ -324,6 +320,7 @@
|
||||
"oxlint": "^1.22.0",
|
||||
"oxlint-tsgolint": "^0.2.0",
|
||||
"p-queue": "^8.1.0",
|
||||
"patch-package": "^8.0.1",
|
||||
"pdf-lib": "^1.17.1",
|
||||
"pdf-parse": "^1.1.1",
|
||||
"playwright": "^1.55.1",
|
||||
@@ -418,11 +415,9 @@
|
||||
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@langchain/openai@npm:>=0.2.0 <0.7.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
|
||||
"@ai-sdk/openai@npm:2.0.64": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
|
||||
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.71#~/.yarn/patches/@ai-sdk-openai-npm-2.0.71-a88ef00525.patch",
|
||||
"@ai-sdk/google@npm:2.0.40": "patch:@ai-sdk/google@npm%3A2.0.40#~/.yarn/patches/@ai-sdk-google-npm-2.0.40-47e0eeee83.patch",
|
||||
"@ai-sdk/openai@npm:2.0.71": "patch:@ai-sdk/openai@npm%3A2.0.71#~/.yarn/patches/@ai-sdk-openai-npm-2.0.71-a88ef00525.patch",
|
||||
"@ai-sdk/openai-compatible@npm:1.0.27": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch",
|
||||
"@ai-sdk/openai-compatible@npm:^1.0.19": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch"
|
||||
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
|
||||
"@ai-sdk/google@npm:2.0.31": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch",
|
||||
"@tiptap/extension-code@npm:^3.10.7": "patch:@tiptap/extension-code@npm%3A3.10.7#~/.yarn/patches/@tiptap-extension-code-npm-3.10.7-6d3deb3e10.patch"
|
||||
},
|
||||
"packageManager": "yarn@4.9.1",
|
||||
"lint-staged": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@cherrystudio/ai-sdk-provider",
|
||||
"version": "0.1.3",
|
||||
"version": "0.1.0",
|
||||
"description": "Cherry Studio AI SDK provider bundle with CherryIN routing.",
|
||||
"keywords": [
|
||||
"ai-sdk",
|
||||
@@ -42,7 +42,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/provider": "^2.0.0",
|
||||
"@ai-sdk/provider-utils": "^3.0.17"
|
||||
"@ai-sdk/provider-utils": "^3.0.12"
|
||||
},
|
||||
"devDependencies": {
|
||||
"tsdown": "^0.13.3",
|
||||
|
||||
@@ -67,10 +67,6 @@ export interface CherryInProviderSettings {
|
||||
* Optional static headers applied to every request.
|
||||
*/
|
||||
headers?: HeadersInput
|
||||
/**
|
||||
* Optional endpoint type to distinguish different endpoint behaviors.
|
||||
*/
|
||||
endpointType?: 'openai' | 'openai-response' | 'anthropic' | 'gemini' | 'image-generation' | 'jina-rerank'
|
||||
}
|
||||
|
||||
export interface CherryInProvider extends ProviderV2 {
|
||||
@@ -155,8 +151,7 @@ export const createCherryIn = (options: CherryInProviderSettings = {}): CherryIn
|
||||
baseURL = DEFAULT_CHERRYIN_BASE_URL,
|
||||
anthropicBaseURL = DEFAULT_CHERRYIN_ANTHROPIC_BASE_URL,
|
||||
geminiBaseURL = DEFAULT_CHERRYIN_GEMINI_BASE_URL,
|
||||
fetch,
|
||||
endpointType
|
||||
fetch
|
||||
} = options
|
||||
|
||||
const getJsonHeaders = createJsonHeadersGetter(options)
|
||||
@@ -210,7 +205,7 @@ export const createCherryIn = (options: CherryInProviderSettings = {}): CherryIn
|
||||
fetch
|
||||
})
|
||||
|
||||
const createChatModelByModelId = (modelId: string, settings: OpenAIProviderSettings = {}) => {
|
||||
const createChatModel = (modelId: string, settings: OpenAIProviderSettings = {}) => {
|
||||
if (isAnthropicModel(modelId)) {
|
||||
return createAnthropicModel(modelId)
|
||||
}
|
||||
@@ -228,29 +223,6 @@ export const createCherryIn = (options: CherryInProviderSettings = {}): CherryIn
|
||||
})
|
||||
}
|
||||
|
||||
const createChatModel = (modelId: string, settings: OpenAIProviderSettings = {}) => {
|
||||
if (!endpointType) return createChatModelByModelId(modelId, settings)
|
||||
switch (endpointType) {
|
||||
case 'anthropic':
|
||||
return createAnthropicModel(modelId)
|
||||
case 'gemini':
|
||||
return createGeminiModel(modelId)
|
||||
case 'openai':
|
||||
return createOpenAIChatModel(modelId)
|
||||
case 'openai-response':
|
||||
default:
|
||||
return new OpenAIResponsesLanguageModel(modelId, {
|
||||
provider: `${CHERRYIN_PROVIDER_NAME}.openai`,
|
||||
url,
|
||||
headers: () => ({
|
||||
...getJsonHeaders(),
|
||||
...settings.headers
|
||||
}),
|
||||
fetch
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const createCompletionModel = (modelId: string, settings: OpenAIProviderSettings = {}) =>
|
||||
new OpenAICompletionLanguageModel(modelId, {
|
||||
provider: `${CHERRYIN_PROVIDER_NAME}.completion`,
|
||||
|
||||
@@ -71,7 +71,7 @@ Cherry Studio AI Core 是一个基于 Vercel AI SDK 的统一 AI Provider 接口
|
||||
## 安装
|
||||
|
||||
```bash
|
||||
npm install @cherrystudio/ai-core ai @ai-sdk/google @ai-sdk/openai
|
||||
npm install @cherrystudio/ai-core ai
|
||||
```
|
||||
|
||||
### React Native
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@cherrystudio/ai-core",
|
||||
"version": "1.0.9",
|
||||
"version": "1.0.1",
|
||||
"description": "Cherry Studio AI Core - Unified AI Provider Interface Based on Vercel AI SDK",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.mjs",
|
||||
@@ -33,19 +33,19 @@
|
||||
},
|
||||
"homepage": "https://github.com/CherryHQ/cherry-studio#readme",
|
||||
"peerDependencies": {
|
||||
"@ai-sdk/google": "^2.0.36",
|
||||
"@ai-sdk/openai": "^2.0.64",
|
||||
"@cherrystudio/ai-sdk-provider": "^0.1.3",
|
||||
"ai": "^5.0.26"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/anthropic": "^2.0.45",
|
||||
"@ai-sdk/azure": "^2.0.73",
|
||||
"@ai-sdk/deepseek": "^1.0.29",
|
||||
"@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch",
|
||||
"@ai-sdk/anthropic": "^2.0.43",
|
||||
"@ai-sdk/azure": "^2.0.66",
|
||||
"@ai-sdk/deepseek": "^1.0.27",
|
||||
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A2.0.31#~/.yarn/patches/@ai-sdk-google-npm-2.0.31-b0de047210.patch",
|
||||
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A2.0.64#~/.yarn/patches/@ai-sdk-openai-npm-2.0.64-48f99f5bf3.patch",
|
||||
"@ai-sdk/openai-compatible": "^1.0.26",
|
||||
"@ai-sdk/provider": "^2.0.0",
|
||||
"@ai-sdk/provider-utils": "^3.0.17",
|
||||
"@ai-sdk/xai": "^2.0.34",
|
||||
"@ai-sdk/provider-utils": "^3.0.16",
|
||||
"@ai-sdk/xai": "^2.0.31",
|
||||
"@cherrystudio/ai-sdk-provider": "workspace:*",
|
||||
"zod": "^4.1.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -1,180 +0,0 @@
|
||||
/**
|
||||
* Mock Provider Instances
|
||||
* Provides mock implementations for all supported AI providers
|
||||
*/
|
||||
|
||||
import type { ImageModelV2, LanguageModelV2 } from '@ai-sdk/provider'
|
||||
import { vi } from 'vitest'
|
||||
|
||||
/**
|
||||
* Creates a mock language model with customizable behavior
|
||||
*/
|
||||
export function createMockLanguageModel(overrides?: Partial<LanguageModelV2>): LanguageModelV2 {
|
||||
return {
|
||||
specificationVersion: 'v1',
|
||||
provider: 'mock-provider',
|
||||
modelId: 'mock-model',
|
||||
defaultObjectGenerationMode: 'tool',
|
||||
|
||||
doGenerate: vi.fn().mockResolvedValue({
|
||||
text: 'Mock response text',
|
||||
finishReason: 'stop',
|
||||
usage: {
|
||||
promptTokens: 10,
|
||||
completionTokens: 20,
|
||||
totalTokens: 30
|
||||
},
|
||||
rawCall: { rawPrompt: null, rawSettings: {} },
|
||||
rawResponse: { headers: {} },
|
||||
warnings: []
|
||||
}),
|
||||
|
||||
doStream: vi.fn().mockReturnValue({
|
||||
stream: (async function* () {
|
||||
yield {
|
||||
type: 'text-delta',
|
||||
textDelta: 'Mock '
|
||||
}
|
||||
yield {
|
||||
type: 'text-delta',
|
||||
textDelta: 'streaming '
|
||||
}
|
||||
yield {
|
||||
type: 'text-delta',
|
||||
textDelta: 'response'
|
||||
}
|
||||
yield {
|
||||
type: 'finish',
|
||||
finishReason: 'stop',
|
||||
usage: {
|
||||
promptTokens: 10,
|
||||
completionTokens: 15,
|
||||
totalTokens: 25
|
||||
}
|
||||
}
|
||||
})(),
|
||||
rawCall: { rawPrompt: null, rawSettings: {} },
|
||||
rawResponse: { headers: {} },
|
||||
warnings: []
|
||||
}),
|
||||
|
||||
...overrides
|
||||
} as LanguageModelV2
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock image model with customizable behavior
|
||||
*/
|
||||
export function createMockImageModel(overrides?: Partial<ImageModelV2>): ImageModelV2 {
|
||||
return {
|
||||
specificationVersion: 'v2',
|
||||
provider: 'mock-provider',
|
||||
modelId: 'mock-image-model',
|
||||
|
||||
doGenerate: vi.fn().mockResolvedValue({
|
||||
images: [
|
||||
{
|
||||
base64: 'mock-base64-image-data',
|
||||
uint8Array: new Uint8Array([1, 2, 3, 4, 5]),
|
||||
mimeType: 'image/png'
|
||||
}
|
||||
],
|
||||
warnings: []
|
||||
}),
|
||||
|
||||
...overrides
|
||||
} as ImageModelV2
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock provider configurations for testing
|
||||
*/
|
||||
export const mockProviderConfigs = {
|
||||
openai: {
|
||||
apiKey: 'sk-test-openai-key-123456789',
|
||||
baseURL: 'https://api.openai.com/v1',
|
||||
organization: 'test-org'
|
||||
},
|
||||
|
||||
anthropic: {
|
||||
apiKey: 'sk-ant-test-key-123456789',
|
||||
baseURL: 'https://api.anthropic.com'
|
||||
},
|
||||
|
||||
google: {
|
||||
apiKey: 'test-google-api-key-123456789',
|
||||
baseURL: 'https://generativelanguage.googleapis.com/v1'
|
||||
},
|
||||
|
||||
xai: {
|
||||
apiKey: 'xai-test-key-123456789',
|
||||
baseURL: 'https://api.x.ai/v1'
|
||||
},
|
||||
|
||||
azure: {
|
||||
apiKey: 'test-azure-key-123456789',
|
||||
resourceName: 'test-resource',
|
||||
deployment: 'test-deployment'
|
||||
},
|
||||
|
||||
deepseek: {
|
||||
apiKey: 'sk-test-deepseek-key-123456789',
|
||||
baseURL: 'https://api.deepseek.com/v1'
|
||||
},
|
||||
|
||||
openrouter: {
|
||||
apiKey: 'sk-or-test-key-123456789',
|
||||
baseURL: 'https://openrouter.ai/api/v1'
|
||||
},
|
||||
|
||||
huggingface: {
|
||||
apiKey: 'hf_test_key_123456789',
|
||||
baseURL: 'https://api-inference.huggingface.co'
|
||||
},
|
||||
|
||||
'openai-compatible': {
|
||||
apiKey: 'test-compatible-key-123456789',
|
||||
baseURL: 'https://api.example.com/v1',
|
||||
name: 'test-provider'
|
||||
},
|
||||
|
||||
'openai-chat': {
|
||||
apiKey: 'sk-test-chat-key-123456789',
|
||||
baseURL: 'https://api.openai.com/v1'
|
||||
}
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Mock provider instances for testing
|
||||
*/
|
||||
export const mockProviderInstances = {
|
||||
openai: {
|
||||
name: 'openai-mock',
|
||||
languageModel: createMockLanguageModel({ provider: 'openai', modelId: 'gpt-4' }),
|
||||
imageModel: createMockImageModel({ provider: 'openai', modelId: 'dall-e-3' })
|
||||
},
|
||||
|
||||
anthropic: {
|
||||
name: 'anthropic-mock',
|
||||
languageModel: createMockLanguageModel({ provider: 'anthropic', modelId: 'claude-3-5-sonnet-20241022' })
|
||||
},
|
||||
|
||||
google: {
|
||||
name: 'google-mock',
|
||||
languageModel: createMockLanguageModel({ provider: 'google', modelId: 'gemini-2.0-flash-exp' }),
|
||||
imageModel: createMockImageModel({ provider: 'google', modelId: 'imagen-3.0-generate-001' })
|
||||
},
|
||||
|
||||
xai: {
|
||||
name: 'xai-mock',
|
||||
languageModel: createMockLanguageModel({ provider: 'xai', modelId: 'grok-2-latest' }),
|
||||
imageModel: createMockImageModel({ provider: 'xai', modelId: 'grok-2-image-latest' })
|
||||
},
|
||||
|
||||
deepseek: {
|
||||
name: 'deepseek-mock',
|
||||
languageModel: createMockLanguageModel({ provider: 'deepseek', modelId: 'deepseek-chat' })
|
||||
}
|
||||
}
|
||||
|
||||
export type ProviderId = keyof typeof mockProviderConfigs
|
||||
@@ -1,331 +0,0 @@
|
||||
/**
|
||||
* Mock Responses
|
||||
* Provides realistic mock responses for all provider types
|
||||
*/
|
||||
|
||||
import { jsonSchema, type ModelMessage, type Tool } from 'ai'
|
||||
|
||||
/**
|
||||
* Standard test messages for all scenarios
|
||||
*/
|
||||
export const testMessages = {
|
||||
simple: [{ role: 'user' as const, content: 'Hello, how are you?' }],
|
||||
|
||||
conversation: [
|
||||
{ role: 'user' as const, content: 'What is the capital of France?' },
|
||||
{ role: 'assistant' as const, content: 'The capital of France is Paris.' },
|
||||
{ role: 'user' as const, content: 'What is its population?' }
|
||||
],
|
||||
|
||||
withSystem: [
|
||||
{ role: 'system' as const, content: 'You are a helpful assistant that provides concise answers.' },
|
||||
{ role: 'user' as const, content: 'Explain quantum computing in one sentence.' }
|
||||
],
|
||||
|
||||
withImages: [
|
||||
{
|
||||
role: 'user' as const,
|
||||
content: [
|
||||
{ type: 'text' as const, text: 'What is in this image?' },
|
||||
{
|
||||
type: 'image' as const,
|
||||
image:
|
||||
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=='
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
|
||||
toolUse: [{ role: 'user' as const, content: 'What is the weather in San Francisco?' }],
|
||||
|
||||
multiTurn: [
|
||||
{ role: 'user' as const, content: 'Can you help me with a math problem?' },
|
||||
{ role: 'assistant' as const, content: 'Of course! What math problem would you like help with?' },
|
||||
{ role: 'user' as const, content: 'What is 15 * 23?' },
|
||||
{ role: 'assistant' as const, content: '15 * 23 = 345' },
|
||||
{ role: 'user' as const, content: 'Now divide that by 5' }
|
||||
]
|
||||
} satisfies Record<string, ModelMessage[]>
|
||||
|
||||
/**
|
||||
* Standard test tools for tool calling scenarios
|
||||
*/
|
||||
export const testTools: Record<string, Tool> = {
|
||||
getWeather: {
|
||||
description: 'Get the current weather in a given location',
|
||||
inputSchema: jsonSchema({
|
||||
type: 'object',
|
||||
properties: {
|
||||
location: {
|
||||
type: 'string',
|
||||
description: 'The city and state, e.g. San Francisco, CA'
|
||||
},
|
||||
unit: {
|
||||
type: 'string',
|
||||
enum: ['celsius', 'fahrenheit'],
|
||||
description: 'The temperature unit to use'
|
||||
}
|
||||
},
|
||||
required: ['location']
|
||||
}),
|
||||
execute: async ({ location, unit = 'fahrenheit' }) => {
|
||||
return {
|
||||
location,
|
||||
temperature: unit === 'celsius' ? 22 : 72,
|
||||
unit,
|
||||
condition: 'sunny'
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
calculate: {
|
||||
description: 'Perform a mathematical calculation',
|
||||
inputSchema: jsonSchema({
|
||||
type: 'object',
|
||||
properties: {
|
||||
operation: {
|
||||
type: 'string',
|
||||
enum: ['add', 'subtract', 'multiply', 'divide'],
|
||||
description: 'The operation to perform'
|
||||
},
|
||||
a: {
|
||||
type: 'number',
|
||||
description: 'The first number'
|
||||
},
|
||||
b: {
|
||||
type: 'number',
|
||||
description: 'The second number'
|
||||
}
|
||||
},
|
||||
required: ['operation', 'a', 'b']
|
||||
}),
|
||||
execute: async ({ operation, a, b }) => {
|
||||
const operations = {
|
||||
add: (x: number, y: number) => x + y,
|
||||
subtract: (x: number, y: number) => x - y,
|
||||
multiply: (x: number, y: number) => x * y,
|
||||
divide: (x: number, y: number) => x / y
|
||||
}
|
||||
return { result: operations[operation as keyof typeof operations](a, b) }
|
||||
}
|
||||
},
|
||||
|
||||
searchDatabase: {
|
||||
description: 'Search for information in a database',
|
||||
inputSchema: jsonSchema({
|
||||
type: 'object',
|
||||
properties: {
|
||||
query: {
|
||||
type: 'string',
|
||||
description: 'The search query'
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
description: 'Maximum number of results to return',
|
||||
default: 10
|
||||
}
|
||||
},
|
||||
required: ['query']
|
||||
}),
|
||||
execute: async ({ query, limit = 10 }) => {
|
||||
return {
|
||||
results: [
|
||||
{ id: 1, title: `Result 1 for ${query}`, relevance: 0.95 },
|
||||
{ id: 2, title: `Result 2 for ${query}`, relevance: 0.87 }
|
||||
].slice(0, limit)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock streaming chunks for different providers
|
||||
*/
|
||||
export const mockStreamingChunks = {
|
||||
text: [
|
||||
{ type: 'text-delta' as const, textDelta: 'Hello' },
|
||||
{ type: 'text-delta' as const, textDelta: ', ' },
|
||||
{ type: 'text-delta' as const, textDelta: 'this ' },
|
||||
{ type: 'text-delta' as const, textDelta: 'is ' },
|
||||
{ type: 'text-delta' as const, textDelta: 'a ' },
|
||||
{ type: 'text-delta' as const, textDelta: 'test.' }
|
||||
],
|
||||
|
||||
withToolCall: [
|
||||
{ type: 'text-delta' as const, textDelta: 'Let me check the weather for you.' },
|
||||
{
|
||||
type: 'tool-call-delta' as const,
|
||||
toolCallType: 'function' as const,
|
||||
toolCallId: 'call_123',
|
||||
toolName: 'getWeather',
|
||||
argsTextDelta: '{"location":'
|
||||
},
|
||||
{
|
||||
type: 'tool-call-delta' as const,
|
||||
toolCallType: 'function' as const,
|
||||
toolCallId: 'call_123',
|
||||
toolName: 'getWeather',
|
||||
argsTextDelta: ' "San Francisco, CA"}'
|
||||
},
|
||||
{
|
||||
type: 'tool-call' as const,
|
||||
toolCallType: 'function' as const,
|
||||
toolCallId: 'call_123',
|
||||
toolName: 'getWeather',
|
||||
args: { location: 'San Francisco, CA' }
|
||||
}
|
||||
],
|
||||
|
||||
withFinish: [
|
||||
{ type: 'text-delta' as const, textDelta: 'Complete response.' },
|
||||
{
|
||||
type: 'finish' as const,
|
||||
finishReason: 'stop' as const,
|
||||
usage: {
|
||||
promptTokens: 10,
|
||||
completionTokens: 5,
|
||||
totalTokens: 15
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock complete responses for non-streaming scenarios
|
||||
*/
|
||||
export const mockCompleteResponses = {
|
||||
simple: {
|
||||
text: 'This is a simple response.',
|
||||
finishReason: 'stop' as const,
|
||||
usage: {
|
||||
promptTokens: 15,
|
||||
completionTokens: 8,
|
||||
totalTokens: 23
|
||||
}
|
||||
},
|
||||
|
||||
withToolCalls: {
|
||||
text: 'I will check the weather for you.',
|
||||
toolCalls: [
|
||||
{
|
||||
toolCallId: 'call_456',
|
||||
toolName: 'getWeather',
|
||||
args: { location: 'New York, NY', unit: 'celsius' }
|
||||
}
|
||||
],
|
||||
finishReason: 'tool-calls' as const,
|
||||
usage: {
|
||||
promptTokens: 25,
|
||||
completionTokens: 12,
|
||||
totalTokens: 37
|
||||
}
|
||||
},
|
||||
|
||||
withWarnings: {
|
||||
text: 'Response with warnings.',
|
||||
finishReason: 'stop' as const,
|
||||
usage: {
|
||||
promptTokens: 10,
|
||||
completionTokens: 5,
|
||||
totalTokens: 15
|
||||
},
|
||||
warnings: [
|
||||
{
|
||||
type: 'unsupported-setting' as const,
|
||||
message: 'Temperature parameter not supported for this model'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock image generation responses
|
||||
*/
|
||||
export const mockImageResponses = {
|
||||
single: {
|
||||
image: {
|
||||
base64: 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==',
|
||||
uint8Array: new Uint8Array([137, 80, 78, 71, 13, 10, 26, 10, 0, 0, 0, 13, 73, 72, 68, 82]),
|
||||
mimeType: 'image/png' as const
|
||||
},
|
||||
warnings: []
|
||||
},
|
||||
|
||||
multiple: {
|
||||
images: [
|
||||
{
|
||||
base64: 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==',
|
||||
uint8Array: new Uint8Array([137, 80, 78, 71]),
|
||||
mimeType: 'image/png' as const
|
||||
},
|
||||
{
|
||||
base64: 'iVBORw0KGgoAAAANSUhEUgAAAAIAAAACCAYAAABytg0kAAAAEklEQVR42mNk+M9QzwAEjDAGACCKAgdZ9zImAAAAAElFTkSuQmCC',
|
||||
uint8Array: new Uint8Array([137, 80, 78, 71]),
|
||||
mimeType: 'image/png' as const
|
||||
}
|
||||
],
|
||||
warnings: []
|
||||
},
|
||||
|
||||
withProviderMetadata: {
|
||||
image: {
|
||||
base64: 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==',
|
||||
uint8Array: new Uint8Array([137, 80, 78, 71]),
|
||||
mimeType: 'image/png' as const
|
||||
},
|
||||
providerMetadata: {
|
||||
openai: {
|
||||
images: [
|
||||
{
|
||||
revisedPrompt: 'A detailed and enhanced version of the original prompt'
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
warnings: []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock error responses
|
||||
*/
|
||||
export const mockErrors = {
|
||||
invalidApiKey: {
|
||||
name: 'APIError',
|
||||
message: 'Invalid API key provided',
|
||||
statusCode: 401
|
||||
},
|
||||
|
||||
rateLimitExceeded: {
|
||||
name: 'RateLimitError',
|
||||
message: 'Rate limit exceeded. Please try again later.',
|
||||
statusCode: 429,
|
||||
headers: {
|
||||
'retry-after': '60'
|
||||
}
|
||||
},
|
||||
|
||||
modelNotFound: {
|
||||
name: 'ModelNotFoundError',
|
||||
message: 'The requested model was not found',
|
||||
statusCode: 404
|
||||
},
|
||||
|
||||
contextLengthExceeded: {
|
||||
name: 'ContextLengthError',
|
||||
message: "This model's maximum context length is 4096 tokens",
|
||||
statusCode: 400
|
||||
},
|
||||
|
||||
timeout: {
|
||||
name: 'TimeoutError',
|
||||
message: 'Request timed out after 30000ms',
|
||||
code: 'ETIMEDOUT'
|
||||
},
|
||||
|
||||
networkError: {
|
||||
name: 'NetworkError',
|
||||
message: 'Network connection failed',
|
||||
code: 'ECONNREFUSED'
|
||||
}
|
||||
}
|
||||
@@ -1,329 +0,0 @@
|
||||
/**
|
||||
* Provider-Specific Test Utilities
|
||||
* Helper functions for testing individual providers with all their parameters
|
||||
*/
|
||||
|
||||
import type { Tool } from 'ai'
|
||||
import { expect } from 'vitest'
|
||||
|
||||
/**
|
||||
* Provider parameter configurations for comprehensive testing
|
||||
*/
|
||||
export const providerParameterMatrix = {
|
||||
openai: {
|
||||
models: ['gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo', 'gpt-4o'],
|
||||
parameters: {
|
||||
temperature: [0, 0.5, 0.7, 1.0, 1.5, 2.0],
|
||||
maxTokens: [100, 500, 1000, 2000, 4000],
|
||||
topP: [0.1, 0.5, 0.9, 1.0],
|
||||
frequencyPenalty: [-2.0, -1.0, 0, 1.0, 2.0],
|
||||
presencePenalty: [-2.0, -1.0, 0, 1.0, 2.0],
|
||||
stop: [undefined, ['stop'], ['STOP', 'END']],
|
||||
seed: [undefined, 12345, 67890],
|
||||
responseFormat: [undefined, { type: 'json_object' as const }],
|
||||
user: [undefined, 'test-user-123']
|
||||
},
|
||||
toolChoice: ['auto', 'required', 'none', { type: 'function' as const, name: 'getWeather' }],
|
||||
parallelToolCalls: [true, false]
|
||||
},
|
||||
|
||||
anthropic: {
|
||||
models: ['claude-3-5-sonnet-20241022', 'claude-3-opus-20240229', 'claude-3-haiku-20240307'],
|
||||
parameters: {
|
||||
temperature: [0, 0.5, 1.0],
|
||||
maxTokens: [100, 1000, 4000, 8000],
|
||||
topP: [0.1, 0.5, 0.9, 1.0],
|
||||
topK: [undefined, 1, 5, 10, 40],
|
||||
stop: [undefined, ['Human:', 'Assistant:']],
|
||||
metadata: [undefined, { userId: 'test-123' }]
|
||||
},
|
||||
toolChoice: ['auto', 'any', { type: 'tool' as const, name: 'getWeather' }]
|
||||
},
|
||||
|
||||
google: {
|
||||
models: ['gemini-2.0-flash-exp', 'gemini-1.5-pro', 'gemini-1.5-flash'],
|
||||
parameters: {
|
||||
temperature: [0, 0.5, 0.9, 1.0],
|
||||
maxTokens: [100, 1000, 2000, 8000],
|
||||
topP: [0.1, 0.5, 0.95, 1.0],
|
||||
topK: [undefined, 1, 16, 40],
|
||||
stopSequences: [undefined, ['END'], ['STOP', 'TERMINATE']]
|
||||
},
|
||||
safetySettings: [
|
||||
undefined,
|
||||
[
|
||||
{ category: 'HARM_CATEGORY_HARASSMENT', threshold: 'BLOCK_MEDIUM_AND_ABOVE' },
|
||||
{ category: 'HARM_CATEGORY_HATE_SPEECH', threshold: 'BLOCK_ONLY_HIGH' }
|
||||
]
|
||||
]
|
||||
},
|
||||
|
||||
xai: {
|
||||
models: ['grok-2-latest', 'grok-2-1212'],
|
||||
parameters: {
|
||||
temperature: [0, 0.5, 1.0, 1.5],
|
||||
maxTokens: [100, 500, 2000, 4000],
|
||||
topP: [0.1, 0.5, 0.9, 1.0],
|
||||
stop: [undefined, ['STOP'], ['END', 'TERMINATE']],
|
||||
seed: [undefined, 12345]
|
||||
}
|
||||
},
|
||||
|
||||
deepseek: {
|
||||
models: ['deepseek-chat', 'deepseek-coder'],
|
||||
parameters: {
|
||||
temperature: [0, 0.5, 1.0],
|
||||
maxTokens: [100, 1000, 4000],
|
||||
topP: [0.1, 0.5, 0.95],
|
||||
frequencyPenalty: [0, 0.5, 1.0],
|
||||
presencePenalty: [0, 0.5, 1.0],
|
||||
stop: [undefined, ['```'], ['END']]
|
||||
}
|
||||
},
|
||||
|
||||
azure: {
|
||||
deployments: ['gpt-4-deployment', 'gpt-35-turbo-deployment'],
|
||||
parameters: {
|
||||
temperature: [0, 0.7, 1.0],
|
||||
maxTokens: [100, 1000, 2000],
|
||||
topP: [0.1, 0.5, 0.95],
|
||||
frequencyPenalty: [0, 1.0],
|
||||
presencePenalty: [0, 1.0],
|
||||
stop: [undefined, ['STOP']]
|
||||
}
|
||||
}
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Creates test cases for all parameter combinations
|
||||
*/
|
||||
export function generateParameterTestCases<T extends Record<string, any[]>>(
|
||||
params: T,
|
||||
maxCombinations = 50
|
||||
): Array<Partial<{ [K in keyof T]: T[K][number] }>> {
|
||||
const keys = Object.keys(params) as Array<keyof T>
|
||||
const testCases: Array<Partial<{ [K in keyof T]: T[K][number] }>> = []
|
||||
|
||||
// Generate combinations using sampling strategy for large parameter spaces
|
||||
const totalCombinations = keys.reduce((acc, key) => acc * params[key].length, 1)
|
||||
|
||||
if (totalCombinations <= maxCombinations) {
|
||||
// Generate all combinations if total is small
|
||||
generateAllCombinations(params, keys, 0, {}, testCases)
|
||||
} else {
|
||||
// Sample diverse combinations if total is large
|
||||
generateSampledCombinations(params, keys, maxCombinations, testCases)
|
||||
}
|
||||
|
||||
return testCases
|
||||
}
|
||||
|
||||
function generateAllCombinations<T extends Record<string, any[]>>(
|
||||
params: T,
|
||||
keys: Array<keyof T>,
|
||||
index: number,
|
||||
current: Partial<{ [K in keyof T]: T[K][number] }>,
|
||||
results: Array<Partial<{ [K in keyof T]: T[K][number] }>>
|
||||
) {
|
||||
if (index === keys.length) {
|
||||
results.push({ ...current })
|
||||
return
|
||||
}
|
||||
|
||||
const key = keys[index]
|
||||
for (const value of params[key]) {
|
||||
generateAllCombinations(params, keys, index + 1, { ...current, [key]: value }, results)
|
||||
}
|
||||
}
|
||||
|
||||
function generateSampledCombinations<T extends Record<string, any[]>>(
|
||||
params: T,
|
||||
keys: Array<keyof T>,
|
||||
count: number,
|
||||
results: Array<Partial<{ [K in keyof T]: T[K][number] }>>
|
||||
) {
|
||||
// Generate edge cases first (min/max values)
|
||||
const edgeCase1: any = {}
|
||||
const edgeCase2: any = {}
|
||||
|
||||
for (const key of keys) {
|
||||
edgeCase1[key] = params[key][0]
|
||||
edgeCase2[key] = params[key][params[key].length - 1]
|
||||
}
|
||||
|
||||
results.push(edgeCase1, edgeCase2)
|
||||
|
||||
// Generate random combinations for the rest
|
||||
for (let i = results.length; i < count; i++) {
|
||||
const combination: any = {}
|
||||
for (const key of keys) {
|
||||
const values = params[key]
|
||||
combination[key] = values[Math.floor(Math.random() * values.length)]
|
||||
}
|
||||
results.push(combination)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that all provider-specific parameters are correctly passed through
|
||||
*/
|
||||
export function validateProviderParams(providerId: string, actualParams: any, expectedParams: any): void {
|
||||
const requiredFields: Record<string, string[]> = {
|
||||
openai: ['model', 'messages'],
|
||||
anthropic: ['model', 'messages'],
|
||||
google: ['model', 'contents'],
|
||||
xai: ['model', 'messages'],
|
||||
deepseek: ['model', 'messages'],
|
||||
azure: ['messages']
|
||||
}
|
||||
|
||||
const fields = requiredFields[providerId] || ['model', 'messages']
|
||||
|
||||
for (const field of fields) {
|
||||
expect(actualParams).toHaveProperty(field)
|
||||
}
|
||||
|
||||
// Validate optional parameters if they were provided
|
||||
const optionalParams = ['temperature', 'max_tokens', 'top_p', 'stop', 'tools']
|
||||
|
||||
for (const param of optionalParams) {
|
||||
if (expectedParams[param] !== undefined) {
|
||||
expect(actualParams[param]).toEqual(expectedParams[param])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a comprehensive test suite for a provider
|
||||
*/
|
||||
// oxlint-disable-next-line no-unused-vars
|
||||
export function createProviderTestSuite(_providerId: string) {
|
||||
return {
|
||||
testBasicCompletion: async (executor: any, model: string) => {
|
||||
const result = await executor.generateText({
|
||||
model,
|
||||
messages: [{ role: 'user' as const, content: 'Hello' }]
|
||||
})
|
||||
|
||||
expect(result).toBeDefined()
|
||||
expect(result.text).toBeDefined()
|
||||
expect(typeof result.text).toBe('string')
|
||||
},
|
||||
|
||||
testStreaming: async (executor: any, model: string) => {
|
||||
const chunks: any[] = []
|
||||
const result = await executor.streamText({
|
||||
model,
|
||||
messages: [{ role: 'user' as const, content: 'Hello' }]
|
||||
})
|
||||
|
||||
for await (const chunk of result.textStream) {
|
||||
chunks.push(chunk)
|
||||
}
|
||||
|
||||
expect(chunks.length).toBeGreaterThan(0)
|
||||
},
|
||||
|
||||
testTemperature: async (executor: any, model: string, temperatures: number[]) => {
|
||||
for (const temperature of temperatures) {
|
||||
const result = await executor.generateText({
|
||||
model,
|
||||
messages: [{ role: 'user' as const, content: 'Hello' }],
|
||||
temperature
|
||||
})
|
||||
|
||||
expect(result).toBeDefined()
|
||||
}
|
||||
},
|
||||
|
||||
testMaxTokens: async (executor: any, model: string, maxTokensValues: number[]) => {
|
||||
for (const maxTokens of maxTokensValues) {
|
||||
const result = await executor.generateText({
|
||||
model,
|
||||
messages: [{ role: 'user' as const, content: 'Hello' }],
|
||||
maxTokens
|
||||
})
|
||||
|
||||
expect(result).toBeDefined()
|
||||
if (result.usage?.completionTokens) {
|
||||
expect(result.usage.completionTokens).toBeLessThanOrEqual(maxTokens)
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
testToolCalling: async (executor: any, model: string, tools: Record<string, Tool>) => {
|
||||
const result = await executor.generateText({
|
||||
model,
|
||||
messages: [{ role: 'user' as const, content: 'What is the weather in SF?' }],
|
||||
tools
|
||||
})
|
||||
|
||||
expect(result).toBeDefined()
|
||||
},
|
||||
|
||||
testStopSequences: async (executor: any, model: string, stopSequences: string[][]) => {
|
||||
for (const stop of stopSequences) {
|
||||
const result = await executor.generateText({
|
||||
model,
|
||||
messages: [{ role: 'user' as const, content: 'Count to 10' }],
|
||||
stop
|
||||
})
|
||||
|
||||
expect(result).toBeDefined()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates test data for vision/multimodal testing
|
||||
*/
|
||||
export function createVisionTestData() {
|
||||
return {
|
||||
imageUrl: 'https://example.com/test-image.jpg',
|
||||
base64Image:
|
||||
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==',
|
||||
messages: [
|
||||
{
|
||||
role: 'user' as const,
|
||||
content: [
|
||||
{ type: 'text' as const, text: 'What is in this image?' },
|
||||
{
|
||||
type: 'image' as const,
|
||||
image:
|
||||
'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=='
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates mock responses for different finish reasons
|
||||
*/
|
||||
export function createFinishReasonMocks() {
|
||||
return {
|
||||
stop: {
|
||||
text: 'Complete response.',
|
||||
finishReason: 'stop' as const,
|
||||
usage: { promptTokens: 10, completionTokens: 5, totalTokens: 15 }
|
||||
},
|
||||
length: {
|
||||
text: 'Incomplete response due to',
|
||||
finishReason: 'length' as const,
|
||||
usage: { promptTokens: 10, completionTokens: 100, totalTokens: 110 }
|
||||
},
|
||||
'tool-calls': {
|
||||
text: 'Calling tools',
|
||||
finishReason: 'tool-calls' as const,
|
||||
toolCalls: [{ toolCallId: 'call_1', toolName: 'getWeather', args: { location: 'SF' } }],
|
||||
usage: { promptTokens: 10, completionTokens: 8, totalTokens: 18 }
|
||||
},
|
||||
'content-filter': {
|
||||
text: '',
|
||||
finishReason: 'content-filter' as const,
|
||||
usage: { promptTokens: 10, completionTokens: 0, totalTokens: 10 }
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,291 +0,0 @@
|
||||
/**
|
||||
* Test Utilities
|
||||
* Helper functions for testing AI Core functionality
|
||||
*/
|
||||
|
||||
import { expect, vi } from 'vitest'
|
||||
|
||||
import type { ProviderId } from '../fixtures/mock-providers'
|
||||
import { createMockImageModel, createMockLanguageModel, mockProviderConfigs } from '../fixtures/mock-providers'
|
||||
|
||||
/**
|
||||
* Creates a test provider with streaming support
|
||||
*/
|
||||
export function createTestStreamingProvider(chunks: any[]) {
|
||||
return createMockLanguageModel({
|
||||
doStream: vi.fn().mockReturnValue({
|
||||
stream: (async function* () {
|
||||
for (const chunk of chunks) {
|
||||
yield chunk
|
||||
}
|
||||
})(),
|
||||
rawCall: { rawPrompt: null, rawSettings: {} },
|
||||
rawResponse: { headers: {} },
|
||||
warnings: []
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a test provider that throws errors
|
||||
*/
|
||||
export function createErrorProvider(error: Error) {
|
||||
return createMockLanguageModel({
|
||||
doGenerate: vi.fn().mockRejectedValue(error),
|
||||
doStream: vi.fn().mockImplementation(() => {
|
||||
throw error
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Collects all chunks from a stream
|
||||
*/
|
||||
export async function collectStreamChunks<T>(stream: AsyncIterable<T>): Promise<T[]> {
|
||||
const chunks: T[] = []
|
||||
for await (const chunk of stream) {
|
||||
chunks.push(chunk)
|
||||
}
|
||||
return chunks
|
||||
}
|
||||
|
||||
/**
|
||||
* Waits for a specific number of milliseconds
|
||||
*/
|
||||
export function wait(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms))
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock abort controller that aborts after a delay
|
||||
*/
|
||||
export function createDelayedAbortController(delayMs: number): AbortController {
|
||||
const controller = new AbortController()
|
||||
setTimeout(() => controller.abort(), delayMs)
|
||||
return controller
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that a function throws an error with a specific message
|
||||
*/
|
||||
export async function expectError(fn: () => Promise<any>, expectedMessage?: string | RegExp): Promise<Error> {
|
||||
try {
|
||||
await fn()
|
||||
throw new Error('Expected function to throw an error, but it did not')
|
||||
} catch (error) {
|
||||
if (expectedMessage) {
|
||||
const message = (error as Error).message
|
||||
if (typeof expectedMessage === 'string') {
|
||||
if (!message.includes(expectedMessage)) {
|
||||
throw new Error(`Expected error message to include "${expectedMessage}", but got "${message}"`)
|
||||
}
|
||||
} else {
|
||||
if (!expectedMessage.test(message)) {
|
||||
throw new Error(`Expected error message to match ${expectedMessage}, but got "${message}"`)
|
||||
}
|
||||
}
|
||||
}
|
||||
return error as Error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a spy function that tracks calls and arguments
|
||||
*/
|
||||
export function createSpy<T extends (...args: any[]) => any>() {
|
||||
const calls: Array<{ args: Parameters<T>; result?: ReturnType<T>; error?: Error }> = []
|
||||
|
||||
const spy = vi.fn((...args: Parameters<T>) => {
|
||||
try {
|
||||
const result = undefined as ReturnType<T>
|
||||
calls.push({ args, result })
|
||||
return result
|
||||
} catch (error) {
|
||||
calls.push({ args, error: error as Error })
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
return {
|
||||
fn: spy,
|
||||
calls,
|
||||
getCalls: () => calls,
|
||||
getCallCount: () => calls.length,
|
||||
getLastCall: () => calls[calls.length - 1],
|
||||
reset: () => {
|
||||
calls.length = 0
|
||||
spy.mockClear()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates provider configuration
|
||||
*/
|
||||
export function validateProviderConfig(providerId: ProviderId) {
|
||||
const config = mockProviderConfigs[providerId]
|
||||
if (!config) {
|
||||
throw new Error(`No mock configuration found for provider: ${providerId}`)
|
||||
}
|
||||
|
||||
if (!config.apiKey) {
|
||||
throw new Error(`Provider ${providerId} is missing apiKey in mock config`)
|
||||
}
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a test context with common setup
|
||||
*/
|
||||
export function createTestContext() {
|
||||
const mocks = {
|
||||
languageModel: createMockLanguageModel(),
|
||||
imageModel: createMockImageModel(),
|
||||
providers: new Map<string, any>()
|
||||
}
|
||||
|
||||
const cleanup = () => {
|
||||
mocks.providers.clear()
|
||||
vi.clearAllMocks()
|
||||
}
|
||||
|
||||
return {
|
||||
mocks,
|
||||
cleanup
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Measures execution time of an async function
|
||||
*/
|
||||
export async function measureTime<T>(fn: () => Promise<T>): Promise<{ result: T; duration: number }> {
|
||||
const start = Date.now()
|
||||
const result = await fn()
|
||||
const duration = Date.now() - start
|
||||
return { result, duration }
|
||||
}
|
||||
|
||||
/**
|
||||
* Retries a function until it succeeds or max attempts reached
|
||||
*/
|
||||
export async function retryUntilSuccess<T>(fn: () => Promise<T>, maxAttempts = 3, delayMs = 100): Promise<T> {
|
||||
let lastError: Error | undefined
|
||||
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
||||
try {
|
||||
return await fn()
|
||||
} catch (error) {
|
||||
lastError = error as Error
|
||||
if (attempt < maxAttempts) {
|
||||
await wait(delayMs)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError || new Error('All retry attempts failed')
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock streaming response that emits chunks at intervals
|
||||
*/
|
||||
export function createTimedStream<T>(chunks: T[], intervalMs = 10) {
|
||||
return {
|
||||
async *[Symbol.asyncIterator]() {
|
||||
for (const chunk of chunks) {
|
||||
await wait(intervalMs)
|
||||
yield chunk
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that two objects are deeply equal, ignoring specified keys
|
||||
*/
|
||||
export function assertDeepEqualIgnoring<T extends Record<string, any>>(
|
||||
actual: T,
|
||||
expected: T,
|
||||
ignoreKeys: string[] = []
|
||||
): void {
|
||||
const filterKeys = (obj: T): Partial<T> => {
|
||||
const filtered = { ...obj }
|
||||
for (const key of ignoreKeys) {
|
||||
delete filtered[key]
|
||||
}
|
||||
return filtered
|
||||
}
|
||||
|
||||
const filteredActual = filterKeys(actual)
|
||||
const filteredExpected = filterKeys(expected)
|
||||
|
||||
expect(filteredActual).toEqual(filteredExpected)
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a provider mock that simulates rate limiting
|
||||
*/
|
||||
export function createRateLimitedProvider(limitPerSecond: number) {
|
||||
const calls: number[] = []
|
||||
|
||||
return createMockLanguageModel({
|
||||
doGenerate: vi.fn().mockImplementation(async () => {
|
||||
const now = Date.now()
|
||||
calls.push(now)
|
||||
|
||||
// Remove calls older than 1 second
|
||||
const recentCalls = calls.filter((time) => now - time < 1000)
|
||||
|
||||
if (recentCalls.length > limitPerSecond) {
|
||||
throw new Error('Rate limit exceeded')
|
||||
}
|
||||
|
||||
return {
|
||||
text: 'Rate limited response',
|
||||
finishReason: 'stop' as const,
|
||||
usage: { promptTokens: 10, completionTokens: 5, totalTokens: 15 },
|
||||
rawCall: { rawPrompt: null, rawSettings: {} },
|
||||
rawResponse: { headers: {} },
|
||||
warnings: []
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates streaming response structure
|
||||
*/
|
||||
export function validateStreamChunk(chunk: any): void {
|
||||
expect(chunk).toBeDefined()
|
||||
expect(chunk).toHaveProperty('type')
|
||||
|
||||
if (chunk.type === 'text-delta') {
|
||||
expect(chunk).toHaveProperty('textDelta')
|
||||
expect(typeof chunk.textDelta).toBe('string')
|
||||
} else if (chunk.type === 'finish') {
|
||||
expect(chunk).toHaveProperty('finishReason')
|
||||
expect(chunk).toHaveProperty('usage')
|
||||
} else if (chunk.type === 'tool-call') {
|
||||
expect(chunk).toHaveProperty('toolCallId')
|
||||
expect(chunk).toHaveProperty('toolName')
|
||||
expect(chunk).toHaveProperty('args')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a test logger that captures log messages
|
||||
*/
|
||||
export function createTestLogger() {
|
||||
const logs: Array<{ level: string; message: string; meta?: any }> = []
|
||||
|
||||
return {
|
||||
info: (message: string, meta?: any) => logs.push({ level: 'info', message, meta }),
|
||||
warn: (message: string, meta?: any) => logs.push({ level: 'warn', message, meta }),
|
||||
error: (message: string, meta?: any) => logs.push({ level: 'error', message, meta }),
|
||||
debug: (message: string, meta?: any) => logs.push({ level: 'debug', message, meta }),
|
||||
getLogs: () => logs,
|
||||
clear: () => {
|
||||
logs.length = 0
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
/**
|
||||
* Test Infrastructure Exports
|
||||
* Central export point for all test utilities, fixtures, and helpers
|
||||
*/
|
||||
|
||||
// Fixtures
|
||||
export * from './fixtures/mock-providers'
|
||||
export * from './fixtures/mock-responses'
|
||||
|
||||
// Helpers
|
||||
export * from './helpers/provider-test-utils'
|
||||
export * from './helpers/test-utils'
|
||||
@@ -4,7 +4,12 @@
|
||||
*/
|
||||
export const BUILT_IN_PLUGIN_PREFIX = 'built-in:'
|
||||
|
||||
export * from './googleToolsPlugin'
|
||||
export * from './toolUsePlugin/promptToolUsePlugin'
|
||||
export * from './toolUsePlugin/type'
|
||||
export * from './webSearchPlugin'
|
||||
export { googleToolsPlugin } from './googleToolsPlugin'
|
||||
export { createLoggingPlugin } from './logging'
|
||||
export { createPromptToolUsePlugin } from './toolUsePlugin/promptToolUsePlugin'
|
||||
export type {
|
||||
PromptToolUseConfig,
|
||||
ToolUseRequestContext,
|
||||
ToolUseResult
|
||||
} from './toolUsePlugin/type'
|
||||
export { webSearchPlugin, type WebSearchPluginConfig } from './webSearchPlugin'
|
||||
|
||||
@@ -32,7 +32,7 @@ export const webSearchPlugin = (config: WebSearchPluginConfig = DEFAULT_WEB_SEAR
|
||||
})
|
||||
|
||||
// 导出类型定义供开发者使用
|
||||
export * from './helper'
|
||||
export type { WebSearchPluginConfig, WebSearchToolOutputSchema } from './helper'
|
||||
|
||||
// 默认导出
|
||||
export default webSearchPlugin
|
||||
|
||||
@@ -44,7 +44,7 @@ export {
|
||||
// ==================== 基础数据和类型 ====================
|
||||
|
||||
// 基础Provider数据源
|
||||
export { baseProviderIds, baseProviders, isBaseProvider } from './schemas'
|
||||
export { baseProviderIds, baseProviders } from './schemas'
|
||||
|
||||
// 类型定义和Schema
|
||||
export type {
|
||||
|
||||
@@ -7,6 +7,7 @@ import { createAzure } from '@ai-sdk/azure'
|
||||
import { type AzureOpenAIProviderSettings } from '@ai-sdk/azure'
|
||||
import { createDeepSeek } from '@ai-sdk/deepseek'
|
||||
import { createGoogleGenerativeAI } from '@ai-sdk/google'
|
||||
import { createHuggingFace } from '@ai-sdk/huggingface'
|
||||
import { createOpenAI, type OpenAIProviderSettings } from '@ai-sdk/openai'
|
||||
import { createOpenAICompatible } from '@ai-sdk/openai-compatible'
|
||||
import type { LanguageModelV2 } from '@ai-sdk/provider'
|
||||
@@ -32,7 +33,8 @@ export const baseProviderIds = [
|
||||
'deepseek',
|
||||
'openrouter',
|
||||
'cherryin',
|
||||
'cherryin-chat'
|
||||
'cherryin-chat',
|
||||
'huggingface'
|
||||
] as const
|
||||
|
||||
/**
|
||||
@@ -156,6 +158,12 @@ export const baseProviders = [
|
||||
})
|
||||
},
|
||||
supportsImageGeneration: true
|
||||
},
|
||||
{
|
||||
id: 'huggingface',
|
||||
name: 'HuggingFace',
|
||||
creator: createHuggingFace,
|
||||
supportsImageGeneration: true
|
||||
}
|
||||
] as const satisfies BaseProvider[]
|
||||
|
||||
|
||||
@@ -1,499 +0,0 @@
|
||||
/**
|
||||
* RuntimeExecutor.generateText Comprehensive Tests
|
||||
* Tests non-streaming text generation across all providers with various parameters
|
||||
*/
|
||||
|
||||
import { generateText } from 'ai'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
createMockLanguageModel,
|
||||
mockCompleteResponses,
|
||||
mockProviderConfigs,
|
||||
testMessages,
|
||||
testTools
|
||||
} from '../../../__tests__'
|
||||
import type { AiPlugin } from '../../plugins'
|
||||
import { globalRegistryManagement } from '../../providers/RegistryManagement'
|
||||
import { RuntimeExecutor } from '../executor'
|
||||
|
||||
// Mock AI SDK
|
||||
vi.mock('ai', () => ({
|
||||
generateText: vi.fn()
|
||||
}))
|
||||
|
||||
vi.mock('../../providers/RegistryManagement', () => ({
|
||||
globalRegistryManagement: {
|
||||
languageModel: vi.fn()
|
||||
},
|
||||
DEFAULT_SEPARATOR: '|'
|
||||
}))
|
||||
|
||||
describe('RuntimeExecutor.generateText', () => {
|
||||
let executor: RuntimeExecutor<'openai'>
|
||||
let mockLanguageModel: any
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
|
||||
executor = RuntimeExecutor.create('openai', mockProviderConfigs.openai)
|
||||
|
||||
mockLanguageModel = createMockLanguageModel({
|
||||
provider: 'openai',
|
||||
modelId: 'gpt-4'
|
||||
})
|
||||
|
||||
vi.mocked(globalRegistryManagement.languageModel).mockReturnValue(mockLanguageModel)
|
||||
vi.mocked(generateText).mockResolvedValue(mockCompleteResponses.simple as any)
|
||||
})
|
||||
|
||||
describe('Basic Functionality', () => {
|
||||
it('should generate text with minimal parameters', async () => {
|
||||
const result = await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith({
|
||||
model: mockLanguageModel,
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(result.text).toBe('This is a simple response.')
|
||||
expect(result.finishReason).toBe('stop')
|
||||
expect(result.usage).toBeDefined()
|
||||
})
|
||||
|
||||
it('should generate with system messages', async () => {
|
||||
await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.withSystem
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith({
|
||||
model: mockLanguageModel,
|
||||
messages: testMessages.withSystem
|
||||
})
|
||||
})
|
||||
|
||||
it('should generate with conversation history', async () => {
|
||||
await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.conversation
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
messages: testMessages.conversation
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('All Parameter Combinations', () => {
|
||||
it('should support all parameters together', async () => {
|
||||
await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
temperature: 0.7,
|
||||
maxOutputTokens: 500,
|
||||
topP: 0.9,
|
||||
frequencyPenalty: 0.5,
|
||||
presencePenalty: 0.3,
|
||||
stopSequences: ['STOP'],
|
||||
seed: 12345
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
temperature: 0.7,
|
||||
maxOutputTokens: 500,
|
||||
topP: 0.9,
|
||||
frequencyPenalty: 0.5,
|
||||
presencePenalty: 0.3,
|
||||
stopSequences: ['STOP'],
|
||||
seed: 12345
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('should support partial parameters', async () => {
|
||||
await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
temperature: 0.5,
|
||||
maxOutputTokens: 100
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
temperature: 0.5,
|
||||
maxOutputTokens: 100
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Tool Calling', () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(generateText).mockResolvedValue(mockCompleteResponses.withToolCalls as any)
|
||||
})
|
||||
|
||||
it('should support tool calling', async () => {
|
||||
const result = await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.toolUse,
|
||||
tools: testTools
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
tools: testTools
|
||||
})
|
||||
)
|
||||
|
||||
expect(result.toolCalls).toBeDefined()
|
||||
expect(result.toolCalls).toHaveLength(1)
|
||||
})
|
||||
|
||||
it('should support toolChoice auto', async () => {
|
||||
await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.toolUse,
|
||||
tools: testTools,
|
||||
toolChoice: 'auto'
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
toolChoice: 'auto'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('should support toolChoice required', async () => {
|
||||
await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.toolUse,
|
||||
tools: testTools,
|
||||
toolChoice: 'required'
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
toolChoice: 'required'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('should support toolChoice none', async () => {
|
||||
vi.mocked(generateText).mockResolvedValue(mockCompleteResponses.simple as any)
|
||||
|
||||
await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
tools: testTools,
|
||||
toolChoice: 'none'
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
toolChoice: 'none'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('should support specific tool selection', async () => {
|
||||
await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.toolUse,
|
||||
tools: testTools,
|
||||
toolChoice: {
|
||||
type: 'tool',
|
||||
toolName: 'getWeather'
|
||||
}
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
toolChoice: {
|
||||
type: 'tool',
|
||||
toolName: 'getWeather'
|
||||
}
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Multiple Providers', () => {
|
||||
it('should work with Anthropic provider', async () => {
|
||||
const anthropicExecutor = RuntimeExecutor.create('anthropic', mockProviderConfigs.anthropic)
|
||||
|
||||
const anthropicModel = createMockLanguageModel({
|
||||
provider: 'anthropic',
|
||||
modelId: 'claude-3-5-sonnet-20241022'
|
||||
})
|
||||
|
||||
vi.mocked(globalRegistryManagement.languageModel).mockReturnValue(anthropicModel)
|
||||
|
||||
await anthropicExecutor.generateText({
|
||||
model: 'claude-3-5-sonnet-20241022',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(globalRegistryManagement.languageModel).toHaveBeenCalledWith('anthropic|claude-3-5-sonnet-20241022')
|
||||
})
|
||||
|
||||
it('should work with Google provider', async () => {
|
||||
const googleExecutor = RuntimeExecutor.create('google', mockProviderConfigs.google)
|
||||
|
||||
const googleModel = createMockLanguageModel({
|
||||
provider: 'google',
|
||||
modelId: 'gemini-2.0-flash-exp'
|
||||
})
|
||||
|
||||
vi.mocked(globalRegistryManagement.languageModel).mockReturnValue(googleModel)
|
||||
|
||||
await googleExecutor.generateText({
|
||||
model: 'gemini-2.0-flash-exp',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(globalRegistryManagement.languageModel).toHaveBeenCalledWith('google|gemini-2.0-flash-exp')
|
||||
})
|
||||
|
||||
it('should work with xAI provider', async () => {
|
||||
const xaiExecutor = RuntimeExecutor.create('xai', mockProviderConfigs.xai)
|
||||
|
||||
const xaiModel = createMockLanguageModel({
|
||||
provider: 'xai',
|
||||
modelId: 'grok-2-latest'
|
||||
})
|
||||
|
||||
vi.mocked(globalRegistryManagement.languageModel).mockReturnValue(xaiModel)
|
||||
|
||||
await xaiExecutor.generateText({
|
||||
model: 'grok-2-latest',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(globalRegistryManagement.languageModel).toHaveBeenCalledWith('xai|grok-2-latest')
|
||||
})
|
||||
|
||||
it('should work with DeepSeek provider', async () => {
|
||||
const deepseekExecutor = RuntimeExecutor.create('deepseek', mockProviderConfigs.deepseek)
|
||||
|
||||
const deepseekModel = createMockLanguageModel({
|
||||
provider: 'deepseek',
|
||||
modelId: 'deepseek-chat'
|
||||
})
|
||||
|
||||
vi.mocked(globalRegistryManagement.languageModel).mockReturnValue(deepseekModel)
|
||||
|
||||
await deepseekExecutor.generateText({
|
||||
model: 'deepseek-chat',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(globalRegistryManagement.languageModel).toHaveBeenCalledWith('deepseek|deepseek-chat')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Plugin Integration', () => {
|
||||
it('should execute all plugin hooks', async () => {
|
||||
const pluginCalls: string[] = []
|
||||
|
||||
const testPlugin: AiPlugin = {
|
||||
name: 'test-plugin',
|
||||
onRequestStart: vi.fn(async () => {
|
||||
pluginCalls.push('onRequestStart')
|
||||
}),
|
||||
transformParams: vi.fn(async (params) => {
|
||||
pluginCalls.push('transformParams')
|
||||
return { ...params, temperature: 0.8 }
|
||||
}),
|
||||
transformResult: vi.fn(async (result) => {
|
||||
pluginCalls.push('transformResult')
|
||||
return { ...result, text: result.text + ' [modified]' }
|
||||
}),
|
||||
onRequestEnd: vi.fn(async () => {
|
||||
pluginCalls.push('onRequestEnd')
|
||||
})
|
||||
}
|
||||
|
||||
const executorWithPlugin = RuntimeExecutor.create('openai', mockProviderConfigs.openai, [testPlugin])
|
||||
|
||||
const result = await executorWithPlugin.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(pluginCalls).toEqual(['onRequestStart', 'transformParams', 'transformResult', 'onRequestEnd'])
|
||||
|
||||
// Verify transformed parameters
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
temperature: 0.8
|
||||
})
|
||||
)
|
||||
|
||||
// Verify transformed result
|
||||
expect(result.text).toContain('[modified]')
|
||||
})
|
||||
|
||||
it('should handle multiple plugins in order', async () => {
|
||||
const pluginOrder: string[] = []
|
||||
|
||||
const plugin1: AiPlugin = {
|
||||
name: 'plugin-1',
|
||||
transformParams: vi.fn(async (params) => {
|
||||
pluginOrder.push('plugin-1')
|
||||
return { ...params, temperature: 0.5 }
|
||||
})
|
||||
}
|
||||
|
||||
const plugin2: AiPlugin = {
|
||||
name: 'plugin-2',
|
||||
transformParams: vi.fn(async (params) => {
|
||||
pluginOrder.push('plugin-2')
|
||||
return { ...params, maxTokens: 200 }
|
||||
})
|
||||
}
|
||||
|
||||
const executorWithPlugins = RuntimeExecutor.create('openai', mockProviderConfigs.openai, [plugin1, plugin2])
|
||||
|
||||
await executorWithPlugins.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(pluginOrder).toEqual(['plugin-1', 'plugin-2'])
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
temperature: 0.5,
|
||||
maxTokens: 200
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error Handling', () => {
|
||||
it('should handle API errors', async () => {
|
||||
const error = new Error('API request failed')
|
||||
vi.mocked(generateText).mockRejectedValue(error)
|
||||
|
||||
await expect(
|
||||
executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
).rejects.toThrow('API request failed')
|
||||
})
|
||||
|
||||
it('should execute onError plugin hook', async () => {
|
||||
const error = new Error('Generation failed')
|
||||
vi.mocked(generateText).mockRejectedValue(error)
|
||||
|
||||
const errorPlugin: AiPlugin = {
|
||||
name: 'error-handler',
|
||||
onError: vi.fn()
|
||||
}
|
||||
|
||||
const executorWithPlugin = RuntimeExecutor.create('openai', mockProviderConfigs.openai, [errorPlugin])
|
||||
|
||||
await expect(
|
||||
executorWithPlugin.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
).rejects.toThrow('Generation failed')
|
||||
|
||||
expect(errorPlugin.onError).toHaveBeenCalledWith(
|
||||
error,
|
||||
expect.objectContaining({
|
||||
providerId: 'openai',
|
||||
modelId: 'gpt-4'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle model not found error', async () => {
|
||||
const error = new Error('Model not found: invalid-model')
|
||||
vi.mocked(globalRegistryManagement.languageModel).mockImplementation(() => {
|
||||
throw error
|
||||
})
|
||||
|
||||
await expect(
|
||||
executor.generateText({
|
||||
model: 'invalid-model',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
).rejects.toThrow('Model not found')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Usage and Metadata', () => {
|
||||
it('should return usage information', async () => {
|
||||
const result = await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(result.usage).toBeDefined()
|
||||
expect(result.usage.inputTokens).toBe(15)
|
||||
expect(result.usage.outputTokens).toBe(8)
|
||||
expect(result.usage.totalTokens).toBe(23)
|
||||
})
|
||||
|
||||
it('should handle warnings', async () => {
|
||||
vi.mocked(generateText).mockResolvedValue(mockCompleteResponses.withWarnings as any)
|
||||
|
||||
const result = await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
temperature: 2.5 // Unsupported value
|
||||
})
|
||||
|
||||
expect(result.warnings).toBeDefined()
|
||||
expect(result.warnings).toHaveLength(1)
|
||||
expect(result.warnings![0].type).toBe('unsupported-setting')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Abort Signal', () => {
|
||||
it('should support abort signal', async () => {
|
||||
const abortController = new AbortController()
|
||||
|
||||
await executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
abortSignal: abortController.signal
|
||||
})
|
||||
|
||||
expect(generateText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
abortSignal: abortController.signal
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle aborted request', async () => {
|
||||
const abortError = new Error('Request aborted')
|
||||
abortError.name = 'AbortError'
|
||||
|
||||
vi.mocked(generateText).mockRejectedValue(abortError)
|
||||
|
||||
const abortController = new AbortController()
|
||||
abortController.abort()
|
||||
|
||||
await expect(
|
||||
executor.generateText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
abortSignal: abortController.signal
|
||||
})
|
||||
).rejects.toThrow('Request aborted')
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,525 +0,0 @@
|
||||
/**
|
||||
* RuntimeExecutor.streamText Comprehensive Tests
|
||||
* Tests streaming text generation across all providers with various parameters
|
||||
*/
|
||||
|
||||
import { streamText } from 'ai'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { collectStreamChunks, createMockLanguageModel, mockProviderConfigs, testMessages } from '../../../__tests__'
|
||||
import type { AiPlugin } from '../../plugins'
|
||||
import { globalRegistryManagement } from '../../providers/RegistryManagement'
|
||||
import { RuntimeExecutor } from '../executor'
|
||||
|
||||
// Mock AI SDK
|
||||
vi.mock('ai', () => ({
|
||||
streamText: vi.fn()
|
||||
}))
|
||||
|
||||
vi.mock('../../providers/RegistryManagement', () => ({
|
||||
globalRegistryManagement: {
|
||||
languageModel: vi.fn()
|
||||
},
|
||||
DEFAULT_SEPARATOR: '|'
|
||||
}))
|
||||
|
||||
describe('RuntimeExecutor.streamText', () => {
|
||||
let executor: RuntimeExecutor<'openai'>
|
||||
let mockLanguageModel: any
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
|
||||
executor = RuntimeExecutor.create('openai', mockProviderConfigs.openai)
|
||||
|
||||
mockLanguageModel = createMockLanguageModel({
|
||||
provider: 'openai',
|
||||
modelId: 'gpt-4'
|
||||
})
|
||||
|
||||
vi.mocked(globalRegistryManagement.languageModel).mockReturnValue(mockLanguageModel)
|
||||
})
|
||||
|
||||
describe('Basic Functionality', () => {
|
||||
it('should stream text with minimal parameters', async () => {
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Hello'
|
||||
yield ' '
|
||||
yield 'World'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Hello' }
|
||||
yield { type: 'text-delta', textDelta: ' ' }
|
||||
yield { type: 'text-delta', textDelta: 'World' }
|
||||
})(),
|
||||
usage: Promise.resolve({ promptTokens: 5, completionTokens: 3, totalTokens: 8 })
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
const result = await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith({
|
||||
model: mockLanguageModel,
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
const chunks = await collectStreamChunks(result.textStream)
|
||||
expect(chunks).toEqual(['Hello', ' ', 'World'])
|
||||
})
|
||||
|
||||
it('should stream with system messages', async () => {
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.withSystem
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith({
|
||||
model: mockLanguageModel,
|
||||
messages: testMessages.withSystem
|
||||
})
|
||||
})
|
||||
|
||||
it('should stream multi-turn conversations', async () => {
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Multi-turn response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Multi-turn response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.multiTurn
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalled()
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
messages: testMessages.multiTurn
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Temperature Parameter', () => {
|
||||
const temperatures = [0, 0.3, 0.5, 0.7, 0.9, 1.0, 1.5, 2.0]
|
||||
|
||||
it.each(temperatures)('should support temperature=%s', async (temperature) => {
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
temperature
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
temperature
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Max Tokens Parameter', () => {
|
||||
const maxTokensValues = [10, 50, 100, 500, 1000, 2000, 4000]
|
||||
|
||||
it.each(maxTokensValues)('should support maxTokens=%s', async (maxTokens) => {
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
maxOutputTokens: maxTokens
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
maxTokens
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Top P Parameter', () => {
|
||||
const topPValues = [0.1, 0.3, 0.5, 0.7, 0.9, 0.95, 1.0]
|
||||
|
||||
it.each(topPValues)('should support topP=%s', async (topP) => {
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
topP
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
topP
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Frequency and Presence Penalty', () => {
|
||||
it('should support frequency penalty', async () => {
|
||||
const penalties = [-2.0, -1.0, 0, 0.5, 1.0, 1.5, 2.0]
|
||||
|
||||
for (const frequencyPenalty of penalties) {
|
||||
vi.clearAllMocks()
|
||||
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
frequencyPenalty
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
frequencyPenalty
|
||||
})
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
it('should support presence penalty', async () => {
|
||||
const penalties = [-2.0, -1.0, 0, 0.5, 1.0, 1.5, 2.0]
|
||||
|
||||
for (const presencePenalty of penalties) {
|
||||
vi.clearAllMocks()
|
||||
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
presencePenalty
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
presencePenalty
|
||||
})
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
it('should support both penalties together', async () => {
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
frequencyPenalty: 0.5,
|
||||
presencePenalty: 0.5
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
frequencyPenalty: 0.5,
|
||||
presencePenalty: 0.5
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Seed Parameter', () => {
|
||||
it('should support seed for deterministic output', async () => {
|
||||
const seeds = [0, 12345, 67890, 999999]
|
||||
|
||||
for (const seed of seeds) {
|
||||
vi.clearAllMocks()
|
||||
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
seed
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
seed
|
||||
})
|
||||
)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('Abort Signal', () => {
|
||||
it('should support abort signal', async () => {
|
||||
const abortController = new AbortController()
|
||||
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
abortSignal: abortController.signal
|
||||
})
|
||||
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
abortSignal: abortController.signal
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle abort during streaming', async () => {
|
||||
const abortController = new AbortController()
|
||||
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Start'
|
||||
// Simulate abort
|
||||
abortController.abort()
|
||||
throw new Error('Aborted')
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Start' }
|
||||
throw new Error('Aborted')
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
const result = await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple,
|
||||
abortSignal: abortController.signal
|
||||
})
|
||||
|
||||
await expect(async () => {
|
||||
// oxlint-disable-next-line no-unused-vars
|
||||
for await (const _chunk of result.textStream) {
|
||||
// Stream should be interrupted
|
||||
}
|
||||
}).rejects.toThrow('Aborted')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Plugin Integration', () => {
|
||||
it('should execute plugins during streaming', async () => {
|
||||
const pluginCalls: string[] = []
|
||||
|
||||
const testPlugin: AiPlugin = {
|
||||
name: 'test-plugin',
|
||||
onRequestStart: vi.fn(async () => {
|
||||
pluginCalls.push('onRequestStart')
|
||||
}),
|
||||
transformParams: vi.fn(async (params) => {
|
||||
pluginCalls.push('transformParams')
|
||||
return { ...params, temperature: 0.5 }
|
||||
}),
|
||||
onRequestEnd: vi.fn(async () => {
|
||||
pluginCalls.push('onRequestEnd')
|
||||
})
|
||||
}
|
||||
|
||||
const executorWithPlugin = RuntimeExecutor.create('openai', mockProviderConfigs.openai, [testPlugin])
|
||||
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
const result = await executorWithPlugin.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
// Consume stream
|
||||
// oxlint-disable-next-line no-unused-vars
|
||||
for await (const _chunk of result.textStream) {
|
||||
// Stream chunks
|
||||
}
|
||||
|
||||
expect(pluginCalls).toContain('onRequestStart')
|
||||
expect(pluginCalls).toContain('transformParams')
|
||||
|
||||
// Verify transformed parameters were used
|
||||
expect(streamText).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
temperature: 0.5
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Full Stream with Finish Reason', () => {
|
||||
it('should provide finish reason in full stream', async () => {
|
||||
const mockStream = {
|
||||
textStream: (async function* () {
|
||||
yield 'Response'
|
||||
})(),
|
||||
fullStream: (async function* () {
|
||||
yield { type: 'text-delta', textDelta: 'Response' }
|
||||
yield {
|
||||
type: 'finish',
|
||||
finishReason: 'stop',
|
||||
usage: { promptTokens: 5, completionTokens: 3, totalTokens: 8 }
|
||||
}
|
||||
})()
|
||||
}
|
||||
|
||||
vi.mocked(streamText).mockResolvedValue(mockStream as any)
|
||||
|
||||
const result = await executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
|
||||
const fullChunks = await collectStreamChunks(result.fullStream)
|
||||
|
||||
expect(fullChunks).toHaveLength(2)
|
||||
expect(fullChunks[0]).toEqual({ type: 'text-delta', textDelta: 'Response' })
|
||||
expect(fullChunks[1]).toEqual({
|
||||
type: 'finish',
|
||||
finishReason: 'stop',
|
||||
usage: { promptTokens: 5, completionTokens: 3, totalTokens: 8 }
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error Handling', () => {
|
||||
it('should handle streaming errors', async () => {
|
||||
const error = new Error('Streaming failed')
|
||||
vi.mocked(streamText).mockRejectedValue(error)
|
||||
|
||||
await expect(
|
||||
executor.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
).rejects.toThrow('Streaming failed')
|
||||
})
|
||||
|
||||
it('should execute onError plugin hook on failure', async () => {
|
||||
const error = new Error('Stream error')
|
||||
vi.mocked(streamText).mockRejectedValue(error)
|
||||
|
||||
const errorPlugin: AiPlugin = {
|
||||
name: 'error-handler',
|
||||
onError: vi.fn()
|
||||
}
|
||||
|
||||
const executorWithPlugin = RuntimeExecutor.create('openai', mockProviderConfigs.openai, [errorPlugin])
|
||||
|
||||
await expect(
|
||||
executorWithPlugin.streamText({
|
||||
model: 'gpt-4',
|
||||
messages: testMessages.simple
|
||||
})
|
||||
).rejects.toThrow('Stream error')
|
||||
|
||||
expect(errorPlugin.onError).toHaveBeenCalledWith(
|
||||
error,
|
||||
expect.objectContaining({
|
||||
providerId: 'openai',
|
||||
modelId: 'gpt-4'
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -68,8 +68,8 @@
|
||||
],
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.2.4",
|
||||
"@tiptap/core": "^3.2.0",
|
||||
"@tiptap/pm": "^3.2.0",
|
||||
"@tiptap/core": "^3.10.7",
|
||||
"@tiptap/pm": "^3.10.7",
|
||||
"eslint": "^9.22.0",
|
||||
"eslint-plugin-react-hooks": "^5.2.0",
|
||||
"eslint-plugin-simple-import-sort": "^12.1.1",
|
||||
|
||||
@@ -41,7 +41,6 @@ export enum IpcChannel {
|
||||
App_SetFullScreen = 'app:set-full-screen',
|
||||
App_IsFullScreen = 'app:is-full-screen',
|
||||
App_GetSystemFonts = 'app:get-system-fonts',
|
||||
APP_CrashRenderProcess = 'app:crash-render-process',
|
||||
|
||||
App_MacIsProcessTrusted = 'app:mac-is-process-trusted',
|
||||
App_MacRequestProcessTrust = 'app:mac-request-process-trust',
|
||||
@@ -251,7 +250,6 @@ export enum IpcChannel {
|
||||
System_GetDeviceType = 'system:getDeviceType',
|
||||
System_GetHostname = 'system:getHostname',
|
||||
System_GetCpuName = 'system:getCpuName',
|
||||
System_CheckGitBash = 'system:checkGitBash',
|
||||
|
||||
// DevTools
|
||||
System_ToggleDevTools = 'system:toggleDevTools',
|
||||
|
||||
@@ -199,7 +199,7 @@ export enum FeedUrl {
|
||||
|
||||
export enum UpdateConfigUrl {
|
||||
GITHUB = 'https://raw.githubusercontent.com/CherryHQ/cherry-studio/refs/heads/x-files/app-upgrade-config/app-upgrade-config.json',
|
||||
GITCODE = 'https://raw.gitcode.com/CherryHQ/cherry-studio/raw/x-files%2Fapp-upgrade-config/app-upgrade-config.json'
|
||||
GITCODE = 'https://raw.gitcode.com/CherryHQ/cherry-studio/raw/x-files/app-upgrade-config/app-upgrade-config.json'
|
||||
}
|
||||
|
||||
// export enum UpgradeChannel {
|
||||
|
||||
60
packages/shared/data/cache/cacheSchemas.ts
vendored
60
packages/shared/data/cache/cacheSchemas.ts
vendored
@@ -26,6 +26,20 @@ export type UseCacheSchema = {
|
||||
'topic.active': CacheValueTypes.CacheTopic | null
|
||||
'topic.renaming': string[]
|
||||
'topic.newly_renamed': string[]
|
||||
|
||||
// Test keys (for dataRefactorTest window)
|
||||
// TODO: remove after testing
|
||||
'test-hook-memory-1': string
|
||||
'test-ttl-cache': string
|
||||
'test-protected-cache': string
|
||||
'test-deep-equal': { nested: { count: number }; tags: string[] }
|
||||
'test-performance': number
|
||||
'test-multi-hook': string
|
||||
'concurrent-test-1': number
|
||||
'concurrent-test-2': number
|
||||
'large-data-test': Record<string, any>
|
||||
'test-number-cache': number
|
||||
'test-object-cache': { name: string; count: number; active: boolean }
|
||||
}
|
||||
|
||||
export const DefaultUseCache: UseCacheSchema = {
|
||||
@@ -56,7 +70,21 @@ export const DefaultUseCache: UseCacheSchema = {
|
||||
// Topic management
|
||||
'topic.active': null,
|
||||
'topic.renaming': [],
|
||||
'topic.newly_renamed': []
|
||||
'topic.newly_renamed': [],
|
||||
|
||||
// Test keys (for dataRefactorTest window)
|
||||
// TODO: remove after testing
|
||||
'test-hook-memory-1': 'default-memory-value',
|
||||
'test-ttl-cache': 'test-ttl-cache',
|
||||
'test-protected-cache': 'protected-value',
|
||||
'test-deep-equal': { nested: { count: 0 }, tags: ['initial'] },
|
||||
'test-performance': 0,
|
||||
'test-multi-hook': 'hook-1-default',
|
||||
'concurrent-test-1': 0,
|
||||
'concurrent-test-2': 0,
|
||||
'large-data-test': {},
|
||||
'test-number-cache': 42,
|
||||
'test-object-cache': { name: 'test', count: 0, active: true }
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -64,10 +92,22 @@ export const DefaultUseCache: UseCacheSchema = {
|
||||
*/
|
||||
export type UseSharedCacheSchema = {
|
||||
'example-key': string
|
||||
|
||||
// Test keys (for dataRefactorTest window)
|
||||
// TODO: remove after testing
|
||||
'test-hook-shared-1': string
|
||||
'test-multi-hook': string
|
||||
'concurrent-shared': number
|
||||
}
|
||||
|
||||
export const DefaultUseSharedCache: UseSharedCacheSchema = {
|
||||
'example-key': 'example default value'
|
||||
'example-key': 'example default value',
|
||||
|
||||
// Test keys (for dataRefactorTest window)
|
||||
// TODO: remove after testing
|
||||
'concurrent-shared': 0,
|
||||
'test-hook-shared-1': 'default-shared-value',
|
||||
'test-multi-hook': 'hook-3-shared'
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -76,10 +116,24 @@ export const DefaultUseSharedCache: UseSharedCacheSchema = {
|
||||
*/
|
||||
export type RendererPersistCacheSchema = {
|
||||
'example-key': string
|
||||
|
||||
// Test keys (for dataRefactorTest window)
|
||||
// TODO: remove after testing
|
||||
'example-1': string
|
||||
'example-2': string
|
||||
'example-3': string
|
||||
'example-4': string
|
||||
}
|
||||
|
||||
export const DefaultRendererPersistCache: RendererPersistCacheSchema = {
|
||||
'example-key': 'example default value'
|
||||
'example-key': 'example default value',
|
||||
|
||||
// Test keys (for dataRefactorTest window)
|
||||
// TODO: remove after testing
|
||||
'example-1': 'example default value',
|
||||
'example-2': 'example default value',
|
||||
'example-3': 'example default value',
|
||||
'example-4': 'example default value'
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,123 +0,0 @@
|
||||
/**
|
||||
* Shared type definitions for the migration system
|
||||
*/
|
||||
|
||||
// Migration stages for UI flow
|
||||
export type MigrationStage =
|
||||
| 'introduction'
|
||||
| 'backup_required'
|
||||
| 'backup_progress'
|
||||
| 'backup_confirmed'
|
||||
| 'migration'
|
||||
| 'migration_completed'
|
||||
| 'completed'
|
||||
| 'error'
|
||||
|
||||
// Individual migrator status
|
||||
export type MigratorStatus = 'pending' | 'running' | 'completed' | 'failed'
|
||||
|
||||
// Migrator progress info for UI display
|
||||
export interface MigratorProgress {
|
||||
id: string
|
||||
name: string
|
||||
status: MigratorStatus
|
||||
error?: string
|
||||
}
|
||||
|
||||
// Overall migration progress
|
||||
export interface MigrationProgress {
|
||||
stage: MigrationStage
|
||||
overallProgress: number // 0-100
|
||||
currentMessage: string
|
||||
migrators: MigratorProgress[]
|
||||
error?: string
|
||||
}
|
||||
|
||||
// Prepare phase result
|
||||
export interface PrepareResult {
|
||||
success: boolean
|
||||
itemCount: number
|
||||
warnings?: string[]
|
||||
}
|
||||
|
||||
// Execute phase result
|
||||
export interface ExecuteResult {
|
||||
success: boolean
|
||||
processedCount: number
|
||||
error?: string
|
||||
}
|
||||
|
||||
// Validation error detail
|
||||
export interface ValidationError {
|
||||
key: string
|
||||
expected?: unknown
|
||||
actual?: unknown
|
||||
message: string
|
||||
}
|
||||
|
||||
// Validate phase result with count validation support
|
||||
export interface ValidateResult {
|
||||
success: boolean
|
||||
errors: ValidationError[]
|
||||
stats: {
|
||||
sourceCount: number
|
||||
targetCount: number
|
||||
skippedCount: number
|
||||
mismatchReason?: string
|
||||
}
|
||||
}
|
||||
|
||||
// Individual migrator result
|
||||
export interface MigratorResult {
|
||||
migratorId: string
|
||||
migratorName: string
|
||||
success: boolean
|
||||
recordsProcessed: number
|
||||
duration: number
|
||||
error?: string
|
||||
}
|
||||
|
||||
// Overall migration result
|
||||
export interface MigrationResult {
|
||||
success: boolean
|
||||
migratorResults: MigratorResult[]
|
||||
totalDuration: number
|
||||
error?: string
|
||||
}
|
||||
|
||||
// Migration status stored in app_state table
|
||||
export interface MigrationStatusValue {
|
||||
status: 'completed' | 'failed' | 'in_progress'
|
||||
completedAt?: number
|
||||
failedAt?: number
|
||||
version: string
|
||||
error?: string | null
|
||||
}
|
||||
|
||||
// IPC channels for migration communication
|
||||
export const MigrationIpcChannels = {
|
||||
// Status queries
|
||||
CheckNeeded: 'migration:check-needed',
|
||||
GetProgress: 'migration:get-progress',
|
||||
GetLastError: 'migration:get-last-error',
|
||||
GetUserDataPath: 'migration:get-user-data-path',
|
||||
|
||||
// Flow control
|
||||
Start: 'migration:start',
|
||||
ProceedToBackup: 'migration:proceed-to-backup',
|
||||
ShowBackupDialog: 'migration:show-backup-dialog',
|
||||
BackupCompleted: 'migration:backup-completed',
|
||||
StartMigration: 'migration:start-migration',
|
||||
Retry: 'migration:retry',
|
||||
Cancel: 'migration:cancel',
|
||||
Restart: 'migration:restart',
|
||||
|
||||
// Data transfer (Renderer -> Main)
|
||||
SendReduxData: 'migration:send-redux-data',
|
||||
DexieExportCompleted: 'migration:dexie-export-completed',
|
||||
WriteExportFile: 'migration:write-export-file',
|
||||
|
||||
// Progress broadcast (Main -> Renderer)
|
||||
Progress: 'migration:progress',
|
||||
ExportProgress: 'migration:export-progress'
|
||||
} as const
|
||||
@@ -1,5 +1,3 @@
|
||||
import * as z from 'zod'
|
||||
|
||||
import type { PreferenceSchemas } from './preferenceSchemas'
|
||||
|
||||
export type PreferenceDefaultScopeType = PreferenceSchemas['default']
|
||||
@@ -40,38 +38,6 @@ export type SelectionActionItem = {
|
||||
searchEngine?: string
|
||||
}
|
||||
|
||||
const SelectionBuiltinActionItemIdSchema = z.enum([
|
||||
'translate',
|
||||
'explain',
|
||||
'summary',
|
||||
'search',
|
||||
'copy',
|
||||
'refine',
|
||||
'quote'
|
||||
])
|
||||
|
||||
export type SelectionBuiltinActionItemId = z.infer<typeof SelectionBuiltinActionItemIdSchema>
|
||||
|
||||
export function isBuiltinActionItemId(id: string): id is SelectionBuiltinActionItemId {
|
||||
return SelectionBuiltinActionItemIdSchema.safeParse(id).success
|
||||
}
|
||||
|
||||
export interface SelectionBuiltinActionItem extends SelectionActionItem {
|
||||
id: SelectionBuiltinActionItemId
|
||||
isBuiltIn: true
|
||||
assistantId?: never
|
||||
}
|
||||
|
||||
export function isSelectionBuiltinActionItem(
|
||||
item: SelectionActionItem | null | undefined
|
||||
): item is SelectionBuiltinActionItem {
|
||||
if (!item) {
|
||||
return false
|
||||
}
|
||||
|
||||
return isBuiltinActionItemId(item.id)
|
||||
}
|
||||
|
||||
export enum ThemeMode {
|
||||
light = 'light',
|
||||
dark = 'dark',
|
||||
|
||||
@@ -4,34 +4,3 @@ export const defaultAppHeaders = () => {
|
||||
'X-Title': 'Cherry Studio'
|
||||
}
|
||||
}
|
||||
|
||||
// Following two function are not being used for now.
|
||||
// I may use them in the future, so just keep them commented. - by eurfelux
|
||||
|
||||
/**
|
||||
* Converts an `undefined` value to `null`, otherwise returns the value as-is.
|
||||
* @param value - The value to check
|
||||
* @returns `null` if the input is `undefined`; otherwise the input value
|
||||
*/
|
||||
|
||||
// export function toNullIfUndefined<T>(value: T | undefined): T | null {
|
||||
// if (value === undefined) {
|
||||
// return null
|
||||
// } else {
|
||||
// return value
|
||||
// }
|
||||
// }
|
||||
|
||||
/**
|
||||
* Converts a `null` value to `undefined`, otherwise returns the value as-is.
|
||||
* @param value - The value to check
|
||||
* @returns `undefined` if the input is `null`; otherwise the input value
|
||||
*/
|
||||
|
||||
// export function toUndefinedIfNull<T>(value: T | null): T | undefined {
|
||||
// if (value === null) {
|
||||
// return undefined
|
||||
// } else {
|
||||
// return value
|
||||
// }
|
||||
// }
|
||||
|
||||
9
packages/ui/icons/DMXAPI-to-img.svg
Normal file
9
packages/ui/icons/DMXAPI-to-img.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 2.9 MiB |
@@ -53,9 +53,7 @@
|
||||
"@radix-ui/react-popover": "^1.1.15",
|
||||
"@radix-ui/react-radio-group": "^1.3.8",
|
||||
"@radix-ui/react-select": "^2.2.6",
|
||||
"@radix-ui/react-slot": "^1.2.4",
|
||||
"@radix-ui/react-tabs": "^1.1.13",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"@radix-ui/react-slot": "^1.2.3",
|
||||
"@radix-ui/react-use-controllable-state": "^1.2.2",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
|
||||
86
packages/ui/src/components/icons/FileIcons/index.tsx
Normal file
86
packages/ui/src/components/icons/FileIcons/index.tsx
Normal file
@@ -0,0 +1,86 @@
|
||||
// Original path: src/renderer/src/components/Icons/FileIcons.tsx
|
||||
import type { CSSProperties, SVGProps } from 'react'
|
||||
|
||||
interface BaseFileIconProps extends SVGProps<SVGSVGElement> {
|
||||
size?: string | number
|
||||
text?: string
|
||||
}
|
||||
|
||||
const textStyle: CSSProperties = {
|
||||
fontStyle: 'italic',
|
||||
fontSize: '7.70985px',
|
||||
lineHeight: 0.8,
|
||||
fontFamily: "'Times New Roman'",
|
||||
textAlign: 'center',
|
||||
writingMode: 'horizontal-tb',
|
||||
direction: 'ltr',
|
||||
textAnchor: 'middle',
|
||||
fill: 'none',
|
||||
stroke: '#000000',
|
||||
strokeWidth: '0.289119',
|
||||
strokeLinejoin: 'round',
|
||||
strokeDasharray: 'none'
|
||||
}
|
||||
|
||||
const tspanStyle: CSSProperties = {
|
||||
fontStyle: 'normal',
|
||||
fontVariant: 'normal',
|
||||
fontWeight: 'normal',
|
||||
fontStretch: 'condensed',
|
||||
fontSize: '7.70985px',
|
||||
lineHeight: 0.8,
|
||||
fontFamily: 'Arial',
|
||||
fill: '#000000',
|
||||
fillOpacity: 1,
|
||||
strokeWidth: '0.289119',
|
||||
strokeDasharray: 'none'
|
||||
}
|
||||
|
||||
const BaseFileIcon = ({ size = '1.1em', text = 'SVG', ...props }: BaseFileIconProps) => (
|
||||
<svg
|
||||
width={size}
|
||||
height={size}
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
version="1.1"
|
||||
id="svg4"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
{...props}>
|
||||
<defs id="defs4" />
|
||||
<path d="m 14,2 v 4 a 2,2 0 0 0 2,2 h 4" id="path3" />
|
||||
<path d="M 15,2 H 6 A 2,2 0 0 0 4,4 v 16 a 2,2 0 0 0 2,2 h 12 a 2,2 0 0 0 2,-2 V 7 Z" id="path4" />
|
||||
<text
|
||||
xmlSpace="preserve"
|
||||
style={textStyle}
|
||||
x="12.478625"
|
||||
y="17.170216"
|
||||
id="text4"
|
||||
transform="scale(0.96196394,1.03954)">
|
||||
<tspan id="tspan4" x="12.478625" y="17.170216" style={tspanStyle}>
|
||||
{text}
|
||||
</tspan>
|
||||
</text>
|
||||
</svg>
|
||||
)
|
||||
|
||||
/**
|
||||
* @deprecated 此图标使用频率仅为 1 次,不符合 UI 库提取标准(需 ≥3 次)
|
||||
* 计划在未来版本中移除。
|
||||
*
|
||||
* This icon has only 1 usage and does not meet the UI library extraction criteria (requires ≥3 usages).
|
||||
* Planned for removal in future versions.
|
||||
*/
|
||||
export const FileSvgIcon = (props: Omit<BaseFileIconProps, 'text'>) => <BaseFileIcon text="SVG" {...props} />
|
||||
|
||||
/**
|
||||
* @deprecated 此图标使用频率仅为 2 次,不符合 UI 库提取标准(需 ≥3 次)
|
||||
* 计划在未来版本中移除。
|
||||
*
|
||||
* This icon has only 2 usages and does not meet the UI library extraction criteria (requires ≥3 usages).
|
||||
* Planned for removal in future versions.
|
||||
*/
|
||||
export const FilePngIcon = (props: Omit<BaseFileIconProps, 'text'>) => <BaseFileIcon text="PNG" {...props} />
|
||||
53
packages/ui/src/components/icons/Icon/index.tsx
Normal file
53
packages/ui/src/components/icons/Icon/index.tsx
Normal file
@@ -0,0 +1,53 @@
|
||||
import type { LucideIcon } from 'lucide-react'
|
||||
import {
|
||||
AlignLeft,
|
||||
Copy,
|
||||
Eye,
|
||||
Pencil,
|
||||
RefreshCw,
|
||||
RotateCcw,
|
||||
ScanLine,
|
||||
Search,
|
||||
Trash,
|
||||
WrapText,
|
||||
Wrench
|
||||
} from 'lucide-react'
|
||||
import React from 'react'
|
||||
|
||||
// 创建一个 Icon 工厂函数
|
||||
export function createIcon(IconComponent: LucideIcon, defaultSize: string | number = '1rem') {
|
||||
const Icon = ({
|
||||
ref,
|
||||
...props
|
||||
}: React.ComponentProps<typeof IconComponent> & { ref?: React.RefObject<SVGSVGElement | null> }) => (
|
||||
<IconComponent ref={ref} size={defaultSize} {...props} />
|
||||
)
|
||||
Icon.displayName = `Icon(${IconComponent.displayName || IconComponent.name})`
|
||||
return Icon
|
||||
}
|
||||
|
||||
// 预定义的常用图标(向后兼容,只导入需要的图标)
|
||||
export const CopyIcon = createIcon(Copy)
|
||||
export const DeleteIcon = createIcon(Trash)
|
||||
export const EditIcon = createIcon(Pencil)
|
||||
export const RefreshIcon = createIcon(RefreshCw)
|
||||
export const ResetIcon = createIcon(RotateCcw)
|
||||
|
||||
/**
|
||||
* @deprecated 此组件使用频率为 0 次,不符合 UI 库提取标准(需 ≥3 次)
|
||||
* 计划在未来版本中移除。虽然主项目中有本地副本,但完全未被导入使用。
|
||||
*
|
||||
* This icon has 0 usages and does not meet the UI library extraction criteria (requires ≥3 usages).
|
||||
* Planned for removal in future versions.
|
||||
*/
|
||||
export const ToolIcon = createIcon(Wrench)
|
||||
|
||||
export const VisionIcon = createIcon(Eye)
|
||||
export const WebSearchIcon = createIcon(Search)
|
||||
export const WrapIcon = createIcon(WrapText)
|
||||
export const UnWrapIcon = createIcon(AlignLeft)
|
||||
export const OcrIcon = createIcon(ScanLine)
|
||||
|
||||
// 导出 createIcon 以便用户自行创建图标组件
|
||||
export type { LucideIcon }
|
||||
export type { LucideProps } from 'lucide-react'
|
||||
@@ -0,0 +1,37 @@
|
||||
/**
|
||||
* @deprecated 此组件使用频率为 0 次,不符合 UI 库提取标准(需 ≥3 次)
|
||||
* 计划在未来版本中移除。虽然主项目中有本地副本,但完全未被导入使用。
|
||||
*
|
||||
* This component has 0 usages and does not meet the UI library extraction criteria (requires ≥3 usages).
|
||||
* Planned for removal in future versions.
|
||||
*/
|
||||
|
||||
// Original path: src/renderer/src/components/Icons/SvgSpinners180Ring.tsx
|
||||
import type { SVGProps } from 'react'
|
||||
|
||||
import { cn } from '../../../utils'
|
||||
|
||||
interface SvgSpinners180RingProps extends SVGProps<SVGSVGElement> {
|
||||
size?: number | string
|
||||
}
|
||||
|
||||
export function SvgSpinners180Ring(props: SvgSpinners180RingProps) {
|
||||
const { size = '1em', className, ...svgProps } = props
|
||||
|
||||
return (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width={size}
|
||||
height={size}
|
||||
viewBox="0 0 24 24"
|
||||
{...svgProps}
|
||||
className={cn('animate-spin', className)}>
|
||||
{/* Icon from SVG Spinners by Utkarsh Verma - https://github.com/n3r4zzurr0/svg-spinners/blob/main/LICENSE */}
|
||||
<path
|
||||
fill="currentColor"
|
||||
d="M12,4a8,8,0,0,1,7.89,6.7A1.53,1.53,0,0,0,21.38,12h0a1.5,1.5,0,0,0,1.48-1.75,11,11,0,0,0-21.72,0A1.5,1.5,0,0,0,2.62,12h0a1.53,1.53,0,0,0,1.49-1.3A8,8,0,0,1,12,4Z"></path>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export default SvgSpinners180Ring
|
||||
32
packages/ui/src/components/icons/ToolsCallingIcon/index.tsx
Normal file
32
packages/ui/src/components/icons/ToolsCallingIcon/index.tsx
Normal file
@@ -0,0 +1,32 @@
|
||||
/**
|
||||
* @deprecated 此组件使用频率仅为 1 次,不符合 UI 库提取标准(需 ≥3 次)
|
||||
* 计划在未来版本中移除。建议直接使用 lucide-react 的 Wrench 图标。
|
||||
*
|
||||
* This component has only 1 usage and does not meet the UI library extraction criteria (requires ≥3 usages).
|
||||
* Planned for removal in future versions. Consider using Wrench icon from lucide-react directly.
|
||||
*/
|
||||
|
||||
// Original: src/renderer/src/components/Icons/ToolsCallingIcon.tsx
|
||||
import { Tooltip, type TooltipProps } from '@heroui/react'
|
||||
import { Wrench } from 'lucide-react'
|
||||
import React from 'react'
|
||||
|
||||
import { cn } from '../../../utils'
|
||||
|
||||
interface ToolsCallingIconProps extends React.HTMLAttributes<HTMLDivElement> {
|
||||
className?: string
|
||||
iconClassName?: string
|
||||
TooltipProps?: TooltipProps
|
||||
}
|
||||
|
||||
const ToolsCallingIcon = ({ className, iconClassName, TooltipProps, ...props }: ToolsCallingIconProps) => {
|
||||
return (
|
||||
<div className={cn('flex justify-center items-center', className)} {...props}>
|
||||
<Tooltip {...TooltipProps}>
|
||||
<Wrench className={cn('w-4 h-4 mr-1.5 text-[#00b96b]', iconClassName)} />
|
||||
</Tooltip>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default ToolsCallingIcon
|
||||
40
packages/ui/src/components/icons/logos/dmxapiToImg.tsx
Normal file
40
packages/ui/src/components/icons/logos/dmxapiToImg.tsx
Normal file
File diff suppressed because one or more lines are too long
@@ -25,6 +25,7 @@ export { Cohere } from './cohere'
|
||||
export { Dashscope } from './dashscope'
|
||||
export { Deepseek } from './deepseek'
|
||||
export { Dmxapi } from './dmxapi'
|
||||
export { DmxapiToImg } from './dmxapiToImg'
|
||||
export { Doc2x } from './doc2x'
|
||||
export { Doubao } from './doubao'
|
||||
export { Exa } from './exa'
|
||||
|
||||
@@ -22,11 +22,43 @@ export { default as Scrollbar } from './composites/Scrollbar'
|
||||
export { default as ThinkingEffect } from './composites/ThinkingEffect'
|
||||
|
||||
// Icon Components
|
||||
export { FilePngIcon, FileSvgIcon } from './icons/FileIcons'
|
||||
// export type { LucideIcon, LucideProps } from './icons/Icon'
|
||||
// export {
|
||||
// CopyIcon,
|
||||
// createIcon,
|
||||
// DeleteIcon,
|
||||
// EditIcon,
|
||||
// OcrIcon,
|
||||
// RefreshIcon,
|
||||
// ResetIcon,
|
||||
// ToolIcon,
|
||||
// UnWrapIcon,
|
||||
// VisionIcon,
|
||||
// WebSearchIcon,
|
||||
// WrapIcon
|
||||
// } from './icons/Icon'
|
||||
export { default as SvgSpinners180Ring } from './icons/SvgSpinners180Ring'
|
||||
export { default as ToolsCallingIcon } from './icons/ToolsCallingIcon'
|
||||
|
||||
// Brand Logo Icons (Colorful brand logo icons - 81 items)
|
||||
// Recommended to import using '@cherrystudio/ui/icons' path
|
||||
// Brand Logo Icons (彩色品牌 Logo 图标 - 84个)
|
||||
// 推荐使用 '@cherrystudio/ui/icons' 路径导入
|
||||
export * from './icons'
|
||||
|
||||
// /* Selector Components */
|
||||
// export { default as Selector } from './primitives/select'
|
||||
// export { default as SearchableSelector } from './primitives/Selector/SearchableSelector'
|
||||
// export type {
|
||||
// MultipleSearchableSelectorProps,
|
||||
// MultipleSelectorProps,
|
||||
// SearchableSelectorItem,
|
||||
// SearchableSelectorProps,
|
||||
// SelectorItem,
|
||||
// SelectorProps,
|
||||
// SingleSearchableSelectorProps,
|
||||
// SingleSelectorProps
|
||||
// } from './primitives/Selector/types'
|
||||
|
||||
/* Additional Composite Components */
|
||||
// CodeEditor
|
||||
export {
|
||||
@@ -52,16 +84,12 @@ export { default as ImageToolButton } from './composites/ImageToolButton'
|
||||
export { Sortable } from './composites/Sortable'
|
||||
|
||||
/* Shadcn Primitive Components */
|
||||
export * from './primitives/breadcrumb'
|
||||
export * from './primitives/button'
|
||||
export * from './primitives/checkbox'
|
||||
export * from './primitives/combobox'
|
||||
export * from './primitives/command'
|
||||
export * from './primitives/dialog'
|
||||
export * from './primitives/kbd'
|
||||
export * from './primitives/pagination'
|
||||
export * from './primitives/popover'
|
||||
export * from './primitives/radioGroup'
|
||||
export * from './primitives/select'
|
||||
export * from './primitives/shadcn-io/dropzone'
|
||||
export * from './primitives/tabs'
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
import { cn } from '@cherrystudio/ui/utils/index'
|
||||
import { Slot } from '@radix-ui/react-slot'
|
||||
import { ChevronRight, MoreHorizontal } from 'lucide-react'
|
||||
import * as React from 'react'
|
||||
|
||||
function Breadcrumb({ ...props }: React.ComponentProps<'nav'>) {
|
||||
return <nav aria-label="breadcrumb" data-slot="breadcrumb" {...props} />
|
||||
}
|
||||
|
||||
function BreadcrumbList({ className, ...props }: React.ComponentProps<'ol'>) {
|
||||
return (
|
||||
<ol
|
||||
data-slot="breadcrumb-list"
|
||||
className={cn(
|
||||
'text-muted-foreground flex flex-wrap items-center gap-2 text-sm break-words sm:gap-2.5',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function BreadcrumbItem({ className, ...props }: React.ComponentProps<'li'>) {
|
||||
return <li data-slot="breadcrumb-item" className={cn('inline-flex items-center gap-1.5', className)} {...props} />
|
||||
}
|
||||
|
||||
function BreadcrumbLink({
|
||||
asChild,
|
||||
className,
|
||||
...props
|
||||
}: React.ComponentProps<'a'> & {
|
||||
asChild?: boolean
|
||||
}) {
|
||||
const Comp = asChild ? Slot : 'a'
|
||||
|
||||
return (
|
||||
<Comp data-slot="breadcrumb-link" className={cn('hover:text-foreground transition-colors', className)} {...props} />
|
||||
)
|
||||
}
|
||||
|
||||
function BreadcrumbPage({ className, ...props }: React.ComponentProps<'span'>) {
|
||||
return (
|
||||
<span
|
||||
data-slot="breadcrumb-page"
|
||||
role="link"
|
||||
aria-disabled="true"
|
||||
aria-current="page"
|
||||
className={cn('text-foreground font-normal', className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function BreadcrumbSeparator({ children, className, ...props }: React.ComponentProps<'li'>) {
|
||||
return (
|
||||
<li
|
||||
data-slot="breadcrumb-separator"
|
||||
role="presentation"
|
||||
aria-hidden="true"
|
||||
className={cn('[&>svg]:size-3.5', className)}
|
||||
{...props}>
|
||||
{children ?? <ChevronRight />}
|
||||
</li>
|
||||
)
|
||||
}
|
||||
|
||||
function BreadcrumbEllipsis({ className, ...props }: React.ComponentProps<'span'>) {
|
||||
return (
|
||||
<span
|
||||
data-slot="breadcrumb-ellipsis"
|
||||
role="presentation"
|
||||
aria-hidden="true"
|
||||
className={cn('flex size-9 items-center justify-center', className)}
|
||||
{...props}>
|
||||
<MoreHorizontal className="size-4" />
|
||||
<span className="sr-only">More</span>
|
||||
</span>
|
||||
)
|
||||
}
|
||||
|
||||
export {
|
||||
Breadcrumb,
|
||||
BreadcrumbEllipsis,
|
||||
BreadcrumbItem,
|
||||
BreadcrumbLink,
|
||||
BreadcrumbList,
|
||||
BreadcrumbPage,
|
||||
BreadcrumbSeparator
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
import { cn } from '@cherrystudio/ui/utils/index'
|
||||
|
||||
function Kbd({ className, ...props }: React.ComponentProps<'kbd'>) {
|
||||
return (
|
||||
<kbd
|
||||
data-slot="kbd"
|
||||
className={cn(
|
||||
'bg-primary/10 text-primary pointer-events-none inline-flex w-fit min-w-5 items-center justify-center gap-1 rounded-3xs p-1 font-sans text-xs font-medium select-none',
|
||||
"[&_svg:not([class*='size-'])]:size-3",
|
||||
'[[data-slot=tooltip-content]_&]:bg-background/20 [[data-slot=tooltip-content]_&]:text-background dark:[[data-slot=tooltip-content]_&]:bg-background/10',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function KbdGroup({ className, ...props }: React.ComponentProps<'div'>) {
|
||||
return <kbd data-slot="kbd-group" className={cn('inline-flex items-center gap-1', className)} {...props} />
|
||||
}
|
||||
|
||||
export { Kbd, KbdGroup }
|
||||
@@ -1,99 +0,0 @@
|
||||
import type { Button } from '@cherrystudio/ui/components/primitives/button'
|
||||
import { buttonVariants } from '@cherrystudio/ui/components/primitives/button'
|
||||
import { cn } from '@cherrystudio/ui/utils/index'
|
||||
import { ChevronLeftIcon, ChevronRightIcon, MoreHorizontalIcon } from 'lucide-react'
|
||||
import * as React from 'react'
|
||||
|
||||
function Pagination({ className, ...props }: React.ComponentProps<'nav'>) {
|
||||
return (
|
||||
<nav
|
||||
role="navigation"
|
||||
aria-label="pagination"
|
||||
data-slot="pagination"
|
||||
className={cn('mx-auto flex w-full justify-center', className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function PaginationContent({ className, ...props }: React.ComponentProps<'ul'>) {
|
||||
return <ul data-slot="pagination-content" className={cn('flex flex-row items-center gap-1', className)} {...props} />
|
||||
}
|
||||
|
||||
function PaginationItem({ ...props }: React.ComponentProps<'li'>) {
|
||||
return <li data-slot="pagination-item" {...props} />
|
||||
}
|
||||
|
||||
type PaginationLinkProps = {
|
||||
isActive?: boolean
|
||||
} & Pick<React.ComponentProps<typeof Button>, 'size'> &
|
||||
React.ComponentProps<'a'>
|
||||
|
||||
function PaginationLink({ className, isActive, size = 'icon', ...props }: PaginationLinkProps) {
|
||||
return (
|
||||
<a
|
||||
aria-current={isActive ? 'page' : undefined}
|
||||
data-slot="pagination-link"
|
||||
data-active={isActive}
|
||||
className={cn(
|
||||
buttonVariants({
|
||||
variant: isActive ? 'outline' : 'ghost',
|
||||
size
|
||||
}),
|
||||
'text-foreground hover:text-primary hover:shadow-none hover:bg-primary/10 rounded-3xs',
|
||||
isActive && 'bg-background text-primary',
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function PaginationPrevious({ className, ...props }: React.ComponentProps<typeof PaginationLink>) {
|
||||
return (
|
||||
<PaginationLink
|
||||
aria-label="Go to previous page"
|
||||
size="default"
|
||||
className={cn('gap-1 px-2.5 sm:pl-2.5', className)}
|
||||
{...props}>
|
||||
<ChevronLeftIcon />
|
||||
<span className="hidden sm:block">Previous</span>
|
||||
</PaginationLink>
|
||||
)
|
||||
}
|
||||
|
||||
function PaginationNext({ className, ...props }: React.ComponentProps<typeof PaginationLink>) {
|
||||
return (
|
||||
<PaginationLink
|
||||
aria-label="Go to next page"
|
||||
size="default"
|
||||
className={cn('gap-1 px-2.5 sm:pr-2.5', className)}
|
||||
{...props}>
|
||||
<span className="hidden sm:block">Next</span>
|
||||
<ChevronRightIcon />
|
||||
</PaginationLink>
|
||||
)
|
||||
}
|
||||
|
||||
function PaginationEllipsis({ className, ...props }: React.ComponentProps<'span'>) {
|
||||
return (
|
||||
<span
|
||||
aria-hidden
|
||||
data-slot="pagination-ellipsis"
|
||||
className={cn('flex size-9 items-center justify-center', className)}
|
||||
{...props}>
|
||||
<MoreHorizontalIcon className="size-4" />
|
||||
<span className="sr-only">More pages</span>
|
||||
</span>
|
||||
)
|
||||
}
|
||||
|
||||
export {
|
||||
Pagination,
|
||||
PaginationContent,
|
||||
PaginationEllipsis,
|
||||
PaginationItem,
|
||||
PaginationLink,
|
||||
PaginationNext,
|
||||
PaginationPrevious
|
||||
}
|
||||
@@ -1,143 +0,0 @@
|
||||
import { cn } from '@cherrystudio/ui/utils/index'
|
||||
import * as TabsPrimitive from '@radix-ui/react-tabs'
|
||||
import { cva } from 'class-variance-authority'
|
||||
import * as React from 'react'
|
||||
|
||||
const TabsContext = React.createContext<{
|
||||
variant?: 'default' | 'line'
|
||||
orientation?: 'horizontal' | 'vertical'
|
||||
}>({
|
||||
variant: 'default',
|
||||
orientation: 'horizontal'
|
||||
})
|
||||
|
||||
function Tabs({
|
||||
className,
|
||||
variant = 'default',
|
||||
orientation = 'horizontal',
|
||||
...props
|
||||
}: React.ComponentProps<typeof TabsPrimitive.Root> & {
|
||||
variant?: 'default' | 'line'
|
||||
}) {
|
||||
return (
|
||||
<TabsContext value={{ variant, orientation }}>
|
||||
<TabsPrimitive.Root
|
||||
data-slot="tabs"
|
||||
orientation={orientation}
|
||||
className={cn('flex flex-col gap-2', orientation === 'vertical' && 'flex-row', className)}
|
||||
{...props}
|
||||
/>
|
||||
</TabsContext>
|
||||
)
|
||||
}
|
||||
|
||||
const tabsListVariants = cva('inline-flex items-center justify-center', {
|
||||
variants: {
|
||||
variant: {
|
||||
default: 'bg-muted text-muted-foreground h-9 w-fit rounded-lg p-[3px]',
|
||||
line: 'bg-transparent gap-4 justify-start border-b-0 p-0'
|
||||
},
|
||||
orientation: {
|
||||
horizontal: 'flex-row',
|
||||
vertical: 'flex-col h-fit'
|
||||
}
|
||||
},
|
||||
compoundVariants: [
|
||||
{
|
||||
variant: 'default',
|
||||
orientation: 'vertical',
|
||||
class: 'h-fit w-fit flex-col'
|
||||
},
|
||||
{
|
||||
variant: 'line',
|
||||
orientation: 'vertical',
|
||||
class: 'flex-col items-stretch pb-0'
|
||||
}
|
||||
],
|
||||
defaultVariants: {
|
||||
variant: 'default',
|
||||
orientation: 'horizontal'
|
||||
}
|
||||
})
|
||||
|
||||
function TabsList({ className, ...props }: React.ComponentProps<typeof TabsPrimitive.List>) {
|
||||
const { variant, orientation } = React.use(TabsContext)
|
||||
return (
|
||||
<TabsPrimitive.List
|
||||
data-slot="tabs-list"
|
||||
className={cn(tabsListVariants({ variant, orientation }), className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
const tabsTriggerVariants = cva(
|
||||
[
|
||||
'inline-flex items-center justify-center whitespace-nowrap text-sm font-medium',
|
||||
'disabled:pointer-events-none disabled:opacity-50',
|
||||
'transition-all',
|
||||
'[&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*="size-"])]:size-4'
|
||||
],
|
||||
{
|
||||
variants: {
|
||||
variant: {
|
||||
default: [
|
||||
'h-[calc(100%-1px)] flex-1 gap-1.5 px-2 py-1 rounded-md',
|
||||
'text-foreground border border-transparent',
|
||||
'dark:text-muted-foreground',
|
||||
'focus-visible:ring-[3px] focus-visible:outline-1 focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:outline-ring',
|
||||
'data-[state=active]:bg-background data-[state=active]:shadow-sm',
|
||||
'dark:data-[state=active]:text-foreground dark:data-[state=active]:border-input dark:data-[state=active]:bg-input/30'
|
||||
],
|
||||
line: [
|
||||
'relative gap-2 px-2 py-2',
|
||||
'font-normal text-muted-foreground hover:text-foreground',
|
||||
'focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2',
|
||||
'data-[state=active]:text-primary',
|
||||
'after:absolute after:bg-primary/10 after:rounded-full',
|
||||
'data-[state=active]:after:bg-primary'
|
||||
]
|
||||
},
|
||||
orientation: {
|
||||
horizontal: '',
|
||||
vertical: 'rounded-full'
|
||||
}
|
||||
},
|
||||
compoundVariants: [
|
||||
{
|
||||
variant: 'line',
|
||||
orientation: 'horizontal',
|
||||
class: 'after:bottom-0 after:left-0 after:h-[2px] after:w-full data-[state=active]:after:h-[4px]'
|
||||
},
|
||||
{
|
||||
variant: 'line',
|
||||
orientation: 'vertical',
|
||||
class: [
|
||||
'justify-center after:bottom-0 after:left-0 after:h-[4px] after:w-full after:bg-transparent data-[state=active]:after:bg-primary',
|
||||
'hover:text-primary hover:bg-primary/10'
|
||||
]
|
||||
}
|
||||
],
|
||||
defaultVariants: {
|
||||
variant: 'default',
|
||||
orientation: 'horizontal'
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
function TabsTrigger({ className, ...props }: React.ComponentProps<typeof TabsPrimitive.Trigger>) {
|
||||
const { variant, orientation } = React.use(TabsContext)
|
||||
return (
|
||||
<TabsPrimitive.Trigger
|
||||
data-slot="tabs-trigger"
|
||||
className={cn(tabsTriggerVariants({ variant, orientation }), className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function TabsContent({ className, ...props }: React.ComponentProps<typeof TabsPrimitive.Content>) {
|
||||
return <TabsPrimitive.Content data-slot="tabs-content" className={cn('flex-1 outline-none', className)} {...props} />
|
||||
}
|
||||
|
||||
export { Tabs, TabsContent, TabsList, TabsTrigger }
|
||||
@@ -1,78 +0,0 @@
|
||||
import { cn } from '@cherrystudio/ui/utils/index'
|
||||
import * as TooltipPrimitive from '@radix-ui/react-tooltip'
|
||||
import * as React from 'react'
|
||||
|
||||
export type TooltipProps = React.ComponentProps<typeof TooltipPrimitive.Root>
|
||||
export type TooltipTriggerProps = React.ComponentProps<typeof TooltipPrimitive.Trigger>
|
||||
export type TooltipContentProps = React.ComponentProps<typeof TooltipPrimitive.Content>
|
||||
|
||||
function TooltipProvider({ delayDuration = 0, ...props }: React.ComponentProps<typeof TooltipPrimitive.Provider>) {
|
||||
// eslint-disable-next-line
|
||||
return <TooltipPrimitive.Provider data-slot="tooltip-provider" delayDuration={delayDuration} {...props} />
|
||||
}
|
||||
|
||||
function Tooltip({ delayDuration = 0, ...props }: TooltipProps) {
|
||||
return (
|
||||
<TooltipProvider delayDuration={delayDuration}>
|
||||
<TooltipPrimitive.Root data-slot="tooltip" delayDuration={delayDuration} {...props} />
|
||||
</TooltipProvider>
|
||||
)
|
||||
}
|
||||
|
||||
function TooltipTrigger({ ...props }: TooltipTriggerProps) {
|
||||
return <TooltipPrimitive.Trigger data-slot="tooltip-trigger" {...props} />
|
||||
}
|
||||
|
||||
function TooltipContent({ className, sideOffset = 0, children, ...props }: TooltipContentProps) {
|
||||
return (
|
||||
<TooltipPrimitive.Portal>
|
||||
<TooltipPrimitive.Content
|
||||
data-slot="tooltip-content"
|
||||
sideOffset={sideOffset}
|
||||
className={cn(
|
||||
'bg-foreground text-background animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 w-fit origin-(--radix-tooltip-content-transform-origin) rounded-md px-3 py-1.5 text-xs text-balance',
|
||||
className
|
||||
)}
|
||||
{...props}>
|
||||
{children}
|
||||
<TooltipPrimitive.Arrow className="bg-foreground fill-foreground z-50 size-2.5 translate-y-[calc(-50%_-_2px)] rotate-45 rounded-[2px]" />
|
||||
</TooltipPrimitive.Content>
|
||||
</TooltipPrimitive.Portal>
|
||||
)
|
||||
}
|
||||
|
||||
interface NormalTooltipProps extends TooltipProps {
|
||||
content: React.ReactNode
|
||||
side?: TooltipContentProps['side']
|
||||
align?: TooltipContentProps['align']
|
||||
sideOffset?: TooltipContentProps['sideOffset']
|
||||
className?: string
|
||||
asChild?: boolean
|
||||
triggerProps?: Omit<TooltipTriggerProps, 'children'>
|
||||
contentProps?: TooltipContentProps
|
||||
}
|
||||
|
||||
const NormalTooltip = ({
|
||||
children,
|
||||
content,
|
||||
side,
|
||||
align,
|
||||
sideOffset,
|
||||
asChild = true,
|
||||
triggerProps,
|
||||
contentProps,
|
||||
...tooltipProps
|
||||
}: NormalTooltipProps) => {
|
||||
return (
|
||||
<Tooltip {...tooltipProps}>
|
||||
<TooltipTrigger asChild={asChild} {...triggerProps}>
|
||||
{children}
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side={side} align={align} sideOffset={sideOffset} {...contentProps}>
|
||||
{content}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
export { NormalTooltip, Tooltip, TooltipContent, TooltipProvider, TooltipTrigger }
|
||||
270
packages/ui/stories/components/icons/FileIcons.stories.tsx
Normal file
270
packages/ui/stories/components/icons/FileIcons.stories.tsx
Normal file
@@ -0,0 +1,270 @@
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
|
||||
import { FilePngIcon, FileSvgIcon } from '../../../src/components/icons/FileIcons'
|
||||
|
||||
// Create a dummy component for the story
|
||||
const FileIconsShowcase = () => <div />
|
||||
|
||||
const meta: Meta<typeof FileIconsShowcase> = {
|
||||
title: 'Components/Icons/FileIcons',
|
||||
component: FileIconsShowcase,
|
||||
parameters: {
|
||||
layout: 'centered'
|
||||
},
|
||||
tags: ['autodocs'],
|
||||
argTypes: {
|
||||
size: {
|
||||
description: '图标大小',
|
||||
control: { type: 'text' },
|
||||
defaultValue: '1.1em'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default meta
|
||||
type Story = StoryObj<typeof meta>
|
||||
|
||||
// Basic File Icons
|
||||
export const BasicFileIcons: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">文件类型图标 (默认尺寸: 1.1em)</h3>
|
||||
<div className="flex items-center gap-6">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon />
|
||||
<span className="text-xs text-gray-600">SVG 文件</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon />
|
||||
<span className="text-xs text-gray-600">PNG 文件</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Different Sizes
|
||||
export const DifferentSizes: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">不同尺寸的 SVG 图标</h3>
|
||||
<div className="flex items-end gap-4">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="16" />
|
||||
<span className="text-xs text-gray-600">16px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="24" />
|
||||
<span className="text-xs text-gray-600">24px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="32" />
|
||||
<span className="text-xs text-gray-600">32px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="48" />
|
||||
<span className="text-xs text-gray-600">48px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="64" />
|
||||
<span className="text-xs text-gray-600">64px</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">不同尺寸的 PNG 图标</h3>
|
||||
<div className="flex items-end gap-4">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="16" />
|
||||
<span className="text-xs text-gray-600">16px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="24" />
|
||||
<span className="text-xs text-gray-600">24px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="32" />
|
||||
<span className="text-xs text-gray-600">32px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="48" />
|
||||
<span className="text-xs text-gray-600">48px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="64" />
|
||||
<span className="text-xs text-gray-600">64px</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Custom Colors
|
||||
export const CustomColors: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">自定义颜色 - SVG 图标</h3>
|
||||
<div className="flex items-center gap-4">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="32" color="#3B82F6" />
|
||||
<span className="text-xs text-gray-600">蓝色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="32" color="#10B981" />
|
||||
<span className="text-xs text-gray-600">绿色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="32" color="#F59E0B" />
|
||||
<span className="text-xs text-gray-600">橙色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="32" color="#EF4444" />
|
||||
<span className="text-xs text-gray-600">红色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FileSvgIcon size="32" color="#8B5CF6" />
|
||||
<span className="text-xs text-gray-600">紫色</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">自定义颜色 - PNG 图标</h3>
|
||||
<div className="flex items-center gap-4">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="32" color="#3B82F6" />
|
||||
<span className="text-xs text-gray-600">蓝色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="32" color="#10B981" />
|
||||
<span className="text-xs text-gray-600">绿色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="32" color="#F59E0B" />
|
||||
<span className="text-xs text-gray-600">橙色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="32" color="#EF4444" />
|
||||
<span className="text-xs text-gray-600">红色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<FilePngIcon size="32" color="#8B5CF6" />
|
||||
<span className="text-xs text-gray-600">紫色</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// In File List Context
|
||||
export const InFileListContext: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">文件列表中的使用示例</h3>
|
||||
|
||||
<div className="rounded-lg border border-gray-200 p-4">
|
||||
<div className="space-y-3">
|
||||
<div className="flex items-center gap-3 rounded p-2 hover:bg-gray-50">
|
||||
<FileSvgIcon size="20" />
|
||||
<span className="flex-1">illustration.svg</span>
|
||||
<span className="text-xs text-gray-500">45 KB</span>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-3 rounded p-2 hover:bg-gray-50">
|
||||
<FilePngIcon size="20" />
|
||||
<span className="flex-1">screenshot.png</span>
|
||||
<span className="text-xs text-gray-500">1.2 MB</span>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-3 rounded p-2 hover:bg-gray-50">
|
||||
<FileSvgIcon size="20" />
|
||||
<span className="flex-1">logo.svg</span>
|
||||
<span className="text-xs text-gray-500">12 KB</span>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-3 rounded p-2 hover:bg-gray-50">
|
||||
<FilePngIcon size="20" />
|
||||
<span className="flex-1">background.png</span>
|
||||
<span className="text-xs text-gray-500">2.8 MB</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// File Type Grid
|
||||
export const FileTypeGrid: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">文件类型网格展示</h3>
|
||||
|
||||
<div className="grid grid-cols-4 gap-4">
|
||||
<div className="flex flex-col items-center gap-2 rounded-lg border border-gray-200 p-4 hover:border-blue-500">
|
||||
<FileSvgIcon size="48" />
|
||||
<span className="text-sm font-medium">SVG</span>
|
||||
<span className="text-xs text-gray-600">矢量图形</span>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-col items-center gap-2 rounded-lg border border-gray-200 p-4 hover:border-blue-500">
|
||||
<FilePngIcon size="48" />
|
||||
<span className="text-sm font-medium">PNG</span>
|
||||
<span className="text-xs text-gray-600">位图图像</span>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-col items-center gap-2 rounded-lg border border-gray-200 p-4 hover:border-blue-500">
|
||||
<FileSvgIcon size="48" color="#10B981" />
|
||||
<span className="text-sm font-medium">SVG</span>
|
||||
<span className="text-xs text-gray-600">已处理</span>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-col items-center gap-2 rounded-lg border border-gray-200 p-4 hover:border-blue-500">
|
||||
<FilePngIcon size="48" color="#EF4444" />
|
||||
<span className="text-sm font-medium">PNG</span>
|
||||
<span className="text-xs text-gray-600">错误状态</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Interactive Example
|
||||
export const InteractiveExample: Story = {
|
||||
render: () => {
|
||||
const fileTypes = [
|
||||
{ icon: FileSvgIcon, name: 'Vector Graphics', ext: 'SVG', color: '#3B82F6' },
|
||||
{ icon: FilePngIcon, name: 'Raster Image', ext: 'PNG', color: '#10B981' }
|
||||
]
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">交互式文件类型选择器</h3>
|
||||
|
||||
<div className="grid grid-cols-2 gap-4">
|
||||
{fileTypes.map((fileType, index) => {
|
||||
const IconComponent = fileType.icon
|
||||
return (
|
||||
<button
|
||||
key={index}
|
||||
type="button"
|
||||
className="flex items-center gap-3 rounded-lg border border-gray-200 p-4 text-left transition-all hover:border-blue-500 hover:shadow-md focus:border-blue-500 focus:outline-none focus:ring-2 focus:ring-blue-500/20">
|
||||
<IconComponent size="32" color={fileType.color} />
|
||||
<div>
|
||||
<div className="font-medium">{fileType.ext} 文件</div>
|
||||
<div className="text-sm text-gray-600">{fileType.name}</div>
|
||||
</div>
|
||||
</button>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -20,6 +20,7 @@ import {
|
||||
Dashscope,
|
||||
Deepseek,
|
||||
Dmxapi,
|
||||
DmxapiToImg,
|
||||
Doc2x,
|
||||
Doubao,
|
||||
Exa,
|
||||
@@ -103,6 +104,7 @@ const logos = [
|
||||
{ Component: Dashscope, name: 'Dashscope' },
|
||||
{ Component: Deepseek, name: 'Deepseek' },
|
||||
{ Component: Dmxapi, name: 'Dmxapi' },
|
||||
{ Component: DmxapiToImg, name: 'DmxapiToImg' },
|
||||
{ Component: Doc2x, name: 'Doc2x' },
|
||||
{ Component: Doubao, name: 'Doubao' },
|
||||
{ Component: Exa, name: 'Exa' },
|
||||
|
||||
@@ -0,0 +1,339 @@
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
|
||||
import SvgSpinners180Ring from '../../../src/components/icons/SvgSpinners180Ring'
|
||||
|
||||
const meta: Meta<typeof SvgSpinners180Ring> = {
|
||||
title: 'Components/Icons/SvgSpinners180Ring',
|
||||
component: SvgSpinners180Ring,
|
||||
parameters: {
|
||||
layout: 'centered',
|
||||
docs: {
|
||||
description: {
|
||||
component:
|
||||
'⚠️ **已废弃** - 此组件使用频率为 0 次,不符合 UI 库提取标准(需 ≥3 次)。计划在未来版本中移除。虽然主项目中有本地副本,但完全未被导入使用。'
|
||||
}
|
||||
}
|
||||
},
|
||||
tags: ['autodocs', 'deprecated'],
|
||||
argTypes: {
|
||||
size: {
|
||||
description: '加载图标大小',
|
||||
control: { type: 'text' },
|
||||
defaultValue: '1em'
|
||||
},
|
||||
className: {
|
||||
description: '自定义 CSS 类名',
|
||||
control: { type: 'text' }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default meta
|
||||
type Story = StoryObj<typeof meta>
|
||||
|
||||
// Basic Spinner
|
||||
export const BasicSpinner: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">基础加载动画</h3>
|
||||
<div className="flex items-center gap-4">
|
||||
<SvgSpinners180Ring />
|
||||
<span className="text-sm text-gray-600">默认尺寸 (1em)</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Different Sizes
|
||||
export const DifferentSizes: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">不同尺寸的加载动画</h3>
|
||||
<div className="flex items-end gap-6">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="12" />
|
||||
<span className="text-xs text-gray-600">12px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="16" />
|
||||
<span className="text-xs text-gray-600">16px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="20" />
|
||||
<span className="text-xs text-gray-600">20px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" />
|
||||
<span className="text-xs text-gray-600">24px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="32" />
|
||||
<span className="text-xs text-gray-600">32px</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="48" />
|
||||
<span className="text-xs text-gray-600">48px</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Different Colors
|
||||
export const DifferentColors: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">不同颜色的加载动画</h3>
|
||||
<div className="flex items-center gap-6">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" className="text-blue-500" />
|
||||
<span className="text-xs text-gray-600">蓝色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" className="text-green-500" />
|
||||
<span className="text-xs text-gray-600">绿色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" className="text-orange-500" />
|
||||
<span className="text-xs text-gray-600">橙色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" className="text-red-500" />
|
||||
<span className="text-xs text-gray-600">红色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" className="text-purple-500" />
|
||||
<span className="text-xs text-gray-600">紫色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" className="text-gray-500" />
|
||||
<span className="text-xs text-gray-600">灰色</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Loading States in Buttons
|
||||
export const LoadingStatesInButtons: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">按钮中的加载状态</h3>
|
||||
<div className="flex flex-wrap items-center gap-4">
|
||||
<button
|
||||
type="button"
|
||||
className="flex items-center gap-2 rounded bg-blue-500 px-4 py-2 text-white hover:bg-blue-600"
|
||||
disabled>
|
||||
<SvgSpinners180Ring size="16" />
|
||||
<span>加载中...</span>
|
||||
</button>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
className="flex items-center gap-2 rounded bg-green-500 px-4 py-2 text-white hover:bg-green-600"
|
||||
disabled>
|
||||
<SvgSpinners180Ring size="16" />
|
||||
<span>保存中</span>
|
||||
</button>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
className="flex items-center gap-2 rounded bg-orange-500 px-4 py-2 text-white hover:bg-orange-600"
|
||||
disabled>
|
||||
<SvgSpinners180Ring size="16" />
|
||||
<span>上传中</span>
|
||||
</button>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
className="flex items-center gap-2 rounded border border-gray-300 bg-white px-4 py-2 text-gray-700 hover:bg-gray-50"
|
||||
disabled>
|
||||
<SvgSpinners180Ring size="16" className="text-gray-500" />
|
||||
<span>处理中</span>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Loading Cards
|
||||
export const LoadingCards: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">加载状态卡片</h3>
|
||||
<div className="grid grid-cols-2 gap-4">
|
||||
<div className="rounded-lg border border-gray-200 p-4">
|
||||
<div className="flex items-center gap-3">
|
||||
<SvgSpinners180Ring size="20" className="text-blue-500" />
|
||||
<div>
|
||||
<h4 className="font-medium">AI 模型响应中</h4>
|
||||
<p className="text-sm text-gray-600">正在生成回复...</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg border border-gray-200 p-4">
|
||||
<div className="flex items-center gap-3">
|
||||
<SvgSpinners180Ring size="20" className="text-green-500" />
|
||||
<div>
|
||||
<h4 className="font-medium">文件上传中</h4>
|
||||
<p className="text-sm text-gray-600">75% 完成</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg border border-gray-200 p-4">
|
||||
<div className="flex items-center gap-3">
|
||||
<SvgSpinners180Ring size="20" className="text-orange-500" />
|
||||
<div>
|
||||
<h4 className="font-medium">数据同步中</h4>
|
||||
<p className="text-sm text-gray-600">请稍候...</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg border border-gray-200 p-4">
|
||||
<div className="flex items-center gap-3">
|
||||
<SvgSpinners180Ring size="20" className="text-purple-500" />
|
||||
<div>
|
||||
<h4 className="font-medium">模型训练中</h4>
|
||||
<p className="text-sm text-gray-600">预计2分钟</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Inline Loading States
|
||||
export const InlineLoadingStates: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">行内加载状态</h3>
|
||||
<div className="space-y-4">
|
||||
<div className="flex items-center gap-2">
|
||||
<SvgSpinners180Ring size="14" className="text-blue-500" />
|
||||
<span className="text-sm">正在检查网络连接...</span>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2">
|
||||
<SvgSpinners180Ring size="14" className="text-green-500" />
|
||||
<span className="text-sm">正在保存更改...</span>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2">
|
||||
<SvgSpinners180Ring size="14" className="text-orange-500" />
|
||||
<span className="text-sm">正在验证凭据...</span>
|
||||
</div>
|
||||
|
||||
<div className="rounded bg-blue-50 p-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<SvgSpinners180Ring size="16" className="text-blue-600" />
|
||||
<span className="text-sm text-blue-800">系统正在处理您的请求,请稍候...</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Loading States with Different Speeds
|
||||
export const LoadingStatesWithDifferentSpeeds: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">不同速度的加载动画</h3>
|
||||
<div className="flex items-center gap-6">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" className="animate-spin" style={{ animationDuration: '2s' }} />
|
||||
<span className="text-xs text-gray-600">慢速 (2s)</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" />
|
||||
<span className="text-xs text-gray-600">默认速度</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<SvgSpinners180Ring size="24" className="animate-spin" style={{ animationDuration: '0.5s' }} />
|
||||
<span className="text-xs text-gray-600">快速 (0.5s)</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Full Page Loading
|
||||
export const FullPageLoading: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">全屏加载示例</h3>
|
||||
<div className="relative h-64 w-full overflow-hidden rounded-lg border border-gray-200 bg-white">
|
||||
<div className="absolute inset-0 flex flex-col items-center justify-center bg-white/80">
|
||||
<SvgSpinners180Ring size="32" className="text-blue-500" />
|
||||
<p className="mt-4 text-sm text-gray-600">页面加载中,请稍候...</p>
|
||||
</div>
|
||||
|
||||
{/* 模拟页面内容 */}
|
||||
<div className="p-6 opacity-30">
|
||||
<div className="mb-4 h-6 w-1/3 rounded bg-gray-200"></div>
|
||||
<div className="mb-2 h-4 w-full rounded bg-gray-200"></div>
|
||||
<div className="mb-2 h-4 w-5/6 rounded bg-gray-200"></div>
|
||||
<div className="mb-4 h-4 w-4/6 rounded bg-gray-200"></div>
|
||||
<div className="mb-2 h-4 w-full rounded bg-gray-200"></div>
|
||||
<div className="h-4 w-3/4 rounded bg-gray-200"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Interactive Loading Demo
|
||||
export const InteractiveLoadingDemo: Story = {
|
||||
render: () => {
|
||||
const loadingStates = [
|
||||
{ text: '发送消息', color: 'text-blue-500', bgColor: 'bg-blue-500' },
|
||||
{ text: '上传文件', color: 'text-green-500', bgColor: 'bg-green-500' },
|
||||
{ text: '生成内容', color: 'text-purple-500', bgColor: 'bg-purple-500' },
|
||||
{ text: '搜索结果', color: 'text-orange-500', bgColor: 'bg-orange-500' }
|
||||
]
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">交互式加载演示</h3>
|
||||
|
||||
<div className="grid grid-cols-2 gap-4">
|
||||
{loadingStates.map((state, index) => (
|
||||
<button
|
||||
key={index}
|
||||
type="button"
|
||||
className={`flex items-center justify-center gap-2 rounded-lg ${state.bgColor} px-4 py-3 text-white transition-all hover:opacity-90 focus:outline-none focus:ring-2 focus:ring-offset-2`}
|
||||
onClick={() => {
|
||||
// 演示用途 - 在实际应用中这里会触发真实的加载状态
|
||||
alert(`触发 ${state.text} 加载状态`)
|
||||
}}>
|
||||
<SvgSpinners180Ring size="16" />
|
||||
<span>{state.text}中...</span>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<p className="text-xs text-gray-500">点击按钮查看加载状态的交互效果</p>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,374 @@
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
|
||||
import ToolsCallingIcon from '../../../src/components/icons/ToolsCallingIcon'
|
||||
|
||||
const meta: Meta<typeof ToolsCallingIcon> = {
|
||||
title: 'Components/Icons/ToolsCallingIcon',
|
||||
component: ToolsCallingIcon,
|
||||
parameters: {
|
||||
layout: 'centered',
|
||||
docs: {
|
||||
description: {
|
||||
component:
|
||||
'⚠️ **已废弃** - 此组件使用频率仅为 1 次,不符合 UI 库提取标准(需 ≥3 次)。计划在未来版本中移除。建议直接使用 lucide-react 的 Wrench 图标。'
|
||||
}
|
||||
}
|
||||
},
|
||||
tags: ['autodocs', 'deprecated'],
|
||||
argTypes: {
|
||||
className: {
|
||||
description: '容器的自定义 CSS 类名',
|
||||
control: { type: 'text' }
|
||||
},
|
||||
iconClassName: {
|
||||
description: '图标的自定义 CSS 类名',
|
||||
control: { type: 'text' }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default meta
|
||||
type Story = StoryObj<typeof meta>
|
||||
|
||||
// Basic Tools Calling Icon
|
||||
export const BasicToolsCallingIcon: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">基础工具调用图标</h3>
|
||||
<div className="flex items-center gap-4">
|
||||
<ToolsCallingIcon />
|
||||
</div>
|
||||
<p className="mt-2 text-sm text-gray-600">悬停图标查看工具提示,显示"函数调用"文本</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Different Sizes
|
||||
export const DifferentSizes: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">不同尺寸的工具调用图标</h3>
|
||||
<div className="flex items-end gap-6">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-3 h-3" />
|
||||
<span className="text-xs text-gray-600">小号</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon />
|
||||
<span className="text-xs text-gray-600">默认</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-5 h-5" />
|
||||
<span className="text-xs text-gray-600">中号</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-6 h-6" />
|
||||
<span className="text-xs text-gray-600">大号</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-8 h-8" />
|
||||
<span className="text-xs text-gray-600">特大号</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Different Colors
|
||||
export const DifferentColors: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h3 className="mb-3 font-semibold">不同颜色的工具调用图标</h3>
|
||||
<div className="flex items-center gap-6">
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon />
|
||||
<span className="text-xs text-gray-600">默认绿色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-blue-500" />
|
||||
<span className="text-xs text-gray-600">蓝色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-orange-500" />
|
||||
<span className="text-xs text-gray-600">橙色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-red-500" />
|
||||
<span className="text-xs text-gray-600">红色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-purple-500" />
|
||||
<span className="text-xs text-gray-600">紫色</span>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-gray-500" />
|
||||
<span className="text-xs text-gray-600">灰色</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Model Features Context
|
||||
export const ModelFeaturesContext: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">在模型功能标识中的使用</h3>
|
||||
|
||||
<div className="grid gap-4">
|
||||
<div className="rounded-lg border border-gray-200 p-4">
|
||||
<div className="mb-2 flex items-center gap-2">
|
||||
<h4 className="font-medium">GPT-4 Turbo</h4>
|
||||
<ToolsCallingIcon />
|
||||
</div>
|
||||
<p className="text-sm text-gray-600">支持函数调用的先进模型,可以调用外部工具和API</p>
|
||||
<div className="mt-2 flex gap-2">
|
||||
<span className="rounded bg-green-100 px-2 py-1 text-xs text-green-800">函数调用</span>
|
||||
<span className="rounded bg-blue-100 px-2 py-1 text-xs text-blue-800">多模态</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg border border-gray-200 p-4">
|
||||
<div className="mb-2 flex items-center gap-2">
|
||||
<h4 className="font-medium">Claude 3.5 Sonnet</h4>
|
||||
<ToolsCallingIcon />
|
||||
</div>
|
||||
<p className="text-sm text-gray-600">Anthropic的高性能模型,具备强大的工具使用能力</p>
|
||||
<div className="mt-2 flex gap-2">
|
||||
<span className="rounded bg-green-100 px-2 py-1 text-xs text-green-800">函数调用</span>
|
||||
<span className="rounded bg-orange-100 px-2 py-1 text-xs text-orange-800">推理</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg border border-gray-200 p-4">
|
||||
<div className="mb-2 flex items-center gap-2">
|
||||
<h4 className="font-medium">Llama 3.1 8B</h4>
|
||||
{/* 不支持函数调用 */}
|
||||
</div>
|
||||
<p className="text-sm text-gray-600">Meta的开源模型,适用于基础对话任务</p>
|
||||
<div className="mt-2 flex gap-2">
|
||||
<span className="rounded bg-gray-100 px-2 py-1 text-xs text-gray-800">文本生成</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Chat Message Context
|
||||
export const ChatMessageContext: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">在聊天消息中的使用</h3>
|
||||
|
||||
<div className="space-y-3">
|
||||
<div className="rounded-lg bg-blue-50 p-3">
|
||||
<div className="mb-1 flex items-center gap-2 text-sm text-blue-800">
|
||||
<ToolsCallingIcon iconClassName="w-3.5 h-3.5 mr-1 text-[#00b96b]" />
|
||||
<span className="font-medium">调用工具: weather_api</span>
|
||||
</div>
|
||||
<p className="text-sm text-blue-700">正在获取北京的天气信息...</p>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg bg-green-50 p-3">
|
||||
<div className="mb-1 flex items-center gap-2 text-sm text-green-800">
|
||||
<ToolsCallingIcon iconClassName="w-3.5 h-3.5 mr-1 text-[#00b96b]" />
|
||||
<span className="font-medium">调用工具: search_web</span>
|
||||
</div>
|
||||
<p className="text-sm text-green-700">正在搜索最新的AI新闻...</p>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg bg-orange-50 p-3">
|
||||
<div className="mb-1 flex items-center gap-2 text-sm text-orange-800">
|
||||
<ToolsCallingIcon iconClassName="w-3.5 h-3.5 mr-1 text-[#00b96b]" />
|
||||
<span className="font-medium">调用工具: code_interpreter</span>
|
||||
</div>
|
||||
<p className="text-sm text-orange-700">正在执行Python代码计算结果...</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Tool Availability Indicator
|
||||
export const ToolAvailabilityIndicator: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">工具可用性指示器</h3>
|
||||
|
||||
<div className="rounded-lg border border-gray-200">
|
||||
<div className="border-b border-gray-200 p-3">
|
||||
<h4 className="font-medium text-gray-900">可用工具</h4>
|
||||
</div>
|
||||
|
||||
<div className="divide-y divide-gray-200">
|
||||
<div className="flex items-center justify-between p-3 hover:bg-gray-50">
|
||||
<div className="flex items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-[#00b96b]" />
|
||||
<span className="font-medium">天气查询</span>
|
||||
</div>
|
||||
<span className="rounded-full bg-green-100 px-2 py-1 text-xs text-green-800">可用</span>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between p-3 hover:bg-gray-50">
|
||||
<div className="flex items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-[#00b96b]" />
|
||||
<span className="font-medium">网络搜索</span>
|
||||
</div>
|
||||
<span className="rounded-full bg-green-100 px-2 py-1 text-xs text-green-800">可用</span>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between p-3 hover:bg-gray-50 opacity-60">
|
||||
<div className="flex items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-gray-400" />
|
||||
<span className="font-medium">代码执行</span>
|
||||
</div>
|
||||
<span className="rounded-full bg-gray-100 px-2 py-1 text-xs text-gray-800">不可用</span>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between p-3 hover:bg-gray-50">
|
||||
<div className="flex items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-yellow-600" />
|
||||
<span className="font-medium">图像生成</span>
|
||||
</div>
|
||||
<span className="rounded-full bg-yellow-100 px-2 py-1 text-xs text-yellow-800">限制使用</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Interactive Tool Selection
|
||||
export const InteractiveToolSelection: Story = {
|
||||
render: () => {
|
||||
const tools = [
|
||||
{ name: '天气查询', description: '获取实时天气信息', available: true },
|
||||
{ name: '网络搜索', description: '搜索最新信息', available: true },
|
||||
{ name: '代码执行', description: '运行Python代码', available: false },
|
||||
{ name: '图像分析', description: '分析和描述图像', available: true },
|
||||
{ name: '数据可视化', description: '创建图表和图形', available: false }
|
||||
]
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">交互式工具选择器</h3>
|
||||
|
||||
<div className="grid grid-cols-1 gap-3">
|
||||
{tools.map((tool, index) => (
|
||||
<button
|
||||
key={index}
|
||||
type="button"
|
||||
className={`flex items-center gap-3 rounded-lg border p-3 text-left transition-all hover:shadow-md focus:outline-none focus:ring-2 focus:ring-blue-500/20 ${
|
||||
tool.available
|
||||
? 'border-gray-200 hover:border-blue-500'
|
||||
: 'border-gray-200 opacity-60 cursor-not-allowed'
|
||||
}`}
|
||||
disabled={!tool.available}>
|
||||
<ToolsCallingIcon
|
||||
iconClassName={`w-4 h-4 mr-1.5 ${tool.available ? 'text-[#00b96b]' : 'text-gray-400'}`}
|
||||
/>
|
||||
<div className="flex-1">
|
||||
<div className="font-medium">{tool.name}</div>
|
||||
<div className="text-sm text-gray-600">{tool.description}</div>
|
||||
</div>
|
||||
<div className="text-xs">
|
||||
{tool.available ? (
|
||||
<span className="rounded bg-green-100 px-2 py-1 text-green-800">可用</span>
|
||||
) : (
|
||||
<span className="rounded bg-gray-100 px-2 py-1 text-gray-800">不可用</span>
|
||||
)}
|
||||
</div>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Loading Tool Calls
|
||||
export const LoadingToolCalls: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">工具调用加载状态</h3>
|
||||
|
||||
<div className="space-y-3">
|
||||
<div className="rounded-lg border border-gray-200 p-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<ToolsCallingIcon />
|
||||
<span className="font-medium">正在调用工具...</span>
|
||||
<div className="h-2 w-2 animate-pulse rounded-full bg-green-500"></div>
|
||||
</div>
|
||||
<p className="mt-1 text-sm text-gray-600">weather_api(city="北京")</p>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg border border-green-200 bg-green-50 p-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-green-600" />
|
||||
<span className="font-medium text-green-800">工具调用完成</span>
|
||||
<span className="text-green-600">✓</span>
|
||||
</div>
|
||||
<p className="mt-1 text-sm text-green-700">已获取北京天气信息:晴天,温度 22°C</p>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg border border-red-200 bg-red-50 p-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<ToolsCallingIcon iconClassName="w-4 h-4 mr-1.5 text-red-600" />
|
||||
<span className="font-medium text-red-800">工具调用失败</span>
|
||||
<span className="text-red-600">✗</span>
|
||||
</div>
|
||||
<p className="mt-1 text-sm text-red-700">API密钥无效,请检查配置</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Settings Panel
|
||||
export const SettingsPanel: Story = {
|
||||
render: () => (
|
||||
<div className="space-y-4">
|
||||
<h3 className="mb-3 font-semibold">设置面板中的使用</h3>
|
||||
|
||||
<div className="rounded-lg border border-gray-200 p-4">
|
||||
<div className="mb-4 flex items-center gap-2">
|
||||
<ToolsCallingIcon />
|
||||
<h4 className="font-medium">函数调用设置</h4>
|
||||
</div>
|
||||
|
||||
<div className="space-y-3">
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<div className="font-medium">启用函数调用</div>
|
||||
<div className="text-sm text-gray-600">允许AI模型调用外部工具</div>
|
||||
</div>
|
||||
<input type="checkbox" className="rounded" defaultChecked />
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<div className="font-medium">自动确认调用</div>
|
||||
<div className="text-sm text-gray-600">自动执行安全的工具调用</div>
|
||||
</div>
|
||||
<input type="checkbox" className="rounded" />
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<div className="font-medium">显示调用详情</div>
|
||||
<div className="text-sm text-gray-600">在聊天中显示工具调用过程</div>
|
||||
</div>
|
||||
<input type="checkbox" className="rounded" defaultChecked />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,124 +0,0 @@
|
||||
import {
|
||||
Breadcrumb,
|
||||
BreadcrumbEllipsis,
|
||||
BreadcrumbItem,
|
||||
BreadcrumbLink,
|
||||
BreadcrumbList,
|
||||
BreadcrumbPage,
|
||||
BreadcrumbSeparator
|
||||
} from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
import { Slash } from 'lucide-react'
|
||||
|
||||
const meta: Meta<typeof Breadcrumb> = {
|
||||
title: 'Components/Primitives/Breadcrumb',
|
||||
component: Breadcrumb,
|
||||
parameters: {
|
||||
layout: 'centered',
|
||||
docs: {
|
||||
description: {
|
||||
component: 'Displays the path to the current resource using a hierarchy of links. Based on shadcn/ui.'
|
||||
}
|
||||
}
|
||||
},
|
||||
tags: ['autodocs']
|
||||
}
|
||||
|
||||
export default meta
|
||||
type Story = StoryObj<typeof meta>
|
||||
|
||||
// Default
|
||||
export const Default: Story = {
|
||||
render: () => (
|
||||
<Breadcrumb>
|
||||
<BreadcrumbList>
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbLink href="/">Home</BreadcrumbLink>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbSeparator />
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbLink href="/components">Components</BreadcrumbLink>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbSeparator />
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbPage>Breadcrumb</BreadcrumbPage>
|
||||
</BreadcrumbItem>
|
||||
</BreadcrumbList>
|
||||
</Breadcrumb>
|
||||
)
|
||||
}
|
||||
|
||||
// Custom Separator
|
||||
export const CustomSeparator: Story = {
|
||||
render: () => (
|
||||
<Breadcrumb>
|
||||
<BreadcrumbList>
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbLink href="/">Home</BreadcrumbLink>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbSeparator>
|
||||
<Slash />
|
||||
</BreadcrumbSeparator>
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbLink href="/components">Components</BreadcrumbLink>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbSeparator>
|
||||
<Slash />
|
||||
</BreadcrumbSeparator>
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbPage>Breadcrumb</BreadcrumbPage>
|
||||
</BreadcrumbItem>
|
||||
</BreadcrumbList>
|
||||
</Breadcrumb>
|
||||
)
|
||||
}
|
||||
|
||||
// Collapsed
|
||||
export const Collapsed: Story = {
|
||||
render: () => (
|
||||
<Breadcrumb>
|
||||
<BreadcrumbList>
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbLink href="/">Home</BreadcrumbLink>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbSeparator />
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbEllipsis />
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbSeparator />
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbLink href="/docs/components">Components</BreadcrumbLink>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbSeparator />
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbPage>Breadcrumb</BreadcrumbPage>
|
||||
</BreadcrumbItem>
|
||||
</BreadcrumbList>
|
||||
</Breadcrumb>
|
||||
)
|
||||
}
|
||||
|
||||
// Link as Child
|
||||
export const LinkAsChild: Story = {
|
||||
render: () => (
|
||||
<Breadcrumb>
|
||||
<BreadcrumbList>
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbLink asChild>
|
||||
<a href="/">Home</a>
|
||||
</BreadcrumbLink>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbSeparator />
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbLink asChild>
|
||||
<a href="/components">Components</a>
|
||||
</BreadcrumbLink>
|
||||
</BreadcrumbItem>
|
||||
<BreadcrumbSeparator />
|
||||
<BreadcrumbItem>
|
||||
<BreadcrumbPage>Breadcrumb</BreadcrumbPage>
|
||||
</BreadcrumbItem>
|
||||
</BreadcrumbList>
|
||||
</Breadcrumb>
|
||||
)
|
||||
}
|
||||
@@ -1,7 +1,8 @@
|
||||
import { Button } from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
import { ChevronRight, Loader2, Mail } from 'lucide-react'
|
||||
|
||||
import { Button } from '../../../src/components/primitives/button'
|
||||
|
||||
const meta: Meta<typeof Button> = {
|
||||
title: 'Components/Primitives/Button',
|
||||
component: Button,
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { Checkbox, type CheckedState } from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
import { Bell, Check, FileText, Mail, Shield, Star } from 'lucide-react'
|
||||
import { useState } from 'react'
|
||||
|
||||
import { Checkbox, type CheckedState } from '../../../src/components/primitives/checkbox'
|
||||
|
||||
const meta: Meta<typeof Checkbox> = {
|
||||
title: 'Components/Primitives/Checkbox',
|
||||
component: Checkbox,
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { Combobox } from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
import { ChevronDown, User } from 'lucide-react'
|
||||
import { useState } from 'react'
|
||||
|
||||
import { Combobox } from '../../../src/components/primitives/combobox'
|
||||
|
||||
const meta: Meta<typeof Combobox> = {
|
||||
title: 'Components/Primitives/Combobox',
|
||||
component: Combobox,
|
||||
|
||||
@@ -1,577 +0,0 @@
|
||||
import { Kbd, KbdGroup } from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
import { Command, Copy, Save, Search } from 'lucide-react'
|
||||
// import { Tooltip, TooltipContent, TooltipTrigger } from '../../../src/components/primitives/tooltip'
|
||||
|
||||
const meta: Meta<typeof Kbd> = {
|
||||
title: 'Components/Primitives/Kbd',
|
||||
component: Kbd,
|
||||
parameters: {
|
||||
layout: 'centered',
|
||||
docs: {
|
||||
description: {
|
||||
component: '用于显示键盘快捷键的组件,支持单个按键和组合快捷键'
|
||||
}
|
||||
}
|
||||
},
|
||||
tags: ['autodocs'],
|
||||
argTypes: {
|
||||
className: {
|
||||
control: { type: 'text' },
|
||||
description: '自定义 CSS 类名'
|
||||
},
|
||||
children: {
|
||||
control: { type: 'text' },
|
||||
description: '键盘按键内容'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default meta
|
||||
type Story = StoryObj<typeof meta>
|
||||
|
||||
// 基础示例
|
||||
export const Default: Story = {
|
||||
args: {
|
||||
children: 'Ctrl'
|
||||
}
|
||||
}
|
||||
|
||||
// 单个按键
|
||||
export const SingleKeys: Story = {
|
||||
render: () => (
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>Shift</Kbd>
|
||||
<Kbd>Alt</Kbd>
|
||||
<Kbd>Enter</Kbd>
|
||||
<Kbd>Esc</Kbd>
|
||||
<Kbd>Tab</Kbd>
|
||||
<Kbd>Space</Kbd>
|
||||
<Kbd>Delete</Kbd>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 字母和数字按键
|
||||
export const AlphanumericKeys: Story = {
|
||||
render: () => (
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<Kbd>A</Kbd>
|
||||
<Kbd>B</Kbd>
|
||||
<Kbd>C</Kbd>
|
||||
<Kbd>1</Kbd>
|
||||
<Kbd>2</Kbd>
|
||||
<Kbd>3</Kbd>
|
||||
<Kbd>F1</Kbd>
|
||||
<Kbd>F2</Kbd>
|
||||
<Kbd>F12</Kbd>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 方向键
|
||||
export const ArrowKeys: Story = {
|
||||
render: () => (
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<Kbd>↑</Kbd>
|
||||
<Kbd>↓</Kbd>
|
||||
<Kbd>←</Kbd>
|
||||
<Kbd>→</Kbd>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 组合快捷键
|
||||
export const KeyCombinations: Story = {
|
||||
render: () => (
|
||||
<div className="flex flex-col gap-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-24 text-sm text-muted-foreground">保存:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>S</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-24 text-sm text-muted-foreground">复制:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>C</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-24 text-sm text-muted-foreground">粘贴:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>V</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-24 text-sm text-muted-foreground">查找:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>F</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-24 text-sm text-muted-foreground">全选:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>A</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Mac 快捷键
|
||||
export const MacKeys: Story = {
|
||||
render: () => (
|
||||
<div className="flex flex-col gap-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-24 text-sm text-muted-foreground">保存:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>⌘</Kbd>
|
||||
<Kbd>S</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-24 text-sm text-muted-foreground">复制:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>⌘</Kbd>
|
||||
<Kbd>C</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-24 text-sm text-muted-foreground">粘贴:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>⌘</Kbd>
|
||||
<Kbd>V</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-24 text-sm text-muted-foreground">截图:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>⌘</Kbd>
|
||||
<Kbd>⇧</Kbd>
|
||||
<Kbd>4</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 三键组合
|
||||
export const ThreeKeyCombinations: Story = {
|
||||
render: () => (
|
||||
<div className="flex flex-col gap-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-32 text-sm text-muted-foreground">撤销:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>Shift</Kbd>
|
||||
<Kbd>Z</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-32 text-sm text-muted-foreground">重做:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>Alt</Kbd>
|
||||
<Kbd>Z</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-32 text-sm text-muted-foreground">格式化:</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>Shift</Kbd>
|
||||
<Kbd>F</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 带图标的按键
|
||||
export const WithIcons: Story = {
|
||||
render: () => (
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<Kbd>
|
||||
<Command />
|
||||
</Kbd>
|
||||
<Kbd>
|
||||
<Copy />
|
||||
</Kbd>
|
||||
<Kbd>
|
||||
<Save />
|
||||
</Kbd>
|
||||
<Kbd>
|
||||
<Search />
|
||||
</Kbd>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 在 Tooltip 中使用
|
||||
// export const InTooltip: Story = {
|
||||
// render: () => (
|
||||
// <div className="flex flex-wrap gap-4">
|
||||
// <Tooltip>
|
||||
// <TooltipTrigger asChild>
|
||||
// <button
|
||||
// type="button"
|
||||
// className="rounded bg-primary px-4 py-2 text-sm font-medium text-primary-foreground hover:bg-primary/90">
|
||||
// 保存
|
||||
// </button>
|
||||
// </TooltipTrigger>
|
||||
// <TooltipContent>
|
||||
// <Kbd>Ctrl+S</Kbd>
|
||||
// </TooltipContent>
|
||||
// </Tooltip>
|
||||
// <Tooltip>
|
||||
// <TooltipTrigger asChild>
|
||||
// <button
|
||||
// type="button"
|
||||
// className="rounded bg-secondary px-4 py-2 text-sm font-medium text-secondary-foreground hover:bg-secondary/80">
|
||||
// 复制
|
||||
// </button>
|
||||
// </TooltipTrigger>
|
||||
// <TooltipContent>
|
||||
// <KbdGroup>
|
||||
// <Kbd>Ctrl</Kbd>
|
||||
// <Kbd>C</Kbd>
|
||||
// </KbdGroup>
|
||||
// </TooltipContent>
|
||||
// </Tooltip>
|
||||
// <Tooltip>
|
||||
// <TooltipTrigger asChild>
|
||||
// <button
|
||||
// type="button"
|
||||
// className="rounded bg-secondary px-4 py-2 text-sm font-medium text-secondary-foreground hover:bg-secondary/80">
|
||||
// 粘贴
|
||||
// </button>
|
||||
// </TooltipTrigger>
|
||||
// <TooltipContent>
|
||||
// <KbdGroup>
|
||||
// <Kbd>Ctrl</Kbd>
|
||||
// <Kbd>V</Kbd>
|
||||
// </KbdGroup>
|
||||
// </TooltipContent>
|
||||
// </Tooltip>
|
||||
// </div>
|
||||
// )
|
||||
// }
|
||||
|
||||
// 快捷键列表
|
||||
export const ShortcutList: Story = {
|
||||
render: () => (
|
||||
<div className="w-96 space-y-2 rounded-lg border p-4">
|
||||
<h3 className="mb-3 text-base font-semibold">键盘快捷键</h3>
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">保存文件</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>S</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">打开文件</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>O</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">查找</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>F</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">替换</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>H</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">撤销</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>Z</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">重做</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>Y</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 编辑器快捷键
|
||||
export const EditorShortcuts: Story = {
|
||||
render: () => (
|
||||
<div className="w-[600px] space-y-4 rounded-lg border p-6">
|
||||
<h3 className="text-lg font-semibold">编辑器快捷键</h3>
|
||||
|
||||
<div className="space-y-3">
|
||||
<div>
|
||||
<h4 className="mb-2 text-sm font-medium text-muted-foreground">文件操作</h4>
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">新建文件</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>N</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">打开文件</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>O</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">保存</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>S</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h4 className="mb-2 text-sm font-medium text-muted-foreground">编辑</h4>
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">复制</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>C</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">剪切</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>X</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">粘贴</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>V</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h4 className="mb-2 text-sm font-medium text-muted-foreground">导航</h4>
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">转到行</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>G</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">查找</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>F</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">全局搜索</span>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>Shift</Kbd>
|
||||
<Kbd>F</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 游戏控制
|
||||
export const GameControls: Story = {
|
||||
render: () => (
|
||||
<div className="w-96 space-y-4 rounded-lg border p-6">
|
||||
<h3 className="text-lg font-semibold">游戏控制</h3>
|
||||
|
||||
<div className="space-y-3">
|
||||
<div>
|
||||
<h4 className="mb-2 text-sm font-medium text-muted-foreground">移动</h4>
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">向前</span>
|
||||
<Kbd>W</Kbd>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">向后</span>
|
||||
<Kbd>S</Kbd>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">向左</span>
|
||||
<Kbd>A</Kbd>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">向右</span>
|
||||
<Kbd>D</Kbd>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h4 className="mb-2 text-sm font-medium text-muted-foreground">动作</h4>
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">跳跃</span>
|
||||
<Kbd>Space</Kbd>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">冲刺</span>
|
||||
<Kbd>Shift</Kbd>
|
||||
</div>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm">使用物品</span>
|
||||
<Kbd>E</Kbd>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 特殊字符
|
||||
export const SpecialCharacters: Story = {
|
||||
render: () => (
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<Kbd>⌘</Kbd>
|
||||
<Kbd>⌥</Kbd>
|
||||
<Kbd>⇧</Kbd>
|
||||
<Kbd>⌃</Kbd>
|
||||
<Kbd>⏎</Kbd>
|
||||
<Kbd>⌫</Kbd>
|
||||
<Kbd>⌦</Kbd>
|
||||
<Kbd>⇥</Kbd>
|
||||
<Kbd>⎋</Kbd>
|
||||
<Kbd>⇪</Kbd>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 不同尺寸 (通过自定义类名)
|
||||
export const CustomSizes: Story = {
|
||||
render: () => (
|
||||
<div className="flex items-center gap-3">
|
||||
<Kbd className="h-4 min-w-4 text-[10px]">S</Kbd>
|
||||
<Kbd>M</Kbd>
|
||||
<Kbd className="h-6 min-w-6 text-sm">L</Kbd>
|
||||
<Kbd className="h-8 min-w-8 text-base">XL</Kbd>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// 实际应用示例
|
||||
export const RealWorldExample: Story = {
|
||||
render: () => (
|
||||
<div className="w-[700px] space-y-6">
|
||||
<div className="rounded-lg border p-6">
|
||||
<div className="mb-4 flex items-center justify-between">
|
||||
<h3 className="text-lg font-semibold">命令面板</h3>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>K</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between rounded-md p-2 hover:bg-muted">
|
||||
<div className="flex items-center gap-3">
|
||||
<Save className="h-4 w-4" />
|
||||
<span className="text-sm">保存当前文件</span>
|
||||
</div>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>S</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between rounded-md p-2 hover:bg-muted">
|
||||
<div className="flex items-center gap-3">
|
||||
<Copy className="h-4 w-4" />
|
||||
<span className="text-sm">复制选中内容</span>
|
||||
</div>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>C</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between rounded-md p-2 hover:bg-muted">
|
||||
<div className="flex items-center gap-3">
|
||||
<Search className="h-4 w-4" />
|
||||
<span className="text-sm">在文件中查找</span>
|
||||
</div>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>F</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
<div className="flex items-center justify-between rounded-md p-2 hover:bg-muted">
|
||||
<div className="flex items-center gap-3">
|
||||
<Command className="h-4 w-4" />
|
||||
<span className="text-sm">显示所有命令</span>
|
||||
</div>
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>Shift</Kbd>
|
||||
<Kbd>P</Kbd>
|
||||
</KbdGroup>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="rounded-lg border p-6">
|
||||
<h3 className="mb-4 text-lg font-semibold">提示信息</h3>
|
||||
<div className="space-y-3">
|
||||
<p className="text-sm text-muted-foreground">
|
||||
按 <Kbd>Ctrl</Kbd> 并点击链接可在新标签页中打开
|
||||
</p>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
使用{' '}
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>↑</Kbd>
|
||||
</KbdGroup>{' '}
|
||||
或{' '}
|
||||
<KbdGroup>
|
||||
<Kbd>Ctrl</Kbd>
|
||||
<Kbd>↓</Kbd>
|
||||
</KbdGroup>{' '}
|
||||
在选项之间导航
|
||||
</p>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
按 <Kbd>Enter</Kbd> 确认,<Kbd>Esc</Kbd> 取消
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,181 +0,0 @@
|
||||
import {
|
||||
Pagination,
|
||||
PaginationContent,
|
||||
PaginationEllipsis,
|
||||
PaginationItem,
|
||||
PaginationLink,
|
||||
PaginationNext,
|
||||
PaginationPrevious
|
||||
} from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
|
||||
const meta: Meta<typeof Pagination> = {
|
||||
title: 'Components/Primitives/Pagination',
|
||||
component: Pagination,
|
||||
parameters: {
|
||||
layout: 'centered',
|
||||
docs: {
|
||||
description: {
|
||||
component: 'Pagination with page navigation, next and previous links. Based on shadcn/ui.'
|
||||
}
|
||||
}
|
||||
},
|
||||
tags: ['autodocs'],
|
||||
argTypes: {
|
||||
className: {
|
||||
control: { type: 'text' },
|
||||
description: 'Additional CSS classes'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default meta
|
||||
type Story = StoryObj<typeof meta>
|
||||
|
||||
// Default
|
||||
export const Default: Story = {
|
||||
render: () => (
|
||||
<Pagination>
|
||||
<PaginationContent>
|
||||
<PaginationItem>
|
||||
<PaginationPrevious href="#" />
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">1</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#" isActive>
|
||||
2
|
||||
</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">3</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationEllipsis />
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationNext href="#" />
|
||||
</PaginationItem>
|
||||
</PaginationContent>
|
||||
</Pagination>
|
||||
)
|
||||
}
|
||||
|
||||
// Simple
|
||||
export const Simple: Story = {
|
||||
render: () => (
|
||||
<Pagination>
|
||||
<PaginationContent>
|
||||
<PaginationItem>
|
||||
<PaginationPrevious href="#" />
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">1</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">2</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">3</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationNext href="#" />
|
||||
</PaginationItem>
|
||||
</PaginationContent>
|
||||
</Pagination>
|
||||
)
|
||||
}
|
||||
|
||||
// With Ellipsis
|
||||
export const WithEllipsis: Story = {
|
||||
render: () => (
|
||||
<Pagination>
|
||||
<PaginationContent>
|
||||
<PaginationItem>
|
||||
<PaginationPrevious href="#" />
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">1</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationEllipsis />
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">4</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#" isActive>
|
||||
5
|
||||
</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">6</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationEllipsis />
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">10</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationNext href="#" />
|
||||
</PaginationItem>
|
||||
</PaginationContent>
|
||||
</Pagination>
|
||||
)
|
||||
}
|
||||
|
||||
// First Page Active
|
||||
export const FirstPageActive: Story = {
|
||||
render: () => (
|
||||
<Pagination>
|
||||
<PaginationContent>
|
||||
<PaginationItem>
|
||||
<PaginationPrevious href="#" className="pointer-events-none opacity-50" />
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#" isActive>
|
||||
1
|
||||
</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">2</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">3</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationNext href="#" />
|
||||
</PaginationItem>
|
||||
</PaginationContent>
|
||||
</Pagination>
|
||||
)
|
||||
}
|
||||
|
||||
// Last Page Active
|
||||
export const LastPageActive: Story = {
|
||||
render: () => (
|
||||
<Pagination>
|
||||
<PaginationContent>
|
||||
<PaginationItem>
|
||||
<PaginationPrevious href="#" />
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">1</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#">2</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationLink href="#" isActive>
|
||||
3
|
||||
</PaginationLink>
|
||||
</PaginationItem>
|
||||
<PaginationItem>
|
||||
<PaginationNext href="#" className="pointer-events-none opacity-50" />
|
||||
</PaginationItem>
|
||||
</PaginationContent>
|
||||
</Pagination>
|
||||
)
|
||||
}
|
||||
@@ -1,8 +1,9 @@
|
||||
import { RadioGroup, RadioGroupItem } from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
import { Bell, Check, Moon, Palette, Sun } from 'lucide-react'
|
||||
import { useState } from 'react'
|
||||
|
||||
import { RadioGroup, RadioGroupItem } from '../../../src/components/primitives/radioGroup'
|
||||
|
||||
const meta: Meta<typeof RadioGroup> = {
|
||||
title: 'Components/Primitives/RadioGroup',
|
||||
component: RadioGroup,
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
import { Globe, Palette, User } from 'lucide-react'
|
||||
import { useState } from 'react'
|
||||
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
@@ -7,10 +11,7 @@ import {
|
||||
SelectSeparator,
|
||||
SelectTrigger,
|
||||
SelectValue
|
||||
} from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
import { Globe, Palette, User } from 'lucide-react'
|
||||
import { useState } from 'react'
|
||||
} from '../../../src/components/primitives/select'
|
||||
|
||||
const meta: Meta<typeof Select> = {
|
||||
title: 'Components/Primitives/Select',
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { Button, Spinner } from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
import { useState } from 'react'
|
||||
|
||||
import { Button } from '../../../src/components'
|
||||
import { Spinner } from '../../../src/components'
|
||||
|
||||
const meta: Meta<typeof Spinner> = {
|
||||
title: 'Components/Primitives/Spinner',
|
||||
component: Spinner,
|
||||
|
||||
@@ -1,155 +0,0 @@
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@cherrystudio/ui'
|
||||
import type { Meta, StoryObj } from '@storybook/react'
|
||||
|
||||
const meta: Meta<typeof Tabs> = {
|
||||
title: 'Components/Primitives/Tabs',
|
||||
component: Tabs,
|
||||
parameters: {
|
||||
layout: 'centered',
|
||||
docs: {
|
||||
description: {
|
||||
component:
|
||||
'A set of layered sections of content—known as tab panels—that are displayed one at a time. Based on shadcn/ui.'
|
||||
}
|
||||
}
|
||||
},
|
||||
tags: ['autodocs'],
|
||||
argTypes: {
|
||||
variant: {
|
||||
control: { type: 'select' },
|
||||
options: ['default', 'line'],
|
||||
description: 'The visual style of the tabs'
|
||||
},
|
||||
defaultValue: {
|
||||
control: { type: 'text' },
|
||||
description: 'The value of the tab that should be active when initially rendered'
|
||||
},
|
||||
className: {
|
||||
control: { type: 'text' },
|
||||
description: 'Additional CSS classes'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default meta
|
||||
type Story = StoryObj<typeof meta>
|
||||
|
||||
// Default (Segmented Control Style)
|
||||
export const Default: Story = {
|
||||
render: () => (
|
||||
<Tabs defaultValue="account" className="w-[400px]">
|
||||
<TabsList>
|
||||
<TabsTrigger value="account">Account</TabsTrigger>
|
||||
<TabsTrigger value="password">Password</TabsTrigger>
|
||||
</TabsList>
|
||||
<TabsContent value="account">
|
||||
<div className="rounded-md border p-4 mt-2">
|
||||
<h3 className="text-lg font-medium">Account</h3>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Make changes to your account here. Click save when you're done.
|
||||
</p>
|
||||
</div>
|
||||
</TabsContent>
|
||||
<TabsContent value="password">
|
||||
<div className="rounded-md border p-4 mt-2">
|
||||
<h3 className="text-lg font-medium">Password</h3>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Change your password here. After saving, you'll be logged out.
|
||||
</p>
|
||||
</div>
|
||||
</TabsContent>
|
||||
</Tabs>
|
||||
)
|
||||
}
|
||||
|
||||
// Line Style (Figma)
|
||||
export const LineStyle: Story = {
|
||||
render: () => (
|
||||
<Tabs defaultValue="tab1" variant="line" className="w-[400px]">
|
||||
<TabsList>
|
||||
<TabsTrigger value="tab1">Tab 1</TabsTrigger>
|
||||
<TabsTrigger value="tab2">Tab 2</TabsTrigger>
|
||||
<TabsTrigger value="tab3">Tab 3</TabsTrigger>
|
||||
</TabsList>
|
||||
<TabsContent value="tab1">
|
||||
<div className="p-4 mt-2 border rounded-md bg-muted/10">Content for Tab 1</div>
|
||||
</TabsContent>
|
||||
<TabsContent value="tab2">
|
||||
<div className="p-4 mt-2 border rounded-md bg-muted/10">Content for Tab 2</div>
|
||||
</TabsContent>
|
||||
<TabsContent value="tab3">
|
||||
<div className="p-4 mt-2 border rounded-md bg-muted/10">Content for Tab 3</div>
|
||||
</TabsContent>
|
||||
</Tabs>
|
||||
)
|
||||
}
|
||||
|
||||
// Vertical
|
||||
export const Vertical: Story = {
|
||||
render: () => (
|
||||
<Tabs defaultValue="music" orientation="vertical" variant="line" className="w-[400px]">
|
||||
<TabsList className="w-[120px]">
|
||||
<TabsTrigger value="music">Music</TabsTrigger>
|
||||
<TabsTrigger value="podcasts">Podcasts</TabsTrigger>
|
||||
<TabsTrigger value="live">Live</TabsTrigger>
|
||||
</TabsList>
|
||||
<TabsContent value="music" className="flex-1 p-4 border rounded-md bg-muted/10 mt-0">
|
||||
Music content
|
||||
</TabsContent>
|
||||
<TabsContent value="podcasts" className="flex-1 p-4 border rounded-md bg-muted/10 mt-0">
|
||||
Podcasts content
|
||||
</TabsContent>
|
||||
<TabsContent value="live" className="flex-1 p-4 border rounded-md bg-muted/10 mt-0">
|
||||
Live content
|
||||
</TabsContent>
|
||||
</Tabs>
|
||||
)
|
||||
}
|
||||
|
||||
// With Icons
|
||||
export const WithIcons: Story = {
|
||||
render: () => (
|
||||
<Tabs defaultValue="home" className="w-[400px]">
|
||||
<TabsList>
|
||||
<TabsTrigger value="home" className="gap-2">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width="16"
|
||||
height="16"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round">
|
||||
<path d="m3 9 9-7 9 7v11a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2z" />
|
||||
<polyline points="9 22 9 12 15 12 15 22" />
|
||||
</svg>
|
||||
Home
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="settings" className="gap-2">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width="16"
|
||||
height="16"
|
||||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round">
|
||||
<path d="M12.22 2h-.44a2 2 0 0 0-2 2v.18a2 2 0 0 1-1 1.73l-.43.25a2 2 0 0 1-2 0l-.15-.08a2 2 0 0 0-2.73.73l-.22.38a2 2 0 0 0 .73 2.73l.15.1a2 2 0 0 1 1 1.72v.51a2 2 0 0 1-1 1.74l-.15.09a2 2 0 0 0-.73 2.73l.22.38a2 2 0 0 0 2.73.73l.15-.08a2 2 0 0 1 2 0l.43.25a2 2 0 0 1 1 1.73V20a2 2 0 0 0 2 2h.44a2 2 0 0 0 2-2v-.18a2 2 0 0 1 1-1.73l.43-.25a2 2 0 0 1 2 0l.15.08a2 2 0 0 0 2.73-.73l.22-.38a2 2 0 0 0-.73-2.73l-.15-.1a2 2 0 0 1-1-1.72v-.51a2 2 0 0 1 1-1.74l.15-.09a2 2 0 0 0 .73-2.73l-.22-.38a2 2 0 0 0-2.73-.73l-.15.08a2 2 0 0 1-2 0l-.43-.25a2 2 0 0 1-1-1.73V4a2 2 0 0 0-2-2z" />
|
||||
<circle cx="12" cy="12" r="3" />
|
||||
</svg>
|
||||
Settings
|
||||
</TabsTrigger>
|
||||
</TabsList>
|
||||
<TabsContent value="home" className="mt-2">
|
||||
Home Content
|
||||
</TabsContent>
|
||||
<TabsContent value="settings" className="mt-2">
|
||||
Settings Content
|
||||
</TabsContent>
|
||||
</Tabs>
|
||||
)
|
||||
}
|
||||
@@ -13,7 +13,6 @@
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"outDir": "./dist",
|
||||
"paths": {
|
||||
"@cherrystudio/ui": ["./src/index.ts"],
|
||||
"@cherrystudio/ui/*": ["./src/*"]
|
||||
},
|
||||
"resolveJsonModule": true,
|
||||
|
||||
@@ -104,6 +104,12 @@ const router = express
|
||||
logger.warn('No models available from providers', { filter })
|
||||
}
|
||||
|
||||
logger.info('Models response ready', {
|
||||
filter,
|
||||
total: response.total,
|
||||
modelIds: response.data.map((m) => m.id)
|
||||
})
|
||||
|
||||
return res.json(response satisfies ApiModelsResponse)
|
||||
} catch (error: any) {
|
||||
logger.error('Error fetching models', { error })
|
||||
|
||||
@@ -3,6 +3,7 @@ import { createServer } from 'node:http'
|
||||
import { loggerService } from '@logger'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
|
||||
import { agentService } from '../services/agents'
|
||||
import { windowService } from '../services/WindowService'
|
||||
import { app } from './app'
|
||||
import { config } from './config'
|
||||
@@ -31,6 +32,11 @@ export class ApiServer {
|
||||
// Load config
|
||||
const { port, host } = await config.load()
|
||||
|
||||
// Initialize AgentService
|
||||
logger.info('Initializing AgentService')
|
||||
await agentService.initialize()
|
||||
logger.info('AgentService initialized')
|
||||
|
||||
// Create server with Express app
|
||||
this.server = createServer(app)
|
||||
this.applyServerTimeouts(this.server)
|
||||
|
||||
@@ -32,7 +32,7 @@ export class ModelsService {
|
||||
|
||||
for (const model of models) {
|
||||
const provider = providers.find((p) => p.id === model.provider)
|
||||
// logger.debug(`Processing model ${model.id}`)
|
||||
logger.debug(`Processing model ${model.id}`)
|
||||
if (!provider) {
|
||||
logger.debug(`Skipping model ${model.id} . Reason: Provider not found.`)
|
||||
continue
|
||||
|
||||
@@ -45,10 +45,6 @@ export class CacheService {
|
||||
// Main process cache
|
||||
private cache = new Map<string, CacheEntry>()
|
||||
|
||||
// GC timer reference and interval time (e.g., every 10 minutes)
|
||||
private gcInterval: NodeJS.Timeout | null = null
|
||||
private readonly GC_INTERVAL_MS = 10 * 60 * 1000
|
||||
|
||||
private constructor() {
|
||||
// Private constructor for singleton pattern
|
||||
}
|
||||
@@ -60,9 +56,6 @@ export class CacheService {
|
||||
}
|
||||
|
||||
this.setupIpcHandlers()
|
||||
// Start garbage collection
|
||||
this.startGarbageCollection()
|
||||
|
||||
logger.info('CacheService initialized')
|
||||
}
|
||||
|
||||
@@ -78,32 +71,6 @@ export class CacheService {
|
||||
|
||||
// ============ Main Process Cache (Internal) ============
|
||||
|
||||
/**
|
||||
* Garbage collection logic
|
||||
*/
|
||||
private startGarbageCollection() {
|
||||
if (this.gcInterval) return
|
||||
|
||||
this.gcInterval = setInterval(() => {
|
||||
const now = Date.now()
|
||||
let removedCount = 0
|
||||
|
||||
for (const [key, entry] of this.cache.entries()) {
|
||||
if (entry.expireAt && now > entry.expireAt) {
|
||||
this.cache.delete(key)
|
||||
removedCount++
|
||||
}
|
||||
}
|
||||
|
||||
if (removedCount > 0) {
|
||||
logger.debug(`Garbage collection removed ${removedCount} expired items`)
|
||||
}
|
||||
}, this.GC_INTERVAL_MS)
|
||||
|
||||
// unref allows the process to exit if there are no other activities
|
||||
this.gcInterval.unref()
|
||||
}
|
||||
|
||||
/**
|
||||
* Get value from main process cache
|
||||
*/
|
||||
@@ -191,12 +158,6 @@ export class CacheService {
|
||||
* Cleanup resources
|
||||
*/
|
||||
public cleanup(): void {
|
||||
// Clear the garbage collection interval
|
||||
if (this.gcInterval) {
|
||||
clearInterval(this.gcInterval)
|
||||
this.gcInterval = null
|
||||
}
|
||||
|
||||
// Clear cache
|
||||
this.cache.clear()
|
||||
|
||||
|
||||
@@ -144,9 +144,6 @@ export class PreferenceService {
|
||||
// Custom notifier for main process change notifications
|
||||
private notifier = new PreferenceNotifier()
|
||||
|
||||
// Saves the reference to the cleanup interval
|
||||
private cleanupInterval: NodeJS.Timeout | null = null
|
||||
|
||||
private constructor() {
|
||||
this.setupWindowCleanup()
|
||||
}
|
||||
@@ -504,9 +501,8 @@ export class PreferenceService {
|
||||
}
|
||||
}
|
||||
|
||||
// Run cleanup periodically (every 5 minutes)
|
||||
this.cleanupInterval = setInterval(cleanup, 300 * 1000)
|
||||
this.cleanupInterval.unref()
|
||||
// Run cleanup periodically (every 30 seconds)
|
||||
setInterval(cleanup, 30000)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -529,22 +525,6 @@ export class PreferenceService {
|
||||
return new Map(this.subscriptions)
|
||||
}
|
||||
|
||||
/**
|
||||
* Public cleanup method (for app shutdown or test teardown)
|
||||
*/
|
||||
public cleanup(): void {
|
||||
if (this.cleanupInterval) {
|
||||
clearInterval(this.cleanupInterval)
|
||||
this.cleanupInterval = null
|
||||
}
|
||||
|
||||
this.notifier.removeAllSubscriptions()
|
||||
this.subscriptions.clear()
|
||||
this.initialized = false
|
||||
|
||||
logger.debug('PreferenceService cleanup completed')
|
||||
}
|
||||
|
||||
/**
|
||||
* Deep equality check for preference values
|
||||
* Handles primitives, arrays, and plain objects
|
||||
|
||||
@@ -30,7 +30,7 @@ src/main/data/
|
||||
│ # - TopicRepository.ts # Complex: Topic data access
|
||||
│ # - MessageRepository.ts # Complex: Message data access
|
||||
│
|
||||
├── db/ # Database layer
|
||||
├── db/ # Database layer
|
||||
│ ├── schemas/ # Drizzle table definitions
|
||||
│ │ ├── preference.ts # Preference configuration table
|
||||
│ │ ├── appState.ts # Application state table
|
||||
@@ -38,8 +38,8 @@ src/main/data/
|
||||
│ ├── seeding/ # Database initialization
|
||||
│ └── DbService.ts # Database connection and management
|
||||
│
|
||||
├── migration/ # Data migration system
|
||||
│ └── v2/ # v2 data refactoring migration tools
|
||||
├── migrate/ # Data migration system
|
||||
│ └── dataRefactor/ # v2 data refactoring migration tools
|
||||
│
|
||||
├── CacheService.ts # Infrastructure: Cache management
|
||||
├── DataApiService.ts # Infrastructure: API coordination
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { loggerService } from '@logger'
|
||||
import { sql } from 'drizzle-orm'
|
||||
import { drizzle } from 'drizzle-orm/libsql'
|
||||
import { migrate } from 'drizzle-orm/libsql/migrator'
|
||||
import { app } from 'electron'
|
||||
@@ -14,52 +13,17 @@ const logger = loggerService.withContext('DbService')
|
||||
const DB_NAME = 'cherrystudio.sqlite'
|
||||
const MIGRATIONS_BASE_PATH = 'migrations/sqlite-drizzle'
|
||||
|
||||
/**
|
||||
* Database service managing SQLite connection via Drizzle ORM
|
||||
* Implements singleton pattern for centralized database access
|
||||
*
|
||||
* Features:
|
||||
* - Database initialization and connection management
|
||||
* - Migration and seeding support
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* import { dbService } from '@data/db/DbService'
|
||||
*
|
||||
* // Run migrations
|
||||
* await dbService.migrateDb()
|
||||
*
|
||||
* // Get database instance
|
||||
* const db = dbService.getDb()
|
||||
* ```
|
||||
*/
|
||||
class DbService {
|
||||
private static instance: DbService
|
||||
private db: DbType
|
||||
private isInitialized = false
|
||||
private walConfigured = false
|
||||
|
||||
private constructor() {
|
||||
try {
|
||||
this.db = drizzle({
|
||||
connection: { url: pathToFileURL(path.join(app.getPath('userData'), DB_NAME)).href },
|
||||
casing: 'snake_case'
|
||||
})
|
||||
logger.info('Database connection initialized', {
|
||||
dbPath: path.join(app.getPath('userData'), DB_NAME)
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize database connection', error as Error)
|
||||
throw new Error('Database initialization failed')
|
||||
}
|
||||
this.db = drizzle({
|
||||
connection: { url: pathToFileURL(path.join(app.getPath('userData'), DB_NAME)).href },
|
||||
casing: 'snake_case'
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Get singleton instance of DbService
|
||||
* Creates a new instance if one doesn't exist
|
||||
* @returns {DbService} The singleton DbService instance
|
||||
* @throws {Error} If database initialization fails
|
||||
*/
|
||||
public static getInstance(): DbService {
|
||||
if (!DbService.instance) {
|
||||
DbService.instance = new DbService()
|
||||
@@ -67,106 +31,23 @@ class DbService {
|
||||
return DbService.instance
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the database
|
||||
* @throws {Error} If database initialization fails
|
||||
*/
|
||||
public async init(): Promise<void> {
|
||||
if (this.isInitialized) {
|
||||
logger.warn('Database already initialized, do not need initialize again!')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
// Configure WAL mode on first database operation
|
||||
await this.configureWAL()
|
||||
this.isInitialized = true
|
||||
} catch (error) {
|
||||
logger.error('Database initialization failed', error as Error)
|
||||
throw error
|
||||
}
|
||||
public async migrateDb() {
|
||||
const migrationsFolder = this.getMigrationsFolder()
|
||||
await migrate(this.db, { migrationsFolder })
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure WAL mode for better concurrency performance
|
||||
* Called once during the first database operation
|
||||
*/
|
||||
private async configureWAL(): Promise<void> {
|
||||
if (this.walConfigured) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
await this.db.run(sql`PRAGMA journal_mode = WAL; PRAGMA synchronous = NORMAL; PRAGMA foreign_keys = ON`)
|
||||
|
||||
this.walConfigured = true
|
||||
logger.info('WAL mode configured for database')
|
||||
} catch (error) {
|
||||
logger.warn('Failed to configure WAL mode, using default journal mode', error as Error)
|
||||
// Don't throw error, allow database to continue with default mode
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Run database migrations
|
||||
* @throws {Error} If migration fails
|
||||
*/
|
||||
public async migrateDb(): Promise<void> {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Database is not initialized, please call init() first!')
|
||||
}
|
||||
|
||||
try {
|
||||
const migrationsFolder = this.getMigrationsFolder()
|
||||
await migrate(this.db, { migrationsFolder })
|
||||
|
||||
logger.info('Database migration completed successfully')
|
||||
} catch (error) {
|
||||
logger.error('Database migration failed', error as Error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the database instance
|
||||
* @throws {Error} If database is not initialized
|
||||
*/
|
||||
public getDb(): DbType {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Database is not initialized, please call init() first!')
|
||||
}
|
||||
return this.db
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if database is initialized
|
||||
*/
|
||||
public isReady(): boolean {
|
||||
return this.isInitialized
|
||||
}
|
||||
|
||||
/**
|
||||
* Run seed data migration
|
||||
* @param seedName - Name of the seed to run
|
||||
* @throws {Error} If seed migration fails
|
||||
*/
|
||||
public async migrateSeed(seedName: keyof typeof Seeding): Promise<void> {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error('Database is not initialized, please call init() first!')
|
||||
}
|
||||
|
||||
public async migrateSeed(seedName: keyof typeof Seeding): Promise<boolean> {
|
||||
try {
|
||||
const Seed = Seeding[seedName]
|
||||
if (!Seed) {
|
||||
throw new Error(`Seed "${seedName}" not found`)
|
||||
}
|
||||
|
||||
await new Seed().migrate(this.db)
|
||||
|
||||
logger.info('Seed migration completed successfully', { seedName })
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('Seed migration failed', error as Error, { seedName })
|
||||
throw error
|
||||
logger.error('migration seeding failed', error as Error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
@@ -174,7 +55,7 @@ class DbService {
|
||||
* Get the migrations folder based on the app's packaging status
|
||||
* @returns The path to the migrations folder
|
||||
*/
|
||||
private getMigrationsFolder(): string {
|
||||
private getMigrationsFolder() {
|
||||
if (app.isPackaged) {
|
||||
//see electron-builder.yml, extraResources from/to
|
||||
return path.join(process.resourcesPath, MIGRATIONS_BASE_PATH)
|
||||
|
||||
983
src/main/data/migrate/dataRefactor/DataRefactorMigrateService.ts
Normal file
983
src/main/data/migrate/dataRefactor/DataRefactorMigrateService.ts
Normal file
@@ -0,0 +1,983 @@
|
||||
import { dbService } from '@data/db/DbService'
|
||||
import { appStateTable } from '@data/db/schemas/appState'
|
||||
import { loggerService } from '@logger'
|
||||
import { isDev } from '@main/constant'
|
||||
import BackupManager from '@main/services/BackupManager'
|
||||
import { IpcChannel } from '@shared/IpcChannel'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { app, BrowserWindow, ipcMain } from 'electron'
|
||||
import { app as electronApp } from 'electron'
|
||||
import { join } from 'path'
|
||||
|
||||
import { PreferencesMigrator } from './migrators/PreferencesMigrator'
|
||||
|
||||
const logger = loggerService.withContext('DataRefactorMigrateService')
|
||||
|
||||
const DATA_REFACTOR_MIGRATION_STATUS = 'data_refactor_migration_status'
|
||||
|
||||
// Data refactor migration status interface
|
||||
interface DataRefactorMigrationStatus {
|
||||
completed: boolean
|
||||
completedAt?: number
|
||||
version?: string
|
||||
}
|
||||
|
||||
type MigrationStage =
|
||||
| 'introduction' // Introduction phase - user can cancel
|
||||
| 'backup_required' // Backup required - show backup requirement
|
||||
| 'backup_progress' // Backup in progress - user is backing up
|
||||
| 'backup_confirmed' // Backup confirmed - ready to migrate
|
||||
| 'migration' // Migration in progress - cannot cancel
|
||||
| 'completed' // Completed - restart app
|
||||
| 'error' // Error - recovery options
|
||||
|
||||
interface MigrationProgress {
|
||||
stage: MigrationStage
|
||||
progress: number
|
||||
total: number
|
||||
message: string
|
||||
error?: string
|
||||
}
|
||||
|
||||
interface MigrationResult {
|
||||
success: boolean
|
||||
error?: string
|
||||
migratedCount: number
|
||||
}
|
||||
|
||||
export class DataRefactorMigrateService {
|
||||
private static instance: DataRefactorMigrateService | null = null
|
||||
private migrateWindow: BrowserWindow | null = null
|
||||
private testWindows: BrowserWindow[] = []
|
||||
private backupManager: BackupManager
|
||||
private db = dbService.getDb()
|
||||
private currentProgress: MigrationProgress = {
|
||||
stage: 'introduction',
|
||||
progress: 0,
|
||||
total: 100,
|
||||
message: 'Ready to start data migration'
|
||||
}
|
||||
private isMigrating: boolean = false
|
||||
private reduxData: any = null // Cache for Redux persist data
|
||||
|
||||
constructor() {
|
||||
this.backupManager = new BackupManager()
|
||||
}
|
||||
|
||||
/**
|
||||
* Get backup manager instance for integration with existing backup system
|
||||
*/
|
||||
public getBackupManager(): BackupManager {
|
||||
return this.backupManager
|
||||
}
|
||||
|
||||
/**
|
||||
* Get cached Redux persist data for migration
|
||||
*/
|
||||
public getReduxData(): any {
|
||||
return this.reduxData
|
||||
}
|
||||
|
||||
/**
|
||||
* Set Redux persist data from renderer process
|
||||
*/
|
||||
public setReduxData(data: any): void {
|
||||
this.reduxData = data
|
||||
logger.info('Redux data cached for migration', {
|
||||
dataKeys: data ? Object.keys(data) : [],
|
||||
hasData: !!data
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Register migration-specific IPC handlers
|
||||
* This creates an isolated IPC environment only for migration operations
|
||||
*/
|
||||
public registerMigrationIpcHandlers(): void {
|
||||
logger.info('Registering migration-specific IPC handlers')
|
||||
|
||||
// Only register the minimal IPC handlers needed for migration
|
||||
ipcMain.handle(IpcChannel.DataMigrate_CheckNeeded, async () => {
|
||||
try {
|
||||
return await this.isMigrated()
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: checkMigrationNeeded', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_ProceedToBackup, async () => {
|
||||
try {
|
||||
await this.proceedToBackup()
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: proceedToBackup', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_StartMigration, async () => {
|
||||
try {
|
||||
await this.startMigrationProcess()
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: startMigrationProcess', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_RetryMigration, async () => {
|
||||
try {
|
||||
await this.retryMigration()
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: retryMigration', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_GetProgress, () => {
|
||||
try {
|
||||
return this.getCurrentProgress()
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: getCurrentProgress', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_Cancel, async () => {
|
||||
try {
|
||||
return await this.cancelMigration()
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: cancelMigration', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_BackupCompleted, async () => {
|
||||
try {
|
||||
await this.notifyBackupCompleted()
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: notifyBackupCompleted', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_ShowBackupDialog, async () => {
|
||||
try {
|
||||
logger.info('Opening backup dialog for migration')
|
||||
|
||||
// Update progress to indicate backup dialog is opening
|
||||
// await this.updateProgress('backup_progress', 10, 'Opening backup dialog...')
|
||||
|
||||
// Instead of performing backup automatically, let's open the file dialog
|
||||
// and let the user choose where to save the backup
|
||||
const { dialog } = await import('electron')
|
||||
const result = await dialog.showSaveDialog({
|
||||
title: 'Save Migration Backup',
|
||||
defaultPath: `cherry-studio-migration-backup-${new Date().toISOString().split('T')[0]}.zip`,
|
||||
filters: [
|
||||
{ name: 'Backup Files', extensions: ['zip'] },
|
||||
{ name: 'All Files', extensions: ['*'] }
|
||||
]
|
||||
})
|
||||
|
||||
if (!result.canceled && result.filePath) {
|
||||
logger.info('User selected backup location', { filePath: result.filePath })
|
||||
await this.updateProgress('backup_progress', 10, 'Creating backup file...')
|
||||
|
||||
// Perform the actual backup to the selected location
|
||||
const backupResult = await this.performBackupToFile(result.filePath)
|
||||
|
||||
if (backupResult.success) {
|
||||
await this.updateProgress('backup_progress', 100, 'Backup created successfully!')
|
||||
// Wait a moment to show the success message, then transition to confirmed state
|
||||
setTimeout(async () => {
|
||||
await this.updateProgress(
|
||||
'backup_confirmed',
|
||||
100,
|
||||
'Backup completed! Ready to start migration. Click "Start Migration" to continue.'
|
||||
)
|
||||
}, 1000)
|
||||
} else {
|
||||
await this.updateProgress('backup_required', 0, `Backup failed: ${backupResult.error}`)
|
||||
}
|
||||
|
||||
return backupResult
|
||||
} else {
|
||||
logger.info('User cancelled backup dialog')
|
||||
await this.updateProgress('backup_required', 0, 'Backup cancelled. Please create a backup to continue.')
|
||||
return { success: false, error: 'Backup cancelled by user' }
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: showBackupDialog', error as Error)
|
||||
await this.updateProgress('backup_required', 0, 'Backup process failed')
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_StartFlow, async () => {
|
||||
try {
|
||||
return await this.startMigrationFlow()
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: startMigrationFlow', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_RestartApp, async () => {
|
||||
try {
|
||||
await this.restartApplication()
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: restartApplication', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_CloseWindow, () => {
|
||||
try {
|
||||
this.closeMigrateWindow()
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: closeMigrateWindow', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_SendReduxData, (_event, data) => {
|
||||
try {
|
||||
this.setReduxData(data)
|
||||
return { success: true }
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: sendReduxData', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
ipcMain.handle(IpcChannel.DataMigrate_GetReduxData, () => {
|
||||
try {
|
||||
return this.getReduxData()
|
||||
} catch (error) {
|
||||
logger.error('IPC handler error: getReduxData', error as Error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
logger.info('Migration IPC handlers registered successfully')
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove migration-specific IPC handlers
|
||||
* Clean up when migration is complete or cancelled
|
||||
*/
|
||||
public unregisterMigrationIpcHandlers(): void {
|
||||
logger.info('Unregistering migration-specific IPC handlers')
|
||||
|
||||
try {
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_CheckNeeded)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_GetProgress)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_Cancel)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_BackupCompleted)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_ShowBackupDialog)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_StartFlow)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_ProceedToBackup)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_StartMigration)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_RetryMigration)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_RestartApp)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_CloseWindow)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_SendReduxData)
|
||||
ipcMain.removeAllListeners(IpcChannel.DataMigrate_GetReduxData)
|
||||
|
||||
logger.info('Migration IPC handlers unregistered successfully')
|
||||
} catch (error) {
|
||||
logger.warn('Error unregistering migration IPC handlers', error as Error)
|
||||
}
|
||||
}
|
||||
|
||||
public static getInstance(): DataRefactorMigrateService {
|
||||
if (!DataRefactorMigrateService.instance) {
|
||||
DataRefactorMigrateService.instance = new DataRefactorMigrateService()
|
||||
}
|
||||
return DataRefactorMigrateService.instance
|
||||
}
|
||||
|
||||
/**
|
||||
* Convenient static method to open test window
|
||||
*/
|
||||
public static openTestWindow(): BrowserWindow {
|
||||
const instance = DataRefactorMigrateService.getInstance()
|
||||
return instance.createTestWindow()
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if migration is needed
|
||||
*/
|
||||
async isMigrated(): Promise<boolean> {
|
||||
try {
|
||||
const isMigrated = await this.isMigrationCompleted()
|
||||
if (isMigrated) {
|
||||
logger.info('Data Refactor Migration already completed')
|
||||
return true
|
||||
}
|
||||
|
||||
logger.info('Data Refactor Migration is needed')
|
||||
return false
|
||||
} catch (error) {
|
||||
logger.error('Failed to check migration status', error as Error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if migration is already completed
|
||||
*/
|
||||
private async isMigrationCompleted(): Promise<boolean> {
|
||||
try {
|
||||
logger.debug('Checking migration completion status in database')
|
||||
|
||||
// First check if the database is available
|
||||
if (!this.db) {
|
||||
logger.warn('Database not initialized, assuming migration not completed')
|
||||
return false
|
||||
}
|
||||
|
||||
const result = await this.db
|
||||
.select()
|
||||
.from(appStateTable)
|
||||
.where(eq(appStateTable.key, DATA_REFACTOR_MIGRATION_STATUS))
|
||||
.limit(1)
|
||||
|
||||
logger.debug('Migration status query result', { resultCount: result.length })
|
||||
|
||||
if (result.length === 0) {
|
||||
logger.info('No migration status record found, migration needed')
|
||||
return false
|
||||
}
|
||||
|
||||
const status = result[0].value as DataRefactorMigrationStatus
|
||||
const isCompleted = status.completed === true
|
||||
|
||||
logger.info('Migration status found', {
|
||||
completed: isCompleted,
|
||||
completedAt: status.completedAt,
|
||||
version: status.version
|
||||
})
|
||||
|
||||
return isCompleted
|
||||
} catch (error) {
|
||||
logger.error('Failed to check migration state - treating as not completed', error as Error)
|
||||
// In case of database errors, assume migration is needed to be safe
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark migration as completed
|
||||
*/
|
||||
private async markMigrationCompleted(): Promise<void> {
|
||||
try {
|
||||
const migrationStatus: DataRefactorMigrationStatus = {
|
||||
completed: true,
|
||||
completedAt: Date.now(),
|
||||
version: electronApp.getVersion()
|
||||
}
|
||||
|
||||
await this.db
|
||||
.insert(appStateTable)
|
||||
.values({
|
||||
key: DATA_REFACTOR_MIGRATION_STATUS,
|
||||
value: migrationStatus, // drizzle handles JSON serialization automatically
|
||||
description: 'Data refactoring migration status from legacy format (ElectronStore + Redux persist) to SQLite',
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now()
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: appStateTable.key,
|
||||
set: {
|
||||
value: migrationStatus,
|
||||
updatedAt: Date.now()
|
||||
}
|
||||
})
|
||||
|
||||
logger.info('Migration marked as completed in app_state table', {
|
||||
version: migrationStatus.version,
|
||||
completedAt: migrationStatus.completedAt
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to mark migration as completed', error as Error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create and show migration window
|
||||
*/
|
||||
private createMigrateWindow(): BrowserWindow {
|
||||
if (this.migrateWindow && !this.migrateWindow.isDestroyed()) {
|
||||
this.migrateWindow.show()
|
||||
return this.migrateWindow
|
||||
}
|
||||
|
||||
// Register migration-specific IPC handlers before creating window
|
||||
this.registerMigrationIpcHandlers()
|
||||
|
||||
this.migrateWindow = new BrowserWindow({
|
||||
width: 640,
|
||||
height: 480,
|
||||
resizable: false,
|
||||
maximizable: false,
|
||||
minimizable: false,
|
||||
show: false,
|
||||
frame: false,
|
||||
autoHideMenuBar: true,
|
||||
webPreferences: {
|
||||
preload: join(__dirname, '../preload/simplest.js'),
|
||||
sandbox: false,
|
||||
webSecurity: false,
|
||||
contextIsolation: true
|
||||
}
|
||||
})
|
||||
|
||||
// Load the migration window
|
||||
if (isDev && process.env['ELECTRON_RENDERER_URL']) {
|
||||
this.migrateWindow.loadURL(process.env['ELECTRON_RENDERER_URL'] + '/dataRefactorMigrate.html')
|
||||
} else {
|
||||
this.migrateWindow.loadFile(join(__dirname, '../renderer/dataRefactorMigrate.html'))
|
||||
}
|
||||
|
||||
this.migrateWindow.once('ready-to-show', () => {
|
||||
this.migrateWindow?.show()
|
||||
})
|
||||
|
||||
this.migrateWindow.on('closed', () => {
|
||||
this.migrateWindow = null
|
||||
// Clean up IPC handlers when window is closed
|
||||
this.unregisterMigrationIpcHandlers()
|
||||
})
|
||||
|
||||
logger.info('Migration window created')
|
||||
return this.migrateWindow
|
||||
}
|
||||
|
||||
/**
|
||||
* Show migration window and initialize introduction stage
|
||||
*/
|
||||
async runMigration(): Promise<void> {
|
||||
if (this.isMigrating) {
|
||||
logger.warn('Migration already in progress')
|
||||
this.migrateWindow?.show()
|
||||
return
|
||||
}
|
||||
|
||||
this.isMigrating = true
|
||||
logger.info('Showing migration window')
|
||||
|
||||
// Initialize introduction stage
|
||||
await this.updateProgress('introduction', 0, 'Welcome to Cherry Studio data migration')
|
||||
|
||||
// Create migration window
|
||||
const window = this.createMigrateWindow()
|
||||
|
||||
// Wait for window to be ready
|
||||
await new Promise<void>((resolve) => {
|
||||
if (window.webContents.isLoading()) {
|
||||
window.webContents.once('did-finish-load', () => resolve())
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Start migration flow - simply ensure we're in introduction stage
|
||||
* This is called when user first opens the migration window
|
||||
*/
|
||||
async startMigrationFlow(): Promise<void> {
|
||||
if (!this.isMigrating) {
|
||||
logger.warn('Migration not started, cannot execute flow.')
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('Confirming introduction stage for migration flow')
|
||||
await this.updateProgress('introduction', 0, 'Ready to begin migration process. Please read the information below.')
|
||||
}
|
||||
|
||||
/**
|
||||
* Proceed from introduction to backup requirement stage
|
||||
* This is called when user clicks "Next" in introduction
|
||||
*/
|
||||
async proceedToBackup(): Promise<void> {
|
||||
if (!this.isMigrating) {
|
||||
logger.warn('Migration not started, cannot proceed to backup.')
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('Proceeding from introduction to backup stage')
|
||||
await this.updateProgress('backup_required', 0, 'Data backup is required before migration can proceed')
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the actual migration process
|
||||
* This is called when user confirms backup and clicks "Start Migration"
|
||||
*/
|
||||
async startMigrationProcess(): Promise<void> {
|
||||
if (!this.isMigrating) {
|
||||
logger.warn('Migration not started, cannot start migration process.')
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('Starting actual migration process')
|
||||
try {
|
||||
await this.executeMigrationFlow()
|
||||
} catch (error) {
|
||||
logger.error('Migration process failed', error as Error)
|
||||
// error is already handled in executeMigrationFlow
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the actual migration process
|
||||
* Called after user has confirmed backup completion
|
||||
*/
|
||||
private async executeMigrationFlow(): Promise<void> {
|
||||
try {
|
||||
// Start migration
|
||||
await this.updateProgress('migration', 0, 'Starting data migration...')
|
||||
const migrationResult = await this.executeMigration()
|
||||
|
||||
if (!migrationResult.success) {
|
||||
throw new Error(migrationResult.error || 'Migration failed')
|
||||
}
|
||||
|
||||
await this.updateProgress(
|
||||
'migration',
|
||||
100,
|
||||
`Migration completed: ${migrationResult.migratedCount} items migrated`
|
||||
)
|
||||
|
||||
// Mark as completed
|
||||
await this.markMigrationCompleted()
|
||||
|
||||
await this.updateProgress('completed', 100, 'Migration completed successfully! Click restart to continue.')
|
||||
} catch (error) {
|
||||
logger.error('Migration flow failed', error as Error)
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
await this.updateProgress(
|
||||
'error',
|
||||
0,
|
||||
'Migration failed. You can close this window and try again, or continue using the previous version.',
|
||||
errorMessage
|
||||
)
|
||||
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform backup to a specific file location
|
||||
*/
|
||||
private async performBackupToFile(filePath: string): Promise<{ success: boolean; error?: string }> {
|
||||
try {
|
||||
logger.info('Performing backup to file', { filePath })
|
||||
|
||||
// Get backup data from the current application state
|
||||
const backupData = await this.getBackupData()
|
||||
|
||||
// Extract directory and filename from the full path
|
||||
const path = await import('path')
|
||||
const destinationDir = path.dirname(filePath)
|
||||
const fileName = path.basename(filePath)
|
||||
|
||||
// Use the existing backup manager to create a backup
|
||||
const backupPath = await this.backupManager.backup(
|
||||
null as any, // IpcMainInvokeEvent - we're calling directly so pass null
|
||||
fileName,
|
||||
backupData,
|
||||
destinationDir,
|
||||
false // Don't skip backup files - full backup for migration safety
|
||||
)
|
||||
|
||||
if (backupPath) {
|
||||
logger.info('Backup created successfully', { path: backupPath })
|
||||
return { success: true }
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Backup process did not return a file path'
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
logger.error('Backup failed during migration:', error as Error)
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get backup data from the current application
|
||||
* This creates a minimal backup with essential system information
|
||||
*/
|
||||
private async getBackupData(): Promise<string> {
|
||||
try {
|
||||
const fs = await import('fs-extra')
|
||||
const path = await import('path')
|
||||
|
||||
// Gather basic system information
|
||||
const data = {
|
||||
backup: {
|
||||
timestamp: new Date().toISOString(),
|
||||
version: electronApp.getVersion(),
|
||||
type: 'pre-migration-backup',
|
||||
note: 'This is a safety backup created before data migration'
|
||||
},
|
||||
system: {
|
||||
platform: process.platform,
|
||||
arch: process.arch,
|
||||
nodeVersion: process.version
|
||||
},
|
||||
// Include basic configuration files if they exist
|
||||
configs: {} as Record<string, any>
|
||||
}
|
||||
|
||||
// Try to read some basic configuration files (non-critical if they fail)
|
||||
try {
|
||||
const { getDataPath } = await import('@main/utils')
|
||||
const dataPath = getDataPath()
|
||||
|
||||
// Check if there are any config files we should backup
|
||||
const configFiles = ['config.json', 'settings.json', 'preferences.json']
|
||||
for (const configFile of configFiles) {
|
||||
const configPath = path.join(dataPath, configFile)
|
||||
if (await fs.pathExists(configPath)) {
|
||||
try {
|
||||
const configContent = await fs.readJson(configPath)
|
||||
data.configs[configFile] = configContent
|
||||
} catch (err) {
|
||||
logger.warn(`Could not read config file ${configFile}`, err as Error)
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn('Could not access data directory for config backup', err as Error)
|
||||
}
|
||||
|
||||
return JSON.stringify(data, null, 2)
|
||||
} catch (error) {
|
||||
logger.error('Failed to get backup data:', error as Error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify that backup has been completed (called from IPC handler)
|
||||
*/
|
||||
public async notifyBackupCompleted(): Promise<void> {
|
||||
logger.info('Backup completed by user')
|
||||
await this.updateProgress(
|
||||
'backup_confirmed',
|
||||
100,
|
||||
'Backup completed! Ready to start migration. Click "Start Migration" to continue.'
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the actual migration
|
||||
*/
|
||||
private async executeMigration(): Promise<MigrationResult> {
|
||||
try {
|
||||
logger.info('Executing migration')
|
||||
|
||||
// Create preferences migrator with reference to this service for Redux data access
|
||||
const preferencesMigrator = new PreferencesMigrator(this)
|
||||
|
||||
// Execute preferences migration with progress updates
|
||||
const result = await preferencesMigrator.migrate((progress, message) => {
|
||||
this.updateProgress('migration', progress, message)
|
||||
})
|
||||
|
||||
logger.info('Migration execution completed', result)
|
||||
|
||||
return {
|
||||
success: result.success,
|
||||
migratedCount: result.migratedCount,
|
||||
error: result.errors.length > 0 ? result.errors.map((e) => e.error).join('; ') : undefined
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Migration execution failed', error as Error)
|
||||
return {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
migratedCount: 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update migration progress and broadcast to window
|
||||
*/
|
||||
private async updateProgress(
|
||||
stage: MigrationStage,
|
||||
progress: number,
|
||||
message: string,
|
||||
error?: string
|
||||
): Promise<void> {
|
||||
this.currentProgress = {
|
||||
stage,
|
||||
progress,
|
||||
total: 100,
|
||||
message,
|
||||
error
|
||||
}
|
||||
|
||||
if (this.migrateWindow && !this.migrateWindow.isDestroyed()) {
|
||||
this.migrateWindow.webContents.send(IpcChannel.DataMigrateProgress, this.currentProgress)
|
||||
}
|
||||
|
||||
logger.debug('Progress updated', this.currentProgress)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current migration progress
|
||||
*/
|
||||
getCurrentProgress(): MigrationProgress {
|
||||
return this.currentProgress
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancel migration process
|
||||
* Only allowed during introduction and backup phases
|
||||
*/
|
||||
async cancelMigration(): Promise<void> {
|
||||
if (!this.isMigrating) {
|
||||
return
|
||||
}
|
||||
|
||||
const currentStage = this.currentProgress.stage
|
||||
if (currentStage === 'migration') {
|
||||
logger.warn('Cannot cancel migration during migration process')
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('Cancelling migration process')
|
||||
this.isMigrating = false
|
||||
this.closeMigrateWindow()
|
||||
}
|
||||
|
||||
/**
|
||||
* Retry migration after error
|
||||
*/
|
||||
async retryMigration(): Promise<void> {
|
||||
logger.info('Retrying migration process')
|
||||
await this.updateProgress(
|
||||
'introduction',
|
||||
0,
|
||||
'Ready to restart migration process. Please read the information below.'
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Close migration window
|
||||
*/
|
||||
private closeMigrateWindow(): void {
|
||||
if (this.migrateWindow && !this.migrateWindow.isDestroyed()) {
|
||||
this.migrateWindow.close()
|
||||
this.migrateWindow = null
|
||||
}
|
||||
|
||||
this.isMigrating = false
|
||||
// Clean up migration-specific IPC handlers
|
||||
this.unregisterMigrationIpcHandlers()
|
||||
}
|
||||
|
||||
/**
 * Restart the application after a successful migration.
 *
 * Verifies the completion flag is persisted, waits briefly for pending
 * database writes, then either relaunches (production) or shows a manual
 * restart dialog (development, where app.relaunch() is unreliable).
 * On failure the error is surfaced to the migration UI instead of thrown.
 */
private async restartApplication(): Promise<void> {
  try {
    logger.info('Preparing to restart application after migration completion')

    // Ensure migration status is properly saved before restart
    await this.verifyMigrationStatus()

    // Give some time for database operations to complete
    await new Promise((resolve) => setTimeout(resolve, 500))

    logger.info('Restarting application now')

    // In development mode, relaunch might not work properly
    if (process.env.NODE_ENV === 'development' || !app.isPackaged) {
      logger.warn('Development mode detected - showing restart instruction instead of auto-restart')

      // Lazy import keeps 'dialog' out of the startup path.
      const { dialog } = await import('electron')
      await dialog.showMessageBox({
        type: 'info',
        title: 'Migration Complete - Restart Required',
        message:
          'Data migration completed successfully!\n\nSince you are in development mode, please manually restart the application to continue.',
        buttons: ['Close App'],
        defaultId: 0
      })

      // Clean up migration window and handlers after showing dialog
      this.closeMigrateWindow()
      app.quit()
    } else {
      // Production mode - clean up first, then relaunch
      this.closeMigrateWindow()
      app.relaunch()
      app.exit(0)
    }
  } catch (error) {
    logger.error('Failed to restart application', error as Error)
    // Update UI to show restart failure and provide manual restart instruction
    await this.updateProgress(
      'error',
      0,
      'Application restart failed. Please manually restart the application to complete migration.',
      error instanceof Error ? error.message : String(error)
    )
    // Fallback: just close migration window and let user manually restart
    this.closeMigrateWindow()
  }
}
|
||||
|
||||
/**
|
||||
* Verify that migration status is properly saved
|
||||
*/
|
||||
private async verifyMigrationStatus(): Promise<void> {
|
||||
try {
|
||||
const isCompleted = await this.isMigrationCompleted()
|
||||
if (isCompleted) {
|
||||
logger.info('Migration status verified as completed')
|
||||
} else {
|
||||
logger.warn('Migration status not found as completed, attempting to mark again')
|
||||
await this.markMigrationCompleted()
|
||||
|
||||
// Double-check
|
||||
const recheck = await this.isMigrationCompleted()
|
||||
if (recheck) {
|
||||
logger.info('Migration status successfully marked as completed on retry')
|
||||
} else {
|
||||
logger.error('Failed to mark migration as completed even on retry')
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to verify migration status', error as Error)
|
||||
// Don't throw - still allow restart
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create, track and show a new test window used to exercise
 * PreferenceService / usePreference from the renderer.
 *
 * @returns the newly created BrowserWindow (already added to the pool)
 */
public createTestWindow(): BrowserWindow {
  const windowNumber = this.testWindows.length + 1

  const win = new BrowserWindow({
    width: 1000,
    height: 700,
    minWidth: 800,
    minHeight: 600,
    resizable: true,
    maximizable: true,
    minimizable: true,
    show: false, // shown on 'ready-to-show' to avoid a blank flash
    frame: true,
    autoHideMenuBar: true,
    title: `Data Refactor Test Window #${windowNumber} - PreferenceService Testing`,
    webPreferences: {
      preload: join(__dirname, '../preload/index.js'),
      sandbox: false,
      webSecurity: false,
      contextIsolation: true
    }
  })

  // Track the window so the close/lookup helpers can find it later.
  this.testWindows.push(win)

  if (isDev && process.env['ELECTRON_RENDERER_URL']) {
    // Dev-server build: load over HTTP and open DevTools for easier testing.
    win.loadURL(process.env['ELECTRON_RENDERER_URL'] + '/dataRefactorTest.html')
    win.webContents.openDevTools()
  } else {
    win.loadFile(join(__dirname, '../renderer/dataRefactorTest.html'))
  }

  win.once('ready-to-show', () => {
    win?.show()
    win?.focus()
  })

  // Drop the window from the pool once it is gone.
  win.on('closed', () => {
    const idx = this.testWindows.indexOf(win)
    if (idx > -1) {
      this.testWindows.splice(idx, 1)
    }
  })

  logger.info(`Test window #${windowNumber} created for PreferenceService testing`)
  return win
}
|
||||
|
||||
/**
|
||||
* Get test window instance (first one)
|
||||
*/
|
||||
public getTestWindow(): BrowserWindow | null {
|
||||
return this.testWindows.length > 0 ? this.testWindows[0] : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all test windows
|
||||
*/
|
||||
public getTestWindows(): BrowserWindow[] {
|
||||
return this.testWindows.filter((window) => !window.isDestroyed())
|
||||
}
|
||||
|
||||
/**
|
||||
* Close all test windows
|
||||
*/
|
||||
public closeTestWindows(): void {
|
||||
this.testWindows.forEach((window) => {
|
||||
if (!window.isDestroyed()) {
|
||||
window.close()
|
||||
}
|
||||
})
|
||||
this.testWindows = []
|
||||
logger.info('All test windows closed')
|
||||
}
|
||||
|
||||
/**
|
||||
* Close a specific test window
|
||||
*/
|
||||
public closeTestWindow(window?: BrowserWindow): void {
|
||||
if (window) {
|
||||
if (!window.isDestroyed()) {
|
||||
window.close()
|
||||
}
|
||||
} else {
|
||||
// Close first window if no specific window provided
|
||||
const firstWindow = this.getTestWindow()
|
||||
if (firstWindow && !firstWindow.isDestroyed()) {
|
||||
firstWindow.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if any test windows are open
|
||||
*/
|
||||
public isTestWindowOpen(): boolean {
|
||||
return this.testWindows.some((window) => !window.isDestroyed())
|
||||
}
|
||||
}
|
||||
|
||||
// Export singleton instance - callers import this shared handle instead of
// constructing the service themselves (the constructor is behind getInstance()).
export const dataRefactorMigrateService = DataRefactorMigrateService.getInstance()
|
||||
@@ -0,0 +1,642 @@
|
||||
import { dbService } from '@data/db/DbService'
|
||||
import { preferenceTable } from '@data/db/schemas/preference'
|
||||
import { loggerService } from '@logger'
|
||||
import { DefaultPreferences } from '@shared/data/preference/preferenceSchemas'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
|
||||
import { configManager } from '../../../../services/ConfigManager'
|
||||
import type { DataRefactorMigrateService } from '../DataRefactorMigrateService'
|
||||
import { ELECTRON_STORE_MAPPINGS, REDUX_STORE_MAPPINGS } from './PreferencesMappings'
|
||||
|
||||
const logger = loggerService.withContext('PreferencesMigrator')

/** One legacy source key mapped onto a target preference key. */
export interface MigrationItem {
  originalKey: string
  targetKey: string
  // Declared value type; 'unknown' defers inference to conversion time.
  type: string
  defaultValue: any
  source: 'electronStore' | 'redux'
  sourceCategory?: string // Optional for electronStore
}

/** Overall outcome of a migration run, including per-key failures. */
export interface MigrationResult {
  success: boolean
  migratedCount: number
  errors: Array<{
    key: string
    error: string
  }>
}

/** A single converted value, ready to be written to the preference table. */
export interface PreparedMigrationData {
  targetKey: string
  value: any
  source: 'electronStore' | 'redux'
  originalKey: string
  sourceCategory?: string
}

/** Result of the in-memory preparation phase, split into inserts vs updates. */
export interface BatchMigrationResult {
  newPreferences: PreparedMigrationData[]
  updatedPreferences: PreparedMigrationData[]
  skippedCount: number
  preparationErrors: Array<{
    key: string
    error: string
  }>
}
|
||||
|
||||
/**
 * Migrates legacy preferences (ElectronStore + Redux Persist) into the
 * SQLite `preference` table using batched, transactional writes.
 */
export class PreferencesMigrator {
  // Shared drizzle connection used by all read/write helpers below.
  private db = dbService.getDb()
  // Owning service; supplies cached Redux persist data via getReduxData().
  private migrateService: DataRefactorMigrateService

  constructor(migrateService: DataRefactorMigrateService) {
    this.migrateService = migrateService
  }
|
||||
|
||||
/**
 * Execute preferences migration from all sources using batch operations
 * and a single transaction.
 *
 * Phase 1 reads and converts everything in memory; phase 2 writes it in
 * one transaction, so a write failure rolls back all changes. Preparation
 * errors are collected per key and do not abort the run by themselves.
 *
 * @param onProgress - optional callback receiving (percent 0-100, message)
 * @returns result with migrated count and any per-key or global errors;
 *          `success` is true only when the error list is empty
 */
async migrate(onProgress?: (progress: number, message: string) => void): Promise<MigrationResult> {
  logger.info('Starting preferences migration with batch operations')

  const result: MigrationResult = {
    success: true,
    migratedCount: 0,
    errors: []
  }

  try {
    // Phase 1: Prepare all migration data in memory (50% of progress)
    onProgress?.(10, 'Loading migration items...')
    const migrationItems = await this.loadMigrationItems()
    logger.info(`Found ${migrationItems.length} items to migrate`)

    onProgress?.(25, 'Preparing migration data...')
    const batchResult = await this.prepareMigrationData(migrationItems, (progress) => {
      // Map preparation progress to 25-50% of total progress
      const totalProgress = 25 + Math.floor(progress * 0.25)
      onProgress?.(totalProgress, 'Preparing migration data...')
    })

    // Add preparation errors to result
    result.errors.push(...batchResult.preparationErrors)

    if (batchResult.preparationErrors.length > 0) {
      logger.warn('Some items failed during preparation', {
        errorCount: batchResult.preparationErrors.length
      })
    }

    // Phase 2: Execute batch migration in transaction (50% of progress)
    onProgress?.(50, 'Executing batch migration...')

    const totalOperations = batchResult.newPreferences.length + batchResult.updatedPreferences.length
    if (totalOperations > 0) {
      try {
        await this.executeBatchMigration(batchResult, (progress) => {
          // Map execution progress to 50-90% of total progress
          const totalProgress = 50 + Math.floor(progress * 0.4)
          onProgress?.(totalProgress, 'Executing batch migration...')
        })

        result.migratedCount = totalOperations
        logger.info('Batch migration completed successfully', {
          newPreferences: batchResult.newPreferences.length,
          updatedPreferences: batchResult.updatedPreferences.length,
          skippedCount: batchResult.skippedCount
        })
      } catch (batchError) {
        logger.error('Batch migration transaction failed - all changes rolled back', batchError as Error)
        result.success = false
        result.errors.push({
          key: 'batch_migration',
          error: `Transaction failed: ${batchError instanceof Error ? batchError.message : String(batchError)}`
        })
        // Note: No need to manually rollback - transaction handles this automatically
      }
    } else {
      logger.info('No preferences to migrate')
    }

    onProgress?.(100, 'Migration completed')

    // Set success based on whether we had any critical errors
    result.success = result.errors.length === 0

    logger.info('Preferences migration completed', {
      migratedCount: result.migratedCount,
      errorCount: result.errors.length,
      skippedCount: batchResult.skippedCount
    })
  } catch (error) {
    logger.error('Preferences migration failed', error as Error)
    result.success = false
    result.errors.push({
      key: 'global',
      error: error instanceof Error ? error.message : String(error)
    })
  }

  return result
}
|
||||
|
||||
/**
|
||||
* Load migration items from generated mapping relationships
|
||||
* This uses the auto-generated PreferencesMappings.ts file
|
||||
*/
|
||||
private async loadMigrationItems(): Promise<MigrationItem[]> {
|
||||
logger.info('Loading migration items from generated mappings')
|
||||
const items: MigrationItem[] = []
|
||||
|
||||
// Process ElectronStore mappings - no sourceCategory needed
|
||||
ELECTRON_STORE_MAPPINGS.forEach((mapping) => {
|
||||
const defaultValue = DefaultPreferences.default[mapping.targetKey] ?? null
|
||||
items.push({
|
||||
originalKey: mapping.originalKey,
|
||||
targetKey: mapping.targetKey,
|
||||
type: 'unknown', // Type will be inferred from defaultValue during conversion
|
||||
defaultValue,
|
||||
source: 'electronStore'
|
||||
})
|
||||
})
|
||||
|
||||
// Process Redux mappings
|
||||
Object.entries(REDUX_STORE_MAPPINGS).forEach(([category, mappings]) => {
|
||||
mappings.forEach((mapping) => {
|
||||
const defaultValue = DefaultPreferences.default[mapping.targetKey] ?? null
|
||||
items.push({
|
||||
originalKey: mapping.originalKey, // May contain nested paths like "codeEditor.enabled"
|
||||
targetKey: mapping.targetKey,
|
||||
sourceCategory: category,
|
||||
type: 'unknown', // Type will be inferred from defaultValue during conversion
|
||||
defaultValue,
|
||||
source: 'redux'
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
logger.info('Successfully loaded migration items from generated mappings', {
|
||||
totalItems: items.length,
|
||||
electronStoreItems: items.filter((i) => i.source === 'electronStore').length,
|
||||
reduxItems: items.filter((i) => i.source === 'redux').length
|
||||
})
|
||||
|
||||
return items
|
||||
}
|
||||
|
||||
/**
 * Prepare all migration data in memory before any database operation.
 *
 * For each item: read the raw value from its source, fall back to the
 * mapping's default when the source has nothing (skip when there is no
 * meaningful default either), convert the value, and categorize it as an
 * insert or an update against the existing preference rows. Per-item
 * failures are recorded and do not abort the loop.
 *
 * @param migrationItems - items produced by loadMigrationItems()
 * @param onProgress - optional callback receiving percent 0-100
 * @returns inserts/updates split plus skip count and preparation errors
 */
private async prepareMigrationData(
  migrationItems: MigrationItem[],
  onProgress?: (progress: number) => void
): Promise<BatchMigrationResult> {
  logger.info('Starting migration data preparation', { itemCount: migrationItems.length })

  const batchResult: BatchMigrationResult = {
    newPreferences: [],
    updatedPreferences: [],
    skippedCount: 0,
    preparationErrors: []
  }

  // Get existing preferences to determine which are new vs updated
  const existingPreferences = await this.getExistingPreferences()
  const existingKeys = new Set(existingPreferences.map((p) => p.key))

  // Process each migration item
  for (let i = 0; i < migrationItems.length; i++) {
    const item = migrationItems[i]

    try {
      // Read original value from source
      let originalValue: any
      if (item.source === 'electronStore') {
        originalValue = await this.readFromElectronStore(item.originalKey)
      } else if (item.source === 'redux') {
        if (!item.sourceCategory) {
          throw new Error(`Redux source requires sourceCategory for item: ${item.originalKey}`)
        }
        originalValue = await this.readFromReduxPersist(item.sourceCategory, item.originalKey)
      } else {
        throw new Error(`Unknown source: ${item.source}`)
      }

      // Determine value to migrate
      let valueToMigrate = originalValue
      let shouldSkip = false

      if (originalValue === undefined || originalValue === null) {
        if (item.defaultValue !== null && item.defaultValue !== undefined) {
          valueToMigrate = item.defaultValue
          logger.debug('Using default value for preparation', {
            targetKey: item.targetKey,
            source: item.source,
            originalKey: item.originalKey
          })
        } else {
          shouldSkip = true
          batchResult.skippedCount++
          logger.debug('Skipping item - no data and no meaningful default', {
            targetKey: item.targetKey,
            source: item.source,
            originalKey: item.originalKey
          })
        }
      }

      if (!shouldSkip) {
        // Convert value to appropriate type
        const convertedValue = this.convertValue(valueToMigrate, item.type)

        // Create prepared migration data
        const preparedData: PreparedMigrationData = {
          targetKey: item.targetKey,
          value: convertedValue,
          source: item.source,
          originalKey: item.originalKey,
          sourceCategory: item.sourceCategory
        }

        // Categorize as new or updated
        if (existingKeys.has(item.targetKey)) {
          batchResult.updatedPreferences.push(preparedData)
        } else {
          batchResult.newPreferences.push(preparedData)
        }

        logger.debug('Prepared migration data', {
          targetKey: item.targetKey,
          isUpdate: existingKeys.has(item.targetKey),
          source: item.source
        })
      }
    } catch (error) {
      logger.error('Failed to prepare migration item', { item, error })
      batchResult.preparationErrors.push({
        key: item.originalKey,
        error: error instanceof Error ? error.message : String(error)
      })
    }

    // Report progress
    const progress = Math.floor(((i + 1) / migrationItems.length) * 100)
    onProgress?.(progress)
  }

  logger.info('Migration data preparation completed', {
    newPreferences: batchResult.newPreferences.length,
    updatedPreferences: batchResult.updatedPreferences.length,
    skippedCount: batchResult.skippedCount,
    errorCount: batchResult.preparationErrors.length
  })

  return batchResult
}
|
||||
|
||||
/**
|
||||
* Get all existing preferences from database to determine new vs updated items
|
||||
*/
|
||||
private async getExistingPreferences(): Promise<Array<{ key: string; value: any }>> {
|
||||
try {
|
||||
const preferences = await this.db
|
||||
.select({
|
||||
key: preferenceTable.key,
|
||||
value: preferenceTable.value
|
||||
})
|
||||
.from(preferenceTable)
|
||||
.where(eq(preferenceTable.scope, 'default'))
|
||||
|
||||
logger.debug('Loaded existing preferences', { count: preferences.length })
|
||||
return preferences
|
||||
} catch (error) {
|
||||
logger.error('Failed to load existing preferences', error as Error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Write the prepared batch inside a single transaction: one bulk insert
 * for new rows, then chunked parallel updates for existing rows. Any
 * failure rejects the transaction and rolls back everything.
 *
 * @param batchData - validated output of prepareMigrationData()
 * @param onProgress - optional callback receiving percent 0-100
 * @throws rethrows validation or database errors to the caller
 */
private async executeBatchMigration(
  batchData: BatchMigrationResult,
  onProgress?: (progress: number) => void
): Promise<void> {
  logger.info('Starting batch migration execution', {
    newCount: batchData.newPreferences.length,
    updateCount: batchData.updatedPreferences.length
  })

  // Validate batch data before starting transaction
  this.validateBatchData(batchData)

  await this.db.transaction(async (tx) => {
    const scope = 'default'
    // Single timestamp so all rows written by this run agree.
    const timestamp = Date.now()
    let completedOperations = 0
    const totalOperations = batchData.newPreferences.length + batchData.updatedPreferences.length

    // Batch insert new preferences
    if (batchData.newPreferences.length > 0) {
      logger.debug('Executing batch insert for new preferences', { count: batchData.newPreferences.length })

      const insertValues = batchData.newPreferences.map((item) => ({
        scope,
        key: item.targetKey,
        value: item.value,
        createdAt: timestamp,
        updatedAt: timestamp
      }))

      await tx.insert(preferenceTable).values(insertValues)

      completedOperations += batchData.newPreferences.length
      const progress = Math.floor((completedOperations / totalOperations) * 100)
      onProgress?.(progress)

      logger.info('Batch insert completed', { insertedCount: batchData.newPreferences.length })
    }

    // Batch update existing preferences
    if (batchData.updatedPreferences.length > 0) {
      logger.debug('Executing batch updates for existing preferences', { count: batchData.updatedPreferences.length })

      // Execute updates in batches to avoid SQL limitations
      const BATCH_SIZE = 50
      const updateBatches = this.chunkArray(batchData.updatedPreferences, BATCH_SIZE)

      for (const batch of updateBatches) {
        // Use Promise.all to execute updates in parallel within the transaction
        await Promise.all(
          batch.map((item) =>
            tx
              .update(preferenceTable)
              .set({
                value: item.value,
                updatedAt: timestamp
              })
              .where(and(eq(preferenceTable.scope, scope), eq(preferenceTable.key, item.targetKey)))
          )
        )

        completedOperations += batch.length
        const progress = Math.floor((completedOperations / totalOperations) * 100)
        onProgress?.(progress)
      }

      logger.info('Batch updates completed', { updatedCount: batchData.updatedPreferences.length })
    }

    logger.info('Transaction completed successfully', {
      totalOperations: completedOperations,
      newPreferences: batchData.newPreferences.length,
      updatedPreferences: batchData.updatedPreferences.length
    })
  })
}
|
||||
|
||||
/**
|
||||
* Validate batch data before executing migration
|
||||
*/
|
||||
private validateBatchData(batchData: BatchMigrationResult): void {
|
||||
const allData = [...batchData.newPreferences, ...batchData.updatedPreferences]
|
||||
|
||||
// Check for duplicate target keys
|
||||
const targetKeys = allData.map((item) => item.targetKey)
|
||||
const duplicateKeys = targetKeys.filter((key, index) => targetKeys.indexOf(key) !== index)
|
||||
|
||||
if (duplicateKeys.length > 0) {
|
||||
throw new Error(`Duplicate target keys found in migration data: ${duplicateKeys.join(', ')}`)
|
||||
}
|
||||
|
||||
// Validate each item has required fields
|
||||
for (const item of allData) {
|
||||
if (!item.targetKey || item.targetKey.trim() === '') {
|
||||
throw new Error(`Invalid targetKey found: '${item.targetKey}'`)
|
||||
}
|
||||
|
||||
if (item.value === undefined) {
|
||||
throw new Error(`Undefined value for targetKey: '${item.targetKey}'`)
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug('Batch data validation passed', {
|
||||
totalItems: allData.length,
|
||||
uniqueKeys: targetKeys.length
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Split array into chunks of specified size for batch processing
|
||||
*/
|
||||
private chunkArray<T>(array: T[], chunkSize: number): T[][] {
|
||||
const chunks: T[][] = []
|
||||
for (let i = 0; i < array.length; i += chunkSize) {
|
||||
chunks.push(array.slice(i, i + chunkSize))
|
||||
}
|
||||
return chunks
|
||||
}
|
||||
|
||||
/**
|
||||
* Read value from ElectronStore (via ConfigManager)
|
||||
*/
|
||||
private async readFromElectronStore(key: string): Promise<any> {
|
||||
try {
|
||||
return configManager.get(key)
|
||||
} catch (error) {
|
||||
logger.warn('Failed to read from ElectronStore', { key, error })
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Read one value from the cached Redux Persist data, supporting dotted
 * nested paths (e.g. "codeEditor.enabled").
 *
 * Category payloads are JSON-parsed when stored as strings (the usual
 * Redux Persist format). Any failure - missing cache, missing category,
 * parse error, or a broken nested path - returns undefined rather than
 * throwing, so one bad key cannot abort the migration.
 *
 * @param category - persisted slice name (e.g. "settings")
 * @param key - field name or dotted path within the slice
 * @returns the value, or undefined when not found / unreadable
 */
private async readFromReduxPersist(category: string, key: string): Promise<any> {
  try {
    // Get cached Redux data from migrate service
    const reduxData = this.migrateService?.getReduxData()

    if (!reduxData) {
      logger.warn('No Redux persist data available in cache', { category, key })
      return undefined
    }

    logger.debug('Reading from cached Redux persist data', {
      category,
      key,
      availableCategories: Object.keys(reduxData),
      isNestedKey: key.includes('.')
    })

    // Get the category data from Redux persist cache
    const categoryData = reduxData[category]
    if (!categoryData) {
      logger.debug('Category not found in Redux persist data', {
        category,
        availableCategories: Object.keys(reduxData)
      })
      return undefined
    }

    // Redux persist usually stores data as JSON strings
    let parsedCategoryData
    try {
      parsedCategoryData = typeof categoryData === 'string' ? JSON.parse(categoryData) : categoryData
    } catch (parseError) {
      logger.warn('Failed to parse Redux persist category data', {
        category,
        categoryData: typeof categoryData,
        parseError
      })
      return undefined
    }

    // Handle nested paths (e.g., "codeEditor.enabled")
    let value
    if (key.includes('.')) {
      // Parse nested path
      const keyPath = key.split('.')
      let current = parsedCategoryData

      logger.debug('Parsing nested key path', {
        category,
        key,
        keyPath,
        rootDataKeys: current ? Object.keys(current) : []
      })

      // Walk one segment at a time; bail out if a segment is not a plain object.
      for (const pathSegment of keyPath) {
        if (current && typeof current === 'object' && !Array.isArray(current)) {
          current = current[pathSegment]
          logger.debug('Navigated to path segment', {
            pathSegment,
            foundValue: current !== undefined,
            valueType: typeof current
          })
        } else {
          logger.debug('Failed to navigate nested path - invalid structure', {
            pathSegment,
            currentType: typeof current,
            isArray: Array.isArray(current)
          })
          return undefined
        }
      }
      value = current
    } else {
      // Direct field access (e.g., "theme")
      value = parsedCategoryData[key]
    }

    if (value !== undefined) {
      logger.debug('Successfully read from Redux persist cache', {
        category,
        key,
        value,
        valueType: typeof value,
        isNested: key.includes('.')
      })
    } else {
      logger.debug('Key not found in Redux persist data', {
        category,
        key,
        availableKeys: parsedCategoryData ? Object.keys(parsedCategoryData) : []
      })
    }

    return value
  } catch (error) {
    logger.warn('Failed to read from Redux persist cache', { category, key, error })
    return undefined
  }
}
|
||||
|
||||
/**
 * Best-effort conversion of a raw source value to the requested target
 * type. Unknown target types and failed conversions fall back to the
 * original value; null/undefined always become null.
 */
private convertValue(value: any, targetType: string): any {
  if (value === null || value === undefined) {
    return null
  }

  try {
    if (targetType === 'boolean') {
      return this.toBoolean(value)
    }
    if (targetType === 'string') {
      return this.toString(value)
    }
    if (targetType === 'number') {
      return this.toNumber(value)
    }
    if (targetType === 'array' || targetType === 'unknown[]') {
      return this.toArray(value)
    }
    if (targetType === 'object' || targetType === 'Record<string, unknown>') {
      return this.toObject(value)
    }
    return value
  } catch (error) {
    logger.warn('Type conversion failed, using original value', { value, targetType, error })
    return value
  }
}
|
||||
|
||||
// Coerce arbitrary input to boolean; strings accept 'true'/'1'/'yes' (case-insensitive).
private toBoolean(value: any): boolean {
  switch (typeof value) {
    case 'boolean':
      return value
    case 'string':
      return ['true', '1', 'yes'].includes(value.toLowerCase())
    case 'number':
      return value !== 0
    default:
      return Boolean(value)
  }
}
|
||||
|
||||
// Coerce to string; objects are JSON-serialized, everything else via String().
private toString(value: any): string {
  if (typeof value === 'string') {
    return value
  }
  // typeof null === 'object' and JSON.stringify(null) === 'null', matching String(null).
  return typeof value === 'object' ? JSON.stringify(value) : String(value)
}
|
||||
|
||||
// Coerce to number; unparseable strings and non-primitive inputs become 0.
private toNumber(value: any): number {
  switch (typeof value) {
    case 'number':
      return value
    case 'string': {
      const parsed = parseFloat(value)
      return Number.isNaN(parsed) ? 0 : parsed
    }
    case 'boolean':
      return value ? 1 : 0
    default:
      return 0
  }
}
|
||||
|
||||
// Coerce to array: pass arrays through, try JSON-parsing strings that
// encode arrays, and wrap anything else in a single-element array.
private toArray(value: any): any[] {
  if (Array.isArray(value)) {
    return value
  }
  if (typeof value === 'string') {
    try {
      const decoded = JSON.parse(value)
      if (Array.isArray(decoded)) {
        return decoded
      }
    } catch {
      // fall through - treat the raw string as a single element
    }
  }
  return [value]
}
|
||||
|
||||
private toObject(value: any): Record<string, any> {
|
||||
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
||||
return value
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
try {
|
||||
const parsed = JSON.parse(value)
|
||||
return typeof parsed === 'object' && parsed !== null && !Array.isArray(parsed) ? parsed : { value }
|
||||
} catch {
|
||||
return { value }
|
||||
}
|
||||
}
|
||||
return { value }
|
||||
}
|
||||
}
|
||||
@@ -1,64 +0,0 @@
|
||||
# Migration V2 (Main Process)
|
||||
|
||||
Architecture for the new one-shot migration from the legacy Dexie + Redux Persist stores into the SQLite schema. This module owns orchestration, data access helpers, migrator plugins, and IPC entry points used by the renderer migration window.
|
||||
|
||||
## Directory Layout
|
||||
|
||||
```
|
||||
src/main/data/migration/v2/
|
||||
├── core/ # Engine + shared context
|
||||
├── migrators/ # Domain-specific migrators and mappings
|
||||
├── utils/ # Data source readers (Redux, Dexie, streaming JSON)
|
||||
├── window/ # IPC handlers + migration window manager
|
||||
└── index.ts # Public exports for main process
|
||||
```
|
||||
|
||||
## Core Contracts
|
||||
|
||||
- `core/MigrationEngine.ts` coordinates all migrators in order, surfaces progress to the UI, and marks status in `app_state.key = 'migration_v2_status'`. It will clear new-schema tables before running and abort on any validation failure.
|
||||
- `core/MigrationContext.ts` builds the shared context passed to every migrator:
|
||||
- `sources`: `ConfigManager` (ElectronStore), `ReduxStateReader` (parsed Redux Persist data), `DexieFileReader` (JSON exports)
|
||||
- `db`: current SQLite connection
|
||||
- `sharedData`: `Map` for passing cross-cutting info between migrators
|
||||
- `logger`: `loggerService` scoped to migration
|
||||
- `@shared/data/migration/v2/types` defines stages, results, and validation stats used across main and renderer.
|
||||
|
||||
## Migrators
|
||||
|
||||
- Base contract: extend `migrators/BaseMigrator.ts` and implement:
|
||||
- `id`, `name`, `description`, `order` (lower runs first)
|
||||
- `prepare(ctx)`: dry-run checks, counts, and staging data; return `PrepareResult`
|
||||
- `execute(ctx)`: perform inserts/updates; manage your own transactions; report progress via `reportProgress`
|
||||
- `validate(ctx)`: verify counts and integrity; return `ValidateResult` with stats (`sourceCount`, `targetCount`, `skippedCount`) and any `errors`
|
||||
- Registration: list migrators (in order) in `migrators/index.ts` so the engine can sort and run them.
|
||||
- Current migrators:
|
||||
- `PreferencesMigrator` (implemented): maps ElectronStore + Redux settings to the `preference` table using `mappings/PreferencesMappings.ts`.
|
||||
- `AssistantMigrator`, `KnowledgeMigrator`, `ChatMigrator` (placeholders): scaffolding and TODO notes for future tables.
|
||||
- Conventions:
|
||||
- All logging goes through `loggerService` with a migrator-specific context.
|
||||
- Use `MigrationContext.sources` instead of accessing raw files/stores directly.
|
||||
- Use `sharedData` to pass IDs or lookup tables between migrators (e.g., assistant -> chat references) instead of re-reading sources.
|
||||
- Stream large Dexie exports (`JSONStreamReader`) and batch inserts to avoid memory spikes.
|
||||
- Count validation is mandatory; engine will fail the run if `targetCount < sourceCount - skippedCount` or if `ValidateResult.errors` is non-empty.
|
||||
- Keep migrations idempotent per run—engine clears target tables before it starts, but each migrator should tolerate retries within the same run.
|
||||
|
||||
## Utilities
|
||||
|
||||
- `utils/ReduxStateReader.ts`: safe accessor for categorized Redux Persist data with dot-path lookup.
|
||||
- `utils/DexieFileReader.ts`: reads exported Dexie JSON tables; can stream large tables.
|
||||
- `utils/JSONStreamReader.ts`: streaming reader with batching, counting, and sampling helpers for very large arrays.
|
||||
|
||||
## Window & IPC Integration
|
||||
|
||||
- `window/MigrationIpcHandler.ts` exposes IPC channels for the migration UI:
|
||||
- Receives Redux data and Dexie export path, starts the engine, and streams progress back to renderer.
|
||||
- Manages backup flow (dialogs via `BackupManager`) and retry/cancel/restart actions.
|
||||
- `window/MigrationWindowManager.ts` creates the frameless migration window, handles lifecycle, and relaunch instructions after completion in production.
|
||||
|
||||
## Implementation Checklist for New Migrators
|
||||
|
||||
- [ ] Add mapping definitions (if needed) under `migrators/mappings/`.
|
||||
- [ ] Implement `prepare/execute/validate` with explicit counts, batch inserts, and integrity checks.
|
||||
- [ ] Wire progress updates through `reportProgress` so UI shows per-migrator progress.
|
||||
- [ ] Register the migrator in `migrators/index.ts` with the correct `order`.
|
||||
- [ ] Add any new target tables to `MigrationEngine.verifyAndClearNewTables` once those tables exist.
|
||||
@@ -1,55 +0,0 @@
|
||||
/**
 * Migration context shared between all migrators.
 *
 * Bundles the three legacy data sources (electron-store config, Redux
 * persist state received from the renderer, and an exported Dexie JSON
 * dump), the target database handle, a scratch map for passing data
 * between migrators, and a context-scoped logger.
 */

import { dbService } from '@data/db/DbService'
import type { DbType } from '@data/db/types'
import { type LoggerService, loggerService } from '@logger'
import type { ConfigManager } from '@main/services/ConfigManager'
import { configManager } from '@main/services/ConfigManager'

import { DexieFileReader } from '../utils/DexieFileReader'
import { ReduxStateReader } from '../utils/ReduxStateReader'

// Logger type for migration context (alias of the app's actual LoggerService type)
export type MigrationLogger = LoggerService

// Migration context interface — one instance is created per migration run
// and handed to every migrator's prepare/execute/validate phase.
export interface MigrationContext {
  // Data source accessors (read-only legacy stores)
  sources: {
    electronStore: ConfigManager // legacy electron-store backed config
    reduxState: ReduxStateReader // categorized Redux persist data with dot-path lookup
    dexieExport: DexieFileReader // exported Dexie JSON tables on disk
  }

  // Target database (new SQLite schema via drizzle)
  db: DbType

  // Shared data between migrators (e.g. ID lookup tables built by an
  // earlier migrator and consumed by a later one)
  sharedData: Map<string, unknown>

  // Logger scoped to the migration run
  logger: MigrationLogger
}

/**
 * Create a migration context with all data sources wired up.
 *
 * @param reduxData - Parsed Redux state data from Renderer
 * @param dexieExportPath - Path to exported Dexie files
 * @returns A fresh context with an empty `sharedData` map
 */
export function createMigrationContext(reduxData: Record<string, unknown>, dexieExportPath: string): MigrationContext {
  const db = dbService.getDb()
  const logger = loggerService.withContext('Migration')

  return {
    sources: {
      electronStore: configManager,
      reduxState: new ReduxStateReader(reduxData),
      dexieExport: new DexieFileReader(dexieExportPath)
    },
    db,
    sharedData: new Map(),
    logger
  }
}
|
||||
@@ -1,370 +0,0 @@
|
||||
/**
|
||||
* Migration engine orchestrates the entire migration process
|
||||
* Coordinates migrators, manages progress, and handles failures
|
||||
*/
|
||||
|
||||
import { dbService } from '@data/db/DbService'
|
||||
import { appStateTable } from '@data/db/schemas/appState'
|
||||
import { preferenceTable } from '@data/db/schemas/preference'
|
||||
import { loggerService } from '@logger'
|
||||
import type {
|
||||
MigrationProgress,
|
||||
MigrationResult,
|
||||
MigrationStage,
|
||||
MigrationStatusValue,
|
||||
MigratorResult,
|
||||
MigratorStatus,
|
||||
ValidateResult
|
||||
} from '@shared/data/migration/v2/types'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import fs from 'fs/promises'
|
||||
|
||||
import type { BaseMigrator } from '../migrators/BaseMigrator'
|
||||
import { createMigrationContext } from './MigrationContext'
|
||||
|
||||
// TODO: Import these tables when they are created in user data schema
|
||||
// import { assistantTable } from '../../db/schemas/assistant'
|
||||
// import { topicTable } from '../../db/schemas/topic'
|
||||
// import { messageTable } from '../../db/schemas/message'
|
||||
// import { fileTable } from '../../db/schemas/file'
|
||||
// import { knowledgeBaseTable } from '../../db/schemas/knowledgeBase'
|
||||
|
||||
const logger = loggerService.withContext('MigrationEngine')
|
||||
|
||||
const MIGRATION_V2_STATUS = 'migration_v2_status'
|
||||
|
||||
export class MigrationEngine {
|
||||
private migrators: BaseMigrator[] = []
|
||||
private progressCallback?: (progress: MigrationProgress) => void
|
||||
|
||||
constructor() {}
|
||||
|
||||
/**
|
||||
* Register migrators in execution order
|
||||
*/
|
||||
registerMigrators(migrators: BaseMigrator[]): void {
|
||||
this.migrators = migrators.sort((a, b) => a.order - b.order)
|
||||
logger.info('Migrators registered', {
|
||||
migrators: this.migrators.map((m) => ({ id: m.id, name: m.name, order: m.order }))
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Set progress callback for UI updates
|
||||
*/
|
||||
onProgress(callback: (progress: MigrationProgress) => void): void {
|
||||
this.progressCallback = callback
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if migration is needed
|
||||
*/
|
||||
//TODO 不能仅仅判断数据库,如果是全新安装,而不是升级上来的用户,其实并不需要迁移,但是按现在的逻辑,还是会进行迁移,这不正确
|
||||
async needsMigration(): Promise<boolean> {
|
||||
const db = dbService.getDb()
|
||||
const status = await db.select().from(appStateTable).where(eq(appStateTable.key, MIGRATION_V2_STATUS)).get()
|
||||
|
||||
// Migration needed if: no status record, or status is not 'completed'
|
||||
if (!status?.value) return true
|
||||
|
||||
const statusValue = status.value as MigrationStatusValue
|
||||
return statusValue.status !== 'completed'
|
||||
}
|
||||
|
||||
/**
|
||||
* Get last migration error (for UI display)
|
||||
*/
|
||||
async getLastError(): Promise<string | null> {
|
||||
const db = dbService.getDb()
|
||||
const status = await db.select().from(appStateTable).where(eq(appStateTable.key, MIGRATION_V2_STATUS)).get()
|
||||
|
||||
if (status?.value) {
|
||||
const statusValue = status.value as MigrationStatusValue
|
||||
if (statusValue.status === 'failed') {
|
||||
return statusValue.error || 'Unknown error'
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute full migration
|
||||
* @param reduxData - Parsed Redux state data from Renderer
|
||||
* @param dexieExportPath - Path to exported Dexie files
|
||||
*/
|
||||
async run(reduxData: Record<string, unknown>, dexieExportPath: string): Promise<MigrationResult> {
|
||||
const startTime = Date.now()
|
||||
const results: MigratorResult[] = []
|
||||
|
||||
try {
|
||||
// Safety check: verify new tables status before clearing
|
||||
await this.verifyAndClearNewTables()
|
||||
|
||||
// Create migration context
|
||||
const context = createMigrationContext(reduxData, dexieExportPath)
|
||||
|
||||
for (let i = 0; i < this.migrators.length; i++) {
|
||||
const migrator = this.migrators[i]
|
||||
const migratorStartTime = Date.now()
|
||||
|
||||
logger.info(`Starting migrator: ${migrator.name}`, { id: migrator.id })
|
||||
|
||||
// Update progress: migrator starting
|
||||
this.updateProgress('migration', this.calculateProgress(i, 0), migrator)
|
||||
|
||||
// Set up migrator progress callback
|
||||
migrator.setProgressCallback((progress, message) => {
|
||||
this.updateProgress('migration', this.calculateProgress(i, progress), migrator, message)
|
||||
})
|
||||
|
||||
// Phase 1: Prepare (includes dry-run validation)
|
||||
const prepareResult = await migrator.prepare(context)
|
||||
if (!prepareResult.success) {
|
||||
throw new Error(`${migrator.name} prepare failed: ${prepareResult.warnings?.join(', ')}`)
|
||||
}
|
||||
|
||||
logger.info(`${migrator.name} prepare completed`, { itemCount: prepareResult.itemCount })
|
||||
|
||||
// Phase 2: Execute (each migrator manages its own transactions)
|
||||
const executeResult = await migrator.execute(context)
|
||||
if (!executeResult.success) {
|
||||
throw new Error(`${migrator.name} execute failed: ${executeResult.error}`)
|
||||
}
|
||||
|
||||
logger.info(`${migrator.name} execute completed`, {
|
||||
processedCount: executeResult.processedCount
|
||||
})
|
||||
|
||||
// Phase 3: Validate
|
||||
const validateResult = await migrator.validate(context)
|
||||
|
||||
// Engine-level validation
|
||||
this.validateMigratorResult(migrator, validateResult)
|
||||
|
||||
logger.info(`${migrator.name} validation passed`, { stats: validateResult.stats })
|
||||
|
||||
// Record result
|
||||
results.push({
|
||||
migratorId: migrator.id,
|
||||
migratorName: migrator.name,
|
||||
success: true,
|
||||
recordsProcessed: executeResult.processedCount,
|
||||
duration: Date.now() - migratorStartTime
|
||||
})
|
||||
|
||||
// Update progress: migrator completed
|
||||
this.updateProgress('migration', this.calculateProgress(i + 1, 0), migrator, 'completed')
|
||||
}
|
||||
|
||||
// Mark migration completed
|
||||
await this.markCompleted()
|
||||
|
||||
// Cleanup temporary files
|
||||
await this.cleanupTempFiles(dexieExportPath)
|
||||
|
||||
logger.info('Migration completed successfully', {
|
||||
totalDuration: Date.now() - startTime,
|
||||
migratorCount: results.length
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
migratorResults: results,
|
||||
totalDuration: Date.now() - startTime
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
|
||||
logger.error('Migration failed', { error: errorMessage })
|
||||
|
||||
// Mark migration as failed with error details
|
||||
await this.markFailed(errorMessage)
|
||||
|
||||
return {
|
||||
success: false,
|
||||
migratorResults: results,
|
||||
totalDuration: Date.now() - startTime,
|
||||
error: errorMessage
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify and clear new architecture tables before migration
|
||||
* Safety check: log if tables are not empty (may indicate previous failed migration)
|
||||
*/
|
||||
private async verifyAndClearNewTables(): Promise<void> {
|
||||
const db = dbService.getDb()
|
||||
|
||||
// Tables to clear - add more as they are created
|
||||
const tables = [
|
||||
{ table: preferenceTable, name: 'preference' }
|
||||
// TODO: Add these when tables are created
|
||||
// { table: assistantTable, name: 'assistant' },
|
||||
// { table: topicTable, name: 'topic' },
|
||||
// { table: messageTable, name: 'message' },
|
||||
// { table: fileTable, name: 'file' },
|
||||
// { table: knowledgeBaseTable, name: 'knowledge_base' }
|
||||
]
|
||||
|
||||
// Check if tables have data (safety check)
|
||||
for (const { table, name } of tables) {
|
||||
const result = await db.select({ count: sql<number>`count(*)` }).from(table).get()
|
||||
const count = result?.count ?? 0
|
||||
if (count > 0) {
|
||||
logger.warn(`Table '${name}' is not empty (${count} rows), clearing for fresh migration`)
|
||||
}
|
||||
}
|
||||
|
||||
// Clear tables in reverse dependency order
|
||||
// TODO: Add these when tables are created (in correct order)
|
||||
// await db.delete(messageTable)
|
||||
// await db.delete(topicTable)
|
||||
// await db.delete(fileTable)
|
||||
// await db.delete(knowledgeBaseTable)
|
||||
// await db.delete(assistantTable)
|
||||
await db.delete(preferenceTable)
|
||||
|
||||
logger.info('All new architecture tables cleared successfully')
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate migrator result at engine level
|
||||
* Ensures count validation and error checking
|
||||
*/
|
||||
private validateMigratorResult(migrator: BaseMigrator, result: ValidateResult): void {
|
||||
const { stats } = result
|
||||
|
||||
// Count validation: target must have at least source count minus skipped
|
||||
const expectedCount = stats.sourceCount - stats.skippedCount
|
||||
if (stats.targetCount < expectedCount) {
|
||||
throw new Error(
|
||||
`${migrator.name} count mismatch: ` +
|
||||
`expected ${expectedCount}, ` +
|
||||
`got ${stats.targetCount}. ${stats.mismatchReason || ''}`
|
||||
)
|
||||
}
|
||||
|
||||
// Any validation errors are fatal
|
||||
if (result.errors.length > 0) {
|
||||
const errorSummary = result.errors
|
||||
.slice(0, 3)
|
||||
.map((e) => e.message)
|
||||
.join('; ')
|
||||
throw new Error(
|
||||
`${migrator.name} validation failed: ${errorSummary}` +
|
||||
(result.errors.length > 3 ? ` (+${result.errors.length - 3} more)` : '')
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup temporary export files
|
||||
*/
|
||||
private async cleanupTempFiles(exportPath: string): Promise<void> {
|
||||
try {
|
||||
await fs.rm(exportPath, { recursive: true, force: true })
|
||||
logger.info('Temporary files cleaned up', { path: exportPath })
|
||||
} catch (error) {
|
||||
logger.warn('Failed to cleanup temp files', { error, path: exportPath })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate overall progress based on completed migrators and current migrator progress
|
||||
*/
|
||||
private calculateProgress(completedMigrators: number, currentMigratorProgress: number): number {
|
||||
if (this.migrators.length === 0) return 0
|
||||
const migratorWeight = 100 / this.migrators.length
|
||||
return Math.round(completedMigrators * migratorWeight + (currentMigratorProgress / 100) * migratorWeight)
|
||||
}
|
||||
|
||||
/**
|
||||
* Update progress callback with current state
|
||||
*/
|
||||
private updateProgress(
|
||||
stage: MigrationStage,
|
||||
overallProgress: number,
|
||||
currentMigrator: BaseMigrator,
|
||||
message?: string
|
||||
): void {
|
||||
const migratorsProgress = this.migrators.map((m) => ({
|
||||
id: m.id,
|
||||
name: m.name,
|
||||
status: this.getMigratorStatus(m, currentMigrator)
|
||||
}))
|
||||
|
||||
this.progressCallback?.({
|
||||
stage,
|
||||
overallProgress,
|
||||
currentMessage: message || `正在处理${currentMigrator.name}...`,
|
||||
migrators: migratorsProgress
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine migrator status based on execution order
|
||||
*/
|
||||
private getMigratorStatus(migrator: BaseMigrator, current: BaseMigrator): MigratorStatus {
|
||||
if (migrator.order < current.order) return 'completed'
|
||||
if (migrator.order === current.order) return 'running'
|
||||
return 'pending'
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark migration as completed in app_state
|
||||
*/
|
||||
private async markCompleted(): Promise<void> {
|
||||
const db = dbService.getDb()
|
||||
const statusValue: MigrationStatusValue = {
|
||||
status: 'completed',
|
||||
completedAt: Date.now(),
|
||||
version: '2.0.0',
|
||||
error: null
|
||||
}
|
||||
|
||||
await db
|
||||
.insert(appStateTable)
|
||||
.values({
|
||||
key: MIGRATION_V2_STATUS,
|
||||
value: statusValue
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: appStateTable.key,
|
||||
set: {
|
||||
value: statusValue,
|
||||
updatedAt: Date.now()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark migration as failed in app_state with error details
|
||||
*/
|
||||
private async markFailed(error: string): Promise<void> {
|
||||
const db = dbService.getDb()
|
||||
const statusValue: MigrationStatusValue = {
|
||||
status: 'failed',
|
||||
failedAt: Date.now(),
|
||||
version: '2.0.0',
|
||||
error: error
|
||||
}
|
||||
|
||||
await db
|
||||
.insert(appStateTable)
|
||||
.values({
|
||||
key: MIGRATION_V2_STATUS,
|
||||
value: statusValue
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: appStateTable.key,
|
||||
set: {
|
||||
value: statusValue,
|
||||
updatedAt: Date.now()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Export singleton instance
|
||||
export const migrationEngine = new MigrationEngine()
|
||||
@@ -1,25 +0,0 @@
|
||||
/**
 * Migration v2 module exports.
 * Public entry point for the v2 data-migration subsystem.
 */

// Core engine and per-run context
export { createMigrationContext, type MigrationContext } from './core/MigrationContext'
export { MigrationEngine, migrationEngine } from './core/MigrationEngine'
export * from '@shared/data/migration/v2/types'

// Migrators (base class + registry)
export { getAllMigrators } from './migrators'
export { BaseMigrator } from './migrators/BaseMigrator'

// Utils — data-source readers for the legacy stores
export { DexieFileReader } from './utils/DexieFileReader'
export { JSONStreamReader } from './utils/JSONStreamReader'
export { ReduxStateReader } from './utils/ReduxStateReader'

// Window management — migration window lifecycle and its IPC surface
export {
  registerMigrationIpcHandlers,
  resetMigrationData,
  unregisterMigrationIpcHandlers
} from './window/MigrationIpcHandler'
export { MigrationWindowManager, migrationWindowManager } from './window/MigrationWindowManager'
|
||||
@@ -1,67 +0,0 @@
|
||||
/**
|
||||
* Assistant migrator - migrates assistants from Redux to SQLite
|
||||
*
|
||||
* TODO: Implement when assistant tables are created
|
||||
* Data source: Redux assistants slice (not Dexie)
|
||||
* Target tables: assistant, agent, provider, model
|
||||
*/
|
||||
|
||||
import { loggerService } from '@logger'
|
||||
import type { ExecuteResult, PrepareResult, ValidateResult } from '@shared/data/migration/v2/types'
|
||||
|
||||
import { BaseMigrator } from './BaseMigrator'
|
||||
|
||||
const logger = loggerService.withContext('AssistantMigrator')
|
||||
|
||||
export class AssistantMigrator extends BaseMigrator {
|
||||
readonly id = 'assistant'
|
||||
readonly name = 'Assistant'
|
||||
readonly description = 'Migrate assistant and model configuration'
|
||||
readonly order = 2
|
||||
|
||||
async prepare(): Promise<PrepareResult> {
|
||||
logger.info('AssistantMigrator.prepare - placeholder implementation')
|
||||
|
||||
// TODO: Implement when assistant tables are created
|
||||
// 1. Read from _ctx.sources.reduxState.getCategory('assistants')
|
||||
// 2. Extract assistants, presets, defaultAssistant
|
||||
// 3. Prepare data for migration
|
||||
|
||||
return {
|
||||
success: true,
|
||||
itemCount: 0,
|
||||
warnings: ['AssistantMigrator not yet implemented - waiting for assistant tables']
|
||||
}
|
||||
}
|
||||
|
||||
async execute(): Promise<ExecuteResult> {
|
||||
logger.info('AssistantMigrator.execute - placeholder implementation')
|
||||
|
||||
// TODO: Implement when assistant tables are created
|
||||
// 1. Insert assistants into assistant table
|
||||
// 2. Insert related data (agents, providers, models)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
processedCount: 0
|
||||
}
|
||||
}
|
||||
|
||||
async validate(): Promise<ValidateResult> {
|
||||
logger.info('AssistantMigrator.validate - placeholder implementation')
|
||||
|
||||
// TODO: Implement when assistant tables are created
|
||||
// 1. Count validation
|
||||
// 2. Sample validation
|
||||
|
||||
return {
|
||||
success: true,
|
||||
errors: [],
|
||||
stats: {
|
||||
sourceCount: 0,
|
||||
targetCount: 0,
|
||||
skippedCount: 0
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
/**
|
||||
* Abstract base class for all migrators
|
||||
* Each migrator handles migration of a specific business domain
|
||||
*/
|
||||
|
||||
import type { ExecuteResult, PrepareResult, ValidateResult } from '@shared/data/migration/v2/types'
|
||||
|
||||
import type { MigrationContext } from '../core/MigrationContext'
|
||||
|
||||
/**
 * Abstract base class for all migrators.
 * Each migrator handles migration of a specific business domain and is
 * driven by the engine through prepare → execute → validate.
 */
export abstract class BaseMigrator {
  // Metadata - must be implemented by subclasses
  abstract readonly id: string // Stable identifier used in results/progress
  abstract readonly name: string // Display name for UI
  abstract readonly description: string // Display description for UI
  abstract readonly order: number // Execution order (lower runs first)

  // Progress callback for UI updates; injected by the engine before run
  protected onProgress?: (progress: number, message: string) => void

  /**
   * Set progress callback for reporting progress to UI.
   */
  setProgressCallback(callback: (progress: number, message: string) => void): void {
    this.onProgress = callback
  }

  /**
   * Report progress to UI. No-op if no callback has been set.
   * @param progress - Progress percentage (0-100)
   * @param message - Progress message
   */
  protected reportProgress(progress: number, message: string): void {
    this.onProgress?.(progress, message)
  }

  /**
   * Prepare phase - validate source data and count items.
   * This includes dry-run validation to catch errors early, before any
   * writes happen.
   */
  abstract prepare(ctx: MigrationContext): Promise<PrepareResult>

  /**
   * Execute phase - perform the actual data migration.
   * Each migrator manages its own transactions.
   */
  abstract execute(ctx: MigrationContext): Promise<ExecuteResult>

  /**
   * Validate phase - verify migrated data integrity.
   * Must include count validation; the engine fails the run if counts
   * fall short or any errors are reported.
   */
  abstract validate(ctx: MigrationContext): Promise<ValidateResult>
}
|
||||
@@ -1,81 +0,0 @@
|
||||
/**
|
||||
* Chat migrator - migrates topics and messages from Dexie to SQLite
|
||||
*
|
||||
* TODO: Implement when chat tables are created
|
||||
* Data source: Dexie topics table (messages are embedded in topics)
|
||||
* Target tables: topic, message
|
||||
*
|
||||
* Note: This migrator handles the largest amount of data (potentially millions of messages)
|
||||
* and uses streaming JSON reading with batch inserts for memory efficiency.
|
||||
*/
|
||||
|
||||
import { loggerService } from '@logger'
|
||||
import type { ExecuteResult, PrepareResult, ValidateResult } from '@shared/data/migration/v2/types'
|
||||
|
||||
import { BaseMigrator } from './BaseMigrator'
|
||||
|
||||
const logger = loggerService.withContext('ChatMigrator')
|
||||
|
||||
export class ChatMigrator extends BaseMigrator {
|
||||
readonly id = 'chat'
|
||||
readonly name = 'ChatData'
|
||||
readonly description = 'Migrate chat data'
|
||||
readonly order = 4
|
||||
|
||||
async prepare(): Promise<PrepareResult> {
|
||||
logger.info('ChatMigrator.prepare - placeholder implementation')
|
||||
|
||||
// TODO: Implement when chat tables are created
|
||||
// 1. Check if topics.json export file exists
|
||||
// 2. Validate JSON format with sample read
|
||||
// 3. Count total topics and estimate message count
|
||||
// 4. Check for data integrity (e.g., messages have valid topic references)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
itemCount: 0,
|
||||
warnings: ['ChatMigrator not yet implemented - waiting for chat tables']
|
||||
}
|
||||
}
|
||||
|
||||
async execute(): Promise<ExecuteResult> {
|
||||
logger.info('ChatMigrator.execute - placeholder implementation')
|
||||
|
||||
// TODO: Implement when chat tables are created
|
||||
// Use streaming JSON reader for large message files:
|
||||
//
|
||||
// const streamReader = _ctx.sources.dexieExport.createStreamReader('topics')
|
||||
// await streamReader.readInBatches<OldTopic>(
|
||||
// BATCH_SIZE,
|
||||
// async (topics, batchIndex) => {
|
||||
// // 1. Insert topics
|
||||
// // 2. Extract and insert messages from each topic
|
||||
// // 3. Report progress
|
||||
// }
|
||||
// )
|
||||
|
||||
return {
|
||||
success: true,
|
||||
processedCount: 0
|
||||
}
|
||||
}
|
||||
|
||||
async validate(): Promise<ValidateResult> {
|
||||
logger.info('ChatMigrator.validate - placeholder implementation')
|
||||
|
||||
// TODO: Implement when chat tables are created
|
||||
// 1. Count validation for topics and messages
|
||||
// 2. Sample validation (check a few topics have correct message counts)
|
||||
// 3. Reference integrity validation
|
||||
|
||||
return {
|
||||
success: true,
|
||||
errors: [],
|
||||
stats: {
|
||||
sourceCount: 0,
|
||||
targetCount: 0,
|
||||
skippedCount: 0
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,74 +0,0 @@
|
||||
/**
|
||||
* Knowledge migrator - migrates knowledge bases from Redux and Dexie to SQLite
|
||||
*
|
||||
* TODO: Implement when knowledge tables are created
|
||||
* Data sources:
|
||||
* - Redux knowledge slice (knowledge.bases metadata)
|
||||
* - Dexie knowledge_notes table
|
||||
* - Dexie files table (for file references)
|
||||
* Target tables: knowledge_base, knowledge_note, file
|
||||
*/
|
||||
|
||||
import { loggerService } from '@logger'
|
||||
import type { ExecuteResult, PrepareResult, ValidateResult } from '@shared/data/migration/v2/types'
|
||||
|
||||
import { BaseMigrator } from './BaseMigrator'
|
||||
|
||||
const logger = loggerService.withContext('KnowledgeMigrator')
|
||||
|
||||
export class KnowledgeMigrator extends BaseMigrator {
|
||||
readonly id = 'knowledge'
|
||||
readonly name = 'KnowledgeBase'
|
||||
readonly description = 'Migrate knowledge base and file data'
|
||||
readonly order = 3
|
||||
|
||||
async prepare(): Promise<PrepareResult> {
|
||||
logger.info('KnowledgeMigrator.prepare - placeholder implementation')
|
||||
|
||||
// TODO: Implement when knowledge tables are created
|
||||
// 1. Read from _ctx.sources.reduxState.getCategory('knowledge')
|
||||
// 2. Read from _ctx.sources.dexieExport.readTable('knowledge_notes')
|
||||
// 3. Read from _ctx.sources.dexieExport.readTable('files')
|
||||
// 4. Check reference integrity between knowledge items and files
|
||||
// 5. Prepare data for migration
|
||||
|
||||
return {
|
||||
success: true,
|
||||
itemCount: 0,
|
||||
warnings: ['KnowledgeMigrator not yet implemented - waiting for knowledge tables']
|
||||
}
|
||||
}
|
||||
|
||||
async execute(): Promise<ExecuteResult> {
|
||||
logger.info('KnowledgeMigrator.execute - placeholder implementation')
|
||||
|
||||
// TODO: Implement when knowledge tables are created
|
||||
// 1. Insert files into file table
|
||||
// 2. Insert knowledge bases into knowledge_base table
|
||||
// 3. Insert knowledge notes into knowledge_note table
|
||||
|
||||
return {
|
||||
success: true,
|
||||
processedCount: 0
|
||||
}
|
||||
}
|
||||
|
||||
async validate(): Promise<ValidateResult> {
|
||||
logger.info('KnowledgeMigrator.validate - placeholder implementation')
|
||||
|
||||
// TODO: Implement when knowledge tables are created
|
||||
// 1. Count validation for each table
|
||||
// 2. Reference integrity validation
|
||||
// 3. Sample validation
|
||||
|
||||
return {
|
||||
success: true,
|
||||
errors: [],
|
||||
stats: {
|
||||
sourceCount: 0,
|
||||
targetCount: 0,
|
||||
skippedCount: 0
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,248 +0,0 @@
|
||||
/**
|
||||
* Preferences migrator - migrates preferences from ElectronStore and Redux to SQLite
|
||||
*/
|
||||
|
||||
import { preferenceTable } from '@data/db/schemas/preference'
|
||||
import { loggerService } from '@logger'
|
||||
import { configManager } from '@main/services/ConfigManager'
|
||||
import type { ExecuteResult, PrepareResult, ValidateResult, ValidationError } from '@shared/data/migration/v2/types'
|
||||
import { DefaultPreferences } from '@shared/data/preference/preferenceSchemas'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
|
||||
import type { MigrationContext } from '../core/MigrationContext'
|
||||
import { BaseMigrator } from './BaseMigrator'
|
||||
import { ELECTRON_STORE_MAPPINGS, REDUX_STORE_MAPPINGS } from './mappings/PreferencesMappings'
|
||||
|
||||
const logger = loggerService.withContext('PreferencesMigrator')
|
||||
|
||||
interface MigrationItem {
|
||||
originalKey: string
|
||||
targetKey: string
|
||||
defaultValue: unknown
|
||||
source: 'electronStore' | 'redux'
|
||||
sourceCategory?: string
|
||||
}
|
||||
|
||||
interface PreparedData {
|
||||
targetKey: string
|
||||
value: unknown
|
||||
source: 'electronStore' | 'redux'
|
||||
originalKey: string
|
||||
}
|
||||
|
||||
export class PreferencesMigrator extends BaseMigrator {
|
||||
readonly id = 'preferences'
|
||||
readonly name = 'Preferences'
|
||||
readonly description = 'Migrate application preferences'
|
||||
readonly order = 1
|
||||
|
||||
private preparedItems: PreparedData[] = []
|
||||
private skippedCount = 0
|
||||
|
||||
async prepare(ctx: MigrationContext): Promise<PrepareResult> {
|
||||
const warnings: string[] = []
|
||||
this.preparedItems = []
|
||||
this.skippedCount = 0
|
||||
|
||||
try {
|
||||
// Load migration items from mappings
|
||||
const migrationItems = this.loadMigrationItems()
|
||||
logger.info(`Found ${migrationItems.length} preference items to migrate`)
|
||||
|
||||
// Prepare each item
|
||||
for (const item of migrationItems) {
|
||||
try {
|
||||
let originalValue: unknown
|
||||
|
||||
// Read from source
|
||||
if (item.source === 'electronStore') {
|
||||
originalValue = configManager.get(item.originalKey)
|
||||
} else if (item.source === 'redux' && item.sourceCategory) {
|
||||
originalValue = ctx.sources.reduxState.get(item.sourceCategory, item.originalKey)
|
||||
}
|
||||
|
||||
// Determine value to migrate
|
||||
let valueToMigrate = originalValue
|
||||
if (originalValue === undefined || originalValue === null) {
|
||||
if (item.defaultValue !== null && item.defaultValue !== undefined) {
|
||||
valueToMigrate = item.defaultValue
|
||||
} else {
|
||||
this.skippedCount++
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
this.preparedItems.push({
|
||||
targetKey: item.targetKey,
|
||||
value: valueToMigrate,
|
||||
source: item.source,
|
||||
originalKey: item.originalKey
|
||||
})
|
||||
} catch (error) {
|
||||
warnings.push(`Failed to prepare ${item.originalKey}: ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info('Preparation completed', {
|
||||
itemCount: this.preparedItems.length,
|
||||
skipped: this.skippedCount
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
itemCount: this.preparedItems.length,
|
||||
warnings: warnings.length > 0 ? warnings : undefined
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Preparation failed', error as Error)
|
||||
return {
|
||||
success: false,
|
||||
itemCount: 0,
|
||||
warnings: [error instanceof Error ? error.message : String(error)]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async execute(ctx: MigrationContext): Promise<ExecuteResult> {
|
||||
if (this.preparedItems.length === 0) {
|
||||
return { success: true, processedCount: 0 }
|
||||
}
|
||||
|
||||
try {
|
||||
const db = ctx.db
|
||||
const scope = 'default'
|
||||
const timestamp = Date.now()
|
||||
|
||||
// Use transaction for atomic insert
|
||||
await db.transaction(async (tx) => {
|
||||
// Batch insert all preferences
|
||||
const insertValues = this.preparedItems.map((item) => ({
|
||||
scope,
|
||||
key: item.targetKey,
|
||||
value: item.value,
|
||||
createdAt: timestamp,
|
||||
updatedAt: timestamp
|
||||
}))
|
||||
|
||||
// Insert in batches to avoid SQL limitations
|
||||
const BATCH_SIZE = 100
|
||||
for (let i = 0; i < insertValues.length; i += BATCH_SIZE) {
|
||||
const batch = insertValues.slice(i, i + BATCH_SIZE)
|
||||
await tx.insert(preferenceTable).values(batch)
|
||||
|
||||
// Report progress
|
||||
const progress = Math.round(((i + batch.length) / insertValues.length) * 100)
|
||||
this.reportProgress(progress, `已迁移 ${i + batch.length}/${insertValues.length} 条配置`)
|
||||
}
|
||||
})
|
||||
|
||||
logger.info('Execute completed', { processedCount: this.preparedItems.length })
|
||||
|
||||
return {
|
||||
success: true,
|
||||
processedCount: this.preparedItems.length
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Execute failed', error as Error)
|
||||
return {
|
||||
success: false,
|
||||
processedCount: 0,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async validate(ctx: MigrationContext): Promise<ValidateResult> {
|
||||
const errors: ValidationError[] = []
|
||||
const db = ctx.db
|
||||
|
||||
try {
|
||||
// Count validation
|
||||
const result = await db
|
||||
.select({ count: sql<number>`count(*)` })
|
||||
.from(preferenceTable)
|
||||
.where(eq(preferenceTable.scope, 'default'))
|
||||
.get()
|
||||
|
||||
const targetCount = result?.count ?? 0
|
||||
|
||||
// Sample validation - check critical keys
|
||||
const criticalKeys = ['app.language', 'ui.theme_mode', 'app.zoom_factor']
|
||||
for (const key of criticalKeys) {
|
||||
const record = await db
|
||||
.select()
|
||||
.from(preferenceTable)
|
||||
.where(and(eq(preferenceTable.scope, 'default'), eq(preferenceTable.key, key)))
|
||||
.get()
|
||||
|
||||
if (!record) {
|
||||
// Not an error if the key wasn't in source data
|
||||
const wasPrepared = this.preparedItems.some((item) => item.targetKey === key)
|
||||
if (wasPrepared) {
|
||||
errors.push({
|
||||
key,
|
||||
message: `Critical preference '${key}' not found after migration`
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: errors.length === 0,
|
||||
errors,
|
||||
stats: {
|
||||
sourceCount: this.preparedItems.length,
|
||||
targetCount,
|
||||
skippedCount: this.skippedCount
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Validation failed', error as Error)
|
||||
return {
|
||||
success: false,
|
||||
errors: [
|
||||
{
|
||||
key: 'validation',
|
||||
message: error instanceof Error ? error.message : String(error)
|
||||
}
|
||||
],
|
||||
stats: {
|
||||
sourceCount: this.preparedItems.length,
|
||||
targetCount: 0,
|
||||
skippedCount: this.skippedCount
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Build the full list of preference migration items from the two static
 * mapping tables (ElectronStore mappings plus category-grouped Redux mappings).
 * Default values are resolved from DefaultPreferences, falling back to null.
 */
private loadMigrationItems(): MigrationItem[] {
  // ElectronStore-backed preferences carry no source category.
  const electronItems: MigrationItem[] = ELECTRON_STORE_MAPPINGS.map((mapping) => ({
    originalKey: mapping.originalKey,
    targetKey: mapping.targetKey,
    defaultValue: DefaultPreferences.default[mapping.targetKey] ?? null,
    source: 'electronStore'
  }))

  // Redux-backed preferences are grouped by their top-level state category.
  const reduxItems: MigrationItem[] = []
  for (const [category, mappings] of Object.entries(REDUX_STORE_MAPPINGS)) {
    for (const mapping of mappings) {
      reduxItems.push({
        originalKey: mapping.originalKey,
        targetKey: mapping.targetKey,
        sourceCategory: category,
        defaultValue: DefaultPreferences.default[mapping.targetKey] ?? null,
        source: 'redux'
      })
    }
  }

  // ElectronStore items first, matching the order the mappings are declared in.
  return [...electronItems, ...reduxItems]
}
|
||||
}
|
||||
@@ -1,21 +0,0 @@
|
||||
/**
|
||||
* Migrator registration and exports
|
||||
*/
|
||||
|
||||
export { BaseMigrator } from './BaseMigrator'
|
||||
|
||||
// Import all migrators
|
||||
import { AssistantMigrator } from './AssistantMigrator'
|
||||
import { ChatMigrator } from './ChatMigrator'
|
||||
import { KnowledgeMigrator } from './KnowledgeMigrator'
|
||||
import { PreferencesMigrator } from './PreferencesMigrator'
|
||||
|
||||
// Export migrator classes
|
||||
export { AssistantMigrator, ChatMigrator, KnowledgeMigrator, PreferencesMigrator }
|
||||
|
||||
/**
|
||||
* Get all registered migrators in execution order
|
||||
*/
|
||||
export function getAllMigrators() {
|
||||
return [new PreferencesMigrator(), new AssistantMigrator(), new KnowledgeMigrator(), new ChatMigrator()]
|
||||
}
|
||||
@@ -1,68 +0,0 @@
|
||||
/**
|
||||
* Dexie file reader for accessing exported Dexie table data
|
||||
* Dexie data is exported by Renderer to JSON files
|
||||
*/
|
||||
|
||||
import fs from 'fs/promises'
|
||||
import path from 'path'
|
||||
|
||||
import { JSONStreamReader } from './JSONStreamReader'
|
||||
|
||||
export class DexieFileReader {
|
||||
private exportPath: string
|
||||
|
||||
constructor(exportPath: string) {
|
||||
this.exportPath = exportPath
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the export path
|
||||
*/
|
||||
getExportPath(): string {
|
||||
return this.exportPath
|
||||
}
|
||||
|
||||
/**
|
||||
* Read exported table data (for small tables)
|
||||
* @param tableName - Name of the table to read
|
||||
*/
|
||||
async readTable<T>(tableName: string): Promise<T[]> {
|
||||
const filePath = path.join(this.exportPath, `${tableName}.json`)
|
||||
const content = await fs.readFile(filePath, 'utf-8')
|
||||
return JSON.parse(content)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create stream reader for large tables
|
||||
* Use this for tables with large amounts of data (e.g., messages)
|
||||
* @param tableName - Name of the table to stream
|
||||
*/
|
||||
createStreamReader(tableName: string): JSONStreamReader {
|
||||
const filePath = path.join(this.exportPath, `${tableName}.json`)
|
||||
return new JSONStreamReader(filePath)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a table export file exists
|
||||
* @param tableName - Name of the table
|
||||
*/
|
||||
async tableExists(tableName: string): Promise<boolean> {
|
||||
const filePath = path.join(this.exportPath, `${tableName}.json`)
|
||||
try {
|
||||
await fs.access(filePath)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file size for a table export
|
||||
* @param tableName - Name of the table
|
||||
*/
|
||||
async getTableFileSize(tableName: string): Promise<number> {
|
||||
const filePath = path.join(this.exportPath, `${tableName}.json`)
|
||||
const stats = await fs.stat(filePath)
|
||||
return stats.size
|
||||
}
|
||||
}
|
||||
@@ -1,130 +0,0 @@
|
||||
/**
|
||||
* Streaming JSON reader for processing large JSON array files
|
||||
* Uses stream-json library to avoid loading entire file into memory
|
||||
*/
|
||||
|
||||
import { createReadStream } from 'fs'
|
||||
import { parser } from 'stream-json'
|
||||
import { streamArray } from 'stream-json/streamers/StreamArray'
|
||||
|
||||
/**
 * Streaming JSON reader for processing large JSON array files.
 * Uses stream-json library to avoid loading entire file into memory.
 *
 * The file is expected to contain a single top-level JSON array; each
 * element is surfaced by stream-json's streamArray as `{ key, value }`.
 */
export class JSONStreamReader {
  // Absolute path of the JSON array file to read.
  private filePath: string

  constructor(filePath: string) {
    this.filePath = filePath
  }

  /**
   * Read JSON array in streaming mode with batch processing.
   *
   * The pipeline is paused while each onBatch callback runs and resumed once
   * it settles, giving the callback backpressure against the file reader.
   *
   * NOTE(review): the 'data' handler is async and pause() only stops future
   * reads — events already in flight still arrive while isPaused is true, so
   * a batch handed to onBatch may exceed batchSize. Confirm callers tolerate
   * oversized batches.
   * NOTE(review): when onBatch rejects, the promise rejects but the paused
   * pipeline is never destroyed — the underlying file stream appears to stay
   * open. Verify cleanup expectations.
   *
   * @param batchSize - Number of items per batch
   * @param onBatch - Callback for each batch
   * @returns Total number of items processed
   */
  async readInBatches<T>(
    batchSize: number,
    onBatch: (items: T[], batchIndex: number) => Promise<void>
  ): Promise<number> {
    return new Promise((resolve, reject) => {
      const pipeline = createReadStream(this.filePath).pipe(parser()).pipe(streamArray())

      let batch: T[] = []
      let batchIndex = 0
      let totalCount = 0
      // True while an onBatch callback is in flight and the pipeline is paused.
      let isPaused = false

      // Hand the accumulated batch to onBatch with the stream paused; resume
      // only after the callback resolves. A rejection fails the whole read.
      const processBatch = async () => {
        if (batch.length === 0) return

        const currentBatch = batch
        batch = []
        isPaused = true
        pipeline.pause()

        try {
          await onBatch(currentBatch, batchIndex++)
          isPaused = false
          pipeline.resume()
        } catch (error) {
          reject(error)
        }
      }

      pipeline.on('data', async ({ value }: { value: T }) => {
        batch.push(value)
        totalCount++

        if (batch.length >= batchSize && !isPaused) {
          await processBatch()
        }
      })

      pipeline.on('end', async () => {
        try {
          // Process remaining items
          if (batch.length > 0) {
            await onBatch(batch, batchIndex)
          }
          resolve(totalCount)
        } catch (error) {
          reject(error)
        }
      })

      pipeline.on('error', reject)
    })
  }

  /**
   * Count total items in the JSON array without loading all data
   */
  async count(): Promise<number> {
    return new Promise((resolve, reject) => {
      const pipeline = createReadStream(this.filePath).pipe(parser()).pipe(streamArray())

      let count = 0

      // Each 'data' event is one array element; only the count is kept.
      pipeline.on('data', () => {
        count++
      })

      pipeline.on('end', () => {
        resolve(count)
      })

      pipeline.on('error', reject)
    })
  }

  /**
   * Read first N items for sampling/validation
   *
   * Destroys the stream as soon as n items have been collected; if the file
   * holds fewer than n items, whatever was read is returned at 'end'.
   *
   * @param n - Number of items to read
   */
  async readSample<T>(n: number): Promise<T[]> {
    return new Promise((resolve, reject) => {
      const pipeline = createReadStream(this.filePath).pipe(parser()).pipe(streamArray())

      const items: T[] = []

      pipeline.on('data', ({ value }: { value: T }) => {
        items.push(value)
        if (items.length >= n) {
          // Early stop: destroy() may surface an error, handled below.
          pipeline.destroy()
          resolve(items)
        }
      })

      pipeline.on('end', () => {
        resolve(items)
      })

      pipeline.on('error', (error) => {
        // Ignore error from destroy()
        // (the promise is already resolved when items.length >= n)
        if (items.length >= n) {
          resolve(items)
        } else {
          reject(error)
        }
      })
    })
  }
}
|
||||
@@ -1,65 +0,0 @@
|
||||
/**
|
||||
* Redux state reader for accessing Redux Persist data
|
||||
* Data is parsed by Renderer before IPC transfer
|
||||
*/
|
||||
|
||||
export class ReduxStateReader {
|
||||
private data: Record<string, unknown>
|
||||
|
||||
constructor(rawData: Record<string, unknown>) {
|
||||
this.data = rawData
|
||||
}
|
||||
|
||||
/**
|
||||
* Read value from Redux state with nested path support
|
||||
* @param category - Top-level category (e.g., 'settings', 'assistants')
|
||||
* @param key - Key within category, supports dot notation (e.g., 'codeEditor.enabled')
|
||||
* @returns The value or undefined if not found
|
||||
* @example
|
||||
* reader.get('settings', 'codeEditor.enabled')
|
||||
* reader.get('assistants', 'defaultAssistant')
|
||||
*/
|
||||
get<T>(category: string, key: string): T | undefined {
|
||||
const categoryData = this.data[category]
|
||||
if (!categoryData) return undefined
|
||||
|
||||
// Support nested paths like "codeEditor.enabled"
|
||||
if (key.includes('.')) {
|
||||
const keyPath = key.split('.')
|
||||
let current: unknown = categoryData
|
||||
|
||||
for (const segment of keyPath) {
|
||||
if (current && typeof current === 'object') {
|
||||
current = (current as Record<string, unknown>)[segment]
|
||||
} else {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
return current as T
|
||||
}
|
||||
|
||||
return (categoryData as Record<string, unknown>)[key] as T
|
||||
}
|
||||
|
||||
/**
|
||||
* Get entire category data
|
||||
* @param category - Category name
|
||||
*/
|
||||
getCategory<T>(category: string): T | undefined {
|
||||
return this.data[category] as T | undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a category exists
|
||||
*/
|
||||
hasCategory(category: string): boolean {
|
||||
return category in this.data
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all available categories
|
||||
*/
|
||||
getCategories(): string[] {
|
||||
return Object.keys(this.data)
|
||||
}
|
||||
}
|
||||
@@ -1,442 +0,0 @@
|
||||
/**
|
||||
* IPC handler for migration communication between Main and Renderer
|
||||
*/
|
||||
|
||||
import { loggerService } from '@logger'
|
||||
import BackupManager from '@main/services/BackupManager'
|
||||
import { MigrationIpcChannels, type MigrationProgress } from '@shared/data/migration/v2/types'
|
||||
import { app, dialog, ipcMain } from 'electron'
|
||||
import fs from 'fs/promises'
|
||||
import path from 'path'
|
||||
|
||||
import { migrationEngine } from '../core/MigrationEngine'
|
||||
import { migrationWindowManager } from './MigrationWindowManager'
|
||||
|
||||
const logger = loggerService.withContext('MigrationIpcHandler')

// Store for cached data from Renderer
// Redux snapshot pushed by the renderer via SendReduxData; consumed by StartMigration.
let cachedReduxData: Record<string, unknown> | null = null
// Directory of the renderer's Dexie JSON export; set by DexieExportCompleted.
let cachedDexieExportPath: string | null = null
const backupManager = new BackupManager()

// Current migration progress
// Mutable module-level snapshot: written via updateProgress() and served to
// the renderer through the GetProgress channel.
let currentProgress: MigrationProgress = {
  stage: 'introduction',
  overallProgress: 0,
  currentMessage: 'Ready to start data migration',
  migrators: []
}
|
||||
|
||||
/**
|
||||
* Register all migration IPC handlers
|
||||
*/
|
||||
/**
 * Register all migration IPC handlers.
 *
 * Every handler is registered on ipcMain under a MigrationIpcChannels channel
 * and removed again by unregisterMigrationIpcHandlers(). Stage transitions are
 * broadcast to the migration window through updateProgress().
 */
export function registerMigrationIpcHandlers(): void {
  logger.info('Registering migration IPC handlers')

  // Get user data path
  ipcMain.handle(MigrationIpcChannels.GetUserDataPath, () => {
    return app.getPath('userData')
  })

  // Check if migration is needed
  ipcMain.handle(MigrationIpcChannels.CheckNeeded, async () => {
    try {
      return await migrationEngine.needsMigration()
    } catch (error) {
      logger.error('Error checking migration needed', error as Error)
      throw error
    }
  })

  // Get current progress (the module-level snapshot kept by updateProgress)
  ipcMain.handle(MigrationIpcChannels.GetProgress, () => {
    return currentProgress
  })

  // Get last error
  ipcMain.handle(MigrationIpcChannels.GetLastError, async () => {
    try {
      return await migrationEngine.getLastError()
    } catch (error) {
      logger.error('Error getting last error', error as Error)
      throw error
    }
  })

  // Proceed to backup stage
  ipcMain.handle(MigrationIpcChannels.ProceedToBackup, async () => {
    try {
      updateProgress({
        stage: 'backup_required',
        overallProgress: 0,
        currentMessage: 'Data backup is required before migration can proceed',
        migrators: []
      })
      return true
    } catch (error) {
      logger.error('Error proceeding to backup', error as Error)
      throw error
    }
  })

  // Show Backup Dialog
  // Drives the whole backup sub-flow: save dialog -> performBackupToFile ->
  // stage transition to backup_confirmed (or back to backup_required on
  // failure/cancel).
  ipcMain.handle(MigrationIpcChannels.ShowBackupDialog, async () => {
    try {
      logger.info('Opening backup dialog for migration')

      // Update progress to indicate backup dialog is opening
      updateProgress({
        stage: 'backup_progress',
        overallProgress: 10,
        currentMessage: 'Opening backup dialog...',
        migrators: []
      })

      const result = await dialog.showSaveDialog({
        title: 'Save Migration Backup',
        defaultPath: `cherry-studio-migration-backup-${new Date().toISOString().split('T')[0]}.zip`,
        filters: [
          { name: 'Backup Files', extensions: ['zip'] },
          { name: 'All Files', extensions: ['*'] }
        ]
      })

      if (!result.canceled && result.filePath) {
        logger.info('User selected backup location', { filePath: result.filePath })
        updateProgress({
          stage: 'backup_progress',
          overallProgress: 10,
          currentMessage: 'Creating backup file...',
          migrators: []
        })

        // Perform the actual backup to the selected location
        const backupResult = await performBackupToFile(result.filePath)

        if (backupResult.success) {
          updateProgress({
            stage: 'backup_progress',
            overallProgress: 100,
            currentMessage: 'Backup created successfully!',
            migrators: []
          })

          // Wait a moment to show the success message, then transition to confirmed state
          // (1s delay is purely cosmetic so the success message stays visible)
          setTimeout(() => {
            updateProgress({
              stage: 'backup_confirmed',
              overallProgress: 100,
              currentMessage: 'Backup completed! Ready to start migration. Click "Start Migration" to continue.',
              migrators: []
            })
          }, 1000)
        } else {
          updateProgress({
            stage: 'backup_required',
            overallProgress: 0,
            currentMessage: `Backup failed: ${backupResult.error}`,
            migrators: []
          })
        }

        return backupResult
      } else {
        logger.info('User cancelled backup dialog')
        updateProgress({
          stage: 'backup_required',
          overallProgress: 0,
          currentMessage: 'Backup cancelled. Please create a backup to continue.',
          migrators: []
        })
        return { success: false, error: 'Backup cancelled by user' }
      }
    } catch (error) {
      logger.error('Error showing backup dialog', error as Error)
      updateProgress({
        stage: 'backup_required',
        overallProgress: 0,
        currentMessage: 'Backup process failed',
        migrators: []
      })
      throw error
    }
  })

  // Backup completed (renderer confirms an externally-performed backup)
  ipcMain.handle(MigrationIpcChannels.BackupCompleted, async () => {
    try {
      updateProgress({
        stage: 'backup_confirmed',
        overallProgress: 100,
        currentMessage: 'Backup completed! Ready to start migration. Click "Start Migration" to continue.',
        migrators: []
      })
      return true
    } catch (error) {
      logger.error('Error confirming backup', error as Error)
      throw error
    }
  })

  // Receive Redux data from Renderer (cached until StartMigration consumes it)
  ipcMain.handle(MigrationIpcChannels.SendReduxData, async (_event, data: Record<string, unknown>) => {
    try {
      cachedReduxData = data
      logger.info('Redux data received', {
        categories: Object.keys(data)
      })
      return true
    } catch (error) {
      logger.error('Error receiving Redux data', error as Error)
      throw error
    }
  })

  // Dexie export completed (renderer reports where the JSON export lives)
  ipcMain.handle(MigrationIpcChannels.DexieExportCompleted, async (_event, exportPath: string) => {
    try {
      cachedDexieExportPath = exportPath
      logger.info('Dexie export completed', { exportPath })
      return true
    } catch (error) {
      logger.error('Error receiving Dexie export path', error as Error)
      throw error
    }
  })

  // Write export file from Renderer
  ipcMain.handle(
    MigrationIpcChannels.WriteExportFile,
    async (_event, exportPath: string, tableName: string, jsonData: string) => {
      try {
        // Ensure export directory exists
        await fs.mkdir(exportPath, { recursive: true })

        // Write table data to file
        const filePath = path.join(exportPath, `${tableName}.json`)
        await fs.writeFile(filePath, jsonData, 'utf-8')

        logger.info('Export file written', { tableName, filePath })
        return true
      } catch (error) {
        logger.error('Error writing export file', error as Error)
        throw error
      }
    }
  )

  // Start the migration process
  // Requires both cached inputs (Redux snapshot + Dexie export path) to be set.
  ipcMain.handle(MigrationIpcChannels.StartMigration, async () => {
    try {
      if (!cachedReduxData || !cachedDexieExportPath) {
        throw new Error('Migration data not ready. Redux data or Dexie export path missing.')
      }

      // Set up progress callback
      migrationEngine.onProgress((progress) => {
        updateProgress(progress)
      })

      // Run migration
      const result = await migrationEngine.run(cachedReduxData, cachedDexieExportPath)

      if (result.success) {
        updateProgress({
          stage: 'migration_completed',
          overallProgress: 100,
          currentMessage: 'Migration completed successfully! Please confirm to continue.',
          // Force every migrator's status to completed for the final snapshot.
          migrators: currentProgress.migrators.map((m) => ({
            ...m,
            status: 'completed'
          }))
        })
      } else {
        updateProgress({
          stage: 'error',
          overallProgress: currentProgress.overallProgress,
          currentMessage: result.error || 'Migration failed',
          migrators: currentProgress.migrators,
          error: result.error
        })
      }

      return result
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error)
      logger.error('Error starting migration', error as Error)

      updateProgress({
        stage: 'error',
        overallProgress: currentProgress.overallProgress,
        currentMessage: errorMessage,
        migrators: currentProgress.migrators,
        error: errorMessage
      })

      throw error
    }
  })

  // Retry migration
  ipcMain.handle(MigrationIpcChannels.Retry, async () => {
    try {
      // Reset to backup confirmed stage
      updateProgress({
        stage: 'backup_confirmed',
        overallProgress: 0,
        currentMessage: 'Ready to retry migration',
        migrators: []
      })
      return true
    } catch (error) {
      logger.error('Error retrying migration', error as Error)
      throw error
    }
  })

  // Cancel migration
  ipcMain.handle(MigrationIpcChannels.Cancel, async () => {
    try {
      logger.info('Migration cancelled by user')
      migrationWindowManager.close()
      return true
    } catch (error) {
      logger.error('Error cancelling migration', error as Error)
      throw error
    }
  })

  // Restart app
  // NOTE(review): restartApp() is async but not awaited here, so its failures
  // would surface as unhandled rejections rather than through this handler —
  // confirm this fire-and-forget behavior is intended.
  ipcMain.handle(MigrationIpcChannels.Restart, async () => {
    try {
      logger.info('Restarting app after migration')
      migrationWindowManager.restartApp()
      return true
    } catch (error) {
      logger.error('Error restarting app', error as Error)
      throw error
    }
  })
}
|
||||
|
||||
/**
|
||||
* Unregister all migration IPC handlers
|
||||
*/
|
||||
export function unregisterMigrationIpcHandlers(): void {
|
||||
logger.info('Unregistering migration IPC handlers')
|
||||
|
||||
const channels = Object.values(MigrationIpcChannels)
|
||||
for (const channel of channels) {
|
||||
ipcMain.removeHandler(channel)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update progress and broadcast to window
|
||||
*/
|
||||
/**
 * Update progress and broadcast to window.
 *
 * Overwrites the module-level currentProgress snapshot (served by the
 * GetProgress handler) and pushes the same object to the migration window
 * over the Progress channel.
 */
function updateProgress(progress: MigrationProgress): void {
  currentProgress = progress
  migrationWindowManager.send(MigrationIpcChannels.Progress, progress)
}
|
||||
|
||||
/**
|
||||
* Reset cached data
|
||||
*/
|
||||
/**
 * Reset cached data.
 *
 * Clears the Redux snapshot and Dexie export path received from the renderer
 * and returns the progress snapshot to its initial 'introduction' state.
 */
export function resetMigrationData(): void {
  cachedReduxData = null
  cachedDexieExportPath = null
  currentProgress = {
    stage: 'introduction',
    overallProgress: 0,
    currentMessage: 'Ready to start data migration',
    migrators: []
  }
}
|
||||
|
||||
/**
|
||||
* Get backup data from the current application
|
||||
*/
|
||||
async function getBackupData(): Promise<string> {
|
||||
try {
|
||||
const { getDataPath } = await import('@main/utils')
|
||||
const dataPath = getDataPath()
|
||||
|
||||
// Gather basic system information
|
||||
const data = {
|
||||
backup: {
|
||||
timestamp: new Date().toISOString(),
|
||||
version: app.getVersion(),
|
||||
type: 'pre-migration-backup',
|
||||
note: 'This is a safety backup created before data migration'
|
||||
},
|
||||
system: {
|
||||
platform: process.platform,
|
||||
arch: process.arch,
|
||||
nodeVersion: process.version
|
||||
},
|
||||
// Include basic configuration files if they exist
|
||||
configs: {} as Record<string, any>
|
||||
}
|
||||
|
||||
// Check if there are any config files we should backup
|
||||
const configFiles = ['config.json', 'settings.json', 'preferences.json']
|
||||
for (const configFile of configFiles) {
|
||||
const configPath = path.join(dataPath, configFile)
|
||||
try {
|
||||
// Check if file exists
|
||||
await fs.access(configPath)
|
||||
const configContent = await fs.readFile(configPath, 'utf-8')
|
||||
data.configs[configFile] = JSON.parse(configContent)
|
||||
} catch (err) {
|
||||
// Ignore if file doesn't exist or can't be read
|
||||
}
|
||||
}
|
||||
|
||||
return JSON.stringify(data, null, 2)
|
||||
} catch (error) {
|
||||
logger.error('Failed to get backup data:', error as Error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform backup to a specific file location
|
||||
*/
|
||||
async function performBackupToFile(filePath: string): Promise<{ success: boolean; error?: string }> {
|
||||
try {
|
||||
logger.info('Performing backup to file', { filePath })
|
||||
|
||||
// Get backup data
|
||||
const backupData = await getBackupData()
|
||||
|
||||
// Extract directory and filename from the full path
|
||||
const destinationDir = path.dirname(filePath)
|
||||
const fileName = path.basename(filePath)
|
||||
|
||||
// Use the existing backup manager to create a backup
|
||||
const backupPath = await backupManager.backup(
|
||||
null as any, // IpcMainInvokeEvent - we're calling directly so pass null
|
||||
fileName,
|
||||
backupData,
|
||||
destinationDir,
|
||||
false // Don't skip backup files - full backup for migration safety
|
||||
)
|
||||
|
||||
if (backupPath) {
|
||||
logger.info('Backup created successfully', { path: backupPath })
|
||||
return { success: true }
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Backup process did not return a file path'
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
logger.error('Backup failed during migration:', error as Error)
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,142 +0,0 @@
|
||||
/**
|
||||
* Migration window manager for creating and managing the migration window
|
||||
*/
|
||||
|
||||
import { loggerService } from '@logger'
|
||||
import { isDev } from '@main/constant'
|
||||
import { app, BrowserWindow, dialog } from 'electron'
|
||||
import { join } from 'path'
|
||||
|
||||
const logger = loggerService.withContext('MigrationWindowManager')
|
||||
|
||||
/**
 * Migration window manager for creating and managing the migration window.
 *
 * Holds at most one BrowserWindow at a time; the reference is cleared both by
 * close() and by the window's own 'closed' event.
 */
export class MigrationWindowManager {
  private window: BrowserWindow | null = null

  /**
   * Check if migration window exists and is not destroyed
   */
  hasWindow(): boolean {
    return this.window !== null && !this.window.isDestroyed()
  }

  /**
   * Get the current migration window
   */
  getWindow(): BrowserWindow | null {
    return this.window
  }

  /**
   * Create and show the migration window.
   * If a live window already exists it is re-shown instead of recreated.
   */
  create(): BrowserWindow {
    if (this.hasWindow()) {
      this.window!.show()
      return this.window!
    }

    logger.info('Creating migration window')

    // Fixed-size, frameless window; kept hidden until 'ready-to-show'.
    this.window = new BrowserWindow({
      width: 640,
      height: 480,
      resizable: false,
      maximizable: false,
      minimizable: false,
      show: false,
      frame: false,
      autoHideMenuBar: true,
      webPreferences: {
        preload: join(__dirname, '../preload/simplest.js'),
        sandbox: false,
        // NOTE(review): webSecurity is disabled for this window — confirm this
        // is actually required for the migration page.
        webSecurity: false,
        contextIsolation: true
      }
    })

    // Load the migration window
    // (dev server URL in development, bundled file in production)
    if (isDev && process.env['ELECTRON_RENDERER_URL']) {
      this.window.loadURL(process.env['ELECTRON_RENDERER_URL'] + '/migrationV2.html')
    } else {
      this.window.loadFile(join(__dirname, '../renderer/migrationV2.html'))
    }

    this.window.once('ready-to-show', () => {
      this.window?.show()
      logger.info('Migration window shown')
    })

    // Drop the reference once the window is gone so hasWindow() stays accurate.
    this.window.on('closed', () => {
      this.window = null
      logger.info('Migration window closed')
    })

    return this.window
  }

  /**
   * Wait for window to be ready.
   * Resolves immediately when there is no window or it has finished loading;
   * otherwise resolves on the webContents 'did-finish-load' event.
   */
  async waitForReady(): Promise<void> {
    if (!this.window) return

    return new Promise<void>((resolve) => {
      if (this.window!.webContents.isLoading()) {
        this.window!.webContents.once('did-finish-load', () => resolve())
      } else {
        resolve()
      }
    })
  }

  /**
   * Close the migration window
   */
  close(): void {
    if (this.hasWindow()) {
      this.window!.close()
      this.window = null
    }
  }

  /**
   * Send message to the migration window.
   * Silently does nothing when no live window exists.
   */
  send(channel: string, ...args: unknown[]): void {
    if (this.hasWindow()) {
      this.window!.webContents.send(channel, ...args)
    }
  }

  /**
   * Restart the application.
   *
   * In development (or unpackaged) builds, relaunch is unreliable, so the user
   * is told to restart manually and the app quits; in production the app
   * relaunches itself.
   */
  async restartApp(): Promise<void> {
    logger.info('Restarting application after migration')

    // In development mode, relaunch might not work properly
    if (isDev || !app.isPackaged) {
      logger.warn('Development mode detected - showing restart instruction instead of auto-restart')

      await dialog.showMessageBox({
        type: 'info',
        title: 'Migration Complete - Restart Required',
        message:
          'Data migration completed successfully!\n\nSince you are in development mode, please manually restart the application to continue.',
        buttons: ['Close App'],
        defaultId: 0
      })

      this.close()
      app.quit()
    } else {
      // Production mode - clean up first, then relaunch
      this.close()
      app.relaunch()
      app.exit(0)
    }
  }
}
|
||||
|
||||
// Export singleton instance
|
||||
export const migrationWindowManager = new MigrationWindowManager()
|
||||
@@ -10,7 +10,7 @@ import { electronApp, optimizer } from '@electron-toolkit/utils'
|
||||
import { dbService } from '@data/db/DbService'
|
||||
import { preferenceService } from '@data/PreferenceService'
|
||||
import { replaceDevtoolsFont } from '@main/utils/windowUtil'
|
||||
import { app, dialog, crashReporter } from 'electron'
|
||||
import { app, dialog } from 'electron'
|
||||
import installExtension, { REACT_DEVELOPER_TOOLS, REDUX_DEVTOOLS } from 'electron-devtools-installer'
|
||||
import { isDev, isLinux, isWin } from './constant'
|
||||
|
||||
@@ -34,28 +34,13 @@ import { registerShortcuts } from './services/ShortcutService'
|
||||
import { TrayService } from './services/TrayService'
|
||||
import { versionService } from './services/VersionService'
|
||||
import { windowService } from './services/WindowService'
|
||||
import {
|
||||
getAllMigrators,
|
||||
migrationEngine,
|
||||
migrationWindowManager,
|
||||
registerMigrationIpcHandlers,
|
||||
unregisterMigrationIpcHandlers
|
||||
} from '@data/migration/v2'
|
||||
import { dataRefactorMigrateService } from './data/migrate/dataRefactor/DataRefactorMigrateService'
|
||||
import { dataApiService } from '@data/DataApiService'
|
||||
import { cacheService } from '@data/CacheService'
|
||||
import { initWebviewHotkeys } from './services/WebviewService'
|
||||
import { runAsyncFunction } from './utils'
|
||||
|
||||
const logger = loggerService.withContext('MainEntry')
|
||||
|
||||
// enable local crash reports
|
||||
crashReporter.start({
|
||||
companyName: 'CherryHQ',
|
||||
productName: 'CherryStudio',
|
||||
submitURL: '',
|
||||
uploadToServer: false
|
||||
})
|
||||
|
||||
/**
|
||||
* Disable hardware acceleration if setting is enabled
|
||||
*/
|
||||
@@ -133,40 +118,27 @@ if (!app.requestSingleInstanceLock()) {
|
||||
// Some APIs can only be used after this event occurs.
|
||||
app.whenReady().then(async () => {
|
||||
// First of all, init & migrate the database
|
||||
await dbService.init()
|
||||
await dbService.migrateDb()
|
||||
await dbService.migrateSeed('preference')
|
||||
|
||||
// Data Migration v2
|
||||
// Data Refactor Migration
|
||||
// Check if data migration is needed BEFORE creating any windows
|
||||
try {
|
||||
logger.info('Checking if data migration v2 is needed')
|
||||
logger.info('Checking if data refactor migration is needed')
|
||||
const isMigrated = await dataRefactorMigrateService.isMigrated()
|
||||
logger.info('Migration status check result', { isMigrated })
|
||||
|
||||
// Register migration IPC handlers
|
||||
registerMigrationIpcHandlers()
|
||||
|
||||
// Register migrators
|
||||
migrationEngine.registerMigrators(getAllMigrators())
|
||||
|
||||
const needsMigration = await migrationEngine.needsMigration()
|
||||
logger.info('Migration status check result', { needsMigration })
|
||||
|
||||
if (needsMigration) {
|
||||
logger.info('Data Migration v2 needed, starting migration process')
|
||||
if (!isMigrated) {
|
||||
logger.info('Data Refactor Migration needed, starting migration process')
|
||||
|
||||
try {
|
||||
// Create and show migration window
|
||||
migrationWindowManager.create()
|
||||
await migrationWindowManager.waitForReady()
|
||||
await dataRefactorMigrateService.runMigration()
|
||||
logger.info('Migration window created successfully')
|
||||
// Migration window will handle the flow, no need to continue startup
|
||||
// Migration service will handle the migration flow, no need to continue startup
|
||||
return
|
||||
} catch (migrationError) {
|
||||
logger.error('Failed to start migration process', migrationError as Error)
|
||||
|
||||
// Cleanup IPC handlers on failure
|
||||
unregisterMigrationIpcHandlers()
|
||||
|
||||
// Migration is required for this version - show error and exit
|
||||
await dialog.showErrorBox(
|
||||
'Migration Required - Application Cannot Start',
|
||||
@@ -205,6 +177,17 @@ if (!app.requestSingleInstanceLock()) {
|
||||
// Initialize CacheService
|
||||
await cacheService.initialize()
|
||||
|
||||
// // Create two test windows for cross-window preference sync testing
|
||||
// logger.info('Creating test windows for PreferenceService cross-window sync testing')
|
||||
// const testWindow1 = dataRefactorMigrateService.createTestWindow()
|
||||
// const testWindow2 = dataRefactorMigrateService.createTestWindow()
|
||||
|
||||
// // Position windows to avoid overlap
|
||||
// testWindow1.once('ready-to-show', () => {
|
||||
// const [x, y] = testWindow1.getPosition()
|
||||
// testWindow2.setPosition(x + 50, y + 50)
|
||||
// })
|
||||
|
||||
/************FOR TESTING ONLY END****************/
|
||||
|
||||
// Record current version for tracking
|
||||
@@ -257,33 +240,39 @@ if (!app.requestSingleInstanceLock()) {
|
||||
//start selection assistant service
|
||||
initSelectionService()
|
||||
|
||||
runAsyncFunction(async () => {
|
||||
// Start API server if enabled or if agents exist
|
||||
try {
|
||||
const config = await apiServerService.getCurrentConfig()
|
||||
logger.info('API server config:', config)
|
||||
// Initialize Agent Service
|
||||
try {
|
||||
await agentService.initialize()
|
||||
logger.info('Agent service initialized successfully')
|
||||
} catch (error: any) {
|
||||
logger.error('Failed to initialize Agent service:', error)
|
||||
}
|
||||
|
||||
// Check if there are any agents
|
||||
let shouldStart = config.enabled
|
||||
if (!shouldStart) {
|
||||
try {
|
||||
const { total } = await agentService.listAgents({ limit: 1 })
|
||||
if (total > 0) {
|
||||
shouldStart = true
|
||||
logger.info(`Detected ${total} agent(s), auto-starting API server`)
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.warn('Failed to check agent count:', error)
|
||||
// Start API server if enabled or if agents exist
|
||||
try {
|
||||
const config = await apiServerService.getCurrentConfig()
|
||||
logger.info('API server config:', config)
|
||||
|
||||
// Check if there are any agents
|
||||
let shouldStart = config.enabled
|
||||
if (!shouldStart) {
|
||||
try {
|
||||
const { total } = await agentService.listAgents({ limit: 1 })
|
||||
if (total > 0) {
|
||||
shouldStart = true
|
||||
logger.info(`Detected ${total} agent(s), auto-starting API server`)
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.warn('Failed to check agent count:', error)
|
||||
}
|
||||
|
||||
if (shouldStart) {
|
||||
await apiServerService.start()
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.error('Failed to check/start API server:', error)
|
||||
}
|
||||
})
|
||||
|
||||
if (shouldStart) {
|
||||
await apiServerService.start()
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.error('Failed to check/start API server:', error)
|
||||
}
|
||||
})
|
||||
|
||||
registerProtocolClient(app)
|
||||
|
||||
@@ -494,44 +494,6 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
ipcMain.handle(IpcChannel.System_GetDeviceType, () => (isMac ? 'mac' : isWin ? 'windows' : 'linux'))
|
||||
ipcMain.handle(IpcChannel.System_GetHostname, () => require('os').hostname())
|
||||
ipcMain.handle(IpcChannel.System_GetCpuName, () => require('os').cpus()[0].model)
|
||||
ipcMain.handle(IpcChannel.System_CheckGitBash, () => {
|
||||
if (!isWin) {
|
||||
return true // Non-Windows systems don't need Git Bash
|
||||
}
|
||||
|
||||
try {
|
||||
// Check common Git Bash installation paths
|
||||
const commonPaths = [
|
||||
path.join(process.env.ProgramFiles || 'C:\\Program Files', 'Git', 'bin', 'bash.exe'),
|
||||
path.join(process.env['ProgramFiles(x86)'] || 'C:\\Program Files (x86)', 'Git', 'bin', 'bash.exe'),
|
||||
path.join(process.env.LOCALAPPDATA || '', 'Programs', 'Git', 'bin', 'bash.exe')
|
||||
]
|
||||
|
||||
// Check if any of the common paths exist
|
||||
for (const bashPath of commonPaths) {
|
||||
if (fs.existsSync(bashPath)) {
|
||||
logger.debug('Git Bash found', { path: bashPath })
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// Check if git is in PATH
|
||||
const { execSync } = require('child_process')
|
||||
try {
|
||||
execSync('git --version', { stdio: 'ignore' })
|
||||
logger.debug('Git found in PATH')
|
||||
return true
|
||||
} catch {
|
||||
// Git not in PATH
|
||||
}
|
||||
|
||||
logger.debug('Git Bash not found on Windows system')
|
||||
return false
|
||||
} catch (error) {
|
||||
logger.error('Error checking Git Bash', error as Error)
|
||||
return false
|
||||
}
|
||||
})
|
||||
ipcMain.handle(IpcChannel.System_ToggleDevTools, (e) => {
|
||||
const win = BrowserWindow.fromWebContents(e.sender)
|
||||
win && win.webContents.toggleDevTools()
|
||||
@@ -1078,10 +1040,6 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
|
||||
ipcMain.handle(IpcChannel.WebSocket_SendFile, WebSocketService.sendFile)
|
||||
ipcMain.handle(IpcChannel.WebSocket_GetAllCandidates, WebSocketService.getAllCandidates)
|
||||
|
||||
ipcMain.handle(IpcChannel.APP_CrashRenderProcess, () => {
|
||||
mainWindow.webContents.forcefullyCrashRenderer()
|
||||
})
|
||||
|
||||
// Preference handlers
|
||||
PreferenceService.registerIpcHandler()
|
||||
}
|
||||
|
||||
@@ -21,7 +21,6 @@ type ApiResponse<T> = {
|
||||
type BatchUploadResponse = {
|
||||
batch_id: string
|
||||
file_urls: string[]
|
||||
headers?: Record<string, string>[]
|
||||
}
|
||||
|
||||
type ExtractProgress = {
|
||||
@@ -56,7 +55,7 @@ type QuotaResponse = {
|
||||
export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
constructor(provider: PreprocessProvider, userId?: string) {
|
||||
super(provider, userId)
|
||||
// TODO: remove after free period ends
|
||||
// todo:免费期结束后删除
|
||||
this.provider.apiKey = this.provider.apiKey || import.meta.env.MAIN_VITE_MINERU_API_KEY
|
||||
}
|
||||
|
||||
@@ -69,21 +68,21 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
logger.info(`MinerU preprocess processing started: ${filePath}`)
|
||||
await this.validateFile(filePath)
|
||||
|
||||
// 1. Get upload URL and upload file
|
||||
// 1. 获取上传URL并上传文件
|
||||
const batchId = await this.uploadFile(file)
|
||||
logger.info(`MinerU file upload completed: batch_id=${batchId}`)
|
||||
|
||||
// 2. Wait for completion and fetch results
|
||||
// 2. 等待处理完成并获取结果
|
||||
const extractResult = await this.waitForCompletion(sourceId, batchId, file.origin_name)
|
||||
logger.info(`MinerU processing completed for batch: ${batchId}`)
|
||||
|
||||
// 3. Download and extract output
|
||||
// 3. 下载并解压文件
|
||||
const { path: outputPath } = await this.downloadAndExtractFile(extractResult.full_zip_url!, file)
|
||||
|
||||
// 4. check quota
|
||||
const quota = await this.checkQuota()
|
||||
|
||||
// 5. Create processed file metadata
|
||||
// 5. 创建处理后的文件信息
|
||||
return {
|
||||
processedFile: this.createProcessedFileInfo(file, outputPath),
|
||||
quota
|
||||
@@ -116,48 +115,23 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
}
|
||||
|
||||
private async validateFile(filePath: string): Promise<void> {
|
||||
// Phase 1: check file size (without loading into memory)
|
||||
logger.info(`Validating PDF file: ${filePath}`)
|
||||
const stats = await fs.promises.stat(filePath)
|
||||
const fileSizeBytes = stats.size
|
||||
|
||||
// Ensure file size is under 200MB
|
||||
if (fileSizeBytes >= 200 * 1024 * 1024) {
|
||||
const fileSizeMB = Math.round(fileSizeBytes / (1024 * 1024))
|
||||
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 200MB`)
|
||||
}
|
||||
|
||||
// Phase 2: check page count (requires reading file with error handling)
|
||||
const pdfBuffer = await fs.promises.readFile(filePath)
|
||||
|
||||
try {
|
||||
const doc = await this.readPdf(pdfBuffer)
|
||||
const doc = await this.readPdf(pdfBuffer)
|
||||
|
||||
// Ensure page count is under 600 pages
|
||||
if (doc.numPages >= 600) {
|
||||
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 600 pages`)
|
||||
}
|
||||
|
||||
logger.info(`PDF validation passed: ${doc.numPages} pages, ${Math.round(fileSizeBytes / (1024 * 1024))}MB`)
|
||||
} catch (error: any) {
|
||||
// If the page limit is exceeded, rethrow immediately
|
||||
if (error.message.includes('exceeds the limit')) {
|
||||
throw error
|
||||
}
|
||||
|
||||
// If PDF parsing fails, log a detailed warning but continue processing
|
||||
logger.warn(
|
||||
`Failed to parse PDF structure (file may be corrupted or use non-standard format). ` +
|
||||
`Skipping page count validation. Will attempt to process with MinerU API. ` +
|
||||
`Error details: ${error.message}. ` +
|
||||
`Suggestion: If processing fails, try repairing the PDF using tools like Adobe Acrobat or online PDF repair services.`
|
||||
)
|
||||
// Do not throw; continue processing
|
||||
// 文件页数小于600页
|
||||
if (doc.numPages >= 600) {
|
||||
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 600 pages`)
|
||||
}
|
||||
// 文件大小小于200MB
|
||||
if (pdfBuffer.length >= 200 * 1024 * 1024) {
|
||||
const fileSizeMB = Math.round(pdfBuffer.length / (1024 * 1024))
|
||||
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 200MB`)
|
||||
}
|
||||
}
|
||||
|
||||
private createProcessedFileInfo(file: FileMetadata, outputPath: string): FileMetadata {
|
||||
// Locate the main extracted file
|
||||
// 查找解压后的主要文件
|
||||
let finalPath = ''
|
||||
let finalName = file.origin_name.replace('.pdf', '.md')
|
||||
|
||||
@@ -169,14 +143,14 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
const originalMdPath = path.join(outputPath, mdFile)
|
||||
const newMdPath = path.join(outputPath, finalName)
|
||||
|
||||
// Rename the file to match the original name
|
||||
// 重命名文件为原始文件名
|
||||
try {
|
||||
fs.renameSync(originalMdPath, newMdPath)
|
||||
finalPath = newMdPath
|
||||
logger.info(`Renamed markdown file from ${mdFile} to ${finalName}`)
|
||||
} catch (renameError) {
|
||||
logger.warn(`Failed to rename file ${mdFile} to ${finalName}: ${renameError}`)
|
||||
// If renaming fails, fall back to the original file
|
||||
// 如果重命名失败,使用原文件
|
||||
finalPath = originalMdPath
|
||||
finalName = mdFile
|
||||
}
|
||||
@@ -204,7 +178,7 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
logger.info(`Downloading MinerU result to: ${zipPath}`)
|
||||
|
||||
try {
|
||||
// Download the ZIP file
|
||||
// 下载ZIP文件
|
||||
const response = await net.fetch(zipUrl, { method: 'GET' })
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP ${response.status}: ${response.statusText}`)
|
||||
@@ -213,17 +187,17 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
fs.writeFileSync(zipPath, Buffer.from(arrayBuffer))
|
||||
logger.info(`Downloaded ZIP file: ${zipPath}`)
|
||||
|
||||
// Ensure the extraction directory exists
|
||||
// 确保提取目录存在
|
||||
if (!fs.existsSync(extractPath)) {
|
||||
fs.mkdirSync(extractPath, { recursive: true })
|
||||
}
|
||||
|
||||
// Extract the ZIP contents
|
||||
// 解压文件
|
||||
const zip = new AdmZip(zipPath)
|
||||
zip.extractAllTo(extractPath, true)
|
||||
logger.info(`Extracted files to: ${extractPath}`)
|
||||
|
||||
// Remove the temporary ZIP file
|
||||
// 删除临时ZIP文件
|
||||
fs.unlinkSync(zipPath)
|
||||
|
||||
return { path: extractPath }
|
||||
@@ -235,11 +209,11 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
|
||||
private async uploadFile(file: FileMetadata): Promise<string> {
|
||||
try {
|
||||
// Step 1: obtain the upload URL
|
||||
const { batchId, fileUrls, uploadHeaders } = await this.getBatchUploadUrls(file)
|
||||
// Step 2: upload the file to the obtained URL
|
||||
// 步骤1: 获取上传URL
|
||||
const { batchId, fileUrls } = await this.getBatchUploadUrls(file)
|
||||
// 步骤2: 上传文件到获取的URL
|
||||
const filePath = fileStorage.getFilePathById(file)
|
||||
await this.putFileToUrl(filePath, fileUrls[0], file.origin_name, uploadHeaders?.[0])
|
||||
await this.putFileToUrl(filePath, fileUrls[0])
|
||||
logger.info(`File uploaded successfully: ${filePath}`, { batchId, fileUrls })
|
||||
|
||||
return batchId
|
||||
@@ -249,9 +223,7 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
}
|
||||
}
|
||||
|
||||
private async getBatchUploadUrls(
|
||||
file: FileMetadata
|
||||
): Promise<{ batchId: string; fileUrls: string[]; uploadHeaders?: Record<string, string>[] }> {
|
||||
private async getBatchUploadUrls(file: FileMetadata): Promise<{ batchId: string; fileUrls: string[] }> {
|
||||
const endpoint = `${this.provider.apiHost}/api/v4/file-urls/batch`
|
||||
|
||||
const payload = {
|
||||
@@ -282,11 +254,10 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
if (response.ok) {
|
||||
const data: ApiResponse<BatchUploadResponse> = await response.json()
|
||||
if (data.code === 0 && data.data) {
|
||||
const { batch_id, file_urls, headers: uploadHeaders } = data.data
|
||||
const { batch_id, file_urls } = data.data
|
||||
return {
|
||||
batchId: batch_id,
|
||||
fileUrls: file_urls,
|
||||
uploadHeaders
|
||||
fileUrls: file_urls
|
||||
}
|
||||
} else {
|
||||
throw new Error(`API returned error: ${data.msg || JSON.stringify(data)}`)
|
||||
@@ -300,28 +271,18 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
}
|
||||
}
|
||||
|
||||
private async putFileToUrl(
|
||||
filePath: string,
|
||||
uploadUrl: string,
|
||||
fileName?: string,
|
||||
headers?: Record<string, string>
|
||||
): Promise<void> {
|
||||
private async putFileToUrl(filePath: string, uploadUrl: string): Promise<void> {
|
||||
try {
|
||||
const fileBuffer = await fs.promises.readFile(filePath)
|
||||
const fileSize = fileBuffer.byteLength
|
||||
const displayName = fileName ?? path.basename(filePath)
|
||||
|
||||
logger.info(`Uploading file to MinerU OSS: ${displayName} (${fileSize} bytes)`)
|
||||
|
||||
// https://mineru.net/apiManage/docs
|
||||
const response = await net.fetch(uploadUrl, {
|
||||
method: 'PUT',
|
||||
headers,
|
||||
body: new Uint8Array(fileBuffer)
|
||||
body: fileBuffer as unknown as BodyInit
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
// Clone the response to avoid consuming the body stream
|
||||
// 克隆 response 以避免消费 body stream
|
||||
const responseClone = response.clone()
|
||||
|
||||
try {
|
||||
@@ -392,20 +353,20 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
try {
|
||||
const result = await this.getExtractResults(batchId)
|
||||
|
||||
// Find the corresponding file result
|
||||
// 查找对应文件的处理结果
|
||||
const fileResult = result.extract_result.find((item) => item.file_name === fileName)
|
||||
if (!fileResult) {
|
||||
throw new Error(`File ${fileName} not found in batch results`)
|
||||
}
|
||||
|
||||
// Check the processing state
|
||||
// 检查处理状态
|
||||
if (fileResult.state === 'done' && fileResult.full_zip_url) {
|
||||
logger.info(`Processing completed for file: ${fileName}`)
|
||||
return fileResult
|
||||
} else if (fileResult.state === 'failed') {
|
||||
throw new Error(`Processing failed for file: ${fileName}, error: ${fileResult.err_msg}`)
|
||||
} else if (fileResult.state === 'running') {
|
||||
// Send progress updates
|
||||
// 发送进度更新
|
||||
if (fileResult.extract_progress) {
|
||||
const progress = Math.round(
|
||||
(fileResult.extract_progress.extracted_pages / fileResult.extract_progress.total_pages) * 100
|
||||
@@ -413,7 +374,7 @@ export default class MineruPreprocessProvider extends BasePreprocessProvider {
|
||||
await this.sendPreprocessProgress(sourceId, progress)
|
||||
logger.info(`File ${fileName} processing progress: ${progress}%`)
|
||||
} else {
|
||||
// If no detailed progress information is available, send a generic update
|
||||
// 如果没有具体进度信息,发送一个通用进度
|
||||
await this.sendPreprocessProgress(sourceId, 50)
|
||||
logger.info(`File ${fileName} is still processing...`)
|
||||
}
|
||||
|
||||
@@ -53,43 +53,18 @@ export default class OpenMineruPreprocessProvider extends BasePreprocessProvider
|
||||
}
|
||||
|
||||
private async validateFile(filePath: string): Promise<void> {
|
||||
// 第一阶段:检查文件大小(无需读取文件到内存)
|
||||
logger.info(`Validating PDF file: ${filePath}`)
|
||||
const stats = await fs.promises.stat(filePath)
|
||||
const fileSizeBytes = stats.size
|
||||
|
||||
// File size must be less than 200MB
|
||||
if (fileSizeBytes >= 200 * 1024 * 1024) {
|
||||
const fileSizeMB = Math.round(fileSizeBytes / (1024 * 1024))
|
||||
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 200MB`)
|
||||
}
|
||||
|
||||
// 第二阶段:检查页数(需要读取文件,带错误处理)
|
||||
const pdfBuffer = await fs.promises.readFile(filePath)
|
||||
|
||||
try {
|
||||
const doc = await this.readPdf(pdfBuffer)
|
||||
const doc = await this.readPdf(pdfBuffer)
|
||||
|
||||
// File page count must be less than 600 pages
|
||||
if (doc.numPages >= 600) {
|
||||
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 600 pages`)
|
||||
}
|
||||
|
||||
logger.info(`PDF validation passed: ${doc.numPages} pages, ${Math.round(fileSizeBytes / (1024 * 1024))}MB`)
|
||||
} catch (error: any) {
|
||||
// 如果是页数超限错误,直接抛出
|
||||
if (error.message.includes('exceeds the limit')) {
|
||||
throw error
|
||||
}
|
||||
|
||||
// PDF 解析失败,记录详细警告但允许继续处理
|
||||
logger.warn(
|
||||
`Failed to parse PDF structure (file may be corrupted or use non-standard format). ` +
|
||||
`Skipping page count validation. Will attempt to process with MinerU API. ` +
|
||||
`Error details: ${error.message}. ` +
|
||||
`Suggestion: If processing fails, try repairing the PDF using tools like Adobe Acrobat or online PDF repair services.`
|
||||
)
|
||||
// 不抛出错误,允许继续处理
|
||||
// File page count must be less than 600 pages
|
||||
if (doc.numPages >= 600) {
|
||||
throw new Error(`PDF page count (${doc.numPages}) exceeds the limit of 600 pages`)
|
||||
}
|
||||
// File size must be less than 200MB
|
||||
if (pdfBuffer.length >= 200 * 1024 * 1024) {
|
||||
const fileSizeMB = Math.round(pdfBuffer.length / (1024 * 1024))
|
||||
throw new Error(`PDF file size (${fileSizeMB}MB) exceeds the limit of 200MB`)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,8 +72,8 @@ export default class OpenMineruPreprocessProvider extends BasePreprocessProvider
|
||||
// Find the main file after extraction
|
||||
let finalPath = ''
|
||||
let finalName = file.origin_name.replace('.pdf', '.md')
|
||||
// Find the corresponding folder by file id
|
||||
outputPath = path.join(outputPath, file.id)
|
||||
// Find the corresponding folder by file name
|
||||
outputPath = path.join(outputPath, `${file.origin_name.replace('.pdf', '')}`)
|
||||
try {
|
||||
const files = fs.readdirSync(outputPath)
|
||||
|
||||
@@ -150,7 +125,7 @@ export default class OpenMineruPreprocessProvider extends BasePreprocessProvider
|
||||
formData.append('return_md', 'true')
|
||||
formData.append('response_format_zip', 'true')
|
||||
formData.append('files', fileBuffer, {
|
||||
filename: file.name
|
||||
filename: file.origin_name
|
||||
})
|
||||
|
||||
while (retries < maxRetries) {
|
||||
|
||||
@@ -396,13 +396,12 @@ class FileStorage {
|
||||
throw new Error(`Source file does not exist: ${filePath}`)
|
||||
}
|
||||
|
||||
// 确保目标目录存在
|
||||
// Ensure the destination directory exists
|
||||
const destDir = path.dirname(newPath)
|
||||
if (!fs.existsSync(destDir)) {
|
||||
await fs.promises.mkdir(destDir, { recursive: true })
|
||||
}
|
||||
|
||||
// 移动文件
|
||||
await fs.promises.rename(filePath, newPath)
|
||||
logger.debug(`File moved successfully: ${filePath} to ${newPath}`)
|
||||
} catch (error) {
|
||||
@@ -417,13 +416,12 @@ class FileStorage {
|
||||
throw new Error(`Source directory does not exist: ${dirPath}`)
|
||||
}
|
||||
|
||||
// 确保目标父目录存在
|
||||
// Ensure the parent directory of the destination exists
|
||||
const parentDir = path.dirname(newDirPath)
|
||||
if (!fs.existsSync(parentDir)) {
|
||||
await fs.promises.mkdir(parentDir, { recursive: true })
|
||||
}
|
||||
|
||||
// 移动目录
|
||||
await fs.promises.rename(dirPath, newDirPath)
|
||||
logger.debug(`Directory moved successfully: ${dirPath} to ${newDirPath}`)
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
import { type Client, createClient } from '@libsql/client'
|
||||
import { loggerService } from '@logger'
|
||||
import { mcpApiService } from '@main/apiServer/services/mcp'
|
||||
import { type ModelValidationError, validateModelId } from '@main/apiServer/utils'
|
||||
import type { AgentType, MCPTool, SlashCommand, Tool } from '@types'
|
||||
import { objectKeys } from '@types'
|
||||
import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
import { DatabaseManager } from './database/DatabaseManager'
|
||||
import { MigrationService } from './database/MigrationService'
|
||||
import * as schema from './database/schema'
|
||||
import { dbPath } from './drizzle.config'
|
||||
import { type AgentModelField, AgentModelValidationError } from './errors'
|
||||
import { builtinSlashCommands } from './services/claudecode/commands'
|
||||
import { builtinTools } from './services/claudecode/tools'
|
||||
@@ -14,16 +18,22 @@ import { builtinTools } from './services/claudecode/tools'
|
||||
const logger = loggerService.withContext('BaseService')
|
||||
|
||||
/**
|
||||
* Base service class providing shared utilities for all agent-related services.
|
||||
* Base service class providing shared database connection and utilities
|
||||
* for all agent-related services.
|
||||
*
|
||||
* Features:
|
||||
* - Database access through DatabaseManager singleton
|
||||
* - JSON field serialization/deserialization
|
||||
* - Path validation and creation
|
||||
* - Model validation
|
||||
* - MCP tools and slash commands listing
|
||||
* - Programmatic schema management (no CLI dependencies)
|
||||
* - Automatic table creation and migration
|
||||
* - Schema version tracking and compatibility checks
|
||||
* - Transaction-based operations for safety
|
||||
* - Development vs production mode handling
|
||||
* - Connection retry logic with exponential backoff
|
||||
*/
|
||||
export abstract class BaseService {
|
||||
protected static client: Client | null = null
|
||||
protected static db: LibSQLDatabase<typeof schema> | null = null
|
||||
protected static isInitialized = false
|
||||
protected static initializationPromise: Promise<void> | null = null
|
||||
protected jsonFields: string[] = [
|
||||
'tools',
|
||||
'mcps',
|
||||
@@ -33,6 +43,23 @@ export abstract class BaseService {
|
||||
'slash_commands'
|
||||
]
|
||||
|
||||
/**
|
||||
* Initialize database with retry logic and proper error handling
|
||||
*/
|
||||
protected static async initialize(): Promise<void> {
|
||||
// Return existing initialization if in progress
|
||||
if (BaseService.initializationPromise) {
|
||||
return BaseService.initializationPromise
|
||||
}
|
||||
|
||||
if (BaseService.isInitialized) {
|
||||
return
|
||||
}
|
||||
|
||||
BaseService.initializationPromise = BaseService.performInitialization()
|
||||
return BaseService.initializationPromise
|
||||
}
|
||||
|
||||
public async listMcpTools(agentType: AgentType, ids?: string[]): Promise<Tool[]> {
|
||||
const tools: Tool[] = []
|
||||
if (agentType === 'claude-code') {
|
||||
@@ -72,13 +99,78 @@ export abstract class BaseService {
|
||||
return []
|
||||
}
|
||||
|
||||
/**
|
||||
* Get database instance
|
||||
* Automatically waits for initialization to complete
|
||||
*/
|
||||
protected async getDatabase() {
|
||||
const dbManager = await DatabaseManager.getInstance()
|
||||
return dbManager.getDatabase()
|
||||
private static async performInitialization(): Promise<void> {
|
||||
const maxRetries = 3
|
||||
let lastError: Error
|
||||
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
||||
try {
|
||||
logger.info(`Initializing Agent database at: ${dbPath} (attempt ${attempt}/${maxRetries})`)
|
||||
|
||||
// Ensure the database directory exists
|
||||
const dbDir = path.dirname(dbPath)
|
||||
if (!fs.existsSync(dbDir)) {
|
||||
logger.info(`Creating database directory: ${dbDir}`)
|
||||
fs.mkdirSync(dbDir, { recursive: true })
|
||||
}
|
||||
|
||||
BaseService.client = createClient({
|
||||
url: `file:${dbPath}`
|
||||
})
|
||||
|
||||
BaseService.db = drizzle(BaseService.client, { schema })
|
||||
|
||||
// Run database migrations
|
||||
const migrationService = new MigrationService(BaseService.db, BaseService.client)
|
||||
await migrationService.runMigrations()
|
||||
|
||||
BaseService.isInitialized = true
|
||||
logger.info('Agent database initialized successfully')
|
||||
return
|
||||
} catch (error) {
|
||||
lastError = error as Error
|
||||
logger.warn(`Database initialization attempt ${attempt} failed:`, lastError)
|
||||
|
||||
// Clean up on failure
|
||||
if (BaseService.client) {
|
||||
try {
|
||||
BaseService.client.close()
|
||||
} catch (closeError) {
|
||||
logger.warn('Failed to close client during cleanup:', closeError as Error)
|
||||
}
|
||||
}
|
||||
BaseService.client = null
|
||||
BaseService.db = null
|
||||
|
||||
// Wait before retrying (exponential backoff)
|
||||
if (attempt < maxRetries) {
|
||||
const delay = Math.pow(2, attempt) * 1000 // 2s, 4s, 8s
|
||||
logger.info(`Retrying in ${delay}ms...`)
|
||||
await new Promise((resolve) => setTimeout(resolve, delay))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// All retries failed
|
||||
BaseService.initializationPromise = null
|
||||
logger.error('Failed to initialize Agent database after all retries:', lastError!)
|
||||
throw lastError!
|
||||
}
|
||||
|
||||
protected ensureInitialized(): void {
|
||||
if (!BaseService.isInitialized || !BaseService.db || !BaseService.client) {
|
||||
throw new Error('Database not initialized. Call initialize() first.')
|
||||
}
|
||||
}
|
||||
|
||||
protected get database(): LibSQLDatabase<typeof schema> {
|
||||
this.ensureInitialized()
|
||||
return BaseService.db!
|
||||
}
|
||||
|
||||
protected get rawClient(): Client {
|
||||
this.ensureInitialized()
|
||||
return BaseService.client!
|
||||
}
|
||||
|
||||
protected serializeJsonFields(data: any): any {
|
||||
@@ -190,7 +282,7 @@ export abstract class BaseService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate agent model configuration
|
||||
* Force re-initialization (for development/testing)
|
||||
*/
|
||||
protected async validateAgentModels(
|
||||
agentType: AgentType,
|
||||
@@ -231,4 +323,22 @@ export abstract class BaseService {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static async reinitialize(): Promise<void> {
|
||||
BaseService.isInitialized = false
|
||||
BaseService.initializationPromise = null
|
||||
|
||||
if (BaseService.client) {
|
||||
try {
|
||||
BaseService.client.close()
|
||||
} catch (error) {
|
||||
logger.warn('Failed to close client during reinitialize:', error as Error)
|
||||
}
|
||||
}
|
||||
|
||||
BaseService.client = null
|
||||
BaseService.db = null
|
||||
|
||||
await BaseService.initialize()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,156 +0,0 @@
|
||||
import { type Client, createClient } from '@libsql/client'
|
||||
import { loggerService } from '@logger'
|
||||
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
|
||||
import { drizzle } from 'drizzle-orm/libsql'
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
import { dbPath } from '../drizzle.config'
|
||||
import { MigrationService } from './MigrationService'
|
||||
import * as schema from './schema'
|
||||
|
||||
const logger = loggerService.withContext('DatabaseManager')
|
||||
|
||||
/**
|
||||
* Database initialization state
|
||||
*/
|
||||
enum InitState {
|
||||
INITIALIZING = 'initializing',
|
||||
INITIALIZED = 'initialized',
|
||||
FAILED = 'failed'
|
||||
}
|
||||
|
||||
/**
|
||||
* DatabaseManager - Singleton class for managing libsql database connections
|
||||
*
|
||||
* Responsibilities:
|
||||
* - Single source of truth for database connection
|
||||
* - Thread-safe initialization with state management
|
||||
* - Automatic migration handling
|
||||
* - Safe connection cleanup
|
||||
* - Error recovery and retry logic
|
||||
* - Windows platform compatibility fixes
|
||||
*/
|
||||
export class DatabaseManager {
|
||||
private static instance: DatabaseManager | null = null
|
||||
|
||||
private client: Client | null = null
|
||||
private db: LibSQLDatabase<typeof schema> | null = null
|
||||
private state: InitState = InitState.INITIALIZING
|
||||
|
||||
/**
|
||||
* Get the singleton instance (database initialization starts automatically)
|
||||
*/
|
||||
public static async getInstance(): Promise<DatabaseManager> {
|
||||
if (DatabaseManager.instance) {
|
||||
return DatabaseManager.instance
|
||||
}
|
||||
|
||||
const instance = new DatabaseManager()
|
||||
await instance.initialize()
|
||||
DatabaseManager.instance = instance
|
||||
|
||||
return instance
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform the actual initialization
|
||||
*/
|
||||
public async initialize(): Promise<void> {
|
||||
if (this.state === InitState.INITIALIZED) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
logger.info(`Initializing database at: ${dbPath}`)
|
||||
|
||||
// Ensure database directory exists
|
||||
const dbDir = path.dirname(dbPath)
|
||||
if (!fs.existsSync(dbDir)) {
|
||||
logger.info(`Creating database directory: ${dbDir}`)
|
||||
fs.mkdirSync(dbDir, { recursive: true })
|
||||
}
|
||||
|
||||
// Check if database file is corrupted (Windows specific check)
|
||||
if (fs.existsSync(dbPath)) {
|
||||
const stats = fs.statSync(dbPath)
|
||||
if (stats.size === 0) {
|
||||
logger.warn('Database file is empty, removing corrupted file')
|
||||
fs.unlinkSync(dbPath)
|
||||
}
|
||||
}
|
||||
|
||||
// Create client with platform-specific options
|
||||
this.client = createClient({
|
||||
url: `file:${dbPath}`,
|
||||
// intMode: 'number' helps avoid some Windows compatibility issues
|
||||
intMode: 'number'
|
||||
})
|
||||
|
||||
// Create drizzle instance
|
||||
this.db = drizzle(this.client, { schema })
|
||||
|
||||
// Run migrations
|
||||
const migrationService = new MigrationService(this.db, this.client)
|
||||
await migrationService.runMigrations()
|
||||
|
||||
this.state = InitState.INITIALIZED
|
||||
logger.info('Database initialized successfully')
|
||||
} catch (error) {
|
||||
const err = error as Error
|
||||
logger.error('Database initialization failed:', {
|
||||
error: err.message,
|
||||
stack: err.stack
|
||||
})
|
||||
|
||||
// Clean up failed initialization
|
||||
this.cleanupFailedInit()
|
||||
|
||||
// Set failed state
|
||||
this.state = InitState.FAILED
|
||||
throw new Error(`Database initialization failed: ${err.message || 'Unknown error'}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up after failed initialization
|
||||
*/
|
||||
private cleanupFailedInit(): void {
|
||||
if (this.client) {
|
||||
try {
|
||||
// On Windows, closing a partially initialized client can crash
|
||||
// Wrap in try-catch and ignore errors during cleanup
|
||||
this.client.close()
|
||||
} catch (error) {
|
||||
logger.warn('Failed to close client during cleanup:', error as Error)
|
||||
}
|
||||
}
|
||||
this.client = null
|
||||
this.db = null
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the database instance
|
||||
* Automatically waits for initialization to complete
|
||||
* @throws Error if database initialization failed
|
||||
*/
|
||||
public getDatabase(): LibSQLDatabase<typeof schema> {
|
||||
return this.db!
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the raw client (for advanced operations)
|
||||
* Automatically waits for initialization to complete
|
||||
* @throws Error if database initialization failed
|
||||
*/
|
||||
public async getClient(): Promise<Client> {
|
||||
return this.client!
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if database is initialized
|
||||
*/
|
||||
public isInitialized(): boolean {
|
||||
return this.state === InitState.INITIALIZED
|
||||
}
|
||||
}
|
||||
@@ -7,14 +7,8 @@
|
||||
* Schema evolution is handled by Drizzle Kit migrations.
|
||||
*/
|
||||
|
||||
// Database Manager (Singleton)
|
||||
export * from './DatabaseManager'
|
||||
|
||||
// Drizzle ORM schemas
|
||||
export * from './schema'
|
||||
|
||||
// Repository helpers
|
||||
export * from './sessionMessageRepository'
|
||||
|
||||
// Migration Service
|
||||
export * from './MigrationService'
|
||||
|
||||
@@ -15,16 +15,26 @@ import { sessionMessagesTable } from './schema'
|
||||
|
||||
const logger = loggerService.withContext('AgentMessageRepository')
|
||||
|
||||
type TxClient = any
|
||||
|
||||
export type PersistUserMessageParams = AgentMessageUserPersistPayload & {
|
||||
sessionId: string
|
||||
agentSessionId?: string
|
||||
tx?: TxClient
|
||||
}
|
||||
|
||||
export type PersistAssistantMessageParams = AgentMessageAssistantPersistPayload & {
|
||||
sessionId: string
|
||||
agentSessionId: string
|
||||
tx?: TxClient
|
||||
}
|
||||
|
||||
type PersistExchangeParams = AgentMessagePersistExchangePayload & {
|
||||
tx?: TxClient
|
||||
}
|
||||
|
||||
type PersistExchangeResult = AgentMessagePersistExchangeResult
|
||||
|
||||
class AgentMessageRepository extends BaseService {
|
||||
private static instance: AgentMessageRepository | null = null
|
||||
|
||||
@@ -77,13 +87,17 @@ class AgentMessageRepository extends BaseService {
|
||||
return deserialized
|
||||
}
|
||||
|
||||
private getWriter(tx?: TxClient): TxClient {
|
||||
return tx ?? this.database
|
||||
}
|
||||
|
||||
private async findExistingMessageRow(
|
||||
writer: TxClient,
|
||||
sessionId: string,
|
||||
role: string,
|
||||
messageId: string
|
||||
): Promise<SessionMessageRow | null> {
|
||||
const database = await this.getDatabase()
|
||||
const candidateRows: SessionMessageRow[] = await database
|
||||
const candidateRows: SessionMessageRow[] = await writer
|
||||
.select()
|
||||
.from(sessionMessagesTable)
|
||||
.where(and(eq(sessionMessagesTable.session_id, sessionId), eq(sessionMessagesTable.role, role)))
|
||||
@@ -108,7 +122,10 @@ class AgentMessageRepository extends BaseService {
|
||||
private async upsertMessage(
|
||||
params: PersistUserMessageParams | PersistAssistantMessageParams
|
||||
): Promise<AgentSessionMessageEntity> {
|
||||
const { sessionId, agentSessionId = '', payload, metadata, createdAt } = params
|
||||
await AgentMessageRepository.initialize()
|
||||
this.ensureInitialized()
|
||||
|
||||
const { sessionId, agentSessionId = '', payload, metadata, createdAt, tx } = params
|
||||
|
||||
if (!payload?.message?.role) {
|
||||
throw new Error('Message payload missing role')
|
||||
@@ -118,18 +135,18 @@ class AgentMessageRepository extends BaseService {
|
||||
throw new Error('Message payload missing id')
|
||||
}
|
||||
|
||||
const database = await this.getDatabase()
|
||||
const writer = this.getWriter(tx)
|
||||
const now = createdAt ?? payload.message.createdAt ?? new Date().toISOString()
|
||||
const serializedPayload = this.serializeMessage(payload)
|
||||
const serializedMetadata = this.serializeMetadata(metadata)
|
||||
|
||||
const existingRow = await this.findExistingMessageRow(sessionId, payload.message.role, payload.message.id)
|
||||
const existingRow = await this.findExistingMessageRow(writer, sessionId, payload.message.role, payload.message.id)
|
||||
|
||||
if (existingRow) {
|
||||
const metadataToPersist = serializedMetadata ?? existingRow.metadata ?? undefined
|
||||
const agentSessionToPersist = agentSessionId || existingRow.agent_session_id || ''
|
||||
|
||||
await database
|
||||
await writer
|
||||
.update(sessionMessagesTable)
|
||||
.set({
|
||||
content: serializedPayload,
|
||||
@@ -158,7 +175,7 @@ class AgentMessageRepository extends BaseService {
|
||||
updated_at: now
|
||||
}
|
||||
|
||||
const [saved] = await database.insert(sessionMessagesTable).values(insertData).returning()
|
||||
const [saved] = await writer.insert(sessionMessagesTable).values(insertData).returning()
|
||||
|
||||
return this.deserialize(saved)
|
||||
}
|
||||
@@ -171,38 +188,49 @@ class AgentMessageRepository extends BaseService {
|
||||
return this.upsertMessage(params)
|
||||
}
|
||||
|
||||
async persistExchange(params: AgentMessagePersistExchangePayload): Promise<AgentMessagePersistExchangeResult> {
|
||||
async persistExchange(params: PersistExchangeParams): Promise<PersistExchangeResult> {
|
||||
await AgentMessageRepository.initialize()
|
||||
this.ensureInitialized()
|
||||
|
||||
const { sessionId, agentSessionId, user, assistant } = params
|
||||
|
||||
const exchangeResult: AgentMessagePersistExchangeResult = {}
|
||||
const result = await this.database.transaction(async (tx) => {
|
||||
const exchangeResult: PersistExchangeResult = {}
|
||||
|
||||
if (user?.payload) {
|
||||
exchangeResult.userMessage = await this.persistUserMessage({
|
||||
sessionId,
|
||||
agentSessionId,
|
||||
payload: user.payload,
|
||||
metadata: user.metadata,
|
||||
createdAt: user.createdAt
|
||||
})
|
||||
}
|
||||
if (user?.payload) {
|
||||
exchangeResult.userMessage = await this.persistUserMessage({
|
||||
sessionId,
|
||||
agentSessionId,
|
||||
payload: user.payload,
|
||||
metadata: user.metadata,
|
||||
createdAt: user.createdAt,
|
||||
tx
|
||||
})
|
||||
}
|
||||
|
||||
if (assistant?.payload) {
|
||||
exchangeResult.assistantMessage = await this.persistAssistantMessage({
|
||||
sessionId,
|
||||
agentSessionId,
|
||||
payload: assistant.payload,
|
||||
metadata: assistant.metadata,
|
||||
createdAt: assistant.createdAt
|
||||
})
|
||||
}
|
||||
if (assistant?.payload) {
|
||||
exchangeResult.assistantMessage = await this.persistAssistantMessage({
|
||||
sessionId,
|
||||
agentSessionId,
|
||||
payload: assistant.payload,
|
||||
metadata: assistant.metadata,
|
||||
createdAt: assistant.createdAt,
|
||||
tx
|
||||
})
|
||||
}
|
||||
|
||||
return exchangeResult
|
||||
return exchangeResult
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
async getSessionHistory(sessionId: string): Promise<AgentPersistedMessage[]> {
|
||||
await AgentMessageRepository.initialize()
|
||||
this.ensureInitialized()
|
||||
|
||||
try {
|
||||
const database = await this.getDatabase()
|
||||
const rows = await database
|
||||
const rows = await this.database
|
||||
.select()
|
||||
.from(sessionMessagesTable)
|
||||
.where(eq(sessionMessagesTable.session_id, sessionId))
|
||||
|
||||
@@ -32,8 +32,14 @@ export class AgentService extends BaseService {
|
||||
return AgentService.instance
|
||||
}
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
await BaseService.initialize()
|
||||
}
|
||||
|
||||
// Agent Methods
|
||||
async createAgent(req: CreateAgentRequest): Promise<CreateAgentResponse> {
|
||||
this.ensureInitialized()
|
||||
|
||||
const id = `agent_${Date.now()}_${Math.random().toString(36).substring(2, 11)}`
|
||||
const now = new Date().toISOString()
|
||||
|
||||
@@ -69,9 +75,8 @@ export class AgentService extends BaseService {
|
||||
updated_at: now
|
||||
}
|
||||
|
||||
const database = await this.getDatabase()
|
||||
await database.insert(agentsTable).values(insertData)
|
||||
const result = await database.select().from(agentsTable).where(eq(agentsTable.id, id)).limit(1)
|
||||
await this.database.insert(agentsTable).values(insertData)
|
||||
const result = await this.database.select().from(agentsTable).where(eq(agentsTable.id, id)).limit(1)
|
||||
if (!result[0]) {
|
||||
throw new Error('Failed to create agent')
|
||||
}
|
||||
@@ -81,8 +86,9 @@ export class AgentService extends BaseService {
|
||||
}
|
||||
|
||||
async getAgent(id: string): Promise<GetAgentResponse | null> {
|
||||
const database = await this.getDatabase()
|
||||
const result = await database.select().from(agentsTable).where(eq(agentsTable.id, id)).limit(1)
|
||||
this.ensureInitialized()
|
||||
|
||||
const result = await this.database.select().from(agentsTable).where(eq(agentsTable.id, id)).limit(1)
|
||||
|
||||
if (!result[0]) {
|
||||
return null
|
||||
@@ -112,9 +118,9 @@ export class AgentService extends BaseService {
|
||||
}
|
||||
|
||||
async listAgents(options: ListOptions = {}): Promise<{ agents: AgentEntity[]; total: number }> {
|
||||
// Build query with pagination
|
||||
const database = await this.getDatabase()
|
||||
const totalResult = await database.select({ count: count() }).from(agentsTable)
|
||||
this.ensureInitialized() // Build query with pagination
|
||||
|
||||
const totalResult = await this.database.select({ count: count() }).from(agentsTable)
|
||||
|
||||
const sortBy = options.sortBy || 'created_at'
|
||||
const orderBy = options.orderBy || 'desc'
|
||||
@@ -122,7 +128,7 @@ export class AgentService extends BaseService {
|
||||
const sortField = agentsTable[sortBy]
|
||||
const orderFn = orderBy === 'asc' ? asc : desc
|
||||
|
||||
const baseQuery = database.select().from(agentsTable).orderBy(orderFn(sortField))
|
||||
const baseQuery = this.database.select().from(agentsTable).orderBy(orderFn(sortField))
|
||||
|
||||
const result =
|
||||
options.limit !== undefined
|
||||
@@ -145,6 +151,8 @@ export class AgentService extends BaseService {
|
||||
updates: UpdateAgentRequest,
|
||||
options: { replace?: boolean } = {}
|
||||
): Promise<UpdateAgentResponse | null> {
|
||||
this.ensureInitialized()
|
||||
|
||||
// Check if agent exists
|
||||
const existing = await this.getAgent(id)
|
||||
if (!existing) {
|
||||
@@ -187,21 +195,22 @@ export class AgentService extends BaseService {
|
||||
}
|
||||
}
|
||||
|
||||
const database = await this.getDatabase()
|
||||
await database.update(agentsTable).set(updateData).where(eq(agentsTable.id, id))
|
||||
await this.database.update(agentsTable).set(updateData).where(eq(agentsTable.id, id))
|
||||
return await this.getAgent(id)
|
||||
}
|
||||
|
||||
async deleteAgent(id: string): Promise<boolean> {
|
||||
const database = await this.getDatabase()
|
||||
const result = await database.delete(agentsTable).where(eq(agentsTable.id, id))
|
||||
this.ensureInitialized()
|
||||
|
||||
const result = await this.database.delete(agentsTable).where(eq(agentsTable.id, id))
|
||||
|
||||
return result.rowsAffected > 0
|
||||
}
|
||||
|
||||
async agentExists(id: string): Promise<boolean> {
|
||||
const database = await this.getDatabase()
|
||||
const result = await database
|
||||
this.ensureInitialized()
|
||||
|
||||
const result = await this.database
|
||||
.select({ id: agentsTable.id })
|
||||
.from(agentsTable)
|
||||
.where(eq(agentsTable.id, id))
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user