Compare commits
5 Commits
fix/input-...feat/opena

| Author | SHA1 | Date |
|---|---|---|
|  | a1f61b0d2e |  |
|  | 155dc1c578 |  |
|  | 5e33a91154 |  |
|  | 0d60b34c17 |  |
|  | 60a89998fe |  |
@@ -1,6 +1,7 @@
 import {
   isFunctionCallingModel,
   isNotSupportTemperatureAndTopP,
+  isOpenAIDeepResearchModel,
   isOpenAIModel,
   isSupportedFlexServiceTier
 } from '@renderer/config/models'

@@ -206,6 +207,9 @@ export abstract class BaseApiClient<
     if (isSupportedFlexServiceTier(model)) {
       return 15 * 1000 * 60
     }
+    if (isOpenAIDeepResearchModel(model)) {
+      return 60 * 1000 * 60
+    }
     return defaultTimeout
   }
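For reference, the added branch raises the request timeout for deep-research models: 60 * 1000 * 60 = 3,600,000 ms (one hour), compared with 15 * 1000 * 60 = 900,000 ms (15 minutes) for the flex service tier.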
@@ -2,6 +2,7 @@ import { GenericChunk } from '@renderer/aiCore/middleware/schemas'
 import { CompletionsContext } from '@renderer/aiCore/middleware/types'
 import {
   isOpenAIChatCompletionOnlyModel,
+  isOpenAIDeepResearchModel,
   isOpenAILLMModel,
   isSupportedReasoningEffortOpenAIModel,
   isVisionModel

@@ -406,7 +407,7 @@ export class OpenAIResponseAPIClient extends OpenAIBaseClient<
       reqMessages = [systemMessage, ...userMessage].filter(Boolean) as OpenAI.Responses.EasyInputMessage[]
     }

-    if (enableWebSearch) {
+    if (enableWebSearch || isOpenAIDeepResearchModel(model)) {
       tools.push({
         type: 'web_search_preview'
       })
src/renderer/src/components/DeepResearchCard.tsx (new file, 184 lines)
@@ -0,0 +1,184 @@
import { getTopicByMessageId } from '@renderer/hooks/useMessageOperations'
import Markdown from '@renderer/pages/home/Markdown/Markdown'
import { useAppDispatch } from '@renderer/store'
import { retryDeepResearchClarificationThunk } from '@renderer/store/thunk/messageThunk'
import { DeepResearchMessageBlock, MessageBlockStatus } from '@renderer/types/newMessage'
import { deepResearchConfirmation } from '@renderer/utils/deepResearchConfirmation'
import { Button, Input } from 'antd'
import { Brain, RotateCcw } from 'lucide-react'
import { FC, useState } from 'react'
import { useTranslation } from 'react-i18next'
import styled from 'styled-components'

import SvgSpinners180Ring from './Icons/SvgSpinners180Ring'

const { TextArea } = Input

interface DeepResearchCardProps {
  block: DeepResearchMessageBlock
}

const DeepResearchCard: FC<DeepResearchCardProps> = ({ block }) => {
  const { t } = useTranslation()
  const dispatch = useAppDispatch()
  const [isRetrying, setIsRetrying] = useState(false)
  const [userSupplementInfo, setUserSupplementInfo] = useState('')

  const {
    metadata: { deepResearchState }
  } = block
  const isWaitingForContinue = deepResearchState.phase === 'waiting_confirmation'

  const onContinueResearch = () => {
    try {
      const success = deepResearchConfirmation.triggerResolver(block.id, userSupplementInfo)
      if (!success) {
        console.error('[continueDeepResearchThunk] No continue resolver found for message', block.id)
        return
      }
      // The resolver hands off to fetchDeepResearch's onResearchStarted, which drives the research phase
    } catch (error) {
      console.error('[continueDeepResearchThunk] Error:', error)
    }
  }

  const onRetryResearch = async () => {
    try {
      setIsRetrying(true)
      const topic = await getTopicByMessageId(block.messageId)
      if (!topic) {
        console.error('[onRetryResearch] Topic not found for message', block.messageId)
        return
      }
      // Clear the supplementary info when retrying
      setUserSupplementInfo('')
      dispatch(retryDeepResearchClarificationThunk(topic.id, block.messageId))
    } catch (error) {
      console.error('[onRetryResearch] Error:', error)
    } finally {
      setIsRetrying(false)
    }
  }

  return (
    <>
      {block.status === MessageBlockStatus.PENDING ? (
        <SvgSpinners180Ring color="var(--color-text-2)" style={{ marginBottom: 15 }} />
      ) : (
        <CardContainer>
          <ClarificationSection>
            <SectionTitle>
              <Brain size={16} />
              {t('research.clarification.title')}
            </SectionTitle>
            {block.content ? (
              <Markdown block={block} />
            ) : deepResearchState.phase === 'clarification' && block.status === MessageBlockStatus.STREAMING ? (
              <SvgSpinners180Ring color="var(--color-text-2)" style={{ marginBottom: 15 }} />
            ) : null}
          </ClarificationSection>

          {isWaitingForContinue && (
            <ActionSection>
              <ActionTitle>{t('research.ready_to_start')}</ActionTitle>

              <SupplementSection>
                <SupplementLabel>{t('research.supplement_info_label')}</SupplementLabel>
                <StyledTextArea
                  value={userSupplementInfo}
                  onChange={(e) => setUserSupplementInfo(e.target.value)}
                  placeholder={t('research.supplement_info_placeholder')}
                  rows={3}
                  maxLength={500}
                />
              </SupplementSection>

              <ButtonGroup>
                <RetryButton
                  type="default"
                  icon={<RotateCcw size={16} />}
                  onClick={onRetryResearch}
                  loading={isRetrying}
                  disabled={isRetrying}>
                  {t('research.retry')}
                </RetryButton>
                <ContinueButton type="primary" icon={<Brain size={16} />} onClick={onContinueResearch}>
                  {t('research.continue_research')}
                </ContinueButton>
              </ButtonGroup>
            </ActionSection>
          )}
        </CardContainer>
      )}
    </>
  )
}

const CardContainer = styled.div`
  border: 1px solid var(--color-border);
  border-radius: 8px;
  background: var(--color-background);
  margin: 12px 0;
  overflow: hidden;
`

const ClarificationSection = styled.div`
  padding: 16px;
  border-bottom: 1px solid var(--color-border-soft);
`

const SectionTitle = styled.div`
  display: flex;
  align-items: center;
  gap: 8px;
  font-size: 14px;
  font-weight: 500;
  color: var(--color-text);
  margin-bottom: 12px;
`

const ActionSection = styled.div`
  padding: 16px;
  background: var(--color-background-soft);
`

const ActionTitle = styled.div`
  font-size: 14px;
  font-weight: 500;
  color: var(--color-text);
  margin-bottom: 12px;
`

const ButtonGroup = styled.div`
  display: flex;
  gap: 8px;
`

const RetryButton = styled(Button)`
  display: flex;
  align-items: center;
  gap: 4px;
`

const ContinueButton = styled(Button)`
  display: flex;
  align-items: center;
  gap: 4px;
`

const SupplementSection = styled.div`
  margin-bottom: 12px;
`

const SupplementLabel = styled.div`
  font-size: 14px;
  font-weight: 500;
  color: var(--color-text);
  margin-bottom: 8px;
`

const StyledTextArea = styled(TextArea)`
  width: 100%;
`

export default DeepResearchCard
@@ -2466,6 +2466,16 @@ export function isOpenAIWebSearchModel(model: Model): boolean {
   )
 }

+export function isOpenAIDeepResearchModel(model?: Model): boolean {
+  if (!model) {
+    return false
+  }
+  if (!isOpenAIModel(model)) {
+    return false
+  }
+  return model.id.includes('deep-research')
+}
+
 export function isSupportedThinkingTokenModel(model?: Model): boolean {
   if (!model) {
     return false
@@ -456,3 +456,82 @@ Example: [nytimes.com](https://nytimes.com/some-page).
If you have multiple citations, please directly list them like this:
[www.nytimes.com](https://nytimes.com/some-page)[www.bbc.com](https://bbc.com/some-page)
`

export const DEEP_RESEARCH_CLARIFICATION_PROMPT = `
You are talking to a user who is asking for a research task to be conducted. Your job is to gather more information from the user to successfully complete the task.

GUIDELINES:
- Be concise while gathering all necessary information.
- Make sure to gather all the information needed to carry out the research task in a concise, well-structured manner.
- Use bullet points or numbered lists if appropriate for clarity.
- Don't ask for unnecessary information, or information that the user has already provided.
- Use the user's language to ask questions.

IMPORTANT: Do NOT conduct any research yourself, just gather information that will be given to a researcher to conduct the research task.
`

export const DEEP_RESEARCH_PROMPT_REWRITE_PROMPT = `
You will be given a research task by a user. Your job is to produce a set of
instructions for a researcher that will complete the task. Do NOT complete the
task yourself, just provide instructions on how to complete it.

GUIDELINES:
1. **Maximize Specificity and Detail**
   - Include all known user preferences and explicitly list key attributes or
     dimensions to consider.
   - It is of utmost importance that all details from the user are included in
     the instructions.

2. **Fill in Unstated But Necessary Dimensions as Open-Ended**
   - If certain attributes are essential for a meaningful output but the user
     has not provided them, explicitly state that they are open-ended or default
     to no specific constraint.

3. **Avoid Unwarranted Assumptions**
   - If the user has not provided a particular detail, do not invent one.
   - Instead, state the lack of specification and guide the researcher to treat
     it as flexible or accept all possible options.

4. **Use the First Person**
   - Phrase the request from the perspective of the user.

5. **Tables**
   - If you determine that including a table will help illustrate, organize, or
     enhance the information in the research output, you must explicitly request
     that the researcher provide them.

   Examples:
   - Product Comparison (Consumer): When comparing different smartphone models,
     request a table listing each model's features, price, and consumer ratings
     side-by-side.
   - Project Tracking (Work): When outlining project deliverables, create a table
     showing tasks, deadlines, responsible team members, and status updates.
   - Budget Planning (Consumer): When creating a personal or household budget,
     request a table detailing income sources, monthly expenses, and savings goals.
   - Competitor Analysis (Work): When evaluating competitor products, request a
     table with key metrics, such as market share, pricing, and main differentiators.

6. **Headers and Formatting**
   - You should include the expected output format in the prompt.
   - If the user is asking for content that would be best returned in a
     structured format (e.g. a report, plan, etc.), ask the researcher to format
     as a report with the appropriate headers and formatting that ensures clarity
     and structure.

7. **Language**
   - If the user input is in a language other than English, tell the researcher
     to respond in this language, unless the user query explicitly asks for the
     response in a different language.

8. **Sources**
   - If specific sources should be prioritized, specify them in the prompt.
   - For product and travel research, prefer linking directly to official or
     primary websites (e.g., official brand sites, manufacturer pages, or
     reputable e-commerce platforms like Amazon for user reviews) rather than
     aggregator sites or SEO-heavy blogs.
   - For academic or scientific queries, prefer linking directly to the original
     paper or official journal publication rather than survey papers or secondary
     summaries.
   - If the query is in a specific language, prioritize sources published in that
     language.
`
@@ -16,8 +16,8 @@ import {
   removeBlocksThunk,
   resendMessageThunk,
   resendUserMessageWithEditThunk,
-  updateMessageAndBlocksThunk,
-  updateTranslationBlockThunk
+  updateBlockThunk,
+  updateMessageAndBlocksThunk
 } from '@renderer/store/thunk/messageThunk'
 import type { Assistant, LanguageCode, Model, Topic } from '@renderer/types'
 import type { Message, MessageBlock } from '@renderer/types/newMessage'

@@ -26,6 +26,8 @@ import { abortCompletion } from '@renderer/utils/abortController'
 import { throttle } from 'lodash'
 import { useCallback } from 'react'

+import { TopicManager } from './useTopic'
+
 const selectMessagesState = (state: RootState) => state.messages

 export const selectNewTopicLoading = createSelector(

@@ -232,7 +234,7 @@ export function useMessageOperations(topic: Topic) {
         }
       }
       dispatch(updateOneBlock({ id: blockId, changes }))
-      await dispatch(updateTranslationBlockThunk(blockId, '', false))
+      await dispatch(updateBlockThunk(blockId, '', false))
     } else {
       blockId = await dispatch(
         initiateTranslationThunk(messageId, topic.id, targetLanguage, sourceBlockId, sourceLanguage)

@@ -246,7 +248,7 @@ export function useMessageOperations(topic: Topic) {

     return throttle(
       (accumulatedText: string, isComplete: boolean = false) => {
-        dispatch(updateTranslationBlockThunk(blockId!, accumulatedText, isComplete))
+        dispatch(updateBlockThunk(blockId!, accumulatedText, isComplete))
       },
       200,
       { leading: true, trailing: true }

@@ -452,3 +454,18 @@ export const useTopicMessages = (topicId: string) => {
 export const useTopicLoading = (topic: Topic) => {
   return useAppSelector((state) => selectNewTopicLoading(state, topic.id))
 }

+export const getTopicByMessageId = async (messageId: string) => {
+  const state = store.getState()
+  const message = state.messages.entities[messageId]
+  if (!message) {
+    return null
+  }
+  const topicId = message.topicId
+  console.log('[getTopicByMessageId] topicId', topicId)
+  const topic = await TopicManager.getTopic(topicId)
+  if (!topic) {
+    return null
+  }
+  return topic
+}
@@ -2459,6 +2459,16 @@
     "show_window": "Show Window",
     "visualization": "Visualization"
   },
+  "research": {
+    "clarification": {
+      "title": "Research Clarification"
+    },
+    "ready_to_start": "Ready to start deep research",
+    "retry": "Retry Clarification",
+    "continue_research": "Start Research",
+    "supplement_info_label": "Additional Information (Optional)",
+    "supplement_info_placeholder": "You can provide additional information here to help us better understand your requirements..."
+  },
   "memory": {
     "title": "Memories",
     "actions": "Actions",

@@ -2459,6 +2459,16 @@
     "show_window": "ウィンドウを表示",
     "visualization": "可視化"
   },
+  "research": {
+    "clarification": {
+      "title": "研究の明確化"
+    },
+    "ready_to_start": "深い研究を開始する準備ができました",
+    "retry": "再明確化",
+    "continue_research": "研究を続ける",
+    "supplement_info_label": "補足情報 (任意)",
+    "supplement_info_placeholder": "ここに補足情報を提供して、より良く理解してください..."
+  },
   "memory": {
     "title": "グローバルメモリ",
     "add_memory": "メモリーを追加",

@@ -2459,6 +2459,16 @@
     "show_window": "Показать окно",
     "visualization": "Визуализация"
   },
+  "research": {
+    "clarification": {
+      "title": "Уточнение исследования"
+    },
+    "ready_to_start": "Готов к началу глубокого исследования",
+    "retry": "Повторное уточнение",
+    "continue_research": "Продолжить исследование",
+    "supplement_info_label": "Дополнительная информация (необязательно)",
+    "supplement_info_placeholder": "Вы можете предоставить дополнительную информацию здесь, чтобы помочь нам лучше понять ваши требования..."
+  },
   "memory": {
     "title": "Глобальная память",
     "add_memory": "Добавить память",

@@ -2459,6 +2459,16 @@
     "show_window": "显示窗口",
     "visualization": "可视化"
   },
+  "research": {
+    "clarification": {
+      "title": "研究澄清"
+    },
+    "ready_to_start": "准备开始深度研究",
+    "retry": "重新澄清",
+    "continue_research": "开始研究",
+    "supplement_info_label": "补充信息(可选)",
+    "supplement_info_placeholder": "您可以在这里补充更多信息,帮助我们更好地理解您的需求..."
+  },
   "memory": {
     "title": "全局记忆",
     "settings": "设置",

@@ -2459,6 +2459,16 @@
     "show_window": "顯示視窗",
     "visualization": "視覺化"
   },
+  "research": {
+    "clarification": {
+      "title": "研究澄清"
+    },
+    "ready_to_start": "準備開始深度研究",
+    "retry": "重新澄清",
+    "continue_research": "繼續研究",
+    "supplement_info_label": "補充資訊 (可選)",
+    "supplement_info_placeholder": "您可以在此處提供補充資訊,幫助我們更好地理解您的需求..."
+  },
   "memory": {
     "title": "全域記憶",
     "add_memory": "新增記憶",
@@ -9,6 +9,7 @@ import { useQuickPanel } from '@renderer/components/QuickPanel'
 import {
   GEMINI_FLASH_MODEL_REGEX,
   isDoubaoThinkingAutoModel,
+  isOpenAIDeepResearchModel,
   isSupportedReasoningEffortGrokModel,
   isSupportedThinkingTokenDoubaoModel,
   isSupportedThinkingTokenGeminiModel,

@@ -40,7 +41,8 @@ const MODEL_SUPPORTED_OPTIONS: Record<string, ThinkingOption[]> = {
   gemini: ['off', 'low', 'medium', 'high', 'auto'],
   gemini_pro: ['low', 'medium', 'high', 'auto'],
   qwen: ['off', 'low', 'medium', 'high'],
-  doubao: ['off', 'auto', 'high']
+  doubao: ['off', 'auto', 'high'],
+  openai_deep_research: ['off', 'medium']
 }

 // Option fallback map: the replacement option to use when an option is not supported
@@ -48,7 +50,7 @@ const OPTION_FALLBACK: Record<ThinkingOption, ThinkingOption> = {
   off: 'low', // off -> low (for Gemini Pro models)
   low: 'high',
   medium: 'high', // medium -> high (for Grok models)
-  high: 'high',
+  high: 'medium',
   auto: 'high' // auto -> high (for non-Gemini models)
 }

@@ -62,6 +64,7 @@ const ThinkingButton: FC<Props> = ({ ref, model, assistant, ToolbarButton }): Re
   const isGeminiFlashModel = GEMINI_FLASH_MODEL_REGEX.test(model.id)
   const isQwenModel = isSupportedThinkingTokenQwenModel(model)
   const isDoubaoModel = isSupportedThinkingTokenDoubaoModel(model)
+  const isDeepResearchModel = isOpenAIDeepResearchModel(model)

   const currentReasoningEffort = useMemo(() => {
     return assistant.settings?.reasoning_effort || 'off'

@@ -79,8 +82,9 @@ const ThinkingButton: FC<Props> = ({ ref, model, assistant, ToolbarButton }): Re
     if (isGrokModel) return 'grok'
     if (isQwenModel) return 'qwen'
     if (isDoubaoModel) return 'doubao'
+    if (isDeepResearchModel) return 'openai_deep_research'
     return 'default'
-  }, [isGeminiModel, isGrokModel, isQwenModel, isDoubaoModel, isGeminiFlashModel])
+  }, [isGeminiModel, isGrokModel, isQwenModel, isDoubaoModel, isDeepResearchModel, isGeminiFlashModel])

   // Get the options supported by the current model
   const supportedOptions = useMemo(() => {
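A brief sketch of how the changed fallback can play out for a deep-research model, which only supports ['off', 'medium'] above. The helper name `resolveOption` is hypothetical; the real component resolves options inside its memoized logic.

```ts
// Sketch only: supported options and fallback map copied from the diff above.
type ThinkingOption = 'off' | 'low' | 'medium' | 'high' | 'auto'

const MODEL_SUPPORTED_OPTIONS: Record<string, ThinkingOption[]> = {
  openai_deep_research: ['off', 'medium']
}

const OPTION_FALLBACK: Record<ThinkingOption, ThinkingOption> = {
  off: 'low',
  low: 'high',
  medium: 'high',
  high: 'medium', // changed in this diff: 'high' now falls back to 'medium'
  auto: 'high'
}

// Follow the fallback chain until we reach an option the model supports.
function resolveOption(option: ThinkingOption, modelType: string): ThinkingOption {
  const supported = MODEL_SUPPORTED_OPTIONS[modelType] ?? []
  const seen = new Set<ThinkingOption>()
  let current = option
  while (!supported.includes(current) && !seen.has(current)) {
    seen.add(current)
    current = OPTION_FALLBACK[current]
  }
  return supported.includes(current) ? current : 'off'
}

// 'high' is not supported by openai_deep_research, so it resolves to 'medium'.
console.log(resolveOption('high', 'openai_deep_research')) // 'medium'
```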
@@ -6,7 +6,12 @@ import ImageViewer from '@renderer/components/ImageViewer'
 import MarkdownShadowDOMRenderer from '@renderer/components/MarkdownShadowDOMRenderer'
 import { useSettings } from '@renderer/hooks/useSettings'
 import { EVENT_NAMES, EventEmitter } from '@renderer/services/EventService'
-import type { MainTextMessageBlock, ThinkingMessageBlock, TranslationMessageBlock } from '@renderer/types/newMessage'
+import type {
+  DeepResearchMessageBlock,
+  MainTextMessageBlock,
+  ThinkingMessageBlock,
+  TranslationMessageBlock
+} from '@renderer/types/newMessage'
 import { parseJSON } from '@renderer/utils'
 import { removeSvgEmptyLines } from '@renderer/utils/formats'
 import { findCitationInChildren, getCodeBlockId, processLatexBrackets } from '@renderer/utils/markdown'

@@ -34,7 +39,7 @@ const DISALLOWED_ELEMENTS = ['iframe']

 interface Props {
   // message: Message & { content: string }
-  block: MainTextMessageBlock | TranslationMessageBlock | ThinkingMessageBlock
+  block: MainTextMessageBlock | TranslationMessageBlock | ThinkingMessageBlock | DeepResearchMessageBlock
 }

 const Markdown: FC<Props> = ({ block }) => {
@@ -0,0 +1,13 @@
import DeepResearchCard from '@renderer/components/DeepResearchCard'
import type { DeepResearchMessageBlock } from '@renderer/types/newMessage'
import React from 'react'

interface Props {
  block: DeepResearchMessageBlock
}

const DeepResearchBlock: React.FC<Props> = ({ block }) => {
  return <DeepResearchCard block={block} />
}

export default React.memo(DeepResearchBlock)
@@ -8,6 +8,7 @@ import { useSelector } from 'react-redux'
 import styled from 'styled-components'

 import CitationBlock from './CitationBlock'
+import DeepResearchBlock from './DeepResearchBlock'
 import ErrorBlock from './ErrorBlock'
 import FileBlock from './FileBlock'
 import ImageBlock from './ImageBlock'

@@ -143,6 +144,9 @@ const MessageBlockRenderer: React.FC<Props> = ({ blocks, message }) => {
       case MessageBlockType.TRANSLATION:
         blockComponent = <TranslationBlock key={block.id} block={block} />
         break
+      case MessageBlockType.DEEP_RESEARCH:
+        blockComponent = <DeepResearchBlock key={block.id} block={block} />
+        break
       default:
         console.warn('Unsupported block type in MessageBlockRenderer:', (block as any).type, block)
         break
@@ -3,6 +3,7 @@ import Logger from '@renderer/config/logger'
 import {
   isEmbeddingModel,
   isGenerateImageModel,
+  isOpenAIDeepResearchModel,
   isOpenRouterBuiltInWebSearchModel,
   isReasoningModel,
   isSupportedDisableGenerationModel,

@@ -385,7 +386,13 @@ export async function fetchChatCompletion({
   // try {
   // NOTE: The search results are NOT added to the messages sent to the AI here.
   // They will be retrieved and used by the messageThunk later to create CitationBlocks.
-  const { mcpTools } = await fetchExternalTool(lastUserMessage, assistant, onChunkReceived, lastAnswer)
+  const mcpTools: MCPTool[] = []
+  if (!isOpenAIDeepResearchModel(assistant.model || getDefaultModel())) {
+    const { mcpTools: tools } = await fetchExternalTool(lastUserMessage, assistant, onChunkReceived, lastAnswer)
+    mcpTools.push(...(tools || []))
+  }

   const model = assistant.model || getDefaultModel()

   const { maxTokens, contextCount } = getAssistantSettings(assistant)
src/renderer/src/services/DeepResearchService.ts (new file, 151 lines)
@@ -0,0 +1,151 @@
import AiProvider from '@renderer/aiCore'
import { CompletionsParams } from '@renderer/aiCore/middleware/schemas'
import {
  isOpenAIDeepResearchModel,
  isReasoningModel,
  isSupportedReasoningEffortModel,
  isSupportedThinkingTokenModel
} from '@renderer/config/models'
import { DEEP_RESEARCH_CLARIFICATION_PROMPT, DEEP_RESEARCH_PROMPT_REWRITE_PROMPT } from '@renderer/config/prompts'
import { Assistant, Message } from '@renderer/types'
import { Chunk } from '@renderer/types/chunk'
import { findLast } from 'lodash'

import { fetchChatCompletion } from './ApiService'
import { getAssistantProvider, getDefaultAssistant, getDefaultModel, getTopNamingModel } from './AssistantService'

interface DeepResearchCallbacks {
  onResearchStarted: () => Promise<string | undefined> // returns the user's supplementary info
  onResponse: (text: string, isComplete: boolean) => void
  onChunkReceived: (chunk: Chunk) => void
}

// Clarification phase: generate clarifying questions
export async function fetchDeepResearchClarification({
  messages,
  assistant,
  onResponse
}: {
  messages: Message[]
  assistant: Assistant
  onResponse: (text: string, isComplete: boolean) => void
}) {
  const clarificationAssistant = getDefaultAssistant()
  const model = getTopNamingModel() || getDefaultModel()
  clarificationAssistant.model = model
  clarificationAssistant.prompt = DEEP_RESEARCH_CLARIFICATION_PROMPT

  const lastUserMessage = findLast(messages, (m) => m.role === 'user')
  if (!lastUserMessage) {
    throw new Error('No user message found for clarification')
  }

  const enableReasoning =
    ((isSupportedThinkingTokenModel(model) || isSupportedReasoningEffortModel(model)) &&
      assistant.settings?.reasoning_effort !== undefined) ||
    (isReasoningModel(model) && (!isSupportedThinkingTokenModel(model) || !isSupportedReasoningEffortModel(model)))

  const params: CompletionsParams = {
    callType: 'chat',
    messages: [lastUserMessage],
    onResponse: onResponse,
    assistant: clarificationAssistant,
    streamOutput: assistant.settings?.streamOutput || false,
    enableReasoning
  }

  const provider = getAssistantProvider(clarificationAssistant)
  const AI = new AiProvider(provider)

  const result = await AI.completions(params, {
    streamOutput: assistant.settings?.streamOutput || false
  })
  return result.getText()
}

// Prompt-rewrite phase
export async function fetchDeepResearchPromptRewrite(
  clarificationAnswers: string,
  userSupplementInfo?: string
): Promise<string> {
  const rewriteAssistant = getDefaultAssistant()
  rewriteAssistant.model = getTopNamingModel() || getDefaultModel()

  rewriteAssistant.prompt = DEEP_RESEARCH_PROMPT_REWRITE_PROMPT

  // Build the full content from the clarification answers plus the user's supplementary info
  let contentForRewrite = clarificationAnswers
  if (userSupplementInfo && userSupplementInfo.trim()) {
    contentForRewrite += `\n\n用户补充信息:\n${userSupplementInfo.trim()}`
  }

  const params: CompletionsParams = {
    callType: 'summary',
    messages: contentForRewrite,
    assistant: rewriteAssistant,
    streamOutput: false,
    enableReasoning: false
  }

  const provider = getAssistantProvider(rewriteAssistant)
  const AI = new AiProvider(provider)

  try {
    const result = await AI.completions(params)
    const rewrittenPrompt = result.getText()

    return rewrittenPrompt
  } catch (error: any) {
    console.error('Prompt rewrite phase failed:', error)
    return contentForRewrite
  }
}

// Main Deep Research entry point
export async function fetchDeepResearch({
  messages,
  assistant,
  callbacks
}: {
  messages: Message[]
  assistant: Assistant
  callbacks: DeepResearchCallbacks
}) {
  const model = assistant.model || getDefaultModel()
  if (!isOpenAIDeepResearchModel(model)) {
    throw new Error('Model is not supported for deep research')
  }

  const lastUserMessage = findLast(messages, (m) => m.role === 'user')
  if (!lastUserMessage) {
    throw new Error('No user message found for deep research')
  }

  try {
    // Phase 1: clarify the user's intent
    const clarificationAnswers = await fetchDeepResearchClarification({
      messages,
      assistant,
      onResponse: callbacks.onResponse
    })

    // Wait for the user's confirmation and collect any supplementary info
    const userSupplementInfo = await callbacks.onResearchStarted()

    // Phase 2: rewrite the prompt
    const rewrittenPrompt = await fetchDeepResearchPromptRewrite(clarificationAnswers, userSupplementInfo)

    // Run the Deep Research model with the enhanced prompt
    await fetchChatCompletion({
      messages: [lastUserMessage],
      assistant: {
        ...assistant,
        prompt: rewrittenPrompt
      },
      onChunkReceived: callbacks.onChunkReceived
    })
  } catch (error: any) {
    console.error('Deep research failed:', error)
    throw error
  }
}
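A minimal usage sketch of the three-phase flow above, assuming a caller that just logs streamed text and resolves the confirmation step immediately. The real caller in messageThunk resolves it through deepResearchConfirmation when the user clicks "Start Research"; the function name here is illustrative only.

```ts
// Sketch only: `assistant` and `messages` come from application state in the real code.
import { fetchDeepResearch } from '@renderer/services/DeepResearchService'
import type { Assistant, Message } from '@renderer/types'

async function runDeepResearchExample(assistant: Assistant, messages: Message[]) {
  await fetchDeepResearch({
    messages,
    assistant,
    callbacks: {
      // Phase 1 output: clarification questions streamed back to the UI.
      onResponse: (text, isComplete) => {
        console.log(isComplete ? '[clarification done]' : '[clarifying]', text)
      },
      // Called between phase 1 and phase 2; resolves with optional user-supplied info.
      // Here we resolve immediately; the app instead waits for the confirmation card.
      onResearchStarted: async () => 'No extra constraints',
      // Phase 3 output: chunks streamed from the deep-research completion itself.
      onChunkReceived: (chunk) => {
        console.log('[research chunk]', chunk)
      }
    }
  })
}
```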
@@ -19,9 +19,10 @@ export const createToolCallbacks = (deps: ToolCallbacksDependencies) => {
   return {
     onToolCallPending: (toolResponse: MCPToolResponse) => {
       if (blockManager.hasInitialPlaceholder) {
-        const changes = {
+        const changes: Partial<ToolMessageBlock> = {
           type: MessageBlockType.TOOL,
           status: MessageBlockStatus.PENDING,
           toolId: toolResponse.id,
           toolName: toolResponse.tool.name,
           metadata: { rawMcpToolResponse: toolResponse }
         }

@@ -49,7 +50,7 @@ export const createToolCallbacks = (deps: ToolCallbacksDependencies) => {
       const targetBlockId = toolCallIdToBlockIdMap.get(toolResponse.id)

       if (targetBlockId && toolResponse.status === 'invoking') {
-        const changes = {
+        const changes: Partial<ToolMessageBlock> = {
           status: MessageBlockStatus.PROCESSING,
           metadata: { rawMcpToolResponse: toolResponse }
         }
@@ -1,5 +1,8 @@
 import Logger from '@renderer/config/logger'
+import { isOpenAIDeepResearchModel } from '@renderer/config/models'
 import db from '@renderer/databases'
 import { fetchChatCompletion } from '@renderer/services/ApiService'
+import { fetchDeepResearch } from '@renderer/services/DeepResearchService'
 import FileManager from '@renderer/services/FileManager'
 import { BlockManager } from '@renderer/services/messageStreaming/BlockManager'
 import { createCallbacks } from '@renderer/services/messageStreaming/callbacks'

@@ -10,12 +13,15 @@ import { type Assistant, type FileMetadata, type Model, type Topic } from '@rend
 import type { FileMessageBlock, ImageMessageBlock, Message, MessageBlock } from '@renderer/types/newMessage'
 import { AssistantMessageStatus, MessageBlockStatus, MessageBlockType } from '@renderer/types/newMessage'
 import { uuid } from '@renderer/utils'
 import { abortCompletion } from '@renderer/utils/abortController'
+import { deepResearchConfirmation } from '@renderer/utils/deepResearchConfirmation'
 import {
   createAssistantMessage,
+  createDeepResearchBlock,
   createTranslationBlock,
   resetAssistantMessage
 } from '@renderer/utils/messageUtils/create'
-import { getTopicQueue } from '@renderer/utils/queue'
+import { clearTopicQueue, getTopicQueue } from '@renderer/utils/queue'
 import { waitForTopicQueue } from '@renderer/utils/queue'
 import { t } from 'i18next'
 import { isEmpty, throttle } from 'lodash'

@@ -883,11 +889,49 @@ const fetchAndProcessAssistantResponseImpl = async (
     const streamProcessorCallbacks = createStreamProcessor(callbacks)

     // const startTime = Date.now()
-    await fetchChatCompletion({
-      messages: messagesForContext,
-      assistant: assistant,
-      onChunkReceived: streamProcessorCallbacks
-    })
+    if (isOpenAIDeepResearchModel(assistant.model)) {
+      const deepResearchBlock = await handleDeepResearchFlow(dispatch, getState, topicId, assistantMessage)
+      const clarificationUpdater = getClarificationUpdater(assistantMessage.id, dispatch)
+      if (!clarificationUpdater) {
+        return
+      }
+      await fetchDeepResearch({
+        messages: messagesForContext,
+        assistant,
+        callbacks: {
+          onResearchStarted: async (): Promise<string | undefined> => {
+            // Wait for the user's confirmation and collect any supplementary info
+            return new Promise<string | undefined>((resolve) => {
+              deepResearchConfirmation.registerResolver(deepResearchBlock.id, (userSupplementInfo?: string) => {
+                dispatch(
+                  updateOneBlock({
+                    id: deepResearchBlock.id,
+                    changes: {
+                      metadata: {
+                        deepResearchState: {
+                          phase: 'research'
+                        }
+                      }
+                    }
+                  })
+                )
+                resolve(userSupplementInfo)
+              })
+            })
+          },
+          onResponse: clarificationUpdater,
+          onChunkReceived: streamProcessorCallbacks
+        }
+      })
+    } else {
+      // Normal chat flow
+      await fetchChatCompletion({
+        messages: messagesForContext,
+        assistant: assistant,
+        onChunkReceived: streamProcessorCallbacks
+      })
+    }
   } catch (error: any) {
     console.error('Error fetching chat completion:', error)
     if (assistantMessage) {
@@ -897,6 +941,101 @@ const fetchAndProcessAssistantResponseImpl = async (
    }
  }

const getClarificationUpdater = (messageId: string, dispatch: AppDispatch) => {
  const state = store.getState()
  const message = state.messages.entities[messageId]
  if (!message) {
    return null
  }
  let deepResearchBlockId: string | undefined
  if (message.blocks && message.blocks.length > 0) {
    for (const blockId of message.blocks) {
      const block = state.messageBlocks.entities[blockId]
      if (block && block.type === MessageBlockType.DEEP_RESEARCH) {
        deepResearchBlockId = blockId
        break
      }
    }
  }
  if (deepResearchBlockId) {
    const blockId = deepResearchBlockId
    const changes: Partial<MessageBlock> = {
      content: '',
      status: MessageBlockStatus.STREAMING,
      metadata: {
        deepResearchState: {
          phase: 'clarification'
        }
      }
    }
    dispatch(updateOneBlock({ id: blockId, changes }))
    return throttle(
      (accumulatedText: string, isComplete: boolean = false) => {
        dispatch(updateBlockThunk(blockId, accumulatedText, isComplete))

        // Clarification finished: mark the block as waiting for the user's confirmation
        if (isComplete) {
          dispatch(
            updateOneBlock({
              id: blockId,
              changes: {
                metadata: {
                  deepResearchState: {
                    phase: 'waiting_confirmation'
                  }
                }
              }
            })
          )
        }
      },
      200,
      { leading: true, trailing: true }
    )
  }
  return null
}

/**
 * Initializes the Deep Research flow
 */
const handleDeepResearchFlow = async (
  dispatch: AppDispatch,
  getState: () => RootState,
  topicId: string,
  assistantMessage: Message
) => {
  // Create the Deep Research state block and tag the message type
  const deepResearchBlock = createDeepResearchBlock(
    assistantMessage.id,
    '',
    {
      phase: 'clarification'
    },
    {
      status: MessageBlockStatus.PENDING
    }
  )

  dispatch(upsertOneBlock(deepResearchBlock))
  dispatch(
    newMessagesActions.updateMessage({
      topicId,
      messageId: assistantMessage.id,
      updates: {
        blocks: [...(assistantMessage.blocks || []), deepResearchBlock.id],
        type: 'deep_research'
      }
    })
  )
  const finalMessagesToSave = selectMessagesForTopic(getState(), topicId)
  await db.transaction('rw', db.topics, db.message_blocks, async () => {
    await db.message_blocks.put(deepResearchBlock)
    await db.topics.update(topicId, { messages: finalMessagesToSave })
  })
  return deepResearchBlock
}

/**
 * Send a message and process the assistant's reply
 * @param userMessage The user message that has already been created
@@ -943,6 +1082,83 @@ export const sendMessage =
    }
  }

/**
 * Retry the Deep Research clarification phase
 */
export const retryDeepResearchClarificationThunk =
  (topicId: string, messageId: string) => async (dispatch: AppDispatch, getState: () => RootState) => {
    try {
      const state = getState()
      const message = state.messages.entities[messageId]

      if (!message) {
        Logger.error(`[retryDeepResearchClarificationThunk] Message ${messageId} not found in state`)
        return
      }

      // Find and remove the clarification-related blocks; keep the state block but reset its state
      const blocksToRemove: string[] = []
      const blocksToUpdate: MessageBlock[] = []

      message.blocks?.forEach((blockId) => {
        const block = state.messageBlocks.entities[blockId]
        if (block && block.type === MessageBlockType.DEEP_RESEARCH) {
          // Clear any existing resolver
          deepResearchConfirmation.clearResolver(blockId)

          if (block.type === MessageBlockType.DEEP_RESEARCH) {
            blocksToRemove.push(blockId)
          }
        }
      })

      if (blocksToRemove.length > 0) {
        cleanupMultipleBlocks(dispatch, blocksToRemove)

        // Update the message's blocks array
        const updatedBlocks = (message.blocks || []).filter((id) => !blocksToRemove.includes(id))
        dispatch(
          newMessagesActions.updateMessage({
            topicId,
            messageId,
            updates: { blocks: updatedBlocks }
          })
        )
        await db.message_blocks.bulkDelete(blocksToRemove)
      }

      if (blocksToUpdate.length > 0) {
        dispatch(upsertManyBlocks(blocksToUpdate))
      }

      // 1. Abort the currently running task first, if there is one
      if (message.askId) {
        try {
          abortCompletion(message.askId)
        } catch (error) {
          Logger.warn(`[retryDeepResearchClarificationThunk] Failed to abort current task:`, error)
        }
      }

      // 2. Clear the pending tasks in the queue and enqueue the new one
      clearTopicQueue(topicId)

      const queue = getTopicQueue(topicId)

      queue.add(async () => {
        const assistant = state.assistants.assistants.find((a) => a.id === message.assistantId)
        if (assistant) {
          await fetchAndProcessAssistantResponseImpl(dispatch, getState, topicId, assistant, message)
        } else {
          Logger.error(`[retryDeepResearchClarificationThunk] Assistant ${message.assistantId} not found`)
        }
      })
    } catch (error) {
      Logger.error(`[retryDeepResearchClarificationThunk] Unexpected error during retry:`, error)
      throw error
    }
  }

/**
 * Loads messages and their blocks for a specific topic from the database
 * and updates the Redux store.

@@ -1393,7 +1609,7 @@ export const initiateTranslationThunk =
   }

 // --- Thunk to update the translation block with new content ---
-export const updateTranslationBlockThunk =
+export const updateBlockThunk =
   (blockId: string, accumulatedText: string, isComplete: boolean = false) =>
   async (dispatch: AppDispatch) => {
     // Logger.log(`[updateTranslationBlockThunk] Updating translation block ${blockId}, isComplete: ${isComplete}`)
@@ -680,7 +680,7 @@ export interface GetMCPPromptResponse {
   messages: {
     role: string
     content: {
-      type: 'text' | 'image' | 'audio' | 'resource'
+      type: 'text' | 'image' | 'audio' | 'resource' | 'resource_link'
       text?: string
       data?: string
       mimeType?: string
@@ -27,7 +27,8 @@ export enum MessageBlockType {
   TOOL = 'tool', // Added unified tool block type
   FILE = 'file', // File content
   ERROR = 'error', // Error information
-  CITATION = 'citation' // Citation type (now includes web search, grounding, etc.)
+  CITATION = 'citation', // Citation type (now includes web search, grounding, etc.)
+  DEEP_RESEARCH = 'deep_research' // Deep Research
 }

 // Block status definitions

@@ -133,6 +134,15 @@ export interface ErrorMessageBlock extends BaseMessageBlock {
   type: MessageBlockType.ERROR
 }

+// Deep Research state block
+export interface DeepResearchMessageBlock extends BaseMessageBlock {
+  type: MessageBlockType.DEEP_RESEARCH
+  content: string
+  metadata: BaseMessageBlock['metadata'] & {
+    deepResearchState: DeepResearchMetadata
+  }
+}
+
 // MessageBlock union type
 export type MessageBlock =
   | PlaceholderMessageBlock

@@ -145,6 +155,7 @@ export type MessageBlock =
   | FileMessageBlock
   | ErrorMessageBlock
   | CitationMessageBlock
+  | DeepResearchMessageBlock

 export enum UserMessageStatus {
   SUCCESS = 'success'

@@ -158,6 +169,15 @@ export enum AssistantMessageStatus {
   PAUSED = 'paused',
   ERROR = 'error'
 }

+// Deep Research related type definitions
+export interface DeepResearchMetadata {
+  phase: 'clarification' | 'waiting_confirmation' | 'research' | 'completed'
+  clarificationBlockId?: string
+}
+
+// Extended message type
+export type MessageType = 'clear' | 'deep_research'
 // Message core type - metadata plus the collection of blocks
 export type Message = {
   id: string

@@ -171,7 +191,7 @@ export type Message = {
   // Message metadata
   modelId?: string
   model?: Model
-  type?: 'clear'
+  type?: MessageType
   useful?: boolean
   askId?: string // ID of the related question message
   mentions?: Model[]
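For illustration, a hand-written block value following the shapes above, frozen at the point where the card waits for the user's confirmation. Only the fields defined in this diff are shown; the remaining base fields (id, messageId, status, and so on) are assumed to come from createBaseMessageBlock.

```ts
// Hypothetical example values; only the shape follows DeepResearchMetadata / DeepResearchMessageBlock.
import { MessageBlockType } from '@renderer/types/newMessage'
import type { DeepResearchMetadata } from '@renderer/types/newMessage'

// Phase progression in this feature: 'clarification' -> 'waiting_confirmation' -> 'research' -> 'completed'
const state: DeepResearchMetadata = {
  phase: 'waiting_confirmation'
}

const exampleBlock = {
  type: MessageBlockType.DEEP_RESEARCH,
  content: 'Before I start, which time range and regions should the research cover?',
  metadata: { deepResearchState: state }
}

console.log(exampleBlock.metadata.deepResearchState.phase) // 'waiting_confirmation'
```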
src/renderer/src/utils/deepResearchConfirmation.ts (new file, 68 lines)
@@ -0,0 +1,68 @@
// Deep Research confirmation management
interface DeepResearchResolver {
  blockId: string
  resolve: (userSupplementInfo?: string) => void
  createdAt: number
}

class DeepResearchConfirmationManager {
  private resolvers = new Map<string, DeepResearchResolver>()

  // Register a resolver
  registerResolver(blockId: string, resolve: (userSupplementInfo?: string) => void): void {
    this.resolvers.set(blockId, {
      blockId,
      resolve,
      createdAt: Date.now()
    })
  }

  // Trigger a resolver and pass along the user's supplementary info
  triggerResolver(blockId: string, userSupplementInfo?: string): boolean {
    const resolver = this.resolvers.get(blockId)
    if (resolver) {
      resolver.resolve(userSupplementInfo)
      this.resolvers.delete(blockId)
      return true
    }
    return false
  }

  // Clear a resolver
  clearResolver(blockId: string): void {
    this.resolvers.delete(blockId)
  }

  // Check whether a resolver exists
  hasResolver(blockId: string): boolean {
    return this.resolvers.has(blockId)
  }

  // Clean up expired resolvers (older than 10 minutes)
  cleanupExpiredResolvers(): void {
    const now = Date.now()
    const expireTime = 10 * 60 * 1000 // 10 minutes

    for (const [blockId, resolver] of this.resolvers.entries()) {
      if (now - resolver.createdAt > expireTime) {
        this.resolvers.delete(blockId)
      }
    }
  }

  // Get all pending block IDs
  getPendingblockIds(): string[] {
    return Array.from(this.resolvers.keys())
  }
}

// Export a singleton instance
export const deepResearchConfirmation = new DeepResearchConfirmationManager()

// Periodically clean up expired resolvers
setInterval(
  () => {
    deepResearchConfirmation.cleanupExpiredResolvers()
  },
  5 * 60 * 1000
) // every 5 minutes
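The manager above is the bridge between the awaiting flow and the confirmation card. A condensed sketch of that handshake follows; the block id is a placeholder, and in the real code registration happens inside fetchDeepResearch's onResearchStarted while triggering happens in DeepResearchCard's onContinueResearch.

```ts
import { deepResearchConfirmation } from '@renderer/utils/deepResearchConfirmation'

const blockId = 'example-block-id' // placeholder

// Producer side (thunk): suspend until the user confirms, optionally receiving extra info.
const waitForConfirmation = new Promise<string | undefined>((resolve) => {
  deepResearchConfirmation.registerResolver(blockId, resolve)
})

// Consumer side (UI): the "Start Research" button triggers the stored resolver.
const triggered = deepResearchConfirmation.triggerResolver(blockId, 'Focus on 2023-2024 data')
if (!triggered) {
  console.error('No pending resolver for block', blockId)
}

waitForConfirmation.then((userSupplementInfo) => {
  console.log('Research can proceed with:', userSupplementInfo)
})
```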
@@ -5,6 +5,8 @@ import type {
   BaseMessageBlock,
   CitationMessageBlock,
   CodeMessageBlock,
+  DeepResearchMessageBlock,
+  DeepResearchMetadata,
   ErrorMessageBlock,
   FileMessageBlock,
   ImageMessageBlock,

@@ -274,6 +276,30 @@ export function createCitationBlock(
   }
 }

+/**
+ * Creates a Deep Research Message Block.
+ * @param messageId - The ID of the parent message.
+ * @param deepResearchState - The deep research state metadata.
+ * @param overrides - Optional properties to override the defaults.
+ * @returns A DeepResearchMessageBlock object.
+ */
+export function createDeepResearchBlock(
+  messageId: string,
+  content: string,
+  deepResearchState: DeepResearchMetadata,
+  overrides: Partial<Omit<DeepResearchMessageBlock, 'id' | 'messageId' | 'type' | 'metadata'>> = {}
+): DeepResearchMessageBlock {
+  const baseBlock = createBaseMessageBlock(messageId, MessageBlockType.DEEP_RESEARCH, overrides)
+  return {
+    ...baseBlock,
+    content,
+    metadata: {
+      ...baseBlock.metadata,
+      deepResearchState
+    }
+  }
+}
+
 /**
  * Creates a new Message object
  * @param role - The role of the message sender ('user' or 'assistant').