Compare commits

1 commit

Author: lizhixuan
SHA1: a4232ef7fa
Date: 2025-06-16 18:24:13 +08:00

feat: integrate ai-sdk providers and enhance API client functionality

- Added support for new ai-sdk providers: Anthropic, Google, OpenAI, and XAI.
- Implemented a universal API client for dynamic provider imports.
- Enhanced the completions method to handle streaming responses.
- Updated package.json and yarn.lock to include new dependencies.
- Refactored ApiClientFactory to manage multiple provider clients efficiently.

8 changed files with 436 additions and 3 deletions
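Taken together, the new pieces compose as sketched below: ApiClientFactory resolves a cached UniversalAiSdkClient from the provider registry, and completions stream through the shared ai-sdk streamText() path. This is a minimal usage sketch based on the code in this commit; the provider name, model id, and API key are placeholder values.

import { ApiClientFactory } from '@renderer/aiCore/clients/ApiClientFactory'

async function demoAiSdkStreaming(): Promise<string> {
  // Resolve (and cache) a universal client for a registered ai-sdk provider.
  const client = await ApiClientFactory.createAiSdkClient('openai', { apiKey: 'sk-placeholder' })

  // Stream a completion through the standardized AiCoreRequest shape.
  const result = await client.stream({
    modelId: 'gpt-4o-mini', // placeholder model id
    messages: [{ role: 'user', content: 'Hello!' }]
  })

  let text = ''
  for await (const part of result.fullStream) {
    if (part.type === 'text-delta') text += part.textDelta
  }
  return text
}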

View File

@@ -71,6 +71,10 @@
"@agentic/exa": "^7.3.3",
"@agentic/searxng": "^7.3.3",
"@agentic/tavily": "^7.3.3",
"@ai-sdk/anthropic": "^1.2.12",
"@ai-sdk/google": "^1.2.19",
"@ai-sdk/openai": "^1.3.22",
"@ai-sdk/xai": "^1.2.16",
"@ant-design/v5-patch-for-react-19": "^1.0.3",
"@anthropic-ai/sdk": "^0.41.0",
"@cherrystudio/embedjs": "^0.1.31",
@@ -132,6 +136,7 @@
"@vitest/ui": "^3.1.4",
"@vitest/web-worker": "^3.1.4",
"@xyflow/react": "^12.4.4",
"ai": "^4.3.16",
"antd": "^5.22.5",
"archiver": "^7.0.1",
"async-mutex": "^0.5.0",

View File

@@ -6,12 +6,39 @@ import { BaseApiClient } from './BaseApiClient'
import { GeminiAPIClient } from './gemini/GeminiAPIClient'
import { OpenAIAPIClient } from './openai/OpenAIApiClient'
import { OpenAIResponseAPIClient } from './openai/OpenAIResponseAPIClient'
import { UniversalAiSdkClient } from './UniversalAiSdkClient'
/**
* Factory for creating ApiClient instances based on provider configuration
* 根据提供者配置创建ApiClient实例的工厂
*/
export class ApiClientFactory {
private static sdkClients = new Map<string, UniversalAiSdkClient>()
/**
* [NEW METHOD] Create a new universal client for ai-sdk providers.
* [新方法] 为 ai-sdk 提供商创建一个新的通用客户端。
*/
static async createAiSdkClient(providerName: string, options?: any): Promise<UniversalAiSdkClient> {
// A simple cache key. For providers with auth options,
// you might want a more sophisticated key.
const cacheKey = `${providerName}-${JSON.stringify(options || {})}`
if (this.sdkClients.has(cacheKey)) {
return this.sdkClients.get(cacheKey)!
}
// 1. Create a new instance of our universal client
const client = new UniversalAiSdkClient(providerName, options)
// 2. Initialize it (this will perform the dynamic import)
await client.initialize()
// 3. Cache and return it
this.sdkClients.set(cacheKey, client)
return client
}
/**
* Create an ApiClient instance for the given provider
* 为给定的提供者创建ApiClient实例

View File

@@ -61,7 +61,7 @@ export abstract class BaseApiClient<
private static readonly SYSTEM_PROMPT_THRESHOLD: number = 128
public provider: Provider
protected host: string
- protected apiKey: string
+ public apiKey: string
protected sdkInstance?: TSdkInstance
public useSystemPromptForTools: boolean = true

View File

@@ -0,0 +1,88 @@
import type { CoreMessage, GenerateTextResult, LanguageModel, StreamTextResult } from 'ai'
import { generateText, streamText } from 'ai'
import { PROVIDER_REGISTRY } from './providerRegistry'
// This is our internal, standardized request object
export interface AiCoreRequest {
modelId: string
messages: CoreMessage[]
tools?: Record<string, any>
// ... any other standardized parameters you want to support
}
export class UniversalAiSdkClient {
private provider: any // The instantiated provider (e.g., from createOpenAI)
private isInitialized = false
constructor(
private providerName: string,
private options: any // API keys, etc.
) {}
// Initialization is now an async step because of dynamic imports
async initialize(): Promise<void> {
if (this.isInitialized) return
const config = PROVIDER_REGISTRY[this.providerName]
if (!config) {
throw new Error(`Provider "${this.providerName}" is not registered.`)
}
try {
// Directly call the import function from the registry.
// This is elegant and bundler-friendly.
const module = await config.import()
// Get the creator function (e.g., createOpenAI) from the module
const creatorFunction = module[config.creatorFunctionName]
if (typeof creatorFunction !== 'function') {
throw new Error(
`Creator function "${config.creatorFunctionName}" not found in the imported module for provider "${this.providerName}".`
)
}
this.provider = creatorFunction(this.options)
this.isInitialized = true
} catch (error) {
if (error instanceof Error) {
throw new Error(`Failed to initialize provider "${this.providerName}": ${error.message}`)
}
throw new Error(`An unknown error occurred while initializing provider "${this.providerName}".`)
}
}
// A helper to get the specific model instance from the provider
private getModel(modelId: string): LanguageModel {
if (!this.isInitialized) throw new Error('Client not initialized')
// Most providers have a .chat() or similar method.
// You might need a slightly more complex mapping here if some providers differ.
return this.provider.chat(modelId)
}
// Implements the streaming logic using the core ai-sdk function
async stream(request: AiCoreRequest): Promise<StreamTextResult<any, any>> {
if (!this.isInitialized) await this.initialize()
const model = this.getModel(request.modelId)
// Directly call the standard ai-sdk function
return streamText({
model,
messages: request.messages,
tools: request.tools
})
}
// Implements the non-streaming logic
async generate(request: AiCoreRequest): Promise<GenerateTextResult<any, any>> {
if (!this.isInitialized) await this.initialize()
const model = this.getModel(request.modelId)
return generateText({
model,
messages: request.messages,
tools: request.tools
})
}
}
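On the getModel note above ("most providers have a .chat() or similar method"): ai-sdk provider instances are generally also callable directly as provider(modelId), which sidesteps per-provider method names. A hedged sketch of that mapping, not code from this commit; the callable-provider behavior is an assumption about the @ai-sdk packages.

import type { LanguageModel } from 'ai'

// Hypothetical variant of getModel (not in this commit).
function resolveModel(provider: any, modelId: string): LanguageModel {
  if (typeof provider === 'function') {
    // Most ai-sdk providers can be invoked directly, e.g. provider('gpt-4o-mini').
    return provider(modelId)
  }
  if (typeof provider?.chat === 'function') {
    return provider.chat(modelId)
  }
  throw new Error(`Unable to resolve model "${modelId}" from this provider instance.`)
}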

View File

@@ -0,0 +1,32 @@
interface ProviderConfig {
// A function that returns a dynamic import promise for the provider's package.
// This approach is friendly to bundlers like Vite.
import: () => Promise<any>
// The name of the creator function within that package (e.g., 'createOpenAI')
creatorFunctionName: string
}
export const PROVIDER_REGISTRY: Record<string, ProviderConfig> = {
openai: {
import: () => import('@ai-sdk/openai'),
creatorFunctionName: 'createOpenAI'
},
anthropic: {
import: () => import('@ai-sdk/anthropic'),
creatorFunctionName: 'createAnthropic'
},
google: {
import: () => import('@ai-sdk/google'),
// The creator exported by @ai-sdk/google is createGoogleGenerativeAI
creatorFunctionName: 'createGoogleGenerativeAI'
},
// mistral: {
// import: () => import('@ai-sdk/mistral'),
// creatorFunctionName: 'createMistral'
// },
xai: {
import: () => import('@ai-sdk/xai'),
creatorFunctionName: 'createXai'
}
// You can add all your providers here.
// This file is the ONLY place you'll need to update when adding a new provider.
}
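As the closing comments note, wiring in another provider touches only this registry plus a dependency install. A hedged sketch of that workflow for the commented-out Mistral entry, assuming @ai-sdk/mistral exists and exports createMistral in line with its sibling packages:

// Hypothetical workflow (not part of this commit):
//   yarn add @ai-sdk/mistral            // install the assumed provider package
// then add (or uncomment) the registry record; nothing else needs to change:
const mistralEntry = {
  import: () => import('@ai-sdk/mistral'),   // assumed package name
  creatorFunctionName: 'createMistral'       // assumed export, mirroring createOpenAI etc.
}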

View File

@@ -2,13 +2,18 @@ import { ApiClientFactory } from '@renderer/aiCore/clients/ApiClientFactory'
import { BaseApiClient } from '@renderer/aiCore/clients/BaseApiClient'
import { isDedicatedImageGenerationModel, isFunctionCallingModel } from '@renderer/config/models'
import type { GenerateImageParams, Model, Provider } from '@renderer/types'
import { Chunk, ChunkType } from '@renderer/types/chunk'
import { Message } from '@renderer/types/newMessage'
import { RequestOptions, SdkModel } from '@renderer/types/sdk'
import { isEnabledToolUse } from '@renderer/utils/mcp-tools'
import { getMainTextContent } from '@renderer/utils/messageUtils/find'
import { type CoreMessage } from 'ai'
import { OpenAIAPIClient } from './clients'
import { AihubmixAPIClient } from './clients/AihubmixAPIClient'
import { AnthropicAPIClient } from './clients/anthropic/AnthropicAPIClient'
import { OpenAIResponseAPIClient } from './clients/openai/OpenAIResponseAPIClient'
import type { AiCoreRequest } from './clients/UniversalAiSdkClient'
import { CompletionsMiddlewareBuilder } from './middleware/builder'
import { MIDDLEWARE_NAME as AbortHandlerMiddlewareName } from './middleware/common/AbortHandlerMiddleware'
import { MIDDLEWARE_NAME as FinalChunkConsumerMiddlewareName } from './middleware/common/FinalChunkConsumerMiddleware'
@@ -101,6 +106,76 @@ export default class AiProvider {
return wrappedCompletionMethod(params, options)
}
public async completionsAiSdk(params: CompletionsParams): Promise<CompletionsResult> {
if (!params.assistant?.model) {
throw new Error('Assistant model configuration is missing.')
}
// --- 1. Get Provider Info & API Key ---
// The provider type (e.g., 'openai') is on the model object.
const providerType = params.assistant.model.provider
// The API key is retrieved from the currently initialized apiClient on the instance.
// This assumes that a relevant apiClient has been set up before this call.
if (!this.apiClient) {
// If no client, create one based on the current assistant's provider info
this.apiClient = ApiClientFactory.create(params.assistant.model.provider)
}
const providerOptions = { apiKey: this.apiClient.apiKey }
// --- 2. Message Conversion ---
const extractTextFromMessage = (message: Message): string => getMainTextContent(message)
const coreMessages: CoreMessage[] = (Array.isArray(params.messages) ? params.messages : [])
.map((msg) => {
const content = extractTextFromMessage(msg)
// Correctly handle the discriminated union for CoreMessage
if (msg.role === 'user' || msg.role === 'assistant' || msg.role === 'system') {
return { role: msg.role, content }
}
// Handle other roles like 'tool' if they have a different structure,
// or filter them out if they are not meant for this call.
return null
})
.filter((msg): msg is CoreMessage => msg !== null && msg.content !== '')
if (coreMessages.length === 0) {
throw new Error('Could not extract any valid content from messages.')
}
// --- 3. Prepare and Execute Request ---
const client = await ApiClientFactory.createAiSdkClient(providerType, providerOptions)
const request: AiCoreRequest = {
modelId: params.assistant.model.id,
messages: coreMessages
}
const result = await client.stream(request)
let fullText = ''
// --- 4. Process Stream ---
for await (const part of result.fullStream) {
if (part.type === 'text-delta' && params.onChunk) {
fullText += part.textDelta
const chunk: Chunk = {
type: ChunkType.TEXT_DELTA,
text: part.textDelta
}
params.onChunk(chunk)
}
}
// --- 5. Return Correct Result Shape ---
return {
getText: () => fullText
}
}
public async models(): Promise<SdkModel[]> {
return this.apiClient.listModels()
}

View File

@@ -343,7 +343,7 @@ export async function fetchChatCompletion({
if (enableWebSearch) {
onChunkReceived({ type: ChunkType.LLM_WEB_SEARCH_IN_PROGRESS })
}
- await AI.completions(
+ await AI.completionsAiSdk(
{
callType: 'chat',
messages: _messages,

yarn.lock
View File

@@ -74,6 +74,120 @@ __metadata:
languageName: node
linkType: hard
"@ai-sdk/anthropic@npm:^1.2.12":
version: 1.2.12
resolution: "@ai-sdk/anthropic@npm:1.2.12"
dependencies:
"@ai-sdk/provider": "npm:1.1.3"
"@ai-sdk/provider-utils": "npm:2.2.8"
peerDependencies:
zod: ^3.0.0
checksum: 10c0/da13e1ed3c03efe207dbb0fd5fe9f399e4119e6687ec1096418a33a7eeea3c5f912a51c74b185bba3c203b15ee0c1b9cdf649711815ff8e769e31af266ac00fb
languageName: node
linkType: hard
"@ai-sdk/google@npm:^1.2.19":
version: 1.2.19
resolution: "@ai-sdk/google@npm:1.2.19"
dependencies:
"@ai-sdk/provider": "npm:1.1.3"
"@ai-sdk/provider-utils": "npm:2.2.8"
peerDependencies:
zod: ^3.0.0
checksum: 10c0/b40d62ce822ce00850492e4a41c8b6b1ba2ddaaaa8f8d9b8381c198781adb23000fc4f434ef7edf5ba356a4455f8afbbdc5cbecbb0f66b7bcabbcd25758fc6b8
languageName: node
linkType: hard
"@ai-sdk/openai-compatible@npm:0.2.14":
version: 0.2.14
resolution: "@ai-sdk/openai-compatible@npm:0.2.14"
dependencies:
"@ai-sdk/provider": "npm:1.1.3"
"@ai-sdk/provider-utils": "npm:2.2.8"
peerDependencies:
zod: ^3.0.0
checksum: 10c0/60980df8507c1e5d04ac51123bc15ea5cbf29eb88485f63da28d64ab5d9c3b335d2a2c9155a383605972ef5fa636929c8e2d360bf799153acf2b358e1af1fd47
languageName: node
linkType: hard
"@ai-sdk/openai@npm:^1.3.22":
version: 1.3.22
resolution: "@ai-sdk/openai@npm:1.3.22"
dependencies:
"@ai-sdk/provider": "npm:1.1.3"
"@ai-sdk/provider-utils": "npm:2.2.8"
peerDependencies:
zod: ^3.0.0
checksum: 10c0/bcc73a84bebd15aa54568c3c77cedd5f999e282c5be180d5e28ebc789f8873dd0a74d87f1ec4a0f16e3e61b658c3b0734835daf176ed910966246db73c72b468
languageName: node
linkType: hard
"@ai-sdk/provider-utils@npm:2.2.8":
version: 2.2.8
resolution: "@ai-sdk/provider-utils@npm:2.2.8"
dependencies:
"@ai-sdk/provider": "npm:1.1.3"
nanoid: "npm:^3.3.8"
secure-json-parse: "npm:^2.7.0"
peerDependencies:
zod: ^3.23.8
checksum: 10c0/34c72bf5f23f2d3e7aef496da7099422ba3b3ff243c35511853e16c3f1528717500262eea32b19e3e09bc4452152a5f31e650512f53f08a5f5645d907bff429e
languageName: node
linkType: hard
"@ai-sdk/provider@npm:1.1.3":
version: 1.1.3
resolution: "@ai-sdk/provider@npm:1.1.3"
dependencies:
json-schema: "npm:^0.4.0"
checksum: 10c0/40e080e223328e7c89829865e9c48f4ce8442a6a59f7ed5dfbdb4f63e8d859a76641e2d31e91970dd389bddb910f32ec7c3dbb0ce583c119e5a1e614ea7b8bc4
languageName: node
linkType: hard
"@ai-sdk/react@npm:1.2.12":
version: 1.2.12
resolution: "@ai-sdk/react@npm:1.2.12"
dependencies:
"@ai-sdk/provider-utils": "npm:2.2.8"
"@ai-sdk/ui-utils": "npm:1.2.11"
swr: "npm:^2.2.5"
throttleit: "npm:2.1.0"
peerDependencies:
react: ^18 || ^19 || ^19.0.0-rc
zod: ^3.23.8
peerDependenciesMeta:
zod:
optional: true
checksum: 10c0/5422feb4ffeebd3287441cf658733e9ad7f9081fc279e85f57700d7fe9f4ed8a0504789c1be695790df44b28730e525cf12acf0f52bfa5adecc561ffd00cb2a5
languageName: node
linkType: hard
"@ai-sdk/ui-utils@npm:1.2.11":
version: 1.2.11
resolution: "@ai-sdk/ui-utils@npm:1.2.11"
dependencies:
"@ai-sdk/provider": "npm:1.1.3"
"@ai-sdk/provider-utils": "npm:2.2.8"
zod-to-json-schema: "npm:^3.24.1"
peerDependencies:
zod: ^3.23.8
checksum: 10c0/de0a10f9e16010126a21a1690aaf56d545b9c0f8d8b2cc33ffd22c2bb2e914949acb9b3f86e0e39a0e4b0d4f24db12e2b094045e34b311de0c8f84bfab48cc92
languageName: node
linkType: hard
"@ai-sdk/xai@npm:^1.2.16":
version: 1.2.16
resolution: "@ai-sdk/xai@npm:1.2.16"
dependencies:
"@ai-sdk/openai-compatible": "npm:0.2.14"
"@ai-sdk/provider": "npm:1.1.3"
"@ai-sdk/provider-utils": "npm:2.2.8"
peerDependencies:
zod: ^3.0.0
checksum: 10c0/5418f42506679c49f8c6127b0cdf8185622ccb844d6cf928efe1f819b20cbb4eae631eb7dc534468c790fa7087438b4ebd3b4072c13fb28e0c947ebdd6628ec2
languageName: node
linkType: hard
"@ampproject/remapping@npm:^2.2.0, @ampproject/remapping@npm:^2.3.0":
version: 2.3.0
resolution: "@ampproject/remapping@npm:2.3.0"
@@ -3184,6 +3298,13 @@ __metadata:
languageName: node
linkType: hard
"@opentelemetry/api@npm:1.9.0":
version: 1.9.0
resolution: "@opentelemetry/api@npm:1.9.0"
checksum: 10c0/9aae2fe6e8a3a3eeb6c1fdef78e1939cf05a0f37f8a4fae4d6bf2e09eb1e06f966ece85805626e01ba5fab48072b94f19b835449e58b6d26720ee19a58298add
languageName: node
linkType: hard
"@parcel/watcher-android-arm64@npm:2.5.1":
version: 2.5.1
resolution: "@parcel/watcher-android-arm64@npm:2.5.1"
@@ -4394,6 +4515,13 @@ __metadata:
languageName: node
linkType: hard
"@types/diff-match-patch@npm:^1.0.36":
version: 1.0.36
resolution: "@types/diff-match-patch@npm:1.0.36"
checksum: 10c0/0bad011ab138baa8bde94e7815064bb881f010452463272644ddbbb0590659cb93f7aa2776ff442c6721d70f202839e1053f8aa62d801cc4166f7a3ea9130055
languageName: node
linkType: hard
"@types/diff@npm:^7":
version: 7.0.2
resolution: "@types/diff@npm:7.0.2"
@@ -5573,6 +5701,10 @@ __metadata:
"@agentic/exa": "npm:^7.3.3"
"@agentic/searxng": "npm:^7.3.3"
"@agentic/tavily": "npm:^7.3.3"
"@ai-sdk/anthropic": "npm:^1.2.12"
"@ai-sdk/google": "npm:^1.2.19"
"@ai-sdk/openai": "npm:^1.3.22"
"@ai-sdk/xai": "npm:^1.2.16"
"@ant-design/v5-patch-for-react-19": "npm:^1.0.3"
"@anthropic-ai/sdk": "npm:^0.41.0"
"@cherrystudio/embedjs": "npm:^0.1.31"
@@ -5637,6 +5769,7 @@ __metadata:
"@vitest/ui": "npm:^3.1.4"
"@vitest/web-worker": "npm:^3.1.4"
"@xyflow/react": "npm:^12.4.4"
ai: "npm:^4.3.16"
antd: "npm:^5.22.5"
archiver: "npm:^7.0.1"
async-mutex: "npm:^0.5.0"
@@ -5824,6 +5957,26 @@ __metadata:
languageName: node
linkType: hard
"ai@npm:^4.3.16":
version: 4.3.16
resolution: "ai@npm:4.3.16"
dependencies:
"@ai-sdk/provider": "npm:1.1.3"
"@ai-sdk/provider-utils": "npm:2.2.8"
"@ai-sdk/react": "npm:1.2.12"
"@ai-sdk/ui-utils": "npm:1.2.11"
"@opentelemetry/api": "npm:1.9.0"
jsondiffpatch: "npm:0.6.0"
peerDependencies:
react: ^18 || ^19 || ^19.0.0-rc
zod: ^3.23.8
peerDependenciesMeta:
react:
optional: true
checksum: 10c0/befe761c9386cda6de33370a2590900352b444d81959255c624e2bfd40765f126d29269f0ef3e00bde07daf237004aa0b66d0b253664aa478c148e923ce78c41
languageName: node
linkType: hard
"ajv-formats@npm:^2.1.1":
version: 2.1.1
resolution: "ajv-formats@npm:2.1.1"
@@ -6736,7 +6889,7 @@ __metadata:
languageName: node
linkType: hard
- "chalk@npm:^5.4.1":
+ "chalk@npm:^5.3.0, chalk@npm:^5.4.1":
version: 5.4.1
resolution: "chalk@npm:5.4.1"
checksum: 10c0/b23e88132c702f4855ca6d25cb5538b1114343e41472d5263ee8a37cccfccd9c4216d111e1097c6a27830407a1dc81fecdf2a56f2c63033d4dbbd88c10b0dcef
@@ -8169,6 +8322,13 @@ __metadata:
languageName: node
linkType: hard
"diff-match-patch@npm:^1.0.5":
version: 1.0.5
resolution: "diff-match-patch@npm:1.0.5"
checksum: 10c0/142b6fad627b9ef309d11bd935e82b84c814165a02500f046e2773f4ea894d10ed3017ac20454900d79d4a0322079f5b713cf0986aaf15fce0ec4a2479980c86
languageName: node
linkType: hard
"diff@npm:^7.0.0":
version: 7.0.0
resolution: "diff@npm:7.0.0"
@@ -11458,6 +11618,13 @@ __metadata:
languageName: node
linkType: hard
"json-schema@npm:^0.4.0":
version: 0.4.0
resolution: "json-schema@npm:0.4.0"
checksum: 10c0/d4a637ec1d83544857c1c163232f3da46912e971d5bf054ba44fdb88f07d8d359a462b4aec46f2745efbc57053365608d88bc1d7b1729f7b4fc3369765639ed3
languageName: node
linkType: hard
"json-stable-stringify-without-jsonify@npm:^1.0.1":
version: 1.0.1
resolution: "json-stable-stringify-without-jsonify@npm:1.0.1"
@@ -11490,6 +11657,19 @@ __metadata:
languageName: node
linkType: hard
"jsondiffpatch@npm:0.6.0":
version: 0.6.0
resolution: "jsondiffpatch@npm:0.6.0"
dependencies:
"@types/diff-match-patch": "npm:^1.0.36"
chalk: "npm:^5.3.0"
diff-match-patch: "npm:^1.0.5"
bin:
jsondiffpatch: bin/jsondiffpatch.js
checksum: 10c0/f7822e48a8ef8b9f7c6024cc59b7d3707a9fe6d84fd776d169de5a1803ad551ffe7cfdc7587f3900f224bc70897355884ed43eb1c8ccd02e7f7b43a7ebcfed4f
languageName: node
linkType: hard
"jsonfile@npm:^4.0.0":
version: 4.0.0
resolution: "jsonfile@npm:4.0.0"
@@ -16438,6 +16618,13 @@ __metadata:
languageName: node
linkType: hard
"secure-json-parse@npm:^2.7.0":
version: 2.7.0
resolution: "secure-json-parse@npm:2.7.0"
checksum: 10c0/f57eb6a44a38a3eeaf3548228585d769d788f59007454214fab9ed7f01fbf2e0f1929111da6db28cf0bcc1a2e89db5219a59e83eeaec3a54e413a0197ce879e4
languageName: node
linkType: hard
"seek-bzip@npm:^1.0.5":
version: 1.0.6
resolution: "seek-bzip@npm:1.0.6"
@@ -17214,6 +17401,18 @@ __metadata:
languageName: node
linkType: hard
"swr@npm:^2.2.5":
version: 2.3.3
resolution: "swr@npm:2.3.3"
dependencies:
dequal: "npm:^2.0.3"
use-sync-external-store: "npm:^1.4.0"
peerDependencies:
react: ^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
checksum: 10c0/882fc8291912860e0c50eae3470ebf0cd58b0144cb12adcc4b14c5cef913ea06479043830508d8b0b3d4061d99ad8dd52485c9c879fbd4e9b893484e6d8da9e3
languageName: node
linkType: hard
"symbol-tree@npm:^3.2.4":
version: 3.2.4
resolution: "symbol-tree@npm:3.2.4"
@@ -17361,6 +17560,13 @@ __metadata:
languageName: node
linkType: hard
"throttleit@npm:2.1.0":
version: 2.1.0
resolution: "throttleit@npm:2.1.0"
checksum: 10c0/1696ae849522cea6ba4f4f3beac1f6655d335e51b42d99215e196a718adced0069e48deaaf77f7e89f526ab31de5b5c91016027da182438e6f9280be2f3d5265
languageName: node
linkType: hard
"through2@npm:4.0.2":
version: 4.0.2
resolution: "through2@npm:4.0.2"