From 6d94523f4db4c16289dfc720383431c931cba0d7 Mon Sep 17 00:00:00 2001 From: "retrox.jcy" Date: Fri, 7 Feb 2025 17:38:49 +0800 Subject: [PATCH] feat: support llm tool call streaming and ui, more mcp tools --- .gitignore | 3 +- packages/ai-native/package.json | 7 +- .../src/browser/ai-core.contribution.ts | 15 +++ .../ai-native/src/browser/chat/chat-model.ts | 9 +- .../src/browser/chat/chat-proxy.service.ts | 25 +++- .../src/browser/components/ChatReply.tsx | 4 +- .../components/ChatToolRender.module.less | 86 +++++++++++++ .../src/browser/components/ChatToolRender.tsx | 86 ++++++++++--- packages/ai-native/src/browser/index.ts | 2 + .../mcp/tools/replaceOpenEditorFile.ts | 80 ++++++++++++ .../replaceOpenEditorFileByDiffPreviewer.ts | 91 ++++++++++++++ .../src/browser/preferences/schema.ts | 10 ++ .../anthropic/anthropic-language-model.ts | 93 ++------------ .../ai-native/src/node/base-language-model.ts | 118 ++++++++++++++++++ .../node/deepseek/deepseek-language-model.ts | 25 ++++ .../ai-native/src/node/mcp/sumi-mcp-server.ts | 2 +- .../core-common/src/settings/ai-native.ts | 6 + .../core-common/src/types/ai-native/index.ts | 7 +- packages/i18n/src/common/en-US.lang.ts | 7 ++ packages/i18n/src/common/zh-CN.lang.ts | 7 ++ .../ai-native/ai.back.service.ts | 14 +-- yarn.lock | 110 +++++++++------- 22 files changed, 638 insertions(+), 169 deletions(-) create mode 100644 packages/ai-native/src/browser/components/ChatToolRender.module.less create mode 100644 packages/ai-native/src/browser/mcp/tools/replaceOpenEditorFile.ts create mode 100644 packages/ai-native/src/browser/mcp/tools/replaceOpenEditorFileByDiffPreviewer.ts create mode 100644 packages/ai-native/src/node/base-language-model.ts create mode 100644 packages/ai-native/src/node/deepseek/deepseek-language-model.ts diff --git a/.gitignore b/.gitignore index fad568c715..26c7cf18ce 100644 --- a/.gitignore +++ b/.gitignore @@ -98,4 +98,5 @@ tools/workspace # jupyter .ipynb_checkpoints -*.tsbuildinfo \ No newline at end of file +*.tsbuildinfo +.env \ No newline at end of file diff --git a/packages/ai-native/package.json b/packages/ai-native/package.json index aedfa22674..ea7f97a461 100644 --- a/packages/ai-native/package.json +++ b/packages/ai-native/package.json @@ -19,8 +19,9 @@ "url": "git@github.com:opensumi/core.git" }, "dependencies": { - "@ai-sdk/anthropic": "^1.0.9", - "@anthropic-ai/sdk": "^0.32.1", + "@ai-sdk/anthropic": "^1.1.6", + "@ai-sdk/deepseek": "^0.1.8", + "@anthropic-ai/sdk": "^0.36.3", "@modelcontextprotocol/sdk": "^1.3.1", "@opensumi/ide-components": "workspace:*", "@opensumi/ide-core-common": "workspace:*", @@ -41,7 +42,7 @@ "@opensumi/ide-utils": "workspace:*", "@opensumi/ide-workspace": "workspace:*", "@xterm/xterm": "5.5.0", - "ai": "^4.0.38", + "ai": "^4.1.21", "ansi-regex": "^2.0.0", "dom-align": "^1.7.0", "openai": "^4.55.7", diff --git a/packages/ai-native/src/browser/ai-core.contribution.ts b/packages/ai-native/src/browser/ai-core.contribution.ts index 00fd34531a..8f9a177faa 100644 --- a/packages/ai-native/src/browser/ai-core.contribution.ts +++ b/packages/ai-native/src/browser/ai-core.contribution.ts @@ -393,6 +393,21 @@ export class AINativeBrowserContribution }, ], }); + + // Register language model API key settings + registry.registerSettingSection(AI_NATIVE_SETTING_GROUP_ID, { + title: localize('preference.ai.native.apiKeys.title'), + preferences: [ + { + id: AINativeSettingSectionsId.DeepseekApiKey, + localized: 'preference.ai.native.deepseek.apiKey', + }, + { + id: AINativeSettingSectionsId.AnthropicApiKey, 
+ localized: 'preference.ai.native.anthropic.apiKey', + }, + ], + }); } if (this.aiNativeConfigService.capabilities.supportsInlineChat) { diff --git a/packages/ai-native/src/browser/chat/chat-model.ts b/packages/ai-native/src/browser/chat/chat-model.ts index c52119090f..f5e5675e62 100644 --- a/packages/ai-native/src/browser/chat/chat-model.ts +++ b/packages/ai-native/src/browser/chat/chat-model.ts @@ -125,15 +125,14 @@ export class ChatResponseModel extends Disposable { this.#responseParts.push(progress); this.#updateResponseText(quiet); } else if (progress.kind === 'toolCall') { - // @ts-ignore - const find: IChatToolContent | undefined = this.#responseParts.find((item) => item.kind === 'toolCall' && (item.content.id === progress.content.id || item.content.index === progress.content.index)); + const find = this.#responseParts.find((item) => item.kind === 'toolCall' && (item.content.id === progress.content.id)); if (find) { - find.content.function.arguments = find.content.function.arguments + progress.content.function.arguments; - this.#responseParts[responsePartLength] = find; + // @ts-ignore + find.content = progress.content; + // this.#responseParts[responsePartLength] = find; } else { this.#responseParts.push(progress); } - console.log("🚀 ~ ChatResponseModel ~ updateContent ~ this.#responseParts:", this.#responseParts) this.#updateResponseText(quiet); } } diff --git a/packages/ai-native/src/browser/chat/chat-proxy.service.ts b/packages/ai-native/src/browser/chat/chat-proxy.service.ts index 26f3920799..77eb5f3f14 100644 --- a/packages/ai-native/src/browser/chat/chat-proxy.service.ts +++ b/packages/ai-native/src/browser/chat/chat-proxy.service.ts @@ -1,16 +1,19 @@ import { Autowired, Injectable } from '@opensumi/di'; +import { PreferenceService } from '@opensumi/ide-core-browser'; import { AIBackSerivcePath, CancellationToken, + ChatAgentViewServiceToken, ChatFeatureRegistryToken, ChatServiceToken, Deferred, Disposable, IAIBackService, IAIReporter, + IApplicationService, IChatProgress, - uuid, -} from '@opensumi/ide-core-common'; + uuid } from '@opensumi/ide-core-common'; +import { AINativeSettingSectionsId } from '@opensumi/ide-core-common/lib/settings/ai-native'; import { IChatMessage } from '@opensumi/ide-core-common/lib/types/ai-native'; import { MonacoCommandRegistry } from '@opensumi/ide-editor/lib/browser/monaco-contrib/command/command.service'; import { listenReadable } from '@opensumi/ide-utils/lib/stream'; @@ -22,12 +25,12 @@ import { IChatAgentService, IChatAgentWelcomeMessage, } from '../../common'; +import { ChatToolRender } from '../components/ChatToolRender'; +import { IChatAgentViewService } from '../types'; import { ChatService } from './chat.api.service'; import { ChatFeatureRegistry } from './chat.feature.registry'; -import { ChatAgentViewServiceToken } from '@opensumi/ide-core-common'; -import { IChatAgentViewService } from '../types'; -import { ChatToolRender } from '../components/ChatToolRender'; + /** * @internal @@ -58,6 +61,12 @@ export class ChatProxyService extends Disposable { @Autowired(ChatAgentViewServiceToken) private readonly chatAgentViewService: IChatAgentViewService; + @Autowired(PreferenceService) + private readonly preferenceService: PreferenceService; + + @Autowired(IApplicationService) + private readonly applicationService: IApplicationService; + private chatDeferred: Deferred = new Deferred(); public registerDefaultAgent() { @@ -91,12 +100,18 @@ export class ChatProxyService extends Disposable { } } + const model = 'claude-3-5-sonnet'; // TODO 
从配置中获取 + const apiKey = this.preferenceService.get(AINativeSettingSectionsId.AnthropicApiKey); + const stream = await this.aiBackService.requestStream( prompt, { requestId: request.requestId, sessionId: request.sessionId, history: this.aiChatService.getHistoryMessages(), + clientId: this.applicationService.clientId, + apiKey, + model, }, token, ); diff --git a/packages/ai-native/src/browser/components/ChatReply.tsx b/packages/ai-native/src/browser/components/ChatReply.tsx index e59f858d73..eec809f5c3 100644 --- a/packages/ai-native/src/browser/components/ChatReply.tsx +++ b/packages/ai-native/src/browser/components/ChatReply.tsx @@ -150,7 +150,6 @@ const TreeRenderer = (props: { treeData: IChatResponseProgressFileTreeData }) => }; const ToolCallRender = (props: { toolCall: IChatToolContent['content'] }) => { - console.log("🚀 ~ ToolCallRender ~ props:", props) const { toolCall } = props; const chatAgentViewService = useInjectable(ChatAgentViewServiceToken); const [node, setNode] = useState(null); @@ -172,7 +171,7 @@ const ToolCallRender = (props: { toolCall: IChatToolContent['content'] }) => { deferred.promise.then(({ component: Component, initialProps }) => { setNode(); }); - }, [toolCall]); + }, [toolCall.state]); return node; }; @@ -231,7 +230,6 @@ export const ChatReply = (props: IChatReplyProps) => { disposableCollection.push( request.response.onDidChange(() => { - console.log("🚀 ~ request.response.onDidChange ~ onDidChange:", 'onDidChange') history.updateAssistantMessage(msgId, { content: request.response.responseText }); if (request.response.isComplete) { diff --git a/packages/ai-native/src/browser/components/ChatToolRender.module.less b/packages/ai-native/src/browser/components/ChatToolRender.module.less new file mode 100644 index 0000000000..90ffea9e95 --- /dev/null +++ b/packages/ai-native/src/browser/components/ChatToolRender.module.less @@ -0,0 +1,86 @@ +.chat-tool-render { + margin: 8px 0; + border: 1px solid #363636; + border-radius: 6px; + overflow: hidden; + + .tool-header { + display: flex; + align-items: center; + justify-content: space-between; + padding: 8px 12px; + background-color: #2D2D2D; + cursor: pointer; + user-select: none; + + &:hover { + background-color: #363636; + } + } + + .tool-name { + display: flex; + align-items: center; + font-weight: 500; + color: #CCCCCC; + } + + .expand-icon { + display: inline-block; + margin-right: 8px; + transition: transform 0.2s; + color: #888888; + + &.expanded { + transform: rotate(90deg); + } + } + + .tool-state { + display: flex; + align-items: center; + font-size: 12px; + color: #888888; + } + + .state-icon { + display: flex; + align-items: center; + margin-right: 6px; + } + + .loading-icon { + width: 12px; + height: 12px; + } + + .state-label { + margin-left: 4px; + } + + .tool-content { + max-height: 0; + overflow: hidden; + transition: max-height 0.3s ease-out; + background-color: #1E1E1E; + + &.expanded { + max-height: 1000px; + } + } + + .tool-arguments, + .tool-result { + padding: 12px; + } + + .section-label { + font-size: 12px; + color: #888888; + margin-bottom: 8px; + } + + .tool-result { + border-top: 1px solid #363636; + } +} \ No newline at end of file diff --git a/packages/ai-native/src/browser/components/ChatToolRender.tsx b/packages/ai-native/src/browser/components/ChatToolRender.tsx index 06df8fa33c..fd501c285e 100644 --- a/packages/ai-native/src/browser/components/ChatToolRender.tsx +++ b/packages/ai-native/src/browser/components/ChatToolRender.tsx @@ -1,29 +1,77 @@ -import React from 'react'; +import cls 
from 'classnames';
+import React, { useState } from 'react';
+
+import { Icon } from '@opensumi/ide-core-browser/lib/components';
+import { Loading } from '@opensumi/ide-core-browser/lib/components/ai-native';
 import { IChatToolContent, uuid } from '@opensumi/ide-core-common';
 
 import { CodeEditorWithHighlight } from './ChatEditor';
+import styles from './ChatToolRender.module.less';
 
 export const ChatToolRender = (props: { value: IChatToolContent['content'] }) => {
-  const { value } = props;
-  console.log('🚀 ~ ChatToolRender ~ toolCall:', value);
+  const { value } = props;
+  const [isExpanded, setIsExpanded] = useState(false);
 
-  if (!value || !value.function || !value.id) {
-    return null;
-  }
+  if (!value || !value.function || !value.id) {
+    return null;
+  }
+
+  const getStateInfo = (state?: string): { label: string; icon: React.ReactNode } => {
+    switch (state) {
+      case 'streaming-start':
+      case 'streaming':
+        return { label: 'Generating', icon: <Loading className={styles['loading-icon']} /> };
+      case 'complete':
+        return { label: 'Complete', icon: <Icon icon={'check'} /> };
+      case 'result':
+        return { label: 'Result Ready', icon: <Icon icon={'check'} /> };
+      default:
+        return { label: state || 'Unknown', icon: null };
+    }
+  };
+
+  const toggleExpand = () => {
+    setIsExpanded(!isExpanded);
+  };
+
+  const stateInfo = getStateInfo(value.state);
 
-    return <div>
-        <span>
-            Using Tool:
-        </span>
-        <span>{value?.function?.name}</span>
-        <div>
-            {
-                value?.function?.arguments &&
-                (<CodeEditorWithHighlight input={value?.function?.arguments} language={'json'} relationId={uuid()} />)
-            }
-        </div>
-    </div>;
+  return (
+    <div className={styles['chat-tool-render']}>
+      <div className={styles['tool-header']} onClick={toggleExpand}>
+        <div className={styles['tool-name']}>
+          <span className={cls(styles['expand-icon'], { [styles.expanded]: isExpanded })}>▶</span>
+          {value?.function?.name}
+        </div>
+        {value.state && (
+          <div className={styles['tool-state']}>
+            <span className={styles['state-icon']}>{stateInfo.icon}</span>
+            <span className={styles['state-label']}>{stateInfo.label}</span>
+          </div>
+        )}
+      </div>
+      <div className={cls(styles['tool-content'], { [styles.expanded]: isExpanded })}>
+        {value?.function?.arguments && (
+          <div className={styles['tool-arguments']}>
+            <div className={styles['section-label']}>Arguments</div>
+            <CodeEditorWithHighlight input={value.function.arguments} language={'json'} relationId={uuid()} />
+          </div>
+        )}
+        {value?.result && (
+          <div className={styles['tool-result']}>
+            <div className={styles['section-label']}>Result</div>
+            <CodeEditorWithHighlight input={value.result} language={'json'} relationId={uuid()} />
+          </div>
+        )}
+      </div>
+    </div>
+ ); }; diff --git a/packages/ai-native/src/browser/index.ts b/packages/ai-native/src/browser/index.ts index dc03650619..b16d16342d 100644 --- a/packages/ai-native/src/browser/index.ts +++ b/packages/ai-native/src/browser/index.ts @@ -61,6 +61,7 @@ import { GetFileTextByPathTool } from './mcp/tools/getFileTextByPath'; import { GetOpenEditorFileDiagnosticsTool } from './mcp/tools/getOpenEditorFileDiagnostics'; import { GetOpenEditorFileTextTool } from './mcp/tools/getOpenEditorFileText'; import { GetSelectedTextTool } from './mcp/tools/getSelectedText'; +import { ReplaceOpenEditorFileByDiffPreviewerTool } from './mcp/tools/replaceOpenEditorFileByDiffPreviewer'; import { AINativePreferencesContribution } from './preferences'; import { AINativeCoreContribution, MCPServerContribution, TokenMCPServerRegistry } from './types'; import { InlineChatFeatureRegistry } from './widget/inline-chat/inline-chat.feature.registry'; @@ -96,6 +97,7 @@ export class AINativeModule extends BrowserModule { GetCurrentFilePathTool, FindFilesByNameSubstringTool, GetDiagnosticsByPathTool, + ReplaceOpenEditorFileByDiffPreviewerTool, // MCP Server Contributions END { diff --git a/packages/ai-native/src/browser/mcp/tools/replaceOpenEditorFile.ts b/packages/ai-native/src/browser/mcp/tools/replaceOpenEditorFile.ts new file mode 100644 index 0000000000..ce00b85f65 --- /dev/null +++ b/packages/ai-native/src/browser/mcp/tools/replaceOpenEditorFile.ts @@ -0,0 +1,80 @@ +import { z } from 'zod'; +import { zodToJsonSchema } from 'zod-to-json-schema'; + +import { Autowired, Injectable } from '@opensumi/di'; +import { Domain } from '@opensumi/ide-core-common'; +import { WorkbenchEditorService } from '@opensumi/ide-editor'; + +import { IMCPServerRegistry, MCPLogger, MCPServerContribution, MCPToolDefinition } from '../../types'; + +const inputSchema = z.object({ + text: z.string().describe('The new content to replace the entire file with'), +}); + +@Domain(MCPServerContribution) +export class ReplaceOpenEditorFileTool implements MCPServerContribution { + @Autowired(WorkbenchEditorService) + private readonly editorService: WorkbenchEditorService; + + registerMCPServer(registry: IMCPServerRegistry): void { + registry.registerMCPTool(this.getToolDefinition()); + } + + getToolDefinition(): MCPToolDefinition { + return { + name: 'replace_open_in_editor_file_text', + description: + 'Replaces the entire content of the currently active file in the IDE editor with specified new text. ' + + 'Use this tool when you need to completely overwrite the current file\'s content. ' + + 'Requires a text parameter containing the new content. 
' + + 'Returns one of three possible responses: ' + + '"ok" if the file content was successfully replaced, ' + + '"no file open" if no editor is active, ' + + '"unknown error" if the operation fails.', + inputSchema: zodToJsonSchema(inputSchema), + handler: this.handler.bind(this), + }; + } + + private async handler(args: z.infer, logger: MCPLogger) { + try { + const editor = this.editorService.currentEditor; + if (!editor || !editor.monacoEditor) { + logger.appendLine('Error: No active text editor found'); + return { + content: [{ type: 'text', text: 'no file open' }], + isError: true, + }; + } + + // Get the model and its full range + const model = editor.monacoEditor.getModel(); + if (!model) { + logger.appendLine('Error: No model found for current editor'); + return { + content: [{ type: 'text', text: 'unknown error' }], + isError: true, + }; + } + + const fullRange = model.getFullModelRange(); + + // Execute the replacement + editor.monacoEditor.executeEdits('mcp.tool.replace-file', [{ + range: fullRange, + text: args.text, + }]); + + logger.appendLine('Successfully replaced file content'); + return { + content: [{ type: 'text', text: 'ok' }], + }; + } catch (error) { + logger.appendLine(`Error during file content replacement: ${error}`); + return { + content: [{ type: 'text', text: 'unknown error' }], + isError: true, + }; + } + } +} diff --git a/packages/ai-native/src/browser/mcp/tools/replaceOpenEditorFileByDiffPreviewer.ts b/packages/ai-native/src/browser/mcp/tools/replaceOpenEditorFileByDiffPreviewer.ts new file mode 100644 index 0000000000..b9e96dabda --- /dev/null +++ b/packages/ai-native/src/browser/mcp/tools/replaceOpenEditorFileByDiffPreviewer.ts @@ -0,0 +1,91 @@ +import { z } from 'zod'; +import { zodToJsonSchema } from 'zod-to-json-schema'; + +import { Autowired, Injectable } from '@opensumi/di'; +import { Domain } from '@opensumi/ide-core-common'; +import { WorkbenchEditorService } from '@opensumi/ide-editor'; +import { Selection, SelectionDirection } from '@opensumi/monaco-editor-core/esm/vs/editor/common/core/selection'; + +import { IMCPServerRegistry, MCPLogger, MCPServerContribution, MCPToolDefinition } from '../../types'; +import { LiveInlineDiffPreviewer } from '../../widget/inline-diff/inline-diff-previewer'; +import { InlineDiffController } from '../../widget/inline-diff/inline-diff.controller'; + +const inputSchema = z.object({ + text: z.string().describe('The new content to replace the entire file with'), +}); + +@Domain(MCPServerContribution) +export class ReplaceOpenEditorFileByDiffPreviewerTool implements MCPServerContribution { + @Autowired(WorkbenchEditorService) + private readonly editorService: WorkbenchEditorService; + + registerMCPServer(registry: IMCPServerRegistry): void { + registry.registerMCPTool(this.getToolDefinition()); + } + + getToolDefinition(): MCPToolDefinition { + return { + name: 'replace_open_in_editor_file_text', + description: + 'Replaces the entire content of the currently active file in the IDE editor with specified new text using diff previewer. ' + + 'Use this tool when you need to completely overwrite the current file\'s content with diff preview. ' + + 'Requires a text parameter containing the new content. 
' + + 'Returns one of three possible responses: ' + + '"ok" if the file content was successfully replaced, ' + + '"no file open" if no editor is active, ' + + '"unknown error" if the operation fails.', + inputSchema: zodToJsonSchema(inputSchema), + handler: this.handler.bind(this), + }; + } + + private async handler(args: z.infer, logger: MCPLogger) { + try { + const editor = this.editorService.currentEditor; + if (!editor || !editor.monacoEditor) { + logger.appendLine('Error: No active text editor found'); + return { + content: [{ type: 'text', text: 'no file open' }], + isError: true, + }; + } + + // Get the model and its full range + const model = editor.monacoEditor.getModel(); + if (!model) { + logger.appendLine('Error: No model found for current editor'); + return { + content: [{ type: 'text', text: 'unknown error' }], + isError: true, + }; + } + + const fullRange = model.getFullModelRange(); + const inlineDiffHandler = InlineDiffController.get(editor.monacoEditor)!; + + // Create diff previewer + const previewer = inlineDiffHandler.createDiffPreviewer( + editor.monacoEditor, + Selection.fromRange(fullRange, SelectionDirection.LTR), + { + disposeWhenEditorClosed: false, + renderRemovedWidgetImmediately: true, + }, + ) as LiveInlineDiffPreviewer; + + // Set the new content + previewer.setValue(args.text); + + logger.appendLine('Successfully created diff preview with new content'); + return { + content: [{ type: 'text', text: 'ok' }], + }; + } catch (error) { + logger.appendLine(`Error during file content replacement: ${error}`); + return { + content: [{ type: 'text', text: 'unknown error' }], + isError: true, + }; + } + } +} \ No newline at end of file diff --git a/packages/ai-native/src/browser/preferences/schema.ts b/packages/ai-native/src/browser/preferences/schema.ts index d41812f9ee..48e7b045b0 100644 --- a/packages/ai-native/src/browser/preferences/schema.ts +++ b/packages/ai-native/src/browser/preferences/schema.ts @@ -58,5 +58,15 @@ export const aiNativePreferenceSchema: PreferenceSchema = { type: 'boolean', default: false, }, + [AINativeSettingSectionsId.DeepseekApiKey]: { + type: 'string', + default: '', + description: localize('preference.ai.native.deepseek.apiKey.description'), + }, + [AINativeSettingSectionsId.AnthropicApiKey]: { + type: 'string', + default: '', + description: localize('preference.ai.native.anthropic.apiKey.description'), + }, }, }; diff --git a/packages/ai-native/src/node/anthropic/anthropic-language-model.ts b/packages/ai-native/src/node/anthropic/anthropic-language-model.ts index f520e4bcc4..dce9051c8d 100644 --- a/packages/ai-native/src/node/anthropic/anthropic-language-model.ts +++ b/packages/ai-native/src/node/anthropic/anthropic-language-model.ts @@ -1,96 +1,25 @@ import { AnthropicProvider, createAnthropic } from '@ai-sdk/anthropic'; -import { jsonSchema, streamText, tool } from 'ai'; -import { Autowired, Injectable } from '@opensumi/di'; -import { ChatReadableStream } from '@opensumi/ide-core-node'; -import { CancellationToken } from '@opensumi/ide-utils'; - -import { ToolInvocationRegistry, ToolInvocationRegistryImpl, ToolRequest } from '../../common/tool-invocation-registry'; +import { Injectable } from '@opensumi/di'; +import { IAIBackServiceOption } from '@opensumi/ide-core-common'; +import { AINativeSettingSectionsId } from '@opensumi/ide-core-common/lib/settings/ai-native'; +import { BaseLanguageModel } from '../base-language-model'; export const AnthropicModelIdentifier = Symbol('AnthropicModelIdentifier'); -const apiKey = ''; - 
@Injectable() -export class AnthropicModel { - @Autowired(ToolInvocationRegistry) - private readonly toolInvocationRegistry: ToolInvocationRegistryImpl; - - protected initializeAnthropicProvider() { +export class AnthropicModel extends BaseLanguageModel { + protected initializeProvider(options: IAIBackServiceOption): AnthropicProvider { + const apiKey = options.apiKey; if (!apiKey) { - throw new Error('Please provide ANTHROPIC_API_KEY in preferences or via environment variable'); + throw new Error(`Please provide Anthropic API Key in preferences (${AINativeSettingSectionsId.AnthropicApiKey})`); } - const anthropic = createAnthropic({ apiKey }); - - return anthropic; - } - - async request(request: string, chatReadableStream: ChatReadableStream, cancellationToken?: CancellationToken): Promise { - const anthropic = this.initializeAnthropicProvider(); - const allFunctions = this.toolInvocationRegistry.getAllFunctions(); - return this.handleStreamingRequest(anthropic, request, allFunctions, chatReadableStream, cancellationToken); - } - - private convertToolRequestToAITool(toolRequest: ToolRequest) { - return tool({ - description: toolRequest.description || '', - // TODO 这里应该是 z.object 而不是 JSON Schema - parameters: jsonSchema(toolRequest.parameters), - execute: async (args: any) => await toolRequest.handler(JSON.stringify(args)), - }); + return createAnthropic({ apiKey }); } - protected async handleStreamingRequest( - anthropic: AnthropicProvider, - request: string, - tools: ToolRequest[], - chatReadableStream: ChatReadableStream, - cancellationToken?: CancellationToken, - ): Promise { - - try { - const aiTools = Object.fromEntries( - tools.map((tool) => [tool.name, this.convertToolRequestToAITool(tool)]), - ); - - const abortController = new AbortController(); - if (cancellationToken) { - cancellationToken.onCancellationRequested(() => { - abortController.abort(); - }); - } - - const stream = await streamText({ - model: anthropic('claude-3-5-sonnet-20241022'), - maxTokens: 4096, - tools: aiTools, - messages: [{ role: 'user', content: request }], - abortSignal: abortController.signal, - maxSteps: 5, - }); - - for await (const chunk of stream.fullStream) { - console.log(chunk); - if (chunk.type === 'text-delta') { - chatReadableStream.emitData({ kind: 'content', content: chunk.textDelta }); - } else if (chunk.type === 'tool-call') { - chatReadableStream.emitData({ kind: 'toolCall', content: { - id: chunk.toolCallId || Date.now().toString(), - type: 'function', - function: { name: chunk.toolName, arguments: JSON.stringify(chunk.args) }, - }}); - } - } - - chatReadableStream.end(); - } catch (error) { - console.error('Error during streaming:', error); - chatReadableStream.emitError(error); - } - - return chatReadableStream; + protected getModelIdentifier(provider: AnthropicProvider) { + return provider('claude-3-5-sonnet-20241022'); } - } diff --git a/packages/ai-native/src/node/base-language-model.ts b/packages/ai-native/src/node/base-language-model.ts new file mode 100644 index 0000000000..8a463317f7 --- /dev/null +++ b/packages/ai-native/src/node/base-language-model.ts @@ -0,0 +1,118 @@ +import { jsonSchema, streamText, tool } from 'ai'; + +import { Autowired, Injectable } from '@opensumi/di'; +import { IAIBackServiceOption } from '@opensumi/ide-core-common'; +import { ChatReadableStream } from '@opensumi/ide-core-node'; +import { CancellationToken } from '@opensumi/ide-utils'; + +import { ToolInvocationRegistry, ToolInvocationRegistryImpl, ToolRequest } from 
'../common/tool-invocation-registry'; + +@Injectable() +export abstract class BaseLanguageModel { + @Autowired(ToolInvocationRegistry) + protected readonly toolInvocationRegistry: ToolInvocationRegistryImpl; + + protected abstract initializeProvider(options: IAIBackServiceOption): any; + + async request(request: string, chatReadableStream: ChatReadableStream, options: IAIBackServiceOption, cancellationToken?: CancellationToken): Promise { + const provider = this.initializeProvider(options); + const allFunctions = this.toolInvocationRegistry.getAllFunctions(); + return this.handleStreamingRequest(provider, request, allFunctions, chatReadableStream, cancellationToken); + } + + private convertToolRequestToAITool(toolRequest: ToolRequest) { + return tool({ + description: toolRequest.description || '', + // TODO 这里应该是 z.object 而不是 JSON Schema + parameters: jsonSchema(toolRequest.parameters), + execute: async (args: any) => await toolRequest.handler(JSON.stringify(args)), + }); + } + + protected abstract getModelIdentifier(provider: any): any; + + protected async handleStreamingRequest( + provider: any, + request: string, + tools: ToolRequest[], + chatReadableStream: ChatReadableStream, + cancellationToken?: CancellationToken, + ): Promise { + try { + const aiTools = Object.fromEntries( + tools.map((tool) => [tool.name, this.convertToolRequestToAITool(tool)]), + ); + + const abortController = new AbortController(); + if (cancellationToken) { + cancellationToken.onCancellationRequested(() => { + abortController.abort(); + }); + } + + const stream = await streamText({ + model: this.getModelIdentifier(provider), + maxTokens: 4096, + tools: aiTools, + messages: [{ role: 'user', content: request }], + abortSignal: abortController.signal, + experimental_toolCallStreaming: true, + maxSteps: 5, + }); + + for await (const chunk of stream.fullStream) { + if (chunk.type === 'text-delta') { + chatReadableStream.emitData({ kind: 'content', content: chunk.textDelta }); + } else if (chunk.type === 'tool-call') { + chatReadableStream.emitData({ + kind: 'toolCall', + content: { + id: chunk.toolCallId || Date.now().toString(), + type: 'function', + function: { name: chunk.toolName, arguments: JSON.stringify(chunk.args) }, + state: 'complete', + }, + }); + } else if (chunk.type === 'tool-call-streaming-start') { + chatReadableStream.emitData({ + kind: 'toolCall', + content: { + id: chunk.toolCallId, + type: 'function', + function: { name: chunk.toolName }, + state: 'streaming-start', + }, + }); + } else if (chunk.type === 'tool-call-delta') { + chatReadableStream.emitData({ + kind: 'toolCall', + content: { + id: chunk.toolCallId, + type: 'function', + function: { name: chunk.toolName, arguments: chunk.argsTextDelta }, + state: 'streaming', + }, + }); + } else if (chunk.type === 'tool-result') { + chatReadableStream.emitData({ + kind: 'toolCall', + content: { + id: chunk.toolCallId, + type: 'function', + function: { name: chunk.toolName, arguments: JSON.stringify(chunk.args) }, + result: chunk.result, + state: 'result', + }, + }); + } + } + + chatReadableStream.end(); + } catch (error) { + // Use a logger service in production instead of console + chatReadableStream.emitError(error); + } + + return chatReadableStream; + } +} diff --git a/packages/ai-native/src/node/deepseek/deepseek-language-model.ts b/packages/ai-native/src/node/deepseek/deepseek-language-model.ts new file mode 100644 index 0000000000..c3aa009cae --- /dev/null +++ b/packages/ai-native/src/node/deepseek/deepseek-language-model.ts @@ -0,0 +1,25 @@ 
+import { DeepSeekProvider, createDeepSeek } from '@ai-sdk/deepseek'; + +import { Injectable } from '@opensumi/di'; +import { IAIBackServiceOption } from '@opensumi/ide-core-common'; +import { AINativeSettingSectionsId } from '@opensumi/ide-core-common/lib/settings/ai-native'; + +import { BaseLanguageModel } from '../base-language-model'; + +export const DeepSeekModelIdentifier = Symbol('DeepSeekModelIdentifier'); + +@Injectable() +export class DeepSeekModel extends BaseLanguageModel { + protected initializeProvider(options: IAIBackServiceOption): DeepSeekProvider { + const apiKey = options.apiKey; + if (!apiKey) { + throw new Error(`Please provide Deepseek API Key in preferences (${AINativeSettingSectionsId.DeepseekApiKey})`); + } + + return createDeepSeek({ apiKey }); + } + + protected getModelIdentifier(provider: DeepSeekProvider) { + return provider('deepseek-chat'); + } +} diff --git a/packages/ai-native/src/node/mcp/sumi-mcp-server.ts b/packages/ai-native/src/node/mcp/sumi-mcp-server.ts index fb8c3f4e29..f6619b7852 100644 --- a/packages/ai-native/src/node/mcp/sumi-mcp-server.ts +++ b/packages/ai-native/src/node/mcp/sumi-mcp-server.ts @@ -149,7 +149,7 @@ export class BuiltinMCPServer implements IMCPServer { throw new Error('MCP Server not started'); } const tools = await this.sumiMCPServer.getMCPTools(); - return { tools }; + return { tools } as any; } update(_command: string, _args?: string[], _env?: { [key: string]: string }): void { diff --git a/packages/core-common/src/settings/ai-native.ts b/packages/core-common/src/settings/ai-native.ts index a034cd9f52..bac0f040ee 100644 --- a/packages/core-common/src/settings/ai-native.ts +++ b/packages/core-common/src/settings/ai-native.ts @@ -22,6 +22,12 @@ export enum AINativeSettingSectionsId { */ CodeEditsLintErrors = 'ai.native.codeEdits.lintErrors', CodeEditsLineChange = 'ai.native.codeEdits.lineChange', + + /** + * Language model API keys + */ + DeepseekApiKey = 'ai.native.deepseek.apiKey', + AnthropicApiKey = 'ai.native.anthropic.apiKey', } export const AI_NATIVE_SETTING_GROUP_ID = 'AI-Native'; export const AI_NATIVE_SETTING_GROUP_TITLE = 'AI Native'; diff --git a/packages/core-common/src/types/ai-native/index.ts b/packages/core-common/src/types/ai-native/index.ts index db3377014f..8b7e83c1ed 100644 --- a/packages/core-common/src/types/ai-native/index.ts +++ b/packages/core-common/src/types/ai-native/index.ts @@ -123,6 +123,9 @@ export interface IAIBackServiceOption { sessionId?: string; history?: IHistoryChatMessage[]; tools?: any[]; + clientId?: string; + apiKey?: string; + model?: string; } /** @@ -293,9 +296,11 @@ export interface IChatToolContent { type: string; function: { name: string; - arguments: string; + arguments?: string; }; + result?: string; index?: number; + state?: 'streaming-start' | 'streaming' | 'complete' | 'result'; }; kind: 'toolCall'; } diff --git a/packages/i18n/src/common/en-US.lang.ts b/packages/i18n/src/common/en-US.lang.ts index 15a56e65bd..fafcae6158 100644 --- a/packages/i18n/src/common/en-US.lang.ts +++ b/packages/i18n/src/common/en-US.lang.ts @@ -1531,5 +1531,12 @@ export const localizationBundle = { ...browserViews, ...editorLocalizations, ...mergeConflicts, + + // AI Native Settings + 'preference.ai.native.apiKeys.title': 'API Keys', + 'preference.ai.native.deepseek.apiKey': 'Deepseek API Key', + 'preference.ai.native.deepseek.apiKey.description': 'API key for Deepseek language model', + 'preference.ai.native.anthropic.apiKey': 'Anthropic API Key', + 
'preference.ai.native.anthropic.apiKey.description': 'API key for Anthropic language model', }, }; diff --git a/packages/i18n/src/common/zh-CN.lang.ts b/packages/i18n/src/common/zh-CN.lang.ts index 38528ade5d..cf5cdb4c8c 100644 --- a/packages/i18n/src/common/zh-CN.lang.ts +++ b/packages/i18n/src/common/zh-CN.lang.ts @@ -1294,5 +1294,12 @@ export const localizationBundle = { ...browserViews, ...editorLocalizations, ...mergeConflicts, + + // AI Native Settings + 'preference.ai.native.apiKeys.title': 'API 密钥', + 'preference.ai.native.deepseek.apiKey': 'Deepseek API 密钥', + 'preference.ai.native.deepseek.apiKey.description': 'Deepseek 语言模型的 API 密钥', + 'preference.ai.native.anthropic.apiKey': 'Anthropic API 密钥', + 'preference.ai.native.anthropic.apiKey.description': 'Anthropic 语言模型的 API 密钥', }, }; diff --git a/packages/startup/entry/sample-modules/ai-native/ai.back.service.ts b/packages/startup/entry/sample-modules/ai-native/ai.back.service.ts index e0d8c973e1..f30025db9b 100644 --- a/packages/startup/entry/sample-modules/ai-native/ai.back.service.ts +++ b/packages/startup/entry/sample-modules/ai-native/ai.back.service.ts @@ -1,16 +1,12 @@ import { Autowired, Injectable } from '@opensumi/di'; -import { - ToolInvocationRegistry, - ToolInvocationRegistryImpl, -} from '@opensumi/ide-ai-native/lib/common/tool-invocation-registry'; import { AnthropicModel } from '@opensumi/ide-ai-native/lib/node/anthropic/anthropic-language-model'; +import { DeepSeekModel } from '@opensumi/ide-ai-native/lib/node/deepseek/deepseek-language-model'; import { OpenAIModel } from '@opensumi/ide-ai-native/lib/node/openai/openai-language-model'; -import { IAICompletionOption } from '@opensumi/ide-core-common'; +import { IAIBackServiceOption } from '@opensumi/ide-core-common'; import { CancellationToken, ChatReadableStream, IAIBackService, - IAIBackServiceOption, IAIBackServiceResponse, INodeLogger, sleep, @@ -59,6 +55,9 @@ export class AIBackService implements IAIBackService