
Commit d2ed82f

feat: chatMode and switchConversation
1 parent 8005437 commit d2ed82f

7 files changed: +179 additions, -153 deletions

packages/plugins/robot/src/Home.vue

Lines changed: 16 additions & 18 deletions
@@ -13,9 +13,11 @@
     ref="robotChatRef"
     :prompt-items="promptItems"
     :bubbleRenderers="
-      aiMode === CHAT_MODE.Agent ? { markdown: BuildLoadingRenderer, loading: BuildLoadingRenderer } : {}
+      robotSettingState.chatMode === CHAT_MODE.Agent
+        ? { markdown: BuildLoadingRenderer, loading: BuildLoadingRenderer }
+        : {}
     "
-    :allowFiles="isVisualModel() && aiMode === CHAT_MODE.Agent"
+    :allowFiles="isVisualModel() && robotSettingState.chatMode === CHAT_MODE.Agent"
     @fileSelected="handleFileSelected"
   >
     <template #operations>
@@ -28,7 +30,7 @@
         >
           <robot-setting-popover
             v-if="showSettingPopover"
-            @changeType="handleChatModeChange"
+            @changeType="saveSettingState"
             @close="closePanel"
           ></robot-setting-popover>
           <template #reference>
@@ -39,8 +41,11 @@
       </tiny-popover>
     </template>
     <template #footer-left>
-      <robot-type-select :aiMode="aiMode" @typeChange="typeChange"></robot-type-select>
-      <mcp-server :position="mcpDrawerPosition" v-if="aiMode === CHAT_MODE.Chat"></mcp-server>
+      <robot-type-select
+        :chatMode="robotSettingState.chatMode"
+        @typeChange="handleChatModeChange"
+      ></robot-type-select>
+      <mcp-server :position="mcpDrawerPosition" v-if="robotSettingState.chatMode === CHAT_MODE.Chat"></mcp-server>
     </template>
   </robot-chat>
 </div>
@@ -62,7 +67,6 @@ import type { PromptProps } from '@opentiny/tiny-robot'
 import RobotTypeSelect from './components/RobotTypeSelect.vue'
 import McpServer from './mcp/McpServer.vue'
 import BuildLoadingRenderer from './BuildLoadingRenderer.vue'
-import { updateLLMConfig } from './client'
 import useChat from './composables/useChat'

 const { options } = defineProps({
@@ -112,29 +116,23 @@ const promptItems: PromptProps[] = [
 const showTeleport = ref(false)
 const showSettingPopover = ref(false)

-const { robotSettingState, CHAT_MODE, AIModelOptions, aiMode } = useRobot()
-const { inputMessage } = useChat()
+const { robotSettingState, CHAT_MODE, AIModelOptions } = useRobot()
+const { inputMessage, changeChatMode } = useChat()

 const isVisualModel = () => {
   const platform = AIModelOptions.find((option) => option.value === robotSettingState.selectedModel.baseUrl)
   const modelAbility = platform?.model.find((item) => item.value === robotSettingState.selectedModel.model)
   return modelAbility?.ability?.includes('visual') || false
 }

-const typeChange = (type: string) => {
-  aiMode.value = type
-  robotChatRef.value?.createConversation()
-  updateLLMConfig({
-    apiUrl: type === CHAT_MODE.Agent ? '/app-center/api/ai/chat' : '/app-center/api/chat/completions'
-  })
-}
-
-const handleChatModeChange = () => {
+const handleChatModeChange = (type: string) => {
+  changeChatMode(type)
   // singleAttachmentItems.value = []
   // imageUrl.value = ''
-  // endContent()
 }

+const saveSettingState = () => {}
+
 const closePanel = () => {
   showSettingPopover.value = false
 }
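A note on the new wiring: mode switching is delegated to a changeChatMode helper exposed by useChat, while the setting popover now calls a (currently empty) saveSettingState instead of changing the mode directly. The helper's body is not part of the hunks shown in this view, so the following is only a minimal sketch, assuming it persists the mode into the shared robotSettingState and then moves to the conversation bound to that mode (the function body and the switchConversation call are assumptions based on the commit title, not code from this commit):

const useChatModeSketch = (robotSettingState: { chatMode: string }) => {
  // Assumed shape of useChat's changeChatMode: write the mode into shared state,
  // then switch to the conversation associated with that mode.
  const changeChatMode = (mode: string) => {
    if (robotSettingState.chatMode === mode) {
      return
    }
    robotSettingState.chatMode = mode // drives :bubbleRenderers / :allowFiles in Home.vue
    // switchConversation(mode)       // assumed per the commit title; not shown in this diff
  }

  return { changeChatMode }
}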

packages/plugins/robot/src/Main.vue

Lines changed: 1 addition & 1 deletion
@@ -93,7 +93,7 @@
       </div>
     </template>
     <template #footer-left>
-      <robot-type-select :aiType="aiType" @typeChange="typeChange"></robot-type-select>
+      <robot-type-select :chatMode="aiType" @typeChange="typeChange"></robot-type-select>
       <mcp-server :position="mcpDrawerPosition" v-if="aiType === CHAT_MODE.Chat"></mcp-server>
     </template>
   </tr-sender>

Lines changed: 12 additions & 80 deletions
@@ -1,88 +1,20 @@
 import { AIClient, type AIModelConfig } from '@opentiny/tiny-robot-kit'
 import { OpenAICompatibleProvider } from './OpenAICompatibleProvider'
-import useMcp from '../composables/useMcp'
-import useRobot from '../js/useRobot'
-import type { LLMMessage } from '../types/mcp-types'
-import { getAgentSystemPrompt } from '../js/prompts'
-import { utils } from '@opentiny/tiny-engine-utils'
-import { getMetaApi, META_SERVICE, useCanvas } from '@opentiny/tiny-engine-meta-register'
-
-const { deepClone } = utils
-const { loadRobotSettingState, EXISTING_MODELS, aiMode, CHAT_MODE } = useRobot()
-const { activeName, existModel, customizeModel } = loadRobotSettingState() || {}
-
-const storageSettingState = (activeName === EXISTING_MODELS ? existModel : customizeModel) || {}
-
-const config: Omit<AIModelConfig, 'provider' | 'providerImplementation'> = {
-  apiKey: storageSettingState.apiKey || '',
-  apiUrl: aiMode.value === CHAT_MODE.Agent ? '/app-center/api/ai/chat' : '/app-center/api/chat/completions',
-  defaultModel: storageSettingState.model || 'deepseek-v3'
-}
-
-let provider: OpenAICompatibleProvider | null = null
-
-const addSystemPrompt = (messages: LLMMessage[], prompt: string = '') => {
-  if (!messages.length || messages[0].role !== 'system') {
-    messages.unshift({ role: 'system', content: prompt })
-  } else if (messages[0].role === 'system' && messages[0].content !== prompt) {
-    messages[0].content = prompt
-  }
-}
-
-export const search = async (content: string) => {
-  let result = ''
-  const MAX_SEARCH_LENGTH = 8000
-  try {
-    const res = await getMetaApi(META_SERVICE.Http).post('/app-center/api/ai/search', { content })
-
-    res.forEach((item: { content: string }) => {
-      if (result.length + item.content.length > MAX_SEARCH_LENGTH) {
-        return
-      }
-      result += item.content
-    })
-  } catch (error) {
-    // error
-  }
-  return result
-}
-
-const beforeRequest = async (requestParams: any) => {
-  const { aiMode, CHAT_MODE, robotSettingState } = useRobot()
-  const pageSchema = deepClone(useCanvas().pageState.pageSchema)
-  const isAgentMode = aiMode.value === CHAT_MODE.Agent
-  const tools = await useMcp().getLLMTools()
-  if (!requestParams.tools && tools?.length && !isAgentMode) {
-    Object.assign(requestParams, { tools })
-  }
-  if (isAgentMode) {
-    requestParams.apiKey = robotSettingState.selectedModel.apiKey
-    // let referenceContext = ''
-    // if (requestParams.messages?.[0].role && requestParams.messages?.[0].role !== 'system') {
-    //   referenceContext = await search(requestParams.messages?.at(-1)?.content)
-    // }
-    addSystemPrompt(requestParams.messages, getAgentSystemPrompt(pageSchema, ''))
-  }
-  requestParams.baseUrl = robotSettingState.selectedModel.baseUrl
-  requestParams.model = robotSettingState.selectedModel.model
-  if (config.apiKey !== robotSettingState.selectedModel.apiKey) {
-    provider?.updateConfig({ apiKey: robotSettingState.selectedModel.apiKey })
-    config.apiKey = robotSettingState.selectedModel.apiKey
-  }
-  return requestParams
+interface ClientOptions {
+  config: Omit<AIModelConfig, 'provider' | 'providerImplementation'>
+  beforeRequest: () => object
 }

-provider = new OpenAICompatibleProvider(config, { beforeRequest })
+const createClient = ({ config, beforeRequest }: ClientOptions) => {
+  const provider: OpenAICompatibleProvider = new OpenAICompatibleProvider(config, { beforeRequest })

-const client = new AIClient({
-  ...config,
-  provider: 'custom',
-  providerImplementation: provider
-})
+  const client = new AIClient({
+    ...config,
+    provider: 'custom',
+    providerImplementation: provider
+  })

-const updateLLMConfig = (newConfig: Omit<AIModelConfig, 'provider' | 'providerImplementation'>) => {
-  provider?.updateConfig(newConfig)
-  Object.assign(config, newConfig)
+  return { client, provider }
 }

-export { client, updateLLMConfig }
+export { createClient }
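With this refactor the module no longer creates a singleton client at import time: callers build one through createClient with their own config and beforeRequest hook, and the returned provider takes over the role of the removed updateLLMConfig for later reconfiguration. A minimal usage sketch under those assumptions (the apiUrl, defaultModel, and empty beforeRequest below are illustrative values echoing the deleted defaults, not code from this commit):

import { createClient } from './client'

// Illustrative configuration; mirrors the defaults removed from the old module-level setup.
const { client, provider } = createClient({
  config: {
    apiKey: '',
    apiUrl: '/app-center/api/chat/completions',
    defaultModel: 'deepseek-v3'
  },
  beforeRequest: () => ({}) // callers now inject their own request-mutation hook
})

// Later reconfiguration goes through the provider instance instead of updateLLMConfig:
// provider.updateConfig({ apiKey: 'new-key' })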

packages/plugins/robot/src/components/RobotTypeSelect.vue

Lines changed: 14 additions & 26 deletions
@@ -1,7 +1,7 @@
 <template>
   <div class="button-wrapper">
-    <tiny-tabs v-model="state.activeNameTabs" tab-style="button-card">
-      <tiny-tab-item :name="CHAT_MODE.Agent">
+    <tiny-tabs v-model="aiChatMode" tab-style="button-card">
+      <tiny-tab-item name="agent">
         <template #title>
           <tiny-tooltip effect="light">
             <template #content>
@@ -19,7 +19,7 @@
           </tiny-tooltip>
         </template>
       </tiny-tab-item>
-      <tiny-tab-item class="json-tab" :name="CHAT_MODE.Chat">
+      <tiny-tab-item class="json-tab" name="chat">
         <template #title>
           <tiny-tooltip effect="light">
             <template #content>
@@ -42,10 +42,8 @@
 </template>

 <script lang="ts">
-import { reactive, watch } from 'vue'
-import type { Component } from 'vue'
+import { computed, type Component } from 'vue'
 import { Tabs, TabItem, Tooltip } from '@opentiny/vue'
-import { useRobot } from '@opentiny/tiny-engine-meta-register'

 export default {
   components: {
@@ -54,33 +52,23 @@ export default {
     TinyTooltip: Tooltip as Component
   },
   props: {
-    aiType: {
-      type: String
+    chatMode: {
+      type: String,
+      default: 'agent'
     }
   },
   emits: ['typeChange'],
   setup(props, { emit }) {
-    const { CHAT_MODE } = useRobot()
-    const state = reactive({
-      activeNameTabs: props.aiType || CHAT_MODE.Agent
-    })
-
-    const handleTabChange = (value) => {
-      emit('typeChange', value)
-    }
-
-    // TODO: no watch is needed here; listen for activeNameTabs changes via the modelChange method instead
-    // watch activeNameTabs for changes
-    watch(
-      () => state.activeNameTabs,
-      (newValue) => {
-        handleTabChange(newValue)
+    const aiChatMode = computed<string>({
+      get: () => props.chatMode,
+      set: (value: string) => {
+        if (value === props.chatMode) return
+        emit('typeChange', value)
       }
-    )
+    })

     return {
-      state,
-      CHAT_MODE
+      aiChatMode
     }
   }
 }
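The removed wiring is the usual prop-mirroring pitfall: local reactive state is seeded from the prop once, then watched, so it drifts when the parent later changes the prop and it re-emits even for programmatic updates. The replacement keeps the tab strictly prop-driven through a writable computed. The same pattern in isolation, for reference (the component shell below is illustrative, not from this commit):

import { computed, defineComponent } from 'vue'

// Prop-in / emit-out v-model: the computed getter always reflects the parent-owned
// value, and the setter only notifies the parent instead of mutating local state.
export default defineComponent({
  props: { chatMode: { type: String, default: 'agent' } },
  emits: ['typeChange'],
  setup(props, { emit }) {
    const aiChatMode = computed<string>({
      get: () => props.chatMode,
      set: (value) => {
        if (value === props.chatMode) return // ignore writes that change nothing
        emit('typeChange', value)            // parent decides whether the mode really changes
      }
    })
    return { aiChatMode }
  }
})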

packages/plugins/robot/src/composables/useAgent.ts

Lines changed: 21 additions & 3 deletions
@@ -1,7 +1,7 @@
 import { jsonrepair } from 'jsonrepair'
 import * as jsonpatch from 'fast-json-patch'
 import { utils } from '@opentiny/tiny-engine-utils'
-import { useCanvas, useHistory } from '@opentiny/tiny-engine-meta-register'
+import { getMetaApi, META_SERVICE, useCanvas, useHistory } from '@opentiny/tiny-engine-meta-register'
 import useRobot from '../js/useRobot'
 import SvgICons from '@opentiny/vue-icon'

@@ -44,8 +44,8 @@ const schemaAutoFix = (data: object | object[]) => {
 }

 const _updatePageSchema = (streamContent: string, currentPageSchema: object, isFinial: boolean = false) => {
-  const { aiMode, CHAT_MODE, isValidFastJsonPatch } = useRobot()
-  if (aiMode.value !== CHAT_MODE.Agent) {
+  const { robotSettingState, CHAT_MODE, isValidFastJsonPatch } = useRobot()
+  if (robotSettingState.chatMode !== CHAT_MODE.Agent) {
     return
   }

@@ -98,3 +98,21 @@
 }

 export const updatePageSchema = useThrottleFn(_updatePageSchema, 200, true)
+
+export const search = async (content: string) => {
+  let result = ''
+  const MAX_SEARCH_LENGTH = 8000
+  try {
+    const res = await getMetaApi(META_SERVICE.Http).post('/app-center/api/ai/search', { content })
+
+    res.forEach((item: { content: string }) => {
+      if (result.length + item.content.length > MAX_SEARCH_LENGTH) {
+        return
+      }
+      result += item.content
+    })
+  } catch (error) {
+    // error
+  }
+  return result
+}
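search moves verbatim from the old client module into useAgent.ts; it concatenates result snippets until adding one would exceed 8000 characters (note that the early return inside forEach only skips that oversized item rather than stopping iteration, so later, shorter snippets can still be appended). The commented-out code removed from the client hints at the intended caller: fetching reference context for the latest user message before building the agent system prompt. A hedged sketch of such a caller under that assumption (the helper name, import paths, and the second argument to getAgentSystemPrompt are assumptions, not part of this commit):

import { search } from './composables/useAgent'   // assumed path, relative to the plugin's src root
import { getAgentSystemPrompt } from './js/prompts' // assumed path; the old client imported it from '../js/prompts'

// Hypothetical agent-mode helper mirroring the commented-out logic removed from the client.
const buildAgentSystemPrompt = async (lastUserMessage: string, pageSchema: object) => {
  // Only call the search endpoint when there is a message to look up.
  const referenceContext = lastUserMessage ? await search(lastUserMessage) : ''
  return getAgentSystemPrompt(pageSchema, referenceContext)
}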
