feat(web): agent support deep thinking

This commit is contained in:
zhaoying
2026-03-31 18:07:32 +08:00
parent b40f4829cb
commit ca255304d9
12 changed files with 203 additions and 33 deletions

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2025-12-10 16:46:17
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-03-27 14:17:38
* @Last Modified time: 2026-03-31 15:01:53
*/
import { type FC, useRef, useEffect, useState } from 'react'
import clsx from 'clsx'
@@ -38,6 +38,22 @@ const ChatContent: FC<ChatContentProps> = ({
const isScrolledToBottomRef = useRef(true);
const audioRef = useRef<HTMLAudioElement | null>(null)
const [playingIndex, setPlayingIndex] = useState<number | null>(null)
const [expandedReasoning, setExpandedReasoning] = useState<Set<number>>(new Set())
const [manualToggledReasoning, setManualToggledReasoning] = useState<Set<number>>(new Set())
/**
 * Toggle the expanded/collapsed state of a message's reasoning panel and
 * record that the user manually overrode the automatic behaviour for it.
 * @param index - index of the chat message whose reasoning panel is toggled
 */
const toggleReasoning = (index: number) => {
  // Remember the manual override so isReasoningExpanded() stops auto-managing this entry.
  setManualToggledReasoning(prev => new Set(prev).add(index))
  setExpandedReasoning(prev => {
    const next = new Set(prev)
    // Explicit branch instead of a side-effect ternary (lint: no-unused-expressions).
    if (next.has(index)) {
      next.delete(index)
    } else {
      next.add(index)
    }
    return next
  })
}
/**
 * Whether the reasoning panel for a given message should be shown.
 * A manual toggle by the user always wins; otherwise the panel stays
 * open only while the answer content has not started streaming yet.
 */
const isReasoningExpanded = (index: number) => {
  const userHasToggled = manualToggledReasoning.has(index)
  return userHasToggled
    ? expandedReasoning.has(index)
    : !data[index]?.content
}
const handlePlay = (index: number, audio_url: string, audio_status?: string) => {
if (audio_status !== 'completed' && !audio_status) return
@@ -120,7 +136,7 @@ const ChatContent: FC<ChatContentProps> = ({
{labelFormat(item)}
</div>
}
{item.meta_data?.files && item.meta_data?.files.length > 0 && <Flex gap={8} vertical align="end">
{item.meta_data?.files && item.meta_data?.files.length > 0 && <Flex gap={8} vertical align="end" className="rb:mb-2!">
{item.meta_data?.files?.map((file) => {
if (file.type.includes('image')) {
return (
@@ -174,6 +190,22 @@ const ChatContent: FC<ChatContentProps> = ({
'rb:mt-1.5': labelPosition === 'top',
'rb:mb-1.5': labelPosition === 'bottom',
})}>
{item.meta_data?.reasoning_content && <div className="rb:mb-2 rb:border rb:rounded-md rb:px-3 rb:pt-2 rb:bg-white rb:text-[12px]">
<Flex
align="center"
justify="space-between"
className="rb:text-[#5B6167] rb:font-medium rb:cursor-pointer rb:pb-2!"
onClick={() => toggleReasoning(index)}
>
<span>{t('memoryConversation.reasoning_content')}</span>
<div
className={clsx("rb:size-4 rb:bg-cover rb:bg-[url('@/assets/images/common/arrow_up.svg')]", {
'rb:rotate-180': !isReasoningExpanded(index),
})}
></div>
</Flex>
{isReasoningExpanded(index) && <Markdown content={item.meta_data.reasoning_content} />}
</div>}
{item.status && <div className="rb:size-5 rb:bg-cover rb:bg-[url('@/assets/images/conversation/exclamation_circle.svg')] rb:absolute rb:-left-7"></div>}
{item.subContent && renderRuntime && renderRuntime(item, index)}
{/* Render message content using Markdown component */}

View File

@@ -27,12 +27,14 @@ const Chat: FC<ChatProps> = ({
fileList,
fileChange,
className,
renderRuntime
renderRuntime,
conversationId
}) => {
return (
<div className={`rb:h-full rb:relative rb:pt-2 ${className}`}>
{/* Chat content display area */}
<ChatContent
key={conversationId ?? 'new'}
classNames={contentClassName}
data={data}
streamLoading={streamLoading}

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2025-12-10 16:45:54
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-03-26 12:30:51
* @Last Modified time: 2026-03-31 15:01:46
*/
import { type ReactNode } from 'react'
@@ -33,7 +33,8 @@ export interface ChatItem {
file_name: string;
knowledge_id: string;
score: string;
}[]
}[];
reasoning_content?: string;
},
}
@@ -66,6 +67,7 @@ export interface ChatProps {
fileChange?: (fileList: any[]) => void;
className?: string;
renderRuntime?: (item: ChatItem, index: number) => ReactNode;
conversationId?: string | null;
}
/**

View File

@@ -1787,6 +1787,11 @@ Memory Bear: After the rebellion, regional warlordism intensified for several re
vision_id: 'Vision model',
audio_id: 'Audio model',
video_id: 'Video model',
onlyDelete: 'Only Delete Fill',
semanticFiltering: 'Semantic Filtering',
sceneFocus: 'Scene Focus',
loose: 'Loose',
strict: 'Strict',
},
memoryConversation: {
searchPlaceholder: 'Enter user ID...',

View File

@@ -1783,6 +1783,11 @@ export const zh = {
vision_id: '视觉模型',
audio_id: '音频模型',
video_id: '视频模型',
onlyDelete: '仅删填充',
semanticFiltering: '语义过滤',
sceneFocus: '场景聚焦',
loose: '宽松',
strict: '严格',
},
memoryConversation: {
chatEmpty:'有什么我可以帮您的吗?',

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2026-02-03 16:29:21
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-03-27 18:13:51
* @Last Modified time: 2026-03-31 16:50:10
*/
import { useEffect, useRef, useState, forwardRef, useImperativeHandle, useMemo } from 'react';
import { useTranslation } from 'react-i18next'
@@ -194,7 +194,7 @@ const Agent = forwardRef<AgentRef, { onFeaturesLoad?: (features: FeaturesConfigF
* Open model configuration modal
*/
const handleModelConfig = () => {
modelConfigModalRef.current?.handleOpen('model')
modelConfigModalRef.current?.handleOpen('model', { ...defaultModel, model_parameters : values?.model_parameters })
}
/**
* Clear all debugging chat sessions
@@ -287,7 +287,7 @@ const Agent = forwardRef<AgentRef, { onFeaturesLoad?: (features: FeaturesConfigF
setChatList([{
label: filterValue?.name || '',
model_config_id: filterValue?.id || '',
model_parameters: {...(filterValue?.config || {})} as unknown as ModelConfig,
model_parameters: {...(values?.model_parameters || {})} as unknown as ModelConfig,
list: []
}])
form.setFieldValue('capability', filterValue?.capability)
@@ -361,7 +361,6 @@ const Agent = forwardRef<AgentRef, { onFeaturesLoad?: (features: FeaturesConfigF
useEffect(() => {
const opening_statement = form.getFieldValue(['features', 'opening_statement'])
console.log('opening_statement', opening_statement, defaultModel, chatList)
if (opening_statement?.enabled && opening_statement?.statement && opening_statement?.statement.trim() !== '') {
const assistantMsg: ChatItem = {

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2026-03-13 17:27:52
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-03-26 15:35:13
* @Last Modified time: 2026-03-31 16:04:15
*/
import { type FC, useState, useRef, useEffect } from 'react'
import { useTranslation } from 'react-i18next'
@@ -171,6 +171,7 @@ const TestChat: FC<TestChatProps> = ({
...lastMsg,
content: lastMsg.content + content,
meta_data: {
...(lastMsg.meta_data || {}),
audio_url: audio_url || lastMsg.meta_data?.audio_url,
audio_status: audio_status || lastMsg.meta_data?.audio_status,
citations: citations || lastMsg.meta_data?.citations
@@ -180,6 +181,24 @@ const TestChat: FC<TestChatProps> = ({
return newList
})
}
/**
 * Append a streamed chunk of model reasoning ("deep thinking") text to the
 * trailing assistant message. No-op for empty chunks or when the last
 * message is not an assistant reply.
 */
const updateAssistantReasoningMessage = (content: string) => {
  if (!content) return
  // The first reasoning token proves the stream is alive — drop the loading state.
  if (streamLoading) setStreamLoading(false)
  setChatList(prev => {
    const next = [...prev]
    const tailIndex = next.length - 1
    const tail = next[tailIndex]
    if (tail?.role !== 'assistant') return next
    const previousReasoning = tail.meta_data?.reasoning_content || ''
    next[tailIndex] = {
      ...tail,
      meta_data: {
        ...(tail.meta_data || {}),
        reasoning_content: previousReasoning + content
      }
    }
    return next
  })
}
const updateErrorAssistantMessage = (message_length: number) => {
if (message_length > 0) return
@@ -273,6 +292,10 @@ const TestChat: FC<TestChatProps> = ({
case 'start':
if (conversation_id && conversationId !== conversation_id) setConversationId(conversation_id)
break
case 'reasoning':
updateAssistantReasoningMessage(content)
if (conversation_id && conversationId !== conversation_id) setConversationId(conversation_id)
break
case 'message':
updateAssistantMessage(content)
if (conversation_id && conversationId !== conversation_id) setConversationId(conversation_id)

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2026-02-03 16:27:39
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-03-27 17:59:07
* @Last Modified time: 2026-03-31 15:02:07
*/
/**
* Chat debugging component for application testing
@@ -141,6 +141,36 @@ const Chat: FC<ChatProps> = ({
}
}
/** Update assistant message with streaming content */
/**
 * Append a streamed reasoning ("deep thinking") chunk to the last assistant
 * message of the model identified by model_config_id, and refresh that
 * model's conversation_id.
 * @param content          - reasoning text chunk from the 'model_reasoning' SSE event
 * @param model_config_id  - which model's chat column to update
 * @param conversation_id  - current conversation id reported by the stream
 */
const updateAssistantReasoningMessage = (content?: string, model_config_id?: string, conversation_id?: string) => {
  if (!content || !model_config_id) return
  updateChatList(prev => {
    const targetIndex = prev.findIndex(item => item.model_config_id === model_config_id);
    if (targetIndex !== -1) {
      const modelChatList = [...prev]
      const curModelChat = modelChatList[targetIndex]
      const curChatMsgList = curModelChat.list || []
      const lastMsg = curChatMsgList[curChatMsgList.length - 1]
      if (lastMsg && lastMsg.role === 'assistant') {
        modelChatList[targetIndex] = {
          ...modelChatList[targetIndex],
          conversation_id,
          list: [
            ...curChatMsgList.slice(0, curChatMsgList.length - 1),
            {
              ...lastMsg,
              meta_data: {
                // BUG FIX: keep previously streamed meta_data (audio_url, citations, …)
                // instead of replacing the whole object — matches updateAssistantMessage.
                ...(lastMsg.meta_data || {}),
                reasoning_content: (lastMsg.meta_data?.reasoning_content || '') + content,
              }
            }
          ]
        }
      }
      return [...modelChatList]
    }
    return prev;
  })
}
/** Update assistant message with streaming content */
const updateAssistantMessage = (content?: string, model_config_id?: string, conversation_id?: string, audio_url?: string, citations?: any[]) => {
if ((!content && !audio_url && (!citations || citations?.length < 1)) || !model_config_id) return
updateChatList(prev => {
@@ -160,6 +190,7 @@ const Chat: FC<ChatProps> = ({
...lastMsg,
content: lastMsg.content + (content || ''),
meta_data: {
...(lastMsg.meta_data || {}),
...(audio_url !== undefined ? { audio_url, audio_status: 'pending' } : {}),
citations: citations || lastMsg.meta_data?.citations
}
@@ -274,6 +305,9 @@ const Chat: FC<ChatProps> = ({
};
switch (item.event) {
case 'model_reasoning':
updateAssistantReasoningMessage(content, model_config_id, conversation_id)
break;
case 'model_message':
updateAssistantMessage(content, model_config_id, conversation_id, audio_url)
break;

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2026-02-03 16:28:07
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-03-25 11:28:02
* @Last Modified time: 2026-03-31 16:56:57
*/
/**
* Model Configuration Modal
@@ -11,7 +11,7 @@
*/
import { forwardRef, useImperativeHandle, useState, useEffect } from 'react';
import { Form, type SelectProps } from 'antd';
import { Form, type SelectProps, Checkbox } from 'antd';
import { useTranslation } from 'react-i18next';
import type { ModelConfig, ModelConfigModalRef, Config, Source } from '../types'
@@ -70,7 +70,8 @@ const ModelConfigModal = forwardRef<ModelConfigModalRef, ModelConfigModalProps>(
if (source === 'model') {
form.setFieldsValue({
...(data?.model_parameters || {}),
default_model_config_id: data.default_model_config_id || ''
default_model_config_id: data.default_model_config_id || '',
capability: model?.capability || []
})
} else if (source === 'chat' || source === 'multi_agent') {
if (model) {
@@ -103,9 +104,12 @@ const ModelConfigModal = forwardRef<ModelConfigModalRef, ModelConfigModalProps>(
const handleChange: SelectProps['onChange'] = (_value, option) => {
if (source === 'chat') {
form.setFieldValue('label', (option as Model).name)
} else {
form.setFieldValue('capability', (option as Model).capability)
}
form.setFieldsValue({
capability: (option as Model).capability,
deep_thinking: false,
})
}
/** Expose methods to parent component */
@@ -115,8 +119,12 @@ const ModelConfigModal = forwardRef<ModelConfigModalRef, ModelConfigModalProps>(
}));
useEffect(() => {
form.setFieldsValue({...(data?.model_parameters || {})})
const { deep_thinking: _, ...rest } = data?.model_parameters || {}
form.setFieldsValue(rest)
}, [values?.default_model_config_id])
console.log('handleChange values', values)
return (
<RbModal
title={t('application.modelConfig')}
@@ -145,9 +153,17 @@ const ModelConfigModal = forwardRef<ModelConfigModalRef, ModelConfigModalProps>(
/>
}
</FormItem>
{source === 'model' && <FormItem name="capability" hidden />}
{['model', 'chat'].includes(source) && <>
<FormItem name="capability" hidden />
{(values?.deep_thinking || values?.capability?.includes('thinking')) && (
<FormItem name="deep_thinking" valuePropName="checked">
<Checkbox>{t('application.deep_thinking')}</Checkbox>
</FormItem>
)}
</>}
{source === 'chat' && <FormItem name="label" hidden />}
<div className="rb:text-[14px] rb:font-medium rb:text-[#5B6167] rb:mb-4">{t('application.parameterConfig')}</div>
{configFields.map(item => (

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2026-02-03 16:29:49
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-03-24 15:44:33
* @Last Modified time: 2026-03-31 15:45:17
*/
import type { KnowledgeConfig } from './components/Knowledge/types'
import type { Variable } from './components/VariableList/types'
@@ -36,6 +36,7 @@ export interface ModelConfig {
n: number;
/** Stop sequences */
stop?: string;
deep_thinking?: boolean;
}
/**

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2026-02-03 16:58:03
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-03-27 14:28:19
* @Last Modified time: 2026-03-31 16:24:47
*/
/**
* Conversation Page
@@ -27,12 +27,11 @@ import ChatEmpty from '@/assets/images/empty/chatEmpty.png'
import Chat from '@/components/Chat'
import type { ChatItem } from '@/components/Chat/types'
import { type SSEMessage } from '@/utils/stream'
import { shareFileUploadUrlWithoutApiPrefix } from '@/api/fileStorage'
import { shareFileUploadUrlWithoutApiPrefix, getFileStatusById } from '@/api/fileStorage'
import ChatToolbar, { type ChatToolbarRef } from '@/components/Chat/ChatToolbar'
import type { Variable } from '@/views/Workflow/components/Properties/VariableList/types'
import type { Variable as AppVariable } from '@/views/ApplicationConfig/components/VariableList/types'
import type { FeaturesConfigForm } from '@/views/ApplicationConfig/types';
import { getFileStatusById } from '@/api/fileStorage';
import { replaceVariables } from '@/views/ApplicationConfig/Agent'
const Conversation: FC = () => {
@@ -43,7 +42,6 @@ const Conversation: FC = () => {
const searchParams = new URLSearchParams(location.search)
const userId = searchParams.get('user_id')
const [loading, setLoading] = useState(false)
const [streamLoading, setStreamLoading] = useState(false)
const [message, setMessage] = useState<string>('')
const [conversation_id, setConversationId] = useState<string | null>(null)
const [historyList, setHistoryList] = useState<HistoryItem[]>([])
@@ -63,6 +61,9 @@ const Conversation: FC = () => {
const [features, setFeatures] = useState<FeaturesConfigForm>({} as FeaturesConfigForm)
const [config, setConfig] = useState<Record<string, any>>({})
const [audioStatusMap, setAudioStatusMap] = useState<Record<string, string>>({})
const streamLoadingRef = useRef(false)
const [isDeepThinking, setIsDeepThinking] = useState<Record<string, any>>({})
const [thinking, setThinking] = useState(false)
useEffect(() => {
const shareToken = localStorage.getItem(`shareToken_${token}`)
@@ -86,11 +87,12 @@ const Conversation: FC = () => {
if (shareToken && token) {
getExperienceConfig(token)
.then(res => {
const response = res as { variables: Variable[]; features: FeaturesConfigForm; app_type: string; memory: boolean; }
const response = res as { variables: Variable[]; features: FeaturesConfigForm; model_parameters?: Record<string, any>; app_type: string; memory: boolean; }
toolbarRef.current?.setVariables(response.variables || [])
setConfig(response)
setFeatures(response.features)
setIsHasMemory((response.app_type === 'workflow' && response.memory) || response.memory)
setIsDeepThinking(response.model_parameters?.deep_thinking || false)
})
} else {
setChatList([])
@@ -190,7 +192,7 @@ const Conversation: FC = () => {
const updateAssistantMessage = (content: string = '', audio_url?: string, audio_status?: string, citations?: any[]) => {
if (!content && !audio_url && (!citations || citations?.length < 1)) return
if (streamLoading) setStreamLoading(false)
if (streamLoadingRef.current) streamLoadingRef.current = false
setChatList(prev => {
const lastList = [...prev]
const lastIndex = lastList.length - 1
@@ -202,6 +204,7 @@ const Conversation: FC = () => {
...lastMsg,
content: lastMsg.content + content,
meta_data: {
...(lastMsg.meta_data || {}),
audio_url: audio_url || lastMsg.meta_data?.audio_url,
audio_status: audio_status || lastMsg.meta_data?.audio_status,
citations: citations || lastMsg.meta_data?.citations
@@ -212,6 +215,28 @@ const Conversation: FC = () => {
return prev
})
}
/**
 * Stream handler for 'reasoning' SSE events: concatenates the chunk onto the
 * reasoning_content of the trailing assistant message, leaving every other
 * meta_data field untouched.
 */
const updateAssistantReasoningMessage = (content: string = '') => {
  if (!content) return
  // A reasoning token proves the stream has started; clear the loading flag.
  if (streamLoadingRef.current) streamLoadingRef.current = false
  setChatList(prev => {
    const tail = prev[prev.length - 1]
    if (tail?.role !== 'assistant') return prev
    const patched = {
      ...tail,
      meta_data: {
        ...(tail.meta_data || {}),
        reasoning_content: (tail.meta_data?.reasoning_content || '') + content
      }
    }
    return [...prev.slice(0, prev.length - 1), patched]
  })
}
useEffect(() => {
if (!Object.keys(audioStatusMap).length) return
setChatList(prev => prev.map(msg => {
@@ -252,7 +277,7 @@ const Conversation: FC = () => {
if (!isCanSend) return
setLoading(true)
setStreamLoading(true)
streamLoadingRef.current = true
addUserMessage(msg || message, files)
addAssistantMessage()
toolbarRef.current?.setFiles([])
@@ -276,6 +301,10 @@ const Conversation: FC = () => {
const { conversation_id: newId } = item.data as { conversation_id: string }
currentConversationId = newId
break
case 'reasoning':
updateAssistantReasoningMessage(content)
if (curId) currentConversationId = curId;
break
case 'message':
updateAssistantMessage(content, audio_url, audio_url ? 'pending' : undefined)
if (curId) currentConversationId = curId;
@@ -349,15 +378,16 @@ const Conversation: FC = () => {
}
}
}),
variables: params
variables: params,
thinking,
}, handleStreamMessage, shareToken)
.catch(() => {
setLoading(false)
setStreamLoading(false)
streamLoadingRef.current = false
})
.finally(() => {
setLoading(false)
setStreamLoading(false)
streamLoadingRef.current = false
})
}
@@ -376,6 +406,9 @@ const Conversation: FC = () => {
}
})
}
/** Flip the user's "deep thinking" toggle for subsequent send requests. */
const handleChangeDeepThinking = () => {
  setThinking(current => !current)
}
const handleChangeVariables = (variables: Variable[]) => {
setChatList(prev => {
@@ -388,7 +421,7 @@ const Conversation: FC = () => {
})
}
console.log('chatList', chatList)
console.log('chatList', chatList, streamLoadingRef.current)
return (
<Flex className="rb:w-full rb:p-[-16px]!">
@@ -450,11 +483,12 @@ const Conversation: FC = () => {
empty={<Empty url={ChatEmpty} className="rb:h-full" size={[320,180]} title={t('memoryConversation.chatEmpty')} subTitle={t('memoryConversation.emptyDesc')} />}
contentClassName={!fileList.length ? "rb:h-[calc(100%-144px)] rb:w-full" : "rb:h-[calc(100%-208px)] rb:w-full"}
data={chatList}
streamLoading={streamLoading}
streamLoading={streamLoadingRef.current}
loading={loading}
onChange={setMessage}
onSend={handleSend}
labelFormat={(item) => dayjs(item.created_at).locale('en').format('MMMM D, YYYY [at] h:mm A')}
conversationId={conversation_id}
fileList={fileList}
fileChange={(list) => {
setFileList(list || [])
@@ -473,8 +507,24 @@ const Conversation: FC = () => {
}
}}
rightExtra={
(features?.web_search?.enabled || isHasMemory)
(features?.web_search?.enabled || isHasMemory || isDeepThinking)
? <Flex align="center" justify="end" gap={8}>
{isDeepThinking &&
<Tooltip title={t('memoryConversation.deepThinking')}>
<Flex justify="center" align="center"
className={clsx("rb:size-7 rb:cursor-pointer rb:border rb:hover:bg-[#F6F6F6] rb:rounded-full rb:shadow-[0px_2px_12px_0px_rgba(23,23,25,0.12)]", {
'rb:bg-[rgba(21,94,239,0.06)] rb:border-[rgba(21,94,239,0.25)]': thinking,
'rb:border-[#EBEBEB]': !thinking,
})}
onClick={handleChangeDeepThinking}
>
<div className={clsx("rb:size-4 rb:bg-cover", {
"rb:bg-[url('@/assets/images/conversation/deepThinking.svg')]": !thinking,
"rb:bg-[url('@/assets/images/conversation/deepThinkingChecked.svg')]": thinking
})} />
</Flex>
</Tooltip>
}
{features?.web_search?.enabled &&
<Tooltip title={t('memoryConversation.web_search')}>
<Flex justify="center" align="center"

View File

@@ -2,7 +2,7 @@
* @Author: ZhaoYing
* @Date: 2026-02-03 16:57:46
* @Last Modified by: ZhaoYing
* @Last Modified time: 2026-03-03 13:46:55
* @Last Modified time: 2026-03-31 16:23:44
*/
/**
* Type definitions for Conversation
@@ -52,6 +52,7 @@ export interface QueryParams {
conversation_id?: string | null;
files?: any[];
variables?: Record<string, any>;
thinking?: boolean;
}
export interface UploadFileListModalRef {