fix: chat generation render

Author: zxhlyh
Date:   2026-01-27 13:36:22 +08:00
Parent: 74f94633d7
Commit: 53bc060cea

12 changed files with 201 additions and 120 deletions

View File

@@ -43,7 +43,7 @@ import {
import { TransferMethod } from '@/types/app'
import { addFileInfos, sortAgentSorts } from '../../../tools/utils'
import { CONVERSATION_ID_INFO } from '../constants'
import { buildChatItemTree, buildToolCallsFromHistorySequence, getProcessedSystemVariablesFromUrlParams, getRawInputsFromUrlParams, getRawUserVariablesFromUrlParams } from '../utils'
import { buildChatItemTree, buildLLMGenerationItemsFromHistorySequence, getProcessedSystemVariablesFromUrlParams, getRawInputsFromUrlParams, getRawUserVariablesFromUrlParams } from '../utils'
function getFormattedChatList(messages: any[]) {
const newChatList: ChatItem[] = []
@@ -59,8 +59,8 @@ function getFormattedChatList(messages: any[]) {
const answerFiles = item.message_files?.filter((file: any) => file.belongs_to === 'assistant') || []
newChatList.push({
id: item.id,
content: buildToolCallsFromHistorySequence(item).message,
toolCalls: buildToolCallsFromHistorySequence(item).toolCalls,
content: buildLLMGenerationItemsFromHistorySequence(item).message,
llmGenerationItems: buildLLMGenerationItemsFromHistorySequence(item).llmGenerationItems,
agent_thoughts: addFileInfos(item.agent_thoughts ? sortAgentSorts(item.agent_thoughts) : item.agent_thoughts, item.message_files),
feedback: item.feedback,
isAnswer: true,
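
A note on the hunk above (the same pattern recurs in the debug-chat file later in this commit): buildLLMGenerationItemsFromHistorySequence is invoked twice per message, once for `content` and once for `llmGenerationItems`, so the item list is rebuilt twice. A single destructured call would avoid that — a sketch of the idea, not part of the commit:

const { message: builtContent, llmGenerationItems } = buildLLMGenerationItemsFromHistorySequence(item)
newChatList.push({
  id: item.id,
  content: builtContent,
  llmGenerationItems,
  // ...remaining fields unchanged
})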

View File

@@ -0,0 +1,23 @@
import type { LLMGenerationItem } from '@/types/workflow'
import ToolCallItemComponent from '@/app/components/workflow/run/llm-log/tool-call-item'
type GenerationContentProps = {
llmGenerationItems: LLMGenerationItem[]
}
const GenerationContent = ({
llmGenerationItems,
}: GenerationContentProps) => {
return (
<div className="my-1 space-y-1">
{llmGenerationItems.map((llmGenerationItem: LLMGenerationItem, index: number) => (
<ToolCallItemComponent
key={index}
payload={llmGenerationItem}
className="bg-background-gradient-bg-fill-chat-bubble-bg-2 shadow-none"
/>
))}
</div>
)
}
export default GenerationContent
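
The new GenerationContent component maps items straight onto ToolCallItemComponent. Since every LLMGenerationItem carries a stable `id` (assigned with uuidV4 in the reducers later in this commit), keying by id rather than array index would be slightly sturdier if items are ever reordered — a sketch, not part of the commit:

{llmGenerationItems.map(item => (
  <ToolCallItemComponent
    key={item.id}
    payload={item}
    className="bg-background-gradient-bg-fill-chat-bubble-bg-2 shadow-none"
  />
))}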

View File

@@ -18,10 +18,10 @@ import { cn } from '@/utils/classnames'
import ContentSwitch from '../content-switch'
import AgentContent from './agent-content'
import BasicContent from './basic-content'
import GenerationContent from './generation-content'
import More from './more'
import Operation from './operation'
import SuggestedQuestions from './suggested-questions'
import ToolCalls from './tool-calls'
import WorkflowProcessItem from './workflow-process'
type AnswerProps = {
@@ -62,7 +62,7 @@ const Answer: FC<AnswerProps> = ({
workflowProcess,
allFiles,
message_files,
toolCalls,
llmGenerationItems,
} = item
const hasAgentThoughts = !!agent_thoughts?.length
@@ -114,6 +114,9 @@ const Answer: FC<AnswerProps> = ({
}, [switchSibling, item.prevSibling, item.nextSibling])
const contentIsEmpty = typeof content === 'string' && content.trim() === ''
const generationContentRenderIsUsed = llmGenerationItems?.length && llmGenerationItems.some((item) => {
return item.type === 'tool' || item.type === 'thought'
})
return (
<div className="mb-2 flex last:mb-0">
@@ -157,8 +160,8 @@ const Answer: FC<AnswerProps> = ({
)
}
{
!!toolCalls?.length && (
<ToolCalls toolCalls={toolCalls} />
generationContentRenderIsUsed && (
<GenerationContent llmGenerationItems={llmGenerationItems} />
)
}
{
@@ -169,7 +172,7 @@ const Answer: FC<AnswerProps> = ({
)
}
{
!contentIsEmpty && !hasAgentThoughts && (
!contentIsEmpty && !hasAgentThoughts && !generationContentRenderIsUsed && (
<BasicContent item={item} />
)
}
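
To summarize the gating introduced in this file: the answer body now has two mutually exclusive render paths. GenerationContent owns the whole interleaved sequence — its 'text' items already contain the answer text — so BasicContent is suppressed whenever it is used, to avoid printing the answer twice. A stream that produced only plain text items (no tool or thought entries) keeps the old BasicContent path. Condensed pseudocode of the decision, for illustration only:

// renderX below is pseudocode for the JSX conditions above.
if (generationContentRenderIsUsed)          // any 'tool' or 'thought' item present
  renderGenerationContent(llmGenerationItems)
else if (!contentIsEmpty && !hasAgentThoughts)
  renderBasicContent(item)                  // plain-text answers, as before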

View File

@@ -1,23 +0,0 @@
import type { ToolCallItem } from '@/types/workflow'
import ToolCallItemComponent from '@/app/components/workflow/run/llm-log/tool-call-item'
type ToolCallsProps = {
toolCalls: ToolCallItem[]
}
const ToolCalls = ({
toolCalls,
}: ToolCallsProps) => {
return (
<div className="my-1 space-y-1">
{toolCalls.map((toolCall: ToolCallItem, index: number) => (
<ToolCallItemComponent
key={index}
payload={toolCall}
className="bg-background-gradient-bg-fill-chat-bubble-bg-2 shadow-none"
/>
))}
</div>
)
}
export default ToolCalls

View File

@@ -343,8 +343,87 @@ export const useChat = (
tool_elapsed_time,
}: any) => {
if (!isAgentMode) {
if (chunk_type === 'text')
if (chunk_type === 'text') {
responseItem.content = responseItem.content + message
if (!responseItem.llmGenerationItems)
responseItem.llmGenerationItems = []
const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
if (isNotCompletedTextItemIndex > -1) {
responseItem.llmGenerationItems![isNotCompletedTextItemIndex].text += message
}
else {
toolCallId = uuidV4()
responseItem.llmGenerationItems?.push({
id: toolCallId,
type: 'text',
text: message,
})
}
}
if (chunk_type === 'tool_call') {
if (!responseItem.llmGenerationItems)
responseItem.llmGenerationItems = []
const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
if (isNotCompletedTextItemIndex > -1) {
responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
}
toolCallId = uuidV4()
responseItem.llmGenerationItems?.push({
id: toolCallId,
type: 'tool',
toolName: tool_name,
toolArguments: tool_arguments,
toolIcon: tool_icon,
toolIconDark: tool_icon_dark,
})
}
if (chunk_type === 'tool_result') {
const currentToolCallIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === toolCallId) ?? -1
if (currentToolCallIndex > -1) {
responseItem.llmGenerationItems![currentToolCallIndex].toolError = tool_error
responseItem.llmGenerationItems![currentToolCallIndex].toolDuration = tool_elapsed_time
responseItem.llmGenerationItems![currentToolCallIndex].toolFiles = tool_files
responseItem.llmGenerationItems![currentToolCallIndex].toolOutput = message
}
}
if (chunk_type === 'thought_start') {
if (!responseItem.llmGenerationItems)
responseItem.llmGenerationItems = []
const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
if (isNotCompletedTextItemIndex > -1) {
responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
}
thoughtId = uuidV4()
responseItem.llmGenerationItems?.push({
id: thoughtId,
type: 'thought',
thoughtOutput: '',
})
}
if (chunk_type === 'thought') {
const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
}
}
if (chunk_type === 'thought_end') {
const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
responseItem.llmGenerationItems![currentThoughtIndex].thoughtCompleted = true
}
}
}
else {
const lastThought = responseItem.agent_thoughts?.[responseItem.agent_thoughts?.length - 1]
@@ -352,57 +431,6 @@ export const useChat = (
lastThought.thought = lastThought.thought + message // need immer setAutoFreeze
}
if (chunk_type === 'tool_call') {
if (!responseItem.toolCalls)
responseItem.toolCalls = []
toolCallId = uuidV4()
responseItem.toolCalls?.push({
id: toolCallId,
type: 'tool',
toolName: tool_name,
toolArguments: tool_arguments,
toolIcon: tool_icon,
toolIconDark: tool_icon_dark,
})
}
if (chunk_type === 'tool_result') {
const currentToolCallIndex = responseItem.toolCalls?.findIndex(item => item.id === toolCallId) ?? -1
if (currentToolCallIndex > -1) {
responseItem.toolCalls![currentToolCallIndex].toolError = tool_error
responseItem.toolCalls![currentToolCallIndex].toolDuration = tool_elapsed_time
responseItem.toolCalls![currentToolCallIndex].toolFiles = tool_files
responseItem.toolCalls![currentToolCallIndex].toolOutput = message
}
}
if (chunk_type === 'thought_start') {
if (!responseItem.toolCalls)
responseItem.toolCalls = []
thoughtId = uuidV4()
responseItem.toolCalls.push({
id: thoughtId,
type: 'thought',
thoughtOutput: '',
})
}
if (chunk_type === 'thought') {
const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
}
}
if (chunk_type === 'thought_end') {
const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
responseItem.toolCalls![currentThoughtIndex].thoughtCompleted = true
}
}
if (messageId && !hasSetResponseId) {
questionItem.id = `question-${messageId}`
responseItem.id = messageId
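
The two hunks above are the heart of the fix: in non-agent mode the stream now builds an ordered llmGenerationItems list instead of the old flat toolCalls list, and plain text chunks become first-class 'text' items so they interleave correctly with tool calls and thoughts. A standalone sketch of the reducer, condensed from the code above — field names follow the streamed payload shown in the diff; `appendChunk` and the `ids` object (standing in for the hook's closure variables toolCallId / thoughtId) are illustrative, not part of the commit:

import type { LLMGenerationItem } from '@/types/workflow'
import { v4 as uuidV4 } from 'uuid'

// Close any text item still streaming before a tool call or thought begins.
const closeOpenTextItem = (items: LLMGenerationItem[]) => {
  const open = items.find(item => item.type === 'text' && !item.textCompleted)
  if (open)
    open.textCompleted = true
}

function appendChunk(
  items: LLMGenerationItem[],
  chunk: any, // fields as destructured in the hook above
  ids: { toolCallId?: string; thoughtId?: string },
) {
  switch (chunk.chunk_type) {
    case 'text': {
      // Append to the open text item, or start a new one.
      const open = items.find(item => item.type === 'text' && !item.textCompleted)
      if (open)
        open.text = (open.text ?? '') + chunk.message
      else
        items.push({ id: uuidV4(), type: 'text', text: chunk.message })
      break
    }
    case 'tool_call': {
      closeOpenTextItem(items)
      ids.toolCallId = uuidV4()
      items.push({
        id: ids.toolCallId,
        type: 'tool',
        toolName: chunk.tool_name,
        toolArguments: chunk.tool_arguments,
        toolIcon: chunk.tool_icon,
        toolIconDark: chunk.tool_icon_dark,
      })
      break
    }
    case 'tool_result': {
      // Results attach to the most recent tool call by id.
      const call = items.find(item => item.id === ids.toolCallId)
      if (call) {
        call.toolError = chunk.tool_error
        call.toolDuration = chunk.tool_elapsed_time
        call.toolFiles = chunk.tool_files
        call.toolOutput = chunk.message
      }
      break
    }
    case 'thought_start': {
      closeOpenTextItem(items)
      ids.thoughtId = uuidV4()
      items.push({ id: ids.thoughtId, type: 'thought', thoughtOutput: '' })
      break
    }
    case 'thought':
    case 'thought_end': {
      const thought = items.find(item => item.id === ids.thoughtId)
      if (thought) {
        thought.thoughtOutput = (thought.thoughtOutput ?? '') + chunk.message
        if (chunk.chunk_type === 'thought_end')
          thought.thoughtCompleted = true
      }
      break
    }
  }
}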

View File

@@ -2,7 +2,7 @@ import type { FileEntity } from '@/app/components/base/file-uploader/types'
import type { TypeWithI18N } from '@/app/components/header/account-setting/model-provider-page/declarations'
import type { InputVarType } from '@/app/components/workflow/types'
import type { Annotation, MessageRating } from '@/models/log'
import type { FileResponse, IconObject, ToolCallItem } from '@/types/workflow'
import type { FileResponse, IconObject, LLMGenerationItem } from '@/types/workflow'
export type MessageMore = {
time: string
@@ -104,7 +104,7 @@ export type IChatItem = {
siblingIndex?: number
prevSibling?: string
nextSibling?: string
toolCalls?: ToolCallItem[]
llmGenerationItems?: LLMGenerationItem[]
}
export type Metadata = {

View File

@@ -1,6 +1,6 @@
import type { ChatMessageRes, IChatItem } from './chat/type'
import type { ChatItem, ChatItemInTree } from './types'
import type { ToolCallItem } from '@/types/workflow'
import type { LLMGenerationItem } from '@/types/workflow'
import { v4 as uuidV4 } from 'uuid'
import { UUID_NIL } from './constants'
@@ -234,18 +234,18 @@ function getThreadMessages(tree: ChatItemInTree[], targetMessageId?: string): Ch
return ret
}
const buildToolCallsFromHistorySequence = (message: ChatMessageRes): {
toolCalls: ToolCallItem[]
const buildLLMGenerationItemsFromHistorySequence = (message: ChatMessageRes): {
llmGenerationItems: LLMGenerationItem[]
message: string
} => {
const { answer, generation_detail } = message
if (!generation_detail) {
return { toolCalls: [], message: answer || '' }
return { llmGenerationItems: [], message: answer || '' }
}
const { reasoning_content = [], tool_calls = [], sequence = [] } = generation_detail
const toolCalls: ToolCallItem[] = []
const llmGenerationItems: LLMGenerationItem[] = []
let answerMessage = ''
sequence.forEach((segment) => {
@@ -260,7 +260,7 @@ const buildToolCallsFromHistorySequence = (message: ChatMessageRes): {
case 'reasoning': {
const reasoning = reasoning_content[segment.index]
if (reasoning) {
toolCalls.push({
llmGenerationItems.push({
id: uuidV4(),
type: 'thought',
thoughtOutput: reasoning,
@@ -272,7 +272,7 @@ const buildToolCallsFromHistorySequence = (message: ChatMessageRes): {
case 'tool_call': {
const toolCall = tool_calls[segment.index]
if (toolCall) {
toolCalls.push({
llmGenerationItems.push({
id: uuidV4(),
type: 'tool',
toolName: toolCall.name,
@@ -288,12 +288,12 @@ const buildToolCallsFromHistorySequence = (message: ChatMessageRes): {
}
})
return { toolCalls, message: answerMessage || '' }
return { llmGenerationItems, message: answerMessage || '' }
}
export {
buildChatItemTree,
buildToolCallsFromHistorySequence,
buildLLMGenerationItemsFromHistorySequence,
getLastAnswer,
getProcessedInputsFromUrlParams,
getProcessedSystemVariablesFromUrlParams,
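
How the history replay above works: generation_detail stores reasoning_content and tool_calls as side channels plus a sequence of segments, and each segment's index points back into the matching channel, so walking sequence in order reconstructs the same interleaving the live stream produced. A hypothetical payload to illustrate the shape (values invented; tool-call fields beyond `name` are elided in the visible hunk, and text segments presumably accumulate into the returned `message`):

// Hypothetical generation_detail; only the shape matters here.
const generation_detail = {
  reasoning_content: ['Check the weather API first.'],
  tool_calls: [{ name: 'get_weather' /* arguments/output fields elided in the hunk */ }],
  sequence: [
    { type: 'reasoning', index: 0 }, // -> 'thought' item
    { type: 'tool_call', index: 0 }, // -> 'tool' item
  ],
}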

View File

@@ -12,7 +12,7 @@ import {
} from 'react'
import { useStore as useAppStore } from '@/app/components/app/store'
import Chat from '@/app/components/base/chat/chat'
import { buildChatItemTree, buildToolCallsFromHistorySequence, getThreadMessages } from '@/app/components/base/chat/utils'
import { buildChatItemTree, buildLLMGenerationItemsFromHistorySequence, getThreadMessages } from '@/app/components/base/chat/utils'
import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils'
import Loading from '@/app/components/base/loading'
import { fetchConversationMessages } from '@/service/debug'
@@ -38,8 +38,8 @@ function getFormattedChatList(messages: ChatMessageRes[]) {
const answerFiles = item.message_files?.filter((file: any) => file.belongs_to === 'assistant') || []
res.push({
id: item.id,
content: buildToolCallsFromHistorySequence(item).message,
toolCalls: buildToolCallsFromHistorySequence(item).toolCalls,
content: buildLLMGenerationItemsFromHistorySequence(item).message,
llmGenerationItems: buildLLMGenerationItemsFromHistorySequence(item).llmGenerationItems,
feedback: item.feedback,
isAnswer: true,
citation: item.metadata?.retriever_resources,

View File

@@ -158,14 +158,37 @@ export function useChatMessageSender({
}) => {
if (!isCurrentRun())
return
if (chunk_type === 'text')
if (chunk_type === 'text') {
responseItem.content = responseItem.content + message
if (!responseItem.llmGenerationItems)
responseItem.llmGenerationItems = []
const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
if (isNotCompletedTextItemIndex > -1) {
responseItem.llmGenerationItems![isNotCompletedTextItemIndex].text += message
}
else {
toolCallId = uuidV4()
responseItem.llmGenerationItems?.push({
id: toolCallId,
type: 'text',
text: message,
})
}
}
if (chunk_type === 'tool_call') {
if (!responseItem.toolCalls)
responseItem.toolCalls = []
if (!responseItem.llmGenerationItems)
responseItem.llmGenerationItems = []
const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
if (isNotCompletedTextItemIndex > -1) {
responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
}
toolCallId = uuidV4()
responseItem.toolCalls?.push({
responseItem.llmGenerationItems?.push({
id: toolCallId,
type: 'tool',
toolName: tool_name,
@@ -176,21 +199,26 @@ export function useChatMessageSender({
}
if (chunk_type === 'tool_result') {
const currentToolCallIndex = responseItem.toolCalls?.findIndex(item => item.id === toolCallId) ?? -1
const currentToolCallIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === toolCallId) ?? -1
if (currentToolCallIndex > -1) {
responseItem.toolCalls![currentToolCallIndex].toolError = tool_error
responseItem.toolCalls![currentToolCallIndex].toolDuration = tool_elapsed_time
responseItem.toolCalls![currentToolCallIndex].toolFiles = tool_files
responseItem.toolCalls![currentToolCallIndex].toolOutput = message
responseItem.llmGenerationItems![currentToolCallIndex].toolError = tool_error
responseItem.llmGenerationItems![currentToolCallIndex].toolDuration = tool_elapsed_time
responseItem.llmGenerationItems![currentToolCallIndex].toolFiles = tool_files
responseItem.llmGenerationItems![currentToolCallIndex].toolOutput = message
}
}
if (chunk_type === 'thought_start') {
if (!responseItem.toolCalls)
responseItem.toolCalls = []
if (!responseItem.llmGenerationItems)
responseItem.llmGenerationItems = []
const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
if (isNotCompletedTextItemIndex > -1) {
responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
}
thoughtId = uuidV4()
responseItem.toolCalls.push({
responseItem.llmGenerationItems?.push({
id: thoughtId,
type: 'thought',
thoughtOutput: '',
@@ -198,17 +226,17 @@ export function useChatMessageSender({
}
if (chunk_type === 'thought') {
const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
}
}
if (chunk_type === 'thought_end') {
const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
responseItem.toolCalls![currentThoughtIndex].thoughtCompleted = true
responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
responseItem.llmGenerationItems![currentThoughtIndex].thoughtCompleted = true
}
}
@@ -245,6 +273,10 @@ export function useChatMessageSender({
if (errorMessage) {
responseItem.content = errorMessage
responseItem.isError = true
responseItem.llmGenerationItems?.forEach((item) => {
if (item.type === 'text')
item.isError = true
})
updateCurrentQAOnTree({
placeholderQuestionId,
questionItem,
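
Error propagation above: when a run fails, the error text replaces `content`, and every 'text' item is flagged so the renderer (see the tool-call-item file below) styles it as an error rather than showing stale partial output. The hook mutates in place; a hypothetical end state of a failed answer, for illustration:

const failedAnswer = {
  content: 'Request timed out', // errorMessage replaced the partial text
  isError: true,
  llmGenerationItems: [
    { id: 'c1', type: 'tool', toolName: 'get_weather' /* ... */ },
    { id: 't1', type: 'text', text: 'Partial ans', isError: true }, // flagged by the loop above
  ],
}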

View File

@@ -2,8 +2,8 @@
import type { FC } from 'react'
import type {
LLMGenerationItem,
LLMTraceItem,
ToolCallItem,
} from '@/types/workflow'
import {
RiArrowLeftLine,
@@ -63,7 +63,7 @@ const LLMResultPanel: FC<Props> = ({
<div className="space-y-1 p-2">
{
formattedList.map((item, index) => (
<ToolCallItemComponent key={index} payload={item as ToolCallItem} />
<ToolCallItemComponent key={index} payload={item as LLMGenerationItem} />
))
}
</div>

View File

@@ -1,4 +1,4 @@
import type { ToolCallItem } from '@/types/workflow'
import type { LLMGenerationItem } from '@/types/workflow'
import {
RiArrowDownSLine,
} from '@remixicon/react'
@@ -6,6 +6,7 @@ import { useState } from 'react'
import { useTranslation } from 'react-i18next'
import AppIcon from '@/app/components/base/app-icon'
import { Thinking } from '@/app/components/base/icons/src/vender/workflow'
import { Markdown } from '@/app/components/base/markdown'
import BlockIcon from '@/app/components/workflow/block-icon'
import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
@@ -14,7 +15,7 @@ import { cn } from '@/utils/classnames'
type ToolCallItemComponentProps = {
className?: string
payload: ToolCallItem
payload: LLMGenerationItem
}
const ToolCallItemComponent = ({
className,
@@ -22,6 +23,19 @@ const ToolCallItemComponent = ({
}: ToolCallItemComponentProps) => {
const { t } = useTranslation()
const [expand, setExpand] = useState(false)
if (payload.type === 'text') {
return (
<Markdown
className={cn(
'px-2',
payload.isError && '!text-[#F04438]',
)}
content={payload.text ?? ''}
/>
)
}
return (
<div
className={cn('rounded-[10px] border-[0.5px] border-components-panel-border bg-background-default-subtle px-2 pb-1 pt-2 shadow-xs', className)}
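
The early return above is where 'text' items surface in the UI: they bypass the collapsible tool/thought card entirely and render as Markdown, with the isError flag set by the sender hook turning the text red. Hypothetical usages (payload values invented):

// Renders as plain Markdown, outside the bordered card:
<ToolCallItemComponent
  payload={{ id: 't1', type: 'text', text: 'The weather in **Tokyo** is 21°C.' }}
/>

// The same item after a failed run (isError set by the sender hook):
<ToolCallItemComponent
  payload={{ id: 't1', type: 'text', text: 'Request timed out', isError: true }}
/>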

View File

@@ -33,9 +33,9 @@ export type IconObject = {
content: string
}
export type ToolCallItem = {
export type LLMGenerationItem = {
id: string
type: 'model' | 'tool' | 'thought'
type: 'model' | 'tool' | 'thought' | 'text'
thoughtCompleted?: boolean
thoughtOutput?: string
@@ -55,6 +55,10 @@ export type ToolCallItem = {
modelDuration?: number
modelIcon?: string | IconObject
modelIconDark?: string | IconObject
text?: string
textCompleted?: boolean
isError?: boolean
}
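
The renamed LLMGenerationItem stays a single loose type with per-variant optional fields rather than a discriminated union, so the 'text' additions ride along as three more optionals. Illustrative values for each streamed variant (data invented):

const thought: LLMGenerationItem = { id: 'r1', type: 'thought', thoughtOutput: 'Check the API first.', thoughtCompleted: true }
const tool: LLMGenerationItem = { id: 'c1', type: 'tool', toolName: 'get_weather', toolArguments: '{"city":"Tokyo"}', toolOutput: '{"temp":21}', toolDuration: 0.4 }
const text: LLMGenerationItem = { id: 't1', type: 'text', text: 'It is 21°C in Tokyo.', textCompleted: true }

A discriminated union keyed on `type` would let the compiler enforce which fields belong to which variant, at the cost of touching every call site.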
export type ToolCallDetail = {