fix: In the LLM model in dify, when a message is added, the first cli… (#29540)

Co-authored-by: 青枕 <qingzhen.ww@alibaba-inc.com>
This commit is contained in:
青枕 2026-01-13 15:42:32 +08:00 committed by GitHub
parent ea708e7a32
commit 0e33dfb5c2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -106,12 +106,12 @@ const ConfigPrompt: FC<Props> = ({
   const handleAddPrompt = useCallback(() => {
     const newPrompt = produce(payload as PromptItem[], (draft) => {
       if (draft.length === 0) {
-        draft.push({ role: PromptRole.system, text: '' })
+        draft.push({ role: PromptRole.system, text: '', id: uuid4() })
         return
       }
      const isLastItemUser = draft[draft.length - 1].role === PromptRole.user
-      draft.push({ role: isLastItemUser ? PromptRole.assistant : PromptRole.user, text: '' })
+      draft.push({ role: isLastItemUser ? PromptRole.assistant : PromptRole.user, text: '', id: uuid4() })
     })
     onChange(newPrompt)
   }, [onChange, payload])