feat: AI dialogue nodes support chat history parameters (#4245)

shaohuzhang1 2025-10-24 11:35:27 +08:00 committed by GitHub
parent 47c27e58f7
commit 586c3536ca
6 changed files with 17 additions and 9 deletions


@@ -17,7 +17,6 @@ from django.db.models import QuerySet
from langchain.schema import HumanMessage, SystemMessage
from langchain_core.messages import BaseMessage, AIMessage
from application.flow.i_step_node import NodeResult, INode
from application.flow.step_node.ai_chat_step_node.i_chat_node import IChatNode
from application.flow.tools import Reasoning, mcp_response_generator
@@ -91,7 +90,6 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
_write_context(node_variable, workflow_variable, node, workflow, answer, reasoning_content)
def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, workflow):
"""
Write context data
@@ -194,12 +192,16 @@ class BaseChatNode(IChatNode):
if stream:
r = chat_model.stream(message_list)
return NodeResult({'result': r, 'chat_model': chat_model, 'message_list': message_list,
'history_message': history_message, 'question': question.content}, {},
'history_message': [{'content': message.content, 'role': message.type} for message in
(history_message if history_message is not None else [])],
'question': question.content}, {},
_write_context=write_context_stream)
else:
r = chat_model.invoke(message_list)
return NodeResult({'result': r, 'chat_model': chat_model, 'message_list': message_list,
'history_message': history_message, 'question': question.content}, {},
'history_message': [{'content': message.content, 'role': message.type} for message in
(history_message if history_message is not None else [])],
'question': question.content}, {},
_write_context=write_context)
def _handle_mcp_request(self, mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_id, mcp_tool_ids, tool_ids,
@@ -250,7 +252,9 @@ class BaseChatNode(IChatNode):
r = mcp_response_generator(chat_model, message_list, json.dumps(mcp_servers_config), mcp_output_enable)
return NodeResult(
{'result': r, 'chat_model': chat_model, 'message_list': message_list,
'history_message': history_message, 'question': question.content}, {},
'history_message': [{'content': message.content, 'role': message.type} for message in
(history_message if history_message is not None else [])],
'question': question.content}, {},
_write_context=write_context_stream)
return None
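
With this change, the stream, invoke, and MCP branches all store 'history_message' in the NodeResult as plain role/content dictionaries rather than raw LangChain message objects. A minimal sketch of the conversion the inline comprehension performs, assuming a history built from langchain_core message classes (the variable names here are illustrative):

from langchain_core.messages import AIMessage, HumanMessage

# Illustrative history, as it might arrive from the workflow's memory.
history_message = [HumanMessage(content='Hi'), AIMessage(content='Hello!')]

# Same conversion as in the diff: BaseMessage.type ('human', 'ai', 'system') becomes the role.
serialized = [{'content': message.content, 'role': message.type}
              for message in (history_message if history_message is not None else [])]
# serialized == [{'content': 'Hi', 'role': 'human'}, {'content': 'Hello!', 'role': 'ai'}]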
@@ -316,9 +320,7 @@ class BaseChatNode(IChatNode):
"index": index,
'run_time': self.context.get('run_time'),
'system': self.context.get('system'),
'history_message': [{'content': message.content, 'role': message.type} for message in
(self.context.get('history_message') if self.context.get(
'history_message') is not None else [])],
'history_message': self.context.get('history_message'),
'question': self.context.get('question'),
'answer': self.context.get('answer'),
'reasoning_content': self.context.get('reasoning_content'),
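
Because get_details now returns self.context.get('history_message') directly, serialization happens exactly once, at NodeResult construction. The same list comprehension is repeated in three places, though, so a possible follow-up would be to factor it into a helper. The sketch below is only an illustration of that idea, not code from this commit; the helper name is made up:

from typing import Dict, List, Optional

from langchain_core.messages import BaseMessage


def serialize_history(history_message: Optional[List[BaseMessage]]) -> List[Dict]:
    """Convert LangChain messages into the role/content dicts stored in the node context."""
    return [{'content': message.content, 'role': message.type}
            for message in (history_message or [])]

Each NodeResult construction above could then pass 'history_message': serialize_history(history_message).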


@@ -125,6 +125,7 @@ export default {
},
defaultPrompt: 'Known Information',
think: 'Thinking Process',
historyMessage: 'Historical chat records',
},
searchKnowledgeNode: {
label: 'Knowledge Retrieval',


@@ -128,6 +128,7 @@ export default {
},
defaultPrompt: '已知信息',
think: '思考过程',
historyMessage: '历史聊天记录',
},
searchKnowledgeNode: {
label: '知识库检索',


@@ -126,6 +126,7 @@ export default {
},
defaultPrompt: '已知信息',
think: '思考過程',
historyMessage: '歷史聊天記錄',
},
searchKnowledgeNode: {
label: '知識庫檢索',


@@ -82,6 +82,10 @@ export const aiChatNode = {
label: t('views.applicationWorkflow.nodes.aiChatNode.think'),
value: 'reasoning_content',
},
{
label: t('views.applicationWorkflow.nodes.aiChatNode.historyMessage'),
value: 'history_message',
},
],
},
},


@@ -463,7 +463,6 @@ const openGeneratePromptDialog = (modelId: string) => {
}
}
const replace = (v: any) => {
console.log(props.nodeModel.properties.node_data.model_setting)
set(props.nodeModel.properties.node_data, 'system', v)
}
const openReasoningParamSettingDialog = () => {