mirror of https://github.com/1Panel-dev/MaxKB.git (synced 2025-12-26 01:33:05 +00:00)

fix: Correctly carry chat history in image understanding

parent 10fc464fb2
commit a32977094f
@@ -6,7 +6,7 @@ from functools import reduce
 from typing import List, Dict
 
 from django.db.models import QuerySet
-from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
+from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage, AIMessage
 
 from application.flow.i_step_node import NodeResult, INode
 from application.flow.step_node.image_understand_step_node.i_image_understand_node import IImageUnderstandNode
@@ -96,11 +96,19 @@ class BaseImageUnderstandNode(IImageUnderstandNode):
     def get_history_message_for_details(self, history_chat_record, dialogue_number):
         start_index = len(history_chat_record) - dialogue_number
         history_message = reduce(lambda x, y: [*x, *y], [
-            [self.generate_history_human_message_for_details(history_chat_record[index]), history_chat_record[index].get_ai_message()]
+            [self.generate_history_human_message_for_details(history_chat_record[index]), self.generate_history_ai_message(history_chat_record[index])]
             for index in
             range(start_index if start_index > 0 else 0, len(history_chat_record))], [])
         return history_message
 
+    def generate_history_ai_message(self, chat_record):
+        for val in chat_record.details.values():
+            if self.node.id == val['node_id'] and 'image_list' in val:
+                if val['dialogue_type'] == 'WORKFLOW':
+                    return chat_record.get_ai_message()
+                return AIMessage(content=val['answer'])
+        return chat_record.get_ai_message()
+
     def generate_history_human_message_for_details(self, chat_record):
         for data in chat_record.details.values():
             if self.node.id == data['node_id'] and 'image_list' in data:
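The core of the fix is the new generate_history_ai_message above: when the record's details contain an entry for this node that carries an image_list and the turn was not a 'WORKFLOW' dialogue, the node-level answer becomes the AI message in the history instead of the full get_ai_message() reply. Below is a minimal, self-contained sketch of that branching; FakeChatRecord, the explicit node_id argument, and the sample contents are hypothetical illustrations, not MaxKB code.

# --- illustration only, not part of the diff ---
from langchain_core.messages import AIMessage


class FakeChatRecord:
    """Hypothetical stand-in for a chat record with per-node details."""

    def __init__(self, details, full_answer):
        self.details = details
        self.full_answer = full_answer

    def get_ai_message(self):
        # The whole assistant reply for the turn.
        return AIMessage(content=self.full_answer)


def generate_history_ai_message(node_id, chat_record):
    # Same branching as the patched method, with the node id passed explicitly.
    for val in chat_record.details.values():
        if node_id == val['node_id'] and 'image_list' in val:
            if val['dialogue_type'] == 'WORKFLOW':
                return chat_record.get_ai_message()
            return AIMessage(content=val['answer'])
    return chat_record.get_ai_message()


record = FakeChatRecord(
    details={'step1': {'node_id': 'img-node', 'image_list': [],
                       'dialogue_type': 'NODE', 'answer': 'A cat on a sofa.'}},
    full_answer='Full workflow reply, possibly built from several nodes.')

# Prints the node-level answer, because this node's detail has an image_list
# and its dialogue_type is not 'WORKFLOW'.
print(generate_history_ai_message('img-node', record).content)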
@@ -113,7 +121,7 @@ class BaseImageUnderstandNode(IImageUnderstandNode):
     def get_history_message(self, history_chat_record, dialogue_number):
         start_index = len(history_chat_record) - dialogue_number
         history_message = reduce(lambda x, y: [*x, *y], [
-            [self.generate_history_human_message(history_chat_record[index]), history_chat_record[index].get_ai_message()]
+            [self.generate_history_human_message(history_chat_record[index]), self.generate_history_ai_message(history_chat_record[index])]
             for index in
             range(start_index if start_index > 0 else 0, len(history_chat_record))], [])
         return history_message
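For reference, the reduce(...) expression in both get_history_message variants simply flattens per-turn [human, ai] pairs into one alternating list, truncated to the last dialogue_number turns. A small sketch of that pattern, using plain message pairs in place of real chat records (the sample contents are made up):

# --- illustration only, not part of the diff ---
from functools import reduce

from langchain_core.messages import AIMessage, HumanMessage

history_chat_record = [
    (HumanMessage(content='What is in picture 1?'), AIMessage(content='A dog.')),
    (HumanMessage(content='And picture 2?'), AIMessage(content='A bridge.')),
    (HumanMessage(content='Compare them.'), AIMessage(content='One is an animal, one is a structure.')),
]
dialogue_number = 2  # keep only the last two turns

start_index = len(history_chat_record) - dialogue_number
history_message = reduce(lambda x, y: [*x, *y], [
    [history_chat_record[index][0], history_chat_record[index][1]]
    for index in
    range(start_index if start_index > 0 else 0, len(history_chat_record))], [])

# Alternating Human/AI messages for the last `dialogue_number` turns.
for message in history_message:
    print(type(message).__name__, message.content)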