mirror of https://github.com/1Panel-dev/MaxKB.git
fix: Lost content during the answering process (#2256)
parent 2ba7a24f95
commit 6a5ec866c0
@@ -76,10 +76,12 @@ def event_content(response,
     all_text = ''
     reasoning_content = ''
     try:
+        response_reasoning_content = False
         for chunk in response:
             reasoning_chunk = reasoning.get_reasoning_content(chunk)
             content_chunk = reasoning_chunk.get('content')
             if 'reasoning_content' in chunk.additional_kwargs:
+                response_reasoning_content = True
                 reasoning_content_chunk = chunk.additional_kwargs.get('reasoning_content', '')
             else:
                 reasoning_content_chunk = reasoning_chunk.get('reasoning_content')
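The new response_reasoning_content flag records whether any chunk carried provider-native reasoning in additional_kwargs; tag parsing through the Reasoning helper remains the fallback, and the flag later gates the end-of-stream flush so the two sources are not mixed. A condensed, self-contained restatement of the per-chunk branch (FakeChunk and parse_tags are illustrative stand-ins, not MaxKB's types):

class FakeChunk:
    # illustrative stand-in for a streamed model chunk
    def __init__(self, content, additional_kwargs=None):
        self.content = content
        self.additional_kwargs = additional_kwargs or {}

def reasoning_for(chunk, parse_tags):
    # returns (seen_native_reasoning, reasoning_text) for one chunk
    if 'reasoning_content' in chunk.additional_kwargs:
        return True, chunk.additional_kwargs.get('reasoning_content', '')
    return False, parse_tags(chunk.content)   # fall back to <think> tag parsing

print(reasoning_for(FakeChunk('partial answer', {'reasoning_content': 'step 1'}),
                    parse_tags=lambda text: ''))
# (True, 'step 1')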
@@ -95,6 +97,21 @@ def event_content(response,
                                                                               'node_type': 'ai-chat-node',
                                                                               'real_node_id': 'ai-chat-node',
                                                                               'reasoning_content': reasoning_content_chunk if reasoning_content_enable else ''})
+        reasoning_chunk = reasoning.get_end_reasoning_content()
+        all_text += reasoning_chunk.get('content')
+        reasoning_content_chunk = ""
+        if not response_reasoning_content:
+            reasoning_content_chunk = reasoning_chunk.get(
+                'reasoning_content')
+        yield manage.get_base_to_response().to_stream_chunk_response(chat_id, str(chat_record_id), 'ai-chat-node',
+                                                                     [], reasoning_chunk.get('content'),
+                                                                     False,
+                                                                     0, 0, {'node_is_end': False,
+                                                                            'view_type': 'many_view',
+                                                                            'node_type': 'ai-chat-node',
+                                                                            'real_node_id': 'ai-chat-node',
+                                                                            'reasoning_content'
+                                                                            : reasoning_content_chunk if reasoning_content_enable else ''})
         # get token usage
         if is_ai_chat:
             try:
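The block added after the chunk loop is the core of this fix: once the stream ends, whatever the Reasoning parser is still buffering (for example a <think> section that never closed) is flushed with get_end_reasoning_content() and emitted as one final stream chunk instead of being dropped. A toy, self-contained sketch of the pattern, independent of MaxKB's response classes:

class TagBufferParser:
    # toy stand-in for the Reasoning helper: buffers text until '</think>' arrives
    def __init__(self):
        self.buffer = ''

    def feed(self, text):
        self.buffer += text
        if '</think>' in self.buffer:
            done, self.buffer = self.buffer.split('</think>', 1)
            return done + '</think>'
        return ''            # still buffering, nothing safe to emit yet

    def flush(self):
        tail, self.buffer = self.buffer, ''
        return tail          # whatever never saw a closing tag

def stream_with_flush(chunks, parser):
    for chunk in chunks:
        piece = parser.feed(chunk)
        if piece:
            yield piece
    tail = parser.flush()    # analogous to reasoning.get_end_reasoning_content()
    if tail:
        yield tail           # without this final yield, the buffered tail is lost

print(list(stream_with_flush(['<think>plan', ' the answer'], TagBufferParser())))
# ['<think>plan the answer']  -- recovered only because of the final flush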
@@ -276,11 +293,13 @@ class BaseChatStep(IChatStep):
             response_token = 0
         write_context(self, manage, request_token, response_token, chat_result.content)
         reasoning_result = reasoning.get_reasoning_content(chat_result)
-        content = reasoning_result.get('content')
+        reasoning_result_end = reasoning.get_end_reasoning_content()
+        content = reasoning_result.get('content') + reasoning_result_end.get('content')
         if 'reasoning_content' in chat_result.response_metadata:
             reasoning_content = chat_result.response_metadata.get('reasoning_content', '')
         else:
-            reasoning_content = reasoning_result.get('reasoning_content')
+            reasoning_content = reasoning_result.get('reasoning_content') + reasoning_result_end.get(
+                'reasoning_content')
         post_response_handler.handler(chat_id, chat_record_id, paragraph_list, problem_text,
                                       chat_result.content, manage, self, padding_problem_text, client_id,
                                       reasoning_content=reasoning_content if reasoning_content_enable else '')
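The blocking (non-streaming) path applies the same idea by concatenation: the parsed result is combined with the end-of-parse flush, while reasoning reported by the provider in response_metadata still takes precedence over anything tag-parsed. A compact sketch of that combination with plain dicts (the function name is illustrative; the branching mirrors the lines above):

def combine(reasoning_result, reasoning_result_end, response_metadata):
    # answer text: parsed content plus whatever the flush recovered
    content = reasoning_result.get('content') + reasoning_result_end.get('content')
    if 'reasoning_content' in response_metadata:
        # provider-native reasoning wins over tag parsing
        reasoning_content = response_metadata.get('reasoning_content', '')
    else:
        reasoning_content = (reasoning_result.get('reasoning_content')
                             + reasoning_result_end.get('reasoning_content'))
    return content, reasoning_content

print(combine({'content': 'Final answer.', 'reasoning_content': ''},
              {'content': '', 'reasoning_content': 'unclosed reasoning'},
              {}))
# ('Final answer.', 'unclosed reasoning')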
@@ -55,10 +55,12 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
                                                           'reasoning_content_start': '<think>'})
     reasoning = Reasoning(model_setting.get('reasoning_content_start', '<think>'),
                           model_setting.get('reasoning_content_end', '</think>'))
+    response_reasoning_content = False
     for chunk in response:
         reasoning_chunk = reasoning.get_reasoning_content(chunk)
         content_chunk = reasoning_chunk.get('content')
         if 'reasoning_content' in chunk.additional_kwargs:
+            response_reasoning_content = True
             reasoning_content_chunk = chunk.additional_kwargs.get('reasoning_content', '')
         else:
             reasoning_content_chunk = reasoning_chunk.get('reasoning_content')
@@ -69,6 +71,16 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
         yield {'content': content_chunk,
                'reasoning_content': reasoning_content_chunk if model_setting.get('reasoning_content_enable',
                                                                                  False) else ''}
+
+    reasoning_chunk = reasoning.get_end_reasoning_content()
+    answer += reasoning_chunk.get('content')
+    reasoning_content_chunk = ""
+    if not response_reasoning_content:
+        reasoning_content_chunk = reasoning_chunk.get(
+            'reasoning_content')
+    yield {'content': reasoning_chunk.get('content'),
+           'reasoning_content': reasoning_content_chunk if model_setting.get('reasoning_content_enable',
+                                                                             False) else ''}
     _write_context(node_variable, workflow_variable, node, workflow, answer, reasoning_content)
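Because write_context_stream now ends with the same flush-and-yield step, a consumer of this generator receives one extra chunk carrying whatever was still buffered when the model stopped. A consumer-side sketch (the dict shape {'content', 'reasoning_content'} matches the yields above; the data is made up):

def consume(stream):
    answer, reasoning = '', ''
    for piece in stream:
        answer += piece['content']
        reasoning += piece['reasoning_content']
    return answer, reasoning

# two ordinary chunks followed by the final flush chunk this commit adds
stream = [{'content': 'The answer is ', 'reasoning_content': ''},
          {'content': '42.', 'reasoning_content': ''},
          {'content': '', 'reasoning_content': 'half-finished thought'}]
print(consume(stream))
# ('The answer is 42.', 'half-finished thought')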
@@ -86,11 +98,12 @@ def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, wor
                                                           'reasoning_content_start': '<think>'})
     reasoning = Reasoning(model_setting.get('reasoning_content_start'), model_setting.get('reasoning_content_end'))
     reasoning_result = reasoning.get_reasoning_content(response)
-    content = reasoning_result.get('content')
+    reasoning_result_end = reasoning.get_end_reasoning_content()
+    content = reasoning_result.get('content') + reasoning_result_end.get('content')
     if 'reasoning_content' in response.response_metadata:
         reasoning_content = response.response_metadata.get('reasoning_content', '')
     else:
-        reasoning_content = reasoning_result.get('reasoning_content')
+        reasoning_content = reasoning_result.get('reasoning_content') + reasoning_result_end.get('reasoning_content')
     _write_context(node_variable, workflow_variable, node, workflow, content, reasoning_content)
@@ -32,6 +32,17 @@ class Reasoning:
         self.reasoning_content_is_end = False
         self.reasoning_content_chunk = ""
 
+    def get_end_reasoning_content(self):
+        if not self.reasoning_content_is_start and not self.reasoning_content_is_end:
+            r = {'content': self.all_content, 'reasoning_content': ''}
+            self.reasoning_content_chunk = ""
+            return r
+        if self.reasoning_content_is_start and not self.reasoning_content_is_end:
+            r = {'content': '', 'reasoning_content': self.reasoning_content_chunk}
+            self.reasoning_content_chunk = ""
+            return r
+        return {'content': '', 'reasoning_content': ''}
+
     def get_reasoning_content(self, chunk):
         # if there is no start-of-reasoning tag, everything is answer content
         if self.reasoning_content_start_tag is None or len(self.reasoning_content_start_tag) == 0:
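The new get_end_reasoning_content() distinguishes three terminal states and clears its chunk buffer as it flushes. A simplified, runnable model of the three branches, with the state flags passed in explicitly instead of read from the instance (names follow the diff; this is an illustration, not the class itself):

def end_reasoning(is_start, is_end, all_content, buffered):
    if not is_start and not is_end:
        # no start tag was ever seen: flush the accumulated text as answer content
        return {'content': all_content, 'reasoning_content': ''}
    if is_start and not is_end:
        # the reasoning block never closed: flush the buffer as reasoning instead of dropping it
        return {'content': '', 'reasoning_content': buffered}
    # the block opened and closed normally: nothing is left to flush
    return {'content': '', 'reasoning_content': ''}

print(end_reasoning(True, False, '', 'half-finished thought'))
# {'content': '', 'reasoning_content': 'half-finished thought'}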
@@ -60,8 +71,7 @@ class Reasoning:
             return {'content': chunk.content, 'reasoning_content': ''}
         # check whether the end tag is present
         if reasoning_content_end_tag_prefix_index > -1:
-            if len(
-                    self.reasoning_content_chunk) - reasoning_content_end_tag_prefix_index > self.reasoning_content_end_tag_len:
+            if len(self.reasoning_content_chunk) - reasoning_content_end_tag_prefix_index >= self.reasoning_content_end_tag_len:
                 reasoning_content_end_tag_index = self.reasoning_content_chunk.find(self.reasoning_content_end_tag)
                 if reasoning_content_end_tag_index > -1:
                     reasoning_content_chunk = self.reasoning_content_chunk[0:reasoning_content_end_tag_index]
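The only behavioural change in this hunk is > becoming >=. When the buffered text ends exactly at the end tag, the distance from the candidate tag position to the end of the buffer equals the tag length, so the strict comparison skipped the extraction branch and the finished reasoning could stay stuck in the buffer until the stream ended. A small arithmetic check of that boundary case (prefix_index below only approximates reasoning_content_end_tag_prefix_index):

end_tag = '</think>'
buffer = 'last step</think>'              # the stream happens to end exactly at the end tag
prefix_index = buffer.find('<')           # where a possible end tag starts in the buffer
available = len(buffer) - prefix_index    # characters available from that position

print(available, len(end_tag))            # 8 8
print(available > len(end_tag))           # False -> old condition: the tag is not cut out yet
print(available >= len(end_tag))          # True  -> new condition: 'last step' is extracted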
@@ -158,12 +158,15 @@ export class ChatRecordManage {
   get_run_node() {
     if (
       this.write_node_info &&
-      (this.write_node_info.current_node.buffer.length > 0 ||
+      (this.write_node_info.current_node.reasoning_content_buffer.length > 0 ||
+        this.write_node_info.current_node.buffer.length > 0 ||
         !this.write_node_info.current_node.is_end)
     ) {
       return this.write_node_info
     }
-    const run_node = this.node_list.filter((item) => item.buffer.length > 0 || !item.is_end)[0]
+    const run_node = this.node_list.filter(
+      (item) => item.reasoning_content_buffer.length > 0 || item.buffer.length > 0 || !item.is_end
+    )[0]
 
     if (run_node) {
       const index = this.node_list.indexOf(run_node)