fix: 修复对话使用api调用无法响应数据 (#1755)

This commit is contained in:
shaohuzhang1 2024-12-04 14:19:37 +08:00 committed by GitHub
parent c4c4b6e9cd
commit 6b4cee1412
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 16 additions and 8 deletions

View File

@@ -19,8 +19,8 @@ def _is_interrupt_exec(node, node_variable: Dict, workflow_variable: Dict):
def _write_context(node_variable: Dict, workflow_variable: Dict, node: INode, workflow, answer: str):
result = node_variable.get('result')
node.context['child_node'] = node_variable['child_node']
node.context['is_interrupt_exec'] = node_variable['is_interrupt_exec']
node.context['child_node'] = node_variable.get('child_node')
node.context['is_interrupt_exec'] = node_variable.get('is_interrupt_exec')
node.context['message_tokens'] = result.get('usage', {}).get('prompt_tokens', 0)
node.context['answer_tokens'] = result.get('usage', {}).get('completion_tokens', 0)
node.context['answer'] = answer
@@ -81,7 +81,9 @@ def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, wor
@param node: 节点实例对象
@param workflow: 工作流管理器
"""
response = node_variable.get('result')['choices'][0]['message']
response = node_variable.get('result', {}).get('data', {})
node_variable['result'] = {'usage': {'completion_tokens': response.get('completion_tokens'),
'prompt_tokens': response.get('prompt_tokens')}}
answer = response.get('content', '') or "抱歉,没有查找到相关内容,请重新描述您的问题或提供更多信息。"
_write_context(node_variable, workflow_variable, node, workflow, answer)

View File

@@ -328,11 +328,13 @@ class WorkflowManage:
'message_tokens' in row and row.get('message_tokens') is not None])
answer_tokens = sum([row.get('answer_tokens') for row in details.values() if
'answer_tokens' in row and row.get('answer_tokens') is not None])
answer_text_list = self.get_answer_text_list()
answer_text = '\n\n'.join(answer['content'] for answer in answer_text_list)
self.work_flow_post_handler.handler(self.params['chat_id'], self.params['chat_record_id'],
self.answer,
answer_text,
self)
return self.base_to_response.to_block_response(self.params['chat_id'],
self.params['chat_record_id'], self.answer, True
self.params['chat_record_id'], answer_text, True
, message_tokens, answer_tokens,
_status=status.HTTP_200_OK if self.status == 200 else status.HTTP_500_INTERNAL_SERVER_ERROR)

View File

@@ -21,15 +21,19 @@ class SystemToResponse(BaseToResponse):
if other_params is None:
other_params = {}
return result.success({'chat_id': str(chat_id), 'id': str(chat_record_id), 'operate': True,
'content': content, 'is_end': is_end, **other_params}, response_status=_status,
'content': content, 'is_end': is_end, **other_params,
'completion_tokens': completion_tokens, 'prompt_tokens': prompt_tokens},
response_status=_status,
code=_status)
def to_stream_chunk_response(self, chat_id, chat_record_id, node_id, up_node_id_list, content, is_end, completion_tokens,
def to_stream_chunk_response(self, chat_id, chat_record_id, node_id, up_node_id_list, content, is_end,
completion_tokens,
prompt_tokens, other_params: dict = None):
if other_params is None:
other_params = {}
chunk = json.dumps({'chat_id': str(chat_id), 'chat_record_id': str(chat_record_id), 'operate': True,
'content': content, 'node_id': node_id, 'up_node_id_list': up_node_id_list, 'is_end': is_end,
'content': content, 'node_id': node_id, 'up_node_id_list': up_node_id_list,
'is_end': is_end,
'usage': {'completion_tokens': completion_tokens,
'prompt_tokens': prompt_tokens,
'total_tokens': completion_tokens + prompt_tokens},