liqiang-fit2cloud 2025-02-27 15:00:51 +08:00
commit b988d5abee
4 changed files with 10 additions and 4 deletions

View File

@@ -47,7 +47,7 @@ class BaseImageGenerateNode(IImageGenerateNode):
             file_url = FileSerializer(data={'file': file, 'meta': meta}).upload()
             file_urls.append(file_url)
         self.context['image_list'] = [{'file_id': path.split('/')[-1], 'url': path} for path in file_urls]
-        answer = '\n'.join([f"![Image]({path})" for path in file_urls])
+        answer = ' '.join([f"![Image]({path})" for path in file_urls])
         return NodeResult({'answer': answer, 'chat_model': tti_model, 'message_list': message_list,
                            'image': [{'file_id': path.split('/')[-1], 'url': path} for path in file_urls],
                            'history_message': history_message, 'question': question}, {})
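
Note on the hunk above (editor's sketch, not part of the commit): the markdown image links in the answer string are now joined with a single space instead of a newline, so multiple generated images render on one line. A minimal illustration, assuming two hypothetical file URLs:

    file_urls = ['/api/file/aaa.png', '/api/file/bbb.png']   # hypothetical URLs
    # old behaviour: one image per line
    '\n'.join([f"![Image]({path})" for path in file_urls])
    # -> '![Image](/api/file/aaa.png)\n![Image](/api/file/bbb.png)'
    # new behaviour: images separated by spaces on a single line
    ' '.join([f"![Image]({path})" for path in file_urls])
    # -> '![Image](/api/file/aaa.png) ![Image](/api/file/bbb.png)'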

View File

@@ -30,11 +30,15 @@ class BaseVariableAssignNode(IVariableAssignNode):
                         val = json.loads(variable['value'])
                         self.workflow_manage.context[variable['fields'][1]] = val
                         result['output_value'] = variable['value'] = val
-                    else:
+                    elif variable['type'] == 'string':
                         # Variable resolution, e.g. {{global.xxx}}
                         val = self.workflow_manage.generate_prompt(variable['value'])
                         self.workflow_manage.context[variable['fields'][1]] = val
                         result['output_value'] = val
+                    else:
+                        val = variable['value']
+                        self.workflow_manage.context[variable['fields'][1]] = val
+                        result['output_value'] = val
                 else:
                     reference = self.get_reference_content(variable['reference'])
                     self.workflow_manage.context[variable['fields'][1]] = reference
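
Note on the hunk above (editor's sketch, not part of the commit): the custom-value path now treats string variables separately, running them through prompt-template resolution (e.g. {{global.xxx}}), while any other plain type is stored verbatim; reference-type variables keep the existing branch. A simplified sketch of the resulting branching, with resolve_template standing in for workflow_manage.generate_prompt and the JSON condition assumed from the surrounding code:

    import json

    def assign_custom_value(variable, context, resolve_template):
        # Sketch only: the first condition is an assumption, not the exact source.
        if variable['type'] in ('dict', 'array'):      # assumed JSON-typed values
            val = json.loads(variable['value'])
        elif variable['type'] == 'string':
            # strings may contain template references such as {{global.xxx}}
            val = resolve_template(variable['value'])
        else:
            # any other type (number, boolean, ...) is stored as-is
            val = variable['value']
        context[variable['fields'][1]] = val
        return val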

View File

@@ -382,6 +382,8 @@ class WorkflowManage:
                     break
                 yield chunk
         finally:
+            while self.is_run():
+                pass
             details = self.get_runtime_details()
             message_tokens = sum([row.get('message_tokens') for row in details.values() if
                                   'message_tokens' in row and row.get('message_tokens') is not None])
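
Note on the hunk above (editor's sketch, not part of the commit): the two added lines busy-wait in the stream's finally block until is_run() reports the workflow has stopped, so get_runtime_details() reflects every node's final state before token totals are computed. The aggregation after the wait tolerates nodes that never recorded message_tokens; a minimal, self-contained illustration of that sum with hypothetical node data:

    # details maps node ids to per-node runtime dicts, as in the diff above
    details = {
        'node-1': {'message_tokens': 120, 'answer_tokens': 30},
        'node-2': {'answer_tokens': 15},        # no message_tokens recorded
        'node-3': {'message_tokens': None},     # explicitly None
    }
    message_tokens = sum([row.get('message_tokens') for row in details.values() if
                          'message_tokens' in row and row.get('message_tokens') is not None])
    # message_tokens == 120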

View File

@@ -44,10 +44,10 @@ class GZipMiddleware(MiddlewareMixin):
             if response.is_async:
                 # pull to lexical scope to capture fixed reference in case
                 # streaming_content is set again later.
-                orignal_iterator = response.streaming_content
+                original_iterator = response.streaming_content

                 async def gzip_wrapper():
-                    async for chunk in orignal_iterator:
+                    async for chunk in original_iterator:
                         yield compress_string(
                             chunk,
                             max_random_bytes=self.max_random_bytes,
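
Note on the hunk above (editor's sketch, not part of the commit): the change only fixes the misspelled local name (orignal_iterator -> original_iterator), so the async wrapper iterates the reference captured before streaming_content is replaced. A minimal sketch of that capture-then-wrap pattern, assuming a response-like object and an arbitrary compress function:

    def wrap_streaming(response, compress):
        # Bind the current iterator to a local name first; reassigning
        # response.streaming_content below does not affect this captured reference.
        original_iterator = response.streaming_content

        async def gzip_wrapper():
            async for chunk in original_iterator:
                yield compress(chunk)

        response.streaming_content = gzip_wrapper()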