From 49efb185e027f847323d42f24749131140043e30 Mon Sep 17 00:00:00 2001
From: shaohuzhang1
Date: Fri, 20 Sep 2024 18:46:49 +0800
Subject: [PATCH] fix: resolve the error raised when creating a model with the
 application base url under [Model Settings]
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 apps/common/handle/impl/response/openai_to_response.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/apps/common/handle/impl/response/openai_to_response.py b/apps/common/handle/impl/response/openai_to_response.py
index 791224aff..b2508078f 100644
--- a/apps/common/handle/impl/response/openai_to_response.py
+++ b/apps/common/handle/impl/response/openai_to_response.py
@@ -29,7 +29,7 @@ class OpenaiToResponse(BaseToResponse):
                                                      prompt_tokens=prompt_tokens,
                                                      total_tokens=completion_tokens + prompt_tokens)
                               ).dict()
-        return JsonResponse(data=data, status=status)
+        return JsonResponse(data=data, status=_status)
 
     def to_stream_chunk_response(self, chat_id, chat_record_id, content, is_end, completion_tokens, prompt_tokens):
         chunk = ChatCompletionChunk(id=chat_record_id, model='', object='chat.completion.chunk',
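Context (not part of the patch): a minimal sketch of the bug being fixed, assuming the method takes the HTTP code through a parameter named `_status` while a module-level `status` import (such as `rest_framework.status`) is also in scope. Writing `status=status` hands the module object to `JsonResponse`, which rejects non-integer status values; the patch passes the `_status` parameter instead. The standalone function and the `settings.configure()` call below are illustrative assumptions, not the actual MaxKB module.

```python
# Minimal, self-contained reproduction sketch (assumed names, not the real handler).
from django.conf import settings

settings.configure()  # just enough Django config to build a JsonResponse outside a project

from django.http import JsonResponse
from rest_framework import status  # module of integer constants; shadows the name "status"


def to_block_response(data, _status=status.HTTP_200_OK):
    # Before the patch: `status=status` passed the imported module, raising
    # "HTTP status code must be an integer." at runtime.
    # After the patch: the integer parameter `_status` is used, as below.
    return JsonResponse(data=data, status=_status)


resp = to_block_response({'id': 'chat-1'}, _status=status.HTTP_200_OK)
print(resp.status_code)  # 200
```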