From ec5c07655702c7b692c9140d74aa4512291050e4 Mon Sep 17 00:00:00 2001
From: CaptainB
Date: Mon, 8 Sep 2025 15:04:48 +0800
Subject: [PATCH] chore: add mcp_output_enable field to application and applicationversion models

---
 .../step/chat_step/i_chat_step.py             |  3 +-
 .../step/chat_step/impl/base_chat_step.py     | 32 ++++++++++++-------
 .../ai_chat_step_node/i_chat_node.py          |  3 +-
 .../ai_chat_step_node/impl/base_chat_node.py  |  9 +++---
 apps/application/flow/tools.py                |  8 ++---
 ...more.py => 0002_application_simple_mcp.py} | 10 ++++++
 apps/application/models/application.py        |  2 ++
 apps/application/serializers/application.py   |  4 +--
 apps/application/serializers/common.py        |  1 +
 ui/src/api/type/application.ts                |  1 +
 .../views/application/ApplicationSetting.vue  | 17 ++++++++++
 ui/src/workflow/nodes/ai-chat-node/index.vue  | 20 +++++++++++-
 12 files changed, 85 insertions(+), 25 deletions(-)
 rename apps/application/migrations/{0002_application_mcp_enable_application_mcp_servers_and_more.py => 0002_application_simple_mcp.py} (86%)

diff --git a/apps/application/chat_pipeline/step/chat_step/i_chat_step.py b/apps/application/chat_pipeline/step/chat_step/i_chat_step.py
index 25cff9d54..c278dfacc 100644
--- a/apps/application/chat_pipeline/step/chat_step/i_chat_step.py
+++ b/apps/application/chat_pipeline/step/chat_step/i_chat_step.py
@@ -88,6 +88,7 @@ class IChatStep(IBaseChatPipelineStep):
         mcp_source = serializers.CharField(label="MCP Source", required=False, default="referencing")
         tool_enable = serializers.BooleanField(label="工具是否启用", required=False, default=False)
         tool_ids = serializers.JSONField(label="工具ID列表", required=False, default=list)
+        mcp_output_enable = serializers.BooleanField(label="MCP输出是否启用", required=False, default=True)
 
     def is_valid(self, *, raise_exception=False):
         super().is_valid(raise_exception=True)
@@ -114,6 +115,6 @@ class IChatStep(IBaseChatPipelineStep):
                 padding_problem_text: str = None, stream: bool = True, chat_user_id=None, chat_user_type=None,
                 no_references_setting=None, model_params_setting=None, model_setting=None,
                 mcp_enable=False, mcp_tool_ids=None, mcp_servers='', mcp_source="referencing",
-                tool_enable=False, tool_ids=None,
+                tool_enable=False, tool_ids=None, mcp_output_enable=True,
                 **kwargs):
         pass
diff --git a/apps/application/chat_pipeline/step/chat_step/impl/base_chat_step.py b/apps/application/chat_pipeline/step/chat_step/impl/base_chat_step.py
index ef5e31704..05015f758 100644
--- a/apps/application/chat_pipeline/step/chat_step/impl/base_chat_step.py
+++ b/apps/application/chat_pipeline/step/chat_step/impl/base_chat_step.py
@@ -181,6 +181,7 @@ class BaseChatStep(IChatStep):
                 mcp_source="referencing",
                 tool_enable=False,
                 tool_ids=None,
+                mcp_output_enable=True,
                 **kwargs):
         chat_model = get_model_instance_by_model_workspace_id(model_id, workspace_id,
                                                               **model_params_setting) if model_id is not None else None
@@ -190,13 +191,13 @@ class BaseChatStep(IChatStep):
                                        manage, padding_problem_text, chat_user_id, chat_user_type,
                                        no_references_setting, model_setting,
-                                       mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids)
+                                       mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids, mcp_output_enable)
         else:
             return self.execute_block(message_list, chat_id, problem_text, post_response_handler, chat_model,
                                       paragraph_list,
                                       manage, padding_problem_text, chat_user_id, chat_user_type,
                                       no_references_setting, model_setting,
-                                      mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids)
+                                      mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids, mcp_output_enable)
 
     def get_details(self, manage, **kwargs):
         # 删除临时生成的MCP代码文件
@@ -229,7 +230,7 @@ class BaseChatStep(IChatStep):
         return result
 
     def _handle_mcp_request(self, mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids,
-                            chat_model, message_list):
+                            mcp_output_enable, chat_model, message_list):
         if not mcp_enable and not tool_enable:
             return None
@@ -269,7 +270,7 @@ class BaseChatStep(IChatStep):
                     mcp_servers_config[str(tool.id)] = tool_config
 
         if len(mcp_servers_config) > 0:
-            return mcp_response_generator(chat_model, message_list, json.dumps(mcp_servers_config))
+            return mcp_response_generator(chat_model, message_list, json.dumps(mcp_servers_config), mcp_output_enable)
 
         return None
@@ -284,7 +285,8 @@ class BaseChatStep(IChatStep):
                           mcp_servers='',
                           mcp_source="referencing",
                           tool_enable=False,
-                          tool_ids=None):
+                          tool_ids=None,
+                          mcp_output_enable=True):
         if paragraph_list is None:
             paragraph_list = []
         directly_return_chunk_list = [AIMessageChunk(content=paragraph.content)
@@ -302,7 +304,7 @@ class BaseChatStep(IChatStep):
         else:
             # 处理 MCP 请求
             mcp_result = self._handle_mcp_request(
-                mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids, chat_model, message_list,
+                mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids, mcp_output_enable, chat_model, message_list,
             )
             if mcp_result:
                 return mcp_result, True
@@ -324,9 +326,11 @@ class BaseChatStep(IChatStep):
                        mcp_servers='',
                        mcp_source="referencing",
                        tool_enable=False,
-                       tool_ids=None):
+                       tool_ids=None,
+                       mcp_output_enable=True):
         chat_result, is_ai_chat = self.get_stream_result(message_list, chat_model, paragraph_list,
-                                                         no_references_setting, problem_text, mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids)
+                                                         no_references_setting, problem_text, mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids,
+                                                         mcp_output_enable)
         chat_record_id = uuid.uuid7()
         r = StreamingHttpResponse(
             streaming_content=event_content(chat_result, chat_id, chat_record_id, paragraph_list,
@@ -348,7 +352,9 @@ class BaseChatStep(IChatStep):
                       mcp_servers='',
                       mcp_source="referencing",
                       tool_enable=False,
-                      tool_ids=None):
+                      tool_ids=None,
+                      mcp_output_enable=True
+                      ):
         if paragraph_list is None:
             paragraph_list = []
         directly_return_chunk_list = [AIMessageChunk(content=paragraph.content)
@@ -365,7 +371,8 @@ class BaseChatStep(IChatStep):
         else:
             # 处理 MCP 请求
             mcp_result = self._handle_mcp_request(
-                mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids, chat_model, message_list,
+                mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids, mcp_output_enable,
+                chat_model, message_list,
             )
             if mcp_result:
                 return mcp_result, True
@@ -386,7 +393,8 @@ class BaseChatStep(IChatStep):
                       mcp_servers='',
                       mcp_source="referencing",
                       tool_enable=False,
-                      tool_ids=None):
+                      tool_ids=None,
+                      mcp_output_enable=True):
         reasoning_content_enable = model_setting.get('reasoning_content_enable', False)
         reasoning_content_start = model_setting.get('reasoning_content_start', '')
         reasoning_content_end = model_setting.get('reasoning_content_end', '')
@@ -396,7 +404,7 @@ class BaseChatStep(IChatStep):
         # 调用模型
         try:
             chat_result, is_ai_chat = self.get_block_result(message_list, chat_model, paragraph_list,
-                                                            no_references_setting, problem_text, mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids)
+                                                            no_references_setting, problem_text, mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids, mcp_output_enable)
             if is_ai_chat:
                 request_token = chat_model.get_num_tokens_from_messages(message_list)
                 response_token = chat_model.get_num_tokens(chat_result.content)
diff --git a/apps/application/flow/step_node/ai_chat_step_node/i_chat_node.py b/apps/application/flow/step_node/ai_chat_step_node/i_chat_node.py
index 120c1c694..21e23f167 100644
--- a/apps/application/flow/step_node/ai_chat_step_node/i_chat_node.py
+++ b/apps/application/flow/step_node/ai_chat_step_node/i_chat_node.py
@@ -40,7 +40,7 @@ class ChatNodeSerializer(serializers.Serializer):
     tool_enable = serializers.BooleanField(required=False, default=False, label=_("Whether to enable tools"))
     tool_ids = serializers.ListField(child=serializers.UUIDField(), required=False, allow_empty=True,
                                      label=_("Tool IDs"), )
-
+    mcp_output_enable = serializers.BooleanField(required=False, default=True, label=_("Whether to enable MCP output"))
 
 class IChatNode(INode):
     type = 'ai-chat-node'
@@ -63,5 +63,6 @@ class IChatNode(INode):
                 mcp_source=None,
                 tool_enable=False,
                 tool_ids=None,
+                mcp_output_enable=True,
                 **kwargs) -> NodeResult:
         pass
diff --git a/apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py b/apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py
index 31e0cb834..390b0765a 100644
--- a/apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py
+++ b/apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py
@@ -159,6 +159,7 @@ class BaseChatNode(IChatNode):
                 mcp_source=None,
                 tool_enable=False,
                 tool_ids=None,
+                mcp_output_enable=True,
                 **kwargs) -> NodeResult:
         if dialogue_type is None:
             dialogue_type = 'WORKFLOW'
@@ -184,8 +185,8 @@ class BaseChatNode(IChatNode):
 
         # 处理 MCP 请求
         mcp_result = self._handle_mcp_request(
-            mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_id, mcp_tool_ids, tool_ids, chat_model, message_list,
-            history_message, question
+            mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_id, mcp_tool_ids, tool_ids, mcp_output_enable,
+            chat_model, message_list, history_message, question
         )
         if mcp_result:
             return mcp_result
@@ -202,7 +203,7 @@ class BaseChatNode(IChatNode):
                           _write_context=write_context)
 
     def _handle_mcp_request(self, mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_id, mcp_tool_ids, tool_ids,
-                            chat_model, message_list, history_message, question):
+                            mcp_output_enable, chat_model, message_list, history_message, question):
         if not mcp_enable and not tool_enable:
             return None
@@ -244,7 +245,7 @@ class BaseChatNode(IChatNode):
                     mcp_servers_config[str(tool.id)] = tool_config
 
         if len(mcp_servers_config) > 0:
-            r = mcp_response_generator(chat_model, message_list, json.dumps(mcp_servers_config))
+            r = mcp_response_generator(chat_model, message_list, json.dumps(mcp_servers_config), mcp_output_enable)
             return NodeResult(
                 {'result': r, 'chat_model': chat_model, 'message_list': message_list,
                  'history_message': history_message, 'question': question.content}, {},
diff --git a/apps/application/flow/tools.py b/apps/application/flow/tools.py
index a76d35308..29c95a14b 100644
--- a/apps/application/flow/tools.py
+++ b/apps/application/flow/tools.py
@@ -227,13 +227,13 @@ def generate_tool_message_template(name, context):
     return tool_message_template % (name, tool_message_json_template % (context))
 
 
-async def _yield_mcp_response(chat_model, message_list, mcp_servers):
+async def _yield_mcp_response(chat_model, message_list, mcp_servers, mcp_output_enable=True):
     client = MultiServerMCPClient(json.loads(mcp_servers))
     tools = await client.get_tools()
     agent = create_react_agent(chat_model, tools)
     response = agent.astream({"messages": message_list}, stream_mode='messages')
     async for chunk in response:
-        if isinstance(chunk[0], ToolMessage):
+        if mcp_output_enable and isinstance(chunk[0], ToolMessage):
             content = generate_tool_message_template(chunk[0].name, chunk[0].content)
             chunk[0].content = content
             yield chunk[0]
@@ -241,10 +241,10 @@ async def _yield_mcp_response(chat_model, message_list, mcp_servers):
             yield chunk[0]
 
 
-def mcp_response_generator(chat_model, message_list, mcp_servers):
+def mcp_response_generator(chat_model, message_list, mcp_servers, mcp_output_enable=True):
     loop = asyncio.new_event_loop()
     try:
-        async_gen = _yield_mcp_response(chat_model, message_list, mcp_servers)
+        async_gen = _yield_mcp_response(chat_model, message_list, mcp_servers, mcp_output_enable)
         while True:
             try:
                 chunk = loop.run_until_complete(anext_async(async_gen))
diff --git a/apps/application/migrations/0002_application_mcp_enable_application_mcp_servers_and_more.py b/apps/application/migrations/0002_application_simple_mcp.py
similarity index 86%
rename from apps/application/migrations/0002_application_mcp_enable_application_mcp_servers_and_more.py
rename to apps/application/migrations/0002_application_simple_mcp.py
index 6e27dab8e..14996f254 100644
--- a/apps/application/migrations/0002_application_mcp_enable_application_mcp_servers_and_more.py
+++ b/apps/application/migrations/0002_application_simple_mcp.py
@@ -40,6 +40,11 @@ class Migration(migrations.Migration):
             name='tool_ids',
             field=models.JSONField(default=list, verbose_name='工具ID列表'),
         ),
+        migrations.AddField(
+            model_name='application',
+            name='mcp_output_enable',
+            field=models.BooleanField(default=True, verbose_name='MCP输出是否启用'),
+        ),
         migrations.AddField(
             model_name='applicationversion',
             name='mcp_enable',
@@ -70,4 +75,9 @@ class Migration(migrations.Migration):
             name='tool_ids',
             field=models.JSONField(default=list, verbose_name='工具ID列表'),
         ),
+        migrations.AddField(
+            model_name='applicationversion',
+            name='mcp_output_enable',
+            field=models.BooleanField(default=True, verbose_name='MCP输出是否启用'),
+        ),
     ]
diff --git a/apps/application/models/application.py b/apps/application/models/application.py
index 548b02627..a8524b863 100644
--- a/apps/application/models/application.py
+++ b/apps/application/models/application.py
@@ -99,6 +99,7 @@ class Application(AppModelMixin):
     mcp_source = models.CharField(verbose_name="MCP Source", max_length=20, default="referencing")
     tool_enable = models.BooleanField(verbose_name="工具是否启用", default=False)
     tool_ids = models.JSONField(verbose_name="工具ID列表", default=list)
+    mcp_output_enable = models.BooleanField(verbose_name="MCP输出是否启用", default=True)
 
     @staticmethod
     def get_default_model_prompt():
@@ -170,6 +171,7 @@ class ApplicationVersion(AppModelMixin):
     mcp_source = models.CharField(verbose_name="MCP Source", max_length=20, default="referencing")
     tool_enable = models.BooleanField(verbose_name="工具是否启用", default=False)
     tool_ids = models.JSONField(verbose_name="工具ID列表", default=list)
+    mcp_output_enable = models.BooleanField(verbose_name="MCP输出是否启用", default=True)
 
     class Meta:
         db_table = "application_version"
diff --git a/apps/application/serializers/application.py b/apps/application/serializers/application.py
index b5c10836d..f7a875cf8 100644
--- a/apps/application/serializers/application.py
+++ b/apps/application/serializers/application.py
@@ -707,7 +707,7 @@ class ApplicationOperateSerializer(serializers.Serializer):
             'tts_autoplay': 'tts_autoplay', 'stt_autosend': 'stt_autosend',
             'file_upload_enable': 'file_upload_enable', 'file_upload_setting': 'file_upload_setting',
             'mcp_enable': 'mcp_enable', 'mcp_tool_ids': 'mcp_tool_ids', 'mcp_servers': 'mcp_servers',
-            'mcp_source': 'mcp_source', 'tool_enable': 'tool_enable', 'tool_ids': 'tool_ids',
+            'mcp_source': 'mcp_source', 'tool_enable': 'tool_enable', 'tool_ids': 'tool_ids', 'mcp_output_enable': 'mcp_output_enable',
             'type': 'type'
         }
@@ -831,7 +831,7 @@ class ApplicationOperateSerializer(serializers.Serializer):
                        'stt_model_id', 'tts_model_id', 'tts_model_enable', 'stt_model_enable', 'tts_type',
                        'tts_autoplay', 'stt_autosend', 'file_upload_enable', 'file_upload_setting',
                        'api_key_is_active', 'icon', 'work_flow', 'model_params_setting', 'tts_model_params_setting',
-                       'mcp_enable', 'mcp_tool_ids', 'mcp_servers', 'mcp_source', 'tool_enable', 'tool_ids',
+                       'mcp_enable', 'mcp_tool_ids', 'mcp_servers', 'mcp_source', 'tool_enable', 'tool_ids', 'mcp_output_enable',
                        'problem_optimization_prompt', 'clean_time', 'folder_id']
         for update_key in update_keys:
             if update_key in instance and instance.get(update_key) is not None:
diff --git a/apps/application/serializers/common.py b/apps/application/serializers/common.py
index 1a5db5ef3..1f2d12356 100644
--- a/apps/application/serializers/common.py
+++ b/apps/application/serializers/common.py
@@ -161,6 +161,7 @@ class ChatInfo:
             'mcp_source': self.application.mcp_source,
             'tool_enable': self.application.tool_enable,
             'tool_ids': self.application.tool_ids,
+            'mcp_output_enable': self.application.mcp_output_enable,
         }
 
     def to_pipeline_manage_params(self, problem_text: str, post_response_handler: PostResponseHandler,
diff --git a/ui/src/api/type/application.ts b/ui/src/api/type/application.ts
index 6fbb603cc..b7b4a85c6 100644
--- a/ui/src/api/type/application.ts
+++ b/ui/src/api/type/application.ts
@@ -32,6 +32,7 @@ interface ApplicationFormType {
   mcp_source?: string
   tool_enable?: boolean
   tool_ids?: string[]
+  mcp_output_enable?: boolean
 }
 interface Chunk {
   real_node_id: string
diff --git a/ui/src/views/application/ApplicationSetting.vue b/ui/src/views/application/ApplicationSetting.vue
index c13375bd7..19031a7fe 100644
--- a/ui/src/views/application/ApplicationSetting.vue
+++ b/ui/src/views/application/ApplicationSetting.vue
@@ -378,6 +378,22 @@
[The Vue template markup added by this hunk, and the diff for ui/src/workflow/nodes/ai-chat-node/index.vue listed in the summary above, were not preserved in this copy of the patch.]
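
The behavioral core of this patch is the gating added to _yield_mcp_response in apps/application/flow/tools.py: the new mcp_output_enable flag only controls whether ToolMessage chunks are wrapped in the tool-message template before being streamed; the chunks themselves are still yielded either way. Below is a minimal, self-contained sketch of that gating, not code from the patch: the MCP agent stream (built in the real code with MultiServerMCPClient and create_react_agent) is replaced by a stubbed generator, and fake_agent_stream / wrap_tool_output are hypothetical stand-ins.

# Minimal sketch of the mcp_output_enable gating; stand-in names are hypothetical.
import asyncio

from langchain_core.messages import AIMessageChunk, ToolMessage


def wrap_tool_output(name, content):
    # Stand-in for generate_tool_message_template in apps/application/flow/tools.py.
    return f'<tool_call name="{name}">{content}</tool_call>'


async def fake_agent_stream():
    # Stand-in for agent.astream({"messages": ...}, stream_mode='messages'),
    # which yields (message, metadata) tuples.
    yield ToolMessage(content='{"temperature": 21}', name='get_weather', tool_call_id='call-1'), {}
    yield AIMessageChunk(content='It is 21 degrees and sunny.'), {}


async def yield_mcp_response(mcp_output_enable=True):
    async for chunk in fake_agent_stream():
        if mcp_output_enable and isinstance(chunk[0], ToolMessage):
            # Only when MCP output is enabled is the raw tool result wrapped in the
            # template that marks it as a tool call.
            chunk[0].content = wrap_tool_output(chunk[0].name, chunk[0].content)
        # The chunk is yielded in both cases, matching the patched code path.
        yield chunk[0]


async def main():
    for flag in (True, False):
        print(f'mcp_output_enable={flag}')
        async for message in yield_mcp_response(flag):
            print(' ', type(message).__name__, '->', message.content)


if __name__ == '__main__':
    asyncio.run(main())

With the flag off, tool results are streamed without the template wrapper, presumably so the front end no longer renders them as tool-call blocks, while the model's final answer chunks are unaffected.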