Mirror of https://github.com/1Panel-dev/MaxKB.git (synced 2025-12-26 01:33:05 +00:00)

chore: add mcp_output_enable field to application and applicationversion models

parent 4a8cd95119
commit ec5c076557
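In short, the commit threads a new mcp_output_enable flag (default True) from the Application and ApplicationVersion models, through the serializers and chat pipeline, down to mcp_response_generator, where it gates whether formatted ToolMessage chunks from the MCP agent are surfaced in the streamed response. A minimal, self-contained sketch of that gating idea follows; filter_tool_output is a hypothetical helper written for illustration, not code from this commit:

from langchain_core.messages import AIMessageChunk, ToolMessage

def filter_tool_output(chunks, mcp_output_enable=True):
    # Hypothetical helper mirroring the gate added in _yield_mcp_response:
    # model output always passes through, while tool execution output is
    # only yielded when mcp_output_enable is True.
    for chunk in chunks:
        if isinstance(chunk, ToolMessage) and not mcp_output_enable:
            continue  # suppress the tool execution process
        yield chunk

# With the flag off, the tool trace is hidden from the client stream:
demo = [ToolMessage(content='{"ok": true}', name="search", tool_call_id="t1"),
        AIMessageChunk(content="answer")]
assert [c.content for c in filter_tool_output(demo, mcp_output_enable=False)] == ["answer"]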
@@ -88,6 +88,7 @@ class IChatStep(IBaseChatPipelineStep):
     mcp_source = serializers.CharField(label="MCP Source", required=False, default="referencing")
     tool_enable = serializers.BooleanField(label="Whether tools are enabled", required=False, default=False)
     tool_ids = serializers.JSONField(label="Tool ID list", required=False, default=list)
+    mcp_output_enable = serializers.BooleanField(label="Whether MCP output is enabled", required=False, default=True)
 
     def is_valid(self, *, raise_exception=False):
         super().is_valid(raise_exception=True)
@@ -114,6 +115,6 @@ class IChatStep(IBaseChatPipelineStep):
                 padding_problem_text: str = None, stream: bool = True, chat_user_id=None, chat_user_type=None,
                 no_references_setting=None, model_params_setting=None, model_setting=None,
                 mcp_enable=False, mcp_tool_ids=None, mcp_servers='', mcp_source="referencing",
-                tool_enable=False, tool_ids=None,
+                tool_enable=False, tool_ids=None, mcp_output_enable=True,
                 **kwargs):
         pass
@@ -181,6 +181,7 @@ class BaseChatStep(IChatStep):
                 mcp_source="referencing",
                 tool_enable=False,
                 tool_ids=None,
+                mcp_output_enable=True,
                 **kwargs):
         chat_model = get_model_instance_by_model_workspace_id(model_id, workspace_id,
                                                               **model_params_setting) if model_id is not None else None
@@ -190,13 +191,13 @@ class BaseChatStep(IChatStep):
                                        manage, padding_problem_text, chat_user_id, chat_user_type,
                                        no_references_setting,
                                        model_setting,
-                                       mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids)
+                                       mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids, mcp_output_enable)
         else:
             return self.execute_block(message_list, chat_id, problem_text, post_response_handler, chat_model,
                                       paragraph_list,
                                       manage, padding_problem_text, chat_user_id, chat_user_type, no_references_setting,
                                       model_setting,
-                                      mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids)
+                                      mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids, mcp_output_enable)
 
     def get_details(self, manage, **kwargs):
         # Delete temporarily generated MCP code files
@@ -229,7 +230,7 @@ class BaseChatStep(IChatStep):
         return result
 
     def _handle_mcp_request(self, mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids,
-                            chat_model, message_list):
+                            mcp_output_enable, chat_model, message_list):
         if not mcp_enable and not tool_enable:
             return None
 
@@ -269,7 +270,7 @@ class BaseChatStep(IChatStep):
                 mcp_servers_config[str(tool.id)] = tool_config
 
         if len(mcp_servers_config) > 0:
-            return mcp_response_generator(chat_model, message_list, json.dumps(mcp_servers_config))
+            return mcp_response_generator(chat_model, message_list, json.dumps(mcp_servers_config), mcp_output_enable)
 
         return None
@@ -284,7 +285,8 @@ class BaseChatStep(IChatStep):
                           mcp_servers='',
                           mcp_source="referencing",
                           tool_enable=False,
-                          tool_ids=None):
+                          tool_ids=None,
+                          mcp_output_enable=True):
         if paragraph_list is None:
             paragraph_list = []
         directly_return_chunk_list = [AIMessageChunk(content=paragraph.content)
@@ -302,7 +304,7 @@ class BaseChatStep(IChatStep):
         else:
             # Handle the MCP request
             mcp_result = self._handle_mcp_request(
-                mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids, chat_model, message_list,
+                mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids, mcp_output_enable, chat_model, message_list,
             )
             if mcp_result:
                 return mcp_result, True
@@ -324,9 +326,11 @@ class BaseChatStep(IChatStep):
                        mcp_servers='',
                        mcp_source="referencing",
                        tool_enable=False,
-                       tool_ids=None):
+                       tool_ids=None,
+                       mcp_output_enable=True):
         chat_result, is_ai_chat = self.get_stream_result(message_list, chat_model, paragraph_list,
-                                                         no_references_setting, problem_text, mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids)
+                                                         no_references_setting, problem_text, mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids,
+                                                         mcp_output_enable)
         chat_record_id = uuid.uuid7()
         r = StreamingHttpResponse(
             streaming_content=event_content(chat_result, chat_id, chat_record_id, paragraph_list,
@@ -348,7 +352,9 @@ class BaseChatStep(IChatStep):
                           mcp_servers='',
                           mcp_source="referencing",
                           tool_enable=False,
-                          tool_ids=None):
+                          tool_ids=None,
+                          mcp_output_enable=True
+                          ):
         if paragraph_list is None:
             paragraph_list = []
         directly_return_chunk_list = [AIMessageChunk(content=paragraph.content)
@@ -365,7 +371,8 @@ class BaseChatStep(IChatStep):
         else:
             # Handle the MCP request
             mcp_result = self._handle_mcp_request(
-                mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids, chat_model, message_list,
+                mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_ids, tool_ids, mcp_output_enable,
+                chat_model, message_list,
             )
             if mcp_result:
                 return mcp_result, True
@@ -386,7 +393,8 @@ class BaseChatStep(IChatStep):
                          mcp_servers='',
                          mcp_source="referencing",
                          tool_enable=False,
-                         tool_ids=None):
+                         tool_ids=None,
+                         mcp_output_enable=True):
         reasoning_content_enable = model_setting.get('reasoning_content_enable', False)
         reasoning_content_start = model_setting.get('reasoning_content_start', '<think>')
         reasoning_content_end = model_setting.get('reasoning_content_end', '</think>')
@@ -396,7 +404,7 @@ class BaseChatStep(IChatStep):
         # Call the model
         try:
             chat_result, is_ai_chat = self.get_block_result(message_list, chat_model, paragraph_list,
-                                                            no_references_setting, problem_text, mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids)
+                                                            no_references_setting, problem_text, mcp_enable, mcp_tool_ids, mcp_servers, mcp_source, tool_enable, tool_ids, mcp_output_enable)
             if is_ai_chat:
                 request_token = chat_model.get_num_tokens_from_messages(message_list)
                 response_token = chat_model.get_num_tokens(chat_result.content)
@@ -40,7 +40,7 @@ class ChatNodeSerializer(serializers.Serializer):
     tool_enable = serializers.BooleanField(required=False, default=False, label=_("Whether to enable tools"))
     tool_ids = serializers.ListField(child=serializers.UUIDField(), required=False, allow_empty=True,
                                      label=_("Tool IDs"), )
+    mcp_output_enable = serializers.BooleanField(required=False, default=True, label=_("Whether to enable MCP output"))
 
 class IChatNode(INode):
     type = 'ai-chat-node'
@@ -63,5 +63,6 @@ class IChatNode(INode):
                 mcp_source=None,
                 tool_enable=False,
                 tool_ids=None,
+                mcp_output_enable=True,
                 **kwargs) -> NodeResult:
         pass
@@ -159,6 +159,7 @@ class BaseChatNode(IChatNode):
                 mcp_source=None,
                 tool_enable=False,
                 tool_ids=None,
+                mcp_output_enable=True,
                 **kwargs) -> NodeResult:
         if dialogue_type is None:
             dialogue_type = 'WORKFLOW'
@@ -184,8 +185,8 @@ class BaseChatNode(IChatNode):
 
         # Handle the MCP request
         mcp_result = self._handle_mcp_request(
-            mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_id, mcp_tool_ids, tool_ids, chat_model, message_list,
-            history_message, question
+            mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_id, mcp_tool_ids, tool_ids, mcp_output_enable,
+            chat_model, message_list, history_message, question
         )
         if mcp_result:
             return mcp_result
@@ -202,7 +203,7 @@ class BaseChatNode(IChatNode):
                               _write_context=write_context)
 
     def _handle_mcp_request(self, mcp_enable, tool_enable, mcp_source, mcp_servers, mcp_tool_id, mcp_tool_ids, tool_ids,
-                            chat_model, message_list, history_message, question):
+                            mcp_output_enable, chat_model, message_list, history_message, question):
         if not mcp_enable and not tool_enable:
             return None
 
@@ -244,7 +245,7 @@ class BaseChatNode(IChatNode):
                 mcp_servers_config[str(tool.id)] = tool_config
 
         if len(mcp_servers_config) > 0:
-            r = mcp_response_generator(chat_model, message_list, json.dumps(mcp_servers_config))
+            r = mcp_response_generator(chat_model, message_list, json.dumps(mcp_servers_config), mcp_output_enable)
             return NodeResult(
                 {'result': r, 'chat_model': chat_model, 'message_list': message_list,
                  'history_message': history_message, 'question': question.content}, {},
@@ -227,13 +227,13 @@ def generate_tool_message_template(name, context):
     return tool_message_template % (name, tool_message_json_template % (context))
 
 
-async def _yield_mcp_response(chat_model, message_list, mcp_servers):
+async def _yield_mcp_response(chat_model, message_list, mcp_servers, mcp_output_enable=True):
     client = MultiServerMCPClient(json.loads(mcp_servers))
     tools = await client.get_tools()
     agent = create_react_agent(chat_model, tools)
     response = agent.astream({"messages": message_list}, stream_mode='messages')
     async for chunk in response:
-        if isinstance(chunk[0], ToolMessage):
+        if mcp_output_enable and isinstance(chunk[0], ToolMessage):
             content = generate_tool_message_template(chunk[0].name, chunk[0].content)
             chunk[0].content = content
             yield chunk[0]
@@ -241,10 +241,10 @@ async def _yield_mcp_response(chat_model, message_list, mcp_servers):
             yield chunk[0]
 
 
-def mcp_response_generator(chat_model, message_list, mcp_servers):
+def mcp_response_generator(chat_model, message_list, mcp_servers, mcp_output_enable=True):
     loop = asyncio.new_event_loop()
     try:
-        async_gen = _yield_mcp_response(chat_model, message_list, mcp_servers)
+        async_gen = _yield_mcp_response(chat_model, message_list, mcp_servers, mcp_output_enable)
         while True:
             try:
                 chunk = loop.run_until_complete(anext_async(async_gen))
@@ -40,6 +40,11 @@ class Migration(migrations.Migration):
             name='tool_ids',
             field=models.JSONField(default=list, verbose_name='Tool ID list'),
         ),
+        migrations.AddField(
+            model_name='application',
+            name='mcp_output_enable',
+            field=models.BooleanField(default=True, verbose_name='Whether MCP output is enabled'),
+        ),
         migrations.AddField(
             model_name='applicationversion',
             name='mcp_enable',
@@ -70,4 +75,9 @@ class Migration(migrations.Migration):
             name='tool_ids',
             field=models.JSONField(default=list, verbose_name='Tool ID list'),
         ),
+        migrations.AddField(
+            model_name='applicationversion',
+            name='mcp_output_enable',
+            field=models.BooleanField(default=True, verbose_name='Whether MCP output is enabled'),
+        ),
     ]
@@ -99,6 +99,7 @@ class Application(AppModelMixin):
     mcp_source = models.CharField(verbose_name="MCP Source", max_length=20, default="referencing")
     tool_enable = models.BooleanField(verbose_name="Whether tools are enabled", default=False)
     tool_ids = models.JSONField(verbose_name="Tool ID list", default=list)
+    mcp_output_enable = models.BooleanField(verbose_name="Whether MCP output is enabled", default=True)
 
     @staticmethod
     def get_default_model_prompt():
@@ -170,6 +171,7 @@ class ApplicationVersion(AppModelMixin):
     mcp_source = models.CharField(verbose_name="MCP Source", max_length=20, default="referencing")
     tool_enable = models.BooleanField(verbose_name="Whether tools are enabled", default=False)
     tool_ids = models.JSONField(verbose_name="Tool ID list", default=list)
+    mcp_output_enable = models.BooleanField(verbose_name="Whether MCP output is enabled", default=True)
 
     class Meta:
         db_table = "application_version"
@@ -707,7 +707,7 @@ class ApplicationOperateSerializer(serializers.Serializer):
             'tts_autoplay': 'tts_autoplay', 'stt_autosend': 'stt_autosend', 'file_upload_enable': 'file_upload_enable',
             'file_upload_setting': 'file_upload_setting',
             'mcp_enable': 'mcp_enable', 'mcp_tool_ids': 'mcp_tool_ids', 'mcp_servers': 'mcp_servers',
-            'mcp_source': 'mcp_source', 'tool_enable': 'tool_enable', 'tool_ids': 'tool_ids',
+            'mcp_source': 'mcp_source', 'tool_enable': 'tool_enable', 'tool_ids': 'tool_ids', 'mcp_output_enable': 'mcp_output_enable',
             'type': 'type'
         }
 
@@ -831,7 +831,7 @@ class ApplicationOperateSerializer(serializers.Serializer):
                        'stt_model_id', 'tts_model_id', 'tts_model_enable', 'stt_model_enable', 'tts_type',
                        'tts_autoplay', 'stt_autosend', 'file_upload_enable', 'file_upload_setting',
                        'api_key_is_active', 'icon', 'work_flow', 'model_params_setting', 'tts_model_params_setting',
-                       'mcp_enable', 'mcp_tool_ids', 'mcp_servers', 'mcp_source', 'tool_enable', 'tool_ids',
+                       'mcp_enable', 'mcp_tool_ids', 'mcp_servers', 'mcp_source', 'tool_enable', 'tool_ids', 'mcp_output_enable',
                        'problem_optimization_prompt', 'clean_time', 'folder_id']
         for update_key in update_keys:
             if update_key in instance and instance.get(update_key) is not None:
@@ -161,6 +161,7 @@ class ChatInfo:
             'mcp_source': self.application.mcp_source,
             'tool_enable': self.application.tool_enable,
             'tool_ids': self.application.tool_ids,
+            'mcp_output_enable': self.application.mcp_output_enable,
         }
 
     def to_pipeline_manage_params(self, problem_text: str, post_response_handler: PostResponseHandler,
@@ -32,6 +32,7 @@ interface ApplicationFormType {
   mcp_source?: string
   tool_enable?: boolean
   tool_ids?: string[]
+  mcp_output_enable?: boolean
 }
 interface Chunk {
   real_node_id: string
@@ -378,6 +378,22 @@
           </div>
         </template>
       </div>
+      <el-form-item @click.prevent>
+        <template #label>
+          <div class="flex-between">
+            <span class="mr-4">
+              Output the MCP/tool execution process
+            </span>
+            <div class="flex">
+              <el-switch
+                class="ml-8"
+                size="small"
+                v-model="applicationForm.mcp_output_enable"
+              />
+            </div>
+          </div>
+        </template>
+      </el-form-item>
       <el-form-item @click.prevent>
         <template #label>
           <div class="flex-between">
@@ -644,6 +660,7 @@ const applicationForm = ref<ApplicationFormType>({
   mcp_source: 'referencing',
   tool_enable: false,
   tool_ids: [],
+  mcp_output_enable: true,
 })
 const themeDetail = ref({})
 
@@ -211,7 +211,22 @@
           </div>
         </template>
       </div>
 
+      <el-form-item @click.prevent>
+        <template #label>
+          <div class="flex-between">
+            <span class="mr-4">
+              Output the MCP/tool execution process
+            </span>
+            <div class="flex">
+              <el-switch
+                class="ml-8"
+                size="small"
+                v-model="chat_data.mcp_output_enable"
+              />
+            </div>
+          </div>
+        </template>
+      </el-form-item>
       <el-form-item @click.prevent>
         <template #label>
           <div class="flex-between w-full">
@@ -516,6 +531,9 @@ onMounted(() => {
     set(props.nodeModel.properties.node_data, 'mcp_tool_ids', [props.nodeModel.properties.node_data?.mcp_tool_id])
     set(props.nodeModel.properties.node_data, 'mcp_tool_id', undefined)
   }
+  if (props.nodeModel.properties.node_data?.mcp_output_enable === undefined) {
+    set(props.nodeModel.properties.node_data, 'mcp_output_enable', true)
+  }
 
   getToolSelectOptions()
   getMcpToolSelectOptions()