feat: AI chat node MCP server config

This commit is contained in:
CaptainB 2025-03-24 11:05:28 +08:00 committed by 刘瑞斌
parent 2d6ac806ff
commit 563516f835
4 changed files with 193 additions and 1 deletions

View File

@ -33,6 +33,9 @@ class ChatNodeSerializer(serializers.Serializer):
error_messages=ErrMessage.dict('Model settings'))
dialogue_type = serializers.CharField(required=False, allow_blank=True, allow_null=True,
error_messages=ErrMessage.char(_("Context Type")))
mcp_enable = serializers.BooleanField(required=False,
error_messages=ErrMessage.boolean(_("Whether to enable MCP")))
mcp_servers = serializers.JSONField(required=False, error_messages=ErrMessage.list(_("MCP Server")))
class IChatNode(INode):
@ -49,5 +52,7 @@ class IChatNode(INode):
model_params_setting=None,
dialogue_type=None,
model_setting=None,
mcp_enable=False,
mcp_servers=None,
**kwargs) -> NodeResult:
pass

View File

@ -6,14 +6,19 @@
@date2024/6/4 14:30
@desc:
"""
import asyncio
import json
import re
import time
from functools import reduce
from types import AsyncGeneratorType
from typing import List, Dict
from django.db.models import QuerySet
from langchain.schema import HumanMessage, SystemMessage
from langchain_core.messages import BaseMessage, AIMessage
from langchain_core.messages import BaseMessage, AIMessage, AIMessageChunk, ToolMessage
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.prebuilt import create_react_agent
from application.flow.i_step_node import NodeResult, INode
from application.flow.step_node.ai_chat_step_node.i_chat_node import IChatNode
@ -56,6 +61,7 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
reasoning = Reasoning(model_setting.get('reasoning_content_start', '<think>'),
model_setting.get('reasoning_content_end', '</think>'))
response_reasoning_content = False
for chunk in response:
reasoning_chunk = reasoning.get_reasoning_content(chunk)
content_chunk = reasoning_chunk.get('content')
@ -84,6 +90,47 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
_write_context(node_variable, workflow_variable, node, workflow, answer, reasoning_content)
async def _yield_mcp_response(chat_model, message_list, mcp_servers):
    """Stream AI message chunks from a ReAct agent backed by MCP tools.

    Connects to the configured MCP servers, builds a LangGraph ReAct agent
    over their tools, and yields every ``AIMessageChunk`` produced while the
    agent processes *message_list*.

    :param chat_model: LangChain chat model driving the agent.
    :param message_list: conversation messages passed to the agent.
    :param mcp_servers: JSON string of the MCP server configuration
        (parsed here with ``json.loads``).
    """
    async with MultiServerMCPClient(json.loads(mcp_servers)) as client:
        agent = create_react_agent(chat_model, client.get_tools())
        response = agent.astream({"messages": message_list}, stream_mode='messages')
        async for chunk in response:
            # stream_mode='messages' yields (message, metadata) tuples; only
            # forward model-output chunks, silently skipping tool messages.
            if isinstance(chunk[0], AIMessageChunk):
                yield chunk[0]
def mcp_response_generator(chat_model, message_list, mcp_servers):
    """Bridge the async MCP agent stream into a synchronous generator.

    Creates a private event loop and drives ``_yield_mcp_response`` one chunk
    at a time so synchronous callers (the workflow streaming pipeline) can
    iterate over ``AIMessageChunk`` objects as they arrive.

    :param chat_model: LangChain chat model passed through to the agent.
    :param message_list: conversation messages for the agent.
    :param mcp_servers: JSON string describing the MCP server configuration.
    """
    loop = asyncio.new_event_loop()
    try:
        async_gen = _yield_mcp_response(chat_model, message_list, mcp_servers)
        try:
            while True:
                try:
                    chunk = loop.run_until_complete(anext_async(async_gen))
                except StopAsyncIteration:
                    break
                yield chunk
        finally:
            # Close the async generator explicitly so the MCP client context
            # manager exits cleanly instead of being garbage-collected
            # mid-stream (which would leak the server connections).
            loop.run_until_complete(async_gen.aclose())
    except Exception as e:
        # NOTE(review): errors are deliberately swallowed so a failing MCP
        # call truncates the answer stream instead of crashing the workflow.
        # `print` kept from the original — consider module logging instead.
        print(f'exception: {e}')
    finally:
        loop.close()
async def anext_async(agen):
return await agen.__anext__()
async def _get_mcp_response(chat_model, message_list, mcp_servers):
    """Run the MCP-backed agent to completion and collect its output.

    Non-streaming counterpart of ``_yield_mcp_response``: same agent setup
    and ``AIMessageChunk`` filtering, but returns all chunks as a list.
    Delegates to ``_yield_mcp_response`` so the agent/client construction
    logic lives in exactly one place.

    :param chat_model: LangChain chat model driving the agent.
    :param message_list: conversation messages passed to the agent.
    :param mcp_servers: JSON string of the MCP server configuration.
    :return: list of ``AIMessageChunk`` objects produced by the agent.
    """
    return [chunk async for chunk in _yield_mcp_response(chat_model, message_list, mcp_servers)]
def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, workflow):
"""
写入上下文数据
@ -142,6 +189,8 @@ class BaseChatNode(IChatNode):
model_params_setting=None,
dialogue_type=None,
model_setting=None,
mcp_enable=False,
mcp_servers=None,
**kwargs) -> NodeResult:
if dialogue_type is None:
dialogue_type = 'WORKFLOW'
@ -163,6 +212,14 @@ class BaseChatNode(IChatNode):
self.context['system'] = system
message_list = self.generate_message_list(system, prompt, history_message)
self.context['message_list'] = message_list
if mcp_enable and mcp_servers is not None:
r = mcp_response_generator(chat_model, message_list, mcp_servers)
return NodeResult(
{'result': r, 'chat_model': chat_model, 'message_list': message_list,
'history_message': history_message, 'question': question.content}, {},
_write_context=write_context_stream)
if stream:
r = chat_model.stream(message_list)
return NodeResult({'result': r, 'chat_model': chat_model, 'message_list': message_list,

View File

@ -0,0 +1,98 @@
<template>
  <el-dialog
    align-center
    :title="$t('common.setting')"
    class="param-dialog"
    v-model="dialogVisible"
    style="width: 550px"
    append-to-body
    :close-on-click-modal="false"
    :close-on-press-escape="false"
  >
    <el-form
      label-position="top"
      ref="paramFormRef"
      :model="form"
      require-asterisk-position="right"
    >
      <el-form-item label="MCP" prop="mcp_enable">
        <el-switch v-model="form.mcp_enable" />
      </el-form-item>
      <!-- The server config is only mandatory when MCP is switched on;
           an always-required rule would block saving with MCP disabled. -->
      <el-form-item
        label="MCP Server Config"
        prop="mcp_servers"
        :rules="[{ required: form.mcp_enable, message: $t('common.required') }]"
      >
        <el-input v-model="form.mcp_servers" :rows="6" type="textarea" />
      </el-form-item>
    </el-form>
    <template #footer>
      <span class="dialog-footer p-16">
        <el-button @click.prevent="dialogVisible = false">{{ $t('common.cancel') }}</el-button>
        <el-button type="primary" @click="submit()" :loading="loading">
          {{ $t('common.save') }}
        </el-button>
      </span>
    </template>
  </el-dialog>
</template>
<script setup lang="ts">
import { ref, watch } from 'vue'

const emit = defineEmits(['refresh'])

const paramFormRef = ref()
// Dialog-local copy of the node's MCP settings (merged with caller data in open()).
const form = ref<any>({
  mcp_servers: '',
  mcp_enable: false
})

const dialogVisible = ref<boolean>(false)
const loading = ref(false)

// Reset the form whenever the dialog closes so stale values never leak
// into the next open() call.
watch(dialogVisible, (bool) => {
  if (!bool) {
    form.value = {
      mcp_servers: '',
      mcp_enable: false
    }
  }
})

const open = (data: any) => {
  form.value = { ...form.value, ...data }
  dialogVisible.value = true
}

const submit = () => {
  paramFormRef.value
    .validate()
    .then((valid: any) => {
      if (valid) {
        emit('refresh', form.value)
        dialogVisible.value = false
      }
    })
    .catch(() => {
      // element-plus rejects the promise on validation failure; swallow it
      // so the dialog simply stays open instead of raising an
      // unhandled-rejection warning.
    })
}

defineExpose({ open })
</script>
<style lang="scss" scoped>
.param-dialog {
  padding: 8px 8px 24px 8px;
  .el-dialog__header {
    padding: 16px 16px 0 16px;
  }
  .el-dialog__body {
    padding: 0 !important;
  }
  .dialog-max-height {
    height: 560px;
  }
  .custom-slider {
    .el-input-number.is-without-controls .el-input__wrapper {
      padding: 0 !important;
    }
  }
}
</style>

View File

@ -116,6 +116,22 @@
/>
</el-form-item>
<el-form-item>
<template #label>
<div class="flex-between">
<div>{{ $t('views.applicationWorkflow.nodes.mcpNode.tool') }}</div>
<el-button
type="primary"
link
@click="openMcpServersDialog"
@refreshForm="refreshParam"
>
<el-icon><Setting /></el-icon>
</el-button>
</div>
</template>
</el-form-item>
<el-form-item @click.prevent>
<template #label>
<div class="flex align-center">
@ -163,6 +179,7 @@
ref="ReasoningParamSettingDialogRef"
@refresh="submitReasoningDialog"
/>
<McpServersDialog ref="mcpServersDialogRef" @refresh="submitMcpServersDialog" />
</NodeContainer>
</template>
<script setup lang="ts">
@ -177,6 +194,7 @@ import { isLastNode } from '@/workflow/common/data'
import AIModeParamSettingDialog from '@/views/application/component/AIModeParamSettingDialog.vue'
import { t } from '@/locales'
import ReasoningParamSettingDialog from '@/views/application/component/ReasoningParamSettingDialog.vue'
import McpServersDialog from '@/views/application/component/McpServersDialog.vue'
const { model } = useStore()
const wheel = (e: any) => {
@ -300,6 +318,20 @@ function submitReasoningDialog(val: any) {
set(props.nodeModel.properties.node_data, 'model_setting', model_setting)
}
const mcpServersDialogRef = ref()

// Open the MCP settings dialog pre-filled with the node's current values.
function openMcpServersDialog() {
  mcpServersDialogRef.value.open({
    mcp_servers: chat_data.value.mcp_servers,
    mcp_enable: chat_data.value.mcp_enable
  })
}

// Persist the dialog result back onto the workflow node's data.
function submitMcpServersDialog(config: any) {
  set(props.nodeModel.properties.node_data, 'mcp_servers', config.mcp_servers)
  set(props.nodeModel.properties.node_data, 'mcp_enable', config.mcp_enable)
}
onMounted(() => {
getModel()
if (typeof props.nodeModel.properties.node_data?.is_result === 'undefined') {