feat: 支持工作流 (#671)

This commit is contained in:
shaohuzhang1 2024-07-01 09:45:59 +08:00 committed by GitHub
parent 46fa265ac2
commit 2f2f74fdab
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
165 changed files with 7791 additions and 613 deletions

View File

@ -0,0 +1,8 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/7 14:43
@desc:
"""

View File

@ -0,0 +1,426 @@
{
"nodes": [
{
"id": "base-node",
"type": "base-node",
"x": 440,
"y": 3350,
"properties": {
"config": {},
"height": 517,
"stepName": "基本信息",
"node_data": {
"desc": "",
"name": "",
"prologue": "您好,我是 MaxKB 小助手,您可以向我提出 MaxKB 使用问题。\n- MaxKB 主要功能有什么?\n- MaxKB 支持哪些大语言模型?\n- MaxKB 支持哪些文档类型?"
}
}
},
{
"id": "start-node",
"type": "start-node",
"x": 440,
"y": 3710,
"properties": {
"config": {
"fields": [
{
"label": "用户问题",
"value": "question"
}
],
"globalFields": [
{
"value": "time",
"label": "当前时间"
}
]
},
"fields": [
{
"label": "用户问题",
"value": "question"
}
],
"height": 268.533,
"stepName": "开始",
"globalFields": [
{
"label": "当前时间",
"value": "time"
}
]
}
},
{
"id": "b931efe5-5b66-46e0-ae3b-0160cb18eeb5",
"type": "search-dataset-node",
"x": 830,
"y": 3470,
"properties": {
"config": {
"fields": [
{
"label": "检索结果的分段列表",
"value": "paragraph_list"
},
{
"label": "满足直接回答的分段列表",
"value": "is_hit_handling_method_list"
},
{
"label": "检索结果",
"value": "data"
},
{
"label": "满足直接回答的分段内容",
"value": "directly_return"
}
]
},
"height": 754.8,
"stepName": "知识库检索",
"node_data": {
"dataset_id_list": [],
"dataset_setting": {
"top_n": 3,
"similarity": 0.6,
"search_mode": "embedding",
"max_paragraph_char_number": 5000
},
"question_reference_address": [
"start-node",
"question"
]
}
}
},
{
"id": "fc60863a-dec2-4854-9e5a-7a44b7187a2b",
"type": "condition-node",
"x": 1380,
"y": 3470,
"properties": {
"width": 600,
"config": {
"fields": [
{
"label": "分支名称",
"value": "branch_name"
}
]
},
"height": 524.6669999999999,
"stepName": "判断器",
"node_data": {
"branch": [
{
"id": "1009",
"type": "IF",
"condition": "and",
"conditions": [
{
"field": [
"b931efe5-5b66-46e0-ae3b-0160cb18eeb5",
"is_hit_handling_method_list"
],
"value": "1",
"compare": "len_ge"
}
]
},
{
"id": "4908",
"type": "ELSE IF 1",
"condition": "and",
"conditions": [
{
"field": [
"b931efe5-5b66-46e0-ae3b-0160cb18eeb5",
"paragraph_list"
],
"value": "1",
"compare": "len_ge"
}
]
},
{
"id": "161",
"type": "ELSE",
"condition": "and",
"conditions": []
}
]
},
"branch_condition_list": [
{
"index": 0,
"height": 116.133,
"id": "1009"
},
{
"index": 1,
"height": 116.133,
"id": "4908"
},
{
"index": 2,
"height": 40,
"id": "161"
}
]
}
},
{
"id": "4ffe1086-25df-4c85-b168-979b5bbf0a26",
"type": "reply-node",
"x": 2090,
"y": 2820,
"properties": {
"config": {
"fields": [
{
"label": "内容",
"value": "answer"
}
]
},
"height": 312.267,
"stepName": "指定回复",
"node_data": {
"fields": [
"b931efe5-5b66-46e0-ae3b-0160cb18eeb5",
"directly_return"
],
"content": "",
"reply_type": "referencing"
}
}
},
{
"id": "f1f1ee18-5a02-46f6-b4e6-226253cdffbb",
"type": "ai-chat-node",
"x": 2090,
"y": 3460,
"properties": {
"config": {
"fields": [
{
"label": "AI 回答内容",
"value": "answer"
}
]
},
"height": 681.4,
"stepName": "AI 对话",
"node_data": {
"prompt": "已知信息:\n{{知识库检索.data}}\n问题\n{{开始.question}}",
"system": "",
"model_id": "",
"dialogue_number": 0
}
}
},
{
"id": "309d0eef-c597-46b5-8d51-b9a28aaef4c7",
"type": "ai-chat-node",
"x": 2090,
"y": 4180,
"properties": {
"config": {
"fields": [
{
"label": "AI 回答内容",
"value": "answer"
}
]
},
"height": 681.4,
"stepName": "AI 对话1",
"node_data": {
"prompt": "{{开始.question}}",
"system": "",
"model_id": "",
"dialogue_number": 0
}
}
}
],
"edges": [
{
"id": "7d0f166f-c472-41b2-b9a2-c294f4c83d73",
"type": "app-edge",
"sourceNodeId": "start-node",
"targetNodeId": "b931efe5-5b66-46e0-ae3b-0160cb18eeb5",
"startPoint": {
"x": 600,
"y": 3710
},
"endPoint": {
"x": 670,
"y": 3470
},
"properties": {},
"pointsList": [
{
"x": 600,
"y": 3710
},
{
"x": 710,
"y": 3710
},
{
"x": 560,
"y": 3470
},
{
"x": 670,
"y": 3470
}
],
"sourceAnchorId": "start-node_right",
"targetAnchorId": "b931efe5-5b66-46e0-ae3b-0160cb18eeb5_left"
},
{
"id": "35cb86dd-f328-429e-a973-12fd7218b696",
"type": "app-edge",
"sourceNodeId": "b931efe5-5b66-46e0-ae3b-0160cb18eeb5",
"targetNodeId": "fc60863a-dec2-4854-9e5a-7a44b7187a2b",
"startPoint": {
"x": 990,
"y": 3470
},
"endPoint": {
"x": 1090,
"y": 3470
},
"properties": {},
"pointsList": [
{
"x": 990,
"y": 3470
},
{
"x": 1100,
"y": 3470
},
{
"x": 980,
"y": 3470
},
{
"x": 1090,
"y": 3470
}
],
"sourceAnchorId": "b931efe5-5b66-46e0-ae3b-0160cb18eeb5_right",
"targetAnchorId": "fc60863a-dec2-4854-9e5a-7a44b7187a2b_left"
},
{
"id": "e8f6cfe6-7e48-41cd-abd3-abfb5304d0d8",
"type": "app-edge",
"sourceNodeId": "fc60863a-dec2-4854-9e5a-7a44b7187a2b",
"targetNodeId": "4ffe1086-25df-4c85-b168-979b5bbf0a26",
"startPoint": {
"x": 1670,
"y": 3340.733
},
"endPoint": {
"x": 1930,
"y": 2820
},
"properties": {},
"pointsList": [
{
"x": 1670,
"y": 3340.733
},
{
"x": 1780,
"y": 3340.733
},
{
"x": 1820,
"y": 2820
},
{
"x": 1930,
"y": 2820
}
],
"sourceAnchorId": "fc60863a-dec2-4854-9e5a-7a44b7187a2b_1009_right",
"targetAnchorId": "4ffe1086-25df-4c85-b168-979b5bbf0a26_left"
},
{
"id": "994ff325-6f7a-4ebc-b61b-10e15519d6d2",
"type": "app-edge",
"sourceNodeId": "fc60863a-dec2-4854-9e5a-7a44b7187a2b",
"targetNodeId": "f1f1ee18-5a02-46f6-b4e6-226253cdffbb",
"startPoint": {
"x": 1670,
"y": 3464.866
},
"endPoint": {
"x": 1930,
"y": 3460
},
"properties": {},
"pointsList": [
{
"x": 1670,
"y": 3464.866
},
{
"x": 1780,
"y": 3464.866
},
{
"x": 1820,
"y": 3460
},
{
"x": 1930,
"y": 3460
}
],
"sourceAnchorId": "fc60863a-dec2-4854-9e5a-7a44b7187a2b_4908_right",
"targetAnchorId": "f1f1ee18-5a02-46f6-b4e6-226253cdffbb_left"
},
{
"id": "19270caf-bb9f-4ba7-9bf8-200aa70fecd5",
"type": "app-edge",
"sourceNodeId": "fc60863a-dec2-4854-9e5a-7a44b7187a2b",
"targetNodeId": "309d0eef-c597-46b5-8d51-b9a28aaef4c7",
"startPoint": {
"x": 1670,
"y": 3550.9325000000003
},
"endPoint": {
"x": 1930,
"y": 4180
},
"properties": {},
"pointsList": [
{
"x": 1670,
"y": 3550.9325000000003
},
{
"x": 1780,
"y": 3550.9325000000003
},
{
"x": 1820,
"y": 4180
},
{
"x": 1930,
"y": 4180
}
],
"sourceAnchorId": "fc60863a-dec2-4854-9e5a-7a44b7187a2b_161_right",
"targetAnchorId": "309d0eef-c597-46b5-8d51-b9a28aaef4c7_left"
}
]
}

View File

@ -0,0 +1,190 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file i_step_node.py
@date2024/6/3 14:57
@desc:
"""
import time
from abc import abstractmethod
from typing import Type, Dict, List
from django.db.models import QuerySet
from rest_framework import serializers
from application.models import ChatRecord
from application.models.api_key_model import ApplicationPublicAccessClient
from common.constants.authentication_type import AuthenticationType
from common.field.common import InstanceField
from common.util.field_message import ErrMessage
from django.core import cache
chat_cache = cache.caches['model_cache']
def write_context(step_variable: Dict, global_variable: Dict, node, workflow):
    """Default context writer for a node result.

    Copies step-scoped variables into the node's context and workflow-scoped
    variables into the workflow's context; either argument may be None.
    """
    if step_variable is not None:
        node.context.update(step_variable)
    if global_variable is not None:
        workflow.context.update(global_variable)
class WorkFlowPostHandler:
    """Post-processing hook invoked after a workflow produced its answer.

    Persists the chat record, refreshes the cached chat session, and — for
    public access-token clients — bumps usage counters.
    """

    def __init__(self, chat_info, client_id, client_type):
        # chat_info: session object that accumulates chat records
        # client_id / client_type: identity of the caller (see AuthenticationType)
        self.chat_info = chat_info
        self.client_id = client_id
        self.client_type = client_type

    def handler(self, chat_id,
                chat_record_id,
                answer,
                workflow):
        """Persist one finished question/answer round of the workflow run."""
        question = workflow.params['question']
        details = workflow.get_runtime_details()
        # Aggregate token usage across every executed node's runtime details.
        message_tokens = sum([row.get('message_tokens') for row in details.values() if
                              'message_tokens' in row and row.get('message_tokens') is not None])
        answer_tokens = sum([row.get('answer_tokens') for row in details.values() if
                             'answer_tokens' in row and row.get('answer_tokens') is not None])
        chat_record = ChatRecord(id=chat_record_id,
                                 chat_id=chat_id,
                                 problem_text=question,
                                 answer_text=answer,
                                 details=details,
                                 message_tokens=message_tokens,
                                 answer_tokens=answer_tokens,
                                 run_time=time.time() - workflow.context['start_time'],
                                 index=0)
        self.chat_info.append_chat_record(chat_record, self.client_id)
        # Refresh the cached chat session (30-minute TTL)
        chat_cache.set(chat_id,
                       self.chat_info, timeout=60 * 30)
        if self.client_type == AuthenticationType.APPLICATION_ACCESS_TOKEN.value:
            # Public access-token client: increment total and per-day counters.
            application_public_access_client = QuerySet(ApplicationPublicAccessClient).filter(id=self.client_id).first()
            if application_public_access_client is not None:
                application_public_access_client.access_num = application_public_access_client.access_num + 1
                application_public_access_client.intraday_access_num = application_public_access_client.intraday_access_num + 1
                application_public_access_client.save()
class NodeResult:
    """Outcome of one workflow step.

    Carries step-scoped and workflow-scoped variables together with pluggable
    strategies for writing them into contexts and for building a response.
    """

    def __init__(self, node_variable: Dict, workflow_variable: Dict, _to_response=None, _write_context=write_context):
        self.node_variable = node_variable
        self.workflow_variable = workflow_variable
        self._to_response = _to_response
        self._write_context = _write_context

    def write_context(self, node, workflow):
        """Persist this result into the node and workflow contexts."""
        self._write_context(self.node_variable, self.workflow_variable, node, workflow)

    def to_response(self, chat_id, chat_record_id, node, workflow, post_handler: WorkFlowPostHandler):
        """Build a response object via the configured response strategy."""
        return self._to_response(chat_id, chat_record_id, self.node_variable, self.workflow_variable, node,
                                 workflow, post_handler)

    def is_assertion_result(self):
        """True when this result came from a condition node (has a branch_id)."""
        return 'branch_id' in self.node_variable
class ReferenceAddressSerializer(serializers.Serializer):
    """Address of a value produced by another node: node id plus field path."""
    node_id = serializers.CharField(required=True, error_messages=ErrMessage.char("节点id"))
    # Path of field names to walk inside that node's output.
    fields = serializers.ListField(
        child=serializers.CharField(required=True, error_messages=ErrMessage.char("节点字段")), required=True,
        error_messages=ErrMessage.list("节点字段数组"))
class FlowParamsSerializer(serializers.Serializer):
    """Runtime parameters handed to every workflow node for one question."""
    # Conversation history records
    history_chat_record = serializers.ListField(child=InstanceField(model_type=ChatRecord, required=True),
                                                error_messages=ErrMessage.list("历史对答"))
    # Fix: question/chat_id are CharFields, so use the char() error-message
    # builder (the original used list(), yielding list-typed messages).
    question = serializers.CharField(required=True, error_messages=ErrMessage.char("用户问题"))
    chat_id = serializers.CharField(required=True, error_messages=ErrMessage.char("对话id"))
    chat_record_id = serializers.CharField(required=True, error_messages=ErrMessage.char("对话记录id"))
    stream = serializers.BooleanField(required=True, error_messages=ErrMessage.base("流式输出"))
    client_id = serializers.CharField(required=False, error_messages=ErrMessage.char("客户端id"))
    client_type = serializers.CharField(required=False, error_messages=ErrMessage.char("客户端类型"))
class INode:
    """Base class of all workflow step nodes.

    A subclass declares its parameter serializer(s) and implements execute();
    the workflow manager drives run() and later collects get_details().
    """

    def __init__(self, node, workflow_params, workflow_manage):
        # Current step context, holds this step's runtime information
        self.status = 200
        self.err_message = ''
        self.node = node
        self.node_params = node.properties.get('node_data')
        self.workflow_manage = workflow_manage
        self.node_params_serializer = None
        self.flow_params_serializer = None
        self.context = {}
        self.id = node.id
        self.valid_args(self.node_params, workflow_params)

    def valid_args(self, node_params, flow_params):
        """Validate flow-level and node-level parameters; raises on failure."""
        flow_params_serializer_class = self.get_flow_params_serializer_class()
        node_params_serializer_class = self.get_node_params_serializer_class()
        if flow_params_serializer_class is not None and flow_params is not None:
            self.flow_params_serializer = flow_params_serializer_class(data=flow_params)
            self.flow_params_serializer.is_valid(raise_exception=True)
        if node_params_serializer_class is not None:
            self.node_params_serializer = node_params_serializer_class(data=node_params)
            self.node_params_serializer.is_valid(raise_exception=True)

    def get_reference_field(self, fields: List[str]):
        # Resolve a field path against this node's own context.
        return self.get_field(self.context, fields)

    @staticmethod
    def get_field(obj, fields: List[str]):
        # Walk nested mappings along 'fields'; returns None if any hop is missing.
        for field in fields:
            value = obj.get(field)
            if value is None:
                return None
            else:
                obj = value
        return obj

    @abstractmethod
    def get_node_params_serializer_class(self) -> Type[serializers.Serializer]:
        pass

    def get_flow_params_serializer_class(self) -> Type[serializers.Serializer]:
        return FlowParamsSerializer

    def get_write_error_context(self, e):
        # Mark this node as failed and hand back a no-op context writer.
        self.status = 500
        self.err_message = str(e)

        def write_error_context(answer, status=200):
            pass
        return write_error_context

    def run(self) -> NodeResult:
        """
        Execute this node and record its wall-clock run time in the context.
        :return: execution result
        """
        start_time = time.time()
        self.context['start_time'] = start_time
        result = self._run()
        self.context['run_time'] = time.time() - start_time
        return result

    def _run(self):
        # Default dispatch; subclasses may override to unpack serializer data.
        result = self.execute()
        return result

    def execute(self, **kwargs) -> NodeResult:
        pass

    def get_details(self, index: int, **kwargs):
        """
        Runtime details of this step.
        :return: step detail dict
        """
        return {}

View File

@ -0,0 +1,23 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/7 14:43
@desc:
"""
from .ai_chat_step_node import *
from .condition_node import *
from .question_node import *
from .search_dataset_node import *
from .start_node import *
from .direct_reply_node import *
node_list = [BaseStartStepNode, BaseChatNode, BaseSearchDatasetNode, BaseQuestionNode, BaseConditionNode, BaseReplyNode]
def get_node(node_type):
    """Return the step-node class registered for node_type, or None."""
    for node_class in node_list:
        if node_class.type == node_type:
            return node_class
    return None

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 15:29
@desc:
"""
from .impl import *

View File

@ -0,0 +1,37 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file i_chat_node.py
@date2024/6/4 13:58
@desc:
"""
from typing import Type
from rest_framework import serializers
from application.flow.i_step_node import INode, NodeResult
from common.util.field_message import ErrMessage
class ChatNodeSerializer(serializers.Serializer):
    """Node-level parameters of an AI-chat node."""
    model_id = serializers.CharField(required=True, error_messages=ErrMessage.char("模型id"))
    system = serializers.CharField(required=False, allow_blank=True, allow_null=True,
                                   error_messages=ErrMessage.char("角色设定"))
    prompt = serializers.CharField(required=True, error_messages=ErrMessage.char("提示词"))
    # Number of history dialogue rounds to include in the model context.
    dialogue_number = serializers.IntegerField(required=True, error_messages=ErrMessage.integer("多轮对话数量"))
class IChatNode(INode):
    """Abstract AI-chat node: validates parameters and dispatches to execute()."""
    type = 'ai-chat-node'

    def get_node_params_serializer_class(self) -> Type[serializers.Serializer]:
        return ChatNodeSerializer

    def _run(self):
        # Merge validated node params (model/prompt/...) with flow params
        # (chat ids, history, stream flag) into one execute() call.
        return self.execute(**self.node_params_serializer.data, **self.flow_params_serializer.data)

    def execute(self, model_id, system, prompt, dialogue_number, history_chat_record, stream, chat_id, chat_record_id,
                **kwargs) -> NodeResult:
        pass

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 15:34
@desc:
"""
from .base_chat_node import BaseChatNode

View File

@ -0,0 +1,195 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file base_question_node.py
@date2024/6/4 14:30
@desc:
"""
import json
import time
from functools import reduce
from typing import List, Dict
from django.db.models import QuerySet
from langchain.schema import HumanMessage, SystemMessage
from langchain_core.messages import BaseMessage
from application.flow import tools
from application.flow.i_step_node import NodeResult, INode
from application.flow.step_node.ai_chat_step_node.i_chat_node import IChatNode
from common.util.rsa_util import rsa_long_decrypt
from setting.models import Model
from setting.models_provider.constants.model_provider_constants import ModelProvideConstants
def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INode, workflow):
    """
    Drain the model's streaming response and record answer, token usage and
    timing on the node context.
    @param node_variable: step-scoped variables ('result', 'chat_model', ...)
    @param workflow_variable: workflow-scoped variables (unused here)
    @param node: current node instance
    @param workflow: workflow manager
    """
    answer = ''.join(chunk.content for chunk in node_variable.get('result'))
    chat_model = node_variable.get('chat_model')
    node.context['message_tokens'] = chat_model.get_num_tokens_from_messages(node_variable.get('message_list'))
    node.context['answer_tokens'] = chat_model.get_num_tokens(answer)
    node.context['answer'] = answer
    node.context['history_message'] = node_variable['history_message']
    node.context['question'] = node_variable['question']
    node.context['run_time'] = time.time() - node.context['start_time']
def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, workflow):
    """
    Record a blocking (non-stream) model response on the node context.
    @param node_variable: step-scoped variables ('result', 'chat_model', ...)
    @param workflow_variable: workflow-scoped variables (unused here)
    @param node: current node instance
    @param workflow: workflow manager
    """
    chat_model = node_variable.get('chat_model')
    answer = node_variable.get('result').content
    node.context['message_tokens'] = chat_model.get_num_tokens_from_messages(node_variable.get('message_list'))
    node.context['answer_tokens'] = chat_model.get_num_tokens(answer)
    node.context['answer'] = answer
    node.context['history_message'] = node_variable['history_message']
    node.context['question'] = node_variable['question']
def get_to_response_write_context(node_variable: Dict, node: INode):
    """Build the callback used by response writers to finalize the node
    context (token counts, answer, run time) once the answer body is known."""

    def _write_context(answer, status=200):
        chat_model = node_variable.get('chat_model')
        if status != 200:
            # Failure path: no token accounting; record the error on the node.
            message_tokens = 0
            answer_tokens = 0
            node.err_message = answer
            node.status = status
        else:
            answer_tokens = chat_model.get_num_tokens(answer)
            message_tokens = chat_model.get_num_tokens_from_messages(node_variable.get('message_list'))
        node.context['message_tokens'] = message_tokens
        node.context['answer_tokens'] = answer_tokens
        node.context['answer'] = answer
        node.context['run_time'] = time.time() - node.context['start_time']

    return _write_context
def to_stream_response(chat_id, chat_record_id, node_variable: Dict, workflow_variable: Dict, node, workflow,
                       post_handler):
    """
    Turn the model's chunk iterator into a streaming response.
    @param chat_id: chat session id
    @param chat_record_id: chat record id
    @param node_variable: step-scoped variables
    @param workflow_variable: workflow-scoped variables
    @param node: current node
    @param workflow: workflow manager
    @param post_handler: hook executed after the answer is fully emitted
    @return: streaming response
    """
    finalize = get_to_response_write_context(node_variable, node)
    chunk_iterator = node_variable.get('result')
    return tools.to_stream_response(chat_id, chat_record_id, chunk_iterator, workflow, finalize, post_handler)
def to_response(chat_id, chat_record_id, node_variable: Dict, workflow_variable: Dict, node, workflow,
                post_handler):
    """
    Turn a blocking model result into a plain response.
    @param chat_id: chat session id
    @param chat_record_id: chat record id
    @param node_variable: step-scoped variables
    @param workflow_variable: workflow-scoped variables
    @param node: current node
    @param workflow: workflow manager
    @param post_handler: hook executed after the answer is emitted
    @return: response
    """
    finalize = get_to_response_write_context(node_variable, node)
    model_result = node_variable.get('result')
    return tools.to_response(chat_id, chat_record_id, model_result, workflow, finalize, post_handler)
class BaseChatNode(IChatNode):
    """Concrete AI-chat node: builds the message list, calls the model
    (streaming or blocking) and packages the result for downstream handling."""

    def execute(self, model_id, system, prompt, dialogue_number, history_chat_record, stream, chat_id, chat_record_id,
                **kwargs) -> NodeResult:
        model = QuerySet(Model).filter(id=model_id).first()
        # Credentials are stored RSA-encrypted; decrypt before building the client.
        chat_model = ModelProvideConstants[model.provider].value.get_model(model.model_type, model.model_name,
                                                                           json.loads(
                                                                               rsa_long_decrypt(model.credential)),
                                                                           streaming=True)
        history_message = self.get_history_message(history_chat_record, dialogue_number)
        self.context['history_message'] = history_message
        question = self.generate_prompt_question(prompt)
        self.context['question'] = question.content
        message_list = self.generate_message_list(system, prompt, history_message)
        self.context['message_list'] = message_list
        if stream:
            r = chat_model.stream(message_list)
            return NodeResult({'result': r, 'chat_model': chat_model, 'message_list': message_list,
                               'history_message': history_message, 'question': question.content}, {},
                              _write_context=write_context_stream,
                              _to_response=to_stream_response)
        else:
            r = chat_model.invoke(message_list)
            return NodeResult({'result': r, 'chat_model': chat_model, 'message_list': message_list,
                               'history_message': history_message, 'question': question.content}, {},
                              _write_context=write_context, _to_response=to_response)

    @staticmethod
    def get_history_message(history_chat_record, dialogue_number):
        # Keep only the last 'dialogue_number' rounds, expanded into
        # alternating human/AI messages.
        start_index = len(history_chat_record) - dialogue_number
        history_message = reduce(lambda x, y: [*x, *y], [
            [history_chat_record[index].get_human_message(), history_chat_record[index].get_ai_message()]
            for index in
            range(start_index if start_index > 0 else 0, len(history_chat_record))], [])
        return history_message

    def generate_prompt_question(self, prompt):
        # Render template references ({{node.field}}) into the user prompt.
        return HumanMessage(self.workflow_manage.generate_prompt(prompt))

    def generate_message_list(self, system: str, prompt: str, history_message):
        # Optional system message first, then history, then the rendered prompt.
        if system is not None and len(system) > 0:
            return [SystemMessage(self.workflow_manage.generate_prompt(system)), *history_message,
                    HumanMessage(self.workflow_manage.generate_prompt(prompt))]
        else:
            return [*history_message, HumanMessage(self.workflow_manage.generate_prompt(prompt))]

    @staticmethod
    def reset_message_list(message_list: List[BaseMessage], answer_text):
        # Flatten messages to role/content dicts and append the final answer.
        result = [{'role': 'user' if isinstance(message, HumanMessage) else 'ai', 'content': message.content} for
                  message
                  in
                  message_list]
        result.append({'role': 'ai', 'content': answer_text})
        return result

    def get_details(self, index: int, **kwargs):
        """Runtime detail record of this chat step (shown in the run log)."""
        return {
            'name': self.node.properties.get('stepName'),
            "index": index,
            'run_time': self.context.get('run_time'),
            'system': self.node_params.get('system'),
            'history_message': [{'content': message.content, 'role': message.type} for message in
                                (self.context.get('history_message') if self.context.get(
                                    'history_message') is not None else [])],
            'question': self.context.get('question'),
            'answer': self.context.get('answer'),
            'type': self.node.type,
            'message_tokens': self.context.get('message_tokens'),
            'answer_tokens': self.context.get('answer_tokens'),
            'status': self.status,
            'err_message': self.err_message
        }

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/7 14:43
@desc:
"""
from .impl import *

View File

@ -0,0 +1,23 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/7 14:43
@desc:
"""
from .contain_compare import *
from .equal_compare import *
from .gt_compare import *
from .ge_compare import *
from .le_compare import *
from .lt_compare import *
from .len_ge_compare import *
from .len_gt_compare import *
from .len_le_compare import *
from .len_lt_compare import *
from .len_equal_compare import *
# Registered comparison handlers, scanned in order by the condition node.
# NOTE(review): IsNullCompare / IsNotNullCompare exist in this package but are
# neither imported above nor registered here — confirm whether the
# 'is_null' / 'is_not_null' operators should be supported.
compare_handle_list = [GECompare(), GTCompare(), ContainCompare(), EqualCompare(), LTCompare(), LECompare(),
                       LenLECompare(), LenGECompare(), LenEqualCompare(), LenGTCompare(), LenLTCompare()]

View File

@ -0,0 +1,20 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file compare.py
@date2024/6/7 14:37
@desc:
"""
from abc import abstractmethod
from typing import List
class Compare:
    """Strategy interface for condition-node comparison operators."""
    @abstractmethod
    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Return truthy when this handler implements the given 'compare' operator.
        pass
    @abstractmethod
    def compare(self, source_value, compare, target_value):
        # Evaluate the comparison; called only after support() returned truthy.
        pass

View File

@ -0,0 +1,23 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file contain_compare.py
@date2024/6/11 10:02
@desc:
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class ContainCompare(Compare):
    """'contain' operator: membership test on a string or sequence."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'contain'

    def compare(self, source_value, compare, target_value):
        # Strings: substring test; sequences: element-wise string equality.
        if isinstance(source_value, str):
            return str(target_value) in source_value
        return any([str(item) == str(target_value) for item in source_value])

View File

@ -0,0 +1,21 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file equal_compare.py
@date2024/6/7 14:44
@desc:
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class EqualCompare(Compare):
    """'eq' operator: string-coerced equality."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'eq'

    def compare(self, source_value, compare, target_value):
        return str(source_value) == str(target_value)

View File

@ -0,0 +1,24 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file ge_compare.py
@date2024/6/11 9:52
@desc: 大于等于比较器
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class GECompare(Compare):
    """'ge' operator: numeric greater-than-or-equal."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'ge'

    def compare(self, source_value, compare, target_value):
        # Non-numeric operands are treated as a failed comparison.
        try:
            return float(source_value) >= float(target_value)
        except Exception:
            return False

View File

@ -0,0 +1,24 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file gt_compare.py
@date2024/6/11 9:52
@desc: 大于比较器
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class GTCompare(Compare):
    """'gt' operator: numeric greater-than."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'gt'

    def compare(self, source_value, compare, target_value):
        # Non-numeric operands are treated as a failed comparison.
        try:
            return float(source_value) > float(target_value)
        except Exception:
            return False

View File

@ -0,0 +1,21 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file is_not_null_compare.py
@date2024/6/28 10:45
@desc:
"""
from typing import List
from application.flow.step_node.condition_node.compare import Compare
class IsNotNullCompare(Compare):
    """'is_not_null' operator: true when the referenced value exists."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'is_not_null'

    def compare(self, source_value, compare, target_value=None):
        return source_value is not None

View File

@ -0,0 +1,21 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file is_null_compare.py
@date2024/6/28 10:45
@desc:
"""
from typing import List
from application.flow.step_node.condition_node.compare import Compare
class IsNullCompare(Compare):
    """'is_null' operator: true when the referenced value is absent."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'is_null'

    def compare(self, source_value, compare, target_value=None):
        return source_value is None

View File

@ -0,0 +1,24 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file lt_compare.py
@date2024/6/11 9:52
@desc: 小于比较器
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class LECompare(Compare):
    """'le' operator: numeric less-than-or-equal."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'le'

    def compare(self, source_value, compare, target_value):
        # Non-numeric operands are treated as a failed comparison.
        try:
            return float(source_value) <= float(target_value)
        except Exception:
            return False

View File

@ -0,0 +1,24 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file equal_compare.py
@date2024/6/7 14:44
@desc:
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class LenEqualCompare(Compare):
    """'len_eq' operator: length of the source equals the target value."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'len_eq'

    def compare(self, source_value, compare, target_value):
        # Unsized sources or non-integer targets count as a failed comparison.
        try:
            return len(source_value) == int(target_value)
        except Exception:
            return False

View File

@ -0,0 +1,24 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file lt_compare.py
@date2024/6/11 9:52
@desc: 大于比较器
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class LenGECompare(Compare):
    """'len_ge' operator: length of the source >= target value."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'len_ge'

    def compare(self, source_value, compare, target_value):
        # Unsized sources or non-integer targets count as a failed comparison.
        try:
            return len(source_value) >= int(target_value)
        except Exception:
            return False

View File

@ -0,0 +1,24 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file lt_compare.py
@date2024/6/11 9:52
@desc: 大于比较器
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class LenGTCompare(Compare):
    """'len_gt' operator: length of the source > target value."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'len_gt'

    def compare(self, source_value, compare, target_value):
        # Unsized sources or non-integer targets count as a failed comparison.
        try:
            return len(source_value) > int(target_value)
        except Exception:
            return False

View File

@ -0,0 +1,24 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file lt_compare.py
@date2024/6/11 9:52
@desc: 小于比较器
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class LenLECompare(Compare):
    """'len_le' operator: length of the source <= target value."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'len_le'

    def compare(self, source_value, compare, target_value):
        # Unsized sources or non-integer targets count as a failed comparison.
        try:
            return len(source_value) <= int(target_value)
        except Exception:
            return False

View File

@ -0,0 +1,24 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file lt_compare.py
@date2024/6/11 9:52
@desc: 小于比较器
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class LenLTCompare(Compare):
    """'len_lt' operator: length of the source < target value."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'len_lt'

    def compare(self, source_value, compare, target_value):
        # Unsized sources or non-integer targets count as a failed comparison.
        try:
            return len(source_value) < int(target_value)
        except Exception:
            return False

View File

@ -0,0 +1,24 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file lt_compare.py
@date2024/6/11 9:52
@desc: 小于比较器
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class LTCompare(Compare):
    """'lt' operator: numeric less-than."""

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'lt'

    def compare(self, source_value, compare, target_value):
        # Non-numeric operands are treated as a failed comparison.
        try:
            return float(source_value) < float(target_value)
        except Exception:
            return False

View File

@ -0,0 +1,23 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file not_contain_compare.py
@date2024/6/11 10:02
@desc:
"""
from typing import List
from application.flow.step_node.condition_node.compare.compare import Compare
class ContainCompare(Compare):
    """'not_contain' operator: negated membership test.

    NOTE(review): this class shares its name with contain_compare.ContainCompare;
    under wildcard imports one would shadow the other — consider renaming to
    NotContainCompare (kept as-is here to preserve the public interface).
    """

    def support(self, node_id, fields: List[str], source_value, compare, target_value):
        # Explicit boolean instead of the original implicit None on no-match.
        return compare == 'not_contain'

    def compare(self, source_value, compare, target_value):
        # Strings: negated substring test; sequences: no element matches.
        if isinstance(source_value, str):
            return str(target_value) not in source_value
        return not any([str(item) == str(target_value) for item in source_value])

View File

@ -0,0 +1,39 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file i_condition_node.py
@date2024/6/7 9:54
@desc:
"""
import json
from typing import Type
from rest_framework import serializers
from application.flow.i_step_node import INode
from common.util.field_message import ErrMessage
class ConditionSerializer(serializers.Serializer):
    """One atomic comparison of a branch: <field path> <compare op> <value>."""
    compare = serializers.CharField(required=True, error_messages=ErrMessage.char("比较器"))
    # Fix: the original passed an empty label here, yielding an unhelpful
    # validation message for this field.
    value = serializers.CharField(required=True, error_messages=ErrMessage.char("比较值"))
    # Fix: 'field' is a ListField, so use the list() message builder as the
    # other list fields in this package do.
    field = serializers.ListField(required=True, error_messages=ErrMessage.list("字段"))
class ConditionBranchSerializer(serializers.Serializer):
    """A single branch (IF / ELSE IF / ELSE) of a condition node."""
    id = serializers.CharField(required=True, error_messages=ErrMessage.char("分支id"))
    type = serializers.CharField(required=True, error_messages=ErrMessage.char("分支类型"))
    # How the branch's conditions are combined: 'and' or 'or'.
    condition = serializers.CharField(required=True, error_messages=ErrMessage.char("条件or|and"))
    conditions = ConditionSerializer(many=True)
class ConditionNodeParamsSerializer(serializers.Serializer):
    # Node-level parameters of a condition node: the ordered branch list.
    branch = ConditionBranchSerializer(many=True)
class IConditionNode(INode):
    """Abstract condition (branch) node."""
    type = 'condition-node'

    def get_node_params_serializer_class(self) -> Type[serializers.Serializer]:
        return ConditionNodeParamsSerializer

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 15:35
@desc:
"""
from .base_condition_node import BaseConditionNode

View File

@ -0,0 +1,50 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file base_condition_node.py
@date2024/6/7 11:29
@desc:
"""
from typing import List
from application.flow.i_step_node import NodeResult
from application.flow.step_node.condition_node.compare import compare_handle_list
from application.flow.step_node.condition_node.i_condition_node import IConditionNode
class BaseConditionNode(IConditionNode):
    """Concrete condition node: evaluates branches in order and reports the
    id/name of the first branch whose conditions hold."""

    def execute(self, **kwargs) -> NodeResult:
        # NOTE(review): if no branch matches, _execute returns None and the
        # .get() calls below raise — presumably an ELSE branch is always
        # present in the branch list; confirm with the workflow editor.
        branch_list = self.node_params_serializer.data['branch']
        branch = self._execute(branch_list)
        r = NodeResult({'branch_id': branch.get('id'), 'branch_name': branch.get('type')}, {})
        return r

    def _execute(self, branch_list: List):
        # First branch whose assertion passes wins.
        for branch in branch_list:
            if self.branch_assertion(branch):
                return branch

    def branch_assertion(self, branch):
        # Combine the branch's atomic conditions with its and/or semantics.
        condition_list = [self.assertion(row.get('field'), row.get('compare'), row.get('value')) for row in
                          branch.get('conditions')]
        condition = branch.get('condition')
        return all(condition_list) if condition == 'and' else any(condition_list)

    def assertion(self, field_list: List[str], compare: str, value):
        # field_list[0] is the source node id; the remainder is the field path.
        field_value = self.workflow_manage.get_reference_field(field_list[0], field_list[1:])
        for compare_handler in compare_handle_list:
            if compare_handler.support(field_list[0], field_list[1:], field_value, compare, value):
                return compare_handler.compare(field_value, compare, value)

    def get_details(self, index: int, **kwargs):
        """Runtime detail record of this condition step."""
        return {
            'name': self.node.properties.get('stepName'),
            "index": index,
            'run_time': self.context.get('run_time'),
            'branch_id': self.context.get('branch_id'),
            'branch_name': self.context.get('branch_name'),
            'type': self.node.type,
            'status': self.status,
            'err_message': self.err_message
        }

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 17:50
@desc:
"""
from .impl import *

View File

@ -0,0 +1,46 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file i_reply_node.py
@date2024/6/11 16:25
@desc:
"""
from typing import Type
from rest_framework import serializers
from application.flow.i_step_node import INode, NodeResult
from common.exception.app_exception import AppApiException
from common.util.field_message import ErrMessage
class ReplyNodeParamsSerializer(serializers.Serializer):
    """Parameters of the direct-reply node: static content or a field reference."""
    reply_type = serializers.CharField(required=True, error_messages=ErrMessage.char("回复类型"))
    fields = serializers.ListField(required=False, error_messages=ErrMessage.list("引用字段"))
    content = serializers.CharField(required=False, allow_blank=True, allow_null=True,
                                    error_messages=ErrMessage.char("直接回答内容"))

    def is_valid(self, *, raise_exception=False):
        # Always raise on basic field errors, then enforce the conditional
        # requirements that depend on reply_type.
        super().is_valid(raise_exception=True)
        if self.data.get('reply_type') == 'referencing':
            if 'fields' not in self.data:
                raise AppApiException(500, "引用字段不能为空")
            if len(self.data.get('fields')) < 2:
                raise AppApiException(500, "引用字段错误")
        elif self.data.get('content') is None:
            # .get() returns None both when the key is absent and when it is null.
            raise AppApiException(500, "内容不能为空")
class IReplyNode(INode):
    """Interface of the direct-reply workflow node."""
    type = 'reply-node'

    def get_node_params_serializer_class(self) -> Type[serializers.Serializer]:
        return ReplyNodeParamsSerializer

    def _run(self):
        # Merge validated node params with flow-level params (stream, chat ids, ...).
        return self.execute(**self.node_params_serializer.data, **self.flow_params_serializer.data)

    def execute(self, reply_type, stream, fields=None, content=None, **kwargs) -> NodeResult:
        # Implemented by BaseReplyNode.
        pass

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 17:49
@desc:
"""
from .base_reply_node import *

View File

@ -0,0 +1,90 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file base_reply_node.py
@date2024/6/11 17:25
@desc:
"""
from typing import List, Dict
from langchain_core.messages import AIMessage, AIMessageChunk
from application.flow import tools
from application.flow.i_step_node import NodeResult, INode
from application.flow.step_node.direct_reply_node.i_reply_node import IReplyNode
def get_to_response_write_context(node_variable: Dict, node: INode):
    """Build a callback that records the final answer on the node context."""
    def _writer(answer, status=200):
        node.context['answer'] = answer
    return _writer
def to_stream_response(chat_id, chat_record_id, node_variable: Dict, workflow_variable: Dict, node, workflow,
                       post_handler):
    """Convert the node's streamed result into an SSE streaming response.

    @param chat_id: chat session id
    @param chat_record_id: chat record id
    @param node_variable: node-local data (holds 'result')
    @param workflow_variable: workflow-global data (unused here)
    @param node: node instance
    @param workflow: workflow manager
    @param post_handler: post-processing hook executed after output completes
    @return: streaming HTTP response
    """
    return tools.to_stream_response(chat_id, chat_record_id, node_variable.get('result'), workflow,
                                    get_to_response_write_context(node_variable, node), post_handler)
def to_response(chat_id, chat_record_id, node_variable: Dict, workflow_variable: Dict, node, workflow,
                post_handler):
    """Convert the node's completed result into a plain (non-stream) response.

    @param chat_id: chat session id
    @param chat_record_id: chat record id
    @param node_variable: node-local data (holds 'result')
    @param workflow_variable: workflow-global data (unused here)
    @param node: node instance
    @param workflow: workflow manager
    @param post_handler: post-processing hook
    @return: JSON API response
    """
    return tools.to_response(chat_id, chat_record_id, node_variable.get('result'), workflow,
                             get_to_response_write_context(node_variable, node), post_handler)
class BaseReplyNode(IReplyNode):
    """Direct-reply node: emit either referenced content or templated static text."""

    def execute(self, reply_type, stream, fields=None, content=None, **kwargs) -> NodeResult:
        answer = self.get_reference_content(fields) if reply_type == 'referencing' \
            else self.generate_reply_content(content)
        if stream:
            # Wrap the fixed answer in a one-chunk iterator so the streaming
            # pipeline can treat it like any model output.
            return NodeResult({'result': iter([AIMessageChunk(content=answer)])}, {},
                              _to_response=to_stream_response)
        return NodeResult({'result': AIMessage(content=answer)}, {}, _to_response=to_response)

    def generate_reply_content(self, prompt):
        # Run the configured static content through prompt templating.
        return self.workflow_manage.generate_prompt(prompt)

    def get_reference_content(self, fields: List[str]):
        # fields = [node_id, field, ...]; resolve and stringify the referenced value.
        node_id, field_path = fields[0], fields[1:]
        return str(self.workflow_manage.get_reference_field(node_id, field_path))

    def get_details(self, index: int, **kwargs):
        """Execution detail payload used by the run-details API."""
        return {
            'name': self.node.properties.get('stepName'),
            "index": index,
            'run_time': self.context.get('run_time'),
            'type': self.node.type,
            'answer': self.context.get('answer'),
            'status': self.status,
            'err_message': self.err_message
        }

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 15:30
@desc:
"""
from .impl import *

View File

@ -0,0 +1,37 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file i_chat_node.py
@date2024/6/4 13:58
@desc:
"""
from typing import Type
from rest_framework import serializers
from application.flow.i_step_node import INode, NodeResult
from common.util.field_message import ErrMessage
class QuestionNodeSerializer(serializers.Serializer):
    """Parameters of the AI question/chat node."""
    model_id = serializers.CharField(required=True, error_messages=ErrMessage.char("模型id"))
    # Optional system/role prompt for the model.
    system = serializers.CharField(required=False, allow_blank=True, allow_null=True,
                                   error_messages=ErrMessage.char("角色设定"))
    prompt = serializers.CharField(required=True, error_messages=ErrMessage.char("提示词"))
    # Number of history dialogue rounds to include in the context.
    dialogue_number = serializers.IntegerField(required=True, error_messages=ErrMessage.integer("多轮对话数量"))
class IQuestionNode(INode):
    """Interface of the AI question/chat workflow node."""
    type = 'question-node'

    def get_node_params_serializer_class(self) -> Type[serializers.Serializer]:
        return QuestionNodeSerializer

    def _run(self):
        # Merge validated node params with flow-level params (stream, chat ids, history, ...).
        return self.execute(**self.node_params_serializer.data, **self.flow_params_serializer.data)

    def execute(self, model_id, system, prompt, dialogue_number, history_chat_record, stream, chat_id, chat_record_id,
                **kwargs) -> NodeResult:
        # Implemented by BaseQuestionNode.
        pass

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 15:35
@desc:
"""
from .base_question_node import BaseQuestionNode

View File

@ -0,0 +1,196 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file base_question_node.py
@date2024/6/4 14:30
@desc:
"""
import json
import time
from functools import reduce
from typing import List, Dict
from django.db.models import QuerySet
from langchain.schema import HumanMessage, SystemMessage
from langchain_core.messages import BaseMessage
from application.flow import tools
from application.flow.i_step_node import NodeResult, INode
from application.flow.step_node.question_node.i_question_node import IQuestionNode
from common.util.rsa_util import rsa_long_decrypt
from setting.models import Model
from setting.models_provider.constants.model_provider_constants import ModelProvideConstants
def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INode, workflow):
    """Drain the streamed model output and record answer/token usage (streaming path).

    @param node_variable: node-local data ('result', 'chat_model', 'message_list', ...)
    @param workflow_variable: workflow-global data (unused here)
    @param node: node instance whose context is written
    @param workflow: workflow manager (unused here)
    """
    answer = ''.join(chunk.content for chunk in node_variable.get('result'))
    chat_model = node_variable.get('chat_model')
    node.context['message_tokens'] = chat_model.get_num_tokens_from_messages(node_variable.get('message_list'))
    node.context['answer_tokens'] = chat_model.get_num_tokens(answer)
    node.context['answer'] = answer
    node.context['history_message'] = node_variable['history_message']
    node.context['question'] = node_variable['question']
    node.context['run_time'] = time.time() - node.context['start_time']
def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, workflow):
    """Record the completed (non-streaming) model answer and token usage.

    @param node_variable: node-local data ('result', 'chat_model', 'message_list', ...)
    @param workflow_variable: workflow-global data (unused here)
    @param node: node instance whose context is written
    @param workflow: workflow manager (unused here)
    """
    chat_model = node_variable.get('chat_model')
    answer = node_variable.get('result').content
    node.context['message_tokens'] = chat_model.get_num_tokens_from_messages(node_variable.get('message_list'))
    node.context['answer_tokens'] = chat_model.get_num_tokens(answer)
    node.context['answer'] = answer
    node.context['history_message'] = node_variable['history_message']
    node.context['question'] = node_variable['question']
    # NOTE(review): unlike write_context_stream this does not set 'run_time';
    # confirm the framework records it elsewhere for the non-stream path.
def get_to_response_write_context(node_variable: Dict, node: INode):
    """Build a callback that finalizes the node context once output is known."""
    def _writer(answer, status=200):
        chat_model = node_variable.get('chat_model')
        if status != 200:
            # Error path: no token accounting; remember the failure on the node.
            message_tokens, answer_tokens = 0, 0
            node.err_message = answer
            node.status = status
        else:
            answer_tokens = chat_model.get_num_tokens(answer)
            message_tokens = chat_model.get_num_tokens_from_messages(node_variable.get('message_list'))
        node.context['message_tokens'] = message_tokens
        node.context['answer_tokens'] = answer_tokens
        node.context['answer'] = answer
        node.context['run_time'] = time.time() - node.context['start_time']
    return _writer
def to_stream_response(chat_id, chat_record_id, node_variable: Dict, workflow_variable: Dict, node, workflow,
                       post_handler):
    """Convert the streamed model output into an SSE streaming response.

    @param chat_id: chat session id
    @param chat_record_id: chat record id
    @param node_variable: node-local data (holds 'result')
    @param workflow_variable: workflow-global data (unused here)
    @param node: node instance
    @param workflow: workflow manager
    @param post_handler: post-processing hook executed after output completes
    @return: streaming HTTP response
    """
    return tools.to_stream_response(chat_id, chat_record_id, node_variable.get('result'), workflow,
                                    get_to_response_write_context(node_variable, node), post_handler)
def to_response(chat_id, chat_record_id, node_variable: Dict, workflow_variable: Dict, node, workflow,
                post_handler):
    """Convert the completed model output into a plain (non-stream) response.

    @param chat_id: chat session id
    @param chat_record_id: chat record id
    @param node_variable: node-local data (holds 'result')
    @param workflow_variable: workflow-global data (unused here)
    @param node: node instance
    @param workflow: workflow manager
    @param post_handler: post-processing hook
    @return: JSON API response
    """
    return tools.to_response(chat_id, chat_record_id, node_variable.get('result'), workflow,
                             get_to_response_write_context(node_variable, node), post_handler)
class BaseQuestionNode(IQuestionNode):
    """AI question node: assemble system/history/prompt messages and call the model."""

    def execute(self, model_id, system, prompt, dialogue_number, history_chat_record, stream, chat_id, chat_record_id,
                **kwargs) -> NodeResult:
        """Run the chat model for this node.

        @param model_id: id of the configured model record
        @param system: optional system/role prompt
        @param prompt: user prompt template
        @param dialogue_number: number of history rounds to include
        @param history_chat_record: prior chat records
        @param stream: whether to stream the model output
        @return: NodeResult carrying the model result and token bookkeeping hooks
        """
        model = QuerySet(Model).filter(id=model_id).first()
        # Credentials are stored RSA-encrypted; decrypt before building the client.
        chat_model = ModelProvideConstants[model.provider].value.get_model(model.model_type, model.model_name,
                                                                           json.loads(
                                                                               rsa_long_decrypt(model.credential)),
                                                                           streaming=True)
        history_message = self.get_history_message(history_chat_record, dialogue_number)
        self.context['history_message'] = history_message
        question = self.generate_prompt_question(prompt)
        self.context['question'] = question.content
        message_list = self.generate_message_list(system, prompt, history_message)
        self.context['message_list'] = message_list
        if stream:
            r = chat_model.stream(message_list)
            return NodeResult({'result': r, 'chat_model': chat_model, 'message_list': message_list,
                               'get_to_response_write_context': get_to_response_write_context,
                               'history_message': history_message, 'question': question.content}, {},
                              _write_context=write_context_stream,
                              _to_response=to_stream_response)
        else:
            r = chat_model.invoke(message_list)
            return NodeResult({'result': r, 'chat_model': chat_model, 'message_list': message_list,
                               'history_message': history_message, 'question': question.content}, {},
                              _write_context=write_context, _to_response=to_response)

    @staticmethod
    def get_history_message(history_chat_record, dialogue_number):
        # Take the last `dialogue_number` rounds; each round contributes the
        # human message and the AI message, flattened in order.
        start_index = len(history_chat_record) - dialogue_number
        history_message = reduce(lambda x, y: [*x, *y], [
            [history_chat_record[index].get_human_message(), history_chat_record[index].get_ai_message()]
            for index in
            range(start_index if start_index > 0 else 0, len(history_chat_record))], [])
        return history_message

    def generate_prompt_question(self, prompt):
        # Render the user prompt through the workflow's template engine.
        return HumanMessage(self.workflow_manage.generate_prompt(prompt))

    def generate_message_list(self, system: str, prompt: str, history_message):
        # Bug fix: the condition was inverted — the system message was only added
        # when `system` was empty/None (producing an empty SystemMessage) and was
        # dropped when the user had actually configured one.
        if system is not None and len(system) > 0:
            return [SystemMessage(self.workflow_manage.generate_prompt(system)), *history_message,
                    HumanMessage(self.workflow_manage.generate_prompt(prompt))]
        else:
            return [*history_message, HumanMessage(self.workflow_manage.generate_prompt(prompt))]

    @staticmethod
    def reset_message_list(message_list: List[BaseMessage], answer_text):
        # Flatten LangChain messages into {'role','content'} dicts, then append
        # the final answer as an 'ai' entry.
        result = [{'role': 'user' if isinstance(message, HumanMessage) else 'ai', 'content': message.content}
                  for message in message_list]
        result.append({'role': 'ai', 'content': answer_text})
        return result

    def get_details(self, index: int, **kwargs):
        """Execution detail payload used by the run-details API."""
        return {
            'name': self.node.properties.get('stepName'),
            "index": index,
            'run_time': self.context.get('run_time'),
            'system': self.node_params.get('system'),
            'history_message': [{'content': message.content, 'role': message.type} for message in
                                (self.context.get('history_message') if self.context.get(
                                    'history_message') is not None else [])],
            'question': self.context.get('question'),
            'answer': self.context.get('answer'),
            'type': self.node.type,
            'message_tokens': self.context.get('message_tokens'),
            'answer_tokens': self.context.get('answer_tokens'),
            'status': self.status,
            'err_message': self.err_message
        }

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 15:30
@desc:
"""
from .impl import *

View File

@ -0,0 +1,61 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file i_search_dataset_node.py
@date2024/6/3 17:52
@desc:
"""
import re
from typing import Type
from django.core import validators
from rest_framework import serializers
from application.flow.i_step_node import INode, NodeResult
from common.util.field_message import ErrMessage
class DatasetSettingSerializer(serializers.Serializer):
    """Retrieval settings of the knowledge-base search node."""
    # Number of paragraphs to retrieve.
    top_n = serializers.IntegerField(required=True,
                                     error_messages=ErrMessage.integer("引用分段数"))
    # Similarity threshold (0-1 for embedding similarity; upper bound 2 kept as-is).
    # Bug fix: error message said "引用分段数" (copy-paste from top_n).
    similarity = serializers.FloatField(required=True, max_value=2, min_value=0,
                                        error_messages=ErrMessage.float("相似度"))
    # Bug fix: the alternation must be grouped — "^embedding|keywords|blend$"
    # matched any string starting with "embedding", containing... no: starting
    # with "embedding", OR containing nothing but ending with "blend", OR any
    # string containing "keywords"; also the message was copy-pasted from the
    # register/reset_password validator.
    search_mode = serializers.CharField(required=True, validators=[
        validators.RegexValidator(regex=re.compile("^(embedding|keywords|blend)$"),
                                  message="类型只支持embedding|keywords|blend", code=500)
    ], error_messages=ErrMessage.char("检索模式"))
    # Bug fix: use the integer error-message builder for an IntegerField.
    max_paragraph_char_number = serializers.IntegerField(required=True,
                                                         error_messages=ErrMessage.integer("最大引用分段字数"))
class SearchDatasetStepNodeSerializer(serializers.Serializer):
    """Parameters of the knowledge-base search node."""
    # Dataset ids to search.
    dataset_id_list = serializers.ListField(required=True, child=serializers.UUIDField(required=True),
                                            error_messages=ErrMessage.list("数据集id列表"))
    dataset_setting = DatasetSettingSerializer(required=True)
    # Reference path of the question value, e.g. ['start-node', 'question'].
    question_reference_address = serializers.ListField(required=True, )

    def is_valid(self, *, raise_exception=False):
        # Always raise on invalid data regardless of the caller's flag.
        super().is_valid(raise_exception=True)
class ISearchDatasetStepNode(INode):
    """Interface of the knowledge-base search workflow node."""
    type = 'search-dataset-node'

    def get_node_params_serializer_class(self) -> Type[serializers.Serializer]:
        return SearchDatasetStepNodeSerializer

    def _run(self):
        # Resolve the referenced question, e.g. ['start-node', 'question'].
        reference = self.node_params_serializer.data.get('question_reference_address')
        question = self.workflow_manage.get_reference_field(reference[0], reference[1:])
        return self.execute(**self.node_params_serializer.data, question=str(question),
                            exclude_paragraph_id_list=[])

    def execute(self, dataset_id_list, dataset_setting, question,
                exclude_paragraph_id_list=None,
                **kwargs) -> NodeResult:
        # Implemented by BaseSearchDatasetNode.
        pass

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 15:35
@desc:
"""
from .base_search_dataset_node import BaseSearchDatasetNode

View File

@ -0,0 +1,93 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file base_search_dataset_node.py
@date2024/6/4 11:56
@desc:
"""
import os
from typing import List, Dict
from django.db.models import QuerySet
from application.flow.i_step_node import NodeResult
from application.flow.step_node.search_dataset_node.i_search_dataset_node import ISearchDatasetStepNode
from common.config.embedding_config import EmbeddingModel, VectorStore
from common.db.search import native_search
from common.util.file_util import get_file_content
from dataset.models import Document, Paragraph
from embedding.models import SearchMode
from smartdoc.conf import PROJECT_DIR
class BaseSearchDatasetNode(ISearchDatasetStepNode):
    """Search the configured datasets for paragraphs similar to the question."""

    def execute(self, dataset_id_list, dataset_setting, question,
                exclude_paragraph_id_list=None,
                **kwargs) -> NodeResult:
        """Embed the question, query the vector store and shape the results.

        @param dataset_id_list: datasets to search
        @param dataset_setting: top_n / similarity / search_mode / max chars
        @param question: resolved question text
        @param exclude_paragraph_id_list: paragraph ids to skip
        """
        self.context['question'] = question
        embedding_model = EmbeddingModel.get_embedding_model()
        embedding_value = embedding_model.embed_query(question)
        vector = VectorStore.get_embedding_vector()
        # Skip paragraphs belonging to disabled documents.
        exclude_document_id_list = [str(document.id) for document in
                                    QuerySet(Document).filter(
                                        dataset_id__in=dataset_id_list,
                                        is_active=False)]
        embedding_list = vector.query(question, embedding_value, dataset_id_list, exclude_document_id_list,
                                      exclude_paragraph_id_list, True, dataset_setting.get('top_n'),
                                      dataset_setting.get('similarity'), SearchMode(dataset_setting.get('search_mode')))
        if embedding_list is None:
            # Bug fix: the empty path previously returned the misspelled key
            # 'is_hit_handling_method' and omitted 'data'/'directly_return'/
            # 'question', breaking downstream field references. Return the same
            # keys as the non-empty path.
            return NodeResult({'paragraph_list': [], 'is_hit_handling_method_list': [],
                               'data': '', 'directly_return': '', 'question': question}, {})
        paragraph_list = self.list_paragraph(embedding_list, vector)
        result = [self.reset_paragraph(paragraph, embedding_list) for paragraph in paragraph_list]
        return NodeResult({'paragraph_list': result,
                           'is_hit_handling_method_list': [row for row in result if row.get('is_hit_handling_method')],
                           'data': '\n'.join([paragraph.get('content') for paragraph in paragraph_list]),
                           'directly_return': '\n'.join([paragraph.get('content') for paragraph in result if
                                                         paragraph.get('is_hit_handling_method')]),
                           'question': question},
                          {})

    @staticmethod
    def reset_paragraph(paragraph: Dict, embedding_list: List):
        """Attach similarity and direct-return eligibility to a paragraph row."""
        filter_embedding_list = [embedding for embedding in embedding_list if
                                 str(embedding.get('paragraph_id')) == str(paragraph.get('id'))]
        if filter_embedding_list is not None and len(filter_embedding_list) > 0:
            find_embedding = filter_embedding_list[-1]
            return {
                **paragraph,
                'similarity': find_embedding.get('similarity'),
                'is_hit_handling_method': find_embedding.get('similarity') > paragraph.get(
                    'directly_return_similarity') and paragraph.get('hit_handling_method') == 'directly_return'
            }
        # Bug fix: previously this fell through and returned None, which made the
        # downstream `.get(...)` calls crash; return the paragraph unmarked instead.
        return {**paragraph, 'similarity': 0, 'is_hit_handling_method': False}

    @staticmethod
    def list_paragraph(embedding_list: List, vector):
        """Load the paragraph rows for the retrieved embeddings; purge stale vectors."""
        paragraph_id_list = [row.get('paragraph_id') for row in embedding_list]
        if paragraph_id_list is None or len(paragraph_id_list) == 0:
            return []
        paragraph_list = native_search(QuerySet(Paragraph).filter(id__in=paragraph_id_list),
                                       get_file_content(
                                           os.path.join(PROJECT_DIR, "apps", "application", 'sql',
                                                        'list_dataset_paragraph_by_paragraph_id.sql')),
                                       with_table_name=True)
        # The vector store may hold ids whose paragraphs were deleted; remove them.
        if len(paragraph_list) != len(paragraph_id_list):
            exist_paragraph_ids = {row.get('id') for row in paragraph_list}
            for paragraph_id in paragraph_id_list:
                if paragraph_id not in exist_paragraph_ids:
                    vector.delete_by_paragraph_id(paragraph_id)
        return paragraph_list

    def get_details(self, index: int, **kwargs):
        """Execution detail payload used by the run-details API."""
        return {
            'name': self.node.properties.get('stepName'),
            'question': self.context.get('question'),
            "index": index,
            'run_time': self.context.get('run_time'),
            'paragraph_list': self.context.get('paragraph_list'),
            'type': self.node.type,
            'status': self.status,
            'err_message': self.err_message
        }

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 15:30
@desc:
"""
from .impl import *

View File

@ -0,0 +1,26 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file i_start_node.py
@date2024/6/3 16:54
@desc:
"""
from typing import Type
from rest_framework import serializers
from application.flow.i_step_node import INode, NodeResult
class IStarNode(INode):
    """Interface of the workflow start node.

    NOTE(review): the name looks like a typo for "IStartNode"; keep it as-is
    since implementations import it under this name.
    """
    type = 'start-node'

    def get_node_params_serializer_class(self) -> Type[serializers.Serializer] | None:
        # The start node has no user-configurable parameters.
        return None

    def _run(self):
        # Start node consumes only flow-level params (question, ...).
        return self.execute(**self.flow_params_serializer.data)

    def execute(self, question, **kwargs) -> NodeResult:
        # Implemented by BaseStartStepNode.
        pass

View File

@ -0,0 +1,9 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file __init__.py
@date2024/6/11 15:36
@desc:
"""
from .base_start_node import BaseStartStepNode

View File

@ -0,0 +1,33 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file base_start_node.py
@date2024/6/3 17:17
@desc:
"""
import time
from datetime import datetime
from application.flow.i_step_node import NodeResult
from application.flow.step_node.start_node.i_start_node import IStarNode
class BaseStartStepNode(IStarNode):
    """Entry node: seeds node variables with the question and globals with timing."""

    def execute(self, question, **kwargs) -> NodeResult:
        # Global variables ('time', 'start_time') are visible to every later node.
        global_variables = {'time': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                            'start_time': time.time()}
        return NodeResult({'question': question}, global_variables)

    def get_details(self, index: int, **kwargs):
        """Execution detail payload used by the run-details API."""
        return {
            'name': self.node.properties.get('stepName'),
            "index": index,
            "question": self.context.get('question'),
            'run_time': self.context.get('run_time'),
            'type': self.node.type,
            'status': self.status,
            'err_message': self.err_message
        }

View File

@ -0,0 +1,87 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file utils.py
@date2024/6/6 15:15
@desc:
"""
import json
from typing import Iterator
from django.http import StreamingHttpResponse
from langchain_core.messages import BaseMessageChunk, BaseMessage
from application.flow.i_step_node import WorkFlowPostHandler
from common.response import result
def event_content(chat_id, chat_record_id, response, workflow,
                  write_context,
                  post_handler: WorkFlowPostHandler):
    """Generator yielding SSE 'data:' lines for a streamed model response.

    @param chat_id: chat session id
    @param chat_record_id: chat record id
    @param response: iterable of message chunks
    @param workflow: workflow manager
    @param write_context: callback persisting the final answer on the node
    @param post_handler: post-processing hook run after output completes
    """
    def sse(content, is_end):
        payload = {'chat_id': str(chat_id), 'id': str(chat_record_id), 'operate': True,
                   'content': content, 'is_end': is_end}
        return 'data: ' + json.dumps(payload, ensure_ascii=False) + "\n\n"

    answer = ''
    try:
        for chunk in response:
            answer += chunk.content
            yield sse(chunk.content, False)
        write_context(answer, 200)
        post_handler.handler(chat_id, chat_record_id, answer, workflow)
        yield sse('', True)
    except Exception as e:
        # Report the error text as the final chunk so the client terminates cleanly.
        answer = str(e)
        write_context(answer, 500)
        post_handler.handler(chat_id, chat_record_id, answer, workflow)
        yield sse(answer, True)
def to_stream_response(chat_id, chat_record_id, response: Iterator[BaseMessageChunk], workflow, write_context,
                       post_handler):
    """Wrap a chunked model response into a server-sent-events HTTP response.

    @param chat_id: chat session id
    @param chat_record_id: chat record id
    @param response: iterator of message chunks
    @param workflow: workflow manager
    @param write_context: callback persisting the final answer on the node
    @param post_handler: post-processing hook
    @return: StreamingHttpResponse emitting SSE frames
    """
    stream = event_content(chat_id, chat_record_id, response, workflow, write_context, post_handler)
    http_response = StreamingHttpResponse(streaming_content=stream,
                                          content_type='text/event-stream;charset=utf-8',
                                          charset='utf-8')
    http_response['Cache-Control'] = 'no-cache'
    return http_response
def to_response(chat_id, chat_record_id, response: BaseMessage, workflow, write_context,
                post_handler: WorkFlowPostHandler):
    """Convert a completed model message into a plain JSON API response.

    @param chat_id: chat session id
    @param chat_record_id: chat record id
    @param response: the completed message
    @param workflow: workflow manager
    @param write_context: callback persisting the final answer on the node
    @param post_handler: post-processing hook
    @return: success response with the answer payload
    """
    answer = response.content
    write_context(answer)
    post_handler.handler(chat_id, chat_record_id, answer, workflow)
    payload = {'chat_id': str(chat_id), 'id': str(chat_record_id), 'operate': True,
               'content': answer, 'is_end': True}
    return result.success(payload)

View File

@ -0,0 +1,282 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file workflow_manage.py
@date2024/1/9 17:40
@desc:
"""
from functools import reduce
from typing import List, Dict
from langchain_core.messages import AIMessageChunk, AIMessage
from langchain_core.prompts import PromptTemplate
from application.flow import tools
from application.flow.i_step_node import INode, WorkFlowPostHandler, NodeResult
from application.flow.step_node import get_node
from common.exception.app_exception import AppApiException
class Edge:
    """A directed connection between two workflow nodes.

    Extra attributes from the raw edge dict (e.g. ``sourceAnchorId``) are
    attached dynamically so validation code can inspect them.
    """

    def __init__(self, _id: str, _type: str, sourceNodeId: str, targetNodeId: str, **keywords):
        self.id = _id
        self.type = _type
        self.sourceNodeId = sourceNodeId
        self.targetNodeId = targetNodeId
        for key, value in keywords.items():
            setattr(self, key, value)
class Node:
    """A workflow node definition parsed from the LogicFlow JSON.

    Unknown keys of the raw node dict are attached as attributes dynamically.
    """

    def __init__(self, _id: str, _type: str, x: int, y: int, properties: dict, **kwargs):
        self.id = _id
        self.type = _type
        self.x = x
        self.y = y
        self.properties = properties
        for key, value in kwargs.items():
            setattr(self, key, value)
end_nodes = ['ai-chat-node', 'reply-node']
class Flow:
    """In-memory graph of a workflow: parsed nodes plus directed edges."""

    def __init__(self, nodes: List[Node], edges: List[Edge]):
        self.nodes = nodes
        self.edges = edges

    @staticmethod
    def new_instance(flow_obj: Dict):
        # Build typed Node/Edge wrappers from the raw LogicFlow JSON dict.
        nodes = flow_obj.get('nodes')
        edges = flow_obj.get('edges')
        nodes = [Node(node.get('id'), node.get('type'), **node)
                 for node in nodes]
        edges = [Edge(edge.get('id'), edge.get('type'), **edge) for edge in edges]
        return Flow(nodes, edges)

    def get_start_node(self):
        # The start node is identified by its fixed id 'start-node'.
        start_node_list = [node for node in self.nodes if node.id == 'start-node']
        return start_node_list[0]

    def is_valid(self):
        """
        Validate the workflow graph; raises AppApiException on the first problem.
        """
        self.is_valid_start_node()
        self.is_valid_base_node()
        self.is_valid_work_flow()

    @staticmethod
    def is_valid_node_params(node: Node):
        # Instantiating the node implementation runs its params serializer validation.
        get_node(node.type)(node, None, None)

    def is_valid_node(self, node: Node):
        self.is_valid_node_params(node)
        if node.type == 'condition-node':
            # Every branch anchor ("<node>_<branch>_right") must have exactly one outgoing edge.
            branch_list = node.properties.get('node_data').get('branch')
            for branch in branch_list:
                source_anchor_id = f"{node.id}_{branch.get('id')}_right"
                edge_list = [edge for edge in self.edges if edge.sourceAnchorId == source_anchor_id]
                if len(edge_list) == 0:
                    raise AppApiException(500,
                                          f'{node.properties.get("stepName")} 节点的{branch.get("type")}分支需要连接')
                elif len(edge_list) > 1:
                    raise AppApiException(500,
                                          f'{node.properties.get("stepName")} 节点的{branch.get("type")}分支不能连接俩个节点')
        else:
            # Non-branching nodes: at most one outgoing edge; only designated end
            # node types may have none.
            edge_list = [edge for edge in self.edges if edge.sourceNodeId == node.id]
            if len(edge_list) == 0 and not end_nodes.__contains__(node.type):
                raise AppApiException(500, f'{node.properties.get("stepName")} 节点不能当做结束节点')
            elif len(edge_list) > 1:
                raise AppApiException(500,
                                      f'{node.properties.get("stepName")} 节点不能连接俩个节点')

    def get_next_nodes(self, node: Node):
        # All direct successors of `node` reachable via its outgoing edges.
        edge_list = [edge for edge in self.edges if edge.sourceNodeId == node.id]
        node_list = reduce(lambda x, y: [*x, *y],
                           [[node for node in self.nodes if node.id == edge.targetNodeId] for edge in edge_list],
                           [])
        if len(node_list) == 0 and not end_nodes.__contains__(node.type):
            raise AppApiException(500,
                                  f'不存在的下一个节点')
        return node_list

    def is_valid_work_flow(self, up_node=None):
        # Depth-first walk validating every reachable node.
        # NOTE(review): the recursion does not track visited nodes — a cyclic
        # graph would recurse forever; confirm cycles are prevented upstream.
        if up_node is None:
            up_node = self.get_start_node()
        self.is_valid_node(up_node)
        next_nodes = self.get_next_nodes(up_node)
        for next_node in next_nodes:
            self.is_valid_work_flow(next_node)

    def is_valid_start_node(self):
        # Exactly one start node is required.
        start_node_list = [node for node in self.nodes if node.id == 'start-node']
        if len(start_node_list) == 0:
            raise AppApiException(500, '开始节点必填')
        if len(start_node_list) > 1:
            raise AppApiException(500, '开始节点只能有一个')

    def is_valid_base_node(self):
        # Exactly one base-info node is required.
        base_node_list = [node for node in self.nodes if node.id == 'base-node']
        if len(base_node_list) == 0:
            raise AppApiException(500, '基本信息节点必填')
        if len(base_node_list) > 1:
            raise AppApiException(500, '基本信息节点只能有一个')
class WorkflowManage:
def __init__(self, flow: Flow, params, work_flow_post_handler: WorkFlowPostHandler):
self.params = params
self.flow = flow
self.context = {}
self.node_context = []
self.work_flow_post_handler = work_flow_post_handler
self.current_node = None
self.current_result = None
def run(self):
"""
运行工作流
"""
try:
while self.has_next_node(self.current_result):
self.current_node = self.get_next_node()
self.node_context.append(self.current_node)
self.current_result = self.current_node.run()
if self.has_next_node(self.current_result):
self.current_result.write_context(self.current_node, self)
else:
r = self.current_result.to_response(self.params['chat_id'], self.params['chat_record_id'],
self.current_node, self,
self.work_flow_post_handler)
return r
except Exception as e:
if self.params.get('stream'):
return tools.to_stream_response(self.params['chat_id'], self.params['chat_record_id'],
iter([AIMessageChunk(str(e))]), self,
self.current_node.get_write_error_context(e),
self.work_flow_post_handler)
else:
return tools.to_response(self.params['chat_id'], self.params['chat_record_id'],
AIMessage(str(e)), self, self.current_node.get_write_error_context(e),
self.work_flow_post_handler)
def has_next_node(self, node_result: NodeResult | None):
"""
是否有下一个可运行的节点
"""
if self.current_node is None:
if self.get_start_node() is not None:
return True
else:
if node_result is not None and node_result.is_assertion_result():
for edge in self.flow.edges:
if (edge.sourceNodeId == self.current_node.id and
f"{edge.sourceNodeId}_{node_result.node_variable.get('branch_id')}_right" == edge.sourceAnchorId):
return True
else:
for edge in self.flow.edges:
if edge.sourceNodeId == self.current_node.id:
return True
return False
def get_runtime_details(self):
details_result = {}
for index in range(len(self.node_context)):
node = self.node_context[index]
details = node.get_details(index)
details_result[node.id] = details
return details_result
def get_next_node(self):
"""
获取下一个可运行的所有节点
"""
if self.current_node is None:
node = self.get_start_node()
node_instance = get_node(node.type)(node, self.params, self.context)
return node_instance
if self.current_result is not None and self.current_result.is_assertion_result():
for edge in self.flow.edges:
if (edge.sourceNodeId == self.current_node.id and
f"{edge.sourceNodeId}_{self.current_result.node_variable.get('branch_id')}_right" == edge.sourceAnchorId):
return self.get_node_cls_by_id(edge.targetNodeId)
else:
for edge in self.flow.edges:
if edge.sourceNodeId == self.current_node.id:
return self.get_node_cls_by_id(edge.targetNodeId)
return None
def get_reference_field(self, node_id: str, fields: List[str]):
"""
@param node_id: 节点id
@param fields: 字段
@return:
"""
if node_id == 'global':
return INode.get_field(self.context, fields)
else:
return self.get_node_by_id(node_id).get_reference_field(fields)
def generate_prompt(self, prompt: str):
"""
格式化生成提示词
@param prompt: 提示词信息
@return: 格式化后的提示词
"""
context = {
'global': self.context,
}
for node in self.node_context:
properties = node.node.properties
node_config = properties.get('config')
if node_config is not None:
fields = node_config.get('fields')
if fields is not None:
for field in fields:
globeLabel = f"{properties.get('stepName')}.{field.get('value')}"
globeValue = f"context['{node.id}'].{field.get('value')}"
prompt = prompt.replace(globeLabel, globeValue)
global_fields = node_config.get('globalFields')
if global_fields is not None:
for field in global_fields:
globeLabel = f"全局变量.{field.get('value')}"
globeValue = f"context['global'].{field.get('value')}"
prompt = prompt.replace(globeLabel, globeValue)
context[node.id] = node.context
prompt_template = PromptTemplate.from_template(prompt, template_format='jinja2')
value = prompt_template.format(context=context)
return value
def get_start_node(self):
"""
获取启动节点
@return:
"""
start_node_list = [node for node in self.flow.nodes if node.type == 'start-node']
return start_node_list[0]
def get_node_cls_by_id(self, node_id):
for node in self.flow.nodes:
if node.id == node_id:
node_instance = get_node(node.type)(node,
self.params, self)
return node_instance
return None
def get_node_by_id(self, node_id):
    """Return the already-executed node instance with the given id, or None."""
    matches = (node for node in self.node_context if node.id == node_id)
    return next(matches, None)
def get_node_reference(self, reference_address: Dict):
    """Dereference a {'node_id': ..., 'node_field': ...} address against an
    executed node's context and return the stored value."""
    source_node = self.get_node_by_id(reference_address.get('node_id'))
    field_name = reference_address.get('node_field')
    return source_node.context[field_name]

View File

@ -0,0 +1,38 @@
# Generated by Django 4.1.13 on 2024-06-25 16:30
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Add workflow support: 'type' and 'work_flow' columns on Application,
    plus the WorkFlowVersion snapshot table."""

    dependencies = [
        ('application', '0008_chat_is_deleted'),
    ]

    operations = [
        # Distinguish classic (SIMPLE) applications from workflow-driven ones.
        migrations.AddField(
            model_name='application',
            name='type',
            field=models.CharField(choices=[('SIMPLE', '简易'), ('WORK_FLOW', '工作流')], default='SIMPLE', max_length=256, verbose_name='应用类型'),
        ),
        # The application's current (draft) workflow graph, stored as JSON.
        migrations.AddField(
            model_name='application',
            name='work_flow',
            field=models.JSONField(default=dict, verbose_name='工作流数据'),
        ),
        # Immutable snapshots of published workflow graphs.
        migrations.CreateModel(
            name='WorkFlowVersion',
            fields=[
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='修改时间')),
                ('id', models.UUIDField(default=uuid.uuid1, editable=False, primary_key=True, serialize=False, verbose_name='主键id')),
                ('work_flow', models.JSONField(default=dict, verbose_name='工作流数据')),
                ('application', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='application.application')),
            ],
            options={
                'db_table': 'application_work_flow_version',
            },
        ),
    ]

View File

@ -6,6 +6,8 @@
@date2023/9/25 14:24
@desc:
"""
import datetime
import json
import uuid
from django.contrib.postgres.fields import ArrayField
@ -18,6 +20,12 @@ from setting.models.model_management import Model
from users.models import User
class ApplicationTypeChoices(models.TextChoices):
    """Application type choices (the original docstring said "order type" — a copy-paste slip)."""
    # Classic single-model application.
    SIMPLE = 'SIMPLE', '简易'
    # Application driven by a node/edge workflow graph.
    WORK_FLOW = 'WORK_FLOW', '工作流'
def get_dataset_setting_dict():
return {'top_n': 3, 'similarity': 0.6, 'max_paragraph_char_number': 5000, 'search_mode': 'embedding',
'no_references_setting': {
@ -42,6 +50,9 @@ class Application(AppModelMixin):
model_setting = models.JSONField(verbose_name="模型参数相关设置", default=get_model_setting_dict)
problem_optimization = models.BooleanField(verbose_name="问题优化", default=False)
icon = models.CharField(max_length=256, verbose_name="应用icon", default="/ui/favicon.ico")
work_flow = models.JSONField(verbose_name="工作流数据", default=dict)
type = models.CharField(verbose_name="应用类型", choices=ApplicationTypeChoices.choices,
default=ApplicationTypeChoices.SIMPLE, max_length=256)
@staticmethod
def get_default_model_prompt():
@ -61,6 +72,15 @@ class Application(AppModelMixin):
db_table = "application"
class WorkFlowVersion(AppModelMixin):
    """Snapshot of an application's workflow graph, created when it is published."""
    # uuid1-based primary key (roughly time-ordered).
    id = models.UUIDField(primary_key=True, max_length=128, default=uuid.uuid1, editable=False, verbose_name="主键id")
    application = models.ForeignKey(Application, on_delete=models.CASCADE)
    # Full workflow graph (nodes + edges) as JSON.
    work_flow = models.JSONField(verbose_name="工作流数据", default=dict)

    class Meta:
        db_table = "application_work_flow_version"
class ApplicationDatasetMapping(AppModelMixin):
id = models.UUIDField(primary_key=True, max_length=128, default=uuid.uuid1, editable=False, verbose_name="主键id")
application = models.ForeignKey(Application, on_delete=models.CASCADE)
@ -88,6 +108,16 @@ class VoteChoices(models.TextChoices):
TRAMPLE = 1, '反对'
class DateEncoder(json.JSONEncoder):
    """JSON encoder used for chat-record details: serializes UUIDs and
    date/datetime values that json cannot handle natively.

    Anything else falls through to json.JSONEncoder.default, which raises
    TypeError as usual.
    """

    def default(self, obj):
        if isinstance(obj, uuid.UUID):
            return str(obj)
        if isinstance(obj, datetime.datetime):
            return obj.strftime("%Y-%m-%d %H:%M:%S")
        # Bare dates previously raised TypeError; handle them too. This check
        # must come after datetime.datetime, which is a date subclass.
        if isinstance(obj, datetime.date):
            return obj.strftime("%Y-%m-%d")
        return json.JSONEncoder.default(self, obj)
class ChatRecord(AppModelMixin):
"""
对话日志 详情
@ -101,7 +131,7 @@ class ChatRecord(AppModelMixin):
message_tokens = models.IntegerField(verbose_name="请求token数量", default=0)
answer_tokens = models.IntegerField(verbose_name="响应token数量", default=0)
const = models.IntegerField(verbose_name="总费用", default=0)
details = models.JSONField(verbose_name="对话详情", default=dict)
details = models.JSONField(verbose_name="对话详情", default=dict, encoder=DateEncoder)
improve_paragraph_id_list = ArrayField(verbose_name="改进标注列表",
base_field=models.UUIDField(max_length=128, blank=True)
, default=list)

View File

@ -7,6 +7,7 @@
@desc:
"""
import hashlib
import json
import os
import re
import uuid
@ -22,7 +23,8 @@ from django.http import HttpResponse
from django.template import Template, Context
from rest_framework import serializers
from application.models import Application, ApplicationDatasetMapping
from application.flow.workflow_manage import Flow
from application.models import Application, ApplicationDatasetMapping, ApplicationTypeChoices, WorkFlowVersion
from application.models.api_key_model import ApplicationAccessToken, ApplicationApiKey
from common.config.embedding_config import VectorStore, EmbeddingModel
from common.constants.authentication_type import AuthenticationType
@ -105,6 +107,47 @@ class ModelSettingSerializer(serializers.Serializer):
prompt = serializers.CharField(required=True, max_length=2048, error_messages=ErrMessage.char("提示词"))
class ApplicationWorkflowSerializer(serializers.Serializer):
    """Validates the minimal payload used to create a WORK_FLOW type application."""
    name = serializers.CharField(required=True, max_length=64, min_length=1, error_messages=ErrMessage.char("应用名称"))
    desc = serializers.CharField(required=False, allow_null=True, allow_blank=True,
                                 max_length=256, min_length=1,
                                 error_messages=ErrMessage.char("应用描述"))
    prologue = serializers.CharField(required=False, allow_null=True, allow_blank=True, max_length=4096,
                                     error_messages=ErrMessage.char("开场白"))

    @staticmethod
    def to_application_model(user_id: str, application: Dict):
        """Build an Application model seeded with the bundled default workflow graph.

        The default graph's base node is overwritten with the submitted
        name/desc/prologue so the editor opens pre-filled.
        @param user_id: owner of the new application
        @param application: validated creation payload (name/desc/prologue)
        @return: an unsaved Application instance of type WORK_FLOW
        """
        default_workflow_json = get_file_content(
            os.path.join(PROJECT_DIR, "apps", "application", 'flow', 'default_workflow.json'))
        default_workflow = json.loads(default_workflow_json)
        for node in default_workflow.get('nodes'):
            if node.get('id') == 'base-node':
                node.get('properties')['node_data'] = {"desc": application.get('desc'),
                                                       "name": application.get('name'),
                                                       "prologue": application.get('prologue')}
        # Simple-application fields are intentionally left empty/disabled:
        # workflow apps keep their configuration inside the work_flow graph.
        return Application(id=uuid.uuid1(),
                           name=application.get('name'),
                           desc=application.get('desc'),
                           prologue="",
                           dialogue_number=0,
                           user_id=user_id, model_id=None,
                           dataset_setting={},
                           model_setting={},
                           problem_optimization=False,
                           type=ApplicationTypeChoices.WORK_FLOW,
                           work_flow=default_workflow
                           )
def get_base_node_work_flow(work_flow):
    """Return the last node with id 'base-node' from a workflow definition.

    @param work_flow: workflow definition dict (may lack a 'nodes' key)
    @return: the base-node dict, or None when absent

    Robustness fix: a definition without a 'nodes' key previously raised
    TypeError when iterating None; it now returns None.
    """
    node_list = work_flow.get('nodes') or []
    base_node_list = [node for node in node_list if node.get('id') == 'base-node']
    # When duplicates exist, the most recently appended one wins (original behaviour).
    return base_node_list[-1] if base_node_list else None
class ApplicationSerializer(serializers.Serializer):
name = serializers.CharField(required=True, max_length=64, min_length=1, error_messages=ErrMessage.char("应用名称"))
desc = serializers.CharField(required=False, allow_null=True, allow_blank=True,
@ -123,6 +166,13 @@ class ApplicationSerializer(serializers.Serializer):
model_setting = ModelSettingSerializer(required=True)
# 问题补全
problem_optimization = serializers.BooleanField(required=True, error_messages=ErrMessage.boolean("问题补全"))
# 应用类型
type = serializers.CharField(required=True, error_messages=ErrMessage.char("应用类型"),
validators=[
validators.RegexValidator(regex=re.compile("^SIMPLE|WORK_FLOW$"),
message="应用类型只支持SIMPLE|WORK_FLOW", code=500)
]
)
def is_valid(self, *, user_id=None, raise_exception=False):
super().is_valid(raise_exception=True)
@ -281,6 +331,24 @@ class ApplicationSerializer(serializers.Serializer):
@transaction.atomic
def insert(self, application: Dict):
    """Create an application, dispatching on its declared type.

    @param application: creation payload; 'type' selects the path
    @return: serialized data of the created application
    """
    # Workflow applications follow a dedicated creation path.
    if application.get('type') == 'WORK_FLOW':
        return self.insert_workflow(application)
    return self.insert_simple(application)
def insert_workflow(self, application: Dict):
    """Create a WORK_FLOW application seeded with the default graph.

    @param application: creation payload validated by ApplicationWorkflowSerializer
    @return: serialized data of the new application
    """
    self.is_valid(raise_exception=True)
    user_id = self.data.get('user_id')
    ApplicationWorkflowSerializer(data=application).is_valid(raise_exception=True)
    application_model = ApplicationWorkflowSerializer.to_application_model(user_id, application)
    application_model.save()
    # Provision the access credential: a 16-char token sliced from the md5 of a uuid1.
    ApplicationAccessToken(application_id=application_model.id,
                           access_token=hashlib.md5(str(uuid.uuid1()).encode()).hexdigest()[8:24]).save()
    return ApplicationSerializerModel(application_model).data
def insert_simple(self, application: Dict):
self.is_valid(raise_exception=True)
user_id = self.data.get('user_id')
ApplicationSerializer(data=application).is_valid(user_id=user_id, raise_exception=True)
@ -296,7 +364,7 @@ class ApplicationSerializer(serializers.Serializer):
access_token=hashlib.md5(str(uuid.uuid1()).encode()).hexdigest()[8:24]).save()
# 插入关联数据
QuerySet(ApplicationDatasetMapping).bulk_create(application_dataset_mapping_model_list)
return True
return ApplicationSerializerModel(application_model).data
@staticmethod
def to_application_model(user_id: str, application: Dict):
@ -306,7 +374,9 @@ class ApplicationSerializer(serializers.Serializer):
user_id=user_id, model_id=application.get('model_id'),
dataset_setting=application.get('dataset_setting'),
model_setting=application.get('model_setting'),
problem_optimization=application.get('problem_optimization')
problem_optimization=application.get('problem_optimization'),
type=ApplicationTypeChoices.SIMPLE,
work_flow={}
)
@staticmethod
@ -420,7 +490,7 @@ class ApplicationSerializer(serializers.Serializer):
class ApplicationModel(serializers.ModelSerializer):
class Meta:
model = Application
fields = ['id', 'name', 'desc', 'prologue', 'dialogue_number', 'icon']
fields = ['id', 'name', 'desc', 'prologue', 'dialogue_number', 'icon', 'type']
class IconOperate(serializers.Serializer):
application_id = serializers.UUIDField(required=True, error_messages=ErrMessage.uuid("应用id"))
@ -463,6 +533,27 @@ class ApplicationSerializer(serializers.Serializer):
QuerySet(Application).filter(id=self.data.get('application_id')).delete()
return True
def publish(self, instance, with_valid=True):
    """Publish an application's workflow.

    Validates the submitted graph, syncs the base node's name/desc/prologue
    onto the application, stores the graph as the current draft, and writes a
    WorkFlowVersion snapshot (chat sessions run against the latest snapshot).

    @param instance: request payload; must contain 'work_flow'
    @param with_valid: whether to validate this serializer first
    @return: True on success
    @raise AppApiException: application missing or work_flow absent/invalid
    """
    if with_valid:
        self.is_valid()
    application = QuerySet(Application).filter(id=self.data.get("application_id")).first()
    # Robustness fix: a deleted/unknown application previously surfaced as an
    # AttributeError below; fail with the same explicit error used elsewhere.
    if application is None:
        raise AppApiException(500, "应用不存在")
    work_flow = instance.get('work_flow')
    if work_flow is None:
        raise AppApiException(500, "work_flow是必填字段")
    # Structural validation of the node/edge graph before anything is persisted.
    Flow.new_instance(work_flow).is_valid()
    base_node = get_base_node_work_flow(work_flow)
    if base_node is not None:
        node_data = base_node.get('properties').get('node_data')
        if node_data is not None:
            # Keep the application's display metadata in sync with the base node.
            application.name = node_data.get('name')
            application.desc = node_data.get('desc')
            application.prologue = node_data.get('prologue')
    application.work_flow = work_flow
    application.save()
    # Snapshot the published graph as a new immutable version.
    work_flow_version = WorkFlowVersion(work_flow=work_flow, application=application)
    work_flow_version.save()
    return True
def one(self, with_valid=True):
if with_valid:
self.is_valid()
@ -507,7 +598,7 @@ class ApplicationSerializer(serializers.Serializer):
raise AppApiException(500, "模型不存在")
update_keys = ['name', 'desc', 'model_id', 'multiple_rounds_dialogue', 'prologue', 'status',
'dataset_setting', 'model_setting', 'problem_optimization',
'api_key_is_active', 'icon']
'api_key_is_active', 'icon', 'work_flow']
for update_key in update_keys:
if update_key in instance and instance.get(update_key) is not None:
if update_key == 'multiple_rounds_dialogue':

View File

@ -7,6 +7,7 @@
@desc:
"""
import json
import uuid
from typing import List
from uuid import UUID
@ -22,7 +23,10 @@ from application.chat_pipeline.step.generate_human_message_step.impl.base_genera
BaseGenerateHumanMessageStep
from application.chat_pipeline.step.reset_problem_step.impl.base_reset_problem_step import BaseResetProblemStep
from application.chat_pipeline.step.search_dataset_step.impl.base_search_dataset_step import BaseSearchDatasetStep
from application.models import ChatRecord, Chat, Application, ApplicationDatasetMapping
from application.flow.i_step_node import WorkFlowPostHandler
from application.flow.workflow_manage import WorkflowManage, Flow
from application.models import ChatRecord, Chat, Application, ApplicationDatasetMapping, ApplicationTypeChoices, \
WorkFlowVersion
from application.models.api_key_model import ApplicationPublicAccessClient, ApplicationAccessToken
from common.constants.authentication_type import AuthenticationType
from common.exception.app_exception import AppApiException, AppChatNumOutOfBoundsFailed
@ -39,10 +43,11 @@ chat_cache = caches['model_cache']
class ChatInfo:
def __init__(self,
chat_id: str,
chat_model: BaseChatModel,
chat_model: BaseChatModel | None,
dataset_id_list: List[str],
exclude_document_id_list: list[str],
application: Application):
application: Application,
work_flow_version: WorkFlowVersion = None):
"""
:param chat_id: 对话id
:param chat_model: 对话模型
@ -56,6 +61,7 @@ class ChatInfo:
self.dataset_id_list = dataset_id_list
self.exclude_document_id_list = exclude_document_id_list
self.chat_record_list: List[ChatRecord] = []
self.work_flow_version = work_flow_version
def to_base_pipeline_manage_params(self):
dataset_setting = self.application.dataset_setting
@ -146,8 +152,10 @@ class ChatMessageSerializer(serializers.Serializer):
client_id = serializers.CharField(required=True, error_messages=ErrMessage.char("客户端id"))
client_type = serializers.CharField(required=True, error_messages=ErrMessage.char("客户端类型"))
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
def is_valid_application_workflow(self, *, raise_exception=False):
self.is_valid_intraday_access_num()
def is_valid_intraday_access_num(self):
if self.data.get('client_type') == AuthenticationType.APPLICATION_ACCESS_TOKEN.value:
access_client = QuerySet(ApplicationPublicAccessClient).filter(id=self.data.get('client_id')).first()
if access_client is None:
@ -161,12 +169,9 @@ class ChatMessageSerializer(serializers.Serializer):
application_id=self.data.get('application_id')).first()
if application_access_token.access_num <= access_client.intraday_access_num:
raise AppChatNumOutOfBoundsFailed(1002, "访问次数超过今日访问量")
chat_id = self.data.get('chat_id')
chat_info: ChatInfo = chat_cache.get(chat_id)
if chat_info is None:
chat_info = self.re_open_chat(chat_id)
chat_cache.set(chat_id,
chat_info, timeout=60 * 30)
def is_valid_application_simple(self, *, chat_info: ChatInfo, raise_exception=False):
self.is_valid_intraday_access_num()
model = chat_info.application.model
if model is None:
return chat_info
@ -179,8 +184,7 @@ class ChatMessageSerializer(serializers.Serializer):
raise AppApiException(500, "模型正在下载中,请稍后再发起对话")
return chat_info
def chat(self):
chat_info = self.is_valid(raise_exception=True)
def chat_simple(self, chat_info: ChatInfo):
message = self.data.get('message')
re_chat = self.data.get('re_chat')
stream = self.data.get('stream')
@ -211,14 +215,54 @@ class ChatMessageSerializer(serializers.Serializer):
pipeline_message.run(params)
return pipeline_message.context['chat_result']
@staticmethod
def re_open_chat(chat_id: str):
def chat_work_flow(self, chat_info: ChatInfo):
    """Answer one message by executing the application's published workflow.

    Builds a WorkflowManage over the published graph, feeding it the chat
    history, the user question, and a fresh chat_record_id; the
    WorkFlowPostHandler presumably persists the resulting record — confirm in
    i_step_node. @return: the workflow run result (streaming or not per 'stream').
    """
    message = self.data.get('message')
    re_chat = self.data.get('re_chat')
    stream = self.data.get('stream')
    client_id = self.data.get('client_id')
    client_type = self.data.get('client_type')
    work_flow_manage = WorkflowManage(Flow.new_instance(chat_info.work_flow_version.work_flow),
                                      {'history_chat_record': chat_info.chat_record_list, 'question': message,
                                       'chat_id': chat_info.chat_id, 'chat_record_id': str(uuid.uuid1()),
                                       'stream': stream,
                                       're_chat': re_chat}, WorkFlowPostHandler(chat_info, client_id, client_type))
    r = work_flow_manage.run()
    return r
def chat(self):
    """Validate the request and dispatch to the simple or workflow chat pipeline.

    @return: the chat result produced by chat_simple or chat_work_flow
    """
    super().is_valid(raise_exception=True)
    chat_info = self.get_chat_info()
    if chat_info.application.type == ApplicationTypeChoices.SIMPLE:
        # Fix: the original statement ended with a stray trailing comma, which
        # silently wrapped the (discarded) result in a 1-tuple.
        self.is_valid_application_simple(raise_exception=True, chat_info=chat_info)
        return self.chat_simple(chat_info)
    else:
        self.is_valid_application_workflow(raise_exception=True)
        return self.chat_work_flow(chat_info)
def get_chat_info(self):
    """Fetch the cached ChatInfo for this chat, rebuilding and re-caching it on a miss.

    @return: the ChatInfo session object
    """
    self.is_valid(raise_exception=True)
    chat_id = self.data.get('chat_id')
    cached = chat_cache.get(chat_id)
    if cached is None:
        cached = self.re_open_chat(chat_id)
        # Keep the rebuilt session warm for 30 minutes.
        chat_cache.set(chat_id, cached, timeout=60 * 30)
    return cached
def re_open_chat(self, chat_id: str):
    """Rebuild the ChatInfo for a chat that fell out of the cache.

    @param chat_id: id of the persisted chat
    @return: a fresh ChatInfo
    @raise AppApiException: when the chat or its application no longer exists
    """
    chat = QuerySet(Chat).filter(id=chat_id).first()
    if chat is None:
        raise AppApiException(500, "会话不存在")
    application = QuerySet(Application).filter(id=chat.application_id).first()
    if application is None:
        raise AppApiException(500, "应用不存在")
    # Simple and workflow applications rebuild their session state differently.
    if application.type == ApplicationTypeChoices.SIMPLE:
        return self.re_open_chat_simple(chat_id, application)
    else:
        return self.re_open_chat_work_flow(chat_id, application)
@staticmethod
def re_open_chat_simple(chat_id, application):
model = QuerySet(Model).filter(id=application.model_id).first()
chat_model = None
if model is not None:
@ -238,3 +282,11 @@ class ChatMessageSerializer(serializers.Serializer):
dataset_id__in=dataset_id_list,
is_active=False)]
return ChatInfo(chat_id, chat_model, dataset_id_list, exclude_document_id_list, application)
@staticmethod
def re_open_chat_work_flow(chat_id, application):
    """Rebuild a workflow ChatInfo from the application's latest published version.

    @param chat_id: id of the persisted chat
    @param application: the owning Application (type WORK_FLOW)
    @raise AppApiException: when the application has never been published
    """
    # order_by('-create_time').first() already returns the newest row; the
    # original's extra [0:1] slice before .first() was redundant.
    work_flow_version = QuerySet(WorkFlowVersion).filter(application_id=application.id).order_by(
        '-create_time').first()
    if work_flow_version is None:
        raise AppApiException(500, "应用未发布,请发布后再使用")
    return ChatInfo(chat_id, None, [], [], application, work_flow_version)

View File

@ -22,7 +22,9 @@ from django.db.models import QuerySet, Q
from django.http import HttpResponse
from rest_framework import serializers
from application.models import Chat, Application, ApplicationDatasetMapping, VoteChoices, ChatRecord
from application.flow.workflow_manage import Flow
from application.models import Chat, Application, ApplicationDatasetMapping, VoteChoices, ChatRecord, WorkFlowVersion, \
ApplicationTypeChoices
from application.models.api_key_model import ApplicationAccessToken
from application.serializers.application_serializers import ModelDatasetAssociation, DatasetSettingSerializer, \
ModelSettingSerializer
@ -45,6 +47,11 @@ from smartdoc.conf import PROJECT_DIR
chat_cache = caches['model_cache']
class WorkFlowSerializers(serializers.Serializer):
    """Shape of a workflow graph payload: a list of node dicts and a list of edge dicts."""
    # NOTE(review): ErrMessage.uuid on list fields looks like a copy-paste from
    # a uuid field — confirm whether a list/char message was intended.
    nodes = serializers.ListSerializer(child=serializers.DictField(), error_messages=ErrMessage.uuid("节点"))
    edges = serializers.ListSerializer(child=serializers.DictField(), error_messages=ErrMessage.uuid("连线"))
class ChatSerializers(serializers.Serializer):
class Operate(serializers.Serializer):
chat_id = serializers.UUIDField(required=True, error_messages=ErrMessage.uuid("对话id"))
@ -207,6 +214,27 @@ class ChatSerializers(serializers.Serializer):
self.is_valid(raise_exception=True)
application_id = self.data.get('application_id')
application = QuerySet(Application).get(id=application_id)
if application.type == ApplicationTypeChoices.SIMPLE:
return self.open_simple(application)
else:
return self.open_work_flow(application)
def open_work_flow(self, application):
    """Open a chat session against the application's latest published workflow.

    Caches a ChatInfo for 30 minutes and returns the new chat id.
    @param application: the Application being chatted with (type WORK_FLOW)
    @raise AppApiException: when the application was never published
    """
    self.is_valid(raise_exception=True)
    application_id = self.data.get('application_id')
    chat_id = str(uuid.uuid1())
    # order_by('-create_time').first() yields the newest version; the
    # original's [0:1] slice before .first() was redundant.
    work_flow_version = QuerySet(WorkFlowVersion).filter(application_id=application_id).order_by(
        '-create_time').first()
    if work_flow_version is None:
        raise AppApiException(500, "应用未发布,请发布后再使用")
    chat_cache.set(chat_id,
                   ChatInfo(chat_id, None, [],
                            [],
                            application, work_flow_version), timeout=60 * 30)
    return chat_id
def open_simple(self, application):
application_id = self.data.get('application_id')
model = QuerySet(Model).filter(id=application.model_id).first()
dataset_id_list = [str(row.dataset_id) for row in
QuerySet(ApplicationDatasetMapping).filter(
@ -229,6 +257,27 @@ class ChatSerializers(serializers.Serializer):
application), timeout=60 * 30)
return chat_id
class OpenWorkFlowChat(serializers.Serializer):
    """Opens a temporary chat that runs a workflow graph straight from the editor,
    without requiring a saved/published application."""
    work_flow = WorkFlowSerializers(error_messages=ErrMessage.uuid("工作流"))

    def open(self):
        """Validate the submitted graph and cache a throwaway ChatInfo.

        @return: the new temporary chat id
        """
        self.is_valid(raise_exception=True)
        work_flow = self.data.get('work_flow')
        # Fail fast if the graph is structurally invalid.
        Flow.new_instance(work_flow).is_valid()
        chat_id = str(uuid.uuid1())
        # Placeholder application (id=None): nothing is persisted for a temp chat.
        application = Application(id=None, dialogue_number=3, model=None,
                                  dataset_setting={},
                                  model_setting={},
                                  problem_optimization=None,
                                  type=ApplicationTypeChoices.WORK_FLOW
                                  )
        # Unsaved version object wrapping the submitted graph.
        work_flow_version = WorkFlowVersion(work_flow=work_flow)
        chat_cache.set(chat_id,
                       ChatInfo(chat_id, None, [],
                                [],
                                application, work_flow_version), timeout=60 * 30)
        return chat_id
class OpenTempChat(serializers.Serializer):
user_id = serializers.UUIDField(required=True, error_messages=ErrMessage.uuid("用户id"))
@ -329,7 +378,7 @@ class ChatRecordSerializer(serializers.Serializer):
chat_info: ChatInfo = chat_cache.get(chat_id)
if chat_info is not None:
chat_record_list = [chat_record for chat_record in chat_info.chat_record_list if
chat_record.id == uuid.UUID(chat_record_id)]
str(chat_record.id) == str(chat_record_id)]
if chat_record_list is not None and len(chat_record_list):
return chat_record_list[-1]
return QuerySet(ChatRecord).filter(id=chat_record_id, chat_id=chat_id).first()
@ -377,7 +426,8 @@ class ChatRecordSerializer(serializers.Serializer):
'padding_problem_text': chat_record.details.get('problem_padding').get(
'padding_problem_text') if 'problem_padding' in chat_record.details else None,
'dataset_list': dataset_list,
'paragraph_list': paragraph_list
'paragraph_list': paragraph_list,
'execution_details': [chat_record.details[key] for key in chat_record.details]
}
def page(self, current_page: int, page_size: int, with_valid=True):

View File

@ -161,7 +161,25 @@ class ApplicationApi(ApiMixin):
'problem_optimization': openapi.Schema(type=openapi.TYPE_BOOLEAN, title="问题优化",
description="是否开启问题优化", default=True),
'icon': openapi.Schema(type=openapi.TYPE_STRING, title="icon",
description="icon", default="/ui/favicon.ico")
description="icon", default="/ui/favicon.ico"),
'work_flow': ApplicationApi.WorkFlow.get_request_body_api()
}
)
class WorkFlow(ApiMixin):
    """Swagger schema for a workflow graph request body (nodes + edges)."""

    @staticmethod
    def get_request_body_api():
        return openapi.Schema(
            type=openapi.TYPE_OBJECT,
            # Fix: the original listed [''] here — an empty-string property
            # name is not a valid required entry.
            required=[],
            properties={
                'nodes': openapi.Schema(type=openapi.TYPE_ARRAY, items=openapi.Schema(type=openapi.TYPE_OBJECT),
                                        title="节点列表", description="节点列表",
                                        default=[]),
                # Fix: an array-typed schema should default to [], not {}.
                'edges': openapi.Schema(type=openapi.TYPE_ARRAY, items=openapi.Schema(type=openapi.TYPE_OBJECT),
                                        title='连线列表', description="连线列表",
                                        default=[]),
            }
        )
@ -219,6 +237,17 @@ class ApplicationApi(ApiMixin):
}
)
class Publish(ApiMixin):
    """Swagger schema for the publish-application endpoint body."""

    @staticmethod
    def get_request_body_api():
        # The publish body carries only the workflow graph to snapshot.
        return openapi.Schema(
            type=openapi.TYPE_OBJECT,
            required=[],
            properties={
                'work_flow': ApplicationApi.WorkFlow.get_request_body_api()
            }
        )
class Create(ApiMixin):
@staticmethod
def get_request_body_api():
@ -239,7 +268,9 @@ class ApplicationApi(ApiMixin):
'dataset_setting': ApplicationApi.DatasetSetting.get_request_body_api(),
'model_setting': ApplicationApi.ModelSetting.get_request_body_api(),
'problem_optimization': openapi.Schema(type=openapi.TYPE_BOOLEAN, title="问题优化",
description="是否开启问题优化", default=True)
description="是否开启问题优化", default=True),
'type': openapi.Schema(type=openapi.TYPE_STRING, title="应用类型",
description="应用类型 简易:SIMPLE|工作流:WORK_FLOW")
}
)

View File

@ -82,6 +82,17 @@ class ChatApi(ApiMixin):
]
class OpenWorkFlowTemp(ApiMixin):
    """Swagger schema for opening a temporary workflow chat from the editor."""

    @staticmethod
    def get_request_body_api():
        # Same graph payload used by the publish endpoint.
        return openapi.Schema(
            type=openapi.TYPE_OBJECT,
            required=[],
            properties={
                'work_flow': ApplicationApi.WorkFlow.get_request_body_api()
            }
        )
class OpenTempChat(ApiMixin):
@staticmethod
def get_request_body_api():

View File

@ -8,6 +8,7 @@ urlpatterns = [
path('application/profile', views.Application.Profile.as_view(), name='application/profile'),
path('application/embed', views.Application.Embed.as_view()),
path('application/authentication', views.Application.Authentication.as_view()),
path('application/<str:application_id>/publish', views.Application.Publish.as_view()),
path('application/<str:application_id>/edit_icon', views.Application.EditIcon.as_view()),
path('application/<str:application_id>/statistics/customer_count',
views.ApplicationStatistics.CustomerCount.as_view()),
@ -30,6 +31,7 @@ urlpatterns = [
path('application/<int:current_page>/<int:page_size>', views.Application.Page.as_view(), name='application_page'),
path('application/<str:application_id>/chat/open', views.ChatView.Open.as_view(), name='application/open'),
path("application/chat/open", views.ChatView.OpenTemp.as_view()),
path("application/chat_workflow/open", views.ChatView.OpenWorkFlowTemp.as_view()),
path("application/<str:application_id>/chat/client/<int:current_page>/<int:page_size>",
views.ChatView.ClientChatHistoryPage.as_view()),
path("application/<str:application_id>/chat/client/<chat_id>",

View File

@ -7,6 +7,7 @@
@desc:
"""
from django.core import cache
from django.http import HttpResponse
from drf_yasg.utils import swagger_auto_schema
from rest_framework.decorators import action
@ -27,6 +28,8 @@ from common.swagger_api.common_api import CommonApi
from common.util.common import query_params_to_single_dict
from dataset.serializers.dataset_serializers import DataSetSerializers
chat_cache = cache.caches['model_cache']
class ApplicationStatistics(APIView):
class CustomerCount(APIView):
@ -332,8 +335,7 @@ class Application(APIView):
tags=['应用'])
@has_permissions(PermissionConstants.APPLICATION_CREATE, compare=CompareConstants.AND)
def post(self, request: Request):
ApplicationSerializer.Create(data={'user_id': request.user.id}).insert(request.data)
return result.success(True)
return result.success(ApplicationSerializer.Create(data={'user_id': request.user.id}).insert(request.data))
@action(methods=['GET'], detail=False)
@swagger_auto_schema(operation_summary="获取应用列表",
@ -370,6 +372,26 @@ class Application(APIView):
'search_mode': request.query_params.get('search_mode')}).hit_test(
))
class Publish(APIView):
    """PUT endpoint that publishes an application's workflow graph."""
    authentication_classes = [TokenAuth]

    @action(methods=['PUT'], detail=False)
    @swagger_auto_schema(operation_summary="发布应用",
                         operation_id="发布应用",
                         manual_parameters=ApplicationApi.Operate.get_request_params_api(),
                         request_body=ApplicationApi.Publish.get_request_body_api(),
                         responses=result.get_default_response(),
                         tags=['应用'])
    # Restricted to admins/users holding MANAGE permission on this application.
    @has_permissions(ViewPermission(
        [RoleConstants.ADMIN, RoleConstants.USER],
        [lambda r, keywords: Permission(group=Group.APPLICATION, operate=Operate.MANAGE,
                                        dynamic_tag=keywords.get('application_id'))],
        compare=CompareConstants.AND))
    def put(self, request: Request, application_id: str):
        # Delegates to the serializer's publish flow; body carries the graph.
        return result.success(
            ApplicationSerializer.Operate(
                data={'application_id': application_id, 'user_id': request.user.id}).publish(request.data))
class Operate(APIView):
authentication_classes = [TokenAuth]

View File

@ -64,6 +64,18 @@ class ChatView(APIView):
return result.success(ChatSerializers.OpenChat(
data={'user_id': request.user.id, 'application_id': application_id}).open())
class OpenWorkFlowTemp(APIView):
    """POST endpoint returning a temporary chat id for debugging an unsaved workflow."""
    authentication_classes = [TokenAuth]

    @action(methods=['POST'], detail=False)
    @swagger_auto_schema(operation_summary="获取工作流临时会话id",
                         operation_id="获取工作流临时会话id",
                         request_body=ChatApi.OpenWorkFlowTemp.get_request_body_api(),
                         tags=["应用/会话"])
    def post(self, request: Request):
        # The raw graph from the body plus the requester's id feed the temp-chat serializer.
        return result.success(ChatSerializers.OpenWorkFlowChat(
            data={**request.data, 'user_id': request.user.id}).open())
class OpenTemp(APIView):
authentication_classes = [TokenAuth]

View File

@ -7,9 +7,10 @@
@desc:
"""
from django.core.exceptions import EmptyResultSet
from django.core.exceptions import EmptyResultSet, FullResultSet
from django.db import NotSupportedError
from django.db.models.sql.compiler import SQLCompiler
from django.db.transaction import TransactionManagementError
class AppSQLCompiler(SQLCompiler):
@ -19,15 +20,16 @@ class AppSQLCompiler(SQLCompiler):
field_replace_dict = {}
self.field_replace_dict = field_replace_dict
def get_query_str(self, with_limits=True, with_table_name=False):
def get_query_str(self, with_limits=True, with_table_name=False, with_col_aliases=False):
refcounts_before = self.query.alias_refcount.copy()
try:
extra_select, order_by, group_by = self.pre_sql_setup()
combinator = self.query.combinator
extra_select, order_by, group_by = self.pre_sql_setup(
with_col_aliases=with_col_aliases or bool(combinator),
)
for_update_part = None
# Is a LIMIT/OFFSET clause needed?
with_limit_offset = with_limits and (
self.query.high_mark is not None or self.query.low_mark
)
with_limit_offset = with_limits and self.query.is_sliced
combinator = self.query.combinator
features = self.connection.features
if combinator:
@ -40,8 +42,14 @@ class AppSQLCompiler(SQLCompiler):
result, params = self.get_combinator_sql(
combinator, self.query.combinator_all
)
elif self.qualify:
result, params = self.get_qualify_sql()
order_by = None
else:
distinct_fields, distinct_params = self.get_distinct()
# This must come after 'select', 'ordering', and 'distinct'
# (see docstring of get_from_clause() for details).
from_, f_params = self.get_from_clause()
try:
where, w_params = (
self.compile(self.where) if self.where is not None else ("", [])
@ -51,11 +59,92 @@ class AppSQLCompiler(SQLCompiler):
raise
# Use a predicate that's always False.
where, w_params = "0 = 1", []
having, h_params = (
self.compile(self.having) if self.having is not None else ("", [])
)
except FullResultSet:
where, w_params = "", []
try:
having, h_params = (
self.compile(self.having)
if self.having is not None
else ("", [])
)
except FullResultSet:
having, h_params = "", []
result = []
params = []
if self.query.distinct:
distinct_result, distinct_params = self.connection.ops.distinct_sql(
distinct_fields,
distinct_params,
)
result += distinct_result
params += distinct_params
out_cols = []
for _, (s_sql, s_params), alias in self.select + extra_select:
if alias:
s_sql = "%s AS %s" % (
s_sql,
self.connection.ops.quote_name(alias),
)
params.extend(s_params)
out_cols.append(s_sql)
params.extend(f_params)
if self.query.select_for_update and features.has_select_for_update:
if (
self.connection.get_autocommit()
# Don't raise an exception when database doesn't
# support transactions, as it's a noop.
and features.supports_transactions
):
raise TransactionManagementError(
"select_for_update cannot be used outside of a transaction."
)
if (
with_limit_offset
and not features.supports_select_for_update_with_limit
):
raise NotSupportedError(
"LIMIT/OFFSET is not supported with "
"select_for_update on this database backend."
)
nowait = self.query.select_for_update_nowait
skip_locked = self.query.select_for_update_skip_locked
of = self.query.select_for_update_of
no_key = self.query.select_for_no_key_update
# If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the
# backend doesn't support it, raise NotSupportedError to
# prevent a possible deadlock.
if nowait and not features.has_select_for_update_nowait:
raise NotSupportedError(
"NOWAIT is not supported on this database backend."
)
elif skip_locked and not features.has_select_for_update_skip_locked:
raise NotSupportedError(
"SKIP LOCKED is not supported on this database backend."
)
elif of and not features.has_select_for_update_of:
raise NotSupportedError(
"FOR UPDATE OF is not supported on this database backend."
)
elif no_key and not features.has_select_for_no_key_update:
raise NotSupportedError(
"FOR NO KEY UPDATE is not supported on this "
"database backend."
)
for_update_part = self.connection.ops.for_update_sql(
nowait=nowait,
skip_locked=skip_locked,
of=self.get_select_for_update_of_arguments(),
no_key=no_key,
)
if for_update_part and features.for_update_after_from:
result.append(for_update_part)
if where:
result.append("WHERE %s" % where)
params.extend(w_params)
@ -91,7 +180,11 @@ class AppSQLCompiler(SQLCompiler):
for _, (o_sql, o_params, _) in order_by:
ordering.append(o_sql)
params.extend(o_params)
result.append("ORDER BY %s" % ", ".join(ordering))
order_by_sql = "ORDER BY %s" % ", ".join(ordering)
if combinator and features.requires_compound_order_by_subquery:
result = ["SELECT * FROM (", *result, ")", order_by_sql]
else:
result.append(order_by_sql)
if with_limit_offset:
result.append(
@ -102,6 +195,7 @@ class AppSQLCompiler(SQLCompiler):
if for_update_part and not features.for_update_after_from:
result.append(for_update_part)
from_, f_params = self.get_from_clause()
sql = " ".join(result)
if not with_table_name:

View File

@ -15,6 +15,7 @@ class Page(dict):
class Result(JsonResponse):
charset = 'utf-8'
"""
接口统一返回对象
"""

View File

@ -30,59 +30,3 @@ class QianfanChatModel(QianfanChatEndpoint):
def get_num_tokens(self, text: str) -> int:
tokenizer = TokenizerManage.get_tokenizer()
return len(tokenizer.encode(text))
def stream(
self,
input: LanguageModelInput,
config: Optional[RunnableConfig] = None,
*,
stop: Optional[List[str]] = None,
**kwargs: Any,
) -> Iterator[BaseMessageChunk]:
if len(input) % 2 == 0:
input = [HumanMessage(content='padding'), *input]
input = [
HumanMessage(content=input[index].content) if index % 2 == 0 else AIMessage(content=input[index].content)
for index in range(0, len(input))]
if type(self)._stream == BaseChatModel._stream:
# model doesn't implement streaming, so use default implementation
yield cast(
BaseMessageChunk, self.invoke(input, config=config, stop=stop, **kwargs)
)
else:
config = config or {}
messages = self._convert_input(input).to_messages()
params = self._get_invocation_params(stop=stop, **kwargs)
options = {"stop": stop, **kwargs}
callback_manager = CallbackManager.configure(
config.get("callbacks"),
self.callbacks,
self.verbose,
config.get("tags"),
self.tags,
config.get("metadata"),
self.metadata,
)
(run_manager,) = callback_manager.on_chat_model_start(
dumpd(self),
[messages],
invocation_params=params,
options=options,
name=config.get("run_name"),
)
try:
generation: Optional[ChatGenerationChunk] = None
for chunk in self._stream(
messages, stop=stop, run_manager=run_manager, **kwargs
):
yield chunk.message
if generation is None:
generation = chunk
assert generation is not None
except BaseException as e:
run_manager.on_llm_error(e)
raise e
else:
run_manager.on_llm_end(
LLMResult(generations=[[generation]]),
)

6
package-lock.json generated Normal file
View File

@ -0,0 +1,6 @@
{
"name": "MaxKB",
"lockfileVersion": 3,
"requires": true,
"packages": {}
}

View File

@ -7,8 +7,8 @@ readme = "README.md"
[tool.poetry.dependencies]
python = "^3.11"
django = "4.1.13"
djangorestframework = "3.14.0"
django = "4.2.13"
djangorestframework = "^3.15.2"
drf-yasg = "1.21.7"
django-filter = "23.2"
langchain = "^0.2.3"

View File

@ -5,7 +5,7 @@
<link rel="icon" href="/favicon.ico" />
<meta
name="viewport"
content="width=device-width, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0,user-scalable=no,
content="width=device-width, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0, user-scalable=no,
viewport-fit=cover"
/>
<base target="_blank" />

View File

@ -14,26 +14,20 @@
},
"dependencies": {
"@ctrl/tinycolor": "^4.1.0",
"@logicflow/core": "^1.2.27",
"@logicflow/extension": "^1.2.27",
"@vueuse/core": "^10.9.0",
"axios": "^0.28.0",
"cropperjs": "^1.6.2",
"echarts": "^5.5.0",
"element-plus": "^2.5.6",
"file-saver": "^2.0.5",
"highlight.js": "^11.9.0",
"install": "^0.13.0",
"katex": "^0.16.10",
"lodash": "^4.17.21",
"markdown-it": "^13.0.2",
"markdown-it-abbr": "^1.0.4",
"markdown-it-anchor": "^8.6.7",
"markdown-it-footnote": "^3.0.3",
"markdown-it-highlightjs": "^4.0.1",
"markdown-it-sub": "^1.0.0",
"markdown-it-sup": "^1.0.0",
"markdown-it-task-lists": "^2.1.1",
"markdown-it-toc-done-right": "^4.2.0",
"marked": "^12.0.2",
"md-editor-v3": "4.12.1",
"md-editor-v3": "^4.16.7",
"medium-zoom": "^1.1.0",
"mermaid": "^10.9.0",
"mitt": "^3.0.0",
@ -53,8 +47,6 @@
"@tsconfig/node18": "^18.2.0",
"@types/file-saver": "^2.0.7",
"@types/jsdom": "^21.1.1",
"@types/markdown-it": "^13.0.7",
"@types/markdown-it-highlightjs": "^3.3.4",
"@types/node": "^18.17.5",
"@types/nprogress": "^0.2.0",
"@vitejs/plugin-vue": "^4.3.1",

View File

@ -4,6 +4,4 @@
<RouterView />
</template>
<style scoped>
</style>
<style scoped></style>

View File

@ -45,8 +45,7 @@ const postApplication: (
/**
*
* @param
* @param
*/
const putApplication: (
application_id: String,
@ -150,6 +149,16 @@ const postChatOpen: (data: ApplicationFormType) => Promise<Result<any>> = (data)
return post(`${prefix}/chat/open`, data)
}
/**
* Id
* @param
}
*/
const postWorkflowChatOpen: (data: ApplicationFormType) => Promise<Result<any>> = (data) => {
return post(`${prefix}/chat_workflow/open`, data)
}
/**
* Id
* @param
@ -228,6 +237,18 @@ const getApplicationModel: (
return get(`${prefix}/${application_id}/model`, loading)
}
/**
*
* @param
*/
const putPublishApplication: (
application_id: String,
data: ApplicationFormType,
loading?: Ref<boolean>
) => Promise<Result<any>> = (application_id, data, loading) => {
return put(`${prefix}/${application_id}/publish`, data, undefined, loading)
}
export default {
getAllAppilcation,
getApplication,
@ -245,5 +266,7 @@ export default {
getProfile,
putChatVote,
getApplicationHitTest,
getApplicationModel
getApplicationModel,
putPublishApplication,
postWorkflowChatOpen
}

View File

@ -11,6 +11,8 @@ interface ApplicationFormType {
model_setting?: any
problem_optimization?: boolean
icon?: string | undefined
type?: string
work_flow?: any
}
interface chatType {
id: string

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 5.0 KiB

View File

@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M14.2404 11.9757C14.2022 11.8917 14.1406 11.8206 14.0629 11.7709C13.9853 11.7213 13.8949 11.6951 13.8027 11.6957H12.6518V9.16918C12.6518 7.44347 11.069 6.00118 8.96725 5.65376V1.63242C8.96725 1.37615 8.86544 1.13037 8.68423 0.949156C8.50302 0.767942 8.25724 0.666138 8.00096 0.666138C7.74469 0.666138 7.49891 0.767942 7.3177 0.949156C7.13648 1.13037 7.03468 1.37615 7.03468 1.63242V5.65604C4.93411 6.00233 3.34896 7.44576 3.34896 9.17033V11.6945H2.19811C2.07221 11.6959 1.95172 11.746 1.86184 11.8341C1.77197 11.9223 1.71965 12.0418 1.71582 12.1677V12.1837C1.71582 12.2797 1.74782 12.3779 1.80953 12.4614L3.76496 15.1345C3.80786 15.1932 3.86352 15.2413 3.92775 15.2754C3.99197 15.3094 4.06308 15.3284 4.13572 15.3309C4.20836 15.3334 4.28062 15.3195 4.34705 15.29C4.41349 15.2605 4.47237 15.2163 4.51925 15.1608L6.80153 12.4899C6.86177 12.42 6.9005 12.3341 6.91307 12.2427C6.92564 12.1512 6.91151 12.0581 6.87239 11.9745C6.83423 11.8906 6.7726 11.8195 6.69493 11.7698C6.61726 11.7201 6.52688 11.694 6.43468 11.6945H5.28382V9.16918C5.28382 8.3829 6.39925 7.50747 8.00039 7.50747C9.60153 7.50747 10.7192 8.3829 10.7192 9.16918V11.6922H9.56611C9.47671 11.6918 9.38902 11.7166 9.31303 11.7637C9.23705 11.8108 9.17583 11.8783 9.13639 11.9585C9.10152 12.026 9.08349 12.1009 9.08382 12.1768C9.08444 12.2795 9.11721 12.3794 9.17754 12.4625L11.133 15.1368C11.1759 15.1955 11.2315 15.2436 11.2957 15.2777C11.36 15.3117 11.4311 15.3307 11.5037 15.3332C11.5764 15.3357 11.6486 15.3217 11.7151 15.2923C11.7815 15.2628 11.8404 15.2186 11.8872 15.1631L14.1684 12.4888C14.242 12.404 14.2833 12.2959 14.285 12.1837V12.1688C14.2839 12.1015 14.2683 12.0352 14.2392 11.9745L14.2404 11.9757Z" fill="white"/>
</svg>

After

Width:  |  Height:  |  Size: 1.8 KiB

View File

@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1718950836622" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="14660" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><path d="M366.08 301.269333l-215.552 235.178667L354.133333 771.669333a21.333333 21.333333 0 0 1-0.810666 28.8l-27.904 28.842667a21.333333 21.333333 0 0 1-31.36-0.810667L50.944 550.826667a21.333333 21.333333 0 0 1 0.298667-28.458667L306.773333 243.669333a21.333333 21.333333 0 0 1 31.018667-0.426666l27.904 28.8a21.333333 21.333333 0 0 1 0.426667 29.226666z m513.578667 235.178667l-206.08-235.178667a21.333333 21.333333 0 0 1 0.682666-28.928l27.904-28.8a21.333333 21.333333 0 0 1 31.317334 0.682667l245.674666 277.845333a21.333333 21.333333 0 0 1-0.298666 28.544l-255.402667 278.613334a21.333333 21.333333 0 0 1-31.061333 0.426666l-27.904-28.757333a21.333333 21.333333 0 0 1-0.384-29.269333l215.594666-235.178667z m-324.864-474.88l42.410666 4.906667a21.333333 21.333333 0 0 1 18.730667 23.637333L514.133333 965.12a21.333333 21.333333 0 0 1-23.637333 18.730667l-42.368-4.949334a21.333333 21.333333 0 0 1-18.773333-23.637333L531.2 80.213333a21.333333 21.333333 0 0 1 23.68-18.730666z" p-id="14661" fill="#FF8800"></path></svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@ -0,0 +1,5 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.78395 12.7399C8.87529 12.6625 8.93678 12.5557 8.9578 12.4378L10.4173 3.95194C10.4294 3.87835 10.4253 3.803 10.4054 3.73113C10.3855 3.65926 10.3502 3.59258 10.3019 3.53572C10.2536 3.47887 10.1936 3.43318 10.1259 3.40185C10.0582 3.37051 9.98452 3.35427 9.90994 3.35425H8.04041C7.92032 3.35426 7.80413 3.39687 7.7125 3.4745C7.62088 3.55212 7.55976 3.65974 7.54002 3.77819L7.01877 6.78751H4.5168L5.01025 3.95194C5.02234 3.87835 5.01828 3.803 4.99836 3.73113C4.97843 3.65926 4.94311 3.59258 4.89485 3.53572C4.84658 3.47887 4.78653 3.43318 4.71885 3.40185C4.65117 3.37051 4.57748 3.35427 4.5029 3.35425H2.63337C2.51366 3.35562 2.39818 3.39872 2.30683 3.47611C2.21549 3.5535 2.154 3.66033 2.13298 3.77819L0.673494 12.264C0.6614 12.3376 0.665456 12.413 0.685383 12.4849C0.70531 12.5567 0.740631 12.6234 0.788895 12.6803C0.837159 12.7371 0.897213 12.7828 0.964892 12.8141C1.03257 12.8455 1.10626 12.8617 1.18084 12.8617H3.02257C3.14266 12.8617 3.25885 12.8191 3.35048 12.7415C3.4421 12.6639 3.50322 12.5563 3.52296 12.4378L4.07201 9.42848H6.57398L6.08053 12.264C6.06844 12.3376 6.0725 12.413 6.09243 12.4849C6.11235 12.5567 6.14767 12.6234 6.19594 12.6803C6.2442 12.7371 6.30425 12.7828 6.37193 12.8141C6.43961 12.8455 6.5133 12.8617 6.58788 12.8617H8.45741C8.57712 12.8604 8.6926 12.8173 8.78395 12.7399Z" fill="white"/>
<path d="M13.686 12.7378C13.7786 12.6617 13.8418 12.5555 13.8644 12.4378L14.6428 7.9134C14.6603 7.83959 14.6613 7.76282 14.6456 7.68859C14.6299 7.61437 14.598 7.54454 14.5522 7.48409C14.5064 7.42364 14.4477 7.37409 14.3805 7.33897C14.3133 7.30385 14.2391 7.28404 14.1633 7.28096H12.2938C12.1741 7.28233 12.0586 7.32543 11.9672 7.40282C11.8759 7.48021 11.8144 7.58704 11.7934 7.7049L10.9872 12.2571C10.9739 12.3303 10.9769 12.4055 10.996 12.4774C11.015 12.5493 11.0497 12.6161 11.0974 12.6731C11.1451 12.7302 11.2048 12.776 11.2723 12.8074C11.3397 12.8388 11.4132 12.855 11.4876 12.8548H13.3571C13.477 12.8553 13.5934 12.814 13.686 12.7378Z" fill="white"/>
<path opacity="0.5" d="M14.891 5.59802C14.9822 5.52202 15.0443 5.41679 15.0667 5.30021L15.3239 3.93802C15.3383 3.86434 15.3362 3.78837 15.3176 3.71561C15.2991 3.64284 15.2647 3.5751 15.2168 3.51725C15.1689 3.45941 15.1088 3.41291 15.0408 3.38112C14.9728 3.34932 14.8986 3.33301 14.8235 3.33338H12.9609C12.8428 3.33281 12.7282 3.37345 12.6369 3.4483C12.5455 3.52315 12.4832 3.62752 12.4605 3.74343L12.1964 5.11256C12.1833 5.18609 12.1863 5.2616 12.2054 5.33384C12.2244 5.40607 12.2589 5.4733 12.3065 5.53084C12.3541 5.58839 12.4137 5.63486 12.4811 5.66705C12.5485 5.69923 12.6221 5.71635 12.6968 5.71721H14.5663C14.6851 5.71614 14.7998 5.67403 14.891 5.59802Z" fill="white"/>
</svg>

After

Width:  |  Height:  |  Size: 2.7 KiB

View File

@ -0,0 +1,3 @@
<svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.5892 18.5008L14.1666 15.4259H17.9166C18.3768 15.4259 18.7499 15.0449 18.7499 14.575V2.10096C18.7499 1.63099 18.3768 1.25 17.9166 1.25H1.66658C1.20635 1.25 0.833252 1.63099 0.833252 2.10096V14.575C0.833252 15.0449 1.20635 15.4259 1.66658 15.4259H5.83325L9.41066 18.5008C9.7361 18.8331 10.2637 18.8331 10.5892 18.5008ZM5.41658 7.08333H6.24992C6.71015 7.08333 7.08325 7.45643 7.08325 7.91667V8.75C7.08325 9.21024 6.71015 9.58333 6.24992 9.58333H5.41658C4.95635 9.58333 4.58325 9.21024 4.58325 8.75V7.91667C4.58325 7.45643 4.95635 7.08333 5.41658 7.08333ZM8.74992 7.91667C8.74992 7.45643 9.12301 7.08333 9.58325 7.08333H10.4166C10.8768 7.08333 11.2499 7.45643 11.2499 7.91667V8.75C11.2499 9.21024 10.8768 9.58333 10.4166 9.58333H9.58325C9.12301 9.58333 8.74992 9.21024 8.74992 8.75V7.91667ZM13.7499 7.08333H14.5832C15.0435 7.08333 15.4166 7.45643 15.4166 7.91667V8.75C15.4166 9.21024 15.0435 9.58333 14.5832 9.58333H13.7499C13.2897 9.58333 12.9166 9.21024 12.9166 8.75V7.91667C12.9166 7.45643 13.2897 7.08333 13.7499 7.08333Z" fill="white"/>
</svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

@ -0,0 +1,3 @@
<svg width="22" height="22" viewBox="0 0 22 22" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.50959 19.3955C6.84272 20.0622 5.93829 20.4368 4.99527 20.4367C4.05226 20.4366 3.1479 20.0619 2.48114 19.395C1.81439 18.7282 1.43986 17.8238 1.43994 16.8807C1.44002 15.9377 1.81472 15.0334 2.48159 14.3666L4.6407 12.207C3.70381 9.41949 4.34648 6.21772 6.56692 3.99594C8.78914 1.77372 11.9927 1.13238 14.7816 2.07105C14.9607 2.13105 15.1758 2.21994 15.426 2.33683C15.5488 2.39405 15.656 2.48004 15.7385 2.5875C15.821 2.69496 15.8764 2.82073 15.8999 2.95414C15.9235 3.08754 15.9145 3.22467 15.8738 3.35388C15.8331 3.48309 15.7618 3.60057 15.666 3.69638L11.9096 7.45283L14.4238 9.96705L18.1318 6.25905C18.2333 6.15762 18.3578 6.08226 18.4947 6.0394C18.6316 5.99653 18.7768 5.98744 18.918 6.0129C19.0592 6.03836 19.1921 6.09761 19.3054 6.1856C19.4187 6.27358 19.5091 6.38769 19.5687 6.51816C19.6825 6.76616 19.7683 6.97949 19.8265 7.15861C20.7345 9.93283 20.0856 13.1048 17.8807 15.3097C15.6594 17.5306 12.4571 18.1728 9.66914 17.2359L7.50959 19.395V19.3955Z" fill="white"/>
</svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

@ -0,0 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.185 14.7097C8.41034 15.273 9.21034 15.2667 9.42667 14.6997L10.8753 10.9044L14.701 9.42671C15.266 9.20838 15.2707 8.41072 14.7083 8.18572L5.28034 4.41438C4.736 4.19672 4.196 4.73705 4.41367 5.28105L8.185 14.7097Z" fill="white"/>
<path opacity="0.5" d="M8.33167 4.559C8.275 2.58367 6.65567 1 4.66667 1C2.64167 1 1 2.64167 1 4.66667C1 6.65467 2.58167 8.273 4.55567 8.33167L3.98233 6.898C3.59753 6.77974 3.24987 6.56394 2.97314 6.27157C2.69642 5.9792 2.50004 5.6202 2.4031 5.22948C2.30617 4.83876 2.31196 4.42961 2.41993 4.0418C2.52791 3.65398 2.73438 3.3007 3.01928 3.01629C3.30418 2.73187 3.65781 2.52601 4.04581 2.4187C4.43381 2.3114 4.84298 2.3063 5.23353 2.40391C5.62408 2.50152 5.98274 2.69851 6.27463 2.97574C6.56653 3.25296 6.78174 3.60099 6.89933 3.986L8.33167 4.559Z" fill="white"/>
</svg>

After

Width:  |  Height:  |  Size: 904 B

View File

@ -0,0 +1,209 @@
<template>
<el-dialog
class="execution-details-dialog"
title="执行详情"
v-model="dialogVisible"
destroy-on-close
align-center
@click.stop
>
<el-scrollbar>
<div class="execution-details">
<template v-for="(item, index) in arraySort(detail, 'index')" :key="index">
<el-card class="mb-8" shadow="never" style="--el-card-padding: 12px 16px">
<div class="flex-between cursor" @click="current = current === index ? '' : index">
<div class="flex align-center">
<el-icon class="mr-8 arrow-icon" :class="current === index ? 'rotate-90' : ''"
><CaretRight
/></el-icon>
<component :is="iconComponent(`${item.type}-icon`)" class="mr-8" :size="24" />
<h4>{{ item.name }}</h4>
</div>
<div class="flex align-center">
<span
class="mr-16 color-secondary"
v-if="item.type === WorkflowType.Question || item.type === WorkflowType.AiChat"
>{{ item?.message_tokens + item?.answer_tokens }} tokens</span
>
<span class="mr-16 color-secondary">{{ item?.run_time?.toFixed(2) || 0.0 }} s</span>
<el-icon class="success" :size="16" v-if="item.status === 200"
><CircleCheck
/></el-icon>
<el-icon class="danger" :size="16" v-else><CircleClose /></el-icon>
</div>
</div>
<el-collapse-transition>
<div class="mt-12" v-if="current === index">
<template v-if="item.status === 200">
<!-- 开始 -->
<template v-if="item.type === WorkflowType.Start">
<div class="card-never border-r-4">
<h5 class="p-8-12">参数输入</h5>
<div class="p-8-12 border-t-dashed lighter">{{ item.question || '-' }}</div>
</div>
</template>
<!-- 知识库检索 -->
<template v-if="item.type == WorkflowType.SearchDataset">
<div class="card-never border-r-4">
<h5 class="p-8-12">检索内容</h5>
<div class="p-8-12 border-t-dashed lighter">{{ item.question || '-' }}</div>
</div>
<div class="card-never border-r-4 mt-8">
<h5 class="p-8-12">检索结果</h5>
<div class="p-8-12 border-t-dashed lighter">
<template v-if="item.paragraph_list?.length > 0">
<template
v-for="(paragraph, paragraphIndex) in item.paragraph_list"
:key="paragraphIndex"
>
<ParagraphCard :data="paragraph" :index="paragraphIndex" />
</template>
</template>
<template v-else> - </template>
</div>
</div>
</template>
<!-- 判断器 -->
<template v-if="item.type == WorkflowType.Condition">
<div class="card-never border-r-4">
<h5 class="p-8-12">判断结果</h5>
<div class="p-8-12 border-t-dashed lighter">
{{ item.branch_name || '-' }}
</div>
</div>
</template>
<!-- AI 对话 / 问题优化-->
<template
v-if="item.type == WorkflowType.AiChat || item.type == WorkflowType.Question"
>
<div class="card-never border-r-4">
<h5 class="p-8-12">角色设定 (System)</h5>
<div class="p-8-12 border-t-dashed lighter">
{{ item.system || '-' }}
</div>
</div>
<div class="card-never border-r-4 mt-8">
<h5 class="p-8-12">历史记录</h5>
<div class="p-8-12 border-t-dashed lighter">
<template v-if="item.history_message?.length > 0">
<p
class="mt-4 mb-4"
v-for="(history, historyIndex) in item.history_message"
:key="historyIndex"
>
<span class="color-secondary mr-4">{{ history.role }}:</span
><span>{{ history.content }}</span>
</p>
</template>
<template v-else> - </template>
</div>
</div>
<div class="card-never border-r-4 mt-8">
<h5 class="p-8-12">本次对话</h5>
<div class="p-8-12 border-t-dashed lighter pre-line">
{{ item.question || '-' }}
</div>
</div>
<div class="card-never border-r-4 mt-8">
<h5 class="p-8-12">AI 回答</h5>
<div class="p-8-12 border-t-dashed lighter">
<MdPreview
v-if="item.answer"
ref="editorRef"
editorId="preview-only"
:modelValue="item.answer"
style="background: none"
/>
<template v-else> - </template>
</div>
</div>
</template>
<!-- 指定回复 -->
<template v-if="item.type === WorkflowType.Reply">
<div class="card-never border-r-4">
<h5 class="p-8-12">回复内容</h5>
<div class="p-8-12 border-t-dashed lighter">
<el-scrollbar height="150">
<MdPreview
v-if="item.answer"
ref="editorRef"
editorId="preview-only"
:modelValue="item.answer"
style="background: none"
/>
<template v-else> - </template>
</el-scrollbar>
</div>
</div>
</template>
</template>
<template v-else>
<div class="card-never border-r-4">
<h5 class="p-8-12">错误日志</h5>
<div class="p-8-12 border-t-dashed lighter">{{ item.err_message || '-' }}</div>
</div>
</template>
</div>
</el-collapse-transition>
</el-card>
</template>
</div>
</el-scrollbar>
</el-dialog>
</template>
<script setup lang="ts">
import { ref, watch, onBeforeUnmount } from 'vue'
import { cloneDeep } from 'lodash'
import ParagraphCard from './component/ParagraphCard.vue'
import { arraySort } from '@/utils/utils'
import { iconComponent } from '@/workflow/icons/utils'
import { WorkflowType } from '@/enums/workflow'
const dialogVisible = ref(false)
const detail = ref<any[]>([])
const current = ref<number | string>('')
watch(dialogVisible, (bool) => {
if (!bool) {
detail.value = []
}
})
const open = (data: any) => {
detail.value = cloneDeep(data)
dialogVisible.value = true
}
onBeforeUnmount(() => {
dialogVisible.value = false
})
defineExpose({ open })
</script>
<style lang="scss">
.execution-details-dialog {
padding: 0;
.el-dialog__header {
padding: 24px 24px 0 24px;
}
.el-dialog__body {
padding: 8px !important;
}
.execution-details {
max-height: calc(100vh - 260px);
.arrow-icon {
transition: 0.2s;
}
}
}
@media only screen and (max-width: 768px) {
.execution-details-dialog {
width: 90% !important;
.footer-content {
display: block;
}
}
}
</style>

View File

@ -0,0 +1,79 @@
<template>
<div class="flex align-center mt-16" v-if="!isWorkFlow(props.type)">
<span class="mr-4 color-secondary">知识来源</span>
<el-divider direction="vertical" />
<el-button type="primary" class="mr-8" link @click="openParagraph(data)">
<AppIcon iconName="app-reference-outlined" class="mr-4"></AppIcon>
引用分段 {{ data.paragraph_list?.length || 0 }}</el-button
>
</div>
<div class="mt-8" v-if="!isWorkFlow(props.type)">
<el-space wrap>
<el-button
v-for="(dataset, index) in data.dataset_list"
:key="index"
size="small"
class="source_dataset-button"
@click="openParagraph(data, dataset.id)"
>{{ dataset.name }}</el-button
>
</el-space>
</div>
<div class="border-t color-secondary flex-between mt-12" style="padding-top: 12px">
<div>
<span class="mr-8"> 消耗 tokens: {{ data?.message_tokens + data?.answer_tokens }} </span>
<span> 耗时: {{ data?.run_time?.toFixed(2) }} s</span>
</div>
<el-button
v-if="isWorkFlow(props.type)"
type="primary"
link
@click="openExecutionDetail(data.execution_details)"
>
<el-icon class="mr-4"><Document /></el-icon>
执行详情</el-button
>
</div>
<!-- 知识库引用 dialog -->
<ParagraphSourceDialog ref="ParagraphSourceDialogRef" />
<!-- 执行详情 dialog -->
<ExecutionDetailDialog ref="ExecutionDetailDialogRef" />
</template>
<script setup lang="ts">
import { ref } from 'vue'
import ParagraphSourceDialog from './ParagraphSourceDialog.vue'
import ExecutionDetailDialog from './ExecutionDetailDialog.vue'
import { isWorkFlow } from '@/utils/application'
const props = defineProps({
data: {
type: Object,
default: () => {}
},
type: {
type: String,
default: ''
}
})
const ParagraphSourceDialogRef = ref()
const ExecutionDetailDialogRef = ref()
function openParagraph(row: any, id?: string) {
ParagraphSourceDialogRef.value.open(row, id)
}
function openExecutionDetail(row: any) {
ExecutionDetailDialogRef.value.open(row)
}
</script>
<style lang="scss" scoped>
.source_dataset-button {
background: var(--app-text-color-light-1);
border: 1px solid #ffffff;
&:hover {
border: 1px solid var(--el-color-primary);
background: var(--el-color-primary-light-9);
color: var(--el-text-color-primary);
}
}
</style>

View File

@ -19,50 +19,7 @@
</el-form-item>
<el-form-item label="引用分段">
<template v-for="(item, index) in detail.paragraph_list" :key="index">
<CardBox
shadow="never"
:title="item.title || '-'"
class="paragraph-source-card cursor mb-8"
:class="item.is_active ? '' : 'disabled'"
:showIcon="false"
>
<template #icon>
<AppAvatar class="mr-12 avatar-light" :size="22">
{{ index + 1 + '' }}</AppAvatar
>
</template>
<div class="active-button primary">{{ item.similarity?.toFixed(3) }}</div>
<template #description>
<el-scrollbar height="150">
<MdPreview
ref="editorRef"
editorId="preview-only"
:modelValue="item.content"
/>
</el-scrollbar>
</template>
<template #footer>
<div class="footer-content flex-between">
<el-text class="flex align-center">
<el-icon class="mr-4">
<Document />
</el-icon>
<span class="ellipsis-1 break-all" :title="item?.document_name">
{{ item?.document_name }}</span
>
</el-text>
<div class="flex align-center">
<AppAvatar class="mr-8" shape="square" :size="18">
<img src="@/assets/icon_document.svg" style="width: 58%" alt="" />
</AppAvatar>
<span class="ellipsis" :title="item?.dataset_name">
{{ item?.dataset_name }}</span
>
</div>
</div>
</template>
</CardBox>
<ParagraphCard :data="item" :index="index" />
</template>
</el-form-item>
</el-form>
@ -75,10 +32,9 @@
import { ref, watch, onBeforeUnmount } from 'vue'
import { cloneDeep } from 'lodash'
import { arraySort } from '@/utils/utils'
import { MdPreview } from 'md-editor-v3'
import ParagraphCard from './component/ParagraphCard.vue'
const emit = defineEmits(['refresh'])
const ParagraphDialogRef = ref()
const dialogVisible = ref(false)
const detail = ref<any>({})
@ -114,9 +70,6 @@ defineExpose({ open })
.paragraph-source-height {
max-height: calc(100vh - 260px);
}
.paragraph-source-card {
height: 260px;
}
}
@media only screen and (max-width: 768px) {
.paragraph-source {
@ -124,9 +77,6 @@ defineExpose({ open })
.footer-content {
display: block;
}
.paragraph-source-card {
height: 285px;
}
}
}
</style>

View File

@ -0,0 +1,58 @@
<template>
<CardBox
shadow="never"
:title="data.title || '-'"
class="paragraph-source-card cursor mb-8 paragraph-source-card-height"
:class="data.is_active ? '' : 'disabled'"
:showIcon="false"
>
<template #icon>
<AppAvatar class="mr-12 avatar-light" :size="22"> {{ index + 1 + '' }}</AppAvatar>
</template>
<div class="active-button primary">{{ data.similarity?.toFixed(3) }}</div>
<template #description>
<el-scrollbar height="150">
<MdPreview ref="editorRef" editorId="preview-only" :modelValue="data.content" />
</el-scrollbar>
</template>
<template #footer>
<div class="footer-content flex-between">
<el-text>
<el-icon>
<Document />
</el-icon>
{{ data?.document_name }}
</el-text>
<div class="flex align-center" style="line-height: 32px;">
<AppAvatar class="mr-8" shape="square" :size="18">
<img src="@/assets/icon_document.svg" style="width: 58%" alt="" />
</AppAvatar>
<span class="ellipsis"> {{ data?.dataset_name }}</span>
</div>
</div>
</template>
</CardBox>
</template>
<script setup lang="ts">
const props = defineProps({
data: {
type: Object,
default: () => {}
},
index: {
type: Number,
default: 0
}
})
</script>
<style lang="scss" scoped>
.paragraph-source-card-height {
height: 260px;
}
@media only screen and (max-width: 768px) {
.paragraph-source-card-height {
height: 285px;
}
}
</style>

View File

@ -27,6 +27,8 @@
ref="editorRef"
editorId="preview-only"
:modelValue="item.str"
noIconfont
no-mermaid
/>
</template>
</el-card>
@ -73,38 +75,9 @@
<el-card v-else shadow="always" class="dialog-card">
<MdRenderer :source="item.answer_text"></MdRenderer>
<!-- 知识来源 -->
<div v-if="showSource(item)">
<el-divider> <el-text type="info">知识来源</el-text> </el-divider>
<div>
<el-space wrap>
<el-button
v-for="(dataset, index) in item.dataset_list"
:key="index"
type="primary"
plain
size="small"
@click="openParagraph(item, dataset.id)"
>{{ dataset.name }}</el-button
>
</el-space>
</div>
<div>
<el-button
class="mr-8 mt-8"
type="primary"
plain
size="small"
@click="openParagraph(item)"
>引用分段{{ item.paragraph_list?.length || 0 }}</el-button
>
<el-tag type="info" effect="plain" class="mr-8 mt-8">
消耗 tokens: {{ item?.message_tokens + item?.answer_tokens }}
</el-tag>
<el-tag type="info" effect="plain" class="mt-8">
耗时: {{ item.run_time?.toFixed(2) }} s
</el-tag>
</div>
<KnowledgeSource :data="item" :type="props.data.type" />
</div>
</el-card>
<div class="flex-between mt-8" v-if="log">
@ -168,8 +141,6 @@
</div>
</div>
</div>
<!-- 知识库引用 dialog -->
<ParagraphSourceDialog ref="ParagraphSourceDialogRef" />
</div>
</template>
<script setup lang="ts">
@ -177,14 +148,14 @@ import { ref, nextTick, computed, watch, reactive, onMounted } from 'vue'
import { useRoute } from 'vue-router'
import LogOperationButton from './LogOperationButton.vue'
import OperationButton from './OperationButton.vue'
import ParagraphSourceDialog from './ParagraphSourceDialog.vue'
import KnowledgeSource from './KnowledgeSource.vue'
import applicationApi from '@/api/application'
import logApi from '@/api/log'
import { ChatManagement, type chatType } from '@/api/type/application'
import { randomId } from '@/utils/utils'
import useStore from '@/stores'
import MdRenderer from '@/components/markdown-renderer/MdRenderer.vue'
import { MdPreview } from 'md-editor-v3'
import MdRenderer from '@/components/markdown/MdRenderer.vue'
import { isWorkFlow } from '@/utils/application'
import { debounce } from 'lodash'
defineOptions({ name: 'AiChat' })
const route = useRoute()
@ -222,7 +193,6 @@ const isMobile = computed(() => {
return common.isMobile() || mode === 'embed'
})
const ParagraphSourceDialogRef = ref()
const aiChatRef = ref()
const quickInputRef = ref()
const scrollDiv = ref()
@ -313,10 +283,6 @@ function showSource(row: any) {
}
}
function openParagraph(row: any, id?: string) {
ParagraphSourceDialogRef.value.open(row, id)
}
function quickProblemHandle(val: string) {
if (!loading.value && props.data?.name) {
handleDebounceClick(val)
@ -371,16 +337,32 @@ function getChartOpenId(chat?: any) {
}
})
} else {
return applicationApi
.postChatOpen(obj)
.then((res) => {
chartOpenId.value = res.data
chatMessage(chat)
})
.catch((res) => {
loading.value = false
return Promise.reject(res)
})
if (isWorkFlow(obj.type)) {
const submitObj = {
work_flow: obj.work_flow
}
return applicationApi
.postWorkflowChatOpen(submitObj)
.then((res) => {
chartOpenId.value = res.data
chatMessage(chat)
})
.catch((res) => {
loading.value = false
return Promise.reject(res)
})
} else {
return applicationApi
.postChatOpen(obj)
.then((res) => {
chartOpenId.value = res.data
chatMessage(chat)
})
.catch((res) => {
loading.value = false
return Promise.reject(res)
})
}
}
}
/**
@ -615,8 +597,10 @@ watch(
onMounted(() => {
setTimeout(() => {
quickInputRef.value.textarea.style.height = '0'
}, 1000)
if (quickInputRef.value) {
quickInputRef.value.textarea.style.height = '0'
}
}, 1500)
})
defineExpose({

View File

@ -2,6 +2,7 @@
<el-avatar
:size="30"
:style="{ background: props.pinyinColor && getAvatarColour(firstUserName) }"
style="flex-shrink: 0"
v-bind="$attrs"
>
<slot> {{ firstUserName }} </slot>
@ -16,6 +17,7 @@ const props = defineProps({
type: String,
default: ''
},
//
pinyinColor: {
type: Boolean,
default: false

View File

@ -1,7 +1,7 @@
<template>
<el-card shadow="never" class="card-add">
<div class="flex-center">
<AppIcon iconName="Plus" class="add-icon p-8" />
<AppIcon iconName="Plus" class="add-icon p-8 border-r-4 layout-bg" />
<span>{{ title }}</span>
</div>
</el-card>
@ -31,9 +31,7 @@ defineProps({
.add-icon {
font-size: 14px;
border-radius: 4px;
border: 1px solid var(--app-border-color-dark);
background: var(--app-layout-bg-color);
margin-right: 12px;
}
&:hover {

View File

@ -21,7 +21,7 @@
</el-card>
<el-card shadow="never" class="card-add box-card" @click="add_card">
<div class="flex-center">
<AppIcon iconName="Plus" class="add-icon p-8" />
<AppIcon iconName="Plus" class="add-icon layout-bg p-8 border-r-4" />
<span>{{ add_msg }}</span>
</div>
</el-card>
@ -133,9 +133,7 @@ defineExpose({
.add-icon {
font-size: 14px;
border-radius: 4px;
border: 1px solid var(--app-border-color-dark);
background: var(--app-layout-bg-color);
margin-right: 12px;
}
&:hover {

View File

@ -5,10 +5,10 @@
<el-progress v-bind="$attrs" :percentage="row[value_field]"></el-progress
></template>
<div>
<el-row v-for="(item, index) in view_card" :key="index"
><el-col :span="6">{{ item.title }}</el-col
><el-col :span="18"> <span class="value" :innerHTML="value_html(item)"> </span></el-col
></el-row>
<el-row v-for="(item, index) in view_card" :key="index">
<el-col :span="6">{{ item.title }}</el-col>
<el-col :span="18"> <span class="value" :innerHTML="value_html(item)"> </span></el-col>
</el-row>
</div>
</el-popover>
</div>

View File

@ -378,13 +378,13 @@ export const iconMap: any = {
'svg',
{
style: { height: '100%', width: '100%' },
viewBox: '0 0 16 16',
viewBox: '0 0 1024 1024',
version: '1.1',
xmlns: 'http://www.w3.org/2000/svg'
},
[
h('path', {
d: 'M3.33333 5.3335V13.3335H10V5.3335H3.33333ZM11.3333 4.66683V14.0742C11.3333 14.4015 11.0548 14.6668 10.7111 14.6668H2.62222C2.27858 14.6668 2 14.4015 2 14.0742V4.59276C2 4.26548 2.27858 4.00016 2.62222 4.00016H10.6667C11.0349 4.00016 11.3333 4.29864 11.3333 4.66683ZM13.8047 1.52876C13.9254 1.6494 14 1.81607 14 2.00016V10.3335C14 10.5176 13.8508 10.6668 13.6667 10.6668H13C12.8159 10.6668 12.6667 10.5176 12.6667 10.3335V2.66683H6.33333C6.14924 2.66683 6 2.51759 6 2.3335V1.66683C6 1.48273 6.14924 1.3335 6.33333 1.3335H13.3333C13.5174 1.3335 13.6841 1.40812 13.8047 1.52876Z',
d: 'M213.333333 341.333333v512h426.666667V341.333333H213.333333z m512-42.666666v602.069333c0 20.949333-17.834667 37.930667-39.808 37.930667H167.808C145.834667 938.666667 128 921.685333 128 900.736V293.973333C128 272.981333 145.834667 256 167.808 256H682.666667a42.666667 42.666667 0 0 1 42.666666 42.666667z m158.165334-200.832A42.538667 42.538667 0 0 1 896 128v533.333333a21.333333 21.333333 0 0 1-21.333333 21.333334h-42.666667a21.333333 21.333333 0 0 1-21.333333-21.333334V170.666667H405.333333a21.333333 21.333333 0 0 1-21.333333-21.333334v-42.666666a21.333333 21.333333 0 0 1 21.333333-21.333334H853.333333c11.776 0 22.442667 4.778667 30.165334 12.501334z',
fill: 'currentColor'
})
]
@ -533,24 +533,18 @@ export const iconMap: any = {
'svg',
{
style: { height: '100%', width: '100%' },
viewBox: '0 0 16 16',
viewBox: '0 0 1024 1024',
version: '1.1',
xmlns: 'http://www.w3.org/2000/svg'
},
[
h('path', {
d: 'M7.99984 3.66667C8.46007 3.66667 8.83317 4.03977 8.83317 4.5C8.83317 4.96023 8.46007 5.33333 7.99984 5.33333C7.5396 5.33333 7.1665 4.96023 7.1665 4.5C7.1665 4.03977 7.5396 3.66667 7.99984 3.66667Z',
d: 'M512 234.666667A53.333333 53.333333 0 1 1 512 341.333333a53.333333 53.333333 0 0 1 0-106.666666zM522.666667 384h-64a21.333333 21.333333 0 0 0-21.333334 21.333333v42.666667a21.333333 21.333333 0 0 0 21.333334 21.333333h21.333333v213.333334H426.666667a21.333333 21.333333 0 0 0-21.333334 21.333333v42.666667a21.333333 21.333333 0 0 0 21.333334 21.333333h192a21.333333 21.333333 0 0 0 21.333333-21.333333v-42.666667a21.333333 21.333333 0 0 0-21.333333-21.333333h-53.333334v-256a42.666667 42.666667 0 0 0-42.666666-42.666667z',
fill: 'currentColor'
}),
h('path', {
d: 'M8.1665 6H7.33317C7.05703 6 6.83317 6.22386 6.83317 6.5V6.83333C6.83317 7.10948 7.05703 7.33333 7.33317 7.33333H7.49984V10.6667H6.83317C6.55703 10.6667 6.33317 10.8905 6.33317 11.1667V11.5C6.33317 11.7761 6.55703 12 6.83317 12H9.49984C9.77598 12 9.99984 11.7761 9.99984 11.5V11.1667C9.99984 10.8905 9.77598 10.6667 9.49984 10.6667H8.83317V6.66667C8.83317 6.29848 8.53469 6 8.1665 6Z',
d: 'M512 981.333333C252.8 981.333333 42.666667 771.2 42.666667 512S252.8 42.666667 512 42.666667s469.333333 210.133333 469.333333 469.333333-210.133333 469.333333-469.333333 469.333333z m0-85.333333a384 384 0 1 0 0-768 384 384 0 0 0 0 768z',
fill: 'currentColor'
}),
h('path', {
d: 'M7.99984 15.3332C3.94984 15.3332 0.666504 12.0498 0.666504 7.99984C0.666504 3.94984 3.94984 0.666504 7.99984 0.666504C12.0498 0.666504 15.3332 3.94984 15.3332 7.99984C15.3332 12.0498 12.0498 15.3332 7.99984 15.3332ZM7.99984 13.9998C11.3135 13.9998 13.9998 11.3135 13.9998 7.99984C13.9998 4.68617 11.3135 1.99984 7.99984 1.99984C4.68617 1.99984 1.99984 4.68617 1.99984 7.99984C1.99984 11.3135 4.68617 13.9998 7.99984 13.9998Z',
fill: 'currentColor',
fillRule: 'evenodd',
clipRule: 'evenodd'
})
]
)
@ -935,5 +929,147 @@ export const iconMap: any = {
)
])
}
}
},
// 'app-fitview' icon: inline SVG built with Vue render functions
// (1024x1024 viewBox); fill: 'currentColor' makes it inherit the
// surrounding text color. Presumably the workflow canvas "fit view"
// control — TODO confirm against the toolbar that references it.
'app-fitview': {
iconReader: () => {
return h('i', [
h(
'svg',
{
style: { height: '100%', width: '100%' },
viewBox: '0 0 1024 1024',
version: '1.1',
xmlns: 'http://www.w3.org/2000/svg'
},
[
h('path', {
d: 'M128 85.333333h192a21.333333 21.333333 0 0 1 21.333333 21.333334v42.666666a21.333333 21.333333 0 0 1-21.333333 21.333334H170.666667v149.333333a21.333333 21.333333 0 0 1-21.333334 21.333333h-42.666666a21.333333 21.333333 0 0 1-21.333334-21.333333V128a42.666667 42.666667 0 0 1 42.666667-42.666667z m768 853.333334h-192a21.333333 21.333333 0 0 1-21.333333-21.333334v-42.666666a21.333333 21.333333 0 0 1 21.333333-21.333334H853.333333v-149.333333a21.333333 21.333333 0 0 1 21.333334-21.333333h42.666666a21.333333 21.333333 0 0 1 21.333334 21.333333V896a42.666667 42.666667 0 0 1-42.666667 42.666667zM85.333333 896v-192a21.333333 21.333333 0 0 1 21.333334-21.333333h42.666666a21.333333 21.333333 0 0 1 21.333334 21.333333V853.333333h149.333333a21.333333 21.333333 0 0 1 21.333333 21.333334v42.666666a21.333333 21.333333 0 0 1-21.333333 21.333334H128a42.666667 42.666667 0 0 1-42.666667-42.666667zM938.666667 128v192a21.333333 21.333333 0 0 1-21.333334 21.333333h-42.666666a21.333333 21.333333 0 0 1-21.333334-21.333333V170.666667h-149.333333a21.333333 21.333333 0 0 1-21.333333-21.333334v-42.666666a21.333333 21.333333 0 0 1 21.333333-21.333334H896a42.666667 42.666667 0 0 1 42.666667 42.666667z',
fill: 'currentColor'
}),
// Second path: a filled circle centered at (512, 512), r = 170.67.
h('path', {
d: 'M512 512m-170.666667 0a170.666667 170.666667 0 1 0 341.333334 0 170.666667 170.666667 0 1 0-341.333334 0Z',
fill: 'currentColor'
})
]
)
])
}
},
// 'app-minify' icon: single-path inline SVG (1024x1024 viewBox) that
// inherits text color via fill: 'currentColor'. Counterpart of
// 'app-magnify' below — presumably the canvas zoom-out / collapse
// control; confirm against the component that renders it.
'app-minify': {
iconReader: () => {
return h('i', [
h(
'svg',
{
style: { height: '100%', width: '100%' },
viewBox: '0 0 1024 1024',
version: '1.1',
xmlns: 'http://www.w3.org/2000/svg'
},
[
h('path', {
d: 'M366.165333 593.749333a21.333333 21.333333 0 0 1 30.208 0l30.165334 30.165334a21.333333 21.333333 0 0 1 0 30.208l-170.752 170.666666H377.173333a21.333333 21.333333 0 0 1 21.333334 21.333334v42.666666a21.333333 21.333333 0 0 1-21.333334 21.333334H156.458667a42.538667 42.538667 0 0 1-42.666667-42.666667v-220.16a21.333333 21.333333 0 0 1 21.333333-21.333333h42.666667a21.333333 21.333333 0 0 1 21.333333 21.333333v113.493333l167.04-167.04z m500.992-480a42.538667 42.538667 0 0 1 42.666667 42.666667v220.16a21.333333 21.333333 0 0 1-21.333333 21.333333h-42.666667a21.333333 21.333333 0 0 1-21.333333-21.333333v-113.493333l-167.04 167.04a21.333333 21.333333 0 0 1-30.165334 0l-30.165333-30.165334a21.333333 21.333333 0 0 1 0-30.165333l170.709333-170.666667h-121.344a21.333333 21.333333 0 0 1-21.333333-21.333333v-42.666667a21.333333 21.333333 0 0 1 21.333333-21.333333h220.672z',
fill: 'currentColor'
})
]
)
])
}
},
// 'app-magnify' icon: single-path inline SVG (1024x1024 viewBox),
// fill inherited from currentColor. Counterpart of 'app-minify'
// above — presumably the canvas zoom-in / expand control; confirm
// against the component that renders it.
'app-magnify': {
iconReader: () => {
return h('i', [
h(
'svg',
{
style: { height: '100%', width: '100%' },
viewBox: '0 0 1024 1024',
version: '1.1',
xmlns: 'http://www.w3.org/2000/svg'
},
[
h('path', {
d: 'M384.341333 597.205333a42.538667 42.538667 0 0 1 42.666667 42.666667v220.16a21.333333 21.333333 0 0 1-21.333333 21.333333h-42.666667a21.333333 21.333333 0 0 1-21.333333-21.333333v-113.493333l-167.04 167.04a21.333333 21.333333 0 0 1-30.165334 0l-30.165333-30.208a21.333333 21.333333 0 0 1 0-30.165334l170.709333-170.666666H163.669333a21.333333 21.333333 0 0 1-21.333333-21.333334v-42.666666a21.333333 21.333333 0 0 1 21.333333-21.333334h220.672zM849.92 110.506667a21.333333 21.333333 0 0 1 30.165333 0l30.165334 30.165333a21.333333 21.333333 0 0 1 0 30.165333l-170.709334 170.666667h121.344a21.333333 21.333333 0 0 1 21.333334 21.333333v42.666667a21.333333 21.333333 0 0 1-21.333334 21.333333h-220.672a42.538667 42.538667 0 0 1-42.666666-42.666666v-220.16a21.333333 21.333333 0 0 1 21.333333-21.333334h42.666667a21.333333 21.333333 0 0 1 21.333333 21.333334v113.493333l167.04-166.997333z',
fill: 'currentColor'
})
]
)
])
}
},
// 'app-play-outlined' icon: outlined play triangle. NOTE: this one
// uses a 14x14 viewBox (the *-outlined icons in this section are
// 14x14, while the app-* canvas icons above are 1024x1024).
'app-play-outlined': {
iconReader: () => {
return h('i', [
h(
'svg',
{
style: { height: '100%', width: '100%' },
viewBox: '0 0 14 14',
version: '1.1',
xmlns: 'http://www.w3.org/2000/svg'
},
[
h('path', {
d: 'M2.63333 1.82346C2.81847 1.72056 3.04484 1.72611 3.22472 1.83795L10.8081 6.55299C10.9793 6.65945 11.0834 6.84677 11.0834 7.04838C11.0834 7.24999 10.9793 7.43731 10.8081 7.54376L3.22472 12.2588C3.04484 12.3707 2.81847 12.3762 2.63333 12.2733C2.44819 12.1704 2.33337 11.9752 2.33337 11.7634V2.33333C2.33337 2.12152 2.44819 1.92635 2.63333 1.82346ZM3.50004 3.38293V10.7138L9.39529 7.04838L3.50004 3.38293Z',
fill: 'currentColor'
})
]
)
])
}
},
// 'app-save-outlined' icon: four paths on a 14x14 viewBox forming a
// floppy-disk style save glyph; all paths inherit currentColor.
'app-save-outlined': {
iconReader: () => {
return h('i', [
h(
'svg',
{
style: { height: '100%', width: '100%' },
viewBox: '0 0 14 14',
version: '1.1',
xmlns: 'http://www.w3.org/2000/svg'
},
[
h('path', {
d: 'M1.16666 2.53734C1.16666 1.78025 1.7804 1.1665 2.53749 1.1665H11.4625C12.2196 1.1665 12.8333 1.78025 12.8333 2.53734V11.4623C12.8333 12.2194 12.2196 12.8332 11.4625 12.8332H2.53749C1.7804 12.8332 1.16666 12.2194 1.16666 11.4623V2.53734ZM2.53749 2.33317C2.42473 2.33317 2.33332 2.42458 2.33332 2.53734V11.4623C2.33332 11.5751 2.42473 11.6665 2.53749 11.6665H11.4625C11.5753 11.6665 11.6667 11.5751 11.6667 11.4623V2.53734C11.6667 2.42457 11.5753 2.33317 11.4625 2.33317H2.53749Z',
fill: 'currentColor'
}),
h('path', {
d: 'M3.79166 1.74984C3.79166 1.42767 4.05282 1.1665 4.37499 1.1665H9.33332C9.65549 1.1665 9.91666 1.42767 9.91666 1.74984V6.99984C9.91666 7.322 9.65549 7.58317 9.33332 7.58317H4.37499C4.05282 7.58317 3.79166 7.322 3.79166 6.99984V1.74984ZM4.95832 2.33317V6.4165H8.74999V2.33317H4.95832Z',
fill: 'currentColor'
}),
h('path', {
d: 'M7.58333 3.2085C7.9055 3.2085 8.16667 3.46966 8.16667 3.79183V4.9585C8.16667 5.28066 7.9055 5.54183 7.58333 5.54183C7.26117 5.54183 7 5.28066 7 4.9585V3.79183C7 3.46966 7.26117 3.2085 7.58333 3.2085Z',
fill: 'currentColor'
}),
h('path', {
d: 'M2.62415 1.74984C2.62415 1.42767 2.88531 1.1665 3.20748 1.1665H10.4996C10.8217 1.1665 11.0829 1.42767 11.0829 1.74984C11.0829 2.072 10.8217 2.33317 10.4996 2.33317H3.20748C2.88531 2.33317 2.62415 2.072 2.62415 1.74984Z',
fill: 'currentColor'
})
]
)
])
}
},
// 'app-reference-outlined' icon: double quotation-mark glyph
// (1024x1024 viewBox, currentColor fill). Presumably used to mark
// cited/reference content in chat answers — TODO confirm at call site.
'app-reference-outlined': {
iconReader: () => {
return h('i', [
h(
'svg',
{
style: { height: '100%', width: '100%' },
viewBox: '0 0 1024 1024',
version: '1.1',
xmlns: 'http://www.w3.org/2000/svg'
},
[
h('path', {
d: 'M121.216 714.368c-7.082667-17.493333-7.466667-83.413333-7.424-104.32 0.341333-142.72 34.048-256.426667 88.32-330.112C262.4 198.229333 351.701333 161.024 460.8 172.8c7.893333 0.853333 11.946667 7.338667 10.581333 16.981333l-7.381333 51.285334c-1.749333 12.202667-9.813333 12.885333-17.621333 12.202666-138.709333-11.946667-232.576 84.053333-245.76 296.704a165.632 165.632 0 0 1 83.754666-22.528c91.050667 0 164.906667 72.96 164.906667 162.944C449.28 780.373333 375.466667 853.333333 284.373333 853.333333c-82.858667 0-151.424-60.330667-163.157333-138.965333z m438.570667 0c-7.082667-17.493333-7.509333-83.413333-7.466667-104.32 0.426667-142.72 34.090667-256.426667 88.405333-330.112 60.202667-81.706667 149.504-118.912 258.645334-107.136 7.893333 0.853333 11.946667 7.338667 10.581333 16.981333l-7.381333 51.285334c-1.749333 12.202667-9.813333 12.885333-17.621334 12.202666-138.752-11.946667-232.576 84.053333-245.76 296.704a165.632 165.632 0 0 1 83.712-22.528c91.093333 0 164.906667 72.96 164.906667 162.944 0 90.026667-73.813333 162.944-164.906667 162.944-82.773333 0-151.381333-60.330667-163.114666-138.965333z',
fill: 'currentColor'
})
]
)
])
}
},
}

View File

@ -12,12 +12,13 @@ import AppTable from './app-table/index.vue'
import ReadWrite from './read-write/index.vue'
import TagEllipsis from './tag-ellipsis/index.vue'
import CommonList from './common-list/index.vue'
import MarkdownRenderer from './markdown-renderer/index.vue'
import dynamicsForm from './dynamics-form'
import CardCheckbox from './card-checkbox/index.vue'
import AiChat from './ai-chat/index.vue'
import InfiniteScroll from './infinite-scroll/index.vue'
import AutoTooltip from './auto-tooltip/index.vue'
import MdEditor from './markdown/MdEditor.vue'
import MdPreview from './markdown/MdPreview.vue'
export default {
install(app: App) {
@ -35,10 +36,11 @@ export default {
app.component(TagEllipsis.name, TagEllipsis)
app.component(CommonList.name, CommonList)
app.use(dynamicsForm)
app.component(MarkdownRenderer.name, MarkdownRenderer)
app.component(CardCheckbox.name, CardCheckbox)
app.component(AiChat.name, AiChat)
app.component(InfiniteScroll.name, InfiniteScroll)
app.component(AutoTooltip.name, AutoTooltip)
app.component(MdPreview.name, MdPreview)
app.component(MdEditor.name, MdEditor)
}
}

View File

@ -1,11 +1,11 @@
<template>
<div class="content-container">
<div class="content-container border-r-4">
<div class="content-container__header flex align-center w-full" v-if="slots.header || header">
<slot name="backButton">
<back-button :to="backTo" v-if="showBack"></back-button>
</slot>
<slot name="header">
<h3>{{ header }}</h3>
<h4>{{ header }}</h4>
</slot>
</div>
<el-scrollbar>
@ -41,7 +41,6 @@ const showBack = computed(() => {
}
.content-container__main {
background-color: var(--app-view-bg-color);
border-radius: 4px;
box-sizing: border-box;
min-width: 700px;
}

View File

@ -1,10 +1,7 @@
<template>
<div class="login-form-container">
<div class="login-title">
<div class="title flex-center mb-8">
<div class="logo mr-4"></div>
<div class="app-logo-font">{{ title || defaultTitle }}</div>
</div>
<div class="logo text-center"><img src="@/assets/MaxKB-logo.svg" height="45" /></div>
<div class="sub-title text-center" v-if="subTitle">
<el-text type="info">{{ subTitle }}</el-text>
</div>
@ -28,16 +25,6 @@ defineProps({
.login-title {
margin-bottom: 32px;
.title {
font-size: 32px;
.logo {
background-image: url('@/assets/logo.png');
background-size: 100% 100%;
width: 45px;
height: 45px;
}
}
.sub-title {
font-size: 16px;
}

View File

@ -1,56 +0,0 @@
<!--
  Legacy wrapper around md-editor-v3's MdEditor.
  Forwards all attributes/props via v-bind="$attrs", passes the
  `defFooters` slot through, and registers concrete instances for
  every optional editor extension so they are bundled with the app.
-->
<template>
<MdEditor noIconfont v-bind="$attrs">
<!-- Pass the defFooters slot straight through to md-editor-v3 -->
<template #defFooters>
<slot name="defFooters"> </slot>
</template>
</MdEditor>
</template>
<script setup lang="ts">
import { MdEditor, config } from 'md-editor-v3'
import 'md-editor-v3/lib/style.css'
import screenfull from 'screenfull'
import katex from 'katex'
import 'katex/dist/katex.min.css'
import Cropper from 'cropperjs'
import 'cropperjs/dist/cropper.css'
import mermaid from 'mermaid'
import highlight from 'highlight.js'
import 'highlight.js/styles/atom-one-dark.css'
// >=3.0  (prettier 3.x exposes parsers under 'prettier/plugins/*')
import * as prettier from 'prettier'
import parserMarkdown from 'prettier/plugins/markdown'
// https://at.alicdn.com/t/c/font_2605852_u82y61ve02.js
import './assets/font_prouiefeic.js'
// Register local instances for md-editor-v3's optional extensions
// (formatting, syntax highlight, fullscreen, math, image crop, diagrams).
config({
editorExtensions: {
prettier: {
prettierInstance: prettier,
parserMarkdownInstance: parserMarkdown
},
highlight: {
instance: highlight
},
screenfull: {
instance: screenfull
},
katex: {
instance: katex
},
cropper: {
instance: Cropper
},
mermaid: {
instance: mermaid
}
}
})
</script>

View File

@ -1,66 +0,0 @@
<!--
  Legacy MarkdownRenderer: renders a markdown `source` string to HTML
  with markdown-it plus a set of plugins, and optionally appends an
  animated ellipsis "loading" indicator (used while content streams in).
-->
<template>
<div v-html="inner" />
</template>
<script setup lang="ts">
import { computed } from 'vue'
import MarkdownIt from 'markdown-it'
import MarkdownItAbbr from 'markdown-it-abbr'
import MarkdownItAnchor from 'markdown-it-anchor'
import MarkdownItFootnote from 'markdown-it-footnote'
import MarkdownItHighlightjs from 'markdown-it-highlightjs'
import MarkdownItTasklists from 'markdown-it-task-lists'
import MarkdownItSub from 'markdown-it-sub'
import MarkdownItSup from 'markdown-it-sup'
import MarkdownItTOC from 'markdown-it-toc-done-right'
defineOptions({ name: 'MarkdownRenderer' })
const markdownIt = MarkdownIt({
html: true, // allow raw HTML tags in the markdown source
typographer: true, // enable smart quotes / typographic replacements
linkify: true // auto-link bare URLs
})
markdownIt
.use(MarkdownItHighlightjs)
.use(MarkdownItTasklists)
.use(MarkdownItAbbr)
.use(MarkdownItAnchor)
.use(MarkdownItFootnote)
.use(MarkdownItSub)
.use(MarkdownItSup)
.use(MarkdownItTOC)
// source: markdown text to render; inner_suffix: when true, append a
// trailing animated-ellipsis span (streaming/"typing" indicator).
const props = withDefaults(defineProps<{ source?: string; inner_suffix?: boolean }>(), {
source: '',
inner_suffix: false
})
// Unique sentinel token appended before rendering so the loading span
// can be injected into the *rendered* HTML after markdown-it runs
// (timestamp makes a collision with real content unlikely).
const suffix = '{inner_suffix_' + new Date().getTime() + '}'
const inner = computed(() => {
if (props.inner_suffix) {
return markdownIt.render(props.source + suffix).replace(suffix, "<span class='loading'></span>")
} else {
return markdownIt.render(props.source)
}
})
</script>
<style>
/* Animated trailing ellipsis used as the streaming indicator. */
.loading:after {
overflow: hidden;
display: inline-block;
vertical-align: bottom;
animation: ellipsis 0.5s infinite;
content: '\2026'; /* ascii code for the ellipsis character */
}
@keyframes ellipsis {
from {
width: 2px;
}
to {
width: 20px;
}
}
</style>

View File

@ -0,0 +1,14 @@
<!--
  Thin wrapper over md-editor-v3's MdEditor: disables the CDN iconfont
  and the prettier toolbar (noIconfont / noPrettier), loads a local
  icon font instead, forwards all attrs and the defFooters slot.
-->
<template>
<MdEditor noIconfont noPrettier v-bind="$attrs">
<!-- Pass the defFooters slot straight through to md-editor-v3 -->
<template #defFooters>
<slot name="defFooters"> </slot>
</template>
</MdEditor>
</template>
<script setup lang="ts">
import { MdEditor } from 'md-editor-v3'
// Local icon font bundle so no external iconfont request is needed.
import './assets/markdown-iconfont.js'
defineOptions({ name: 'MdEditor' })
</script>

View File

@ -0,0 +1,8 @@
<!--
  Thin wrapper over md-editor-v3's MdPreview (read-only markdown view):
  disables the CDN iconfont and prettier integration, forwards all
  attrs/props via v-bind="$attrs".
-->
<template>
<MdPreview noIconfont noPrettier v-bind="$attrs" />
</template>
<script setup lang="ts">
import { MdPreview } from 'md-editor-v3'
defineOptions({ name: 'MdPreview' })
</script>

View File

@ -1,5 +1,6 @@
<template>
<MdPreview
noIconfont
ref="editorRef"
editorId="preview-only"
:modelValue="item"
@ -10,7 +11,7 @@
</template>
<script setup lang="ts">
import { computed, ref } from 'vue'
import { MdPreview, config } from 'md-editor-v3'
import { config } from 'md-editor-v3'
config({
markdownItConfig(md) {
md.renderer.rules.link_open = (tokens, idx, options, env, self) => {

View File

@ -0,0 +1,7 @@
import type { App } from 'vue'
import { ClickOutside } from 'element-plus'

/**
 * Vue plugin exposing Element Plus's ClickOutside directive globally,
 * so templates can use `v-click-outside` without a local import.
 */
const clickOutsidePlugin = {
  install(app: App) {
    app.directive('click-outside', ClickOutside)
  }
}

export default clickOutsidePlugin

View File

@ -0,0 +1,31 @@
import type { App } from 'vue'

/**
 * Vue plugin registering a `v-resize` directive.
 *
 * Every 500 ms the directive reads the element's computed style and,
 * when width or height differs from the previously observed values,
 * invokes the bound callback with `{ width, height }` (parsed as
 * numbers). The polling interval is stored on the element and cleared
 * when the element unmounts.
 */
export default {
  install: (app: App) => {
    app.directive('resize', {
      created(el: any, binding: any) {
        // Last observed size, kept as raw style strings for comparison.
        let lastWidth = ''
        let lastHeight = ''
        const poll = () => {
          const style = (document.defaultView as any).getComputedStyle(el)
          // Fire the callback only when the size actually changed.
          if (lastWidth !== style.width || lastHeight !== style.height) {
            binding.value({
              width: parseFloat(style.width),
              height: parseFloat(style.height)
            })
          }
          lastWidth = style.width
          lastHeight = style.height
        }
        // Store the timer handle on the element so unmounted() can clear it.
        ;(el as any).__vueDomResize__ = setInterval(poll, 500)
      },
      unmounted(el: any) {
        clearInterval((el as any).__vueDomResize__)
      }
    })
  }
}

View File

@ -0,0 +1,5 @@
// Knowledge-base retrieval modes. Enum values are the Chinese display
// labels rendered in the UI — they are runtime strings, do not translate.
export enum SearchMode {
  embedding = '向量检索', // vector (embedding) search
  keywords = '全文检索', // full-text / keyword search
  blend = '混合检索' // hybrid of the two
}

9
ui/src/enums/workflow.ts Normal file
View File

@ -0,0 +1,9 @@
// Workflow node type identifiers. Values must match the `type` field of
// the serialized workflow nodes (e.g. "base-node", "start-node",
// "search-dataset-node", "condition-node" in the application JSON).
export enum WorkflowType {
  Base = 'base-node', // application base-info node
  Start = 'start-node', // workflow entry node
  AiChat = 'ai-chat-node',
  SearchDataset = 'search-dataset-node',
  Question = 'question-node',
  Condition = 'condition-node',
  Reply = 'reply-node'
}

View File

@ -90,10 +90,7 @@
</el-dropdown-menu>
<div class="breadcrumb__footer border-t" style="padding: 8px 11px; min-width: 200px">
<template v-if="isApplication">
<div
class="w-full text-left cursor"
@click="router.push({ path: '/application/create' })"
>
<div class="w-full text-left cursor" @click="openCreateDialog">
<el-button link>
<el-icon class="mr-4"><Plus /></el-icon>
</el-button>
@ -115,12 +112,14 @@
</template>
</el-dropdown>
</div>
<CreateApplicationDialog ref="CreateApplicationDialogRef" />
</template>
<script setup lang="ts">
import { ref, onMounted, computed } from 'vue'
import { onBeforeRouteLeave, useRouter, useRoute } from 'vue-router'
import { isAppIcon } from '@/utils/application'
import CreateApplicationDialog from '@/views/application/component/CreateApplicationDialog.vue'
import { isAppIcon, isWorkFlow } from '@/utils/application'
import useStore from '@/stores'
const { common, dataset, application } = useStore()
const route = useRoute()
@ -134,6 +133,7 @@ onBeforeRouteLeave((to, from) => {
common.saveBreadcrumb(null)
})
const CreateApplicationDialogRef = ref()
const list = ref<any[]>([])
const loading = ref(false)
@ -145,12 +145,6 @@ const current = computed(() => {
} = route
return list.value?.filter((v) => v.id === id)?.[0]
})
// const current = computed(() => {
// const {
// params: { id }
// } = route
// return list.value?.filter((v) => v.id === id)?.[0]?.type
// })
const isApplication = computed(() => {
const { meta } = route as any
@ -160,10 +154,30 @@ const isDataset = computed(() => {
const { meta } = route as any
return meta?.activeMenu.includes('dataset')
})
function openCreateDialog() {
CreateApplicationDialogRef.value.open()
}
function changeMenu(id: string) {
const lastMatched = route.matched[route.matched.length - 1]
if (lastMatched) {
router.push({ name: lastMatched.name, params: { id: id } })
if (isDataset.value) {
router.push({ name: lastMatched.name, params: { id: id } })
} else if (isApplication.value) {
const type = list.value?.filter((v) => v.id === id)?.[0]?.type
if (
isWorkFlow(type) &&
(lastMatched.name === 'AppSetting' || lastMatched.name === 'AppHitTest')
) {
router.push({ path: `/application/${id}/${type}/overview` })
} else {
router.push({
name: lastMatched.name,
params: { id: id, type: type }
})
}
}
}
}

View File

@ -1,6 +1,11 @@
<template>
<div v-if="!menu.meta || !menu.meta.hidden" class="sidebar-item">
<el-menu-item ref="subMenu" :index="menu.path" popper-class="sidebar-popper">
<div v-if="(!menu.meta || !menu.meta.hidden) && showMenu()" class="sidebar-item">
<el-menu-item
ref="subMenu"
:index="menu.path"
popper-class="sidebar-popper"
@click="clickHandle(menu)"
>
<template #title>
<AppIcon v-if="menu.meta && menu.meta.icon" :iconName="menuIcon" class="sidebar-icon" />
<span v-if="menu.meta && menu.meta.title">{{ menu.meta.title }}</span>
@ -11,12 +16,32 @@
<script setup lang="ts">
import { computed } from 'vue'
import { type RouteRecordRaw } from 'vue-router'
import { useRouter, useRoute, type RouteRecordRaw } from 'vue-router'
import { isWorkFlow } from '@/utils/application'
const props = defineProps<{
menu: RouteRecordRaw
activeMenu: any
}>()
const router = useRouter()
const route = useRoute()
const {
params: { id, type }
} = route as any
function showMenu() {
if (isWorkFlow(type)) {
return props.menu.name !== 'AppHitTest'
} else {
return true
}
}
function clickHandle(item: any) {
if (isWorkFlow(type) && item.name === 'AppSetting') {
router.push({ path: `/application/${id}/workflow` })
}
}
const menuIcon = computed(() => {
if (props.activeMenu === props.menu.path) {
return props.menu.meta?.iconActive || props.menu?.meta?.icon

View File

@ -20,7 +20,7 @@
<script setup lang="ts">
import { computed } from 'vue'
import { useRoute } from 'vue-router'
import { useRouter, useRoute } from 'vue-router'
import { getChildRouteListByPathAndName } from '@/router/index'
import SidebarItem from './SidebarItem.vue'
import AppBreadcrumb from './../breadcrumb/index.vue'

View File

@ -1,11 +1,8 @@
<template>
<el-dialog v-model="aboutDialogVisible" class="about-dialog">
<el-dialog v-model="aboutDialogVisible" class="about-dialog border-r-4">
<template #header="{ titleId, titleClass }">
<div class="flex-center">
<div class="logo mr-4"></div>
<div class="app-logo-font about-title" :id="titleId" :class="titleClass">
{{ defaultTitle }}
</div>
<div class="logo flex-center" :id="titleId" :class="titleClass">
<img src="@/assets/MaxKB-logo.svg" height="59" />
</div>
</template>
<div class="about-ui">
@ -57,27 +54,18 @@ defineExpose({ open })
<style lang="scss" scope>
.about-dialog {
padding: 0 0 24px 0;
border-radius: 4px;
width: 600px;
font-weight: 400;
.el-dialog__header {
background: var(--app-header-bg-color);
margin-right: 0;
height: 140px;
border-radius: 4px 4px 0 0;
box-sizing: border-box;
border-radius: 4px 4px 0 0;
}
.el-dialog__title {
line-height: 140px;
}
.about-title {
font-size: 40px;
}
.logo {
background-image: url('@/assets/logo.png');
background-size: 100% 100%;
width: 59px;
height: 59px;
height: 140px;
box-sizing: border-box;
}
.about-ui {
width: 360px;

Some files were not shown because too many files have changed in this diff Show More