Mirror of https://github.com/1Panel-dev/MaxKB.git

feat: application workflow (#3415)

parent 8468d08eea
commit 276378a9ae
@@ -10,26 +10,24 @@ from .ai_chat_step_node import *
 from .application_node import BaseApplicationNode
 from .condition_node import *
 from .direct_reply_node import *
+from .document_extract_node import *
 from .form_node import *
-from .function_lib_node import *
-from .function_node import *
+from .image_generate_step_node import *
+from .image_understand_step_node import *
+from .mcp_node import BaseMcpNode
 from .question_node import *
 from .reranker_node import *
-
-from .document_extract_node import *
-from .image_understand_step_node import *
-from .image_generate_step_node import *
-
-from .search_dataset_node import *
+from .search_knowledge_node import *
 from .speech_to_text_step_node import BaseSpeechToTextNode
 from .start_node import *
 from .text_to_speech_step_node.impl.base_text_to_speech_node import BaseTextToSpeechNode
+from .tool_lib_node import *
+from .tool_node import *
 from .variable_assign_node import BaseVariableAssignNode
-from .mcp_node import BaseMcpNode

-node_list = [BaseStartStepNode, BaseChatNode, BaseSearchDatasetNode, BaseQuestionNode,
+node_list = [BaseStartStepNode, BaseChatNode, BaseSearchKnowledgeNode, BaseQuestionNode,
              BaseConditionNode, BaseReplyNode,
-             BaseFunctionNodeNode, BaseFunctionLibNodeNode, BaseRerankerNode, BaseApplicationNode,
+             BaseToolNodeNode, BaseToolLibNodeNode, BaseRerankerNode, BaseApplicationNode,
              BaseDocumentExtractNode,
              BaseImageUnderstandNode, BaseFormNode, BaseSpeechToTextNode, BaseTextToSpeechNode,
              BaseImageGenerateNode, BaseVariableAssignNode, BaseMcpNode]
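node_list is the registry that maps a workflow node's type string to its implementation class; every class renamed in this commit declares such a string (e.g. 'search-knowledge-node', 'tool-node'). A minimal sketch of how that lookup can work, assuming a simple linear scan; get_node_class is a hypothetical helper, not MaxKB's actual engine code:

# Hypothetical dispatch over a node registry like node_list above.
class INode:
    type: str = ''

class BaseSearchKnowledgeNode(INode):
    type = 'search-knowledge-node'

class BaseToolNodeNode(INode):
    type = 'tool-node'

node_list = [BaseSearchKnowledgeNode, BaseToolNodeNode]

def get_node_class(node_type: str) -> type:
    """Return the registered class whose `type` matches the given string."""
    for node_class in node_list:
        if node_class.type == node_type:
            return node_class
    raise ValueError(f'unknown workflow node type: {node_type}')

assert get_node_class('tool-node') is BaseToolNodeNode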
@@ -35,9 +35,9 @@ class DatasetSettingSerializer(serializers.Serializer):

 class SearchDatasetStepNodeSerializer(serializers.Serializer):
     # List of knowledge base IDs to query
-    dataset_id_list = serializers.ListField(required=True, child=serializers.UUIDField(required=True),
-                                            label=_("Dataset id list"))
-    dataset_setting = DatasetSettingSerializer(required=True)
+    knowledge_id_list = serializers.ListField(required=True, child=serializers.UUIDField(required=True),
+                                              label=_("Dataset id list"))
+    knowledge_setting = DatasetSettingSerializer(required=True)

     question_reference_address = serializers.ListField(required=True)
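The serializer above validates a search node's parameters before execution. A sketch of how it might be exercised; the payload values are invented, and the knowledge_setting keys (top_n, similarity, search_mode) are the ones execute() reads later in this diff:

# Hypothetical payload for the renamed fields; only the field names are
# taken from the serializer above.
import uuid

params = {
    'knowledge_id_list': [str(uuid.uuid4())],
    'knowledge_setting': {'top_n': 3, 'similarity': 0.6, 'search_mode': 'embedding'},
    'question_reference_address': ['start-node', 'question'],
}
serializer = SearchDatasetStepNodeSerializer(data=params)
serializer.is_valid(raise_exception=True)  # a payload still using dataset_id_list now fails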
@@ -52,8 +52,8 @@ def get_paragraph_list(chat_record, node_id):
         'paragraph_list', []) is not None and key == node_id])


-class ISearchDatasetStepNode(INode):
-    type = 'search-dataset-node'
+class ISearchKnowledgeStepNode(INode):
+    type = 'search-knowledge-node'

     def get_node_params_serializer_class(self) -> Type[serializers.Serializer]:
         return SearchDatasetStepNodeSerializer
@@ -6,4 +6,4 @@
 @date:2024/6/11 15:35
 @desc:
 """
-from .base_search_dataset_node import BaseSearchDatasetNode
+from .base_search_knowledge_node import BaseSearchKnowledgeNode
@@ -13,7 +13,7 @@ from django.db import connection
 from django.db.models import QuerySet

 from application.flow.i_step_node import NodeResult
-from application.flow.step_node.search_dataset_node.i_search_dataset_node import ISearchDatasetStepNode
+from application.flow.step_node.search_knowledge_node.i_search_knowledge_node import ISearchKnowledgeStepNode
 from common.config.embedding_config import VectorStore
 from common.db.search import native_search
 from common.utils.common import get_file_content
@@ -44,7 +44,7 @@ def reset_title(title):
     return f"#### {title}\n"


-class BaseSearchDatasetNode(ISearchDatasetStepNode):
+class BaseSearchKnowledgeNode(ISearchKnowledgeStepNode):
     def save_context(self, details, workflow_manage):
         result = details.get('paragraph_list', [])
         dataset_setting = self.node_params_serializer.data.get('dataset_setting')
@@ -60,24 +60,25 @@ class BaseSearchDatasetNode(ISearchDatasetStepNode):
             result])[0:dataset_setting.get('max_paragraph_char_number', 5000)]
         self.context['directly_return'] = directly_return

-    def execute(self, dataset_id_list, dataset_setting, question,
+    def execute(self, knowledge_id_list, knowledge_setting, question,
                 exclude_paragraph_id_list=None,
                 **kwargs) -> NodeResult:
         self.context['question'] = question
-        if len(dataset_id_list) == 0:
+        if len(knowledge_id_list) == 0:
             return get_none_result(question)
-        model_id = get_embedding_id(dataset_id_list)
+        model_id = get_embedding_id(knowledge_id_list)
         workspace_id = self.workflow_manage.get_body().get('workspace_id')
         embedding_model = get_model_instance_by_model_workspace_id(model_id, workspace_id)
         embedding_value = embedding_model.embed_query(question)
         vector = VectorStore.get_embedding_vector()
         exclude_document_id_list = [str(document.id) for document in
                                     QuerySet(Document).filter(
-                                        dataset_id__in=dataset_id_list,
+                                        knowledge_id__in=knowledge_id_list,
                                         is_active=False)]
-        embedding_list = vector.query(question, embedding_value, dataset_id_list, exclude_document_id_list,
-                                      exclude_paragraph_id_list, True, dataset_setting.get('top_n'),
-                                      dataset_setting.get('similarity'), SearchMode(dataset_setting.get('search_mode')))
+        embedding_list = vector.query(question, embedding_value, knowledge_id_list, exclude_document_id_list,
+                                      exclude_paragraph_id_list, True, knowledge_setting.get('top_n'),
+                                      knowledge_setting.get('similarity'),
+                                      SearchMode(knowledge_setting.get('search_mode')))
         # Manually close the database connection
         connection.close()
         if embedding_list is None:
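After the rename, execute() reads its retrieval options from knowledge_setting instead of dataset_setting. The keys below are the ones consumed above; the values are illustrative, not defaults (except 5000, which is the fallback used in save_context):

# Illustrative settings dict for the renamed execute() signature.
knowledge_setting = {
    'top_n': 3,                         # number of paragraphs to retrieve
    'similarity': 0.6,                  # minimum similarity threshold
    'search_mode': 'embedding',         # wrapped in SearchMode(...) by execute()
    'max_paragraph_char_number': 5000,  # fallback value used when joining content
}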
@@ -89,7 +90,7 @@ class BaseSearchDatasetNode(ISearchDatasetStepNode):
             'is_hit_handling_method_list': [row for row in result if row.get('is_hit_handling_method')],
             'data': '\n'.join(
                 [f"{reset_title(paragraph.get('title', ''))}{paragraph.get('content')}" for paragraph in
-                 result])[0:dataset_setting.get('max_paragraph_char_number', 5000)],
+                 result])[0:knowledge_setting.get('max_paragraph_char_number', 5000)],
             'directly_return': '\n'.join(
                 [paragraph.get('content') for paragraph in
                  result if
@@ -112,7 +113,7 @@ class BaseSearchDatasetNode(ISearchDatasetStepNode):
             'update_time': paragraph.get('update_time').strftime("%Y-%m-%d %H:%M:%S"),
             'create_time': paragraph.get('create_time').strftime("%Y-%m-%d %H:%M:%S"),
             'id': str(paragraph.get('id')),
-            'dataset_id': str(paragraph.get('dataset_id')),
+            'knowledge_id': str(paragraph.get('knowledge_id')),
             'document_id': str(paragraph.get('document_id'))
         }

@@ -124,7 +125,7 @@ class BaseSearchDatasetNode(ISearchDatasetStepNode):
         paragraph_list = native_search(QuerySet(Paragraph).filter(id__in=paragraph_id_list),
                                        get_file_content(
                                            os.path.join(PROJECT_DIR, "apps", "application", 'sql',
-                                                        'list_dataset_paragraph_by_paragraph_id.sql')),
+                                                        'list_knowledge_paragraph_by_paragraph_id.sql')),
                                        with_table_name=True)
         # If the vector store contains dirty data, delete it directly
         if len(paragraph_list) != len(paragraph_id_list):
@@ -9,13 +9,12 @@
 from typing import Type

 from django.db.models import QuerySet
+from django.utils.translation import gettext_lazy as _
 from rest_framework import serializers

 from application.flow.i_step_node import INode, NodeResult
 from common.field.common import ObjectField
+from tools.models.tool import Tool
-from django.utils.translation import gettext_lazy as _


 class InputField(serializers.Serializer):
@@ -36,8 +35,8 @@ class FunctionLibNodeParamsSerializer(serializers.Serializer):
             raise Exception(_('The function has been deleted'))


-class IFunctionLibNode(INode):
-    type = 'function-lib-node'
+class IToolLibNode(INode):
+    type = 'tool-lib-node'

     def get_node_params_serializer_class(self) -> Type[serializers.Serializer]:
         return FunctionLibNodeParamsSerializer
@@ -6,4 +6,4 @@
 @date:2024/8/8 17:48
 @desc:
 """
-from .base_function_lib_node import BaseFunctionLibNodeNode
+from .base_tool_lib_node import BaseToolLibNodeNode
@@ -14,7 +14,7 @@ from django.db.models import QuerySet
 from django.utils.translation import gettext as _

 from application.flow.i_step_node import NodeResult
-from application.flow.step_node.function_lib_node.i_function_lib_node import IFunctionLibNode
+from application.flow.step_node.tool_lib_node.i_tool_lib_node import IToolLibNode
 from common.exception.app_exception import AppApiException
 from common.utils.function_code import FunctionExecutor
 from common.utils.rsa_util import rsa_long_decrypt
@@ -110,7 +110,7 @@ def valid_function(function_lib, user_id):
         raise Exception(_('Function {name} is unavailable').format(name=function_lib.name))


-class BaseFunctionLibNodeNode(IFunctionLibNode):
+class BaseToolLibNodeNode(IToolLibNode):
     def save_context(self, details, workflow_manage):
         self.context['result'] = details.get('result')
         if self.node_params.get('is_result'):
@@ -51,8 +51,8 @@ class FunctionNodeParamsSerializer(serializers.Serializer):
         super().is_valid(raise_exception=True)


-class IFunctionNode(INode):
-    type = 'function-node'
+class IToolNode(INode):
+    type = 'tool-node'

     def get_node_params_serializer_class(self) -> Type[serializers.Serializer]:
         return FunctionNodeParamsSerializer
@@ -6,4 +6,4 @@
 @date:2024/8/13 11:19
 @desc:
 """
-from .base_function_node import BaseFunctionNodeNode
+from .base_tool_node import BaseToolNodeNode
@@ -11,7 +11,7 @@ import time
 from typing import Dict

 from application.flow.i_step_node import NodeResult
-from application.flow.step_node.function_node.i_function_node import IFunctionNode
+from application.flow.step_node.tool_node.i_tool_node import IToolNode
 from common.utils.function_code import FunctionExecutor
 from maxkb.const import CONFIG

@@ -79,7 +79,7 @@ def convert_value(name: str, value, _type, is_required, source, node):
         raise Exception(f'Field: {name} type: {_type} value: {value} has the wrong type')


-class BaseFunctionNodeNode(IFunctionNode):
+class BaseToolNodeNode(IToolNode):
     def save_context(self, details, workflow_manage):
         self.context['result'] = details.get('result')
         if self.node_params.get('is_result', False):
@@ -720,14 +720,48 @@ class ApplicationOperateSerializer(serializers.Serializer):
         self.is_valid()
         application_id = self.data.get("application_id")
         application = QuerySet(Application).get(id=application_id)
-        knowledge_list = self.list_knowledge(with_valid=False)
-        mapping_knowledge_id_list = [str(akm.knowledge_id) for akm in
-                                     QuerySet(ApplicationKnowledgeMapping).filter(application_id=application_id)]
-        knowledge_id_list = [d.get('id') for d in
-                             list(filter(lambda row: mapping_knowledge_id_list.__contains__(row.get('id')),
-                                         knowledge_list))]
+        available_knowledge_list = self.list_knowledge(with_valid=False)
+        available_knowledge_dict = {knowledge.get('id'): knowledge for knowledge in available_knowledge_list}
+        knowledge_list = []
+        knowledge_id_list = []
+        if application.type == 'SIMPLE':
+            mapping_knowledge_list = QuerySet(ApplicationKnowledgeMapping).filter(application_id=application_id)
+            knowledge_list = [available_knowledge_dict.get(str(km.knowledge_id)) for km in mapping_knowledge_list if
+                              available_knowledge_dict.__contains__(str(km.knowledge_id))]
+            knowledge_id_list = [k.get('id') for k in knowledge_list]
+        else:
+            self.update_knowledge_node(application.work_flow, available_knowledge_dict)

         return {**ApplicationSerializerModel(application).data,
-                'knowledge_id_list': knowledge_id_list}
+                'knowledge_id_list': knowledge_id_list,
+                'knowledge_list': knowledge_list}
+
+    @staticmethod
+    def get_search_node(work_flow):
+        if work_flow is None:
+            return []
+        return [node for node in work_flow.get('nodes', []) if node.get('type', '') == 'search-knowledge-node']
+
+    def update_knowledge_node(self, workflow, available_knowledge_dict):
+        """
+        Update the data of knowledge-retrieval nodes.
+        Defines all_knowledge_id_list: all associated knowledge bases
+        dataset_id_list: the associated knowledge bases visible to the current user
+        knowledge_list: user
+        @param workflow: knowledge base
+        @param available_knowledge_dict: knowledge bases available to the current user
+        @return:
+        """
+        knowledge_node_list = self.get_search_node(workflow)
+        for search_node in knowledge_node_list:
+            node_data = search_node.get('properties', {}).get('node_data', {})
+            # All knowledge bases associated with the current node
+            knowledge_id_list = node_data.get('knowledge_id_list', [])
+            knowledge_list = [available_knowledge_dict.get(knowledge_id) for knowledge_id in knowledge_id_list if
+                              available_knowledge_dict.__contains__(knowledge_id)]
+            node_data['all_knowledge_id_list'] = knowledge_id_list
+            node_data['knowledge_id_list'] = [knowledge.get('id') for knowledge in knowledge_list]
+            node_data['knowledge_list'] = knowledge_list

     def list_knowledge(self, with_valid=True):
         if with_valid:
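To make the new workflow branch concrete, a hypothetical before/after of what update_knowledge_node does to a search node; the data shapes follow the code above, and the IDs are invented:

# Invented example; the resulting node_data follows update_knowledge_node above.
workflow = {
    'nodes': [{
        'type': 'search-knowledge-node',
        'properties': {'node_data': {'knowledge_id_list': ['k1', 'k2']}},
    }]
}
available_knowledge_dict = {'k1': {'id': 'k1', 'name': 'Product docs'}}  # user cannot see 'k2'

# After update_knowledge_node(workflow, available_knowledge_dict), node_data holds:
#   'all_knowledge_id_list': ['k1', 'k2']   # every associated knowledge base is remembered
#   'knowledge_id_list':     ['k1']          # only the knowledge bases the user can see
#   'knowledge_list':        [{'id': 'k1', 'name': 'Product docs'}]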
@@ -744,8 +778,9 @@ class ApplicationOperateSerializer(serializers.Serializer):
         # Combined query
         query = white_list_condition | default_condition
         inner = QuerySet(knowledge_workspace_authorization_model).filter(query)
-        share_knowledge_list = [KnowledgeModelSerializer(k).data for k in QuerySet(Knowledge).filter(id__in=inner)]
-        workspace_knowledge_list = [k for k in KnowledgeSerializer.Query(
+        share_knowledge_list = [{**KnowledgeModelSerializer(k).data, 'scope': 'SHARED'} for k in
+                                QuerySet(Knowledge).filter(id__in=inner)]
+        workspace_knowledge_list = [{**k, 'scope': 'WORKSPACE'} for k in KnowledgeSerializer.Query(
             data={
                 'folder_id': 'default',
                 'workspace_id': workspace_id,
@@ -0,0 +1,11 @@
+SELECT
+    paragraph.*,
+    knowledge."name" AS "knowledge_name",
+    "document"."name" AS "document_name",
+    "document"."meta" AS "meta",
+    "document"."hit_handling_method" AS "hit_handling_method",
+    "document"."directly_return_similarity" as "directly_return_similarity"
+FROM
+    paragraph paragraph
+    LEFT JOIN knowledge knowledge ON knowledge."id" = paragraph.knowledge_id
+    LEFT JOIN "document" "document" ON "document"."id" = paragraph.document_id
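Each row of this query is a paragraph joined with metadata from its knowledge base and document. A rough sketch of the row shape; the joined column names come from the SELECT list above, while the paragraph fields and all values are assumptions based on how rows are read elsewhere in this diff:

# Assumed row shape; values are placeholders, not real data.
row = {
    'id': '<paragraph uuid>',
    'title': 'Install guide',
    'content': '...',
    'knowledge_id': '<knowledge uuid>',
    'document_id': '<document uuid>',
    'knowledge_name': 'Product docs',       # knowledge."name"
    'document_name': 'install.md',          # "document"."name"
    'meta': {},                             # "document"."meta"
    'hit_handling_method': 'optimization',  # example value only
    'directly_return_similarity': 0.9,      # "document"."directly_return_similarity"
}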
@@ -139,13 +139,13 @@ const search_text = ref<string>('')
 const props = defineProps({
   show: {
     type: Boolean,
-    default: false
+    default: false,
   },
   id: {
     type: String,
-    default: ''
+    default: '',
   },
-  workflowRef: Object
+  workflowRef: Object,
 })

 const emit = defineEmits(['clickNodes', 'onmousedown'])
@@ -156,19 +156,19 @@ const activeName = ref('base')
 const toolList = ref<any[]>([])
 const filter_tool_lib_list = computed(() => {
   return toolList.value.filter((item: any) =>
-    item.name.toLocaleLowerCase().includes(search_text.value.toLocaleLowerCase())
+    item.name.toLocaleLowerCase().includes(search_text.value.toLocaleLowerCase()),
   )
 })
 const applicationList = ref<any[]>([])
 const filter_application_list = computed(() => {
   return applicationList.value.filter((item: any) =>
-    item.name.toLocaleLowerCase().includes(search_text.value.toLocaleLowerCase())
+    item.name.toLocaleLowerCase().includes(search_text.value.toLocaleLowerCase()),
   )
 })

 const filter_menu_nodes = computed(() => {
   return menuNodes.filter((item) =>
-    item.label.toLocaleLowerCase().includes(search_text.value.toLocaleLowerCase())
+    item.label.toLocaleLowerCase().includes(search_text.value.toLocaleLowerCase()),
   )
 })
 function clickNodes(item: any, data?: any, type?: string) {
@@ -180,8 +180,8 @@ function clickNodes(item: any, data?: any, type?: string) {
       tool_lib_id: data.id,
       input_field_list: data.input_field_list.map((field: any) => ({
         ...field,
-        value: field.source == 'reference' ? [] : ''
-      }))
+        value: field.source == 'reference' ? [] : '',
+      })),
     }
   }
   if (type == 'application') {
@@ -199,14 +199,14 @@ function clickNodes(item: any, data?: any, type?: string) {
         : {
             ...(fileUploadSetting.document ? { document_list: [] } : {}),
             ...(fileUploadSetting.image ? { image_list: [] } : {}),
-            ...(fileUploadSetting.audio ? { audio_list: [] } : {})
-          })
+            ...(fileUploadSetting.audio ? { audio_list: [] } : {}),
+          }),
     }
   } else {
     item['properties']['node_data'] = {
       name: data.name,
       icon: data.icon,
-      application_id: data.id
+      application_id: data.id,
     }
   }
 }
@@ -225,8 +225,8 @@ function onmousedown(item: any, data?: any, type?: string) {
       tool_lib_id: data.id,
       input_field_list: data.input_field_list.map((field: any) => ({
         ...field,
-        value: field.source == 'reference' ? [] : ''
-      }))
+        value: field.source == 'reference' ? [] : '',
+      })),
     }
   }
   if (type == 'application') {
@@ -244,14 +244,14 @@ function onmousedown(item: any, data?: any, type?: string) {
         : {
             ...(fileUploadSetting.document ? { document_list: [] } : {}),
             ...(fileUploadSetting.image ? { image_list: [] } : {}),
-            ...(fileUploadSetting.audio ? { audio_list: [] } : {})
-          })
+            ...(fileUploadSetting.audio ? { audio_list: [] } : {}),
+          }),
     }
   } else {
     item['properties']['node_data'] = {
       name: data.name,
       icon: data.icon,
-      application_id: data.id
+      application_id: data.id,
     }
   }
 }
@@ -527,10 +527,9 @@ const rules = reactive<FormRules<ApplicationFormType>>({
   ],
 })
 const modelOptions = ref<any>(null)
-const knowledgeList = ref([])
+const knowledgeList = ref<Array<any>>([])
 const sttModelOptions = ref<any>(null)
 const ttsModelOptions = ref<any>(null)
 const showEditIcon = ref(false)

 function submitPrologueDialog(val: string) {
   applicationForm.value.prologue = val
@@ -618,8 +617,9 @@ function removeKnowledge(id: any) {
   }
 }

-function addKnowledge(val: Array<string>) {
-  applicationForm.value.knowledge_id_list = val
+function addKnowledge(val: Array<any>) {
+  knowledgeList.value = val
+  applicationForm.value.knowledge_id_list = val.map((item) => item.id)
 }

 function openKnowledgeDialog() {
@@ -633,17 +633,9 @@ function getDetail() {
     applicationForm.value.stt_model_id = res.data.stt_model
     applicationForm.value.tts_model_id = res.data.tts_model
     applicationForm.value.tts_type = res.data.tts_type
+    knowledgeList.value = res.data.knowledge_list
     applicationForm.value.model_setting.no_references_prompt =
       res.data.model_setting.no_references_prompt || '{question}'
-    application.asyncGetAccessToken(id, loading).then((res: any) => {
-      applicationForm.value = { ...applicationForm.value, ...res.data }
-    })
   })
 }

-function getKnowledge() {
-  knowledge.asyncGetFolderKnowledge('', knowledgeLoading).then((res: any) => {
-    knowledgeList.value = res.data
-  })
-}
-
@@ -715,12 +707,11 @@ function refreshIcon() {
 }

 function refresh() {
-  getKnowledge()
+  // getDetail()
 }

 onMounted(() => {
   getSelectModel()
-  getKnowledge()
   getDetail()
   getSTTModel()
   getTTSModel()
@@ -111,7 +111,7 @@ const emit = defineEmits(['addData', 'refresh'])
 const { folder, user, knowledge } = useStore()

 const dialogVisible = ref<boolean>(false)
-const checkList = ref([])
+const checkList = ref<Array<string>>([])
 const currentEmbedding = ref('')
 const searchValue = ref('')
 const searchDate = ref<any[]>([])
@@ -166,7 +166,10 @@ const open = (checked: any) => {
 }

 const submitHandle = () => {
-  emit('addData', checkList.value)
+  emit(
+    'addData',
+    searchDate.value.filter((item: any) => checkList.value.includes(item.id)),
+  )
   dialogVisible.value = false
 }

@@ -109,7 +109,6 @@
       ref="AddknowledgeDialogRef"
       @addData="addKnowledge"
-      :data="knowledgeList"
       @refresh="refresh"
       :loading="knowledgeLoading"
     />
   </NodeContainer>
@@ -174,8 +173,13 @@ function removeknowledge(id: any) {
   set(props.nodeModel.properties.node_data, 'knowledge_id_list', list)
 }

-function addKnowledge(val: Array<string>) {
-  set(props.nodeModel.properties.node_data, 'knowledge_id_list', val)
+function addKnowledge(val: Array<any>) {
+  set(
+    props.nodeModel.properties.node_data,
+    'knowledge_id_list',
+    val.map((item) => item.id),
+  )
+  knowledgeList.value = val
 }

 function openknowledgeDialog() {
@@ -184,21 +188,6 @@ function openknowledgeDialog() {
   }
 }

-function getknowledge() {
-  // if (id) {
-  //   application.asyncGetApplicationKnowledge(id, knowledgeLoading).then((res: any) => {
-  //     knowledgeList.value = res.data
-  //   })
-  // } else {
-  knowledge.asyncGetFolderKnowledge('',knowledgeLoading).then((res: any) => {
-    knowledgeList.value = res.data?.filter((v: any) => v.user_id === user.userInfo?.id)
-  })
-  // }
-}
-function refresh() {
-  getknowledge()
-}
-
 const validate = () => {
   return Promise.all([
     nodeCascaderRef.value.validate(),
@@ -209,7 +198,8 @@ const validate = () => {
 }

 onMounted(() => {
-  getknowledge()
+  console.log(props.nodeModel.properties.node_data)
+  knowledgeList.value = props.nodeModel.properties.node_data.knowledge_list
   set(props.nodeModel, 'validate', validate)
 })
 </script>