Mirror of https://github.com/1Panel-dev/MaxKB.git, synced 2025-12-27 12:12:57 +00:00

Compare commits: 8 commits
| SHA1 |
|---|
| 499fc90f2f |
| 56a7b7b524 |
| 28ed136f2d |
| 16f490567e |
| a5ca97ac06 |
| 601d19d7ad |
| 5e7879c582 |
| 0ce3fc5cfa |
```diff
@@ -105,12 +105,14 @@ class FlowParamsSerializer(serializers.Serializer):
     chat_record_id = serializers.CharField(required=True, error_messages=ErrMessage.char("对话记录id"))
-    stream = serializers.BooleanField(required=True, error_messages=ErrMessage.base("流式输出"))
+    stream = serializers.BooleanField(required=True, error_messages=ErrMessage.boolean("流式输出"))
     client_id = serializers.CharField(required=False, error_messages=ErrMessage.char("客户端id"))
     client_type = serializers.CharField(required=False, error_messages=ErrMessage.char("客户端类型"))
+    re_chat = serializers.BooleanField(required=True, error_messages=ErrMessage.boolean("换个答案"))


 class INode:
     def __init__(self, node, workflow_params, workflow_manage):
```
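The serializer changes above are small but worth noting: `stream` now takes boolean-specific error messages via `ErrMessage.boolean` instead of the generic `ErrMessage.base`, and a required `re_chat` flag is added so callers can request a regenerated answer. The sketch below shows the underlying DRF mechanism with plain `error_messages` dicts standing in for MaxKB's `ErrMessage` helper; the message text and the `FlowParamsSketch` name are illustrative assumptions.

```python
# Minimal, standalone sketch of BooleanField error_messages in DRF.
# Plain strings stand in for whatever ErrMessage.boolean() actually builds.
import django
from django.conf import settings

if not settings.configured:
    settings.configure()
    django.setup()

from rest_framework import serializers


class FlowParamsSketch(serializers.Serializer):
    # 'required' and 'invalid' are the documented error keys for BooleanField.
    stream = serializers.BooleanField(
        required=True,
        error_messages={'required': 'stream is required', 'invalid': 'stream must be a boolean'})
    re_chat = serializers.BooleanField(
        required=True,
        error_messages={'required': 're_chat is required', 'invalid': 're_chat must be a boolean'})


s = FlowParamsSketch(data={'stream': True})
assert not s.is_valid()          # re_chat is missing
assert 're_chat' in s.errors     # surfaces the custom 'required' message
```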
```diff
@@ -65,10 +65,10 @@ class BaseReplyNode(IReplyNode):
         else:
             result = self.generate_reply_content(content)
         if stream:
-            return NodeResult({'result': iter([AIMessageChunk(content=result)])}, {},
+            return NodeResult({'result': iter([AIMessageChunk(content=result)]), 'answer': result}, {},
                               _to_response=to_stream_response)
         else:
-            return NodeResult({'result': AIMessage(content=result)}, {}, _to_response=to_response)
+            return NodeResult({'result': AIMessage(content=result), 'answer': result}, {}, _to_response=to_response)

     def generate_reply_content(self, prompt):
         return self.workflow_manage.generate_prompt(prompt)
```
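Both branches of the hunk above now carry the plain reply text under an extra `'answer'` key next to `'result'`. A plausible reason, inferred rather than stated in the diff, is that the streaming value is a one-shot iterator: anything that consumed it to record the answer would leave nothing for the response writer. A plain-Python sketch of that constraint:

```python
# No MaxKB or LangChain imports needed to show the point: iterators are single-pass,
# so the full text is kept under a separate key instead of being re-read from 'result'.
result = "Hello from the reply node"
node_output = {
    'result': iter([result]),  # stream, consumed once by the response writer
    'answer': result,          # plain text, safe to read any number of times
}

assert node_output['answer'] == result
assert list(node_output['result']) == [result]  # draining the iterator...
assert list(node_output['result']) == []        # ...leaves nothing for a second reader
```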
```diff
@@ -13,6 +13,7 @@ from django.core import validators
 from rest_framework import serializers

 from application.flow.i_step_node import INode, NodeResult
+from common.util.common import flat_map
 from common.util.field_message import ErrMessage
```
```diff
@@ -43,6 +44,13 @@ class SearchDatasetStepNodeSerializer(serializers.Serializer):
         super().is_valid(raise_exception=True)


+def get_paragraph_list(chat_record, node_id):
+    return flat_map([chat_record.details[key].get('paragraph_list', []) for key in chat_record.details if
+                     (chat_record.details[
+                          key].get('type', '') == 'search-dataset-node') and chat_record.details[key].get(
+                         'paragraph_list', []) is not None and key == node_id])
+
+
 class ISearchDatasetStepNode(INode):
     type = 'search-dataset-node'
```
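The new module-level `get_paragraph_list` helper collects the `paragraph_list` entries from a chat record's per-node `details` dict, keeping only the entry for the matching `search-dataset-node`. The sketch below exercises that logic against a mocked record; the `flat_map` stand-in is assumed to simply concatenate sub-lists, which may differ from the real `common.util.common.flat_map`.

```python
from itertools import chain
from types import SimpleNamespace


def flat_map(list_of_lists):
    # Stand-in for common.util.common.flat_map (assumed to concatenate sub-lists).
    return list(chain.from_iterable(list_of_lists))


def get_paragraph_list(chat_record, node_id):
    # Same shape as the helper added in the hunk above.
    return flat_map([chat_record.details[key].get('paragraph_list', []) for key in chat_record.details if
                     chat_record.details[key].get('type', '') == 'search-dataset-node' and
                     chat_record.details[key].get('paragraph_list', []) is not None and key == node_id])


record = SimpleNamespace(details={
    'node-1': {'type': 'search-dataset-node', 'paragraph_list': [{'id': 'p-1'}, {'id': 'p-2'}]},
    'node-2': {'type': 'reply-node'},  # no paragraphs; filtered out by the type check
})

assert [p['id'] for p in get_paragraph_list(record, 'node-1')] == ['p-1', 'p-2']
assert get_paragraph_list(record, 'node-2') == []  # different node type, nothing returned
```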
```diff
@@ -53,7 +61,16 @@ class ISearchDatasetStepNode(INode):
         question = self.workflow_manage.get_reference_field(
             self.node_params_serializer.data.get('question_reference_address')[0],
             self.node_params_serializer.data.get('question_reference_address')[1:])
-        return self.execute(**self.node_params_serializer.data, question=str(question), exclude_paragraph_id_list=[])
+        exclude_paragraph_id_list = []
+        if self.flow_params_serializer.data.get('re_chat', False):
+            history_chat_record = self.flow_params_serializer.data.get('history_chat_record', [])
+            paragraph_id_list = [p.get('id') for p in flat_map(
+                [get_paragraph_list(chat_record, self.node.id) for chat_record in history_chat_record if
+                 chat_record.problem_text == question])]
+            exclude_paragraph_id_list = list(set(paragraph_id_list))
+
+        return self.execute(**self.node_params_serializer.data, question=str(question),
+                            exclude_paragraph_id_list=exclude_paragraph_id_list)

     def execute(self, dataset_id_list, dataset_setting, question,
                 exclude_paragraph_id_list=None,
```
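Together with the `re_chat` flag added to `FlowParamsSerializer`, this hunk implements the "regenerate answer" path: when `re_chat` is set, paragraph IDs already shown for the same question in earlier chat records are collected, de-duplicated, and passed to `execute` as `exclude_paragraph_id_list` so the next retrieval can surface different content. A compact sketch of just the collection step, with mocked history records and a hypothetical helper in place of MaxKB's chat record objects:

```python
from types import SimpleNamespace


def collect_exclude_ids(question, history_chat_record):
    # Hypothetical helper: gather paragraph IDs from earlier records with the same question.
    ids = []
    for record in history_chat_record:
        if record.problem_text == question:
            ids.extend(p['id'] for p in record.paragraph_list)
    return list(set(ids))  # de-duplicate, mirroring list(set(paragraph_id_list)) above


history = [
    SimpleNamespace(problem_text='How do I reset my password?',
                    paragraph_list=[{'id': 'p-1'}, {'id': 'p-2'}]),
    SimpleNamespace(problem_text='Unrelated question',
                    paragraph_list=[{'id': 'p-9'}]),
]

exclude = collect_exclude_ids('How do I reset my password?', history)
assert sorted(exclude) == ['p-1', 'p-2']  # p-9 belongs to another question and stays eligible
```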
```diff
@@ -40,6 +40,7 @@ class BaseSearchDatasetNode(ISearchDatasetStepNode):
             return NodeResult({'paragraph_list': [], 'is_hit_handling_method': []}, {})
         paragraph_list = self.list_paragraph(embedding_list, vector)
         result = [self.reset_paragraph(paragraph, embedding_list) for paragraph in paragraph_list]
+        result = sorted(result, key=lambda p: p.get('similarity'), reverse=True)
         return NodeResult({'paragraph_list': result,
                            'is_hit_handling_method_list': [row for row in result if row.get('is_hit_handling_method')],
                            'data': '\n'.join([paragraph.get('content') for paragraph in paragraph_list]),
```
```diff
@@ -576,6 +576,8 @@ class ApplicationSerializer(serializers.Serializer):
                 'dataset_id_list': dataset_id_list}

     def get_search_node(self, work_flow):
+        if work_flow is None:
+            return []
         return [node for node in work_flow.get('nodes', []) if node.get('type', '') == 'search-dataset-node']

     def update_search_node(self, work_flow, user_dataset_id_list: List):
```
```diff
@@ -656,13 +656,13 @@ class DocumentSerializers(ApiMixin, serializers.Serializer):
                 paragraphs = get_split_model('web.md').parse(response.content)
                 # 插入
                 DocumentSerializers.Create(data={'dataset_id': dataset_id}).save(
-                    {'name': source_url, 'paragraphs': paragraphs,
+                    {'name': source_url[0:128], 'paragraphs': paragraphs,
                      'meta': {'source_url': source_url, 'selector': selector},
                      'type': Type.web}, with_valid=True)
             except Exception as e:
                 logging.getLogger("max_kb_error").error(f'{str(e)}:{traceback.format_exc()}')
         else:
-            Document(name=source_url,
+            Document(name=source_url[0:128],
                      meta={'source_url': source_url, 'selector': selector},
                      type=Type.web,
                      char_length=0,
```
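Both write paths now truncate the crawled URL before using it as the document name. The 128-character cap presumably matches the maximum length of the `name` column (an assumption; the model definition is not part of this diff), and slicing is a safe way to apply it because it never raises on shorter strings:

```python
# Tiny illustration of the [0:128] truncation added above.
long_url = "https://example.com/docs/" + "a" * 200
short_url = "https://example.com/docs/intro"

assert len(long_url[0:128]) == 128      # long names are cut to the assumed column limit
assert short_url[0:128] == short_url    # short names pass through unchanged
```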
```diff
@@ -105,9 +105,9 @@ class PGVector(BaseVectorStore):
             return []
         query_set = QuerySet(Embedding).filter(dataset_id__in=dataset_id_list, is_active=is_active)
         if exclude_document_id_list is not None and len(exclude_document_id_list) > 0:
-            exclude_dict.__setitem__('document_id__in', exclude_document_id_list)
+            query_set = query_set.exclude(document_id__in=exclude_document_id_list)
         if exclude_paragraph_list is not None and len(exclude_paragraph_list) > 0:
-            exclude_dict.__setitem__('paragraph_id__in', exclude_paragraph_list)
+            query_set = query_set.exclude(paragraph_id__in=exclude_paragraph_list)
         query_set = query_set.exclude(**exclude_dict)
         for search_handle in search_handle_list:
             if search_handle.support(search_mode):
```
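The likely motivation for replacing the shared `exclude_dict` with chained `.exclude()` calls is Django's QuerySet semantics: a single `exclude()` with both keyword arguments only removes rows matching both conditions, while chained calls remove rows matching either one. The illustration below uses plain list comprehensions to stand in for the ORM (it is not QuerySet code), but the logic mirrors what Django generates for each form:

```python
# Rows mimic Embedding records with a document_id and a paragraph_id.
rows = [
    {'id': 1, 'document_id': 'doc-a', 'paragraph_id': 'p-1'},
    {'id': 2, 'document_id': 'doc-a', 'paragraph_id': 'p-9'},
    {'id': 3, 'document_id': 'doc-x', 'paragraph_id': 'p-1'},
    {'id': 4, 'document_id': 'doc-x', 'paragraph_id': 'p-9'},
]
exclude_documents = {'doc-a'}
exclude_paragraphs = {'p-1'}

# .exclude(document_id__in=..., paragraph_id__in=...)  ->  NOT (doc AND paragraph)
single_exclude = [r for r in rows
                  if not (r['document_id'] in exclude_documents
                          and r['paragraph_id'] in exclude_paragraphs)]

# .exclude(document_id__in=...).exclude(paragraph_id__in=...)  ->  NOT doc AND NOT paragraph
chained_exclude = [r for r in rows
                   if r['document_id'] not in exclude_documents
                   and r['paragraph_id'] not in exclude_paragraphs]

assert [r['id'] for r in single_exclude] == [2, 3, 4]  # only the row matching both is removed
assert [r['id'] for r in chained_exclude] == [4]       # rows matching either condition are removed
```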
```diff
@@ -150,7 +150,7 @@ const {
   params: { id }
 } = route as any

-const apiUrl = window.location.origin + '/doc/chat'
+const apiUrl = window.location.origin + '/doc/chat/'

 const EditAvatarDialogRef = ref()
 const LimitDialogRef = ref()
```
```diff
@@ -15,7 +15,7 @@
     </div>
   </template>

-  <div class="border-t mt-16">
+  <div class="mt-16">
     <ul>
       <li class="flex mt-16">
         <el-text type="info">模型类型</el-text>
```
```diff
@@ -176,7 +176,7 @@ onBeforeUnmount(() => {
 </script>
 <style lang="scss" scoped>
 .model-card {
-  min-height: 153px;
+  min-height: 135px;
   min-width: auto;
   .operation-button {
     position: absolute;
```
```diff
@@ -1,5 +1,8 @@
 <template>
   <el-cascader
+    @wheel="wheel"
+    @keydown="isKeyDown = true"
+    @keyup="isKeyDown = false"
     :teleported="false"
     :options="options"
     @visible-change="visibleChange"
```
```diff
@@ -8,7 +11,12 @@
     separator=" > "
   >
     <template #default="{ node, data }">
-      <span class="flex align-center">
+      <span
+        class="flex align-center"
+        @wheel="wheel"
+        @keydown="isKeyDown = true"
+        @keyup="isKeyDown = false"
+      >
         <component :is="iconComponent(`${data.type}-icon`)" class="mr-8" :size="18" />{{
           data.label
         }}</span
```
```diff
@@ -34,6 +42,15 @@ const data = computed({
   }
 })
 const options = ref<Array<any>>([])
+const isKeyDown = ref(false)
+const wheel = (e: any) => {
+  if (isKeyDown.value) {
+    e.preventDefault()
+  } else {
+    e.stopPropagation()
+    return true
+  }
+}

 function visibleChange(bool: boolean) {
   if (bool) {
```
```diff
@@ -277,6 +277,12 @@ function deleteCondition(index: number, cIndex: number) {
         .map((item: any) => item.id)
     )
     refreshBranchAnchor(list, false)
+
+    list.forEach((item: any, index: number) => {
+      if (item.type === 'ELSE IF ' + (index + 1)) {
+        item.type = 'ELSE IF ' + index
+      }
+    })
   }
   set(props.nodeModel.properties.node_data, 'branch', list)
 }
```