Mirror of https://github.com/1Panel-dev/MaxKB.git (synced 2025-12-26 01:33:05 +00:00)
refactor: replace logging with maxkb_logger for consistent logging across modules
parent: fe066aea68
commit: fe8f87834d
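Every file below switches from ad-hoc `logging.getLogger(...)` calls to two shared loggers imported from `common.utils.logger`. That module itself is not part of this diff, so as a mental model only, here is a minimal sketch of what it could export; the `_build_logger` helper, the handler wiring, and the format string are assumptions, not the actual MaxKB implementation (the commit guarantees only that `maxkb_logger` and `maxkb_error_logger` are importable, and the old code shows the logger names `max_kb` and `max_kb_error`):

```python
# common/utils/logger.py -- hypothetical sketch, not the file from this commit.
import logging
import sys


def _build_logger(name: str, stream, level: int) -> logging.Logger:
    # Assumed helper: configure each named logger exactly once.
    logger = logging.getLogger(name)
    logger.setLevel(level)
    if not logger.handlers:  # avoid duplicate handlers on repeated import
        handler = logging.StreamHandler(stream)
        handler.setFormatter(logging.Formatter(
            '%(asctime)s %(levelname)s [%(name)s] %(message)s'))
        logger.addHandler(handler)
    return logger


# Shared loggers: one for normal flow, one for errors routed to stderr.
maxkb_logger = _build_logger('max_kb', sys.stdout, logging.INFO)
maxkb_error_logger = _build_logger('max_kb_error', sys.stderr, logging.ERROR)
```

Centralizing the loggers this way is what lets the diff delete the per-module `logging.getLogger(__file__)` / `logging.getLogger("max_kb")` boilerplate: formatting and handlers are configured once instead of in every module.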
@@ -25,6 +25,7 @@ from application.flow.step_node.ai_chat_step_node.i_chat_node import IChatNode
 from application.flow.tools import Reasoning
 from models_provider.models import Model
 from models_provider.tools import get_model_credential, get_model_instance_by_model_workspace_id
+from common.utils.logger import maxkb_logger

 tool_message_template = """
 <details>

@@ -126,7 +127,7 @@ def mcp_response_generator(chat_model, message_list, mcp_servers):
         except StopAsyncIteration:
             break
         except Exception as e:
-            logging.getLogger("max_kb").error(f'Exception: {e}')
+            maxkb_logger.error(f'Exception: {e}')
         finally:
             loop.close()
@@ -6,7 +6,6 @@
     @date:2023/10/20 14:01
     @desc:
 """
-import logging
 import os
 import threading
 import datetime

@@ -22,13 +21,13 @@ from common.config.embedding_config import VectorStore
 from common.db.search import native_search, get_dynamics_model, native_update
 from common.utils.common import get_file_content
 from common.utils.lock import try_lock, un_lock
+from common.utils.logger import maxkb_logger, maxkb_error_logger
 from common.utils.page_utils import page_desc
 from knowledge.models import Paragraph, Status, Document, ProblemParagraphMapping, TaskType, State,SourceType, SearchMode
 from maxkb.conf import (PROJECT_DIR)
 from django.utils.translation import gettext_lazy as _

-max_kb_error = logging.getLogger(__file__)
-max_kb = logging.getLogger(__file__)
-
 lock = threading.Lock()

@@ -87,12 +86,12 @@ class ListenerManagement:
                 ListenerManagement.embedding_by_paragraph_data_list(data_list, paragraph_id_list=paragraph_id_list,
                                                                     embedding_model=embedding_model)
         except Exception as e:
-            max_kb_error.error(_('Query vector data: {paragraph_id_list} error {error} {traceback}').format(
+            maxkb_error_logger.error(_('Query vector data: {paragraph_id_list} error {error} {traceback}').format(
                 paragraph_id_list=paragraph_id_list, error=str(e), traceback=traceback.format_exc()))

     @staticmethod
     def embedding_by_paragraph_data_list(data_list, paragraph_id_list, embedding_model: Embeddings):
-        max_kb.info(_('Start--->Embedding paragraph: {paragraph_id_list}').format(paragraph_id_list=paragraph_id_list))
+        maxkb_logger.info(_('Start--->Embedding paragraph: {paragraph_id_list}').format(paragraph_id_list=paragraph_id_list))
         status = Status.success
         try:
             # Delete the paragraphs

@@ -104,12 +103,12 @@ class ListenerManagement:
             # Batch vectorization
             VectorStore.get_embedding_vector().batch_save(data_list, embedding_model, is_save_function)
         except Exception as e:
-            max_kb_error.error(_('Vectorized paragraph: {paragraph_id_list} error {error} {traceback}').format(
+            maxkb_error_logger.error(_('Vectorized paragraph: {paragraph_id_list} error {error} {traceback}').format(
                 paragraph_id_list=paragraph_id_list, error=str(e), traceback=traceback.format_exc()))
             status = Status.error
         finally:
             QuerySet(Paragraph).filter(id__in=paragraph_id_list).update(**{'status': status})
-            max_kb.info(
+            maxkb_logger.info(
                 _('End--->Embedding paragraph: {paragraph_id_list}').format(paragraph_id_list=paragraph_id_list))

     @staticmethod

@@ -119,7 +118,7 @@ class ListenerManagement:
         @param paragraph_id: paragraph id
         @param embedding_model: vector model
         """
-        max_kb.info(_('Start--->Embedding paragraph: {paragraph_id}').format(paragraph_id=paragraph_id))
+        maxkb_logger.info(_('Start--->Embedding paragraph: {paragraph_id}').format(paragraph_id=paragraph_id))
         # Update the status to started
         ListenerManagement.update_status(QuerySet(Paragraph).filter(id=paragraph_id), TaskType.EMBEDDING, State.STARTED)
         try:

@@ -144,12 +143,12 @@ class ListenerManagement:
             ListenerManagement.update_status(QuerySet(Paragraph).filter(id=paragraph_id), TaskType.EMBEDDING,
                                              State.SUCCESS)
         except Exception as e:
-            max_kb_error.error(_('Vectorized paragraph: {paragraph_id} error {error} {traceback}').format(
+            maxkb_error_logger.error(_('Vectorized paragraph: {paragraph_id} error {error} {traceback}').format(
                 paragraph_id=paragraph_id, error=str(e), traceback=traceback.format_exc()))
             ListenerManagement.update_status(QuerySet(Paragraph).filter(id=paragraph_id), TaskType.EMBEDDING,
                                              State.FAILURE)
         finally:
-            max_kb.info(_('End--->Embedding paragraph: {paragraph_id}').format(paragraph_id=paragraph_id))
+            maxkb_logger.info(_('End--->Embedding paragraph: {paragraph_id}').format(paragraph_id=paragraph_id))

     @staticmethod
     def embedding_by_data_list(data_list: List, embedding_model: Embeddings):

@@ -265,7 +264,7 @@ class ListenerManagement:

         if is_the_task_interrupted():
             return
-        max_kb.info(_('Start--->Embedding document: {document_id}').format(document_id=document_id)
+        maxkb_logger.info(_('Start--->Embedding document: {document_id}').format(document_id=document_id)
         )
         # Batch-update the status to PADDING
         ListenerManagement.update_status(QuerySet(Document).filter(id=document_id), TaskType.EMBEDDING,

@@ -285,12 +284,12 @@ class ListenerManagement:
                                                                document_id)),
                               is_the_task_interrupted)
         except Exception as e:
-            max_kb_error.error(_('Vectorized document: {document_id} error {error} {traceback}').format(
+            maxkb_error_logger.error(_('Vectorized document: {document_id} error {error} {traceback}').format(
                 document_id=document_id, error=str(e), traceback=traceback.format_exc()))
         finally:
             ListenerManagement.post_update_document_status(document_id, TaskType.EMBEDDING)
             ListenerManagement.get_aggregation_document_status(document_id)()
-            max_kb.info(_('End--->Embedding document: {document_id}').format(document_id=document_id))
+            maxkb_logger.info(_('End--->Embedding document: {document_id}').format(document_id=document_id))
             un_lock('embedding' + str(document_id))

     @staticmethod

@@ -301,18 +300,18 @@ class ListenerManagement:
         @param embedding_model vector model
         :return: None
         """
-        max_kb.info(_('Start--->Embedding knowledge: {knowledge_id}').format(knowledge_id=knowledge_id))
+        maxkb_logger.info(_('Start--->Embedding knowledge: {knowledge_id}').format(knowledge_id=knowledge_id))
         try:
             ListenerManagement.delete_embedding_by_knowledge(knowledge_id)
             document_list = QuerySet(Document).filter(knowledge_id=knowledge_id)
-            max_kb.info(_('Start--->Embedding document: {document_list}').format(document_list=document_list))
+            maxkb_logger.info(_('Start--->Embedding document: {document_list}').format(document_list=document_list))
             for document in document_list:
                 ListenerManagement.embedding_by_document(document.id, embedding_model=embedding_model)
         except Exception as e:
-            max_kb_error.error(_('Vectorized knowledge: {knowledge_id} error {error} {traceback}').format(
+            maxkb_error_logger.error(_('Vectorized knowledge: {knowledge_id} error {error} {traceback}').format(
                 knowledge_id=knowledge_id, error=str(e), traceback=traceback.format_exc()))
         finally:
-            max_kb.info(_('End--->Embedding knowledge: {knowledge_id}').format(knowledge_id=knowledge_id))
+            maxkb_logger.info(_('End--->Embedding knowledge: {knowledge_id}').format(knowledge_id=knowledge_id))

     @staticmethod
     def delete_embedding_by_document(document_id):
@@ -4,8 +4,7 @@ import logging
 from charset_normalizer import detect

 from common.handle.base_parse_table_handle import BaseParseTableHandle
-
-max_kb = logging.getLogger("max_kb")
+from common.utils.logger import maxkb_logger, maxkb_error_logger


 class CsvParseTableHandle(BaseParseTableHandle):

@@ -20,7 +19,7 @@ class CsvParseTableHandle(BaseParseTableHandle):
         try:
             content = buffer.decode(detect(buffer)['encoding'])
         except BaseException as e:
-            max_kb.error(f'csv split handle error: {e}')
+            maxkb_logger.error(f'csv split handle error: {e}')
             return [{'name': file.name, 'paragraphs': []}]

         csv_model = content.split('\n')

@@ -40,5 +39,5 @@ class CsvParseTableHandle(BaseParseTableHandle):
         try:
             return buffer.decode(detect(buffer)['encoding'])
         except BaseException as e:
-            max_kb.error(f'csv split handle error: {e}')
+            maxkb_error_logger.error(f'csv split handle error: {e}')
             return f'error: {e}'
@@ -4,8 +4,7 @@ import logging
 import xlrd

 from common.handle.base_parse_table_handle import BaseParseTableHandle
-
-max_kb = logging.getLogger("max_kb")
+from common.utils.logger import maxkb_error_logger


 class XlsParseTableHandle(BaseParseTableHandle):

@@ -56,7 +55,7 @@ class XlsParseTableHandle(BaseParseTableHandle):
             result.append({'name': sheet.name, 'paragraphs': paragraphs})

         except BaseException as e:
-            max_kb.error(f'excel split handle error: {e}')
+            maxkb_error_logger.error(f'excel split handle error: {e}')
             return [{'name': file.name, 'paragraphs': []}]
         return result
@@ -6,8 +6,7 @@ from openpyxl import load_workbook

 from common.handle.base_parse_table_handle import BaseParseTableHandle
 from common.handle.impl.common_handle import xlsx_embed_cells_images
-
-max_kb = logging.getLogger("max_kb")
+from common.utils.logger import maxkb_error_logger


 class XlsxParseTableHandle(BaseParseTableHandle):

@@ -74,7 +73,7 @@ class XlsxParseTableHandle(BaseParseTableHandle):
             result.append({'name': sheetname, 'paragraphs': paragraphs})

         except BaseException as e:
-            max_kb.error(f'excel split handle error: {e}')
+            maxkb_error_logger.error(f'excel split handle error: {e}')
             return [{'name': file.name, 'paragraphs': []}]
         return result

@@ -87,7 +86,7 @@ class XlsxParseTableHandle(BaseParseTableHandle):
             if len(image_dict) > 0:
                 save_image(image_dict.values())
         except Exception as e:
-            logging.getLogger("max_kb").error(f'Exception: {e}')
+            maxkb_error_logger.error(f'Exception: {e}')
             image_dict = {}
         md_tables = ''
         # If sheet_name is not specified, use the first worksheet
@@ -19,6 +19,7 @@ from django.utils.translation import gettext_lazy as _
 from langchain_community.document_loaders import PyPDFLoader

 from common.handle.base_split_handle import BaseSplitHandle
+from common.utils.logger import maxkb_error_logger, maxkb_logger
 from common.utils.split_model import SplitModel

 default_pattern_list = [re.compile('(?<=^)# .*|(?<=\\n)# .*'),

@@ -29,7 +30,6 @@ default_pattern_list = [re.compile('(?<=^)# .*|(?<=\\n)# .*'),
                         re.compile("(?<=\\n)(?<!#)###### (?!#).*|(?<=^)(?<!#)###### (?!#).*"),
                         re.compile("(?<!\n)\n\n+")]

-max_kb = logging.getLogger("max_kb")


 def check_links_in_pdf(doc):

@@ -72,7 +72,7 @@ class PdfSplitHandle(BaseSplitHandle):
             else:
                 split_model = SplitModel(default_pattern_list, with_filter=with_filter, limit=limit)
         except BaseException as e:
-            max_kb.error(f"File: {file.name}, error: {e}")
+            maxkb_error_logger.error(f"File: {file.name}, error: {e}")
             return {'name': file.name,
                     'content': []}
         finally:

@@ -109,7 +109,7 @@ class PdfSplitHandle(BaseSplitHandle):
                     raise e
             except BaseException as e:
                 # On a page error, continue with the next page so one bad page does not fail the whole file
-                max_kb.error(f"File: {file.name}, Page: {page_num + 1}, error: {e}")
+                maxkb_error_logger.error(f"File: {file.name}, Page: {page_num + 1}, error: {e}")
                 continue
             finally:
                 os.remove(page_num_pdf)

@@ -120,7 +120,7 @@ class PdfSplitHandle(BaseSplitHandle):
             content = content.replace('\0', '')

             elapsed_time = time.time() - start_time
-            max_kb.debug(
+            maxkb_logger.debug(
                 f"File: {file.name}, Page: {page_num + 1}, Time : {elapsed_time: .3f}s, content-length: {len(page_content)}")

         return content

@@ -278,7 +278,7 @@ class PdfSplitHandle(BaseSplitHandle):
                 pre_toc[i]['content'] = re.sub(r'(?<!。)\n+', '', pre_toc[i]['content'])
                 pre_toc[i]['content'] = re.sub(r'(?<!.)\n+', '', pre_toc[i]['content'])
         except BaseException as e:
-            max_kb.error(_('This document has no preface and is treated as ordinary text: {e}').format(e=e))
+            maxkb_error_logger.error(_('This document has no preface and is treated as ordinary text: {e}').format(e=e))
         if pattern_list is not None and len(pattern_list) > 0:
             split_model = SplitModel(pattern_list, with_filter, limit)
         else:
@@ -10,6 +10,7 @@ from django_apscheduler.jobstores import DjangoJobStore
 from application.models import Application, Chat, ChatRecord
 from django.db.models import Q, Max
 from common.lock.impl.file_lock import FileLock
+from common.utils.logger import maxkb_logger

 from knowledge.models import File

@@ -20,7 +21,7 @@ lock = FileLock()

 def clean_chat_log_job():
     from django.utils.translation import gettext_lazy as _
-    logging.getLogger("max_kb").info(_('start clean chat log'))
+    maxkb_logger.info(_('start clean chat log'))
     now = timezone.now()

     applications = Application.objects.all().values('id', 'clean_time')

@@ -66,7 +67,7 @@ def clean_chat_log_job():
                 if deleted_count < batch_size:
                     break

-    logging.getLogger("max_kb").info(_('end clean chat log'))
+    maxkb_logger.info(_('end clean chat log'))


 def run():
@@ -9,6 +9,7 @@ from django.utils import timezone
 from django_apscheduler.jobstores import DjangoJobStore

 from common.lock.impl.file_lock import FileLock
+from common.utils.logger import maxkb_logger
 from knowledge.models import File

 scheduler = BackgroundScheduler()

@@ -18,11 +19,11 @@ lock = FileLock()

 def clean_debug_file():
     from django.utils.translation import gettext_lazy as _
-    logging.getLogger("max_kb").info(_('start clean debug file'))
+    maxkb_logger.info(_('start clean debug file'))
     two_hours_ago = timezone.now() - timedelta(hours=2)
     # Delete the corresponding files
     File.objects.filter(Q(create_time__lt=two_hours_ago) & Q(meta__debug=True)).delete()
-    logging.getLogger("max_kb").info(_('end clean debug file'))
+    maxkb_logger.info(_('end clean debug file'))


 def run():
@@ -10,6 +10,8 @@ import html2text as ht
 import requests
 from bs4 import BeautifulSoup

+from common.utils.logger import maxkb_error_logger, maxkb_logger
+
 requests.packages.urllib3.disable_warnings()

@@ -145,7 +147,7 @@ class Fork:
             try:
                 html_content = response.content.decode(charset)
             except Exception as e:
-                logging.getLogger("max_kb").error(f'{e}')
+                maxkb_error_logger.error(f'{e}')
             return BeautifulSoup(html_content, "html.parser")
         return beautiful_soup

@@ -156,14 +158,14 @@ class Fork:
                 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.51 Safari/537.36'
             }

-            logging.getLogger("max_kb").info(f'fork:{self.base_fork_url}')
+            maxkb_logger.info(f'fork:{self.base_fork_url}')
             response = requests.get(self.base_fork_url, verify=False, headers=headers)
             if response.status_code != 200:
-                logging.getLogger("max_kb").error(f"url: {self.base_fork_url} code:{response.status_code}")
+                maxkb_error_logger.error(f"url: {self.base_fork_url} code:{response.status_code}")
                 return Fork.Response.error(f"url: {self.base_fork_url} code:{response.status_code}")
             bf = self.get_beautiful_soup(response)
         except Exception as e:
-            logging.getLogger("max_kb_error").error(f'{str(e)}:{traceback.format_exc()}')
+            maxkb_error_logger.error(f'{str(e)}:{traceback.format_exc()}')
             return Fork.Response.error(str(e))
         bf = self.reset_beautiful_soup(bf)
         link_list = self.get_child_link_list(bf)

@@ -173,6 +175,6 @@ class Fork:


 def handler(base_url, response: Fork.Response):
-    logging.getLogger("max_kb").info(base_url.url, base_url.tag.text if base_url.tag else None, response.content)
+    maxkb_logger.info(base_url.url, base_url.tag.text if base_url.tag else None, response.content)

 # ForkManage('https://bbs.fit2cloud.com/c/de/6', ['.md-content']).fork(3, set(), handler)
@@ -30,6 +30,7 @@ from common.event import ListenerManagement
 from common.exception.app_exception import AppApiException
 from common.utils.common import valid_license, post, get_file_content, parse_image
 from common.utils.fork import Fork, ChildLink
+from common.utils.logger import maxkb_logger
 from common.utils.split_model import get_split_model
 from knowledge.models import Knowledge, KnowledgeScope, KnowledgeType, Document, Paragraph, Problem, \
     ProblemParagraphMapping, TaskType, State, SearchMode, KnowledgeFolder

@@ -630,7 +631,7 @@ class KnowledgeSerializer(serializers.Serializer):
                     document_name = child_link.tag.text if child_link.tag is not None and len(
                         child_link.tag.text.strip()) > 0 else child_link.url
                     paragraphs = get_split_model('web.md').parse(response.content)
-                    logging.getLogger("max_kb").info(child_link.url.strip())
+                    maxkb_logger.info(child_link.url.strip())
                     first = QuerySet(Document).filter(
                         meta__source_url=child_link.url.strip(),
                         knowledge=knowledge
@@ -11,16 +11,15 @@ from django.utils.translation import gettext_lazy as _
 from common.config.embedding_config import ModelManage
 from common.event import ListenerManagement, UpdateProblemArgs, UpdateEmbeddingKnowledgeIdArgs, \
     UpdateEmbeddingDocumentIdArgs
+from common.utils.logger import maxkb_error_logger, maxkb_logger
 from knowledge.models import Document, TaskType, State
 from models_provider.tools import get_model
 from models_provider.models import Model
 from ops import celery_app

-max_kb_error = logging.getLogger("max_kb_error")
-max_kb = logging.getLogger("max_kb")
-

-def get_embedding_model(model_id, exception_handler=lambda e: max_kb_error.error(
+def get_embedding_model(model_id, exception_handler=lambda e: maxkb_error_logger.error(
     _('Failed to obtain vector model: {error} {traceback}').format(
         error=str(e),
         traceback=traceback.format_exc()

@@ -70,7 +69,7 @@ def embedding_by_document(document_id, model_id, state_list=None):
     def exception_handler(e):
         ListenerManagement.update_status(QuerySet(Document).filter(id=document_id), TaskType.EMBEDDING,
                                          State.FAILURE)
-        max_kb_error.error(
+        maxkb_error_logger.error(
             _('Failed to obtain vector model: {error} {traceback}').format(
                 error=str(e),
                 traceback=traceback.format_exc()

@@ -100,11 +99,11 @@ def embedding_by_knowledge(knowledge_id, model_id):
     @param model_id vector model
     :return: None
     """
-    max_kb.info(_('Start--->Vectorized knowledge: {knowledge_id}').format(knowledge_id=knowledge_id))
+    maxkb_logger.info(_('Start--->Vectorized knowledge: {knowledge_id}').format(knowledge_id=knowledge_id))
     try:
         ListenerManagement.delete_embedding_by_knowledge(knowledge_id)
         document_list = QuerySet(Document).filter(knowledge_id=knowledge_id)
-        max_kb.info(_('Knowledge documentation: {document_names}').format(
+        maxkb_logger.info(_('Knowledge documentation: {document_names}').format(
             document_names=", ".join([d.name for d in document_list])))
         for document in document_list:
             try:

@@ -112,12 +111,12 @@ def embedding_by_knowledge(knowledge_id, model_id):
             except Exception as e:
                 pass
     except Exception as e:
-        max_kb_error.error(
+        maxkb_error_logger.error(
             _('Vectorized knowledge: {knowledge_id} error {error} {traceback}'.format(knowledge_id=knowledge_id,
                                                                                       error=str(e),
                                                                                       traceback=traceback.format_exc())))
     finally:
-        max_kb.info(_('End--->Vectorized knowledge: {knowledge_id}').format(knowledge_id=knowledge_id))
+        maxkb_logger.info(_('End--->Vectorized knowledge: {knowledge_id}').format(knowledge_id=knowledge_id))


 def embedding_by_problem(args, model_id):
@@ -9,6 +9,7 @@ from langchain_core.messages import HumanMessage

 from common.config.embedding_config import ModelManage
 from common.event import ListenerManagement
+from common.utils.logger import maxkb_error_logger, maxkb_logger
 from common.utils.page_utils import page, page_desc
 from knowledge.models import Paragraph, Document, Status, TaskType, State
 from knowledge.task.handler import save_problem

@@ -16,9 +17,6 @@ from models_provider.models import Model
 from models_provider.tools import get_model
 from ops import celery_app

-max_kb_error = logging.getLogger("max_kb_error")
-max_kb = logging.getLogger("max_kb")
-

 def get_llm_model(model_id):
     model = QuerySet(Model).filter(id=model_id).first()

@@ -102,12 +100,12 @@ def generate_related_by_document_id(document_id, model_id, prompt, state_list=No
         ).filter(task_type_status__in=state_list, document_id=document_id)
         page_desc(query_set, 10, generate_problem, is_the_task_interrupted)
     except Exception as e:
-        max_kb_error.error(f'根据文档生成问题:{document_id}出现错误{str(e)}{traceback.format_exc()}')
-        max_kb_error.error(_('Generate issue based on document: {document_id} error {error}{traceback}').format(
+        maxkb_error_logger.error(f'根据文档生成问题:{document_id}出现错误{str(e)}{traceback.format_exc()}')
+        maxkb_error_logger.error(_('Generate issue based on document: {document_id} error {error}{traceback}').format(
             document_id=document_id, error=str(e), traceback=traceback.format_exc()))
     finally:
         ListenerManagement.post_update_document_status(document_id, TaskType.GENERATE_PROBLEM)
-        max_kb.info(_('End--->Generate problem: {document_id}').format(document_id=document_id))
+        maxkb_logger.info(_('End--->Generate problem: {document_id}').format(document_id=document_id))


 @celery_app.task(base=QueueOnce, once={'keys': ['paragraph_id_list']},
@@ -9,12 +9,10 @@ from django.db.models import QuerySet
 from django.utils.translation import gettext_lazy as _

 from common.utils.fork import ChildLink, Fork
+from common.utils.logger import maxkb_error_logger
 from common.utils.split_model import get_split_model
 from knowledge.models.knowledge import KnowledgeType, Document, Knowledge, Status

-max_kb_error = logging.getLogger("max_kb_error")
-max_kb = logging.getLogger("max_kb")
-

 def get_save_handler(knowledge_id, selector):
     from knowledge.serializers.document import DocumentSerializers

@@ -118,4 +116,4 @@ def save_problem(knowledge_id, document_id, paragraph_id, problem):
             }
         ).save(instance={"content": problem}, with_valid=True)
     except Exception as e:
-        max_kb_error.error(_('Association problem failed {error}').format(error=str(e)))
+        maxkb_error_logger.error(_('Association problem failed {error}').format(error=str(e)))
@@ -15,10 +15,10 @@ from celery_once import QueueOnce
 from django.utils.translation import gettext_lazy as _

 from common.utils.fork import ForkManage, Fork
+from common.utils.logger import maxkb_logger, maxkb_error_logger
 from ops import celery_app

-max_kb_error = logging.getLogger("max_kb_error")
-max_kb = logging.getLogger("max_kb")
-

 @celery_app.task(base=QueueOnce, once={'keys': ['knowledge_id']}, name='celery:sync_web_knowledge')

@@ -26,15 +26,15 @@ def sync_web_knowledge(knowledge_id: str, url: str, selector: str):
     from knowledge.task.handler import get_save_handler

     try:
-        max_kb.info(
+        maxkb_logger.info(
             _('Start--->Start synchronization web knowledge base:{knowledge_id}').format(knowledge_id=knowledge_id))
         ForkManage(url, selector.split(" ") if selector is not None else []).fork(2, set(),
                                                                                   get_save_handler(knowledge_id,
                                                                                                    selector))

-        max_kb.info(_('End--->End synchronization web knowledge base:{knowledge_id}').format(knowledge_id=knowledge_id))
+        maxkb_logger.info(_('End--->End synchronization web knowledge base:{knowledge_id}').format(knowledge_id=knowledge_id))
     except Exception as e:
-        max_kb_error.error(_('Synchronize web knowledge base:{knowledge_id} error{error}{traceback}').format(
+        maxkb_error_logger.error(_('Synchronize web knowledge base:{knowledge_id} error{error}{traceback}').format(
             knowledge_id=knowledge_id, error=str(e), traceback=traceback.format_exc()))

@@ -43,14 +43,14 @@ def sync_replace_web_knowledge(knowledge_id: str, url: str, selector: str):
     from knowledge.task.handler import get_sync_handler

     try:
-        max_kb.info(
+        maxkb_logger.info(
             _('Start--->Start synchronization web knowledge base:{knowledge_id}').format(knowledge_id=knowledge_id))
         ForkManage(url, selector.split(" ") if selector is not None else []).fork(2, set(),
                                                                                   get_sync_handler(knowledge_id
                                                                                                    ))
-        max_kb.info(_('End--->End synchronization web knowledge base:{knowledge_id}').format(knowledge_id=knowledge_id))
+        maxkb_logger.info(_('End--->End synchronization web knowledge base:{knowledge_id}').format(knowledge_id=knowledge_id))
     except Exception as e:
-        max_kb_error.error(_('Synchronize web knowledge base:{knowledge_id} error{error}{traceback}').format(
+        maxkb_error_logger.error(_('Synchronize web knowledge base:{knowledge_id} error{error}{traceback}').format(
             knowledge_id=knowledge_id, error=str(e), traceback=traceback.format_exc()))
@@ -18,6 +18,7 @@ from typing import Dict

 import requests

+from common.utils.logger import maxkb_logger
 from models_provider.base_model_provider import MaxKBBaseModel
 from models_provider.impl.base_tti import BaseTextToImage

@@ -35,7 +36,6 @@ req_key_dict = {
     'anime_v1.3.1': 'high_aes',
 }

-max_kb = logging.getLogger("max_kb")


 def sign(key, msg):

@@ -60,7 +60,7 @@ def formatQuery(parameters):

 def signV4Request(access_key, secret_key, service, req_query, req_body):
     if access_key is None or secret_key is None:
-        max_kb.info('No access key is available.')
+        maxkb_logger.info('No access key is available.')
         sys.exit()

     t = datetime.datetime.utcnow()

@@ -77,42 +77,36 @@ def signV4Request(access_key, secret_key, service, req_query, req_body):
                         '\n' + 'x-date:' + current_date + '\n'
     canonical_request = method + '\n' + canonical_uri + '\n' + canonical_querystring + \
                         '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash
-    # max_kb.info(canonical_request)
     algorithm = 'HMAC-SHA256'
     credential_scope = datestamp + '/' + region + '/' + service + '/' + 'request'
     string_to_sign = algorithm + '\n' + current_date + '\n' + credential_scope + '\n' + hashlib.sha256(
         canonical_request.encode('utf-8')).hexdigest()
-    # max_kb.info(string_to_sign)
     signing_key = getSignatureKey(secret_key, datestamp, region, service)
-    # max_kb.info(signing_key)
     signature = hmac.new(signing_key, (string_to_sign).encode(
         'utf-8'), hashlib.sha256).hexdigest()
-    # max_kb.info(signature)

     authorization_header = algorithm + ' ' + 'Credential=' + access_key + '/' + \
                            credential_scope + ', ' + 'SignedHeaders=' + \
                            signed_headers + ', ' + 'Signature=' + signature
-    # max_kb.info(authorization_header)
     headers = {'X-Date': current_date,
                'Authorization': authorization_header,
                'X-Content-Sha256': payload_hash,
                'Content-Type': content_type
                }
-    # max_kb.info(headers)

     # ************* SEND THE REQUEST *************
     request_url = endpoint + '?' + canonical_querystring

-    max_kb.info('\nBEGIN REQUEST++++++++++++++++++++++++++++++++++++')
-    max_kb.info('Request URL = ' + request_url)
+    maxkb_logger.info('\nBEGIN REQUEST++++++++++++++++++++++++++++++++++++')
+    maxkb_logger.info('Request URL = ' + request_url)
     try:
         r = requests.post(request_url, headers=headers, data=req_body)
     except Exception as err:
-        max_kb.info(f'error occurred: {err}')
+        maxkb_logger.info(f'error occurred: {err}')
         raise
     else:
-        max_kb.info('\nRESPONSE++++++++++++++++++++++++++++++++++++')
-        max_kb.info(f'Response code: {r.status_code}\n')
+        maxkb_logger.info('\nRESPONSE++++++++++++++++++++++++++++++++++++')
+        maxkb_logger.info(f'Response code: {r.status_code}\n')
         # Use replace to turn \u0026 back into &
         resp_str = r.text.replace("\\u0026", "&")
         if r.status_code != 200:
@@ -28,7 +28,6 @@ ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
 ssl_context.check_hostname = False
 ssl_context.verify_mode = ssl.CERT_NONE

-max_kb = logging.getLogger("max_kb")


 class XFSparkSpeechToText(MaxKBBaseModel, BaseSpeechToText):
@@ -23,7 +23,6 @@ from common.utils.common import _remove_empty_lines
 from models_provider.base_model_provider import MaxKBBaseModel
 from models_provider.impl.base_tts import BaseTextToSpeech

-max_kb = logging.getLogger("max_kb")

 STATUS_FIRST_FRAME = 0  # marks the first frame
 STATUS_CONTINUE_FRAME = 1  # marks an intermediate frame
@@ -8,6 +8,7 @@ from django.conf import settings
 from kombu import Connection, Exchange, Queue, Producer
 from kombu.mixins import ConsumerMixin

+from common.utils.logger import maxkb_logger
 from .utils import get_celery_task_log_path
 from .const import CELERY_LOG_MAGIC_MARK

@@ -209,7 +210,7 @@ class CeleryThreadTaskFileHandler(CeleryThreadingLoggerHandler):
         f.flush()

     def handle_task_start(self, task_id):
-        logging.getLogger("max_kb").info('handle_task_start')
+        maxkb_logger.info('handle_task_start')
         log_path = get_celery_task_log_path(task_id)
         thread_id = self.get_current_thread_id()
         self.task_id_thread_id_mapper[task_id] = thread_id

@@ -217,7 +218,7 @@ class CeleryThreadTaskFileHandler(CeleryThreadingLoggerHandler):
         self.thread_id_fd_mapper[thread_id] = f

     def handle_task_end(self, task_id):
-        logging.getLogger('max_kb').info('handle_task_end')
+        maxkb_logger.info('handle_task_end')
         ident_id = self.task_id_thread_id_mapper.get(task_id, '')
         f = self.thread_id_fd_mapper.pop(ident_id, None)
         if f and not f.closed:
@@ -15,6 +15,7 @@ from rest_framework import serializers
 from common.exception.app_exception import AppApiException
 from django.utils.translation import gettext_lazy as _

+from common.utils.logger import maxkb_logger
 from system_manage.models import SystemSetting, SettingType

@@ -47,7 +48,7 @@ class EmailSettingSerializer(serializers.Serializer):
                 self.data.get("email_use_ssl")
             ).open()
         except Exception as e:
-            logging.getLogger("max_kb").error(f'Exception: {e}')
+            maxkb_logger.error(f'Exception: {e}')
             raise AppApiException(1004, _('Email verification failed'))

     def update_or_save(self):
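A refactor like this is easy to leave half-finished, so it is worth checking that no call sites still use the old pattern. A small sketch of such a check; the `apps` source root and the script name are assumptions, not part of this commit:

```python
# find_stale_loggers.py -- hypothetical helper, not part of this commit.
# Flags any remaining uses of the old per-module logger pattern that this
# refactor replaces: logging.getLogger("max_kb"), logging.getLogger("max_kb_error"),
# and logging.getLogger(__file__).
import re
from pathlib import Path

OLD_PATTERN = re.compile(
    r"logging\.getLogger\((?:[\"']max_kb(?:_error)?[\"']|__file__)\)")

for path in Path('apps').rglob('*.py'):  # 'apps' is an assumed source root
    text = path.read_text(encoding='utf-8', errors='ignore')
    for lineno, line in enumerate(text.splitlines(), 1):
        if OLD_PATTERN.search(line):
            print(f'{path}:{lineno}: {line.strip()}')
```

Any hit outside `common/utils/logger.py` itself would be a candidate for the same `maxkb_logger` / `maxkb_error_logger` substitution applied throughout this diff.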