Mirror of https://github.com/1Panel-dev/MaxKB.git (synced 2025-12-26 01:33:05 +00:00)

refactor: replace FileLock with utility functions for locking mechanism

commit 77af823879
parent 921b9e0d7e
@@ -8,11 +8,9 @@
 """
 from django.utils.translation import gettext as _

-from common.lock.impl.file_lock import FileLock
 from .listener_manage import *
 from ..db.sql_execute import update_execute

-lock = FileLock()
 update_document_status_sql = """
 UPDATE "public"."document"
 SET status ="replace"("replace"("replace"(status, '1', '3'), '0', '3'), '4', '3')
@@ -23,7 +21,7 @@ update_document_status_sql = """
 def run():
     from models_provider.models import Model, Status

-    if lock.try_lock('event_init', 30 * 30):
+    if try_lock('event_init', 30 * 30):
         try:
             # Set the Model status to ERROR
             QuerySet(Model).filter(
@@ -34,4 +32,4 @@ def run():
             # Update document status
             update_execute(update_document_status_sql, [])
         finally:
-            lock.un_lock('event_init')
+            un_lock('event_init')
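Note: all three modules touched by this commit follow the same pattern. The per-module FileLock instance goes away and each call site invokes the module-level helpers directly, with the same key and timeout arguments. Side by side (abbreviated from the hunks above and below):

# Before: each module instantiated its own FileLock
lock = FileLock()
if lock.try_lock('event_init', 30 * 30):
    try:
        ...  # protected work
    finally:
        lock.un_lock('event_init')

# After: module-level helpers imported from common.utils.lock
if try_lock('event_init', 30 * 30):
    try:
        ...  # protected work
    finally:
        un_lock('event_init')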
@@ -9,14 +9,13 @@ from apscheduler.schedulers.background import BackgroundScheduler
 from django_apscheduler.jobstores import DjangoJobStore
 from application.models import Application, Chat, ChatRecord
 from django.db.models import Q, Max
-from common.lock.impl.file_lock import FileLock
+from common.utils.lock import try_lock, un_lock
 from common.utils.logger import maxkb_logger

 from knowledge.models import File

 scheduler = BackgroundScheduler()
 scheduler.add_jobstore(DjangoJobStore(), "default")
-lock = FileLock()


 def clean_chat_log_job():
@@ -71,7 +70,7 @@ def clean_chat_log_job():


 def run():
-    if lock.try_lock('clean_chat_log_job', 30 * 30):
+    if try_lock('clean_chat_log_job', 30 * 30):
         try:
             scheduler.start()
             existing_job = scheduler.get_job(job_id='clean_chat_log')
@@ -79,4 +78,4 @@ def run():
                 existing_job.remove()
             scheduler.add_job(clean_chat_log_job, 'cron', hour='0', minute='5', id='clean_chat_log')
         finally:
-            lock.un_lock('clean_chat_log_job')
+            un_lock('clean_chat_log_job')
@@ -8,13 +8,12 @@ from django.db.models import Q
 from django.utils import timezone
 from django_apscheduler.jobstores import DjangoJobStore

-from common.lock.impl.file_lock import FileLock
+from common.utils.lock import un_lock, try_lock
 from common.utils.logger import maxkb_logger
 from knowledge.models import File

 scheduler = BackgroundScheduler()
 scheduler.add_jobstore(DjangoJobStore(), "default")
-lock = FileLock()


 def clean_debug_file():
@@ -27,7 +26,7 @@ def clean_debug_file():


 def run():
-    if lock.try_lock('clean_debug_file', 30 * 30):
+    if try_lock('clean_debug_file', 30 * 30):
         try:
             scheduler.start()
             clean_debug_file_job = scheduler.get_job(job_id='clean_debug_file')
@@ -35,4 +34,4 @@ def run():
                 clean_debug_file_job.remove()
             scheduler.add_job(clean_debug_file, 'cron', hour='2', minute='0', second='0', id='clean_debug_file')
         finally:
-            lock.un_lock('clean_debug_file')
+            un_lock('clean_debug_file')
Deleted file: file_lock.py (common.lock.impl.file_lock)
@@ -1,77 +0,0 @@
-# coding=utf-8
-"""
-    @project: MaxKB
-    @Author:虎
-    @file: file_lock.py
-    @date:2024/8/20 10:48
-    @desc:
-"""
-import errno
-import hashlib
-import os
-import time
-
-import six
-
-from common.lock.base_lock import BaseLock
-from maxkb.const import PROJECT_DIR
-
-
-def key_to_lock_name(key):
-    """
-    Combine part of a key with its hash to prevent very long filenames
-    """
-    MAX_LENGTH = 50
-    key_hash = hashlib.md5(six.b(key)).hexdigest()
-    lock_name = key[:MAX_LENGTH - len(key_hash) - 1] + '_' + key_hash
-    return lock_name
-
-
-class FileLock(BaseLock):
-    """
-    File locking backend.
-    """
-
-    def __init__(self, settings=None):
-        if settings is None:
-            settings = {}
-        self.location = settings.get('location')
-        if self.location is None:
-            self.location = os.path.join(PROJECT_DIR, 'data', 'lock')
-        try:
-            os.makedirs(self.location)
-        except OSError as error:
-            # Directory exists?
-            if error.errno != errno.EEXIST:
-                # Re-raise unexpected OSError
-                raise
-
-    def _get_lock_path(self, key):
-        lock_name = key_to_lock_name(key)
-        return os.path.join(self.location, lock_name)
-
-    def try_lock(self, key, timeout):
-        lock_path = self._get_lock_path(key)
-        try:
-            # Create the lock file; if creation fails, the lock is not acquired
-            fd = os.open(lock_path, os.O_CREAT | os.O_EXCL)
-        except OSError as error:
-            if error.errno == errno.EEXIST:
-                # File already exists, check its modification time
-                mtime = os.path.getmtime(lock_path)
-                ttl = mtime + timeout - time.time()
-                if ttl > 0:
-                    return False
-                else:
-                    # The previous lock has timed out, so take it over and continue
-                    os.utime(lock_path, None)
-                    return True
-            else:
-                return False
-        else:
-            os.close(fd)
-            return True
-
-    def un_lock(self, key):
-        lock_path = self._get_lock_path(key)
-        os.remove(lock_path)
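The new common.utils.lock module itself is not shown in this commit. Below is a minimal sketch of module-level try_lock/un_lock helpers that would be consistent with the call sites above and with the file-based semantics of the removed FileLock class; the lock directory constant and the function bodies are assumptions for illustration, not the project's actual implementation.

# Hypothetical sketch of common/utils/lock.py; an assumption, not the real module.
import errno
import hashlib
import os
import time

# Assumed lock directory, mirroring the removed FileLock default of <PROJECT_DIR>/data/lock.
LOCK_DIR = os.path.join('data', 'lock')


def _lock_path(key):
    # Hash the key so arbitrary keys map to short, filesystem-safe names.
    return os.path.join(LOCK_DIR, hashlib.md5(key.encode()).hexdigest())


def try_lock(key, timeout):
    """Return True if the lock was acquired (or a stale lock was taken over)."""
    os.makedirs(LOCK_DIR, exist_ok=True)
    lock_path = _lock_path(key)
    try:
        # O_CREAT | O_EXCL fails if the lock file already exists.
        fd = os.open(lock_path, os.O_CREAT | os.O_EXCL)
    except OSError as error:
        if error.errno != errno.EEXIST:
            return False
        # Existing lock: treat it as stale once it is older than `timeout` seconds.
        if os.path.getmtime(lock_path) + timeout > time.time():
            return False
        os.utime(lock_path, None)
        return True
    os.close(fd)
    return True


def un_lock(key):
    """Release the lock by removing its lock file, if present."""
    lock_path = _lock_path(key)
    if os.path.exists(lock_path):
        os.remove(lock_path)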