MaxKB/pyproject.toml
dependabot[bot] 2782a7720c build(deps): bump the python-dependencies group with 6 updates
Updates the requirements on [langchain-openai](https://github.com/langchain-ai/langchain), zhipuai, [boto3](https://github.com/boto/boto3), [tencentcloud-sdk-python](https://github.com/TencentCloud/tencentcloud-sdk-python), [xinference-client](https://github.com/xorbitsai/inference-client) and [xlrd](https://github.com/python-excel/xlrd) to permit the latest version.

Updates `langchain-openai` to 0.3.23
- [Release notes](https://github.com/langchain-ai/langchain/releases)
- [Commits](https://github.com/langchain-ai/langchain/compare/langchain-openai==0.3.22...langchain-openai==0.3.23)

Updates `zhipuai` to 2.1.5.20250611

Updates `boto3` to 1.38.36
- [Release notes](https://github.com/boto/boto3/releases)
- [Commits](https://github.com/boto/boto3/compare/1.38.35...1.38.36)

Updates `tencentcloud-sdk-python` to 3.0.1402
- [Changelog](https://github.com/TencentCloud/tencentcloud-sdk-python/blob/master/SERVICE_CHANGELOG.md)
- [Commits](https://github.com/TencentCloud/tencentcloud-sdk-python/compare/3.0.1400...3.0.1402)

Updates `xinference-client` to 1.7.0.post1
- [Release notes](https://github.com/xorbitsai/inference-client/releases)
- [Commits](https://github.com/xorbitsai/inference-client/compare/v1.6.1...v1.7.0.post1)

Updates `xlrd` to 2.0.2
- [Changelog](https://github.com/python-excel/xlrd/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/python-excel/xlrd/compare/2.0.1...2.0.2)

---
updated-dependencies:
- dependency-name: langchain-openai
  dependency-version: 0.3.23
  dependency-type: direct:production
  dependency-group: python-dependencies
- dependency-name: zhipuai
  dependency-version: 2.1.5.20250611
  dependency-type: direct:production
  dependency-group: python-dependencies
- dependency-name: boto3
  dependency-version: 1.38.36
  dependency-type: direct:production
  dependency-group: python-dependencies
- dependency-name: tencentcloud-sdk-python
  dependency-version: 3.0.1402
  dependency-type: direct:production
  dependency-group: python-dependencies
- dependency-name: xinference-client
  dependency-version: 1.7.0.post1
  dependency-type: direct:production
  dependency-group: python-dependencies
- dependency-name: xlrd
  dependency-version: 2.0.2
  dependency-type: direct:production
  dependency-group: python-dependencies
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-16 13:52:58 +08:00
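
For reference, the compare links in the commit message pin down most of the previous versions. Below is a sketch of the pins this commit replaces, reconstructed from those links (zhipuai's previous pin is not shown in the message, so it is omitted); the new pins appear in the pyproject.toml that follows.

```toml
# Dependency pins before this commit, reconstructed from the compare links above.
# This is an assumption based on the commit message, not a copy of the old file;
# zhipuai's previous version is not listed, so it is left out.
[tool.poetry.dependencies]
langchain-openai = "0.3.22"
boto3 = "1.38.35"
tencentcloud-sdk-python = "3.0.1400"
xinference-client = "1.6.1"
xlrd = "2.0.1"
```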

68 lines · 1.6 KiB · TOML

[tool.poetry]
name = "maxkb"
version = "2.0.0"
description = "Enterprise-grade AI assistant"
authors = ["shaohuzhang1 <shaohu.zhang@fit2cloud.com>"]
readme = "README.md"
package-mode = false

[tool.poetry.dependencies]
python = "^3.11"
django = "5.2.3"
drf-spectacular = { extras = ["sidecar"], version = "0.28.0" }
django-redis = "5.4.0"
django-db-connection-pool = "1.2.6"
django-mptt = "0.17.0"
psycopg = { extras = ["binary"], version = "3.2.9" }
python-dotenv = "1.1.0"
uuid-utils = "0.11.0"
diskcache2 = "1.0.0"
captcha = "0.7.1"
langchain-openai = "0.3.23"
langchain-anthropic = "0.3.15"
langchain-community = "0.3.25"
langchain-deepseek = "0.1.3"
langchain-google-genai = "2.1.5"
langchain-mcp-adapters = "0.1.7"
langchain-huggingface = "0.3.0"
langchain-ollama = "0.3.3"
langgraph = "0.4.8"
torch = "2.7.1"
qianfan = "0.4.12.3"
zhipuai = "2.1.5.20250611"
boto3 = "1.38.36"
tencentcloud-sdk-python = "3.0.1402"
xinference-client = "1.7.0.post1"
anthropic = "0.54.0"
dashscope = "1.23.4"
pylint = "3.3.7"
pydub = "0.25.1"
cffi = "1.17.1"
pysilk = "0.0.1"
sentence-transformers = "4.1.0"
websockets = "15.0.1"
psutil = "7.0.0"
celery = { extras = ["sqlalchemy"], version = "5.5.3" }
django-celery-beat = "2.8.1"
celery-once = "3.0.1"
beautifulsoup4 = "4.13.4"
html2text = "2025.4.15"
jieba = "0.42.1"
openpyxl = "3.1.5"
python-docx = "1.1.2"
xlrd = "2.0.2"
xlwt = "1.3.0"
pymupdf = "1.24.9"
pypdf = "5.6.0"
gunicorn = "23.0.0"
python-daemon = "3.1.2"
pytz = "2025.2"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[[tool.poetry.source]]
name = "pytorch"
url = "https://download.pytorch.org/whl/cpu"
priority = "explicit"
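
Because the `pytorch` source is declared with `priority = "explicit"`, Poetry only consults it for dependencies that reference it by name; as pinned above, `torch = "2.7.1"` still resolves from the default index. A minimal sketch of how torch could opt into the CPU wheel index (an illustrative change assuming CPU-only wheels are intended, not part of this commit):

```toml
[tool.poetry.dependencies]
# Pull torch from the explicit "pytorch" source defined above (CPU-only wheels).
torch = { version = "2.7.1", source = "pytorch" }
```

Whether CPU-only wheels are the right choice depends on the deployment target, so treat the `source` attribute here as an assumption rather than a recommendation.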