From a340b4f09f003859d098fc43e4cabe2727353d6f Mon Sep 17 00:00:00 2001 From: Junxiang Huang Date: Thu, 4 Sep 2025 14:02:18 +0800 Subject: [PATCH 1/4] update seafile ai deployment --- manual/extension/seafile-ai.md | 82 +++++++++++++++++--- manual/repo/docker/ce/env | 4 +- manual/repo/docker/pro/env | 4 +- manual/repo/docker/seafile-ai.yml | 6 +- manual/repo/docker/seafile-ai/env | 1 + manual/repo/docker/seafile-ai/seafile-ai.yml | 6 +- 6 files changed, 83 insertions(+), 20 deletions(-) diff --git a/manual/extension/seafile-ai.md b/manual/extension/seafile-ai.md index 7f2736b8..89e6cc7c 100644 --- a/manual/extension/seafile-ai.md +++ b/manual/extension/seafile-ai.md @@ -2,6 +2,9 @@ From Seafile 13, users can enable ***Seafile AI*** to support the following features: +!!! note "Prerequisites of Seafile AI deployment" + To deploy Seafile AI, you have to deploy [Metadat mmanagement](./metadata-server.md) extension firstly. Then you can follow this manual to deploy Seafile AI. + - File tags, file and image summaries, text translation, sdoc writing assistance - Given an image, generate its corresponding tags (including objects, weather, color, etc.) - Detect faces in images and encode them @@ -35,30 +38,85 @@ The Seafile AI basic service will use API calls to external large language model 2. 
Modify `.env`, insert or modify the following fields: - === "Use ***gpt-4o-mini*** model" + === "OpenAI" ``` COMPOSE_FILE='...,seafile-ai.yml' # add seafile-ai.yml ENABLE_SEAFILE_AI=true + SEAFILE_AI_LLM_TYPE=openai SEAFILE_AI_LLM_KEY= + SEAFILE_AI_LLM_MODEL=gpt-4o-mini # recommend ``` - === "Use other models" + === "Deepseek" ``` COMPOSE_FILE='...,seafile-ai.yml' # add seafile-ai.yml ENABLE_SEAFILE_AI=true - SEAFILE_AI_LLM_TYPE=other - SEAFILE_AI_LLM_URL=https://api.openai.com/v1 # your LLM API endpoint + SEAFILE_AI_LLM_TYPE=deepseek SEAFILE_AI_LLM_KEY= - SEAFILE_AI_LLM_MODEL=gpt-4o-mini # your model id + SEAFILE_AI_LLM_MODEL=deepseek-chat # recommend + ``` + === "Azure OpenAI" + ``` + COMPOSE_FILE='...,seafile-ai.yml' # add seafile-ai.yml + + ENABLE_SEAFILE_AI=true + SEAFILE_AI_LLM_TYPE=azure + SEAFILE_AI_LLM_URL= # your deployment url, leave blank to use default endpoint + SEAFILE_AI_LLM_KEY= + SEAFILE_AI_LLM_MODEL= + ``` + === "Ollama" + ``` + COMPOSE_FILE='...,seafile-ai.yml' # add seafile-ai.yml + + ENABLE_SEAFILE_AI=true + SEAFILE_AI_LLM_TYPE=ollama + SEAFILE_AI_LLM_URL= + SEAFILE_AI_LLM_KEY= + SEAFILE_AI_LLM_MODEL= + ``` + === "HuggingFace" + ``` + COMPOSE_FILE='...,seafile-ai.yml' # add seafile-ai.yml + + ENABLE_SEAFILE_AI=true + SEAFILE_AI_LLM_TYPE=huggingface + SEAFILE_AI_LLM_URL=/ + SEAFILE_AI_LLM_KEY= + SEAFILE_AI_LLM_MODEL= + ``` + === "Self-proxy Server" + ``` + COMPOSE_FILE='...,seafile-ai.yml' # add seafile-ai.yml + + ENABLE_SEAFILE_AI=true + SEAFILE_AI_LLM_TYPE=proxy + SEAFILE_AI_LLM_URL= + SEAFILE_AI_LLM_KEY= + SEAFILE_AI_LLM_MODEL= + ``` + === "Other" + Seafile AI utilizes [LiteLLM](https://docs.litellm.ai/docs/) to interact with LLM services. For a complete list of supported LLM providers, please refer to [this documentation](https://docs.litellm.ai/docs/providers). 
Then fill the following fields in your `.env`: + + ``` + COMPOSE_FILE='...,seafile-ai.yml' # add seafile-ai.yml + ENABLE_SEAFILE_AI=true + + # according to your situation + SEAFILE_AI_LLM_TYPE=... + SEAFILE_AI_LLM_URL=... + SEAFILE_AI_LLM_KEY=... + SEAFILE_AI_LLM_MODEL=... ``` - !!! note "About use custom model" + For example, if you are using a LLM service with ***OpenAI-compatible endpoints***, you should set `SEAFILE_AI_LLM_TYPE` to `other` or `openai`, and set other LLM configuration items accurately. - Seafile AI supports the use of custom large models, but the following conditions must be met: - - OpenAI compatibility API - - The large model supports multi-modality (such as supporting images, etc.) + + !!! note "About model selection" + + Seafile AI supports using large model providers from [LiteLLM](https://docs.litellm.ai/docs/providers) or large model services with OpenAI-compatible endpoints. Therefore, Seafile AI is compatible with most custom large model services except the default model (*gpt-4o-mini*), but in order to ensure the normal use of Seafile AI features, you need to select a **multimodal large model** (such as supporting image input and recognition) 3. Restart Seafile server: @@ -87,11 +145,11 @@ The Seafile AI basic service will use API calls to external large language model | `REDIS_HOST` | Redis server host | | `REDIS_PORT` | Redis server port | | `REDIS_PASSWORD` | Redis server password | - | `SEAFILE_AI_LLM_TYPE` | Large Language Model (LLM) Type. `openai` (default) will use OpenAI's ***gpt-4o-mini*** model and `other` for user-custom models which support multimodality | - | `SEAFILE_AI_LLM_URL` | LLM API endpoint, only needs to be specified when `SEAFILE_AI_LLM_TYPE=other`. Default is `https://api.openai.com/v1` | + | `SEAFILE_AI_LLM_TYPE` | Large Language Model (LLM) Type. Default is `openai`. | + | `SEAFILE_AI_LLM_URL` | LLM API endpoint. 
Default is `` (none) | | `SEAFILE_AI_LLM_KEY` | LLM API key | | `FACE_EMBEDDING_SERVICE_URL` | Face embedding service url | - | `SEAFILE_AI_LLM_MODEL` | LLM model id (or name), only needs to be specified when `SEAFILE_AI_LLM_TYPE=other`. Default is ***gpt-4o-mini*** | + | `SEAFILE_AI_LLM_MODEL` | LLM model id (or name). Default is ***gpt-4o-mini*** | then start your Seafile AI server: diff --git a/manual/repo/docker/ce/env b/manual/repo/docker/ce/env index 2a92cb0c..f70fa89a 100644 --- a/manual/repo/docker/ce/env +++ b/manual/repo/docker/ce/env @@ -77,8 +77,10 @@ NOTIFICATION_SERVER_URL= ## Seafile AI ENABLE_SEAFILE_AI=false +SEAFILE_AI_LLM_TYPE=openai SEAFILE_AI_LLM_URL= -SEAFILE_AI_LLM_KEY= +SEAFILE_AI_LLM_KEY= # your llm key +SEAFILE_AI_LLM_MODEL=gpt-4o-mini ## Metadata server MD_FILE_COUNT_LIMIT=100000 diff --git a/manual/repo/docker/pro/env b/manual/repo/docker/pro/env index ee07e4e8..6e3059db 100644 --- a/manual/repo/docker/pro/env +++ b/manual/repo/docker/pro/env @@ -119,8 +119,10 @@ NOTIFICATION_SERVER_URL= ## Seafile AI ENABLE_SEAFILE_AI=false +SEAFILE_AI_LLM_TYPE=openai SEAFILE_AI_LLM_URL= -SEAFILE_AI_LLM_KEY= +SEAFILE_AI_LLM_KEY= # your llm key +SEAFILE_AI_LLM_MODEL=gpt-4o-mini ## Metadata server MD_FILE_COUNT_LIMIT=100000 diff --git a/manual/repo/docker/seafile-ai.yml b/manual/repo/docker/seafile-ai.yml index 8720f7a8..d48733ba 100644 --- a/manual/repo/docker/seafile-ai.yml +++ b/manual/repo/docker/seafile-ai.yml @@ -7,9 +7,9 @@ services: # ports: # - 8888:8888 environment: - - SEAFILE_AI_LLM_TYPE=${SEAFILE_AI_LLM_TYPE:-openai} - - SEAFILE_AI_LLM_URL=${SEAFILE_AI_LLM_URL:-https://api.openai.com/v1} - - SEAFILE_AI_LLM_KEY=${SEAFILE_AI_LLM_KEY:?Variable is not set or empty} + - SEAFILE_AI_LLM_TYPE=${SEAFILE_AI_LLM_TYPE:-} + - SEAFILE_AI_LLM_URL=${SEAFILE_AI_LLM_URL:-} + - SEAFILE_AI_LLM_KEY=${SEAFILE_AI_LLM_KEY:-} - SEAFILE_AI_LLM_MODEL=${SEAFILE_AI_LLM_MODEL:-gpt-4o-mini} - 
FACE_EMBEDDING_SERVICE_URL=${FACE_EMBEDDING_SERVICE_URL:-http://face-embedding:8886} - FACE_EMBEDDING_SERVICE_KEY=${FACE_EMBEDDING_SERVICE_KEY:-${JWT_PRIVATE_KEY:?Variable is not set or empty}} diff --git a/manual/repo/docker/seafile-ai/env b/manual/repo/docker/seafile-ai/env index b19fa81f..df7870a6 100644 --- a/manual/repo/docker/seafile-ai/env +++ b/manual/repo/docker/seafile-ai/env @@ -20,5 +20,6 @@ FACE_EMBEDDING_SERVICE_URL= SEAFILE_AI_LLM_TYPE=openai SEAFILE_AI_LLM_URL= SEAFILE_AI_LLM_KEY=... # your llm key +SEAFILE_AI_LLM_MODEL=gpt-4o-mini INNER_SEAHUB_SERVICE_URL= # https://seafile.example.com diff --git a/manual/repo/docker/seafile-ai/seafile-ai.yml b/manual/repo/docker/seafile-ai/seafile-ai.yml index b651cc37..331802c0 100644 --- a/manual/repo/docker/seafile-ai/seafile-ai.yml +++ b/manual/repo/docker/seafile-ai/seafile-ai.yml @@ -7,9 +7,9 @@ services: ports: - 8888:8888 environment: - - SEAFILE_AI_LLM_TYPE=${SEAFILE_AI_LLM_TYPE:-openai} - - SEAFILE_AI_LLM_URL=${SEAFILE_AI_LLM_URL:-https://api.openai.com/v1} - - SEAFILE_AI_LLM_KEY=${SEAFILE_AI_LLM_KEY:?Variable is not set or empty} + - SEAFILE_AI_LLM_TYPE=${SEAFILE_AI_LLM_TYPE:-} + - SEAFILE_AI_LLM_URL=${SEAFILE_AI_LLM_URL:-} + - SEAFILE_AI_LLM_KEY=${SEAFILE_AI_LLM_KEY:-} - SEAFILE_AI_LLM_MODEL=${SEAFILE_AI_LLM_MODEL:-gpt-4o-mini} - FACE_EMBEDDING_SERVICE_URL=${FACE_EMBEDDING_SERVICE_URL:-} - FACE_EMBEDDING_SERVICE_KEY=${FACE_EMBEDDING_SERVICE_KEY:-${JWT_PRIVATE_KEY:?Variable is not set or empty}} From bf02fdfcf085e5d10ae79ba3dc131552cc0b7dc6 Mon Sep 17 00:00:00 2001 From: Junxiang Huang Date: Thu, 4 Sep 2025 14:05:37 +0800 Subject: [PATCH 2/4] update seafile ai deployment --- manual/extension/seafile-ai.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/manual/extension/seafile-ai.md b/manual/extension/seafile-ai.md index 89e6cc7c..b560ac5d 100644 --- a/manual/extension/seafile-ai.md +++ b/manual/extension/seafile-ai.md @@ -65,7 +65,7 @@ The Seafile AI basic service will use API 
calls to external large language model SEAFILE_AI_LLM_TYPE=azure SEAFILE_AI_LLM_URL= # your deployment url, leave blank to use default endpoint SEAFILE_AI_LLM_KEY= - SEAFILE_AI_LLM_MODEL= + SEAFILE_AI_LLM_MODEL= ``` === "Ollama" ``` @@ -83,9 +83,9 @@ The Seafile AI basic service will use API calls to external large language model ENABLE_SEAFILE_AI=true SEAFILE_AI_LLM_TYPE=huggingface - SEAFILE_AI_LLM_URL=/ + SEAFILE_AI_LLM_URL= SEAFILE_AI_LLM_KEY= - SEAFILE_AI_LLM_MODEL= + SEAFILE_AI_LLM_MODEL=/ ``` === "Self-proxy Server" ``` @@ -94,7 +94,7 @@ The Seafile AI basic service will use API calls to external large language model ENABLE_SEAFILE_AI=true SEAFILE_AI_LLM_TYPE=proxy SEAFILE_AI_LLM_URL= - SEAFILE_AI_LLM_KEY= + SEAFILE_AI_LLM_KEY= # optional SEAFILE_AI_LLM_MODEL= ``` === "Other" From c004d22aec807e9a92aed965b6e2b1498d12c25d Mon Sep 17 00:00:00 2001 From: Junxiang Huang Date: Thu, 4 Sep 2025 14:07:15 +0800 Subject: [PATCH 3/4] update seafile-ai.yml --- manual/extension/seafile-ai.md | 6 +++--- manual/repo/docker/seafile-ai.yml | 2 +- manual/repo/docker/seafile-ai/seafile-ai.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/manual/extension/seafile-ai.md b/manual/extension/seafile-ai.md index b560ac5d..9b1f628f 100644 --- a/manual/extension/seafile-ai.md +++ b/manual/extension/seafile-ai.md @@ -146,10 +146,10 @@ The Seafile AI basic service will use API calls to external large language model | `REDIS_PORT` | Redis server port | | `REDIS_PASSWORD` | Redis server password | | `SEAFILE_AI_LLM_TYPE` | Large Language Model (LLM) Type. Default is `openai`. | - | `SEAFILE_AI_LLM_URL` | LLM API endpoint. Default is `` (none) | - | `SEAFILE_AI_LLM_KEY` | LLM API key | - | `FACE_EMBEDDING_SERVICE_URL` | Face embedding service url | + | `SEAFILE_AI_LLM_URL` | LLM API endpoint. | + | `SEAFILE_AI_LLM_KEY` | LLM API key. | | `SEAFILE_AI_LLM_MODEL` | LLM model id (or name). 
Default is ***gpt-4o-mini*** | + | `FACE_EMBEDDING_SERVICE_URL` | Face embedding service url | then start your Seafile AI server: diff --git a/manual/repo/docker/seafile-ai.yml b/manual/repo/docker/seafile-ai.yml index d48733ba..c48ab9fb 100644 --- a/manual/repo/docker/seafile-ai.yml +++ b/manual/repo/docker/seafile-ai.yml @@ -7,7 +7,7 @@ services: # ports: # - 8888:8888 environment: - - SEAFILE_AI_LLM_TYPE=${SEAFILE_AI_LLM_TYPE:-} + - SEAFILE_AI_LLM_TYPE=${SEAFILE_AI_LLM_TYPE:-openai} - SEAFILE_AI_LLM_URL=${SEAFILE_AI_LLM_URL:-} - SEAFILE_AI_LLM_KEY=${SEAFILE_AI_LLM_KEY:-} - SEAFILE_AI_LLM_MODEL=${SEAFILE_AI_LLM_MODEL:-gpt-4o-mini} diff --git a/manual/repo/docker/seafile-ai/seafile-ai.yml b/manual/repo/docker/seafile-ai/seafile-ai.yml index 331802c0..5723880a 100644 --- a/manual/repo/docker/seafile-ai/seafile-ai.yml +++ b/manual/repo/docker/seafile-ai/seafile-ai.yml @@ -7,7 +7,7 @@ services: ports: - 8888:8888 environment: - - SEAFILE_AI_LLM_TYPE=${SEAFILE_AI_LLM_TYPE:-} + - SEAFILE_AI_LLM_TYPE=${SEAFILE_AI_LLM_TYPE:-openai} - SEAFILE_AI_LLM_URL=${SEAFILE_AI_LLM_URL:-} - SEAFILE_AI_LLM_KEY=${SEAFILE_AI_LLM_KEY:-} - SEAFILE_AI_LLM_MODEL=${SEAFILE_AI_LLM_MODEL:-gpt-4o-mini} From 5d093693cd8188a198cbff59967be3105e11cdd6 Mon Sep 17 00:00:00 2001 From: Daniel Pan Date: Tue, 16 Sep 2025 14:23:43 +0800 Subject: [PATCH 4/4] Update seafile-ai.md --- manual/extension/seafile-ai.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manual/extension/seafile-ai.md b/manual/extension/seafile-ai.md index 9b1f628f..2ae6059a 100644 --- a/manual/extension/seafile-ai.md +++ b/manual/extension/seafile-ai.md @@ -3,7 +3,7 @@ From Seafile 13, users can enable ***Seafile AI*** to support the following features: !!! note "Prerequisites of Seafile AI deployment" - To deploy Seafile AI, you have to deploy [Metadat mmanagement](./metadata-server.md) extension firstly. Then you can follow this manual to deploy Seafile AI. 
+    To deploy Seafile AI, you have to deploy the [metadata server](./metadata-server.md) extension first. Then you can follow this manual to deploy Seafile AI.
 
 - File tags, file and image summaries, text translation, sdoc writing assistance
 - Given an image, generate its corresponding tags (including objects, weather, color, etc.)
 - Detect faces in images and encode them