Add service_conf and llm_factories options to Helm chart (#8607)

### What problem does this PR solve?

### Type of change

- [X] New Feature (non-breaking change which adds functionality)
This commit is contained in:
Scott Davidson
2025-07-02 02:58:17 +01:00
committed by GitHub
parent 212d5ce7ff
commit 9dd3dfaab0
3 changed files with 51 additions and 0 deletions

View File

@@ -69,6 +69,30 @@ env:
EMBEDDING_BATCH_SIZE: 16
ragflow:
# Optional service configuration overrides
# to be written to local.service_conf.yaml
# inside the RAGFlow container
# https://ragflow.io/docs/dev/configurations#service-configuration
service_conf:
  # Optional YAML-formatted override for the
  # llm_factories.json file inside the RAGFlow
  # container.
llm_factories:
# factory_llm_infos:
# - name: OpenAI-API-Compatible
# logo: ""
# tags: "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION"
# status: "1"
# llm:
# - llm_name: my-custom-llm
# tags: "LLM,CHAT,"
# max_tokens: 100000
# model_type: chat
# is_tools: false
# Kubernetes configuration
deployment:
strategy:
resources: