Mirror of https://github.com/infiniflow/ragflow.git
Add service_conf and llm_factories options to Helm chart (#8607)
### What problem does this PR solve?

Adds `service_conf` and `llm_factories` options to the Helm chart so that the RAGFlow service configuration (`local.service_conf.yaml`) and the LLM factory list (`llm_factories.json`) can be supplied from `values.yaml` and mounted into the container, instead of being baked into the image.

### Type of change

- [X] New Feature (non-breaking change which adds functionality)
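For orientation, here is a minimal sketch of a user override file that exercises both new values. Everything below is illustrative and not part of the chart defaults: the file name `my-values.yaml`, the `user_default_llm` block (its keys follow the RAGFlow service-configuration docs linked from values.yaml), and the single custom factory entry, which mirrors the commented example added to values.yaml in this PR.

```yaml
# my-values.yaml (hypothetical override file passed via `helm -f`)
ragflow:
  # Rendered into the ragflow-service-config ConfigMap and mounted at
  # /ragflow/conf/local.service_conf.yaml
  service_conf:
    user_default_llm:
      factory: OpenAI-API-Compatible        # illustrative value
      base_url: http://my-llm-gateway/v1    # illustrative value
  # Rendered as JSON and mounted at /ragflow/conf/llm_factories.json
  llm_factories:
    factory_llm_infos:
      - name: OpenAI-API-Compatible
        tags: "LLM,TEXT EMBEDDING"
        status: "1"
        llm:
          - llm_name: my-custom-llm
            tags: "LLM,CHAT,"
            max_tokens: 100000
            model_type: chat
```

Installing or upgrading with something like `helm upgrade --install ragflow ./helm -f my-values.yaml` (chart path assumed) renders the two files into a ConfigMap and mounts them into the container, as shown in the diff below.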
RAGFlow Deployment template:

@@ -43,6 +43,16 @@ spec:
           - mountPath: /etc/nginx/nginx.conf
             subPath: nginx.conf
             name: nginx-config-volume
+          {{- with .Values.ragflow.service_conf }}
+          - mountPath: /ragflow/conf/local.service_conf.yaml
+            subPath: local.service_conf.yaml
+            name: service-conf-volume
+          {{- end }}
+          {{- with .Values.ragflow.llm_factories }}
+          - mountPath: /ragflow/conf/llm_factories.json
+            subPath: llm_factories.json
+            name: service-conf-volume
+          {{- end }}
         envFrom:
           - secretRef:
               name: {{ include "ragflow.fullname" . }}-env-config
@@ -54,6 +64,9 @@ spec:
        - name: nginx-config-volume
          configMap:
            name: nginx-config
+       - name: service-conf-volume
+         configMap:
+           name: ragflow-service-config
 ---
 apiVersion: v1
 kind: Service
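When both values are set, the rendered Deployment gains two `subPath` mounts backed by one new volume. A sketch of the relevant fragments of the rendered manifest (nesting abbreviated, other fields omitted):

```yaml
# under .spec.template.spec.containers[0] of the rendered Deployment:
volumeMounts:
  - mountPath: /etc/nginx/nginx.conf
    subPath: nginx.conf
    name: nginx-config-volume
  - mountPath: /ragflow/conf/local.service_conf.yaml
    subPath: local.service_conf.yaml
    name: service-conf-volume
  - mountPath: /ragflow/conf/llm_factories.json
    subPath: llm_factories.json
    name: service-conf-volume
# under .spec.template.spec:
volumes:
  - name: nginx-config-volume
    configMap:
      name: nginx-config
  - name: service-conf-volume
    configMap:
      name: ragflow-service-config
```

Because each file is mounted via `subPath`, only `local.service_conf.yaml` and `llm_factories.json` are overlaid into `/ragflow/conf`; the rest of the directory shipped in the image is untouched.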
ConfigMap template:

@@ -1,6 +1,20 @@
 ---
 apiVersion: v1
 kind: ConfigMap
+metadata:
+  name: ragflow-service-config
+data:
+  {{- with .Values.ragflow.service_conf }}
+  local.service_conf.yaml: |
+    {{- . | toYaml | nindent 4 }}
+  {{- end }}
+  {{- with .Values.ragflow.llm_factories }}
+  llm_factories.json: |
+    {{- . | toPrettyJson | nindent 4 }}
+  {{- end }}
+---
+apiVersion: v1
+kind: ConfigMap
 metadata:
   name: nginx-config
 data:
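With the illustrative `service_conf` from the earlier sketch, the new ConfigMap renders roughly as follows: `toYaml | nindent 4` emits the map as a 4-space-indented block under the `local.service_conf.yaml` key (Helm sorts map keys, so ordering may differ from the values file), and `llm_factories` is serialized into the `llm_factories.json` key the same way via `toPrettyJson`.

```yaml
# Illustrative rendered output of the ragflow-service-config ConfigMap
apiVersion: v1
kind: ConfigMap
metadata:
  name: ragflow-service-config
data:
  local.service_conf.yaml: |
    user_default_llm:
      base_url: http://my-llm-gateway/v1
      factory: OpenAI-API-Compatible
```

When neither value is set, both `with` blocks emit nothing: `data` stays empty, the extra volumeMounts are not rendered, and existing releases behave as before, which is what makes this a non-breaking change.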
values.yaml:

@@ -69,6 +69,30 @@ env:
   EMBEDDING_BATCH_SIZE: 16
 
 ragflow:
+
+  # Optional service configuration overrides
+  # to be written to local.service_conf.yaml
+  # inside the RAGFlow container
+  # https://ragflow.io/docs/dev/configurations#service-configuration
+  service_conf:
+
+  # Optional yaml formatted override for the
+  # llm_factories.json file inside the RAGFlow
+  # container.
+  llm_factories:
+    # factory_llm_infos:
+    #   - name: OpenAI-API-Compatible
+    #     logo: ""
+    #     tags: "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION"
+    #     status: "1"
+    #     llm:
+    #       - llm_name: my-custom-llm
+    #         tags: "LLM,CHAT,"
+    #         max_tokens: 100000
+    #         model_type: chat
+    #         is_tools: false
+
+# Kubernetes configuration
 deployment:
   strategy:
   resources:
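On the JSON side, the illustrative `llm_factories` block from the earlier sketch would end up inside the container at `/ragflow/conf/llm_factories.json` looking roughly like this (exact key order and whitespace are up to Helm's `toPrettyJson`; the factory and model names are hypothetical):

```json
{
  "factory_llm_infos": [
    {
      "llm": [
        {
          "llm_name": "my-custom-llm",
          "max_tokens": 100000,
          "model_type": "chat",
          "tags": "LLM,CHAT,"
        }
      ],
      "name": "OpenAI-API-Compatible",
      "status": "1",
      "tags": "LLM,TEXT EMBEDDING"
    }
  ]
}
```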