diff --git a/helm/templates/ragflow.yaml b/helm/templates/ragflow.yaml
index b1f8d1ea1..57a5d224b 100644
--- a/helm/templates/ragflow.yaml
+++ b/helm/templates/ragflow.yaml
@@ -43,6 +43,16 @@ spec:
             - mountPath: /etc/nginx/nginx.conf
               subPath: nginx.conf
               name: nginx-config-volume
+            {{- with .Values.ragflow.service_conf }}
+            - mountPath: /ragflow/conf/local.service_conf.yaml
+              subPath: local.service_conf.yaml
+              name: service-conf-volume
+            {{- end }}
+            {{- with .Values.ragflow.llm_factories }}
+            - mountPath: /ragflow/conf/llm_factories.json
+              subPath: llm_factories.json
+              name: service-conf-volume
+            {{- end }}
          envFrom:
            - secretRef:
                name: {{ include "ragflow.fullname" . }}-env-config
@@ -54,6 +64,9 @@ spec:
        - name: nginx-config-volume
          configMap:
            name: nginx-config
+        - name: service-conf-volume
+          configMap:
+            name: ragflow-service-config
 ---
 apiVersion: v1
 kind: Service
diff --git a/helm/templates/ragflow_config.yaml b/helm/templates/ragflow_config.yaml
index 6967ecca5..533bd3df9 100644
--- a/helm/templates/ragflow_config.yaml
+++ b/helm/templates/ragflow_config.yaml
@@ -1,6 +1,20 @@
 ---
 apiVersion: v1
 kind: ConfigMap
+metadata:
+  name: ragflow-service-config
+data:
+  {{- with .Values.ragflow.service_conf }}
+  local.service_conf.yaml: |
+    {{- . | toYaml | nindent 4 }}
+  {{- end }}
+  {{- with .Values.ragflow.llm_factories }}
+  llm_factories.json: |
+    {{- . | toPrettyJson | nindent 4 }}
+  {{- end }}
+---
+apiVersion: v1
+kind: ConfigMap
 metadata:
   name: nginx-config
 data:
diff --git a/helm/values.yaml b/helm/values.yaml
index 396466deb..4756586f0 100644
--- a/helm/values.yaml
+++ b/helm/values.yaml
@@ -69,6 +69,30 @@ env:
   EMBEDDING_BATCH_SIZE: 16
 
 ragflow:
+
+  # Optional service configuration overrides
+  # to be written to local.service_conf.yaml
+  # inside the RAGFlow container
+  # https://ragflow.io/docs/dev/configurations#service-configuration
+  service_conf:
+
+  # Optional yaml formatted override for the
+  # llm_factories.json file inside the RAGFlow
+  # container.
+  llm_factories:
+    # factory_llm_infos:
+    # - name: OpenAI-API-Compatible
+    #   logo: ""
+    #   tags: "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION"
+    #   status: "1"
+    #   llm:
+    #     - llm_name: my-custom-llm
+    #       tags: "LLM,CHAT,"
+    #       max_tokens: 100000
+    #       model_type: chat
+    #       is_tools: false
+
+  # Kubernetes configuration
   deployment:
     strategy:
     resources:
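
Example usage: a minimal values.yaml sketch that exercises both new overrides. The llm_factories block mirrors the commented example added to values.yaml above; the service_conf keys (user_default_llm and its fields) are illustrative assumptions and should be checked against the RAGFlow service configuration docs linked in the values.yaml comments.

    ragflow:
      # Rendered to local.service_conf.yaml via toYaml
      service_conf:
        user_default_llm:          # assumed key; verify against the docs
          factory: "OpenAI-API-Compatible"
          api_key: "sk-example"
          base_url: "http://my-llm.example.com/v1"
      # Rendered to llm_factories.json via toPrettyJson
      llm_factories:
        factory_llm_infos:
          - name: OpenAI-API-Compatible
            logo: ""
            tags: "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION"
            status: "1"
            llm:
              - llm_name: my-custom-llm
                tags: "LLM,CHAT,"
                max_tokens: 100000
                model_type: chat
                is_tools: false

With these values set, the ragflow-service-config ConfigMap gains two keys, local.service_conf.yaml and llm_factories.json, and the new volumeMounts project them into the container at /ragflow/conf/ via subPath. When either value is left empty, the corresponding `with` blocks are skipped, so the mounts and ConfigMap keys are simply omitted.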