Add support for VolcEngine - the current version supports SDK2 (#885)
- The main idea is to assemble **ak**, **sk**, and **ep_id** into a
dictionary and store it in the database **api_key** field (see the sketch
after this list).
- I am not very familiar with the front-end, so I modeled the changes on the
Ollama integration, which may introduce some redundancy.
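
A minimal sketch of that idea in TypeScript (the language of the diff below): the three values are packed into one JSON string so they fit the existing single `api_key` column. The field names `volc_ak` / `volc_sk` / `ep_id` and the `packVolcEngineApiKey` helper are illustrative assumptions, not the repository's actual identifiers.

```typescript
// Sketch only: bundle the VolcEngine credentials into one JSON string so the
// existing api_key column can hold them. Field names here are assumptions.
interface VolcEngineCredentials {
  volc_ak: string; // Volcano ACCESS_KEY
  volc_sk: string; // Volcano SECRET_KEY
  ep_id: string;   // endpoint_id of the deployed model
}

function packVolcEngineApiKey(creds: VolcEngineCredentials): string {
  // The resulting string is stored verbatim in api_key; the backend parses it
  // back into a dictionary before calling the VolcEngine SDK.
  return JSON.stringify(creds);
}

// Example:
// packVolcEngineApiKey({ volc_ak: 'AK...', volc_sk: 'SK...', ep_id: 'ep-xxxxxxxxx' })
```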
### Configuration method
- Model name
  - Format requirements: `{"VolcEngine model name": "endpoint_id"}`
  - For example: `{"Skylark-pro-32K": "ep-xxxxxxxxx"}` (see the parsing sketch after this list)
- Volcano ACCESS_KEY
  - Format requirements: the VOLC_ACCESSKEY of the Volcano Engine account corresponding to the model
- Volcano SECRET_KEY
  - Format requirements: the VOLC_SECRETKEY of the Volcano Engine account corresponding to the model
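
Because the model name is supplied as a JSON mapping, the endpoint_id can be recovered with a plain parse-and-lookup. A minimal sketch; the `resolveEndpointId` helper is hypothetical and only illustrates the mapping format above, not where the lookup actually lives in the code base.

```typescript
// Sketch only: resolve the VolcEngine endpoint_id from the user-supplied
// model-name mapping, e.g. '{"Skylark-pro-32K":"ep-xxxxxxxxx"}'.
function resolveEndpointId(modelNameJson: string, modelName: string): string {
  const mapping: Record<string, string> = JSON.parse(modelNameJson);
  const endpointId = mapping[modelName];
  if (!endpointId) {
    throw new Error(`No endpoint_id configured for model "${modelName}"`);
  }
  return endpointId;
}

// resolveEndpointId('{"Skylark-pro-32K":"ep-xxxxxxxxx"}', 'Skylark-pro-32K')
// -> 'ep-xxxxxxxxx'
```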
### What problem does this PR solve?
_Briefly describe what this PR aims to solve. Include background context
that will help reviewers understand the purpose of the PR._
### Type of change
- [x] New Feature (non-breaking change which adds functionality)
```diff
@@ -37,10 +37,12 @@ import {
   useSelectModelProvidersLoading,
   useSubmitApiKey,
   useSubmitOllama,
+  useSubmitVolcEngine,
   useSubmitSystemModelSetting,
 } from './hooks';
 import styles from './index.less';
 import OllamaModal from './ollama-modal';
+import VolcEngineModal from "./volcengine-model";
 import SystemModelSettingModal from './system-model-setting-modal';
 
 const IconMap = {
@@ -52,6 +54,7 @@ const IconMap = {
   Ollama: 'ollama',
   Xinference: 'xinference',
   DeepSeek: 'deepseek',
+  VolcEngine: 'volc_engine',
 };
 
 const LlmIcon = ({ name }: { name: string }) => {
@@ -165,6 +168,15 @@ const UserSettingModel = () => {
     selectedLlmFactory,
   } = useSubmitOllama();
 
+  const {
+    volcAddingVisible,
+    hideVolcAddingModal,
+    showVolcAddingModal,
+    onVolcAddingOk,
+    volcAddingLoading,
+    selectedVolcFactory,
+  } = useSubmitVolcEngine();
+
   const handleApiKeyClick = useCallback(
     (llmFactory: string) => {
       if (isLocalLlmFactory(llmFactory)) {
@@ -179,6 +191,8 @@ const UserSettingModel = () => {
   const handleAddModel = (llmFactory: string) => () => {
     if (isLocalLlmFactory(llmFactory)) {
       showLlmAddingModal(llmFactory);
+    } else if (llmFactory === 'VolcEngine') {
+      showVolcAddingModal('VolcEngine');
     } else {
       handleApiKeyClick(llmFactory);
     }
@@ -270,6 +284,13 @@ const UserSettingModel = () => {
         loading={llmAddingLoading}
         llmFactory={selectedLlmFactory}
       ></OllamaModal>
+      <VolcEngineModal
+        visible={volcAddingVisible}
+        hideModal={hideVolcAddingModal}
+        onOk={onVolcAddingOk}
+        loading={volcAddingLoading}
+        llmFactory={selectedVolcFactory}
+      ></VolcEngineModal>
     </section>
   );
 };
```
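
The diff consumes a `useSubmitVolcEngine` hook from `./hooks` whose body is not shown in the excerpt above. The following is a rough sketch of what such a dialog-state hook could look like, modeled on the Ollama flow; the state handling and the commented-out `addLlm` mutation are assumptions, not the repository's actual implementation.

```typescript
// Sketch only: dialog state for the VolcEngine modal, mirroring the Ollama
// pattern. The addLlm mutation referenced below is a hypothetical helper.
import { useCallback, useState } from 'react';

export const useSubmitVolcEngine = () => {
  const [volcAddingVisible, setVolcAddingVisible] = useState(false);
  const [volcAddingLoading, setVolcAddingLoading] = useState(false);
  const [selectedVolcFactory, setSelectedVolcFactory] = useState<string>('');

  const showVolcAddingModal = useCallback((llmFactory: string) => {
    setSelectedVolcFactory(llmFactory);
    setVolcAddingVisible(true);
  }, []);

  const hideVolcAddingModal = useCallback(() => {
    setVolcAddingVisible(false);
  }, []);

  // Submits the ak/sk/ep_id form values; the backend would pack them into the
  // api_key field (see the sketch near the top of this page).
  const onVolcAddingOk = useCallback(async (payload: Record<string, unknown>) => {
    setVolcAddingLoading(true);
    try {
      // await addLlm(payload); // hypothetical backend mutation
      setVolcAddingVisible(false);
    } finally {
      setVolcAddingLoading(false);
    }
  }, []);

  return {
    volcAddingVisible,
    hideVolcAddingModal,
    showVolcAddingModal,
    onVolcAddingOk,
    volcAddingLoading,
    selectedVolcFactory,
  };
};
```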