add support for OpenAI-API-Compatible LLM (#1787)

### What problem does this PR solve?

#1771: add support for OpenAI-API-Compatible LLM providers.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
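
For reference, "OpenAI-API-Compatible" here means any server that exposes the OpenAI REST surface (e.g. `/v1/chat/completions`) at a user-supplied base URL, such as vLLM, LocalAI or LM Studio. Below is a minimal sketch, assuming the official `openai` Node SDK, of how such an endpoint is typically called; the base URL, model name and key are placeholders, not values taken from this PR:

```ts
import OpenAI from 'openai';

// Point the client at any OpenAI-API-compatible server by overriding baseURL.
// Many self-hosted servers accept a dummy key, hence the fallback value.
const client = new OpenAI({
  baseURL: 'http://localhost:8000/v1',            // hypothetical endpoint
  apiKey: process.env.OPENAI_API_KEY ?? 'not-needed',
});

async function main() {
  const completion = await client.chat.completions.create({
    model: 'my-local-model',                      // placeholder model name
    messages: [{ role: 'user', content: 'Hello!' }],
  });
  console.log(completion.choices[0].message.content);
}

main();
```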

---------

Co-authored-by: Zhedong Cen <cenzhedong2@126.com>
Authored by 黄腾 on 2024-08-06 16:20:21 +08:00; committed via GitHub.
parent 66e4113e0b
commit b67484e77d
12 changed files with 74 additions and 11 deletions


@@ -17,4 +17,4 @@ export const UserSettingIconMap = {
 export * from '@/constants/setting';
-export const LocalLlmFactories = ['Ollama', 'Xinference','LocalAI','LM-Studio'];
+export const LocalLlmFactories = ['Ollama', 'Xinference','LocalAI','LM-Studio',"OpenAI-API-Compatible"];


@@ -21,7 +21,8 @@ export const IconMap = {
   LocalAI: 'local-ai',
   StepFun: 'stepfun',
   NVIDIA:'nvidia',
-  'LM-Studio':'lm-studio'
+  'LM-Studio':'lm-studio',
+  'OpenAI-API-Compatible':'openai-api'
 };
 export const BedrockRegionList = [


@@ -92,6 +92,13 @@ const OllamaModal = ({
       >
         <Input placeholder={t('baseUrlNameMessage')} />
       </Form.Item>
+      <Form.Item<FieldType>
+        label={t('apiKey')}
+        name="api_key"
+        rules={[{ required: false, message: t('apiKeyMessage') }]}
+      >
+        <Input placeholder={t('apiKeyMessage')} />
+      </Form.Item>
       <Form.Item noStyle dependencies={['model_type']}>
         {({ getFieldValue }) =>
           getFieldValue('model_type') === 'chat' && (
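
The new `api_key` field is deliberately optional (`required: false`), since many self-hosted OpenAI-compatible servers accept any key or none at all. As a rough sketch of the form values this modal ends up submitting (only `api_key` and `model_type` are visible in this diff; the other names are illustrative, not taken from the source):

```ts
// Hypothetical shape of the modal's form values; field names other than
// api_key and model_type are illustrative placeholders.
type FieldType = {
  model_type: string;  // e.g. 'chat', 'embedding'
  base_url?: string;   // illustrative name for the base-URL field above
  api_key?: string;    // optional: many OpenAI-compatible servers ignore it
};
```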