mirror of
https://github.com/infiniflow/ragflow.git
synced 2025-12-08 20:42:30 +08:00
support api-version and change default-model in adding azure-openai and openai (#2799)
### What problem does this PR solve? #2701 #2712 #2749 ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) - [x] New Feature (non-breaking change which adds functionality) --------- Co-authored-by: Kevin Hu <kevinhu.sh@gmail.com>
This commit is contained in:
committed by
GitHub
parent
bfaef2cca6
commit
18f80743eb
@ -581,6 +581,8 @@ The above is the content you need to summarize.`,
|
||||
GoogleRegionMessage: 'Please input Google Cloud Region',
|
||||
modelProvidersWarn:
|
||||
'Please add both embedding model and LLM in <b>Settings > Model providers</b> firstly.',
|
||||
apiVersion: 'API-Version',
|
||||
apiVersionMessage: 'Please input API version',
|
||||
},
|
||||
message: {
|
||||
registered: 'Registered!',
|
||||
|
||||
@ -557,6 +557,8 @@ export default {
|
||||
GoogleRegionMessage: '请输入 Google Cloud 区域',
|
||||
modelProvidersWarn:
|
||||
'请首先在 <b>设置 > 模型提供商</b> 中添加嵌入模型和 LLM。',
|
||||
apiVersion: 'API版本',
|
||||
apiVersionMessage: '请输入API版本!',
|
||||
},
|
||||
message: {
|
||||
registered: '注册成功',
|
||||
|
||||
@ -0,0 +1,128 @@
|
||||
import { useTranslate } from '@/hooks/common-hooks';
|
||||
import { IModalProps } from '@/interfaces/common';
|
||||
import { IAddLlmRequestBody } from '@/interfaces/request/llm';
|
||||
import { Form, Input, Modal, Select, Switch } from 'antd';
|
||||
import omit from 'lodash/omit';
|
||||
|
||||
type FieldType = IAddLlmRequestBody & {
|
||||
api_version: string;
|
||||
vision: boolean;
|
||||
};
|
||||
|
||||
const { Option } = Select;
|
||||
|
||||
const AzureOpenAIModal = ({
|
||||
visible,
|
||||
hideModal,
|
||||
onOk,
|
||||
loading,
|
||||
llmFactory,
|
||||
}: IModalProps<IAddLlmRequestBody> & { llmFactory: string }) => {
|
||||
const [form] = Form.useForm<FieldType>();
|
||||
|
||||
const { t } = useTranslate('setting');
|
||||
|
||||
const handleOk = async () => {
|
||||
const values = await form.validateFields();
|
||||
const modelType =
|
||||
values.model_type === 'chat' && values.vision
|
||||
? 'image2text'
|
||||
: values.model_type;
|
||||
|
||||
const data = {
|
||||
...omit(values, ['vision']),
|
||||
model_type: modelType,
|
||||
llm_factory: llmFactory,
|
||||
};
|
||||
console.info(data);
|
||||
|
||||
onOk?.(data);
|
||||
};
|
||||
const optionsMap = {
|
||||
Default: [
|
||||
{ value: 'chat', label: 'chat' },
|
||||
{ value: 'embedding', label: 'embedding' },
|
||||
{ value: 'image2text', label: 'image2text' },
|
||||
],
|
||||
};
|
||||
const getOptions = (factory: string) => {
|
||||
return optionsMap.Default;
|
||||
};
|
||||
return (
|
||||
<Modal
|
||||
title={t('addLlmTitle', { name: llmFactory })}
|
||||
open={visible}
|
||||
onOk={handleOk}
|
||||
onCancel={hideModal}
|
||||
okButtonProps={{ loading }}
|
||||
>
|
||||
<Form
|
||||
name="basic"
|
||||
style={{ maxWidth: 600 }}
|
||||
autoComplete="off"
|
||||
layout={'vertical'}
|
||||
form={form}
|
||||
>
|
||||
<Form.Item<FieldType>
|
||||
label={t('modelType')}
|
||||
name="model_type"
|
||||
initialValue={'embedding'}
|
||||
rules={[{ required: true, message: t('modelTypeMessage') }]}
|
||||
>
|
||||
<Select placeholder={t('modelTypeMessage')}>
|
||||
{getOptions(llmFactory).map((option) => (
|
||||
<Option key={option.value} value={option.value}>
|
||||
{option.label}
|
||||
</Option>
|
||||
))}
|
||||
</Select>
|
||||
</Form.Item>
|
||||
<Form.Item<FieldType>
|
||||
label={t('addLlmBaseUrl')}
|
||||
name="api_base"
|
||||
rules={[{ required: true, message: t('baseUrlNameMessage') }]}
|
||||
>
|
||||
<Input placeholder={t('baseUrlNameMessage')} />
|
||||
</Form.Item>
|
||||
<Form.Item<FieldType>
|
||||
label={t('apiKey')}
|
||||
name="api_key"
|
||||
rules={[{ required: false, message: t('apiKeyMessage') }]}
|
||||
>
|
||||
<Input placeholder={t('apiKeyMessage')} />
|
||||
</Form.Item>
|
||||
<Form.Item<FieldType>
|
||||
label={t('modelName')}
|
||||
name="llm_name"
|
||||
initialValue="gpt-3.5-turbo"
|
||||
rules={[{ required: true, message: t('modelNameMessage') }]}
|
||||
>
|
||||
<Input placeholder={t('modelNameMessage')} />
|
||||
</Form.Item>
|
||||
<Form.Item<FieldType>
|
||||
label={t('apiVersion')}
|
||||
name="api_version"
|
||||
initialValue="2024-02-01"
|
||||
rules={[{ required: false, message: t('apiVersionMessage') }]}
|
||||
>
|
||||
<Input placeholder={t('apiVersionMessage')} />
|
||||
</Form.Item>
|
||||
<Form.Item noStyle dependencies={['model_type']}>
|
||||
{({ getFieldValue }) =>
|
||||
getFieldValue('model_type') === 'chat' && (
|
||||
<Form.Item
|
||||
label={t('vision')}
|
||||
valuePropName="checked"
|
||||
name={'vision'}
|
||||
>
|
||||
<Switch />
|
||||
</Form.Item>
|
||||
)
|
||||
}
|
||||
</Form.Item>
|
||||
</Form>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
|
||||
export default AzureOpenAIModal;
|
||||
@ -353,6 +353,33 @@ export const useSubmitBedrock = () => {
|
||||
};
|
||||
};
|
||||
|
||||
export const useSubmitAzure = () => {
|
||||
const { addLlm, loading } = useAddLlm();
|
||||
const {
|
||||
visible: AzureAddingVisible,
|
||||
hideModal: hideAzureAddingModal,
|
||||
showModal: showAzureAddingModal,
|
||||
} = useSetModalState();
|
||||
|
||||
const onAzureAddingOk = useCallback(
|
||||
async (payload: IAddLlmRequestBody) => {
|
||||
const ret = await addLlm(payload);
|
||||
if (ret === 0) {
|
||||
hideAzureAddingModal();
|
||||
}
|
||||
},
|
||||
[hideAzureAddingModal, addLlm],
|
||||
);
|
||||
|
||||
return {
|
||||
AzureAddingLoading: loading,
|
||||
onAzureAddingOk,
|
||||
AzureAddingVisible,
|
||||
hideAzureAddingModal,
|
||||
showAzureAddingModal,
|
||||
};
|
||||
};
|
||||
|
||||
export const useHandleDeleteLlm = (llmFactory: string) => {
|
||||
const { deleteLlm } = useDeleteLlm();
|
||||
const showDeleteConfirm = useShowDeleteConfirm();
|
||||
|
||||
@ -29,6 +29,7 @@ import SettingTitle from '../components/setting-title';
|
||||
import { isLocalLlmFactory } from '../utils';
|
||||
import TencentCloudModal from './Tencent-modal';
|
||||
import ApiKeyModal from './api-key-modal';
|
||||
import AzureOpenAIModal from './azure-openai-modal';
|
||||
import BedrockModal from './bedrock-modal';
|
||||
import { IconMap } from './constant';
|
||||
import FishAudioModal from './fish-audio-modal';
|
||||
@ -37,6 +38,7 @@ import {
|
||||
useHandleDeleteFactory,
|
||||
useHandleDeleteLlm,
|
||||
useSubmitApiKey,
|
||||
useSubmitAzure,
|
||||
useSubmitBedrock,
|
||||
useSubmitFishAudio,
|
||||
useSubmitGoogle,
|
||||
@ -109,7 +111,8 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
|
||||
item.name === 'BaiduYiyan' ||
|
||||
item.name === 'Fish Audio' ||
|
||||
item.name === 'Tencent Cloud' ||
|
||||
item.name === 'Google Cloud'
|
||||
item.name === 'Google Cloud' ||
|
||||
item.name === 'Azure OpenAI'
|
||||
? t('addTheModel')
|
||||
: 'API-Key'}
|
||||
<SettingOutlined />
|
||||
@ -242,6 +245,14 @@ const UserSettingModel = () => {
|
||||
showBedrockAddingModal,
|
||||
} = useSubmitBedrock();
|
||||
|
||||
const {
|
||||
AzureAddingVisible,
|
||||
hideAzureAddingModal,
|
||||
showAzureAddingModal,
|
||||
onAzureAddingOk,
|
||||
AzureAddingLoading,
|
||||
} = useSubmitAzure();
|
||||
|
||||
const ModalMap = useMemo(
|
||||
() => ({
|
||||
Bedrock: showBedrockAddingModal,
|
||||
@ -252,6 +263,7 @@ const UserSettingModel = () => {
|
||||
'Fish Audio': showFishAudioAddingModal,
|
||||
'Tencent Cloud': showTencentCloudAddingModal,
|
||||
'Google Cloud': showGoogleAddingModal,
|
||||
'Azure-OpenAI': showAzureAddingModal,
|
||||
}),
|
||||
[
|
||||
showBedrockAddingModal,
|
||||
@ -262,6 +274,7 @@ const UserSettingModel = () => {
|
||||
showyiyanAddingModal,
|
||||
showFishAudioAddingModal,
|
||||
showGoogleAddingModal,
|
||||
showAzureAddingModal,
|
||||
],
|
||||
);
|
||||
|
||||
@ -435,6 +448,13 @@ const UserSettingModel = () => {
|
||||
loading={bedrockAddingLoading}
|
||||
llmFactory={'Bedrock'}
|
||||
></BedrockModal>
|
||||
<AzureOpenAIModal
|
||||
visible={AzureAddingVisible}
|
||||
hideModal={hideAzureAddingModal}
|
||||
onOk={onAzureAddingOk}
|
||||
loading={AzureAddingLoading}
|
||||
llmFactory={'Azure-OpenAI'}
|
||||
></AzureOpenAIModal>
|
||||
</section>
|
||||
);
|
||||
};
|
||||
|
||||
@ -101,7 +101,7 @@ const OllamaModal = ({
|
||||
<Form.Item<FieldType>
|
||||
label={t('modelType')}
|
||||
name="model_type"
|
||||
initialValue={'chat'}
|
||||
initialValue={'embedding'}
|
||||
rules={[{ required: true, message: t('modelTypeMessage') }]}
|
||||
>
|
||||
<Select placeholder={t('modelTypeMessage')}>
|
||||
|
||||
Reference in New Issue
Block a user