Fix: Fixed the styling and logic issues on the model provider page #10703 (#10909)

### What problem does this PR solve?

Fixes the styling and logic issues on the model provider page (#10703): the per-model delete button is replaced with an enable/disable switch backed by a new `enable_llm` endpoint, model option lists now skip models whose `status` is not `'1'`, the provider modal components are moved into a `modal/` subfolder, and the API key modal is rewritten on top of the shadcn/ui form components with react-hook-form.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
Author: chanx
Date: 2025-10-31 13:42:28 +08:00
Committed by: GitHub
Parent: c8a82da722
Commit: d8a7fb6f2b
24 changed files with 265 additions and 175 deletions

View File

@@ -1,4 +1,5 @@
 import { LlmIcon } from '@/components/svg-icon';
+import message from '@/components/ui/message';
 import { LlmModelType } from '@/constants/knowledge';
 import { ResponseGetType } from '@/interfaces/database/base';
 import {
@@ -16,7 +17,6 @@ import userService from '@/services/user-service';
 import { sortLLmFactoryListBySpecifiedOrder } from '@/utils/common-util';
 import { getLLMIconName, getRealModelName } from '@/utils/llm-util';
 import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
-import { Flex, message } from 'antd';
 import { DefaultOptionType } from 'antd/es/select';
 import { useCallback, useMemo } from 'react';
 import { useTranslation } from 'react-i18next';
@@ -59,7 +59,7 @@ export const useSelectLlmOptions = () => {
 function buildLlmOptionsWithIcon(x: IThirdOAIModel) {
   return {
     label: (
-      <Flex align="center" gap={6}>
+      <div className="flex items-center justify-center gap-6">
         <LlmIcon
           name={getLLMIconName(x.fid, x.llm_name)}
           width={26}
@@ -67,7 +67,7 @@ function buildLlmOptionsWithIcon(x: IThirdOAIModel) {
           size={'small'}
         />
         <span>{getRealModelName(x.llm_name)}</span>
-      </Flex>
+      </div>
     ),
     value: `${x.llm_name}@${x.fid}`,
     disabled: !x.available,
@@ -81,7 +81,6 @@ export const useSelectLlmOptionsByModelType = () => {
   const groupImage2TextOptions = useCallback(() => {
     const modelType = LlmModelType.Image2text;
     const modelTag = modelType.toUpperCase();
-
     return Object.entries(llmInfo)
       .map(([key, value]) => {
         return {
@@ -91,7 +90,8 @@ export const useSelectLlmOptionsByModelType = () => {
             (x) =>
               (x.model_type.includes(modelType) ||
                 (x.tags && x.tags.includes(modelTag))) &&
-              x.available,
+              x.available &&
+              x.status === '1',
           )
           .map(buildLlmOptionsWithIcon),
       };
@@ -141,7 +141,6 @@ export const useComposeLlmOptionsByModelTypes = (
   modelTypes: LlmModelType[],
 ) => {
   const allOptions = useSelectLlmOptionsByModelType();
-
   return modelTypes.reduce<
     (DefaultOptionType & {
       options: {
@@ -359,6 +358,35 @@ export const useDeleteLlm = () => {
   return { data, loading, deleteLlm: mutateAsync };
 };
 
+export const useEnableLlm = () => {
+  const queryClient = useQueryClient();
+  const { t } = useTranslation();
+  const {
+    data,
+    isPending: loading,
+    mutateAsync,
+  } = useMutation({
+    mutationKey: ['enableLlm'],
+    mutationFn: async (params: IDeleteLlmRequestBody & { enable: boolean }) => {
+      const reqParam: IDeleteLlmRequestBody & {
+        enable?: boolean;
+        status?: 1 | 0;
+      } = { ...params, status: params.enable ? 1 : 0 };
+      delete reqParam.enable;
+      const { data } = await userService.enable_llm(reqParam);
+      if (data.code === 0) {
+        queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
+        queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
+        queryClient.invalidateQueries({ queryKey: ['factoryList'] });
+        message.success(t('message.modified'));
+      }
+      return data.code;
+    },
+  });
+
+  return { data, loading, enableLlm: mutateAsync };
+};
+
 export const useDeleteFactory = () => {
   const queryClient = useQueryClient();
   const { t } = useTranslation();
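
The new `useEnableLlm` hook mirrors `useDeleteLlm`, but converts the boolean the UI works with into the numeric `status` flag the backend expects before calling `userService.enable_llm`, then invalidates the cached model lists so the toggle state refreshes. A minimal sketch of just that payload conversion, with `IDeleteLlmRequestBody` re-declared locally, a hypothetical `toEnableLlmPayload` helper, and placeholder model names so the snippet stands alone:

```ts
// Assumed shape of the shared request body; the real type lives in the
// project's request interfaces.
interface IDeleteLlmRequestBody {
  llm_factory: string;
  llm_name: string;
}

type EnableLlmPayload = IDeleteLlmRequestBody & { status: 1 | 0 };

// Hypothetical helper: isolates what useEnableLlm does to `params` before the
// request is sent (drop `enable`, add the numeric `status`).
function toEnableLlmPayload(
  params: IDeleteLlmRequestBody & { enable: boolean },
): EnableLlmPayload {
  const { enable, ...rest } = params;
  return { ...rest, status: enable ? 1 : 0 };
}

// toEnableLlmPayload({ llm_factory: 'OpenAI', llm_name: 'gpt-4o', enable: true })
// => { llm_factory: 'OpenAI', llm_name: 'gpt-4o', status: 1 }
```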

View File

@@ -37,5 +37,6 @@ export interface IMyLlmValue {
 export interface Llm {
   name: string;
   type: string;
+  status: '0' | '1';
   used_token: number;
 }
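
Note the asymmetry this introduces: the list endpoints report `status` as the string `'0' | '1'`, while the `enable_llm` request sends a numeric `0 | 1`. A small sketch of how the string side is read (the `isLlmEnabled` helper is hypothetical; the UI simply inlines the comparison):

```ts
// Llm as returned by the my-LLM list endpoints, per this diff.
interface Llm {
  name: string;
  type: string;
  status: '0' | '1';
  used_token: number;
}

// Hypothetical helper: the card component inlines this check as
// `model.status === '1'` when deciding whether the Switch is on.
function isLlmEnabled(llm: Llm): boolean {
  return llm.status === '1';
}
```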

View File

@@ -680,6 +680,7 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
       tocEnhanceTip: ` During the parsing of the document, table of contents information was generated (see the 'Enable Table of Contents Extraction' option in the General method). This allows the large model to return table of contents items relevant to the user's query, thereby using these items to retrieve related chunks and apply weighting to these chunks during the sorting process. This approach is derived from mimicking the behavioral logic of how humans search for knowledge in books.`,
     },
     setting: {
+      save: 'Save',
       search: 'Search',
       availableModels: 'Available models',
       profile: 'Profile',

View File

@@ -671,6 +671,7 @@ General实体和关系提取提示来自 GitHub - microsoft/graphrag基于
       tocEnhanceTip: `解析文档时生成了目录信息见General方法的启用目录抽取让大模型返回和用户问题相关的目录项从而利用目录项拿到相关chunk对这些chunk在排序中进行加权。这种方法来源于模仿人类查询书本中知识的行为逻辑`,
     },
     setting: {
+      save: '保存',
       search: '搜索',
       availableModels: '可选模型',
       profile: '概要',

View File

@@ -1,129 +0,0 @@
import { IModalManagerChildrenProps } from '@/components/modal-manager';
import { LLMFactory } from '@/constants/llm';
import { useTranslate } from '@/hooks/common-hooks';
import { Form, Input, Modal } from 'antd';
import { KeyboardEventHandler, useCallback, useEffect } from 'react';
import { ApiKeyPostBody } from '../../interface';

interface IProps extends Omit<IModalManagerChildrenProps, 'showModal'> {
  loading: boolean;
  initialValue: string;
  llmFactory: string;
  editMode?: boolean;
  onOk: (postBody: ApiKeyPostBody) => void;
  showModal?(): void;
}

type FieldType = {
  api_key?: string;
  base_url?: string;
  group_id?: string;
};

const modelsWithBaseUrl = [
  LLMFactory.OpenAI,
  LLMFactory.AzureOpenAI,
  LLMFactory.TongYiQianWen,
];

const ApiKeyModal = ({
  visible,
  hideModal,
  llmFactory,
  loading,
  initialValue,
  editMode = false,
  onOk,
}: IProps) => {
  const [form] = Form.useForm();
  const { t } = useTranslate('setting');

  const handleOk = useCallback(async () => {
    const ret = await form.validateFields();
    return onOk(ret);
  }, [form, onOk]);

  const handleKeyDown: KeyboardEventHandler<HTMLInputElement> = useCallback(
    async (e) => {
      if (e.key === 'Enter') {
        await handleOk();
      }
    },
    [handleOk],
  );

  useEffect(() => {
    if (visible) {
      form.setFieldValue('api_key', initialValue);
    }
  }, [initialValue, form, visible]);

  return (
    <Modal
      title={editMode ? t('editModel') : t('modify')}
      open={visible}
      onOk={handleOk}
      onCancel={hideModal}
      okButtonProps={{ loading }}
      confirmLoading={loading}
    >
      <Form
        name="basic"
        labelCol={{ span: 6 }}
        wrapperCol={{ span: 18 }}
        style={{ maxWidth: 600 }}
        autoComplete="off"
        form={form}
      >
        <Form.Item<FieldType>
          label={t('apiKey')}
          name="api_key"
          tooltip={t('apiKeyTip')}
          rules={[{ required: true, message: t('apiKeyMessage') }]}
        >
          <Input onKeyDown={handleKeyDown} />
        </Form.Item>
        {modelsWithBaseUrl.some((x) => x === llmFactory) && (
          <Form.Item<FieldType>
            label={t('baseUrl')}
            name="base_url"
            tooltip={
              llmFactory === LLMFactory.TongYiQianWen
                ? t('tongyiBaseUrlTip')
                : t('baseUrlTip')
            }
          >
            <Input
              placeholder={
                llmFactory === LLMFactory.TongYiQianWen
                  ? t('tongyiBaseUrlPlaceholder')
                  : 'https://api.openai.com/v1'
              }
              onKeyDown={handleKeyDown}
            />
          </Form.Item>
        )}
        {llmFactory?.toLowerCase() === 'Anthropic'.toLowerCase() && (
          <Form.Item<FieldType>
            label={t('baseUrl')}
            name="base_url"
            tooltip={t('baseUrlTip')}
          >
            <Input
              placeholder="https://api.anthropic.com/v1"
              onKeyDown={handleKeyDown}
            />
          </Form.Item>
        )}
        {llmFactory?.toLowerCase() === 'Minimax'.toLowerCase() && (
          <Form.Item<FieldType> label={'Group ID'} name="group_id">
            <Input />
          </Form.Item>
        )}
      </Form>
    </Modal>
  );
};

export default ApiKeyModal;
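
The removed file above is the antd version of the API key modal; it is re-created under `modal/` on react-hook-form further down. The main behavioural difference in the submit path is sketched below with stub objects (the stubs fake validation and are assumptions, not the real forms): antd's `validateFields()` resolves with the values or rejects, whereas react-hook-form's `handleSubmit(onValid)` returns a handler that only calls `onValid` when validation passes.

```ts
// Stand-in for the antd form instance used by the old modal.
const antdLikeForm = {
  // validateFields() resolves with the field values, or rejects on a rule error.
  validateFields: async () => ({ api_key: 'sk-placeholder' }),
};

// Stand-in for the react-hook-form instance used by the new modal.
const rhfLikeForm = {
  // handleSubmit(onValid) returns an event handler; onValid only runs when
  // validation passes.
  handleSubmit:
    (onValid: (values: { api_key: string }) => void) => async () => {
      onValid({ api_key: 'sk-placeholder' });
    },
};

async function oldSubmit(onOk: (v: { api_key: string }) => void) {
  onOk(await antdLikeForm.validateFields()); // old modal's handleOk
}

async function newSubmit(onOk: (v: { api_key: string }) => void) {
  await rhfLikeForm.handleSubmit((values) => onOk(values))(); // new modal's handleOk
}
```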

View File

@@ -1,6 +1,7 @@
 // src/components/ModelProviderCard.tsx
 import { LlmIcon } from '@/components/svg-icon';
 import { Button } from '@/components/ui/button';
+import { Switch } from '@/components/ui/switch';
 import { useSetModalState, useTranslate } from '@/hooks/common-hooks';
 import { LlmItem } from '@/hooks/llm-hooks';
 import { getRealModelName } from '@/utils/llm-util';
@@ -8,7 +9,7 @@ import { EditOutlined, SettingOutlined } from '@ant-design/icons';
 import { ChevronsDown, ChevronsUp, Trash2 } from 'lucide-react';
 import { FC } from 'react';
 import { isLocalLlmFactory } from '../../utils';
-import { useHandleDeleteFactory, useHandleDeleteLlm } from '../hooks';
+import { useHandleDeleteFactory, useHandleEnableLlm } from '../hooks';
 
 interface IModelCardProps {
   item: LlmItem;
@@ -52,7 +53,7 @@ export const ModelProviderCard: FC<IModelCardProps> = ({
 }) => {
   const { visible, switchVisible } = useSetModalState();
   const { t } = useTranslate('setting');
-  const { handleDeleteLlm } = useHandleDeleteLlm(item.name);
+  const { handleEnableLlm } = useHandleEnableLlm(item.name);
   const { handleDeleteFactory } = useHandleDeleteFactory(item.name);
 
   const handleApiKeyClick = () => {
@@ -66,7 +67,7 @@ export const ModelProviderCard: FC<IModelCardProps> = ({
   return (
     <div className={`w-full rounded-lg border border-border-default`}>
       {/* Header */}
-      <div className="flex items-center justify-between p-4 cursor-pointer transition-colors">
+      <div className="flex h-16 items-center justify-between p-4 cursor-pointer transition-colors">
         <div className="flex items-center space-x-3">
           <LlmIcon name={item.name} />
           <div>
@@ -151,16 +152,12 @@ export const ModelProviderCard: FC<IModelCardProps> = ({
                   <EditOutlined />
                 </Button>
               )}
-              <Button
-                variant={'secondary'}
-                onClick={() => {
-                  handleDeleteLlm(model.name);
-                  console.log(handleDeleteLlm, model.name);
-                }}
-                className="p-1 hover:text-state-error transition-colors"
-              >
-                <Trash2 />
-              </Button>
+              <Switch
+                checked={model.status === '1'}
+                onCheckedChange={(value) => {
+                  handleEnableLlm(model.name, value);
+                }}
+              />
             </div>
           </div>
         ))}
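
With the Switch in place, a model row no longer deletes the model; it toggles it. A minimal sketch of the wiring, assuming the `Switch` component and `useHandleEnableLlm` hook shown in this diff; the stripped-down `ModelToggle` component and its props are illustrative only:

```tsx
import { Switch } from '@/components/ui/switch';
import { useHandleEnableLlm } from '../hooks';

// Illustrative stand-in for the switch cell inside ModelProviderCard.
export function ModelToggle({
  factory,
  modelName,
  status,
}: {
  factory: string;
  modelName: string;
  status: '0' | '1';
}) {
  const { handleEnableLlm } = useHandleEnableLlm(factory);

  return (
    <Switch
      // The list API reports '1' for an enabled model.
      checked={status === '1'}
      // The boolean from the Switch is forwarded to useEnableLlm, which maps
      // it to the numeric status the backend expects.
      onCheckedChange={(value) => handleEnableLlm(modelName, value)}
    />
  );
}
```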

View File

@@ -10,8 +10,8 @@ export const UsedModel = ({
 }) => {
   const { factoryList, myLlmList: llmList, loading } = useSelectLlmList();
   return (
-    <div className="flex flex-col w-full">
-      <div className="text-text-primary text-2xl mb-4 mt-4">Added models</div>
+    <div className="flex flex-col w-full gap-4 mb-4">
+      <div className="text-text-primary text-2xl mb-2 mt-4">Added models</div>
       {llmList.map((llm) => {
         return (
           <ModelProviderCard

View File

@@ -5,6 +5,7 @@ import {
   useAddLlm,
   useDeleteFactory,
   useDeleteLlm,
+  useEnableLlm,
   useSaveApiKey,
   useSaveTenantInfo,
   useSelectLlmOptionsByModelType,
@@ -421,7 +422,7 @@ export const useHandleDeleteLlm = (llmFactory: string) => {
   const { deleteLlm } = useDeleteLlm();
   const showDeleteConfirm = useShowDeleteConfirm();
 
-  const handleDeleteLlm = (name: string) => () => {
+  const handleDeleteLlm = (name: string) => {
     showDeleteConfirm({
       onOk: async () => {
         deleteLlm({ llm_factory: llmFactory, llm_name: name });
@@ -432,6 +433,16 @@ export const useHandleDeleteLlm = (llmFactory: string) => {
   return { handleDeleteLlm };
 };
 
+export const useHandleEnableLlm = (llmFactory: string) => {
+  const { enableLlm } = useEnableLlm();
+
+  const handleEnableLlm = (name: string, enable: boolean) => {
+    enableLlm({ llm_factory: llmFactory, llm_name: name, enable });
+  };
+
+  return { handleEnableLlm };
+};
+
 export const useHandleDeleteFactory = (llmFactory: string) => {
   const { deleteFactory } = useDeleteFactory();
   const showDeleteConfirm = useShowDeleteConfirm();
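
Two things are worth noting in this hunk. First, `handleDeleteLlm` loses its extra arrow: the old curried form meant `handleDeleteLlm(model.name)` merely returned a function, so callers that invoked it directly (as the card did) never opened the confirm dialog. Second, `useHandleEnableLlm` intentionally skips the confirm step and calls `enableLlm` straight away. A standalone illustration of the currying fix (the `confirmDelete` stub and the model name are hypothetical):

```ts
// Stub standing in for showDeleteConfirm + deleteLlm.
const confirmDelete = (name: string) => console.log(`confirm delete of ${name}`);

// Old shape: calling it only builds a thunk, so nothing happens on click.
const curriedHandleDelete = (name: string) => () => confirmDelete(name);

// New shape: calling it runs the confirm immediately.
const directHandleDelete = (name: string) => confirmDelete(name);

curriedHandleDelete('gpt-4o'); // no output: returns a function that is dropped
directHandleDelete('gpt-4o'); // logs "confirm delete of gpt-4o"
```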

View File

@@ -2,14 +2,9 @@ import { LLMFactory } from '@/constants/llm';
 import { LlmItem, useFetchMyLlmListDetailed } from '@/hooks/llm-hooks';
 import { useCallback, useMemo } from 'react';
 import { isLocalLlmFactory } from '../utils';
-import ApiKeyModal from './api-key-modal';
-import AzureOpenAIModal from './azure-openai-modal';
-import BedrockModal from './bedrock-modal';
 import SystemSetting from './components/system-setting';
 import { AvailableModels } from './components/un-add-model';
 import { UsedModel } from './components/used-model';
-import FishAudioModal from './fish-audio-modal';
-import GoogleModal from './google-modal';
 import {
   useSubmitApiKey,
   useSubmitAzure,
@@ -24,12 +19,17 @@ import {
   useSubmitVolcEngine,
   useSubmityiyan,
 } from './hooks';
-import HunyuanModal from './hunyuan-modal';
-import TencentCloudModal from './next-tencent-modal';
-import OllamaModal from './ollama-modal';
-import SparkModal from './spark-modal';
-import VolcEngineModal from './volcengine-modal';
-import YiyanModal from './yiyan-modal';
+import ApiKeyModal from './modal/api-key-modal';
+import AzureOpenAIModal from './modal/azure-openai-modal';
+import BedrockModal from './modal/bedrock-modal';
+import FishAudioModal from './modal/fish-audio-modal';
+import GoogleModal from './modal/google-modal';
+import HunyuanModal from './modal/hunyuan-modal';
+import TencentCloudModal from './modal/next-tencent-modal';
+import OllamaModal from './modal/ollama-modal';
+import SparkModal from './modal/spark-modal';
+import VolcEngineModal from './modal/volcengine-modal';
+import YiyanModal from './modal/yiyan-modal';
 
 const ModelProviders = () => {
   const { saveSystemModelSettingLoading, onSystemSettingSavingOk } =
     useSubmitSystemModelSetting();

View File

@@ -34,11 +34,6 @@ import { CircleHelp } from 'lucide-react';
 import { useCallback, useMemo } from 'react';
 import SettingTitle from '../components/setting-title';
 import { isLocalLlmFactory } from '../utils';
-import ApiKeyModal from './api-key-modal';
-import AzureOpenAIModal from './azure-openai-modal';
-import BedrockModal from './bedrock-modal';
-import FishAudioModal from './fish-audio-modal';
-import GoogleModal from './google-modal';
 import {
   useHandleDeleteFactory,
   useHandleDeleteLlm,
@@ -55,14 +50,19 @@ import {
   useSubmitVolcEngine,
   useSubmityiyan,
 } from './hooks';
-import HunyuanModal from './hunyuan-modal';
 import styles from './index.less';
-import TencentCloudModal from './next-tencent-modal';
-import OllamaModal from './ollama-modal';
-import SparkModal from './spark-modal';
-import SystemModelSettingModal from './system-model-setting-modal';
-import VolcEngineModal from './volcengine-modal';
-import YiyanModal from './yiyan-modal';
+import ApiKeyModal from './modal/api-key-modal';
+import AzureOpenAIModal from './modal/azure-openai-modal';
+import BedrockModal from './modal/bedrock-modal';
+import FishAudioModal from './modal/fish-audio-modal';
+import GoogleModal from './modal/google-modal';
+import HunyuanModal from './modal/hunyuan-modal';
+import TencentCloudModal from './modal/next-tencent-modal';
+import OllamaModal from './modal/ollama-modal';
+import SparkModal from './modal/spark-modal';
+import SystemModelSettingModal from './modal/system-model-setting-modal';
+import VolcEngineModal from './modal/volcengine-modal';
+import YiyanModal from './modal/yiyan-modal';
 
 const { Text } = Typography;
 
 interface IModelCardProps {
interface IModelCardProps { interface IModelCardProps {

View File

@@ -0,0 +1,174 @@
import { IModalManagerChildrenProps } from '@/components/modal-manager';
import {
  Form,
  FormControl,
  FormField,
  FormItem,
  FormLabel,
  FormMessage,
} from '@/components/ui/form';
import { Input } from '@/components/ui/input';
import { Modal } from '@/components/ui/modal/modal';
import { LLMFactory } from '@/constants/llm';
import { useTranslate } from '@/hooks/common-hooks';
import { KeyboardEventHandler, useCallback, useEffect } from 'react';
import { useForm } from 'react-hook-form';
import { ApiKeyPostBody } from '../../../interface';

interface IProps extends Omit<IModalManagerChildrenProps, 'showModal'> {
  loading: boolean;
  initialValue: string;
  llmFactory: string;
  editMode?: boolean;
  onOk: (postBody: ApiKeyPostBody) => void;
  showModal?(): void;
}

type FieldType = {
  api_key?: string;
  base_url?: string;
  group_id?: string;
};

const modelsWithBaseUrl = [
  LLMFactory.OpenAI,
  LLMFactory.AzureOpenAI,
  LLMFactory.TongYiQianWen,
];

const ApiKeyModal = ({
  visible,
  hideModal,
  llmFactory,
  loading,
  initialValue,
  editMode = false,
  onOk,
}: IProps) => {
  const form = useForm<FieldType>();
  const { t } = useTranslate('setting');

  const handleOk = useCallback(async () => {
    await form.handleSubmit((values) => onOk(values))();
  }, [form, onOk]);

  const handleKeyDown: KeyboardEventHandler<HTMLInputElement> = useCallback(
    async (e) => {
      if (e.key === 'Enter') {
        await handleOk();
      }
    },
    [handleOk],
  );

  useEffect(() => {
    if (visible) {
      form.setValue('api_key', initialValue);
    }
  }, [initialValue, form, visible]);

  return (
    <Modal
      title={editMode ? t('editModel') : t('modify')}
      open={visible}
      onOpenChange={(open) => !open && hideModal()}
      onOk={handleOk}
      onCancel={hideModal}
      confirmLoading={loading}
      okText={t('save')}
      cancelText={t('cancel')}
    >
      <Form {...form}>
        <div className="space-y-4 py-4">
          <FormField
            name="api_key"
            rules={{ required: t('apiKeyMessage') }}
            render={({ field }) => (
              <FormItem>
                <FormLabel className="text-sm font-medium text-text-primary">
                  {t('apiKey')}
                  <span className="ml-1 text-destructive">*</span>
                </FormLabel>
                <FormControl>
                  <Input
                    {...field}
                    onKeyDown={handleKeyDown}
                    className="w-full"
                  />
                </FormControl>
                <FormMessage />
              </FormItem>
            )}
          />
          {modelsWithBaseUrl.some((x) => x === llmFactory) && (
            <FormField
              name="base_url"
              render={({ field }) => (
                <FormItem>
                  <FormLabel className="text-sm font-medium text-text-primary">
                    {t('baseUrl')}
                  </FormLabel>
                  <FormControl>
                    <Input
                      {...field}
                      placeholder={
                        llmFactory === LLMFactory.TongYiQianWen
                          ? t('tongyiBaseUrlPlaceholder')
                          : 'https://api.openai.com/v1'
                      }
                      onKeyDown={handleKeyDown}
                      className="w-full"
                    />
                  </FormControl>
                  <FormMessage />
                </FormItem>
              )}
            />
          )}
          {llmFactory?.toLowerCase() === 'Anthropic'.toLowerCase() && (
            <FormField
              name="base_url"
              render={({ field }) => (
                <FormItem>
                  <FormLabel className="text-sm font-medium text-text-primary">
                    {t('baseUrl')}
                  </FormLabel>
                  <FormControl>
                    <Input
                      {...field}
                      placeholder="https://api.anthropic.com/v1"
                      onKeyDown={handleKeyDown}
                      className="w-full"
                    />
                  </FormControl>
                  <FormMessage />
                </FormItem>
              )}
            />
          )}
          {llmFactory?.toLowerCase() === 'Minimax'.toLowerCase() && (
            <FormField
              name="group_id"
              render={({ field }) => (
                <FormItem>
                  <FormLabel className="text-sm font-medium text-text-primary">
                    Group ID
                  </FormLabel>
                  <FormControl>
                    <Input {...field} className="w-full" />
                  </FormControl>
                  <FormMessage />
                </FormItem>
              )}
            />
          )}
        </div>
      </Form>
    </Modal>
  );
};

export default ApiKeyModal;
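
For reference, a hedged usage sketch of the relocated modal. In the repo the open/close state and submit handler come from `useSubmitApiKey`; the local `useState` driver, the `'OpenAI'` factory, and the logging callback below are illustrative assumptions:

```tsx
import { useState } from 'react';
import ApiKeyModal from './modal/api-key-modal';

export function ApiKeyModalExample() {
  const [visible, setVisible] = useState(false);

  return (
    <>
      <button onClick={() => setVisible(true)}>Set API key</button>
      <ApiKeyModal
        visible={visible}
        hideModal={() => setVisible(false)}
        llmFactory="OpenAI"
        loading={false}
        initialValue=""
        editMode
        onOk={(postBody) => {
          // postBody is the ApiKeyPostBody collected by react-hook-form:
          // api_key, plus base_url / group_id when the factory needs them.
          console.log(postBody);
          setVisible(false);
        }}
      />
    </>
  );
}
```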

View File

@@ -3,7 +3,7 @@ import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
 import { Flex, Form, Input, InputNumber, Modal, Select, Space } from 'antd';
 import { useMemo } from 'react';
-import { BedrockRegionList } from '../constant';
+import { BedrockRegionList } from '../../constant';
 
 type FieldType = IAddLlmRequestBody & {
   bedrock_ak: string;

View File

@@ -7,7 +7,7 @@ import {
 } from '@/hooks/llm-hooks';
 import { Form, Modal, Select } from 'antd';
 import { useEffect } from 'react';
-import { useFetchSystemModelSettingOnMount } from '../hooks';
+import { useFetchSystemModelSettingOnMount } from '../../hooks';
 
 interface IProps extends Omit<IModalManagerChildrenProps, 'showModal'> {
   loading: boolean;

View File

@@ -16,6 +16,7 @@ const {
   set_tenant_info,
   add_llm,
   delete_llm,
+  enable_llm,
   deleteFactory,
   getSystemStatus,
   getSystemVersion,
@@ -79,6 +80,10 @@ const methods = {
     url: delete_llm,
     method: 'post',
   },
+  enable_llm: {
+    url: enable_llm,
+    method: 'post',
+  },
   getSystemStatus: {
     url: getSystemStatus,
     method: 'get',

View File

@@ -31,6 +31,7 @@ export default {
   set_api_key: `${api_host}/llm/set_api_key`,
   add_llm: `${api_host}/llm/add_llm`,
   delete_llm: `${api_host}/llm/delete_llm`,
+  enable_llm: `${api_host}/llm/enable_llm`,
   deleteFactory: `${api_host}/llm/delete_factory`,
   // plugin
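
With the URL and service method registered, the endpoint is reachable both through the hooks and directly via the service map. A direct-call sketch, assuming the axios-style `{ data }` response checked by `useEnableLlm`; the factory and model names are placeholders:

```ts
import userService from '@/services/user-service';

// Toggle a model on or off via the newly registered enable_llm method.
async function enableModel(factory: string, model: string, enable: boolean) {
  const { data } = await userService.enable_llm({
    llm_factory: factory,
    llm_name: model,
    status: enable ? 1 : 0,
  });
  return data.code === 0; // code 0 means the backend accepted the change
}

// enableModel('OpenAI', 'gpt-4o', false);
```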