ragflow/web/src/pages/user-setting/setting-model/modal/volcengine-modal/index.tsx
balibabu cc8a10376a Refactor: Refactoring VolcEngine and Yiyan modal using shadcn. #10427 (#12426)
### What problem does this PR solve?

Refactor: Refactoring VolcEngine and Yiyan modal using shadcn. #10427
### Type of change

- [x] New Feature (non-breaking change which adds functionality)
2026-01-05 09:53:47 +08:00

import {
  DynamicForm,
  FormFieldConfig,
  FormFieldType,
} from '@/components/dynamic-form';
import { Modal } from '@/components/ui/modal/modal';
import { useCommonTranslation, useTranslate } from '@/hooks/common-hooks';
import { IModalProps } from '@/interfaces/common';
import { IAddLlmRequestBody } from '@/interfaces/request/llm';
import { FieldValues } from 'react-hook-form';
import { LLMHeader } from '../../components/llm-header';

type VolcEngineLlmRequest = IAddLlmRequestBody & {
  endpoint_id: string;
  ark_api_key: string;
};
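
// Modal for registering a VolcEngine (Ark) model: it collects the model type,
// model name, endpoint ID, Ark API key and max tokens, then hands the payload
// to the caller via onOk.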
const VolcEngineModal = ({
  visible,
  hideModal,
  onOk,
  loading,
  llmFactory,
}: IModalProps<IAddLlmRequestBody> & { llmFactory: string }) => {
  const { t } = useTranslate('setting');
  const { t: tc } = useCommonTranslation();
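
  // Field configuration for the dynamic form: model type, model name,
  // endpoint ID, Ark API key and max tokens.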
  const fields: FormFieldConfig[] = [
    {
      name: 'model_type',
      label: t('modelType'),
      type: FormFieldType.Select,
      required: true,
      options: [
        { label: 'chat', value: 'chat' },
        { label: 'embedding', value: 'embedding' },
        { label: 'image2text', value: 'image2text' },
      ],
      defaultValue: 'chat',
    },
    {
      name: 'llm_name',
      label: t('modelName'),
      type: FormFieldType.Text,
      required: true,
      placeholder: t('volcModelNameMessage'),
    },
    {
      name: 'endpoint_id',
      label: t('addEndpointID'),
      type: FormFieldType.Text,
      required: true,
      placeholder: t('endpointIDMessage'),
    },
    {
      name: 'ark_api_key',
      label: t('addArkApiKey'),
      type: FormFieldType.Text,
      required: true,
      placeholder: t('ArkApiKeyMessage'),
    },
    {
      name: 'max_tokens',
      label: t('maxTokens'),
      type: FormFieldType.Number,
      required: true,
      placeholder: t('maxTokensTip'),
      validation: {
        min: 0,
      },
    },
  ];
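
  // Map the submitted form values to the add-LLM payload; a chat model
  // submitted with the vision flag set is registered as image2text.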
  const handleOk = async (values?: FieldValues) => {
    if (!values) return;
    const modelType =
      values.model_type === 'chat' && values.vision
        ? 'image2text'
        : values.model_type;
    const data: VolcEngineLlmRequest = {
      llm_factory: llmFactory,
      llm_name: values.llm_name as string,
      model_type: modelType,
      endpoint_id: values.endpoint_id as string,
      ark_api_key: values.ark_api_key as string,
      max_tokens: values.max_tokens as number,
    };
    console.info(data);
    await onOk?.(data);
  };
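
  // The Modal footer is an empty placeholder; the Cancel and OK buttons are
  // rendered inside the form, pinned to the bottom of the dialog.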
  return (
    <Modal
      title={<LLMHeader name={llmFactory} />}
      open={visible || false}
      onOpenChange={(open) => !open && hideModal?.()}
      maskClosable={false}
      footer={<div className="p-4"></div>}
    >
      <DynamicForm.Root
        fields={fields}
        onSubmit={(data) => {
          console.log(data);
        }}
        defaultValues={
          {
            model_type: 'chat',
            vision: false,
          } as FieldValues
        }
        labelClassName="font-normal"
      >
        <div className="absolute bottom-0 right-0 left-0 flex items-center justify-between w-full py-6 px-6">
          <a
            href="https://www.volcengine.com/docs/82379/1302008"
            target="_blank"
            rel="noreferrer"
          >
            {t('ollamaLink', { name: llmFactory })}
          </a>
          <div className="flex gap-2">
            <DynamicForm.CancelButton
              handleCancel={() => {
                hideModal?.();
              }}
            />
            <DynamicForm.SavingButton
              submitLoading={loading || false}
              buttonText={tc('ok')}
              submitFunc={(values: FieldValues) => {
                handleOk(values);
              }}
            />
          </div>
        </div>
      </DynamicForm.Root>
    </Modal>
  );
};

export default VolcEngineModal;
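
For context, here is a minimal sketch of how a parent settings page might drive this modal. It assumes `IModalProps` forwards the submitted payload to `onOk` and that visibility and loading live in local React state; `addVolcEngineLlm`, `VolcEngineSetting`, the relative import path, and the `"VolcEngine"` factory name are illustrative placeholders rather than the repo's actual wiring.

```tsx
import { useState } from 'react';

import { IAddLlmRequestBody } from '@/interfaces/request/llm';
// Assumed relative path to the modal shown above.
import VolcEngineModal from './modal/volcengine-modal';

// Hypothetical request helper; a real page would call RAGFlow's add-LLM API here.
async function addVolcEngineLlm(payload: IAddLlmRequestBody): Promise<void> {
  console.info('submitting', payload);
}

export function VolcEngineSetting() {
  const [visible, setVisible] = useState(false);
  const [loading, setLoading] = useState(false);

  // Passed to the modal as onOk; it receives the payload built in handleOk above.
  const handleOk = async (payload?: IAddLlmRequestBody) => {
    if (!payload) return;
    setLoading(true);
    try {
      await addVolcEngineLlm(payload);
      setVisible(false); // close only after the request settles
    } finally {
      setLoading(false);
    }
  };

  return (
    <>
      <button onClick={() => setVisible(true)}>Add VolcEngine model</button>
      <VolcEngineModal
        visible={visible}
        hideModal={() => setVisible(false)}
        onOk={handleOk}
        loading={loading}
        llmFactory="VolcEngine" // factory name is an assumption
      />
    </>
  );
}
```

Note that the modal only collects and shapes the form values; performing the request, deciding when to hide the dialog, and surfacing errors all stay with the caller.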