Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-12-08 12:32:30 +08:00)
Add api for sessions and add max_tokens for tenant_llm (#3472)
### What problem does this PR solve?

Add API for sessions and add max_tokens for tenant_llm.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

---------

Co-authored-by: liuhua <10215101452@stu.ecun.edu.cn>
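Each provider modal in the diff below adds an antd `InputNumber` bound to `max_tokens` with the same three validation rules. As an illustrative sketch only (not part of this commit), the repeated rule set could be expressed as a shared helper; the helper name is hypothetical, `t` stands for the translate function the modals get from `useTranslate`, and it assumes antd exports the `Rule` type from `antd/es/form`:

```ts
// Hypothetical helper, not part of this PR: the rule set each modal below repeats inline.
import type { Rule } from 'antd/es/form';

export const buildMaxTokensRules = (t: (key: string) => string): Rule[] => [
  { required: true, message: t('maxTokensMessage') },
  { type: 'number', message: t('maxTokensInvalidMessage') },
  {
    // Mirrors the custom validator added in each modal: reject negative values.
    validator: (_rule, value) =>
      value < 0
        ? Promise.reject(new Error(t('maxTokensMinMessage')))
        : Promise.resolve(),
  },
];
```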
@@ -4,6 +4,7 @@ export interface IAddLlmRequestBody {
   model_type: string;
   api_base?: string; // chat|embedding|speech2text|image2text
   api_key: string;
+  max_tokens: number;
 }
 
 export interface IDeleteLlmRequestBody {
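For orientation, a request body using the fields visible in this hunk might look like the following; the values are purely hypothetical, and any fields of `IAddLlmRequestBody` not shown in the hunk are omitted:

```ts
// Hypothetical example values; only the fields visible in the hunk above are shown.
const tenantLlmPayload = {
  model_type: 'chat',
  api_base: 'https://api.example.com/v1', // optional per the interface
  api_key: 'sk-example',
  max_tokens: 16000, // the new field added by this PR
};
```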
@@ -393,6 +393,8 @@ The above is the content you need to summarize.`,
     maxTokensMessage: 'Max Tokens is required',
     maxTokensTip:
       'This sets the maximum length of the model’s output, measured in the number of tokens (words or pieces of words).',
+    maxTokensInvalidMessage: 'Please enter a valid number for Max Tokens.',
+    maxTokensMinMessage: 'Max Tokens cannot be less than 0.',
     quote: 'Show Quote',
     quoteTip: 'Should the source of the original text be displayed?',
     selfRag: 'Self-RAG',
@@ -441,6 +443,12 @@ The above is the content you need to summarize.`,
   setting: {
     profile: 'Profile',
     profileDescription: 'Update your photo and personal details here.',
+    maxTokens: 'Max Tokens',
+    maxTokensMessage: 'Max Tokens is required',
+    maxTokensTip:
+      'This sets the maximum length of the model’s output, measured in the number of tokens (words or pieces of words).',
+    maxTokensInvalidMessage: 'Please enter a valid number for Max Tokens.',
+    maxTokensMinMessage: 'Max Tokens cannot be less than 0.',
     password: 'Password',
     passwordDescription:
       'Please enter your current password to change your password.',
@@ -231,6 +231,8 @@ export default {
     maxTokensMessage: 'El máximo de tokens es obligatorio',
     maxTokensTip:
       'Esto establece la longitud máxima de la salida del modelo, medida en el número de tokens (palabras o piezas de palabras).',
+    maxTokensInvalidMessage: 'Por favor, ingresa un número válido para Max Tokens.',
+    maxTokensMinMessage: 'Max Tokens no puede ser menor que 0.',
     quote: 'Mostrar cita',
     quoteTip: '¿Debe mostrarse la fuente del texto original?',
     selfRag: 'Self-RAG',
@@ -278,6 +280,12 @@ export default {
   setting: {
     profile: 'Perfil',
     profileDescription: 'Actualiza tu foto y tus datos personales aquí.',
+    maxTokens: 'Máximo de tokens',
+    maxTokensMessage: 'El máximo de tokens es obligatorio',
+    maxTokensTip:
+      'Esto establece la longitud máxima de la salida del modelo, medida en el número de tokens (palabras o piezas de palabras).',
+    maxTokensInvalidMessage: 'Por favor, ingresa un número válido para Max Tokens.',
+    maxTokensMinMessage: 'Max Tokens no puede ser menor que 0.',
     password: 'Contraseña',
     passwordDescription:
       'Por favor ingresa tu contraseña actual para cambiarla.',
@@ -401,6 +401,8 @@ export default {
     maxTokensMessage: 'Token Maksimum diperlukan',
     maxTokensTip:
       'Ini menetapkan panjang maksimum keluaran model, diukur dalam jumlah token (kata atau potongan kata).',
+    maxTokensInvalidMessage: 'Silakan masukkan angka yang valid untuk Max Tokens.',
+    maxTokensMinMessage: 'Max Tokens tidak boleh kurang dari 0.',
     quote: 'Tampilkan Kutipan',
     quoteTip: 'Haruskah sumber teks asli ditampilkan?',
     selfRag: 'Self-RAG',
@@ -450,6 +452,12 @@ export default {
   setting: {
     profile: 'Profil',
     profileDescription: 'Perbarui foto dan detail pribadi Anda di sini.',
+    maxTokens: 'Token Maksimum',
+    maxTokensMessage: 'Token Maksimum diperlukan',
+    maxTokensTip:
+      'Ini menetapkan panjang maksimum keluaran model, diukur dalam jumlah token (kata atau potongan kata).',
+    maxTokensInvalidMessage: 'Silakan masukkan angka yang valid untuk Max Tokens.',
+    maxTokensMinMessage: 'Max Tokens tidak boleh kurang dari 0.',
     password: 'Kata Sandi',
     passwordDescription:
       'Silakan masukkan kata sandi Anda saat ini untuk mengubah kata sandi Anda.',
@@ -376,6 +376,8 @@ export default {
     maxTokensMessage: '最大token數是必填項',
     maxTokensTip:
       '這設置了模型輸出的最大長度,以標記(單詞或單詞片段)的數量來衡量。',
+    maxTokensInvalidMessage: '請輸入有效的最大標記數。',
+    maxTokensMinMessage: '最大標記數不能小於 0。',
     quote: '顯示引文',
     quoteTip: '是否應該顯示原文出處?',
     selfRag: 'Self-RAG',
@@ -422,6 +424,12 @@ export default {
   setting: {
     profile: '概述',
     profileDescription: '在此更新您的照片和個人詳細信息。',
+    maxTokens: '最大token數',
+    maxTokensMessage: '最大token數是必填項',
+    maxTokensTip:
+      '這設置了模型輸出的最大長度,以標記(單詞或單詞片段)的數量來衡量。',
+    maxTokensInvalidMessage: '請輸入有效的最大標記數。',
+    maxTokensMinMessage: '最大標記數不能小於 0。',
     password: '密碼',
     passwordDescription: '請輸入您當前的密碼以更改您的密碼。',
     model: '模型提供商',
@@ -393,6 +393,8 @@ export default {
     maxTokensMessage: '最大token数是必填项',
     maxTokensTip:
       '这设置了模型输出的最大长度,以标记(单词或单词片段)的数量来衡量。',
+    maxTokensInvalidMessage: '请输入有效的最大令牌数。',
+    maxTokensMinMessage: '最大令牌数不能小于 0。',
     quote: '显示引文',
     quoteTip: '是否应该显示原文出处?',
     selfRag: 'Self-RAG',
@@ -439,6 +441,12 @@ export default {
   setting: {
     profile: '概要',
     profileDescription: '在此更新您的照片和个人详细信息。',
+    maxTokens: '最大token数',
+    maxTokensMessage: '最大token数是必填项',
+    maxTokensTip:
+      '这设置了模型输出的最大长度,以标记(单词或单词片段)的数量来衡量。',
+    maxTokensInvalidMessage: '请输入有效的最大令牌数。',
+    maxTokensMinMessage: '最大令牌数不能小于 0。',
     password: '密码',
     passwordDescription: '请输入您当前的密码以更改您的密码。',
     model: '模型提供商',
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Flex, Form, Input, Modal, Select, Space } from 'antd';
+import { Flex, Form, Input, Modal, Select, Space, InputNumber } from 'antd';
 import omit from 'lodash/omit';
 
 type FieldType = IAddLlmRequestBody & {
@@ -30,6 +30,7 @@ const TencentCloudModal = ({
     ...omit(values),
     model_type: modelType,
     llm_factory: llmFactory,
+    max_tokens: 16000,
   };
   console.info(data);
 
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Form, Input, Modal, Select, Switch } from 'antd';
+import { Form, Input, Modal, Select, Switch, InputNumber } from 'antd';
 import omit from 'lodash/omit';
 
 type FieldType = IAddLlmRequestBody & {
@@ -33,6 +33,7 @@ const AzureOpenAIModal = ({
     ...omit(values, ['vision']),
     model_type: modelType,
     llm_factory: llmFactory,
+    max_tokens: values.max_tokens,
   };
   console.info(data);
 
@@ -107,6 +108,31 @@ const AzureOpenAIModal = ({
         >
           <Input placeholder={t('apiVersionMessage')} />
         </Form.Item>
+        <Form.Item<FieldType>
+          label={t('maxTokens')}
+          name="max_tokens"
+          rules={[
+            { required: true, message: t('maxTokensMessage') },
+            {
+              type: 'number',
+              message: t('maxTokensInvalidMessage'),
+            },
+            ({ getFieldValue }) => ({
+              validator(_, value) {
+                if (value < 0) {
+                  return Promise.reject(new Error(t('maxTokensMinMessage')));
+                }
+                return Promise.resolve();
+              },
+            }),
+          ]}
+        >
+          <InputNumber
+            placeholder={t('maxTokensTip')}
+            style={{ width: '100%' }}
+          />
+        </Form.Item>
+
         <Form.Item noStyle dependencies={['model_type']}>
           {({ getFieldValue }) =>
             getFieldValue('model_type') === 'chat' && (
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Flex, Form, Input, Modal, Select, Space } from 'antd';
+import { Flex, Form, Input, Modal, Select, Space, InputNumber } from 'antd';
 import { useMemo } from 'react';
 import { BedrockRegionList } from '../constant';
 
@@ -34,6 +34,7 @@ const BedrockModal = ({
     const data = {
       ...values,
       llm_factory: llmFactory,
+      max_tokens: values.max_tokens,
     };
 
     onOk?.(data);
@@ -111,6 +112,31 @@ const BedrockModal = ({
             allowClear
           ></Select>
         </Form.Item>
+        <Form.Item<FieldType>
+          label={t('maxTokens')}
+          name="max_tokens"
+          rules={[
+            { required: true, message: t('maxTokensMessage') },
+            {
+              type: 'number',
+              message: t('maxTokensInvalidMessage'),
+            },
+            ({ getFieldValue }) => ({
+              validator(_, value) {
+                if (value < 0) {
+                  return Promise.reject(new Error(t('maxTokensMinMessage')));
+                }
+                return Promise.resolve();
+              },
+            }),
+          ]}
+        >
+          <InputNumber
+            placeholder={t('maxTokensTip')}
+            style={{ width: '100%' }}
+          />
+        </Form.Item>
+
       </Form>
     </Modal>
   );
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Flex, Form, Input, Modal, Select, Space } from 'antd';
+import { Flex, Form, Input, Modal, Select, Space, InputNumber } from 'antd';
 import omit from 'lodash/omit';
 
 type FieldType = IAddLlmRequestBody & {
@@ -30,6 +30,7 @@ const FishAudioModal = ({
     ...omit(values),
     model_type: modelType,
     llm_factory: llmFactory,
+    max_tokens: values.max_tokens,
   };
   console.info(data);
 
@@ -93,6 +94,31 @@ const FishAudioModal = ({
         >
           <Input placeholder={t('FishAudioRefIDMessage')} />
         </Form.Item>
+        <Form.Item<FieldType>
+          label={t('maxTokens')}
+          name="max_tokens"
+          rules={[
+            { required: true, message: t('maxTokensMessage') },
+            {
+              type: 'number',
+              message: t('maxTokensInvalidMessage'),
+            },
+            ({ getFieldValue }) => ({
+              validator(_, value) {
+                if (value < 0) {
+                  return Promise.reject(new Error(t('maxTokensMinMessage')));
+                }
+                return Promise.resolve();
+              },
+            }),
+          ]}
+        >
+          <InputNumber
+            placeholder={t('maxTokensTip')}
+            style={{ width: '100%' }}
+          />
+        </Form.Item>
+
       </Form>
     </Modal>
   );
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Form, Input, Modal, Select } from 'antd';
+import { Form, Input, Modal, Select, InputNumber } from 'antd';
 
 type FieldType = IAddLlmRequestBody & {
   google_project_id: string;
@@ -27,6 +27,7 @@ const GoogleModal = ({
     const data = {
       ...values,
       llm_factory: llmFactory,
+      max_tokens: values.max_tokens,
     };
 
     onOk?.(data);
@@ -87,6 +88,31 @@ const GoogleModal = ({
         >
           <Input placeholder={t('GoogleServiceAccountKeyMessage')} />
         </Form.Item>
+        <Form.Item<FieldType>
+          label={t('maxTokens')}
+          name="max_tokens"
+          rules={[
+            { required: true, message: t('maxTokensMessage') },
+            {
+              type: 'number',
+              message: t('maxTokensInvalidMessage'),
+            },
+            ({ getFieldValue }) => ({
+              validator(_, value) {
+                if (value < 0) {
+                  return Promise.reject(new Error(t('maxTokensMinMessage')));
+                }
+                return Promise.resolve();
+              },
+            }),
+          ]}
+        >
+          <InputNumber
+            placeholder={t('maxTokensTip')}
+            style={{ width: '100%' }}
+          />
+        </Form.Item>
+
       </Form>
     </Modal>
   );
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Form, Input, Modal, Select } from 'antd';
+import { Form, Input, Modal, Select} from 'antd';
 import omit from 'lodash/omit';
 
 type FieldType = IAddLlmRequestBody & {
@@ -402,7 +402,7 @@ const UserSettingModel = () => {
         hideModal={hideTencentCloudAddingModal}
         onOk={onTencentCloudAddingOk}
         loading={TencentCloudAddingLoading}
-        llmFactory={'Tencent TencentCloud'}
+        llmFactory={'Tencent Cloud'}
       ></TencentCloudModal>
       <SparkModal
         visible={SparkAddingVisible}
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Flex, Form, Input, Modal, Select, Space, Switch } from 'antd';
+import { Flex, Form, Input, Modal, Select, Space, Switch, InputNumber } from 'antd';
 import omit from 'lodash/omit';
 
 type FieldType = IAddLlmRequestBody & { vision: boolean };
@@ -45,6 +45,7 @@ const OllamaModal = ({
     ...omit(values, ['vision']),
     model_type: modelType,
     llm_factory: llmFactory,
+    max_tokens: values.max_tokens,
   };
   console.info(data);
 
@@ -136,6 +137,31 @@ const OllamaModal = ({
         >
           <Input placeholder={t('apiKeyMessage')} />
         </Form.Item>
+        <Form.Item<FieldType>
+          label={t('maxTokens')}
+          name="max_tokens"
+          rules={[
+            { required: true, message: t('maxTokensMessage') },
+            {
+              type: 'number',
+              message: t('maxTokensInvalidMessage'),
+            },
+            ({ getFieldValue }) => ({
+              validator(_, value) {
+                if (value < 0) {
+                  return Promise.reject(new Error(t('maxTokensMinMessage')));
+                }
+                return Promise.resolve();
+              },
+            }),
+          ]}
+        >
+          <InputNumber
+            placeholder={t('maxTokensTip')}
+            style={{ width: '100%' }}
+          />
+        </Form.Item>
+
         <Form.Item noStyle dependencies={['model_type']}>
           {({ getFieldValue }) =>
             getFieldValue('model_type') === 'chat' && (
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Form, Input, Modal, Select } from 'antd';
+import { Form, Input, Modal, Select, InputNumber } from 'antd';
 import omit from 'lodash/omit';
 
 type FieldType = IAddLlmRequestBody & {
@@ -36,6 +36,7 @@ const SparkModal = ({
     ...omit(values, ['vision']),
     model_type: modelType,
     llm_factory: llmFactory,
+    max_tokens: values.max_tokens,
   };
   console.info(data);
 
@@ -128,6 +129,31 @@ const SparkModal = ({
             )
           }
         </Form.Item>
+        <Form.Item<FieldType>
+          label={t('maxTokens')}
+          name="max_tokens"
+          rules={[
+            { required: true, message: t('maxTokensMessage') },
+            {
+              type: 'number',
+              message: t('maxTokensInvalidMessage'),
+            },
+            ({ getFieldValue }) => ({
+              validator(_, value) {
+                if (value < 0) {
+                  return Promise.reject(new Error(t('maxTokensMinMessage')));
+                }
+                return Promise.resolve();
+              },
+            }),
+          ]}
+        >
+          <InputNumber
+            placeholder={t('maxTokensTip')}
+            style={{ width: '100%' }}
+          />
+        </Form.Item>
+
       </Form>
     </Modal>
   );
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Flex, Form, Input, Modal, Select, Space, Switch } from 'antd';
+import { Flex, Form, Input, Modal, Select, Space, Switch, InputNumber } from 'antd';
 import omit from 'lodash/omit';
 
 type FieldType = IAddLlmRequestBody & {
@@ -36,6 +36,7 @@ const VolcEngineModal = ({
     ...omit(values, ['vision']),
     model_type: modelType,
     llm_factory: llmFactory,
+    max_tokens: values.max_tokens,
   };
   console.info(data);
 
@@ -103,19 +104,31 @@ const VolcEngineModal = ({
         >
           <Input placeholder={t('ArkApiKeyMessage')} />
         </Form.Item>
-        <Form.Item noStyle dependencies={['model_type']}>
-          {({ getFieldValue }) =>
-            getFieldValue('model_type') === 'chat' && (
-              <Form.Item
-                label={t('vision')}
-                valuePropName="checked"
-                name={'vision'}
-              >
-                <Switch />
-              </Form.Item>
-            )
-          }
-        </Form.Item>
+        <Form.Item<FieldType>
+          label={t('maxTokens')}
+          name="max_tokens"
+          rules={[
+            { required: true, message: t('maxTokensMessage') },
+            {
+              type: 'number',
+              message: t('maxTokensInvalidMessage'),
+            },
+            ({ getFieldValue }) => ({
+              validator(_, value) {
+                if (value < 0) {
+                  return Promise.reject(new Error(t('maxTokensMinMessage')));
+                }
+                return Promise.resolve();
+              },
+            }),
+          ]}
+        >
+          <InputNumber
+            placeholder={t('maxTokensTip')}
+            style={{ width: '100%' }}
+          />
+        </Form.Item>
+
       </Form>
     </Modal>
   );
@@ -1,7 +1,7 @@
 import { useTranslate } from '@/hooks/common-hooks';
 import { IModalProps } from '@/interfaces/common';
 import { IAddLlmRequestBody } from '@/interfaces/request/llm';
-import { Form, Input, Modal, Select } from 'antd';
+import { Form, Input, Modal, Select, InputNumber } from 'antd';
 import omit from 'lodash/omit';
 
 type FieldType = IAddLlmRequestBody & {
@@ -34,6 +34,7 @@ const YiyanModal = ({
     ...omit(values, ['vision']),
     model_type: modelType,
     llm_factory: llmFactory,
+    max_tokens: values.max_tokens,
   };
   console.info(data);
 
@@ -89,6 +90,30 @@ const YiyanModal = ({
         >
           <Input placeholder={t('yiyanSKMessage')} />
         </Form.Item>
+        <Form.Item<FieldType>
+          label={t('maxTokens')}
+          name="max_tokens"
+          rules={[
+            { required: true, message: t('maxTokensMessage') },
+            {
+              type: 'number',
+              message: t('maxTokensInvalidMessage'),
+            },
+            ({ getFieldValue }) => ({
+              validator(_, value) {
+                if (value < 0) {
+                  return Promise.reject(new Error(t('maxTokensMinMessage')));
+                }
+                return Promise.resolve();
+              },
+            }),
+          ]}
+        >
+          <InputNumber
+            placeholder={t('maxTokensTip')}
+            style={{ width: '100%' }}
+          />
+        </Form.Item>
       </Form>
     </Modal>
   );