change language #245 (#246)

### What problem does this PR solve?

Replaces the hardcoded English labels, tooltips, placeholders, and validation messages in the chat configuration dialog (assistant setting, model setting, and prompt engine tabs) with keys resolved through the `useTranslate` hook, so the UI language can be switched.

Issue link: #245



- [x] New Feature (non-breaking change which adds functionality)
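
For reviewers, a minimal sketch of the replacement pattern applied across these components is shown below. `useTranslate`, `t('assistantName')`, and the `keyPrefix: 'common'` lookup appear in the diff; the surrounding component and the exact translation-resource layout are assumptions made for illustration only.

```tsx
// Sketch of the i18n pattern this PR applies (the component itself is illustrative).
import { Form, Input } from 'antd';
import { useTranslate } from '@/hooks/commonHooks';

const Example = () => {
  // Bind t() to the 'chat' translation prefix so t('assistantName')
  // resolves the chat-specific string instead of a hardcoded label.
  const { t } = useTranslate('chat');

  return (
    <Form.Item
      name="name"
      label={t('assistantName')} // was: label="Assistant name"
      rules={[{ required: true }]}
    >
      {/* Strings shared across pages are looked up under the 'common' prefix,
          e.g. t('upload', { keyPrefix: 'common' }) for the avatar upload button. */}
      <Input />
    </Form.Item>
  );
};
```
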
Authored by balibabu on 2024-04-07 17:41:29 +08:00; committed by GitHub
parent 591202721d
commit 373946ef3f
47 changed files with 1301 additions and 458 deletions

View File

@@ -4,6 +4,7 @@ import { Form, Input, Select, Upload } from 'antd';
import classNames from 'classnames';
import { ISegmentedContentProps } from '../interface';
import { useTranslate } from '@/hooks/commonHooks';
import styles from './index.less';
const AssistantSetting = ({ show }: ISegmentedContentProps) => {
@@ -12,6 +13,7 @@ const AssistantSetting = ({ show }: ISegmentedContentProps) => {
label: x.name,
value: x.id,
}));
const { t } = useTranslate('chat');
const normFile = (e: any) => {
if (Array.isArray(e)) {
@@ -28,14 +30,14 @@ const AssistantSetting = ({ show }: ISegmentedContentProps) => {
>
<Form.Item
name={'name'}
label="Assistant name"
label={t('assistantName')}
rules={[{ required: true }]}
>
<Input placeholder="e.g. Resume Jarvis" />
</Form.Item>
<Form.Item
name="icon"
label="Assistant avatar"
label={t('assistantAvatar')}
valuePropName="fileList"
getValueFromEvent={normFile}
>
@@ -46,44 +48,45 @@ const AssistantSetting = ({ show }: ISegmentedContentProps) => {
>
<button style={{ border: 0, background: 'none' }} type="button">
<PlusOutlined />
<div style={{ marginTop: 8 }}>Upload</div>
<div style={{ marginTop: 8 }}>
{t('upload', { keyPrefix: 'common' })}
</div>
</button>
</Upload>
</Form.Item>
<Form.Item
name={'language'}
label="Language"
label={t('language')}
initialValue={'Chinese'}
tooltip="coming soon"
style={{display:'none'}}
style={{ display: 'none' }}
>
<Select
options={[
{ value: 'Chinese', label: 'Chinese' },
{ value: 'English', label: 'English' },
{ value: 'Chinese', label: t('chinese', { keyPrefix: 'common' }) },
{ value: 'English', label: t('english', { keyPrefix: 'common' }) },
]}
/>
</Form.Item>
<Form.Item
name={['prompt_config', 'empty_response']}
label="Empty response"
tooltip="If nothing is retrieved with user's question in the knowledgebase, it will use this as an answer.
If you want LLM comes up with its own opinion when nothing is retrieved, leave this blank."
label={t('emptyResponse')}
tooltip={t('emptyResponseTip')}
>
<Input placeholder="" />
</Form.Item>
<Form.Item
name={['prompt_config', 'prologue']}
label="Set an opener"
tooltip="How do you want to welcome your clients?"
initialValue={"Hi! I'm your assistant, what can I do for you?"}
label={t('setAnOpener')}
tooltip={t('setAnOpenerTip')}
initialValue={t('setAnOpenerInitial')}
>
<Input.TextArea autoSize={{ minRows: 5 }} />
</Form.Item>
<Form.Item
label="Knowledgebases"
label={t('knowledgeBases')}
name="kb_ids"
tooltip="Select knowledgebases associated."
tooltip={t('knowledgeBasesTip')}
rules={[
{
required: true,
@@ -95,7 +98,7 @@ const AssistantSetting = ({ show }: ISegmentedContentProps) => {
<Select
mode="multiple"
options={knowledgeOptions}
placeholder="Please select"
placeholder={t('knowledgeBasesMessage')}
></Select>
</Form.Item>
</section>

View File

@@ -7,6 +7,7 @@ import {
import { IDialog } from '@/interfaces/database/chat';
import { Divider, Flex, Form, Modal, Segmented, UploadFile } from 'antd';
import { SegmentedValue } from 'antd/es/segmented';
import camelCase from 'lodash/camelCase';
import omit from 'lodash/omit';
import { useEffect, useRef, useState } from 'react';
import { variableEnabledFieldMap } from '../constants';
@@ -17,20 +18,9 @@ import { useFetchModelId } from './hooks';
import ModelSetting from './model-setting';
import PromptEngine from './prompt-engine';
import { useTranslate } from '@/hooks/commonHooks';
import styles from './index.less';
enum ConfigurationSegmented {
AssistantSetting = 'Assistant Setting',
PromptEngine = 'Prompt Engine',
ModelSetting = 'Model Setting',
}
const segmentedMap = {
[ConfigurationSegmented.AssistantSetting]: AssistantSetting,
[ConfigurationSegmented.ModelSetting]: ModelSetting,
[ConfigurationSegmented.PromptEngine]: PromptEngine,
};
const layout = {
labelCol: { span: 7 },
wrapperCol: { span: 17 },
@@ -47,6 +37,18 @@ const validateMessages = {
},
};
enum ConfigurationSegmented {
AssistantSetting = 'Assistant Setting',
PromptEngine = 'Prompt Engine',
ModelSetting = 'Model Setting',
}
const segmentedMap = {
[ConfigurationSegmented.AssistantSetting]: AssistantSetting,
[ConfigurationSegmented.ModelSetting]: ModelSetting,
[ConfigurationSegmented.PromptEngine]: PromptEngine,
};
interface IProps extends IModalManagerChildrenProps {
initialDialog: IDialog;
loading: boolean;
@@ -63,11 +65,13 @@ const ChatConfigurationModal = ({
clearDialog,
}: IProps) => {
const [form] = Form.useForm();
const [value, setValue] = useState<ConfigurationSegmented>(
ConfigurationSegmented.AssistantSetting,
);
const promptEngineRef = useRef<Array<IPromptConfigParameters>>([]);
const modelId = useFetchModelId(visible);
const { t } = useTranslate('chat');
const handleOk = async () => {
const values = await form.validateFields();
@@ -115,10 +119,9 @@ const ChatConfigurationModal = ({
<Flex gap={16}>
<ChatConfigurationAtom></ChatConfigurationAtom>
<div>
<b>Chat Configuration</b>
<b>{t('chatConfiguration')}</b>
<div className={styles.chatConfigurationDescription}>
Here, dress up a dedicated assistant for your special knowledge bases!
💕
{t('chatConfigurationDescription')}
</div>
</div>
</Flex>
@@ -158,7 +161,10 @@ const ChatConfigurationModal = ({
size={'large'}
value={value}
onChange={handleSegmentedChange}
options={Object.values(ConfigurationSegmented)}
options={Object.values(ConfigurationSegmented).map((x) => ({
label: t(camelCase(x)),
value: x,
}))}
block
/>
<Divider></Divider>
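
In the hunk above, the segmented tab labels are now derived by camel-casing each enum value into a translation key. A short standalone sketch of that mapping follows; `camelCase` and the enum come straight from the diff, while the `t` stub stands in for the real `useTranslate('chat')` translator.

```ts
// How the Segmented options get translated labels: each enum value is camel-cased
// into a translation key, while the raw enum value keeps driving the tab switch.
import camelCase from 'lodash/camelCase';

enum ConfigurationSegmented {
  AssistantSetting = 'Assistant Setting',
  PromptEngine = 'Prompt Engine',
  ModelSetting = 'Model Setting',
}

const t = (key: string) => key; // stand-in translator, for illustration only

const options = Object.values(ConfigurationSegmented).map((x) => ({
  label: t(camelCase(x)), // 'Assistant Setting' -> key 'assistantSetting'
  value: x,
}));
```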

View File

@@ -5,16 +5,19 @@ import {
} from '@/constants/knowledge';
import { Divider, Flex, Form, InputNumber, Select, Slider, Switch } from 'antd';
import classNames from 'classnames';
import camelCase from 'lodash/camelCase';
import { useEffect } from 'react';
import { ISegmentedContentProps } from '../interface';
import { useTranslate } from '@/hooks/commonHooks';
import { useFetchLlmList, useSelectLlmOptions } from '@/hooks/llmHooks';
import { variableEnabledFieldMap } from '../constants';
import styles from './index.less';
const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
const { t } = useTranslate('chat');
const parameterOptions = Object.values(ModelVariableType).map((x) => ({
label: x,
label: t(camelCase(x)),
value: x,
}));
@@ -44,18 +47,18 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
})}
>
<Form.Item
label="Model"
label={t('model')}
name="llm_id"
tooltip="Large language chat model"
rules={[{ required: true, message: 'Please select!' }]}
tooltip={t('modelTip')}
rules={[{ required: true, message: t('modelMessage') }]}
>
<Select options={modelOptions} showSearch />
</Form.Item>
<Divider></Divider>
<Form.Item
label="Freedom"
label={t('freedom')}
name="parameters"
tooltip="'Precise' means the LLM will be conservative and answer your question cautiously. 'Improvise' means the you want LLM talk much and freely. 'Balance' is between cautiously and freely."
tooltip={t('freedomTip')}
initialValue={ModelVariableType.Precise}
// rules={[{ required: true, message: 'Please input!' }]}
>
@@ -64,7 +67,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
onChange={handleParametersChange}
/>
</Form.Item>
<Form.Item label="Temperature" tooltip={'This parameter controls the randomness of predictions by the model. A lower temperature makes the model more confident in its responses, while a higher temperature makes it more creative and diverse.'}>
<Form.Item label={t('temperature')} tooltip={t('temperatureTip')}>
<Flex gap={20} align="center">
<Form.Item
name={'temperatureEnabled'}
@@ -77,7 +80,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
<Form.Item
name={['llm_setting', 'temperature']}
noStyle
rules={[{ required: true, message: 'Temperature is required' }]}
rules={[{ required: true, message: t('temperatureMessage') }]}
>
<Slider className={styles.variableSlider} max={1} step={0.01} />
</Form.Item>
@@ -85,7 +88,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
<Form.Item
name={['llm_setting', 'temperature']}
noStyle
rules={[{ required: true, message: 'Temperature is required' }]}
rules={[{ required: true, message: t('temperatureMessage') }]}
>
<InputNumber
className={styles.sliderInputNumber}
@@ -96,7 +99,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
</Form.Item>
</Flex>
</Form.Item>
<Form.Item label="Top P" tooltip={'Also known as “nucleus sampling,” this parameter sets a threshold to select a smaller set of words to sample from. It focuses on the most likely words, cutting off the less probable ones.'}>
<Form.Item label={t('topP')} tooltip={t('topPTip')}>
<Flex gap={20} align="center">
<Form.Item name={'topPEnabled'} valuePropName="checked" noStyle>
<Switch size="small" />
@@ -105,7 +108,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
<Form.Item
name={['llm_setting', 'top_p']}
noStyle
rules={[{ required: true, message: 'Top_p is required' }]}
rules={[{ required: true, message: t('topPMessage') }]}
>
<Slider className={styles.variableSlider} max={1} step={0.01} />
</Form.Item>
@@ -113,7 +116,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
<Form.Item
name={['llm_setting', 'top_p']}
noStyle
rules={[{ required: true, message: 'Top_p is required' }]}
rules={[{ required: true, message: t('topPMessage') }]}
>
<InputNumber
className={styles.sliderInputNumber}
@@ -124,7 +127,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
</Form.Item>
</Flex>
</Form.Item>
<Form.Item label="Presence Penalty" tooltip={'This discourages the model from repeating the same information by penalizing words that have already appeared in the conversation.'}>
<Form.Item label={t('presencePenalty')} tooltip={t('presencePenaltyTip')}>
<Flex gap={20} align="center">
<Form.Item
name={'presencePenaltyEnabled'}
@@ -137,9 +140,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
<Form.Item
name={['llm_setting', 'presence_penalty']}
noStyle
rules={[
{ required: true, message: 'Presence Penalty is required' },
]}
rules={[{ required: true, message: t('presencePenaltyMessage') }]}
>
<Slider className={styles.variableSlider} max={1} step={0.01} />
</Form.Item>
@@ -147,9 +148,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
<Form.Item
name={['llm_setting', 'presence_penalty']}
noStyle
rules={[
{ required: true, message: 'Presence Penalty is required' },
]}
rules={[{ required: true, message: t('presencePenaltyMessage') }]}
>
<InputNumber
className={styles.sliderInputNumber}
@@ -160,7 +159,10 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
</Form.Item>
</Flex>
</Form.Item>
<Form.Item label="Frequency Penalty" tooltip={'Similar to the presence penalty, this reduces the models tendency to repeat the same words frequently.'}>
<Form.Item
label={t('frequencyPenalty')}
tooltip={t('frequencyPenaltyTip')}
>
<Flex gap={20} align="center">
<Form.Item
name={'frequencyPenaltyEnabled'}
@@ -174,7 +176,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
name={['llm_setting', 'frequency_penalty']}
noStyle
rules={[
{ required: true, message: 'Frequency Penalty is required' },
{ required: true, message: t('frequencyPenaltyMessage') },
]}
>
<Slider className={styles.variableSlider} max={1} step={0.01} />
@@ -183,9 +185,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
<Form.Item
name={['llm_setting', 'frequency_penalty']}
noStyle
rules={[
{ required: true, message: 'Frequency Penalty is required' },
]}
rules={[{ required: true, message: t('frequencyPenaltyMessage') }]}
>
<InputNumber
className={styles.sliderInputNumber}
@@ -196,7 +196,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
</Form.Item>
</Flex>
</Form.Item>
<Form.Item label="Max Tokens" tooltip={'This sets the maximum length of the models output, measured in the number of tokens (words or pieces of words).'}>
<Form.Item label={t('maxTokens')} tooltip={t('maxTokensTip')}>
<Flex gap={20} align="center">
<Form.Item name={'maxTokensEnabled'} valuePropName="checked" noStyle>
<Switch size="small" />
@@ -205,7 +205,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
<Form.Item
name={['llm_setting', 'max_tokens']}
noStyle
rules={[{ required: true, message: 'Max Tokens is required' }]}
rules={[{ required: true, message: t('maxTokensMessage') }]}
>
<Slider className={styles.variableSlider} max={2048} />
</Form.Item>
@@ -213,7 +213,7 @@ const ModelSetting = ({ show, form }: ISegmentedContentProps) => {
<Form.Item
name={['llm_setting', 'max_tokens']}
noStyle
rules={[{ required: true, message: 'Max Tokens is required' }]}
rules={[{ required: true, message: t('maxTokensMessage') }]}
>
<InputNumber
className={styles.sliderInputNumber}

View File

@@ -29,6 +29,7 @@ import {
} from '../interface';
import { EditableCell, EditableRow } from './editable-cell';
import { useTranslate } from '@/hooks/commonHooks';
import { useSelectPromptConfigParameters } from '../hooks';
import styles from './index.less';
@@ -44,6 +45,7 @@ const PromptEngine = (
) => {
const [dataSource, setDataSource] = useState<DataType[]>([]);
const parameters = useSelectPromptConfigParameters();
const { t } = useTranslate('chat');
const components = {
body: {
@@ -102,7 +104,7 @@ const PromptEngine = (
const columns: TableProps<DataType>['columns'] = [
{
title: 'key',
title: t('key'),
dataIndex: 'variable',
key: 'variable',
onCell: (record: DataType) => ({
@@ -114,7 +116,7 @@
}),
},
{
title: 'optional',
title: t('optional'),
dataIndex: 'optional',
key: 'optional',
width: 40,
@@ -130,7 +132,7 @@
},
},
{
title: 'operation',
title: t('operation'),
dataIndex: 'operation',
width: 30,
key: 'operation',
@@ -152,24 +154,21 @@
})}
>
<Form.Item
label="System"
rules={[{ required: true, message: 'Please input!' }]}
tooltip="Instructions you need LLM to follow when LLM answers questions, like charactor design, answer length and answer language etc."
label={t('system')}
rules={[{ required: true, message: t('systemMessage') }]}
tooltip={t('systemTip')}
name={['prompt_config', 'system']}
initialValue={`你是一个智能助手,请总结知识库的内容来回答问题,请列举知识库中的数据详细回答。当所有知识库内容都与问题无关时,你的回答必须包括“知识库中未找到您要的答案!”这句话。回答需要考虑聊天历史。
以下是知识库:
{knowledge}
以上是知识库。`}
initialValue={t('systemInitialValue')}
>
<Input.TextArea autoSize={{ maxRows: 8, minRows: 5 }} />
</Form.Item>
<Divider></Divider>
<SimilaritySlider isTooltipShown></SimilaritySlider>
<Form.Item<FieldType>
label="Top N"
label={t('topN')}
name={'top_n'}
initialValue={8}
tooltip={`Not all the chunks whose similarity score is above the 'simialrity threashold' will be feed to LLMs. LLM can only see these 'Top N' chunks.`}
tooltip={t('topNTip')}
>
<Slider max={30} />
</Form.Item>
@@ -177,18 +176,15 @@
<Row align={'middle'} justify="end">
<Col span={7} className={styles.variableAlign}>
<label className={styles.variableLabel}>
Variables
<Tooltip title="If you use dialog APIs, the varialbes might help you chat with your clients with different strategies.
The variables are used to fill-in the 'System' part in prompt in order to give LLM a hint.
The 'knowledge' is a very special variable which will be filled-in with the retrieved chunks.
All the variables in 'System' should be curly bracketed.">
{t('variable')}
<Tooltip title={t('variableTip')}>
<QuestionCircleOutlined className={styles.variableIcon} />
</Tooltip>
</label>
</Col>
<Col span={17} className={styles.variableAlign}>
<Button size="small" onClick={handleAdd}>
Add
{t('add')}
</Button>
</Col>
</Row>