Files
ragflow/web/src/pages/chat/chat-configuration-modal/model-setting.tsx
balibabu 1137b04154 Feat: Disable Max_token by default #5283 (#5290)
### What problem does this PR solve?

Feat: Disable Max_token by default #5283

### Type of change


- [x] New Feature (non-breaking change which adds functionality)
2025-02-24 14:22:15 +08:00

56 lines
1.5 KiB
TypeScript

import classNames from 'classnames';
import { useEffect } from 'react';
import { ISegmentedContentProps } from '../interface';
import LlmSettingItems from '@/components/llm-setting-items';
import {
ChatVariableEnabledField,
variableEnabledFieldMap,
} from '@/constants/chat';
import { Variable } from '@/interfaces/database/chat';
import { setInitialChatVariableEnabledFieldValue } from '@/utils/chat';
import styles from './index.less';
/**
 * Model-settings pane of the chat configuration modal.
 *
 * Renders the LLM parameter controls (under the `llm_setting` form prefix)
 * and, whenever the pane becomes visible, pushes the enable/disable state of
 * each chat variable switch into the form.
 *
 * @param show - whether this segmented pane is the active one (controls CSS visibility).
 * @param form - the antd form instance the switch values are written into.
 * @param initialLlmSetting - previously saved LLM settings; when undefined the
 *   per-field defaults are used instead.
 * @param visible - whether the enclosing modal is open; the sync effect and the
 *   settings items only run/render while true.
 */
const ModelSetting = ({
  show,
  form,
  initialLlmSetting,
  visible,
}: ISegmentedContentProps & {
  initialLlmSetting?: Variable;
  visible?: boolean;
}) => {
  useEffect(() => {
    // Only sync switch state while the modal is actually open.
    if (!visible) {
      return;
    }

    const switchValues: Record<string, boolean> = {};
    for (const field of Object.keys(variableEnabledFieldMap)) {
      if (initialLlmSetting === undefined) {
        // No saved settings yet: fall back to the field's initial default.
        switchValues[field] = setInitialChatVariableEnabledFieldValue(
          field as ChatVariableEnabledField,
        );
      } else {
        // Saved settings exist: a switch is on iff its mapped value is truthy.
        switchValues[field] = !!initialLlmSetting[
          variableEnabledFieldMap[
            field as keyof typeof variableEnabledFieldMap
          ] as keyof Variable
        ];
      }
    }

    form.setFieldsValue(switchValues);
  }, [form, initialLlmSetting, visible]);

  return (
    <section
      className={classNames({
        [styles.segmentedHidden]: !show,
      })}
    >
      {visible && <LlmSettingItems prefix="llm_setting"></LlmSettingItems>}
    </section>
  );
};
export default ModelSetting;