Mirror of https://github.com/infiniflow/ragflow.git
### What problem does this PR solve?

fix: Merge models of the same category #2479

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
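For context, the change below replaces code that spread the per-model-type option lists straight into the antd `Select`. A minimal, hypothetical illustration of why that can surface the same provider group twice when one provider serves both model types (the option shapes and names here are assumptions for the sketch, not taken from the repository):

```typescript
// Assumed shape of a grouped antd Select option list: one group per provider.
type GroupedOption = {
  label: string; // provider name, e.g. "OpenAI" (illustrative)
  options: { label: string; value: string }[];
};

// Hypothetical per-type options, keyed by model type.
const optionsByType: Record<string, GroupedOption[]> = {
  chat: [
    { label: 'OpenAI', options: [{ label: 'gpt-4o', value: 'gpt-4o@OpenAI' }] },
  ],
  image2text: [
    { label: 'OpenAI', options: [{ label: 'gpt-4o', value: 'gpt-4o@OpenAI' }] },
  ],
};

// Spreading the lists keeps both "OpenAI" groups, so the dropdown renders
// the same provider heading twice instead of a single merged group.
const spread = [...optionsByType.chat, ...optionsByType.image2text];
console.log(spread.map((g) => g.label)); // ['OpenAI', 'OpenAI']
```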
Changes to the `LLMSelect` component:

```diff
@@ -1,5 +1,5 @@
 import { LlmModelType } from '@/constants/knowledge';
-import { useSelectLlmOptionsByModelType } from '@/hooks/llm-hooks';
+import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
 import { Popover, Select } from 'antd';
 import LlmSettingItems from '../llm-setting-items';
@@ -10,7 +10,10 @@ interface IProps {
 }
 
 const LLMSelect = ({ id, value, onChange }: IProps) => {
-  const modelOptions = useSelectLlmOptionsByModelType();
+  const modelOptions = useComposeLlmOptionsByModelTypes([
+    LlmModelType.Chat,
+    LlmModelType.Image2text,
+  ]);
 
   const content = (
     <div style={{ width: 400 }}>
@@ -29,10 +32,7 @@ const LLMSelect = ({ id, value, onChange }: IProps) => {
       destroyTooltipOnHide
     >
       <Select
-        options={[
-          ...modelOptions[LlmModelType.Chat],
-          ...modelOptions[LlmModelType.Image2text],
-        ]}
+        options={modelOptions}
         style={{ width: '100%' }}
        dropdownStyle={{ display: 'none' }}
        id={id}
```
Changes to the `LlmSettingItems` component:

```diff
@@ -7,7 +7,7 @@ import { Divider, Flex, Form, InputNumber, Select, Slider, Switch } from 'antd';
 import camelCase from 'lodash/camelCase';
 
 import { useTranslate } from '@/hooks/common-hooks';
-import { useSelectLlmOptionsByModelType } from '@/hooks/llm-hooks';
+import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
 import { useCallback, useMemo } from 'react';
 import styles from './index.less';
 
@@ -39,7 +39,10 @@ const LlmSettingItems = ({ prefix, formItemLayout = {} }: IProps) => {
 
   const memorizedPrefix = useMemo(() => (prefix ? [prefix] : []), [prefix]);
 
-  const modelOptions = useSelectLlmOptionsByModelType();
+  const modelOptions = useComposeLlmOptionsByModelTypes([
+    LlmModelType.Chat,
+    LlmModelType.Image2text,
+  ]);
 
   return (
     <>
@@ -50,13 +53,7 @@ const LlmSettingItems = ({ prefix, formItemLayout = {} }: IProps) => {
         {...formItemLayout}
         rules={[{ required: true, message: t('modelMessage') }]}
       >
-        <Select
-          options={[
-            ...modelOptions[LlmModelType.Chat],
-            ...modelOptions[LlmModelType.Image2text],
-          ]}
-          showSearch
-        />
+        <Select options={modelOptions} showSearch />
       </Form.Item>
       <Divider></Divider>
       <Form.Item
```
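Both components now receive a single, already-merged option list from `useComposeLlmOptionsByModelTypes`. The hook's implementation is not part of this diff; below is a minimal sketch of what such a hook could look like, assuming it builds on the existing per-type options hook and merges groups that share a provider label (the types and the cast are assumptions for illustration, not the repository's code):

```typescript
import { useMemo } from 'react';
import { LlmModelType } from '@/constants/knowledge';
import { useSelectLlmOptionsByModelType } from '@/hooks/llm-hooks';

// Assumed shape of a grouped option list (one group per provider).
type GroupedOption = {
  label: string;
  options: { label: string; value: string }[];
};

export const useComposeLlmOptionsByModelTypes = (
  modelTypes: LlmModelType[],
): GroupedOption[] => {
  // Assumption: the per-type hook returns a record keyed by model type.
  const optionsByType = useSelectLlmOptionsByModelType() as Record<
    LlmModelType,
    GroupedOption[]
  >;

  return useMemo(() => {
    const merged = new Map<string, GroupedOption>();

    modelTypes.forEach((type) => {
      (optionsByType[type] ?? []).forEach((group) => {
        const existing = merged.get(group.label);
        if (existing) {
          // Provider already seen for another model type: merge its models.
          existing.options.push(...group.options);
        } else {
          merged.set(group.label, { ...group, options: [...group.options] });
        }
      });
    });

    return Array.from(merged.values());
  }, [modelTypes, optionsByType]);
};
```

With merging handled inside a hook like this, both call sites can pass the result straight to the antd `Select`, which is what the simplified `options={modelOptions}` lines in the diff do.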