Fix: Optimize Agent template page, fix bugs in knowledge base (#9009)

### What problem does this PR solve?

Replace the Avatar component with the RAGFlowAvatar component for the knowledge base and agent pages, optimize the Agent template page, and fix bugs in the knowledge base (#3221).
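
The avatar swap itself does not appear in the hunks shown below; the sketch here only illustrates the described change. The import path, the `name`/`avatar` props, and the `CardAvatar` wrapper are assumptions, not taken from this PR.

```tsx
// Hypothetical sketch of the described Avatar -> RAGFlowAvatar swap.
// The import path and the name/avatar props are assumed for illustration;
// they are not confirmed by the hunks in this PR.
import { RAGFlowAvatar } from '@/components/ragflow-avatar';

type CardAvatarProps = {
  name: string; // display name of the knowledge base or agent
  avatar?: string; // optional avatar image URL
};

// Idea of the change: render the shared RAGFlowAvatar on knowledge base and
// agent cards instead of a plain Avatar, so both pages show avatars the
// same way.
export function CardAvatar({ name, avatar }: CardAvatarProps) {
  return <RAGFlowAvatar name={name} avatar={avatar} />;
}
```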

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
chanx
2025-07-24 09:30:05 +08:00
committed by GitHub
parent ad177951e9
commit 03e39ca9be
17 changed files with 222 additions and 103 deletions

View File

@@ -111,7 +111,9 @@ export const useFetchSystemModelSettingOnMount = () => {
export const useSubmitOllama = () => {
const [selectedLlmFactory, setSelectedLlmFactory] = useState<string>('');
const [editMode, setEditMode] = useState(false);
const [initialValues, setInitialValues] = useState<Partial<IAddLlmRequestBody> | undefined>();
const [initialValues, setInitialValues] = useState<
Partial<IAddLlmRequestBody> | undefined
>();
const [originalModelName, setOriginalModelName] = useState<string>('');
const { addLlm, loading } = useAddLlm();
const {
@@ -126,7 +128,7 @@ export const useSubmitOllama = () => {
if (!cleanedPayload.api_key || cleanedPayload.api_key.trim() === '') {
delete cleanedPayload.api_key;
}
const ret = await addLlm(cleanedPayload);
if (ret === 0) {
hideLlmAddingModal();
@@ -137,10 +139,15 @@ export const useSubmitOllama = () => {
[hideLlmAddingModal, addLlm],
);
const handleShowLlmAddingModal = (llmFactory: string, isEdit = false, modelData?: any, detailedData?: any) => {
const handleShowLlmAddingModal = (
llmFactory: string,
isEdit = false,
modelData?: any,
detailedData?: any,
) => {
setSelectedLlmFactory(llmFactory);
setEditMode(isEdit);
if (isEdit && detailedData) {
const initialVals = {
llm_name: getRealModelName(detailedData.name),

View File

@@ -3,9 +3,17 @@ import { LlmIcon } from '@/components/svg-icon';
import { useTheme } from '@/components/theme-provider';
import { LLMFactory } from '@/constants/llm';
import { useSetModalState, useTranslate } from '@/hooks/common-hooks';
import { LlmItem, useSelectLlmList, useFetchMyLlmListDetailed } from '@/hooks/llm-hooks';
import {
LlmItem,
useFetchMyLlmListDetailed,
useSelectLlmList,
} from '@/hooks/llm-hooks';
import { getRealModelName } from '@/utils/llm-util';
import { CloseCircleOutlined, EditOutlined, SettingOutlined } from '@ant-design/icons';
import {
CloseCircleOutlined,
EditOutlined,
SettingOutlined,
} from '@ant-design/icons';
import {
Button,
Card,
@@ -137,7 +145,10 @@ const ModelCard = ({ item, clickApiKey, handleEditModel }: IModelCardProps) => {
<Tag color="#b8b8b8">{model.type}</Tag>
{isLocalLlmFactory(item.name) && (
<Tooltip title={t('edit', { keyPrefix: 'common' })}>
<Button type={'text'} onClick={() => handleEditModel(model, item)}>
<Button
type={'text'}
onClick={() => handleEditModel(model, item)}
>
<EditOutlined style={{ color: '#1890ff' }} />
</Button>
</Tooltip>
@@ -304,14 +315,16 @@ const UserSettingModel = () => {
(model: any, factory: LlmItem) => {
if (factory) {
const detailedFactory = detailedLlmList[factory.name];
const detailedModel = detailedFactory?.llm?.find((m: any) => m.name === model.name);
const detailedModel = detailedFactory?.llm?.find(
(m: any) => m.name === model.name,
);
const editData = {
llm_factory: factory.name,
llm_name: model.name,
model_type: model.type
model_type: model.type,
};
if (isLocalLlmFactory(factory.name)) {
showLlmAddingModal(factory.name, true, editData, detailedModel);
} else if (factory.name in ModalMap) {
@@ -333,7 +346,11 @@ const UserSettingModel = () => {
grid={{ gutter: 16, column: 1 }}
dataSource={llmList}
renderItem={(item) => (
<ModelCard item={item} clickApiKey={handleAddModel} handleEditModel={handleEditModel}></ModelCard>
<ModelCard
item={item}
clickApiKey={handleAddModel}
handleEditModel={handleEditModel}
></ModelCard>
)}
/>
),

View File

@@ -48,8 +48,8 @@ const OllamaModal = ({
llmFactory,
editMode = false,
initialValues,
}: IModalProps<IAddLlmRequestBody> & {
llmFactory: string;
}: IModalProps<IAddLlmRequestBody> & {
llmFactory: string;
editMode?: boolean;
initialValues?: Partial<IAddLlmRequestBody>;
}) => {
@@ -96,7 +96,7 @@ const OllamaModal = ({
form.resetFields();
}
}, [visible, editMode, initialValues, form]);
const url =
llmFactoryToUrlMap[llmFactory as LlmFactory] ||
'https://github.com/infiniflow/ragflow/blob/main/docs/guides/models/deploy_local_llm.mdx';
@@ -134,7 +134,11 @@ const OllamaModal = ({
};
return (
<Modal
title={editMode ? t('editLlmTitle', { name: llmFactory }) : t('addLlmTitle', { name: llmFactory })}
title={
editMode
? t('editLlmTitle', { name: llmFactory })
: t('addLlmTitle', { name: llmFactory })
}
open={visible}
onOk={handleOk}
onCancel={hideModal}
@@ -196,10 +200,7 @@ const OllamaModal = ({
name="api_key"
rules={[{ required: false, message: t('apiKeyMessage') }]}
>
<Input
placeholder={t('apiKeyMessage')}
onKeyDown={handleKeyDown}
/>
<Input placeholder={t('apiKeyMessage')} onKeyDown={handleKeyDown} />
</Form.Item>
<Form.Item<FieldType>
label={t('maxTokens')}