Compare commits


3 Commits

30ae78755b  2025-08-15 12:05:27 +08:00
Feat: Delete or filter conversations #3221 (#9491)

### What problem does this PR solve?

Feat: Delete or filter conversations #3221

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

2114e966d8  2025-08-15 10:05:01 +08:00
Feat: add citation option to agent and enlarge the timeouts. (#9484)

### What problem does this PR solve?

#9422

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

562349eb02  2025-08-15 10:04:37 +08:00
Feat: Upload files in the chat box #3221 (#9483)

### What problem does this PR solve?

Feat: Upload files in the chat box #3221

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

23 changed files with 373 additions and 75 deletions

View File

@ -244,7 +244,7 @@ class Agent(LLM, ToolBase):
def complete():
nonlocal hist
need2cite = self._canvas.get_reference()["chunks"] and self._id.find("-->") < 0
need2cite = self._param.cite and self._canvas.get_reference()["chunks"] and self._id.find("-->") < 0
cited = False
if hist[0]["role"] == "system" and need2cite:
if len(hist) < 7:

View File

@ -145,7 +145,7 @@ class LLM(ComponentBase):
prompt = self.string_format(prompt, args)
for m in msg:
m["content"] = self.string_format(m["content"], args)
if self._canvas.get_reference()["chunks"]:
if self._param.cite and self._canvas.get_reference()["chunks"]:
prompt += citation_prompt()
return prompt, msg

View File

@ -106,7 +106,7 @@ class EntityResolution(Extractor):
nonlocal remain_candidates_to_resolve, callback
async with semaphore:
try:
with trio.move_on_after(180) as cancel_scope:
with trio.move_on_after(280) as cancel_scope:
await self._resolve_candidate(candidate_batch, result_set, result_lock)
remain_candidates_to_resolve = remain_candidates_to_resolve - len(candidate_batch[1])
callback(msg=f"Resolved {len(candidate_batch[1])} pairs, {remain_candidates_to_resolve} are remained to resolve. ")
@ -169,7 +169,7 @@ class EntityResolution(Extractor):
logging.info(f"Created resolution prompt {len(text)} bytes for {len(candidate_resolution_i[1])} entity pairs of type {candidate_resolution_i[0]}")
async with chat_limiter:
try:
with trio.move_on_after(120) as cancel_scope:
with trio.move_on_after(240) as cancel_scope:
response = await trio.to_thread.run_sync(self._chat, text, [{"role": "user", "content": "Output:"}], {})
if cancel_scope.cancelled_caught:
logging.warning("_resolve_candidate._chat timeout, skipping...")

View File

@ -92,7 +92,7 @@ class CommunityReportsExtractor(Extractor):
text = perform_variable_replacements(self._extraction_prompt, variables=prompt_variables)
async with chat_limiter:
try:
with trio.move_on_after(80) as cancel_scope:
with trio.move_on_after(180) as cancel_scope:
response = await trio.to_thread.run_sync( self._chat, text, [{"role": "user", "content": "Output:"}], {})
if cancel_scope.cancelled_caught:
logging.warning("extract_community_report._chat timeout, skipping...")

View File

@ -57,20 +57,22 @@ async def run_graphrag(
):
chunks.append(d["content_with_weight"])
subgraph = await generate_subgraph(
LightKGExt
if "method" not in row["kb_parser_config"].get("graphrag", {}) or row["kb_parser_config"]["graphrag"]["method"] != "general"
else GeneralKGExt,
tenant_id,
kb_id,
doc_id,
chunks,
language,
row["kb_parser_config"]["graphrag"].get("entity_types", []),
chat_model,
embedding_model,
callback,
)
with trio.fail_after(len(chunks)*60):
subgraph = await generate_subgraph(
LightKGExt
if "method" not in row["kb_parser_config"].get("graphrag", {}) or row["kb_parser_config"]["graphrag"]["method"] != "general"
else GeneralKGExt,
tenant_id,
kb_id,
doc_id,
chunks,
language,
row["kb_parser_config"]["graphrag"].get("entity_types", []),
chat_model,
embedding_model,
callback,
)
if not subgraph:
return
@ -125,7 +127,6 @@ async def run_graphrag(
return
@timeout(60*60, 1)
async def generate_subgraph(
extractor: Extractor,
tenant_id: str,

View File

@ -520,7 +520,7 @@ async def run_raptor(row, chat_mdl, embd_mdl, vector_size, callback=None):
return res, tk_count
@timeout(60*60, 1)
@timeout(60*60*2, 1)
async def do_handle_task(task):
task_id = task["id"]
task_from_page = task["from_page"]

View File

@ -12,7 +12,7 @@ import { PaginationProps, message } from 'antd';
import { FormInstance } from 'antd/lib';
import axios from 'axios';
import { EventSourceParserStream } from 'eventsource-parser/stream';
import { omit } from 'lodash';
import { has, isEmpty, omit } from 'lodash';
import {
ChangeEventHandler,
useCallback,
@ -166,11 +166,43 @@ export const useFetchAppConf = () => {
return appConf;
};
function useSetDoneRecord() {
const [doneRecord, setDoneRecord] = useState<Record<string, boolean>>({});
const clearDoneRecord = useCallback(() => {
setDoneRecord({});
}, []);
const setDoneRecordById = useCallback((id: string, val: boolean) => {
setDoneRecord((prev) => ({ ...prev, [id]: val }));
}, []);
const allDone = useMemo(() => {
return Object.values(doneRecord).every((val) => val);
}, [doneRecord]);
useEffect(() => {
if (!isEmpty(doneRecord) && allDone) {
clearDoneRecord();
}
}, [allDone, clearDoneRecord, doneRecord]);
return {
doneRecord,
setDoneRecord,
setDoneRecordById,
clearDoneRecord,
allDone,
};
}
export const useSendMessageWithSse = (
url: string = api.completeConversation,
) => {
const [answer, setAnswer] = useState<IAnswer>({} as IAnswer);
const [done, setDone] = useState(true);
const { doneRecord, clearDoneRecord, setDoneRecordById, allDone } =
useSetDoneRecord();
const timer = useRef<any>();
const sseRef = useRef<AbortController>();
@ -188,6 +220,17 @@ export const useSendMessageWithSse = (
}, 1000);
}, []);
const setDoneValue = useCallback(
(body: any, value: boolean) => {
if (has(body, 'chatBoxId')) {
setDoneRecordById(body.chatBoxId, value);
} else {
setDone(value);
}
},
[setDoneRecordById],
);
const send = useCallback(
async (
body: any,
@ -195,7 +238,7 @@ export const useSendMessageWithSse = (
): Promise<{ response: Response; data: ResponseType } | undefined> => {
initializeSseRef();
try {
setDone(false);
setDoneValue(body, false);
const response = await fetch(url, {
method: 'POST',
headers: {
@ -236,23 +279,34 @@ export const useSendMessageWithSse = (
}
}
}
setDone(true);
setDoneValue(body, true);
resetAnswer();
return { data: await res, response };
} catch (e) {
setDone(true);
setDoneValue(body, true);
resetAnswer();
// Swallow fetch errors silently
}
},
[initializeSseRef, url, resetAnswer],
[initializeSseRef, setDoneValue, url, resetAnswer],
);
const stopOutputMessage = useCallback(() => {
sseRef.current?.abort();
}, []);
return { send, answer, done, setDone, resetAnswer, stopOutputMessage };
return {
send,
answer,
done,
doneRecord,
allDone,
setDone,
resetAnswer,
stopOutputMessage,
clearDoneRecord,
};
};
export const useSpeechWithSse = (url: string = api.tts) => {
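
For readers skimming the useSendMessageWithSse changes above: when the request body carries a chatBoxId, completion is now tracked per chat box in doneRecord instead of the single done flag, and allDone only returns to true once every box has finished. Below is a minimal standalone sketch of that aggregation in plain TypeScript, not the hook itself; setDoneById and isAllDone are renamed stand-ins for the hook's setDoneRecordById and allDone, and the driver at the bottom is purely illustrative.

```ts
// Sketch of the doneRecord/allDone bookkeeping added above (illustrative only).
type DoneRecord = Record<string, boolean>;

// Mirrors setDoneRecordById: mark one chat box as streaming (false) or finished (true).
function setDoneById(record: DoneRecord, chatBoxId: string, value: boolean): DoneRecord {
  return { ...record, [chatBoxId]: value };
}

// Mirrors allDone: true only when every tracked chat box has finished.
// Object.values({}).every(...) is true for an empty record, which is why the
// hook also checks !isEmpty(doneRecord) before clearing it.
function isAllDone(record: DoneRecord): boolean {
  return Object.values(record).every((val) => val);
}

// Hypothetical driver: two chat boxes stream in parallel.
let record: DoneRecord = {};
record = setDoneById(record, 'box-1', false);
record = setDoneById(record, 'box-2', false);
record = setDoneById(record, 'box-1', true);
console.log(isAllDone(record)); // false: box-2 is still streaming
record = setDoneById(record, 'box-2', true);
console.log(isAllDone(record)); // true: sendLoading (!allDone) can drop back to false
```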

View File

@ -0,0 +1,14 @@
import { useCallback, useState } from 'react';
export const useHandleSearchStrChange = () => {
const [searchString, setSearchString] = useState('');
const handleInputChange = useCallback(
(e: React.ChangeEvent<HTMLInputElement | HTMLTextAreaElement>) => {
const value = e.target.value;
setSearchString(value);
},
[],
);
return { handleInputChange, searchString };
};

View File

@ -17,6 +17,7 @@ import {
useGetPaginationWithRouter,
useHandleSearchChange,
} from './logic-hooks';
import { useHandleSearchStrChange } from './logic-hooks/use-change-search';
export const enum ChatApiAction {
FetchDialogList = 'fetchDialogList',
@ -30,6 +31,7 @@ export const enum ChatApiAction {
DeleteMessage = 'deleteMessage',
FetchMindMap = 'fetchMindMap',
FetchRelatedQuestions = 'fetchRelatedQuestions',
UploadAndParse = 'upload_and_parse',
}
export const useGetChatSearchParams = () => {
@ -163,6 +165,10 @@ export const useSetDialog = () => {
queryKey: [ChatApiAction.FetchDialogList],
});
queryClient.invalidateQueries({
queryKey: [ChatApiAction.FetchDialog],
});
message.success(
t(`message.${params.dialog_id ? 'modified' : 'created'}`),
);
@ -224,6 +230,9 @@ export const useClickConversationCard = () => {
export const useFetchConversationList = () => {
const { id } = useParams();
const { handleClickConversation } = useClickConversationCard();
const { searchString, handleInputChange } = useHandleSearchStrChange();
const {
data,
isFetching: loading,
@ -234,6 +243,11 @@ export const useFetchConversationList = () => {
gcTime: 0,
refetchOnWindowFocus: false,
enabled: !!id,
select(data) {
return searchString
? data.filter((x) => x.name.includes(searchString))
: data;
},
queryFn: async () => {
const { data } = await chatService.listConversation(
{ params: { dialog_id: id } },
@ -250,7 +264,7 @@ export const useFetchConversationList = () => {
},
});
return { data, loading, refetch };
return { data, loading, refetch, searchString, handleInputChange };
};
export const useFetchConversation = () => {
@ -376,6 +390,34 @@ export const useDeleteMessage = () => {
return { data, loading, deleteMessage: mutateAsync };
};
export function useUploadAndParseFile() {
const { conversationId } = useGetChatSearchParams();
const { t } = useTranslation();
const {
data,
isPending: loading,
mutateAsync,
} = useMutation({
mutationKey: [ChatApiAction.UploadAndParse],
mutationFn: async (file: File) => {
const formData = new FormData();
formData.append('file', file);
formData.append('conversation_id', conversationId);
const { data } = await chatService.uploadAndParse(formData);
if (data.code === 0) {
message.success(t(`message.uploaded`));
}
return data;
},
});
return { data, loading, uploadAndParseFile: mutateAsync };
}
//#endregion
//#region search page
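
The useFetchConversationList change above filters the fetched conversation list client-side through the query's select option. A standalone sketch of the same name filter follows; IConversationLike and the sample data are made up for illustration, while the filter expression mirrors the ternary in the diff.

```ts
// Illustrative only: the substring filter applied by the new `select` option above.
interface IConversationLike {
  id: string;
  name: string;
}

function filterConversations(
  list: IConversationLike[],
  searchString: string,
): IConversationLike[] {
  // An empty search string leaves the list untouched, matching the diff.
  return searchString ? list.filter((x) => x.name.includes(searchString)) : list;
}

// Hypothetical data to show the behaviour.
const conversations: IConversationLike[] = [
  { id: '1', name: 'Billing questions' },
  { id: '2', name: 'Weekly report' },
];
console.log(filterConversations(conversations, 'report')); // [{ id: '2', name: 'Weekly report' }]
console.log(filterConversations(conversations, ''));       // both conversations
```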

View File

@ -651,6 +651,7 @@ export const initialAgentValues = {
exception_default_value: '',
tools: [],
mcp: [],
cite: true,
outputs: {
// structured_output: {
// topic: {

View File

@ -15,6 +15,7 @@ import {
FormLabel,
} from '@/components/ui/form';
import { Input, NumberInput } from '@/components/ui/input';
import { Switch } from '@/components/ui/switch';
import { LlmModelType } from '@/constants/knowledge';
import { useFindLlmByUuid } from '@/hooks/use-llm-request';
import { zodResolver } from '@hookform/resolvers/zod';
@ -71,6 +72,7 @@ const FormSchema = z.object({
exception_goto: z.array(z.string()).optional(),
exception_default_value: z.string().optional(),
...LargeModelFilterFormSchema,
cite: z.boolean().optional(),
});
const outputList = buildOutputList(initialAgentValues.outputs);
@ -184,6 +186,23 @@ function AgentForm({ node }: INextOperatorForm) {
<Collapse title={<div>Advanced Settings</div>}>
<FormContainer>
<MessageHistoryWindowSizeFormField></MessageHistoryWindowSizeFormField>
<FormField
control={form.control}
name={`cite`}
render={({ field }) => (
<FormItem className="flex-1">
<FormLabel tooltip={t('flow.citeTip')}>
{t('flow.cite')}
</FormLabel>
<FormControl>
<Switch
checked={field.value}
onCheckedChange={field.onChange}
></Switch>
</FormControl>
</FormItem>
)}
/>
<FormField
control={form.control}
name={`max_retries`}

View File

@ -75,7 +75,7 @@ export function ChatSettings({ switchSettingVisible }: ChatSettingsProps) {
</div>
<Form {...form}>
<form onSubmit={form.handleSubmit(onSubmit, onInvalid)}>
<section className="space-y-6 overflow-auto max-h-[87vh] pr-4">
<section className="space-y-6 overflow-auto max-h-[85vh] pr-4">
<ChatBasicSetting></ChatBasicSetting>
<Separator />
<ChatPromptEngine></ChatPromptEngine>

View File

@ -16,13 +16,16 @@ import {
useFetchConversation,
useFetchDialog,
useGetChatSearchParams,
useSetDialog,
} from '@/hooks/use-chat-request';
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
import { buildMessageUuidWithRole } from '@/utils/chat';
import { zodResolver } from '@hookform/resolvers/zod';
import { isEmpty, omit } from 'lodash';
import { ListCheck, Plus, Trash2 } from 'lucide-react';
import { forwardRef, useCallback, useImperativeHandle, useRef } from 'react';
import { useForm } from 'react-hook-form';
import { useForm, useWatch } from 'react-hook-form';
import { useParams } from 'umi';
import { z } from 'zod';
import {
useGetSendButtonDisabled,
@ -47,6 +50,7 @@ type ChatCardProps = {
id: string;
idx: number;
derivedMessages: IMessage[];
sendLoading: boolean;
} & Pick<
MultipleChatBoxProps,
'controller' | 'removeChatBox' | 'addChatBox' | 'chatBoxIds'
@ -61,11 +65,14 @@ const ChatCard = forwardRef(function ChatCard(
addChatBox,
chatBoxIds,
derivedMessages,
sendLoading,
}: ChatCardProps,
ref,
) {
const { sendLoading, regenerateMessage, removeMessageById } =
useSendMessage(controller);
const { id: dialogId } = useParams();
const { setDialog } = useSetDialog();
const { regenerateMessage, removeMessageById } = useSendMessage(controller);
const messageContainerRef = useRef<HTMLDivElement>(null);
@ -80,6 +87,8 @@ const ChatCard = forwardRef(function ChatCard(
},
});
const llmId = useWatch({ control: form.control, name: 'llm_id' });
const { data: userInfo } = useFetchUserInfo();
const { data: currentDialog } = useFetchDialog();
const { data: conversation } = useFetchConversation();
@ -90,6 +99,16 @@ const ChatCard = forwardRef(function ChatCard(
removeChatBox(id);
}, [id, removeChatBox]);
const handleApplyConfig = useCallback(() => {
const values = form.getValues();
setDialog({
...currentDialog,
llm_id: values.llm_id,
llm_setting: omit(values, 'llm_id'),
dialog_id: dialogId,
});
}, [currentDialog, dialogId, form, setDialog]);
useImperativeHandle(ref, () => ({
getFormData: () => form.getValues(),
}));
@ -107,7 +126,11 @@ const ChatCard = forwardRef(function ChatCard(
<div className="space-x-2">
<Tooltip>
<TooltipTrigger>
<Button variant={'ghost'}>
<Button
variant={'ghost'}
disabled={isEmpty(llmId)}
onClick={handleApplyConfig}
>
<ListCheck />
</Button>
</TooltipTrigger>
@ -180,6 +203,7 @@ export function MultipleChatBox({
handlePressEnter,
stopOutputMessage,
setFormRef,
handleUploadFile,
} = useSendMultipleChatMessage(controller, chatBoxIds);
const { createConversationBeforeUploadDocument } =
@ -202,6 +226,7 @@ export function MultipleChatBox({
addChatBox={addChatBox}
derivedMessages={messageRecord[id]}
ref={setFormRef(id)}
sendLoading={sendLoading}
></ChatCard>
))}
</div>
@ -218,6 +243,7 @@ export function MultipleChatBox({
createConversationBeforeUploadDocument
}
stopOutputMessage={stopOutputMessage}
onUpload={handleUploadFile}
/>
</div>
</section>

View File

@ -32,6 +32,7 @@ export function SingleChatBox({ controller }: IProps) {
regenerateMessage,
removeMessageById,
stopOutputMessage,
handleUploadFile,
} = useSendMessage(controller);
const { data: userInfo } = useFetchUserInfo();
const { data: currentDialog } = useFetchDialog();
@ -89,6 +90,7 @@ export function SingleChatBox({ controller }: IProps) {
createConversationBeforeUploadDocument
}
stopOutputMessage={stopOutputMessage}
onUpload={handleUploadFile}
/>
</section>
);

View File

@ -0,0 +1,48 @@
import { ConfirmDeleteDialog } from '@/components/confirm-delete-dialog';
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu';
import { useRemoveConversation } from '@/hooks/use-chat-request';
import { IConversation } from '@/interfaces/database/chat';
import { Trash2 } from 'lucide-react';
import { MouseEventHandler, PropsWithChildren, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
export function ConversationDropdown({
children,
conversation,
}: PropsWithChildren & {
conversation: IConversation;
}) {
const { t } = useTranslation();
const { removeConversation } = useRemoveConversation();
const handleDelete: MouseEventHandler<HTMLDivElement> = useCallback(() => {
removeConversation([conversation.id]);
}, [conversation.id, removeConversation]);
return (
<DropdownMenu>
<DropdownMenuTrigger asChild>{children}</DropdownMenuTrigger>
<DropdownMenuContent>
<ConfirmDeleteDialog onOk={handleDelete}>
<DropdownMenuItem
className="text-state-error"
onSelect={(e) => {
e.preventDefault();
}}
onClick={(e) => {
e.stopPropagation();
}}
>
{t('common.delete')} <Trash2 />
</DropdownMenuItem>
</ConfirmDeleteDialog>
</DropdownMenuContent>
</DropdownMenu>
);
}

View File

@ -11,15 +11,19 @@ import { Button } from '@/components/ui/button';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { useSetModalState } from '@/hooks/common-hooks';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { useFetchConversation, useFetchDialog } from '@/hooks/use-chat-request';
import {
useFetchConversation,
useFetchDialog,
useGetChatSearchParams,
} from '@/hooks/use-chat-request';
import { cn } from '@/lib/utils';
import { ArrowUpRight, LogOut } from 'lucide-react';
import { isEmpty } from 'lodash';
import { ArrowUpRight, LogOut, Send } from 'lucide-react';
import { useTranslation } from 'react-i18next';
import { useHandleClickConversationCard } from '../hooks/use-click-card';
import { ChatSettings } from './app-settings/chat-settings';
import { MultipleChatBox } from './chat-box/multiple-chat-box';
import { SingleChatBox } from './chat-box/single-chat-box';
import { LLMSelectForm } from './llm-select-form';
import { Sessions } from './sessions';
import { useAddChatBox } from './use-add-box';
import { useSwitchDebugMode } from './use-switch-debug-mode';
@ -42,6 +46,8 @@ export default function Chat() {
hasThreeChatBox,
} = useAddChatBox();
const { conversationId, isNew } = useGetChatSearchParams();
const { isDebugMode, switchDebugMode } = useSwitchDebugMode();
if (isDebugMode) {
@ -81,6 +87,10 @@ export default function Chat() {
</BreadcrumbItem>
</BreadcrumbList>
</Breadcrumb>
<Button>
<Send />
{t('common.embedIntoSite')}
</Button>
</PageHeader>
<div className="flex flex-1 min-h-0">
<Sessions
@ -96,21 +106,22 @@ export default function Chat() {
className={cn('p-5', { 'border-b': hasSingleChatBox })}
>
<CardTitle className="flex justify-between items-center text-base">
<div className="flex gap-3 items-center">
{conversation.name}
<LLMSelectForm></LLMSelectForm>
</div>
<div>{conversation.name}</div>
<Button
variant={'ghost'}
onClick={switchDebugMode}
disabled={hasThreeChatBox}
disabled={
hasThreeChatBox ||
isEmpty(conversationId) ||
isNew === 'true'
}
>
<ArrowUpRight /> Multiple Models
</Button>
</CardTitle>
</CardHeader>
<CardContent className="flex-1 p-0">
<CardContent className="flex-1 p-0 min-h-0">
<SingleChatBox controller={controller}></SingleChatBox>
</CardContent>
</Card>

View File

@ -1,12 +1,16 @@
import { LargeModelFormFieldWithoutFilter } from '@/components/large-model-form-field';
import { LlmSettingSchema } from '@/components/llm-setting-items/next';
import { Form } from '@/components/ui/form';
import { useFetchDialog } from '@/hooks/use-chat-request';
import { zodResolver } from '@hookform/resolvers/zod';
import { isEmpty } from 'lodash';
import { useEffect } from 'react';
import { useForm } from 'react-hook-form';
import { z } from 'zod';
export function LLMSelectForm() {
const FormSchema = z.object(LlmSettingSchema);
const { data } = useFetchDialog();
const form = useForm<z.infer<typeof FormSchema>>({
resolver: zodResolver(FormSchema),
@ -15,6 +19,15 @@ export function LLMSelectForm() {
},
});
// const values = useWatch({ control: form.control, name: ['llm_id'] });
useEffect(() => {
if (!isEmpty(data)) {
form.reset({ llm_id: data.llm_id, ...data.llm_setting });
}
form.reset(data);
}, [data, form]);
return (
<Form {...form}>
<LargeModelFormFieldWithoutFilter></LargeModelFormFieldWithoutFilter>

View File

@ -10,9 +10,10 @@ import {
} from '@/hooks/use-chat-request';
import { cn } from '@/lib/utils';
import { PanelLeftClose, PanelRightClose, Plus } from 'lucide-react';
import { useCallback, useState } from 'react';
import { useCallback } from 'react';
import { useHandleClickConversationCard } from '../hooks/use-click-card';
import { useSelectDerivedConversationList } from '../hooks/use-select-conversation-list';
import { ConversationDropdown } from './conversation-dropdown';
type SessionProps = Pick<
ReturnType<typeof useHandleClickConversationCard>,
@ -23,11 +24,14 @@ export function Sessions({
handleConversationCardClick,
switchSettingVisible,
}: SessionProps) {
const { list: conversationList, addTemporaryConversation } =
useSelectDerivedConversationList();
const {
list: conversationList,
addTemporaryConversation,
handleInputChange,
searchString,
} = useSelectDerivedConversationList();
const { data } = useFetchDialog();
const { visible, switchVisible } = useSetModalState(true);
const [searchStr, setSearchStr] = useState('');
const handleCardClick = useCallback(
(conversationId: string, isNew: boolean) => () => {
@ -71,8 +75,8 @@ export function Sessions({
</div>
<div className="pb-4">
<SearchInput
onChange={(e) => setSearchStr(e.target.value)}
value={searchStr}
onChange={handleInputChange}
value={searchString}
></SearchInput>
</div>
<div className="space-y-4 flex-1 overflow-auto">
@ -86,7 +90,9 @@ export function Sessions({
>
<CardContent className="px-3 py-2 flex justify-between items-center group">
{x.name}
<MoreButton></MoreButton>
<ConversationDropdown conversation={x}>
<MoreButton></MoreButton>
</ConversationDropdown>
</CardContent>
</Card>
))}

View File

@ -43,7 +43,12 @@ export const useSelectDerivedConversationList = () => {
const { t } = useTranslate('chat');
const [list, setList] = useState<Array<IConversation>>([]);
const { data: conversationList, loading } = useFetchConversationList();
const {
data: conversationList,
loading,
handleInputChange,
searchString,
} = useFetchConversationList();
const { id: dialogId } = useParams();
const { setNewConversationRouteParams } = useSetNewConversationRouteParams();
const prologue = useFindPrologueFromDialogList();
@ -81,5 +86,11 @@ export const useSelectDerivedConversationList = () => {
setList([...conversationList]);
}, [conversationList]);
return { list, addTemporaryConversation, loading };
return {
list,
addTemporaryConversation,
loading,
handleInputChange,
searchString,
};
};

View File

@ -18,6 +18,7 @@ import { useParams, useSearchParams } from 'umi';
import { v4 as uuid } from 'uuid';
import { IMessage } from '../chat/interface';
import { useFindPrologueFromDialogList } from './use-select-conversation-list';
import { useUploadFile } from './use-upload-file';
export const useSetChatRouteParams = () => {
const [currentQueryParameters, setSearchParams] = useSearchParams();
@ -137,6 +138,8 @@ export const useSendMessage = (controller: AbortController) => {
const { conversationId, isNew } = useGetChatSearchParams();
const { handleInputChange, value, setValue } = useHandleMessageInputChange();
const { handleUploadFile, fileIds, clearFileIds } = useUploadFile();
const { send, answer, done } = useSendMessageWithSse(
api.completeConversation,
);
@ -238,29 +241,35 @@ export const useSendMessage = (controller: AbortController) => {
}
}, [answer, addNewestAnswer, conversationId, isNew]);
const handlePressEnter = useCallback(
(documentIds: string[]) => {
if (trim(value) === '') return;
const id = uuid();
const handlePressEnter = useCallback(() => {
if (trim(value) === '') return;
const id = uuid();
addNewestQuestion({
content: value,
doc_ids: documentIds,
addNewestQuestion({
content: value,
doc_ids: fileIds,
id,
role: MessageType.User,
});
if (done) {
setValue('');
handleSendMessage({
id,
content: value.trim(),
role: MessageType.User,
doc_ids: fileIds,
});
if (done) {
setValue('');
handleSendMessage({
id,
content: value.trim(),
role: MessageType.User,
doc_ids: documentIds,
});
}
},
[addNewestQuestion, handleSendMessage, done, setValue, value],
);
}
clearFileIds();
}, [
value,
addNewestQuestion,
fileIds,
done,
clearFileIds,
setValue,
handleSendMessage,
]);
return {
handlePressEnter,
@ -275,5 +284,6 @@ export const useSendMessage = (controller: AbortController) => {
derivedMessages,
removeMessageById,
stopOutputMessage,
handleUploadFile,
};
};

View File

@ -12,6 +12,7 @@ import { useCallback, useEffect, useState } from 'react';
import { v4 as uuid } from 'uuid';
import { IMessage } from '../chat/interface';
import { useBuildFormRefs } from './use-build-form-refs';
import { useUploadFile } from './use-upload-file';
export function useSendMultipleChatMessage(
controller: AbortController,
@ -24,10 +25,12 @@ export function useSendMultipleChatMessage(
const { conversationId } = useGetChatSearchParams();
const { handleInputChange, value, setValue } = useHandleMessageInputChange();
const { send, answer, done } = useSendMessageWithSse(
const { send, answer, allDone } = useSendMessageWithSse(
api.completeConversation,
);
const { handleUploadFile, fileIds, clearFileIds } = useUploadFile();
const { setFormRef, getLLMConfigById, isLLMConfigEmpty } =
useBuildFormRefs(chatBoxIds);
@ -182,12 +185,12 @@ export function useSendMultipleChatMessage(
id,
role: MessageType.User,
chatBoxId,
doc_ids: fileIds,
});
}
});
if (done) {
// TODO:
if (allDone) {
setValue('');
chatBoxIds.forEach((chatBoxId) => {
if (!isLLMConfigEmpty(chatBoxId)) {
@ -196,18 +199,22 @@ export function useSendMultipleChatMessage(
id,
content: value.trim(),
role: MessageType.User,
doc_ids: fileIds,
},
chatBoxId,
});
}
});
}
clearFileIds();
}, [
value,
chatBoxIds,
done,
allDone,
clearFileIds,
isLLMConfigEmpty,
addNewestQuestion,
fileIds,
setValue,
sendMessage,
]);
@ -229,7 +236,8 @@ export function useSendMultipleChatMessage(
handleInputChange,
handlePressEnter,
stopOutputMessage,
sendLoading: false,
sendLoading: !allDone,
setFormRef,
handleUploadFile,
};
}

View File

@ -0,0 +1,27 @@
import { FileUploadProps } from '@/components/file-upload';
import { useUploadAndParseFile } from '@/hooks/use-chat-request';
import { useCallback, useState } from 'react';
export function useUploadFile() {
const { uploadAndParseFile } = useUploadAndParseFile();
const [fileIds, setFileIds] = useState<string[]>([]);
const handleUploadFile: NonNullable<FileUploadProps['onUpload']> =
useCallback(
async (files) => {
if (Array.isArray(files) && files.length) {
const ret = await uploadAndParseFile(files[0]);
if (ret.code === 0 && Array.isArray(ret.data)) {
setFileIds((list) => [...list, ...ret.data]);
}
}
},
[uploadAndParseFile],
);
const clearFileIds = useCallback(() => {
setFileIds([]);
}, []);
return { handleUploadFile, clearFileIds, fileIds };
}
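
For context on the upload path: useUploadAndParseFile posts the file plus the current conversation_id as multipart form data, and useUploadFile collects the returned ids so they can be attached as doc_ids to the next question. The sketch below approximates that request with plain fetch; the real code calls chatService.uploadAndParse (registered against the upload_and_parse route at the end of this diff), and uploadAndParse/uploadUrl here are hypothetical stand-ins.

```ts
// Illustrative only: approximates the upload-and-parse call wired up above.
async function uploadAndParse(
  uploadUrl: string, // placeholder; the real endpoint is the registered upload_and_parse route
  conversationId: string,
  file: File,
): Promise<string[]> {
  const formData = new FormData();
  formData.append('file', file);
  formData.append('conversation_id', conversationId);

  const response = await fetch(uploadUrl, { method: 'POST', body: formData });
  const body = await response.json();

  // Mirrors the checks in the hooks: code === 0 means success and
  // data is expected to be the list of parsed document ids.
  return body.code === 0 && Array.isArray(body.data) ? body.data : [];
}

// Usage sketch: the returned ids would be accumulated as fileIds and later
// sent as doc_ids with the outgoing message.
// const ids = await uploadAndParse(uploadUrl, conversationId, file);
```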

View File

@ -27,6 +27,7 @@ const {
mindmap,
getRelatedQuestions,
listNextDialog,
upload_and_parse,
} = api;
const methods = {
@ -126,6 +127,10 @@ const methods = {
url: getRelatedQuestions,
method: 'post',
},
uploadAndParse: {
method: 'post',
url: upload_and_parse,
},
} as const;
const chatService = registerNextServer<keyof typeof methods>(methods);