mirror of
https://github.com/infiniflow/ragflow.git
synced 2025-12-08 20:42:30 +08:00
### What problem does this PR solve? Add support for Ollama. Issue link: #221 ### Type of change - [x] New Feature (non-breaking change which adds functionality)
This commit is contained in:
@ -2,6 +2,7 @@ import { useSetModalState } from '@/hooks/commonHooks';
import {
  IApiKeySavingParams,
  ISystemModelSettingSavingParams,
  useAddLlm,
  useFetchLlmList,
  useSaveApiKey,
  useSaveTenantInfo,
@ -12,6 +13,7 @@ import {
  useFetchTenantInfo,
  useSelectTenantInfo,
} from '@/hooks/userSettingHook';
import { IAddLlmRequestBody } from '@/interfaces/request/llm';
import { useCallback, useEffect, useState } from 'react';

// API-key saving parameters with the `api_key` field omitted — the key itself
// is supplied separately at submit time.
type SavingParamsState = Omit<IApiKeySavingParams, 'api_key'>;
@ -127,3 +129,31 @@ export const useSelectModelProvidersLoading = () => {

  return loading;
};

export const useSubmitOllama = () => {
|
||||
const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
|
||||
const addLlm = useAddLlm();
|
||||
const {
|
||||
visible: llmAddingVisible,
|
||||
hideModal: hideLlmAddingModal,
|
||||
showModal: showLlmAddingModal,
|
||||
} = useSetModalState();
|
||||
|
||||
const onLlmAddingOk = useCallback(
|
||||
async (payload: IAddLlmRequestBody) => {
|
||||
const ret = await addLlm(payload);
|
||||
if (ret === 0) {
|
||||
hideLlmAddingModal();
|
||||
}
|
||||
},
|
||||
[hideLlmAddingModal, addLlm],
|
||||
);
|
||||
|
||||
return {
|
||||
llmAddingLoading: loading,
|
||||
onLlmAddingOk,
|
||||
llmAddingVisible,
|
||||
hideLlmAddingModal,
|
||||
showLlmAddingModal,
|
||||
};
|
||||
};
Reference in New Issue
Block a user