feat: add support for ollama #221 (#260)

### What problem does this PR solve?

Add support for Ollama as a model provider.

Issue link: #221

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
Author: balibabu
Committed: 2024-04-08 19:13:45 +08:00 (via GitHub)
Parent: d0a1ffe6e2
Commit: 265a7a283a
22 changed files with 275 additions and 26 deletions


```diff
@@ -2,6 +2,7 @@ import { useSetModalState } from '@/hooks/commonHooks';
 import {
   IApiKeySavingParams,
   ISystemModelSettingSavingParams,
+  useAddLlm,
   useFetchLlmList,
   useSaveApiKey,
   useSaveTenantInfo,
```
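The newly imported `useAddLlm` is the hook that actually fires the add-model request; its implementation is outside this diff. A minimal sketch of what it could look like, assuming a dva-style `settingModel` namespace with an `add_llm` effect (matching the loading key used later in `useSubmitOllama`) and umi's `useDispatch`, with the effect resolving to a numeric retcode where `0` means success:

```typescript
import { useCallback } from 'react';
import { useDispatch } from 'umi';

import { IAddLlmRequestBody } from '@/interfaces/request/llm';

// Hypothetical sketch of useAddLlm: dispatch the `add_llm` effect on the
// dva `settingModel` namespace and surface its numeric retcode (0 = success).
export const useAddLlm = () => {
  const dispatch = useDispatch();

  const addLlm = useCallback(
    (requestBody: IAddLlmRequestBody) =>
      // dva effects return a promise from dispatch; the cast reflects the
      // assumed retcode contract checked by the caller (`ret === 0`).
      dispatch({
        type: 'settingModel/add_llm',
        payload: requestBody,
      }) as unknown as Promise<number>,
    [dispatch],
  );

  return addLlm;
};
```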
```diff
@@ -12,6 +13,7 @@ import {
   useFetchTenantInfo,
   useSelectTenantInfo,
 } from '@/hooks/userSettingHook';
+import { IAddLlmRequestBody } from '@/interfaces/request/llm';
 import { useCallback, useEffect, useState } from 'react';
 
 type SavingParamsState = Omit<IApiKeySavingParams, 'api_key'>;
```
```diff
@@ -127,3 +129,31 @@ export const useSelectModelProvidersLoading = () => {
 
   return loading;
 };
+
+export const useSubmitOllama = () => {
+  const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
+  const addLlm = useAddLlm();
+  const {
+    visible: llmAddingVisible,
+    hideModal: hideLlmAddingModal,
+    showModal: showLlmAddingModal,
+  } = useSetModalState();
+
+  const onLlmAddingOk = useCallback(
+    async (payload: IAddLlmRequestBody) => {
+      const ret = await addLlm(payload);
+      if (ret === 0) {
+        hideLlmAddingModal();
+      }
+    },
+    [hideLlmAddingModal, addLlm],
+  );
+
+  return {
+    llmAddingLoading: loading,
+    onLlmAddingOk,
+    llmAddingVisible,
+    hideLlmAddingModal,
+    showLlmAddingModal,
+  };
+};
```
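`useSubmitOllama` bundles the modal visibility state, the submit loading flag, and the submit handler into a single return value, so a settings page only needs to spread it onto a dialog. An illustrative consumer, assuming an antd `Button` and a hypothetical `OllamaAddingModal` component (neither the component nor the import paths are part of this commit):

```tsx
import { Button } from 'antd';

import OllamaAddingModal from './ollama-adding-modal'; // hypothetical component
import { useSubmitOllama } from './hooks';             // path is an assumption

const ModelProviderSettings = () => {
  const {
    llmAddingLoading,
    onLlmAddingOk,
    llmAddingVisible,
    hideLlmAddingModal,
    showLlmAddingModal,
  } = useSubmitOllama();

  return (
    <>
      {/* Open the "add Ollama model" dialog */}
      <Button onClick={showLlmAddingModal}>Add Ollama model</Button>
      <OllamaAddingModal
        visible={llmAddingVisible}
        loading={llmAddingLoading}
        onOk={onLlmAddingOk} // receives an IAddLlmRequestBody payload
        hideModal={hideLlmAddingModal}
      />
    </>
  );
};

export default ModelProviderSettings;
```

The modal closes only when `addLlm` resolves with retcode `0`, so a failed request keeps the dialog open for the user to correct the input.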