Feat: Support tool calling in Generate component (#7572)

### What problem does this PR solve?

Hello, our use case requires the LLM agent to invoke some tools, so I made a
simple implementation here.

This PR does two things:

1. A simple plugin mechanism based on `pluginlib`:

This mechanism lives in the `plugin` directory. It will only load
plugins from `plugin/embedded_plugins` for now.

A sample plugin `bad_calculator.py` is placed in
`plugin/embedded_plugins/llm_tools`; it accepts two numbers `a` and `b`,
then gives a wrong result `a + b + 100`.

In the future, it can load plugins from external locations with little
code change.

Plugins are divided into different types. The only plugin type supported
in this PR is `llm_tools`, which must implement the `LLMToolPlugin`
class in the `plugin/llm_tool_plugin.py`.
More plugin types can be added in the future.

2. A tool selector in the `Generate` component:

Added a tool selector to select one or more tools for LLM:


![image](https://github.com/user-attachments/assets/74a21fdf-9333-4175-991b-43df6524c5dc)

And with the `bad_calculator` tool, it produces this result with the `qwen-max`
model:


![image](https://github.com/user-attachments/assets/93aff9c4-8550-414a-90a2-1a15a5249d94)


### Type of change

- [ ] Bug Fix (non-breaking change which fixes an issue)
- [x] New Feature (non-breaking change which adds functionality)
- [ ] Documentation Update
- [ ] Refactoring
- [ ] Performance Improvement
- [ ] Other (please describe):

Co-authored-by: Yingfeng <yingfeng.zhang@gmail.com>
This commit is contained in:
Song Fuchang
2025-05-16 16:32:19 +08:00
committed by GitHub
parent cb26564d50
commit a1f06a4fdc
28 changed files with 625 additions and 61 deletions

View File

@ -11,19 +11,31 @@ import { Select, SelectTrigger, SelectValue } from '../ui/select';
interface IProps {
id?: string;
value?: string;
onChange?: (value: string) => void;
onInitialValue?: (value: string, option: any) => void;
onChange?: (value: string, option: any) => void;
disabled?: boolean;
}
const LLMSelect = ({ id, value, onChange, disabled }: IProps) => {
const LLMSelect = ({ id, value, onInitialValue, onChange, disabled }: IProps) => {
const modelOptions = useComposeLlmOptionsByModelTypes([
LlmModelType.Chat,
LlmModelType.Image2text,
]);
if (onInitialValue && value) {
for (const modelOption of modelOptions) {
for (const option of modelOption.options) {
if (option.value === value) {
onInitialValue(value, option);
break;
}
}
}
}
const content = (
<div style={{ width: 400 }}>
<LlmSettingItems
<LlmSettingItems onChange={onChange}
formItemLayout={{ labelCol: { span: 10 }, wrapperCol: { span: 14 } }}
></LlmSettingItems>
</div>

View File

@ -16,9 +16,10 @@ interface IProps {
prefix?: string;
formItemLayout?: any;
handleParametersChange?(value: ModelVariableType): void;
onChange?(value: string, option: any): void;
}
const LlmSettingItems = ({ prefix, formItemLayout = {} }: IProps) => {
const LlmSettingItems = ({ prefix, formItemLayout = {}, onChange }: IProps) => {
const form = Form.useFormInstance();
const { t } = useTranslate('chat');
const parameterOptions = Object.values(ModelVariableType).map((x) => ({
@ -58,6 +59,7 @@ const LlmSettingItems = ({ prefix, formItemLayout = {} }: IProps) => {
options={modelOptions}
showSearch
popupMatchSelectWidth={false}
onChange={onChange}
/>
</Form.Item>
<div className="border rounded-md">

View File

@ -0,0 +1,51 @@
import { useTranslate } from '@/hooks/common-hooks';
import { useLlmToolsList } from '@/hooks/plugin-hooks';
import { Select, Space } from 'antd';
interface IProps {
value?: string;
onChange?: (value: string) => void;
disabled?: boolean;
}
const LLMToolsSelect = ({ value, onChange, disabled }: IProps) => {
const { t } = useTranslate("llmTools");
const tools = useLlmToolsList();
function wrapTranslation(text: string): string {
if (!text) {
return text;
}
if (text.startsWith("$t:")) {
return t(text.substring(3));
}
return text;
}
const toolOptions = tools.map(t => ({
label: wrapTranslation(t.displayName),
description: wrapTranslation(t.displayDescription),
value: t.name,
title: wrapTranslation(t.displayDescription),
}));
return (
<Select
mode="multiple"
options={toolOptions}
optionRender={option => (
<Space size="large">
{option.label}
{option.data.description}
</Space>
)}
onChange={onChange}
value={value}
disabled={disabled}
></Select>
);
};
export default LLMToolsSelect;

View File

@ -71,6 +71,7 @@ function buildLlmOptionsWithIcon(x: IThirdOAIModel) {
),
value: `${x.llm_name}@${x.fid}`,
disabled: !x.available,
is_tools: x.is_tools,
};
}
@ -142,7 +143,7 @@ export const useComposeLlmOptionsByModelTypes = (
return modelTypes.reduce<
(DefaultOptionType & {
options: { label: JSX.Element; value: string; disabled: boolean }[];
options: { label: JSX.Element; value: string; disabled: boolean; is_tools: boolean }[];
})[]
>((pre, cur) => {
const options = allOptions[cur];

View File

@ -0,0 +1,17 @@
import { ILLMTools } from '@/interfaces/database/plugin';
import pluginService from '@/services/plugin-service';
import { useQuery } from '@tanstack/react-query';
// Fetches the list of LLM tool plugins exposed by the backend.
// Yields an empty list (the initialData) until the query resolves.
export const useLlmToolsList = (): ILLMTools => {
  const query = useQuery({
    queryKey: ['llmTools'],
    initialData: [],
    queryFn: async () => {
      const response = await pluginService.getLlmTools();
      return response.data?.data ?? [];
    },
  });

  return query.data;
};

View File

@ -13,6 +13,7 @@ export interface IThirdOAIModel {
update_time: number;
tenant_id?: string;
tenant_name?: string;
is_tools: boolean;
}
export type IThirdOAIModelCollection = Record<string, IThirdOAIModel[]>;

View File

@ -0,0 +1,13 @@
// List of tool plugin descriptors as returned by the /plugin/llm_tools endpoint.
export type ILLMTools = ILLMToolMetadata[];
// Metadata describing one LLM tool plugin.
export interface ILLMToolMetadata {
  // Unique tool identifier (e.g. "bad_calculator").
  name: string;
  // Human-readable name; may carry a "$t:<key>" i18n marker.
  displayName: string;
  // Human-readable description; may carry a "$t:<key>" i18n marker.
  displayDescription: string;
  // NOTE(review): this value arrives via a JSON HTTP response, and JSON
  // deserializes to a plain object — never a Map. Record<string,
  // ILLMToolParameter> is likely the accurate type; confirm against the
  // backend response shape before anyone calls .get()/.entries() on it.
  parameters: Map<string, ILLMToolParameter>;
}
// Describes a single parameter accepted by a tool.
export interface ILLMToolParameter {
  // Parameter type name (e.g. "number").
  type: string;
  // Human-readable description; may carry a "$t:<key>" i18n marker.
  displayDescription: string;
}

View File

@ -454,6 +454,8 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
model: 'Model',
modelTip: 'Large language chat model',
modelMessage: 'Please select!',
modelEnabledTools: 'Enabled tools',
modelEnabledToolsTip: 'Please select one or more tools for the chat model to use. It takes no effect for models not supporting tool call.',
freedom: 'Freedom',
improvise: 'Improvise',
precise: 'Precise',
@ -1267,5 +1269,15 @@ This delimiter is used to split the input text into several text pieces echo of
inputVariables: 'Input variables',
runningHintText: 'is running...🕞',
},
llmTools: {
bad_calculator: {
name: "Calculator",
description: "A tool to calculate the sum of two numbers (will give wrong answer)",
params: {
a: "The first number",
b: "The second number",
},
},
},
},
};

View File

@ -461,6 +461,8 @@ General实体和关系提取提示来自 GitHub - microsoft/graphrag基于
model: '模型',
modelTip: '大语言聊天模型',
modelMessage: '请选择',
modelEnabledTools: '可用的工具',
modelEnabledToolsTip: '请选择一个或多个可供该模型所使用的工具。仅对支持工具调用的模型生效。',
freedom: '自由度',
improvise: '即兴创作',
precise: '精确',
@ -1231,5 +1233,15 @@ General实体和关系提取提示来自 GitHub - microsoft/graphrag基于
knowledge: 'knowledge',
chat: 'chat',
},
llmTools: {
bad_calculator: {
name: "计算器",
description: "用于计算两个数的和的工具(会给出错误答案)",
params: {
a: "第一个数",
b: "第二个数",
},
},
},
},
};

View File

@ -4,10 +4,18 @@ import { PromptEditor } from '@/components/prompt-editor';
import { useTranslate } from '@/hooks/common-hooks';
import { Form, Switch } from 'antd';
import { IOperatorForm } from '../../interface';
import LLMToolsSelect from '@/components/llm-tools-select';
import { useState } from 'react';
const GenerateForm = ({ onValuesChange, form }: IOperatorForm) => {
const { t } = useTranslate('flow');
const [isCurrentLlmSupportTools, setCurrentLlmSupportTools] = useState(false);
const onLlmSelectChanged = (_: string, option: any) => {
setCurrentLlmSupportTools(option.is_tools);
};
return (
<Form
name="basic"
@ -21,7 +29,7 @@ const GenerateForm = ({ onValuesChange, form }: IOperatorForm) => {
label={t('model', { keyPrefix: 'chat' })}
tooltip={t('modelTip', { keyPrefix: 'chat' })}
>
<LLMSelect></LLMSelect>
<LLMSelect onInitialValue={onLlmSelectChanged} onChange={onLlmSelectChanged}></LLMSelect>
</Form.Item>
<Form.Item
name={['prompt']}
@ -38,6 +46,13 @@ const GenerateForm = ({ onValuesChange, form }: IOperatorForm) => {
{/* <Input.TextArea rows={8}></Input.TextArea> */}
<PromptEditor></PromptEditor>
</Form.Item>
<Form.Item
name={'llm_enabled_tools'}
label={t('modelEnabledTools', { keyPrefix: 'chat' })}
tooltip={t('modelEnabledToolsTip', { keyPrefix: 'chat' })}
>
<LLMToolsSelect disabled={!isCurrentLlmSupportTools}></LLMToolsSelect>
</Form.Item>
<Form.Item
name={['cite']}
label={t('cite')}

View File

@ -0,0 +1,18 @@
import api from '@/utils/api';
import registerServer from '@/utils/register-server';
import request from '@/utils/request';
// HTTP endpoint definitions for the plugin module.
const methods = {
  getLlmTools: {
    url: api.llm_tools,
    method: 'get',
  },
} as const;

// Builds a typed service object exposing one request function per method key.
const pluginService = registerServer<keyof typeof methods>(methods, request);

export default pluginService;

View File

@ -32,6 +32,9 @@ export default {
delete_llm: `${api_host}/llm/delete_llm`,
deleteFactory: `${api_host}/llm/delete_factory`,
// plugin
llm_tools: `${api_host}/plugin/llm_tools`,
// knowledge base
kb_list: `${api_host}/kb/list`,
create_kb: `${api_host}/kb/create`,