Mirror of https://github.com/infiniflow/ragflow.git, synced 2025-12-08 20:42:30 +08:00
### What problem does this PR solve?

Hello, our use case requires the LLM agent to invoke some tools, so I made a simple implementation here. This PR does two things:

1. A simple plugin mechanism based on `pluginlib`. It lives in the `plugin` directory and, for now, only loads plugins from `plugin/embedded_plugins`. A sample plugin `bad_calculator.py` is placed in `plugin/embedded_plugins/llm_tools`; it accepts two numbers `a` and `b` and returns the deliberately wrong result `a + b + 100`. In the future, plugins can be loaded from external locations with little code change. Plugins are divided into types; the only type supported in this PR is `llm_tools`, whose plugins must implement the `LLMToolPlugin` class in `plugin/llm_tool_plugin.py`. More plugin types can be added later (see the sketch below this description).
2. A tool selector in the `Generate` component: a selector was added for choosing one or more tools for the LLM (screenshot omitted). With the `bad_calculator` tool enabled, the `qwen-max` model produces the intentionally wrong result (screenshot omitted).

### Type of change

- [ ] Bug Fix (non-breaking change which fixes an issue)
- [x] New Feature (non-breaking change which adds functionality)
- [ ] Documentation Update
- [ ] Refactoring
- [ ] Performance Improvement
- [ ] Other (please describe):

Co-authored-by: Yingfeng <yingfeng.zhang@gmail.com>
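To make the plugin contract more concrete, here is a minimal sketch of what an `llm_tools` plugin such as `bad_calculator.py` could look like on top of `pluginlib`. The method names (`get_metadata`, `invoke`) and the metadata shape are assumptions chosen for illustration, not the actual interface defined by `LLMToolPlugin` in `plugin/llm_tool_plugin.py`:

```python
# Illustrative sketch only: the real base class lives in plugin/llm_tool_plugin.py;
# the method names and metadata layout below are assumptions, not this PR's API.
import pluginlib


@pluginlib.Parent('llm_tools')
class LLMToolPlugin:
    """Parent class that every llm_tools plugin subclasses."""

    @pluginlib.abstractmethod
    def get_metadata(self) -> dict:
        """Describe the tool (name, description, parameters) to the LLM."""

    @pluginlib.abstractmethod
    def invoke(self, **kwargs) -> str:
        """Run the tool and return its result as a string."""


class BadCalculator(LLMToolPlugin):
    """Sample tool that deliberately returns a + b + 100."""

    _alias_ = 'bad_calculator'

    def get_metadata(self) -> dict:
        return {
            'name': 'bad_calculator',
            'description': 'Adds two numbers a and b (incorrectly, on purpose).',
            'parameters': {
                'a': {'type': 'number', 'description': 'First operand'},
                'b': {'type': 'number', 'description': 'Second operand'},
            },
        }

    def invoke(self, a: float, b: float) -> str:
        # Wrong by design, so it is obvious when the model actually called the tool.
        return str(a + b + 100)
```

With `pluginlib`, subclasses of a registered parent are discovered by a plugin loader, which is why loading from `plugin/embedded_plugins` today and from external locations later can stay a small code change.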
73 lines
2.1 KiB
TypeScript
import LLMSelect from '@/components/llm-select';
import MessageHistoryWindowSizeItem from '@/components/message-history-window-size-item';
import { PromptEditor } from '@/components/prompt-editor';
import { useTranslate } from '@/hooks/common-hooks';
import { Form, Switch } from 'antd';
import { IOperatorForm } from '../../interface';
import LLMToolsSelect from '@/components/llm-tools-select';
import { useState } from 'react';

const GenerateForm = ({ onValuesChange, form }: IOperatorForm) => {
  const { t } = useTranslate('flow');

  // Whether the currently selected model declares tool-calling support
  // (read from the option's `is_tools` flag); gates the tool selector below.
  const [isCurrentLlmSupportTools, setCurrentLlmSupportTools] = useState(false);

  const onLlmSelectChanged = (_: string, option: any) => {
    setCurrentLlmSupportTools(option.is_tools);
  };

  return (
    <Form
      name="basic"
      autoComplete="off"
      form={form}
      onValuesChange={onValuesChange}
      layout={'vertical'}
    >
      <Form.Item
        name={'llm_id'}
        label={t('model', { keyPrefix: 'chat' })}
        tooltip={t('modelTip', { keyPrefix: 'chat' })}
      >
        <LLMSelect
          onInitialValue={onLlmSelectChanged}
          onChange={onLlmSelectChanged}
        ></LLMSelect>
      </Form.Item>
      <Form.Item
        name={['prompt']}
        label={t('systemPrompt')}
        initialValue={t('promptText')}
        tooltip={t('promptTip')}
        rules={[
          {
            required: true,
            message: t('promptMessage'),
          },
        ]}
      >
        {/* <Input.TextArea rows={8}></Input.TextArea> */}
        <PromptEditor></PromptEditor>
      </Form.Item>
      <Form.Item
        name={'llm_enabled_tools'}
        label={t('modelEnabledTools', { keyPrefix: 'chat' })}
        tooltip={t('modelEnabledToolsTip', { keyPrefix: 'chat' })}
      >
        <LLMToolsSelect disabled={!isCurrentLlmSupportTools}></LLMToolsSelect>
      </Form.Item>
      <Form.Item
        name={['cite']}
        label={t('cite')}
        initialValue={true}
        valuePropName="checked"
        tooltip={t('citeTip')}
      >
        <Switch />
      </Form.Item>
      <MessageHistoryWindowSizeItem
        initialValue={12}
      ></MessageHistoryWindowSizeItem>
    </Form>
  );
};

export default GenerateForm;