Mirror of https://github.com/infiniflow/ragflow.git, synced 2025-12-08 20:42:30 +08:00
### What problem does this PR solve?

Fix: Search app AI summary error and the tag set cannot be selected (#9649, #9652)

- Search app AI summary error: `'dict' object has no attribute 'split'` (#9649)
- Fix: the tag set cannot be selected in the knowledge base (#9652)
- Added custom parameter options to the LlmSettingFieldItems component
- Adjusted the document preview height to improve page layout adaptability

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
SliderInputSwitchFormField:

```diff
@@ -19,6 +19,7 @@ type SliderInputSwitchFormFieldProps = {
   name: string;
   label: string;
   defaultValue?: number;
+  onChange?: (value: number) => void;
   className?: string;
   checkName: string;
 };
@@ -30,6 +31,7 @@ export function SliderInputSwitchFormField({
   label,
   name,
   defaultValue,
+  onChange,
   className,
   checkName,
 }: SliderInputSwitchFormFieldProps) {
@@ -66,6 +68,10 @@ export function SliderInputSwitchFormField({
         <FormControl>
           <SingleFormSlider
             {...field}
+            onChange={(value: number) => {
+              onChange?.(value);
+              field.onChange(value);
+            }}
             max={max}
             min={min}
             step={step}
@@ -80,6 +86,10 @@ export function SliderInputSwitchFormField({
             min={min}
             step={step}
             {...field}
+            onChange={(value: number) => {
+              onChange?.(value);
+              field.onChange(value);
+            }}
           ></NumberInput>
         </FormControl>
       </div>
```
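The new `onChange` prop is optional, so existing call sites of the field are unaffected; callers that need to react to edits (as the LLM settings form does further down in this commit) can pass a handler that runs alongside react-hook-form's own `field.onChange`. A minimal usage sketch; the import path and field names here are illustrative rather than taken from the repository:

```tsx
// Hypothetical consumer of the extended field inside a react-hook-form form.
// The import path is illustrative; the real path lives in the ragflow web app.
import { SliderInputSwitchFormField } from '@/components/slider-input-switch-form-field';

export function TemperatureField() {
  return (
    <SliderInputSwitchFormField
      name="llm_setting.temperature"
      checkName="llm_setting.temperatureEnabled"
      label="temperature"
      max={1}
      step={0.01}
      // Runs in addition to the form-state update handled internally.
      onChange={(value) => console.log('temperature changed to', value)}
    ></SliderInputSwitchFormField>
  );
}
```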
MultiSelect:

```diff
@@ -209,8 +209,10 @@ export const MultiSelect = React.forwardRef<
     const [isAnimating, setIsAnimating] = React.useState(false);
 
     React.useEffect(() => {
-      setSelectedValues(defaultValue);
-    }, [defaultValue]);
+      if (selectedValues === undefined) {
+        setSelectedValues(defaultValue);
+      }
+    }, [defaultValue, selectedValues]);
 
     const flatOptions = React.useMemo(() => {
       return options.flatMap((option) =>
```
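The guard is what fixes #9652: the old effect re-seeded `selectedValues` from `defaultValue` on every change of that prop, which can wipe out a selection the user just made (for example, when the parent rebuilds the `defaultValue` array on each re-render); the new version only seeds the state while nothing has been selected yet. A condensed, component-free sketch of the pattern (the hook name is illustrative):

```tsx
import React from 'react';

// Seed a selection from defaultValue exactly once, instead of on every
// defaultValue identity change, mirroring the guarded effect above.
function useSeededSelection(defaultValue?: string[]) {
  const [selectedValues, setSelectedValues] = React.useState<string[] | undefined>(
    undefined,
  );

  React.useEffect(() => {
    if (selectedValues === undefined) {
      setSelectedValues(defaultValue);
    }
  }, [defaultValue, selectedValues]);

  return [selectedValues ?? [], setSelectedValues] as const;
}
```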
English locale:

```diff
@@ -479,6 +479,7 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
   improvise: 'Improvise',
   precise: 'Precise',
   balance: 'Balance',
+  custom: 'Custom',
   freedomTip: `A shortcut to 'Temperature', 'Top P', 'Presence penalty', and 'Frequency penalty' settings, indicating the freedom level of the model. This parameter has three options: Select 'Improvise' to produce more creative responses; select 'Precise' (default) to produce more conservative responses; 'Balance' is a middle ground between 'Improvise' and 'Precise'.`,
   temperature: 'Temperature',
   temperatureMessage: 'Temperature is required',
```
Traditional Chinese locale:

```diff
@@ -454,6 +454,7 @@ export default {
   improvise: '即興創作',
   precise: '精確',
   balance: '平衡',
+  custom: '自定義',
   freedomTip: `“精確”意味著法學碩士會保守並謹慎地回答你的問題。“即興發揮”意味著你希望法學碩士能夠自由地暢所欲言。“平衡”是謹慎與自由之間的平衡。`,
   temperature: '溫度',
   temperatureMessage: '溫度是必填項',
```
Simplified Chinese locale:

```diff
@@ -477,6 +477,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
   improvise: '即兴创作',
   precise: '精确',
   balance: '平衡',
+  custom: '自定义',
   freedomTip: `“精确”意味着大语言模型会保守并谨慎地回答你的问题。 “即兴发挥”意味着你希望大语言模型能够自由地畅所欲言。 “平衡”是谨慎与自由之间的平衡。`,
   temperature: '温度',
   temperatureMessage: '温度是必填项',
```
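All three locales gain a `custom` key because the parameter dropdown built later in this commit resolves its extra entry with `t(camelCase('Custom'))`, and lodash's `camelCase('Custom')` is simply `'custom'`. A tiny sketch of that lookup against a stand-in translation table:

```ts
import { camelCase } from 'lodash';

// Illustrative stand-in for the active locale and the translate function.
const locale: Record<string, string> = {
  improvise: 'Improvise',
  precise: 'Precise',
  balance: 'Balance',
  custom: 'Custom',
};
const t = (key: string) => locale[key] ?? key;

console.log(camelCase('Custom')); // 'custom'
console.log(t(camelCase('Custom'))); // 'Custom' — served by the newly added key
```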
Document preview styles:

```diff
@@ -35,7 +35,7 @@
 
 .documentPreview {
   // width: 40%;
-  height: calc(100vh - 130px);
+  height: calc(100vh - 180px);
   overflow: auto;
 }
 
```
LlmSettingFieldItems:

```diff
@@ -21,7 +21,7 @@ import {
 } from '@/constants/knowledge';
 import { useTranslate } from '@/hooks/common-hooks';
 import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
-import { camelCase } from 'lodash';
+import { camelCase, isEqual } from 'lodash';
 import { useCallback } from 'react';
 import { useFormContext } from 'react-hook-form';
 import { z } from 'zod';
```
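`isEqual` is imported because the parameter-reset check added below has to compare the stored preset object against values read back out of the form; those are different object instances, so reference equality would always report a mismatch. A quick illustration of the difference (the numbers are hypothetical, not the real presets):

```ts
import { isEqual } from 'lodash';

const preset = { temperature: 0.2, top_p: 0.3 }; // hypothetical preset values
const current = { temperature: 0.2, top_p: 0.3 }; // same values read back from the form

console.log(preset === current); // false — different object identities
console.log(isEqual(preset, current)); // true — deep, key-by-key comparison
```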
```diff
@@ -61,20 +61,15 @@ export function LlmSettingFieldItems({
 
   const handleChange = useCallback(
     (parameter: string) => {
       const currentValues = { ...form.getValues() };
-      console.log('currentValues', currentValues);
       const values =
         settledModelVariableMap[
           parameter as keyof typeof settledModelVariableMap
         ];
-      const enabledKeys = Object.keys(LlmSettingEnableSchema);
 
       // const nextValues = { ...currentValues, ...values };
 
       for (const key in values) {
-        if (Object.prototype.hasOwnProperty.call(values, key)) {
-          const element = values[key];
-
+        const element = values[key as keyof typeof values];
         form.setValue(`${prefix}.${key}`, element);
-        }
       }
```
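Besides dropping a debug `console.log` and an unused variable, the hunk tightens the loop's typing: inside `for...in`, `key` is only a `string`, so strict TypeScript rejects `values[key]` when `values` has a known shape; casting to `keyof typeof values` keeps the indexed access type-safe without changing behaviour. A standalone sketch of the pattern with a hypothetical preset object:

```ts
// `preset` stands in for one entry of settledModelVariableMap; the values are made up.
const preset = { temperature: 0.1, top_p: 0.3, frequency_penalty: 0.7 };

for (const key in preset) {
  // Without the cast, `key` is typed as plain `string` and the index
  // errors under strict mode / noImplicitAny.
  const element = preset[key as keyof typeof preset];
  console.log(key, element);
}
```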
```diff
@@ -90,7 +85,11 @@ export function LlmSettingFieldItems({
   const parameterOptions = Object.values(ModelVariableType).map((x) => ({
     label: t(camelCase(x)),
     value: x,
-  }));
+  })) as unknown as { label: string; value: ModelVariableType | 'Custom' }[];
+  parameterOptions.push({
+    label: t(camelCase('Custom')),
+    value: 'Custom',
+  });
 
   const getFieldWithPrefix = useCallback(
     (name: string) => {
```
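The cast widens the mapped option type so that a 'Custom' sentinel, which is not a member of `ModelVariableType`, can be pushed onto the same array. A minimal sketch of the same idea with a hypothetical enum, using a plain type annotation where the original reaches for `as unknown as`:

```ts
// Hypothetical enum standing in for ModelVariableType.
enum Preset {
  Improvise = 'Improvise',
  Precise = 'Precise',
  Balance = 'Balance',
}

type ParameterOption = { label: string; value: Preset | 'Custom' };

// Map the enum members to dropdown options, widened so that a sentinel
// entry outside the enum can be appended afterwards.
const options: ParameterOption[] = Object.values(Preset).map((x) => ({
  label: x,
  value: x,
}));

options.push({ label: 'Custom', value: 'Custom' });
```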
```diff
@@ -99,6 +98,35 @@ export function LlmSettingFieldItems({
     [prefix],
   );
 
+  const checkParameterIsEquel = () => {
+    const [
+      parameter,
+      topPValue,
+      frequencyPenaltyValue,
+      temperatureValue,
+      presencePenaltyValue,
+    ] = form.getValues([
+      getFieldWithPrefix('parameter'),
+      getFieldWithPrefix('temperature'),
+      getFieldWithPrefix('top_p'),
+      getFieldWithPrefix('frequency_penalty'),
+      getFieldWithPrefix('presence_penalty'),
+    ]);
+    if (parameter && parameter !== 'Custom') {
+      const parameterValue =
+        settledModelVariableMap[parameter as keyof typeof ModelVariableType];
+      const parameterRealValue = {
+        top_p: topPValue,
+        temperature: temperatureValue,
+        frequency_penalty: frequencyPenaltyValue,
+        presence_penalty: presencePenaltyValue,
+      };
+      if (!isEqual(parameterValue, parameterRealValue)) {
+        form.setValue(getFieldWithPrefix('parameter'), 'Custom');
+      }
+    }
+  };
+
   return (
     <div className="space-y-5">
       <FormField
```
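`checkParameterIsEquel` implements the reverse direction of the preset dropdown: when any individual slider is edited, the current values are read back from the form and compared via `isEqual` with the values stored for the selected preset; on a mismatch, the `parameter` field is switched to 'Custom'. A condensed, framework-free sketch of that decision (the preset numbers are hypothetical):

```ts
import { isEqual } from 'lodash';

type LlmValues = {
  temperature: number;
  top_p: number;
  frequency_penalty: number;
  presence_penalty: number;
};

// Hypothetical preset table standing in for settledModelVariableMap.
const presets: Record<string, LlmValues> = {
  Precise: { temperature: 0.1, top_p: 0.3, frequency_penalty: 0.7, presence_penalty: 0.4 },
};

// Decide which parameter label should be shown after an edit.
function resolveParameter(selected: string, current: LlmValues): string {
  if (!selected || selected === 'Custom') return selected;
  const preset = presets[selected];
  return preset && isEqual(preset, current) ? selected : 'Custom';
}

// Nudging temperature away from the 'Precise' preset flips the dropdown:
resolveParameter('Precise', {
  temperature: 0.15,
  top_p: 0.3,
  frequency_penalty: 0.7,
  presence_penalty: 0.4,
}); // => 'Custom'
```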
```diff
@@ -159,6 +187,9 @@ export function LlmSettingFieldItems({
         label="temperature"
         max={1}
         step={0.01}
+        onChange={() => {
+          checkParameterIsEquel();
+        }}
       ></SliderInputSwitchFormField>
       <SliderInputSwitchFormField
         name={getFieldWithPrefix('top_p')}
@@ -166,6 +197,9 @@ export function LlmSettingFieldItems({
         label="topP"
         max={1}
         step={0.01}
+        onChange={() => {
+          checkParameterIsEquel();
+        }}
       ></SliderInputSwitchFormField>
       <SliderInputSwitchFormField
         name={getFieldWithPrefix('presence_penalty')}
@@ -173,6 +207,9 @@ export function LlmSettingFieldItems({
         label="presencePenalty"
         max={1}
         step={0.01}
+        onChange={() => {
+          checkParameterIsEquel();
+        }}
       ></SliderInputSwitchFormField>
       <SliderInputSwitchFormField
         name={getFieldWithPrefix('frequency_penalty')}
@@ -180,6 +217,9 @@ export function LlmSettingFieldItems({
         label="frequencyPenalty"
         max={1}
         step={0.01}
+        onChange={() => {
+          checkParameterIsEquel();
+        }}
       ></SliderInputSwitchFormField>
       {/* <SliderInputSwitchFormField
         name={getFieldWithPrefix('max_tokens')}
```
SearchSetting:

```diff
@@ -136,9 +136,9 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
       use_rerank: search_config?.rerank_id ? true : false,
       top_k: search_config?.top_k || 1024,
       summary: search_config?.summary || false,
-      chat_id: '',
+      chat_id: search_config?.chat_id || '',
       llm_setting: {
-        llm_id: llm_setting?.llm_id || '',
+        llm_id: search_config?.chat_id || '',
         parameter: llm_setting?.parameter,
         temperature: llm_setting?.temperature,
         top_p: llm_setting?.top_p,
@@ -159,7 +159,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
         meta_data_filter: search_config?.meta_data_filter,
       },
     });
-  }, [data, search_config, llm_setting, formMethods]);
+  }, [data, search_config, llm_setting, formMethods, descriptionDefaultValue]);
 
   useEffect(() => {
     resetForm();
@@ -255,7 +255,7 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
       ...other_config
     } = search_config;
     const llmSetting = {
-      llm_id: llm_setting.llm_id,
+      // llm_id: llm_setting.llm_id,
       parameter: llm_setting.parameter,
       temperature: llm_setting.temperature,
       top_p: llm_setting.top_p,
@@ -263,22 +263,11 @@ const SearchSetting: React.FC<SearchSettingProps> = ({
       presence_penalty: llm_setting.presence_penalty,
     } as IllmSettingProps;
 
-    if (!llm_setting.frequencyPenaltyEnabled) {
-      delete llmSetting.frequency_penalty;
-    }
-    if (!llm_setting.presencePenaltyEnabled) {
-      delete llmSetting.presence_penalty;
-    }
-    if (!llm_setting.temperatureEnabled) {
-      delete llmSetting.temperature;
-    }
-    if (!llm_setting.topPEnabled) {
-      delete llmSetting.top_p;
-    }
     await updateSearch({
       ...other_formdata,
       search_config: {
         ...other_config,
+        chat_id: llm_setting.llm_id,
         vector_similarity_weight: 1 - vector_similarity_weight,
         rerank_id: use_rerank ? rerank_id : '',
         llm_setting: { ...llmSetting },
```
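Taken together, the form-side changes persist the chat model under `search_config.chat_id` and stop pruning disabled fields out of `llm_setting` before saving, so the backend always receives a complete configuration with a string model id. That lines up with the reported `'dict' object has no attribute 'split'` error (#9649), which suggests the server expected a plain string where it found a dict, though the server-side code is not part of this diff. A sketch of the `search_config` shape produced by `onSubmit` after the change (all concrete values are placeholders):

```ts
// Illustrative shape of the search_config sent to updateSearch after this change.
const search_config = {
  kb_ids: ['kb-placeholder-id'],
  chat_id: 'model-id@provider', // now carries llm_setting.llm_id
  vector_similarity_weight: 0.7, // submitted as 1 - the form's slider value
  rerank_id: '', // cleared whenever use_rerank is switched off
  llm_setting: {
    // llm_id is no longer embedded here; the model is identified via chat_id above
    parameter: 'Precise',
    temperature: 0.1,
    top_p: 0.3,
    frequency_penalty: 0.7,
    presence_penalty: 0.4,
  },
};
```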
ISearchAppDetailProps:

```diff
@@ -154,6 +154,7 @@ export interface ISearchAppDetailProps {
   search_config: {
     cross_languages: string[];
     doc_ids: string[];
+    chat_id: string;
     highlight: boolean;
     kb_ids: string[];
     keyword: boolean;
```