Compare commits

...

8 Commits

Author SHA1 Message Date
fb0426419e Feat: Create a conversation #3221 (#9269)
### What problem does this PR solve?

Feat: Create a conversation #3221

### Type of change


- [x] New Feature (non-breaking change which adds functionality)
2025-08-06 11:42:40 +08:00
1409bb30df Refactor: Improve the logic so that it does not decode the base64 test image each time (#9264)
### What problem does this PR solve?

Improve the logic so that the base64 test image is not decoded on every call.
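The refactor moves the decode to import time. A minimal sketch of the resulting module, using the names from the diff below (the base64 payload here is just the PNG signature, standing in for the full test image):

```python
# api/utils/base64_image.py after the change
import base64

test_image_base64 = "iVBORw0KGgo="                 # stand-in: PNG signature only
test_image = base64.b64decode(test_image_base64)   # decoded once, at import time

# Callers such as add_llm() now import the ready-made bytes instead of
# decoding the string on every probe:
# from api.utils.base64_image import test_image
# m, tc = mdl.describe(test_image)
```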

### Type of change

- [x] Refactoring
- [x] Performance Improvement

---------

Co-authored-by: Kevin Hu <kevinhu.sh@gmail.com>
2025-08-06 11:42:25 +08:00
7efeaf6548 Fix: remove img close calls that leave images unusable for later operations (#9267)
### What problem does this PR solve?


https://github.com/infiniflow/ragflow/issues/9149#issuecomment-3157129587

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
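
Closing an image inside concat_img() can break callers that still use it afterwards. A standalone Pillow snippet (not from the PR) illustrating the error these removed close() calls could cause:

```python
from PIL import Image

img = Image.new("RGB", (4, 4), "white")
img.close()     # once closed, the image object can no longer be used
img.tobytes()   # raises ValueError: Operation on closed image
```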
2025-08-06 10:59:49 +08:00
46a35f44da Feat: add Claude Opus 4.1 (#9268)
### What problem does this PR solve?

Add Claude Opus 4.1.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
- [x] Refactoring
2025-08-06 10:57:03 +08:00
a7eba61067 FIX: If chunk["content_with_weight"] contains one or more unpaired surrogate characters (such as incomplete emoji or other special characters), then calling .encode("utf-8") directly will raise a UnicodeEncodeError. (#9246)
FIX: If chunk["content_with_weight"] contains one or more unpaired
surrogate characters (such as incomplete emoji or other special
characters), then calling .encode("utf-8") directly will raise a
UnicodeEncodeError.

### What problem does this PR solve?
### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
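
A minimal standalone sketch of the fix (the content string and doc id below are made up; the xxhash call mirrors the diff):

```python
import xxhash

content = "ok\ud83d"         # ends with an unpaired high surrogate, e.g. half of an emoji
# content.encode("utf-8")    # would raise UnicodeEncodeError
chunk_id = xxhash.xxh64(
    (content + "doc_123").encode("utf-8", "surrogatepass")
).hexdigest()
print(chunk_id)
```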
2025-08-06 10:36:50 +08:00
465f7e036a Feat: advanced list dialogs (#9256)
### What problem does this PR solve?

Advanced list dialogs

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
2025-08-06 10:33:52 +08:00
7a27d5e463 Feat: Added history management and paste handling features #3221 (#9266)
### What problem does this PR solve?

feat(agent): Added history management and paste handling features #3221

- Added a PasteHandlerPlugin to handle paste operations, optimizing the
multi-line text pasting experience
- Implemented the AgentHistoryManager class to manage history,
supporting undo and redo functionality
- Integrated history management functionality into the Agent component

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
2025-08-06 10:29:44 +08:00
6a0d6d2565 Added French language support (#9173)
### What problem does this PR solve?
Implemented French UI translation

### Type of change
- [x] New Feature (non-breaking change which adds functionality)

---------

Co-authored-by: ramin cedric <>
Co-authored-by: Liu An <asiro@qq.com>
2025-08-06 10:22:32 +08:00
23 changed files with 1974 additions and 134 deletions

View File

@ -32,7 +32,8 @@ from api.utils.api_utils import get_json_result
@login_required
def set_dialog():
req = request.json
dialog_id = req.get("dialog_id")
dialog_id = req.get("dialog_id", "")
is_create = not dialog_id
name = req.get("name", "New Dialog")
if not isinstance(name, str):
return get_data_error_result(message="Dialog name must be string.")
@ -52,15 +53,16 @@ def set_dialog():
llm_setting = req.get("llm_setting", {})
prompt_config = req["prompt_config"]
if not req.get("kb_ids", []) and not prompt_config.get("tavily_api_key") and "{knowledge}" in prompt_config['system']:
return get_data_error_result(message="Please remove `{knowledge}` in system prompt since no knowledge base/Tavily used here.")
if not is_create:
if not req.get("kb_ids", []) and not prompt_config.get("tavily_api_key") and "{knowledge}" in prompt_config['system']:
return get_data_error_result(message="Please remove `{knowledge}` in system prompt since no knowledge base/Tavily used here.")
for p in prompt_config["parameters"]:
if p["optional"]:
continue
if prompt_config["system"].find("{%s}" % p["key"]) < 0:
return get_data_error_result(
message="Parameter '{}' is not used".format(p["key"]))
for p in prompt_config["parameters"]:
if p["optional"]:
continue
if prompt_config["system"].find("{%s}" % p["key"]) < 0:
return get_data_error_result(
message="Parameter '{}' is not used".format(p["key"]))
try:
e, tenant = TenantService.get_by_id(current_user.id)
@ -153,6 +155,43 @@ def list_dialogs():
return server_error_response(e)
@manager.route('/next', methods=['POST']) # noqa: F821
@login_required
def list_dialogs_next():
keywords = request.args.get("keywords", "")
page_number = int(request.args.get("page", 0))
items_per_page = int(request.args.get("page_size", 0))
parser_id = request.args.get("parser_id")
orderby = request.args.get("orderby", "create_time")
if request.args.get("desc", "true").lower() == "false":
desc = False
else:
desc = True
req = request.get_json()
owner_ids = req.get("owner_ids", [])
try:
if not owner_ids:
# tenants = TenantService.get_joined_tenants_by_user_id(current_user.id)
# tenants = [tenant["tenant_id"] for tenant in tenants]
tenants = [] # keep it here
dialogs, total = DialogService.get_by_tenant_ids(
tenants, current_user.id, page_number,
items_per_page, orderby, desc, keywords, parser_id)
else:
tenants = owner_ids
dialogs, total = DialogService.get_by_tenant_ids(
tenants, current_user.id, 0,
0, orderby, desc, keywords, parser_id)
dialogs = [dialog for dialog in dialogs if dialog["tenant_id"] in tenants]
total = len(dialogs)
if page_number and items_per_page:
dialogs = dialogs[(page_number-1)*items_per_page:page_number*items_per_page]
return get_json_result(data={"dialogs": dialogs, "total": total})
except Exception as e:
return server_error_response(e)
@manager.route('/rm', methods=['POST']) # noqa: F821
@login_required
@validate_request("dialog_ids")

View File

@ -15,7 +15,6 @@
#
import logging
import json
import base64
from flask import request
from flask_login import login_required, current_user
from api.db.services.llm_service import LLMFactoriesService, TenantLLMService, LLMService
@ -24,7 +23,7 @@ from api.utils.api_utils import server_error_response, get_data_error_result, va
from api.db import StatusEnum, LLMType
from api.db.db_models import TenantLLM
from api.utils.api_utils import get_json_result
from api.utils.base64_image import test_image_base64
from api.utils.base64_image import test_image
from rag.llm import EmbeddingModel, ChatModel, RerankModel, CvModel, TTSModel
@ -256,7 +255,7 @@ def add_llm():
base_url=llm["api_base"]
)
try:
image_data = base64.b64decode(test_image_base64)
image_data = test_image
m, tc = mdl.describe(image_data)
if not m and not tc:
raise Exception(m)

View File

@ -23,6 +23,7 @@ from functools import partial
from timeit import default_timer as timer
from langfuse import Langfuse
from peewee import fn
from agentic_reasoning import DeepResearcher
from api import settings
@ -96,6 +97,66 @@ class DialogService(CommonService):
return list(chats.dicts())
@classmethod
@DB.connection_context()
def get_by_tenant_ids(cls, joined_tenant_ids, user_id, page_number, items_per_page, orderby, desc, keywords, parser_id=None):
from api.db.db_models import User
fields = [
cls.model.id,
cls.model.tenant_id,
cls.model.name,
cls.model.description,
cls.model.language,
cls.model.llm_id,
cls.model.llm_setting,
cls.model.prompt_type,
cls.model.prompt_config,
cls.model.similarity_threshold,
cls.model.vector_similarity_weight,
cls.model.top_n,
cls.model.top_k,
cls.model.do_refer,
cls.model.rerank_id,
cls.model.kb_ids,
cls.model.status,
User.nickname,
User.avatar.alias("tenant_avatar"),
cls.model.update_time,
cls.model.create_time,
]
if keywords:
dialogs = (
cls.model.select(*fields)
.join(User, on=(cls.model.tenant_id == User.id))
.where(
(cls.model.tenant_id.in_(joined_tenant_ids) | (cls.model.tenant_id == user_id)) & (cls.model.status == StatusEnum.VALID.value),
(fn.LOWER(cls.model.name).contains(keywords.lower())),
)
)
else:
dialogs = (
cls.model.select(*fields)
.join(User, on=(cls.model.tenant_id == User.id))
.where(
(cls.model.tenant_id.in_(joined_tenant_ids) | (cls.model.tenant_id == user_id)) & (cls.model.status == StatusEnum.VALID.value),
)
)
if parser_id:
dialogs = dialogs.where(cls.model.parser_id == parser_id)
if desc:
dialogs = dialogs.order_by(cls.model.getter_by(orderby).desc())
else:
dialogs = dialogs.order_by(cls.model.getter_by(orderby).asc())
count = dialogs.count()
if page_number and items_per_page:
dialogs = dialogs.paginate(page_number, items_per_page)
return list(dialogs.dicts()), count
def chat_solo(dialog, messages, stream=True):
if TenantLLMService.llm_id2llm_type(dialog.llm_id) == "image2text":
chat_mdl = LLMBundle(dialog.tenant_id, LLMType.IMAGE2TEXT, dialog.llm_id)

View File

@ -1 +1,3 @@
import base64
test_image_base64 = "iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAIAAAD/gAIDAAAA6ElEQVR4nO3QwQ3AIBDAsIP9d25XIC+EZE8QZc18w5l9O+AlZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBWYFZgVmBT+IYAHHLHkdEgAAAABJRU5ErkJggg=="
test_image = base64.b64decode(test_image_base64)

View File

@ -2598,234 +2598,255 @@
"tags": "LLM,TEXT EMBEDDING,TEXT RE-RANK,IMAGE2TEXT",
"status": "1",
"llm": [
{
"llm_name": "Qwen3-Embedding-8B",
"tags": "TEXT EMBEDDING,TEXT RE-RANK,32k",
"max_tokens": 32000,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "Qwen3-Embedding-4B",
"tags": "TEXT EMBEDDING,TEXT RE-RANK,32k",
"max_tokens": 32000,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "Qwen3-Embedding-0.6B",
"tags": "TEXT EMBEDDING,TEXT RE-RANK,32k",
"max_tokens": 32000,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "Qwen/Qwen3-235B-A22B",
"tags": "LLM,CHAT,128k",
"max_tokens": 8192,
"max_tokens": 128000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen3-30B-A3B",
"tags": "LLM,CHAT,128k",
"max_tokens": 8192,
"max_tokens": 128000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen3-32B",
"tags": "LLM,CHAT,128k",
"max_tokens": 8192,
"max_tokens": 128000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen3-14B",
"tags": "LLM,CHAT,128k",
"max_tokens": 8192,
"max_tokens": 128000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen3-8B",
"tags": "LLM,CHAT,64k",
"max_tokens": 8192,
"max_tokens": 64000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/QVQ-72B-Preview",
"tags": "LLM,CHAT,IMAGE2TEXT,32k",
"max_tokens": 16384,
"max_tokens": 32000,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "Pro/deepseek-ai/DeepSeek-R1",
"tags": "LLM,CHAT,64k",
"max_tokens": 16384,
"max_tokens": 64000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek-ai/DeepSeek-R1",
"tags": "LLM,CHAT,64k",
"max_tokens": 16384,
"max_tokens": 64000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Pro/deepseek-ai/DeepSeek-V3",
"tags": "LLM,CHAT,64k",
"max_tokens": 8192,
"max_tokens": 64000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek-ai/DeepSeek-V3",
"tags": "LLM,CHAT,64k",
"max_tokens": 8192,
"max_tokens": 64000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Pro/deepseek-ai/DeepSeek-V3-1226",
"tags": "LLM,CHAT,64k",
"max_tokens": 4096,
"max_tokens": 64000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
"tags": "LLM,CHAT,32k",
"max_tokens": 16384,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
"tags": "LLM,CHAT,32k",
"max_tokens": 16384,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
"tags": "LLM,CHAT,32k",
"max_tokens": 16384,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
"tags": "LLM,CHAT,32k",
"max_tokens": 16384,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
"tags": "LLM,CHAT,32k",
"max_tokens": 16384,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
"tags": "LLM,CHAT,32k",
"max_tokens": 16384,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek-ai/DeepSeek-V2.5",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/QwQ-32B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen2.5-VL-72B-Instruct",
"tags": "LLM,CHAT,IMAGE2TEXT,128k",
"max_tokens": 4096,
"max_tokens": 128000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "Pro/Qwen/Qwen2.5-VL-7B-Instruct",
"tags": "LLM,CHAT,IMAGE2TEXT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "THUDM/GLM-Z1-32B-0414",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "THUDM/GLM-4-32B-0414",
"tags": "LLM,CHAT,32k",
"max_tokens": 8192,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "THUDM/GLM-Z1-9B-0414",
"tags": "LLM,CHAT,32k",
"max_tokens": 8192,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "THUDM/GLM-4-9B-0414",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "THUDM/chatglm3-6b",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Pro/THUDM/glm-4-9b-chat",
"tags": "LLM,CHAT,128k",
"max_tokens": 4096,
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "THUDM/GLM-Z1-Rumination-32B-0414",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "THUDM/glm-4-9b-chat",
"tags": "LLM,CHAT,128k",
"max_tokens": 4096,
"max_tokens": 128000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/QwQ-32B-Preview",
"tags": "LLM,CHAT,32k",
"max_tokens": 8192,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen/Qwen2.5-Coder-32B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen/Qwen2-VL-72B-Instruct",
"tags": "LLM,IMAGE2TEXT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "Qwen/Qwen2.5-72B-Instruct-128Kt",
"tags": "LLM,IMAGE2TEXT,128k",
"max_tokens": 4096,
"max_tokens": 128000,
"model_type": "image2text",
"is_tools": false
},
@ -2839,98 +2860,98 @@
{
"llm_name": "Qwen/Qwen2.5-72B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen2.5-32B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen2.5-14B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen2.5-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen2.5-Coder-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "internlm/internlm2_5-20b-chat",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "internlm/internlm2_5-7b-chat",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen2-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen/Qwen2-1.5B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Pro/Qwen/Qwen2.5-Coder-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Pro/Qwen/Qwen2-VL-7B-Instruct",
"tags": "LLM,CHAT,IMAGE2TEXT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "Pro/Qwen/Qwen2.5-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Pro/Qwen/Qwen2-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Pro/Qwen/Qwen2-1.5B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 4096,
"max_tokens": 32000,
"model_type": "chat",
"is_tools": false
},
@ -3267,45 +3288,52 @@
"status": "1",
"llm": [
{
"llm_name": "claude-opus-4-20250514",
"tags": "LLM,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "claude-sonnet-4-20250514",
"tags": "LLM,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "claude-3-7-sonnet-20250219",
"tags": "LLM,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "claude-3-5-sonnet-20241022",
"tags": "LLM,IMAGE2TEXT,200k",
"llm_name": "claude-opus-4-1-20250805",
"tags": "LLM,CHAT,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-opus-20240229",
"tags": "LLM,IMAGE2TEXT,200k",
"llm_name": "claude-opus-4-20250514",
"tags": "LLM,CHAT,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-sonnet-4-20250514",
"tags": "LLM,CHAT,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-7-sonnet-20250219",
"tags": "LLM,CHAT,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-5-sonnet-20241022",
"tags": "LLM,CHAT,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-5-haiku-20241022",
"tags": "LLM,CHAT,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-haiku-20240307",
"tags": "LLM,IMAGE2TEXT,200k",
"tags": "LLM,CHAT,IMAGE2TEXT,200k",
"max_tokens": 204800,
"model_type": "image2text",
"model_type": "chat",
"is_tools": true
}
]

View File

@ -642,7 +642,6 @@ def concat_img(img1, img2):
pixel_data1 = img1.tobytes()
pixel_data2 = img2.tobytes()
if pixel_data1 == pixel_data2:
img2.close()
return img1
width1, height1 = img1.size
@ -654,8 +653,6 @@ def concat_img(img1, img2):
new_image.paste(img1, (0, 0))
new_image.paste(img2, (0, height1))
img1.close()
img2.close()
return new_image

View File

@ -284,7 +284,7 @@ async def build_chunks(task, progress_callback):
try:
d = copy.deepcopy(document)
d.update(chunk)
d["id"] = xxhash.xxh64((chunk["content_with_weight"] + str(d["doc_id"])).encode("utf-8")).hexdigest()
d["id"] = xxhash.xxh64((chunk["content_with_weight"] + str(d["doc_id"])).encode("utf-8", "surrogatepass")).hexdigest()
d["create_time"] = str(datetime.now()).replace("T", " ")[:19]
d["create_timestamp_flt"] = datetime.now().timestamp()
if not d.get("image"):

View File

@ -49,8 +49,8 @@ export const LanguageList = [
'Japanese',
'Portuguese BR',
'German',
'French',
];
export const LanguageMap = {
English: 'English',
Chinese: '简体中文',
@ -61,6 +61,7 @@ export const LanguageMap = {
Japanese: '日本語',
'Portuguese BR': 'Português BR',
German: 'German',
French: 'Français',
};
export enum LanguageAbbreviation {
@ -73,6 +74,7 @@ export enum LanguageAbbreviation {
Vi = 'vi',
PtBr = 'pt-BR',
De = 'de',
Fr = 'fr',
}
export const LanguageAbbreviationMap = {
@ -85,6 +87,7 @@ export const LanguageAbbreviationMap = {
[LanguageAbbreviation.Ja]: '日本語',
[LanguageAbbreviation.PtBr]: 'Português BR',
[LanguageAbbreviation.De]: 'Deutsch',
[LanguageAbbreviation.Fr]: 'Français',
};
export const LanguageTranslationMap = {
@ -97,6 +100,7 @@ export const LanguageTranslationMap = {
Japanese: 'ja',
'Portuguese BR': 'pt-br',
German: 'de',
French: 'fr',
};
export enum FileMimeType {

View File

@ -1,12 +1,12 @@
import message from '@/components/ui/message';
import { ChatSearchParams } from '@/constants/chat';
import { IDialog } from '@/interfaces/database/chat';
import chatService from '@/services/chat-service';
import chatService from '@/services/next-chat-service ';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { useDebounce } from 'ahooks';
import { useCallback, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { history, useSearchParams } from 'umi';
import { useParams, useSearchParams } from 'umi';
import {
useGetPaginationWithRouter,
useHandleSearchChange,
@ -16,6 +16,7 @@ export const enum ChatApiAction {
FetchDialogList = 'fetchDialogList',
RemoveDialog = 'removeDialog',
SetDialog = 'setDialog',
FetchDialog = 'fetchDialog',
}
export const useGetChatSearchParams = () => {
@ -52,9 +53,7 @@ export const useClickDialogCard = () => {
return { handleClickDialog };
};
export const useFetchDialogList = (pureFetch = false) => {
const { handleClickDialog } = useClickDialogCard();
const { dialogId } = useGetChatSearchParams();
export const useFetchDialogList = () => {
const { searchString, handleInputChange } = useHandleSearchChange();
const { pagination, setPagination } = useGetPaginationWithRouter();
const debouncedSearchString = useDebounce(searchString, { wait: 500 });
@ -63,7 +62,7 @@ export const useFetchDialogList = (pureFetch = false) => {
data,
isFetching: loading,
refetch,
} = useQuery<IDialog[]>({
} = useQuery<{ dialogs: IDialog[]; total: number }>({
queryKey: [
ChatApiAction.FetchDialogList,
{
@ -71,27 +70,17 @@ export const useFetchDialogList = (pureFetch = false) => {
...pagination,
},
],
initialData: [],
initialData: { dialogs: [], total: 0 },
gcTime: 0,
refetchOnWindowFocus: false,
queryFn: async (...params) => {
console.log('🚀 ~ queryFn: ~ params:', params);
const { data } = await chatService.listDialog();
queryFn: async () => {
const { data } = await chatService.listDialog({
keywords: debouncedSearchString,
page_size: pagination.pageSize,
page: pagination.current,
});
if (data.code === 0) {
const list: IDialog[] = data.data;
if (!pureFetch) {
if (list.length > 0) {
if (list.every((x) => x.id !== dialogId)) {
handleClickDialog(data.data[0].id);
}
} else {
history.push('/chat');
}
}
}
return data?.data ?? [];
return data?.data ?? { dialogs: [], total: 0 };
},
});
@ -147,17 +136,14 @@ export const useSetDialog = () => {
mutateAsync,
} = useMutation({
mutationKey: [ChatApiAction.SetDialog],
mutationFn: async (params: IDialog) => {
mutationFn: async (params: Partial<IDialog>) => {
const { data } = await chatService.setDialog(params);
if (data.code === 0) {
queryClient.invalidateQueries({
exact: false,
queryKey: ['fetchDialogList'],
queryKey: [ChatApiAction.FetchDialogList],
});
queryClient.invalidateQueries({
queryKey: ['fetchDialog'],
});
message.success(
t(`message.${params.dialog_id ? 'modified' : 'created'}`),
);
@ -168,3 +154,29 @@ export const useSetDialog = () => {
return { data, loading, setDialog: mutateAsync };
};
export const useFetchDialog = () => {
const { id } = useParams();
const {
data,
isFetching: loading,
refetch,
} = useQuery<IDialog>({
queryKey: [ChatApiAction.FetchDialog, id],
gcTime: 0,
initialData: {} as IDialog,
enabled: !!id,
refetchOnWindowFocus: false,
queryFn: async () => {
const { data } = await chatService.getDialog(
{ params: { dialogId: id } },
true,
);
return data?.data ?? ({} as IDialog);
},
});
return { data, loading, refetch };
};

View File

@ -6,6 +6,7 @@ import { LanguageAbbreviation } from '@/constants/common';
import translation_de from './de';
import translation_en from './en';
import translation_es from './es';
import translation_fr from './fr';
import translation_id from './id';
import translation_ja from './ja';
import translation_pt_br from './pt-br';
@ -24,6 +25,7 @@ const resources = {
[LanguageAbbreviation.Vi]: translation_vi,
[LanguageAbbreviation.PtBr]: translation_pt_br,
[LanguageAbbreviation.De]: translation_de,
[LanguageAbbreviation.Fr]: translation_fr,
};
const enFlattened = flattenObject(translation_en);
const viFlattened = flattenObject(translation_vi);
@ -33,6 +35,7 @@ const jaFlattened = flattenObject(translation_ja);
const pt_brFlattened = flattenObject(translation_pt_br);
const zh_traditionalFlattened = flattenObject(translation_zh_traditional);
const deFlattened = flattenObject(translation_de);
const frFlattened = flattenObject(translation_fr);
export const translationTable = createTranslationTable(
[
enFlattened,
@ -43,6 +46,7 @@ export const translationTable = createTranslationTable(
jaFlattened,
pt_brFlattened,
deFlattened,
frFlattened,
],
[
'English',
@ -53,6 +57,7 @@ export const translationTable = createTranslationTable(
'ja',
'pt-BR',
'Deutsch',
'French',
],
);
i18n

web/src/locales/fr.ts (new file, 1261 lines)

File diff suppressed because it is too large

View File

@ -26,6 +26,7 @@ import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext
import { Variable } from 'lucide-react';
import { ReactNode, useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { PasteHandlerPlugin } from './paste-handler-plugin';
import theme from './theme';
import { VariableNode } from './variable-node';
import { VariableOnChangePlugin } from './variable-on-change-plugin';
@ -172,6 +173,7 @@ export function PromptEditor({
ErrorBoundary={LexicalErrorBoundary}
/>
<VariablePickerMenuPlugin value={value}></VariablePickerMenuPlugin>
<PasteHandlerPlugin />
<VariableOnChangePlugin
onChange={onValueChange}
></VariableOnChangePlugin>

View File

@ -0,0 +1,83 @@
import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext';
import {
$createParagraphNode,
$createTextNode,
$getSelection,
$isRangeSelection,
PASTE_COMMAND,
} from 'lexical';
import { useEffect } from 'react';
function PasteHandlerPlugin() {
const [editor] = useLexicalComposerContext();
useEffect(() => {
const removeListener = editor.registerCommand(
PASTE_COMMAND,
(clipboardEvent: ClipboardEvent) => {
const clipboardData = clipboardEvent.clipboardData;
if (!clipboardData) {
return false;
}
const text = clipboardData.getData('text/plain');
if (!text) {
return false;
}
// Check if text contains line breaks
if (text.includes('\n')) {
editor.update(() => {
const selection = $getSelection();
if (selection && $isRangeSelection(selection)) {
// Normalize line breaks, merge multiple consecutive line breaks into a single line break
const normalizedText = text.replace(/\n{2,}/g, '\n');
// Clear current selection
selection.removeText();
// Create a paragraph node to contain all content
const paragraph = $createParagraphNode();
// Split text by line breaks
const lines = normalizedText.split('\n');
// Process each line
lines.forEach((lineText, index) => {
// Add line text (if any)
if (lineText) {
const textNode = $createTextNode(lineText);
paragraph.append(textNode);
}
// If not the last line, add a line break
if (index < lines.length - 1) {
const lineBreak = $createTextNode('\n');
paragraph.append(lineBreak);
}
});
// Insert paragraph
selection.insertNodes([paragraph]);
}
});
// Prevent default paste behavior
clipboardEvent.preventDefault();
return true;
}
// If no line breaks, use default behavior
return false;
},
4,
);
return () => {
removeListener();
};
}, [editor]);
return null;
}
export { PasteHandlerPlugin };

View File

@ -44,6 +44,7 @@ import {
} from './hooks/use-save-graph';
import { useShowEmbedModal } from './hooks/use-show-dialog';
import { UploadAgentDialog } from './upload-agent-dialog';
import { useAgentHistoryManager } from './use-agent-history-manager';
import { VersionDialog } from './version-dialog';
function AgentDropdownMenuItem({
@ -66,8 +67,7 @@ export default function Agent() {
showModal: showChatDrawer,
} = useSetModalState();
const { t } = useTranslation();
// const openDocument = useOpenDocument();
useAgentHistoryManager();
const {
handleExportJson,
handleImportJson,

View File

@ -57,6 +57,10 @@ export const LanguageOptions = [
value: 'de',
label: 'Deutsch',
},
{
value: 'fr',
label: 'Français',
},
{
value: 'et',
label: 'Eesti',

View File

@ -0,0 +1,163 @@
import { useEffect, useRef } from 'react';
import useGraphStore from './store';
// History management class
export class HistoryManager {
private history: { nodes: any[]; edges: any[] }[] = [];
private currentIndex: number = -1;
private readonly maxSize: number = 50; // Limit maximum number of history records
private setNodes: (nodes: any[]) => void;
private setEdges: (edges: any[]) => void;
private lastSavedState: string = ''; // Used to compare if state has changed
constructor(
setNodes: (nodes: any[]) => void,
setEdges: (edges: any[]) => void,
) {
this.setNodes = setNodes;
this.setEdges = setEdges;
}
// Compare if two states are equal
private statesEqual(
state1: { nodes: any[]; edges: any[] },
state2: { nodes: any[]; edges: any[] },
): boolean {
return JSON.stringify(state1) === JSON.stringify(state2);
}
push(nodes: any[], edges: any[]) {
const currentState = {
nodes: JSON.parse(JSON.stringify(nodes)),
edges: JSON.parse(JSON.stringify(edges)),
};
// If state hasn't changed, don't save
if (
this.history.length > 0 &&
this.statesEqual(currentState, this.history[this.currentIndex])
) {
return;
}
// If current index is not at the end of history, remove subsequent states
if (this.currentIndex < this.history.length - 1) {
this.history.splice(this.currentIndex + 1);
}
// Add current state
this.history.push(currentState);
// Limit history record size
if (this.history.length > this.maxSize) {
this.history.shift();
this.currentIndex = this.history.length - 1;
} else {
this.currentIndex = this.history.length - 1;
}
// Update last saved state
this.lastSavedState = JSON.stringify(currentState);
}
undo() {
if (this.canUndo()) {
this.currentIndex--;
const prevState = this.history[this.currentIndex];
this.setNodes(JSON.parse(JSON.stringify(prevState.nodes)));
this.setEdges(JSON.parse(JSON.stringify(prevState.edges)));
return true;
}
return false;
}
redo() {
console.log('redo');
if (this.canRedo()) {
this.currentIndex++;
const nextState = this.history[this.currentIndex];
this.setNodes(JSON.parse(JSON.stringify(nextState.nodes)));
this.setEdges(JSON.parse(JSON.stringify(nextState.edges)));
return true;
}
return false;
}
canUndo() {
return this.currentIndex > 0;
}
canRedo() {
return this.currentIndex < this.history.length - 1;
}
// Reset history records
reset() {
this.history = [];
this.currentIndex = -1;
this.lastSavedState = '';
}
}
export const useAgentHistoryManager = () => {
// Get current state and history state
const nodes = useGraphStore((state) => state.nodes);
const edges = useGraphStore((state) => state.edges);
const setNodes = useGraphStore((state) => state.setNodes);
const setEdges = useGraphStore((state) => state.setEdges);
// Use useRef to keep HistoryManager instance unchanged
const historyManagerRef = useRef<HistoryManager | null>(null);
// Initialize HistoryManager
if (!historyManagerRef.current) {
historyManagerRef.current = new HistoryManager(setNodes, setEdges);
}
const historyManager = historyManagerRef.current;
// Save state history - use useEffect instead of useMemo to avoid re-rendering
useEffect(() => {
historyManager.push(nodes, edges);
}, [nodes, edges, historyManager]);
// Keyboard event handling
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
// Check if focused on an input element
const activeElement = document.activeElement;
const isInputFocused =
activeElement instanceof HTMLInputElement ||
activeElement instanceof HTMLTextAreaElement ||
activeElement?.hasAttribute('contenteditable');
// Skip keyboard shortcuts if typing in an input field
if (isInputFocused) {
return;
}
// Ctrl+Z or Cmd+Z undo
if (
(e.ctrlKey || e.metaKey) &&
(e.key === 'z' || e.key === 'Z') &&
!e.shiftKey
) {
e.preventDefault();
historyManager.undo();
}
// Ctrl+Shift+Z or Cmd+Shift+Z redo
else if (
(e.ctrlKey || e.metaKey) &&
(e.key === 'z' || e.key === 'Z') &&
e.shiftKey
) {
e.preventDefault();
historyManager.redo();
}
};
document.addEventListener('keydown', handleKeyDown);
return () => {
document.removeEventListener('keydown', handleKeyDown);
};
}, [historyManager]);
};

View File

@ -835,6 +835,10 @@ export const LanguageOptions = [
value: 'de',
label: 'Deutsch',
},
{
value: 'fr',
label: 'Français',
},
{
value: 'et',
label: 'Eesti',

View File

@ -2,9 +2,9 @@ import { useFetchDialogList } from '@/hooks/use-chat-request';
import { ApplicationCard } from './application-card';
export function ChatList() {
const { data } = useFetchDialogList(true);
const { data } = useFetchDialogList();
return data
return data.dialogs
.slice(0, 10)
.map((x) => (
<ApplicationCard

View File

@ -1,6 +1,7 @@
import { PageHeader } from '@/components/page-header';
import { Button } from '@/components/ui/button';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { useFetchDialog } from '@/hooks/use-chat-request';
import { EllipsisVertical } from 'lucide-react';
import { AppSettings } from './app-settings';
import { ChatBox } from './chat-box';
@ -8,10 +9,11 @@ import { Sessions } from './sessions';
export default function Chat() {
const { navigateToChatList } = useNavigatePage();
useFetchDialog();
return (
<section className="h-full flex flex-col">
<PageHeader back={navigateToChatList} title="Chat app 01">
<PageHeader>
<div className="flex items-center gap-2">
<Button variant={'icon'} size={'icon'}>
<EllipsisVertical />

View File

@ -1,8 +1,33 @@
import { useSetModalState } from '@/hooks/common-hooks';
import { useSetDialog } from '@/hooks/use-chat-request';
import { IDialog } from '@/interfaces/database/chat';
import { isEmpty } from 'lodash';
import { useCallback, useState } from 'react';
const InitialData = {
name: '',
icon: '',
language: 'English',
prompt_config: {
empty_response: '',
prologue: '你好! 我是你的助理,有什么可以帮到你的吗?',
quote: true,
keyword: false,
tts: false,
system:
'你是一个智能助手,请总结知识库的内容来回答问题,请列举知识库中的数据详细回答。当所有知识库内容都与问题无关时,你的回答必须包括“知识库中未找到您要的答案!”这句话。回答需要考虑聊天历史。\n 以下是知识库:\n {knowledge}\n 以上是知识库。',
refine_multiturn: false,
use_kg: false,
reasoning: false,
parameters: [{ key: 'knowledge', optional: false }],
},
llm_id: '',
llm_setting: {},
similarity_threshold: 0.2,
vector_similarity_weight: 0.30000000000000004,
top_n: 8,
};
export const useRenameChat = () => {
const [chat, setChat] = useState<IDialog>({} as IDialog);
const {
@ -14,10 +39,11 @@ export const useRenameChat = () => {
const onChatRenameOk = useCallback(
async (name: string) => {
const ret = await setDialog({
...chat,
const nextChat = {
...(isEmpty(chat) ? InitialData : chat),
name,
});
};
const ret = await setDialog(nextChat);
if (ret === 0) {
hideChatRenameModal();
@ -27,19 +53,26 @@ export const useRenameChat = () => {
);
const handleShowChatRenameModal = useCallback(
async (record: IDialog) => {
setChat(record);
(record?: IDialog) => {
if (record) {
setChat(record);
}
showChatRenameModal();
},
[showChatRenameModal],
);
const handleHideModal = useCallback(() => {
hideChatRenameModal();
setChat({} as IDialog);
}, [hideChatRenameModal]);
return {
chatRenameLoading: loading,
initialChatName: chat?.name,
onChatRenameOk,
chatRenameVisible,
hideChatRenameModal,
hideChatRenameModal: handleHideModal,
showChatRenameModal: handleShowChatRenameModal,
};
};

View File

@ -11,7 +11,7 @@ import { ChatCard } from './chat-card';
import { useRenameChat } from './hooks/use-rename-chat';
export default function ChatList() {
const { data: chatList, setPagination, pagination } = useFetchDialogList();
const { data, setPagination, pagination } = useFetchDialogList();
const { t } = useTranslation();
const {
initialChatName,
@ -29,11 +29,15 @@ export default function ChatList() {
[setPagination],
);
const handleShowCreateModal = useCallback(() => {
showChatRenameModal();
}, [showChatRenameModal]);
return (
<section className="flex flex-col w-full flex-1">
<div className="px-8 pt-8">
<ListFilterBar title="Chat apps">
<Button>
<Button onClick={handleShowCreateModal}>
<Plus className="size-2.5" />
{t('chat.createChat')}
</Button>
@ -41,7 +45,7 @@ export default function ChatList() {
</div>
<div className="flex-1 overflow-auto">
<div className="flex flex-wrap gap-4 px-8">
{chatList.map((x) => {
{data.dialogs.map((x) => {
return (
<ChatCard
key={x.id}
@ -65,6 +69,7 @@ export default function ChatList() {
onOk={onChatRenameOk}
initialName={initialChatName}
loading={chatRenameLoading}
title={initialChatName || t('chat.createChat')}
></RenameDialog>
)}
</section>

View File

@ -0,0 +1,133 @@
import api from '@/utils/api';
import { registerNextServer } from '@/utils/register-server';
const {
getDialog,
setDialog,
listDialog,
removeDialog,
getConversation,
getConversationSSE,
setConversation,
completeConversation,
listConversation,
removeConversation,
createToken,
listToken,
removeToken,
getStats,
createExternalConversation,
getExternalConversation,
completeExternalConversation,
uploadAndParseExternal,
deleteMessage,
thumbup,
tts,
ask,
mindmap,
getRelatedQuestions,
listNextDialog,
} = api;
const methods = {
getDialog: {
url: getDialog,
method: 'get',
},
setDialog: {
url: setDialog,
method: 'post',
},
removeDialog: {
url: removeDialog,
method: 'post',
},
listDialog: {
url: listNextDialog,
method: 'post',
},
listConversation: {
url: listConversation,
method: 'get',
},
getConversation: {
url: getConversation,
method: 'get',
},
getConversationSSE: {
url: getConversationSSE,
method: 'get',
},
setConversation: {
url: setConversation,
method: 'post',
},
completeConversation: {
url: completeConversation,
method: 'post',
},
removeConversation: {
url: removeConversation,
method: 'post',
},
createToken: {
url: createToken,
method: 'post',
},
listToken: {
url: listToken,
method: 'get',
},
removeToken: {
url: removeToken,
method: 'post',
},
getStats: {
url: getStats,
method: 'get',
},
createExternalConversation: {
url: createExternalConversation,
method: 'get',
},
getExternalConversation: {
url: getExternalConversation,
method: 'get',
},
completeExternalConversation: {
url: completeExternalConversation,
method: 'post',
},
uploadAndParseExternal: {
url: uploadAndParseExternal,
method: 'post',
},
deleteMessage: {
url: deleteMessage,
method: 'post',
},
thumbup: {
url: thumbup,
method: 'post',
},
tts: {
url: tts,
method: 'post',
},
ask: {
url: ask,
method: 'post',
},
getMindMap: {
url: mindmap,
method: 'post',
},
getRelatedQuestions: {
url: getRelatedQuestions,
method: 'post',
},
} as const;
const chatService = registerNextServer<keyof typeof methods>(methods);
export default chatService;

View File

@ -108,6 +108,9 @@ export default {
completeExternalConversation: `${api_host}/api/completion`,
uploadAndParseExternal: `${api_host}/api/document/upload_and_parse`,
// next chat
listNextDialog: `${api_host}/dialog/next`,
// file manager
listFile: `${api_host}/file/list`,
uploadFile: `${api_host}/file/upload`,