Mirror of https://github.com/infiniflow/ragflow.git (synced 2026-02-03 17:15:08 +08:00)
Compare commits
11 commits: 1deb0a2d42 ... 9295c23170
| Author | SHA1 | Date |
|---|---|---|
| | 9295c23170 | |
| | 023b090fa4 | |
| | 2124329e95 | |
| | ed9757b0c7 | |
| | f235a38225 | |
| | 550e65bb22 | |
| | a264c629b5 | |
| | e6bad45c6d | |
| | 0a303d9ae1 | |
| | 98a83543e8 | |
| | afd3a508e5 | |
@@ -87,7 +87,8 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Latest Updates

- 2025-08-01 Supports agentic workflow.
- 2025-08-04 Supports new models, including Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Adds a Python/JavaScript code executor component to Agent.
- 2025-05-05 Supports cross-language query.
- 2025-03-19 Supports using a multi-modal model to make sense of images within PDF or DOCX files.

@@ -80,7 +80,8 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Latest Updates

- 2025-08-01 Supports agentic workflow.
- 2025-08-04 Supports new models, including Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Adds a Python/JS code executor component to Agent.
- 2025-05-05 Supports cross-language queries.
- 2025-03-19 Supports using a multi-modal model to understand images in PDF or DOCX files.

@@ -60,7 +60,8 @@

## 🔥 Latest Updates

- 2025-08-01 Supports agentic workflow.
- 2025-08-04 Supports new models, including Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Added a Python/JS code executor component to Agent.
- 2025-05-05 Supports cross-language queries.
- 2025-03-19 Supports using a multi-modal model to understand images in PDF or DOCX files.

@@ -60,7 +60,8 @@

## 🔥 Updates

- 2025-08-01 Supports agentic workflow.
- 2025-08-04 Supports new models, including Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Adds a Python/JS code executor component to Agent.
- 2025-05-05 Supports cross-language queries.
- 2025-03-19 Supports using a multi-modal model to understand images in PDF or DOCX files.

@@ -80,7 +80,8 @@ Try our demo at [https://demo.ragflow.io](https://demo.ragflow.io).

## 🔥 Latest Updates

- 01-08-2025 Supports agentic workflow.
- 04-08-2025 Supports new models, including Kimi K2 and Grok 4.
- 01-08-2025 Supports agentic workflow and MCP.
- 23-05-2025 Adds a Python/JS code executor component to Agent.
- 05-05-2025 Supports cross-language queries.
- 19-03-2025 Supports using a multi-modal model to understand images in PDF or DOCX files.

@@ -83,7 +83,8 @@

## 🔥 Latest Updates

- 2025-08-01 Supports agentic workflow.
- 2025-08-04 Supports models such as Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Adds a Python/JS code executor component to Agent.
- 2025-05-05 Supports cross-language queries.
- 2025-03-19 Supports using a multi-modal model to parse images in PDF and DOCX files into descriptions.

@@ -83,7 +83,8 @@

## 🔥 Latest Updates

- 2025-08-01 Supports agentic workflow.
- 2025-08-04 Adds support for models such as Kimi K2 and Grok 4.
- 2025-08-01 Supports agentic workflow and MCP.
- 2025-05-23 Adds a Python/JS code executor component to Agent.
- 2025-05-05 Supports cross-language queries.
- 2025-03-19 Supports using a multi-modal model to parse images in PDF and DOCX files into descriptions.
File diff suppressed because one or more lines are too long
@@ -89,11 +89,11 @@
"presence_penalty": 0.4,
"prompts": [
{
"content": "{sys.query}",
"content": "The user query is {sys.query}\n\nThe relevant document are {Retrieval:ShyPumasJoke@formalized_content}",
"role": "user"
}
],
"sys_prompt": "You are a highly professional product information advisor. \n\nYour only mission is to provide accurate, factual, and structured answers to all product-related queries.\n\nAbsolutely no assumptions, guesses, or fabricated content are allowed. \n\n**Key Principles:**\n\n1. **Strict Database Reliance:** \n\n - Every answer must be based solely on the verified product information stored in the database accessed through the Retrieval tool. \n\n - You are NOT allowed to invent, speculate, or infer details beyond what is retrieved. \n\n - If you cannot find relevant data, respond with: *\"I cannot find this information in our official product database. Please check back later or provide more details for further search.\"*\n\n2. **Information Accuracy and Structure:** \n\n - Provide information in a clear, concise, and professional way. \n\n - Use bullet points or numbered lists if there are multiple key points (e.g., features, price, warranty, technical specifications). \n\n - Always specify the version or model number when applicable to avoid confusion.\n\n3. **Tone and Style:** \n\n - Maintain a polite, professional, and helpful tone at all times. \n\n - Avoid marketing exaggeration or promotional language; stay strictly factual. \n\n - Do not express personal opinions; only cite official product data.\n\n4. **User Guidance:** \n\n - If the user\u2019s query is unclear or too broad, politely request clarification or guide them to provide more specific product details (e.g., product name, model, version). \n\n - Example: *\"Could you please specify the product model or category so I can retrieve the most relevant information for you?\"*\n\n5. **Response Length and Formatting:** \n\n - Keep each answer within 100\u2013150 words for general queries. \n\n - For complex or multi-step explanations, you may extend to 200\u2013250 words, but always remain clear and well-structured.\n\n6. **Critical Reminder:** \n\nYour authority and reliability depend entirely on database-driven responses. Any fabricated, speculative, or unverified content will be considered a critical failure of your role.\n\nAlways begin processing a query by accessing the Retrieval tool, confirming the data source, and then structuring your response according to the above principles.\n\n",
"sys_prompt": "You are a highly professional product information advisor. \n\nYour only mission is to provide accurate, factual, and structured answers to all product-related queries.\n\nAbsolutely no assumptions, guesses, or fabricated content are allowed. \n\n**Key Principles:**\n\n1. **Strict Database Reliance:** \n\n - Every answer must be based solely on the verified product information stored in the relevant documen.\n\n - You are NOT allowed to invent, speculate, or infer details beyond what is retrieved. \n\n - If you cannot find relevant data, respond with: *\"I cannot find this information in our official product database. Please check back later or provide more details for further search.\"*\n\n2. **Information Accuracy and Structure:** \n\n - Provide information in a clear, concise, and professional way. \n\n - Use bullet points or numbered lists if there are multiple key points (e.g., features, price, warranty, technical specifications). \n\n - Always specify the version or model number when applicable to avoid confusion.\n\n3. **Tone and Style:** \n\n - Maintain a polite, professional, and helpful tone at all times. \n\n - Avoid marketing exaggeration or promotional language; stay strictly factual. \n\n - Do not express personal opinions; only cite official product data.\n\n4. **User Guidance:** \n\n - If the user\u2019s query is unclear or too broad, politely request clarification or guide them to provide more specific product details (e.g., product name, model, version). \n\n - Example: *\"Could you please specify the product model or category so I can retrieve the most relevant information for you?\"*\n\n5. **Response Length and Formatting:** \n\n - Keep each answer within 100\u2013150 words for general queries. \n\n - For complex or multi-step explanations, you may extend to 200\u2013250 words, but always remain clear and well-structured.\n\n6. **Critical Reminder:** \n\nYour authority and reliability depend entirely on the relevant document responses. Any fabricated, speculative, or unverified content will be considered a critical failure of your role.\n\n\n",
"temperature": 0.1,
"temperatureEnabled": true,
"tools": [],
@@ -699,7 +699,7 @@
"width": 200
},
"position": {
"x": 644.5771854408022,
"x": 645.6873721057459,
"y": 516.6923702571407
},
"selected": false,
@@ -735,11 +735,11 @@
"presence_penalty": 0.4,
"prompts": [
{
"content": "{sys.query}",
"content": "The user query is {sys.query}\n\nThe relevant document are {Retrieval:ShyPumasJoke@formalized_content}",
"role": "user"
}
],
"sys_prompt": "You are a highly professional product information advisor. \n\nYour only mission is to provide accurate, factual, and structured answers to all product-related queries.\n\nAbsolutely no assumptions, guesses, or fabricated content are allowed. \n\n**Key Principles:**\n\n1. **Strict Database Reliance:** \n\n - Every answer must be based solely on the verified product information stored in the database accessed through the Retrieval tool. \n\n - You are NOT allowed to invent, speculate, or infer details beyond what is retrieved. \n\n - If you cannot find relevant data, respond with: *\"I cannot find this information in our official product database. Please check back later or provide more details for further search.\"*\n\n2. **Information Accuracy and Structure:** \n\n - Provide information in a clear, concise, and professional way. \n\n - Use bullet points or numbered lists if there are multiple key points (e.g., features, price, warranty, technical specifications). \n\n - Always specify the version or model number when applicable to avoid confusion.\n\n3. **Tone and Style:** \n\n - Maintain a polite, professional, and helpful tone at all times. \n\n - Avoid marketing exaggeration or promotional language; stay strictly factual. \n\n - Do not express personal opinions; only cite official product data.\n\n4. **User Guidance:** \n\n - If the user\u2019s query is unclear or too broad, politely request clarification or guide them to provide more specific product details (e.g., product name, model, version). \n\n - Example: *\"Could you please specify the product model or category so I can retrieve the most relevant information for you?\"*\n\n5. **Response Length and Formatting:** \n\n - Keep each answer within 100\u2013150 words for general queries. \n\n - For complex or multi-step explanations, you may extend to 200\u2013250 words, but always remain clear and well-structured.\n\n6. **Critical Reminder:** \n\nYour authority and reliability depend entirely on database-driven responses. Any fabricated, speculative, or unverified content will be considered a critical failure of your role.\n\nAlways begin processing a query by accessing the Retrieval tool, confirming the data source, and then structuring your response according to the above principles.\n\n",
"sys_prompt": "You are a highly professional product information advisor. \n\nYour only mission is to provide accurate, factual, and structured answers to all product-related queries.\n\nAbsolutely no assumptions, guesses, or fabricated content are allowed. \n\n**Key Principles:**\n\n1. **Strict Database Reliance:** \n\n - Every answer must be based solely on the verified product information stored in the relevant documen.\n\n - You are NOT allowed to invent, speculate, or infer details beyond what is retrieved. \n\n - If you cannot find relevant data, respond with: *\"I cannot find this information in our official product database. Please check back later or provide more details for further search.\"*\n\n2. **Information Accuracy and Structure:** \n\n - Provide information in a clear, concise, and professional way. \n\n - Use bullet points or numbered lists if there are multiple key points (e.g., features, price, warranty, technical specifications). \n\n - Always specify the version or model number when applicable to avoid confusion.\n\n3. **Tone and Style:** \n\n - Maintain a polite, professional, and helpful tone at all times. \n\n - Avoid marketing exaggeration or promotional language; stay strictly factual. \n\n - Do not express personal opinions; only cite official product data.\n\n4. **User Guidance:** \n\n - If the user\u2019s query is unclear or too broad, politely request clarification or guide them to provide more specific product details (e.g., product name, model, version). \n\n - Example: *\"Could you please specify the product model or category so I can retrieve the most relevant information for you?\"*\n\n5. **Response Length and Formatting:** \n\n - Keep each answer within 100\u2013150 words for general queries. \n\n - For complex or multi-step explanations, you may extend to 200\u2013250 words, but always remain clear and well-structured.\n\n6. **Critical Reminder:** \n\nYour authority and reliability depend entirely on the relevant document responses. Any fabricated, speculative, or unverified content will be considered a critical failure of your role.\n\n\n",
"temperature": 0.1,
"temperatureEnabled": true,
"tools": [],
@@ -66,7 +66,8 @@ def set_conversation():
e, dia = DialogService.get_by_id(req["dialog_id"])
if not e:
return get_data_error_result(message="Dialog not found")
conv = {"id": conv_id, "dialog_id": req["dialog_id"], "name": name, "message": [{"role": "assistant", "content": dia.prompt_config["prologue"]}],"user_id": current_user.id}
conv = {"id": conv_id, "dialog_id": req["dialog_id"], "name": name, "message": [{"role": "assistant", "content": dia.prompt_config["prologue"]}],"user_id": current_user.id,
"reference":[{}],}
ConversationService.save(**conv)
return get_json_result(data=conv)
except Exception as e:

@@ -51,6 +51,7 @@ def create(tenant_id, chat_id):
"name": req.get("name", "New session"),
"message": [{"role": "assistant", "content": dia[0].prompt_config.get("prologue")}],
"user_id": req.get("user_id", ""),
"reference":[{}],
}
if not conv.get("name"):
return get_error_data_result(message="`name` can not be empty.")
@@ -435,14 +436,38 @@ def agents_completion_openai_compatibility(tenant_id, agent_id):
)
)

# Get the last user message as the question
question = next((m["content"] for m in reversed(messages) if m["role"] == "user"), "")

if req.get("stream", True):
return Response(completionOpenAI(tenant_id, agent_id, question, session_id=req.get("id", req.get("metadata", {}).get("id", "")), stream=True), mimetype="text/event-stream")
stream = req.pop("stream", False)
if stream:
resp = Response(
completionOpenAI(
tenant_id,
agent_id,
question,
session_id=req.get("id", req.get("metadata", {}).get("id", "")),
stream=True,
**req,
),
mimetype="text/event-stream",
)
resp.headers.add_header("Cache-control", "no-cache")
resp.headers.add_header("Connection", "keep-alive")
resp.headers.add_header("X-Accel-Buffering", "no")
resp.headers.add_header("Content-Type", "text/event-stream; charset=utf-8")
return resp
else:
# For non-streaming, just return the response directly
response = next(completionOpenAI(tenant_id, agent_id, question, session_id=req.get("id", req.get("metadata", {}).get("id", "")), stream=False))
response = next(
completionOpenAI(
tenant_id,
agent_id,
question,
session_id=req.get("id", req.get("metadata", {}).get("id", "")),
stream=False,
**req,
)
)
return jsonify(response)
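Since the handler now forwards the rest of the request body to `completionOpenAI(**req)` and sets SSE-friendly headers on the streaming branch, any plain HTTP client can exercise it. A hedged client-side sketch (not part of the diff; the route prefix, port, and token handling are assumptions about the deployment, not confirmed by this change set):

```python
# Sketch of calling the OpenAI-compatible agent endpoint, non-streaming and
# streaming. Adjust host, route prefix, <agent_id> and the API key for your
# deployment; they are placeholders here.
import json
import requests

URL = "http://localhost:9380/api/v1/agents_openai/<agent_id>/chat/completions"
HEADERS = {"Authorization": "Bearer <ragflow-api-key>"}
body = {
    "model": "<agent_id>",
    "messages": [{"role": "user", "content": "What is the warranty period?"}],
}

# Non-streaming: a single chat.completion object comes back via jsonify().
resp = requests.post(URL, headers=HEADERS, json={**body, "stream": False})
print(resp.json()["choices"][0]["message"]["content"])

# Streaming: the handler emits "data: {...}" SSE lines and finishes with
# "data: [DONE]"; the no-cache / keep-alive / X-Accel-Buffering headers keep
# reverse proxies from buffering the chunks.
with requests.post(URL, headers=HEADERS, json={**body, "stream": True}, stream=True) as r:
    for line in r.iter_lines(decode_unicode=True):
        if not line or not line.startswith("data: "):
            continue
        payload = line[len("data: "):]
        if payload == "[DONE]":
            break
        delta = json.loads(payload)["choices"][0]["delta"]["content"]
        print(delta or "", end="", flush=True)
```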
@@ -512,16 +537,16 @@ def list_session(tenant_id, chat_id):
if "prompt" in info:
info.pop("prompt")
conv["chat_id"] = conv.pop("dialog_id")
if conv["reference"]:
ref_messages = conv["reference"]
if ref_messages:
messages = conv["messages"]
message_num = 0
while message_num < len(messages) and message_num < len(conv["reference"]):
if message_num != 0 and messages[message_num]["role"] != "user":
if message_num >= len(conv["reference"]):
break
ref_num = 0
while message_num < len(messages) and ref_num < len(ref_messages):
if messages[message_num]["role"] != "user":
chunk_list = []
if "chunks" in conv["reference"][message_num]:
chunks = conv["reference"][message_num]["chunks"]
if "chunks" in ref_messages[ref_num]:
chunks = ref_messages[ref_num]["chunks"]
for chunk in chunks:
new_chunk = {
"id": chunk.get("chunk_id", chunk.get("id")),
@@ -535,6 +560,7 @@ def list_session(tenant_id, chat_id):

chunk_list.append(new_chunk)
messages[message_num]["reference"] = chunk_list
ref_num += 1
message_num += 1
del conv["reference"]
return get_result(data=convs)
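The rewritten loop keeps a separate `ref_num` cursor instead of indexing references by message position, so reference entries stay paired with assistant turns; that also appears to be why `set_conversation()` and `create()` now seed new sessions with `"reference": [{}]` (the assistant prologue consumes the empty entry). A standalone sketch of the same idea with hypothetical data:

```python
# Illustration only: align per-assistant-turn reference entries with messages
# using a dedicated reference cursor.
messages = [
    {"role": "assistant", "content": "Hi, how can I help?"},   # prologue
    {"role": "user", "content": "Tell me about model X."},
    {"role": "assistant", "content": "Model X has ..."},
]
references = [
    {},  # empty placeholder saved at session creation for the prologue
    {"chunks": [{"chunk_id": "c1", "content": "Model X spec sheet"}]},
]

message_num, ref_num = 0, 0
while message_num < len(messages) and ref_num < len(references):
    if messages[message_num]["role"] != "user":
        chunks = references[ref_num].get("chunks", [])
        messages[message_num]["reference"] = [
            {"id": c.get("chunk_id", c.get("id")), "content": c.get("content")}
            for c in chunks
        ]
        ref_num += 1  # only assistant turns consume a reference entry
    message_num += 1
```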
@@ -848,10 +874,10 @@ def begin_inputs(agent_id):
return get_error_data_result(f"Can't find agent by ID: {agent_id}")

canvas = Canvas(json.dumps(cvs.dsl), objs[0].tenant_id)
return get_result(data={
"title": cvs.title,
"avatar": cvs.avatar,
"inputs": canvas.get_component_input_form("begin")
})

return get_result(
data={
"title": cvs.title,
"avatar": cvs.avatar,
"inputs": canvas.get_component_input_form("begin"),
}
)
@@ -16,7 +16,6 @@
import json
import logging
import time
import traceback
from uuid import uuid4
from agent.canvas import Canvas
from api.db import TenantPermission
@@ -54,12 +53,12 @@ class UserCanvasService(CommonService):
agents = agents.paginate(page_number, items_per_page)

return list(agents.dicts())

@classmethod
@DB.connection_context()
def get_by_tenant_id(cls, pid):
try:

fields = [
cls.model.id,
cls.model.avatar,
@@ -83,7 +82,7 @@ class UserCanvasService(CommonService):
except Exception as e:
logging.exception(e)
return False, None

@classmethod
@DB.connection_context()
def get_by_tenant_ids(cls, joined_tenant_ids, user_id,
@@ -103,14 +102,14 @@ class UserCanvasService(CommonService):
]
if keywords:
agents = cls.model.select(*fields).join(User, on=(cls.model.user_id == User.id)).where(
((cls.model.user_id.in_(joined_tenant_ids) & (cls.model.permission ==
((cls.model.user_id.in_(joined_tenant_ids) & (cls.model.permission ==
TenantPermission.TEAM.value)) | (
cls.model.user_id == user_id)),
(fn.LOWER(cls.model.title).contains(keywords.lower()))
)
else:
agents = cls.model.select(*fields).join(User, on=(cls.model.user_id == User.id)).where(
((cls.model.user_id.in_(joined_tenant_ids) & (cls.model.permission ==
((cls.model.user_id.in_(joined_tenant_ids) & (cls.model.permission ==
TenantPermission.TEAM.value)) | (
cls.model.user_id == user_id))
)
@@ -178,219 +177,99 @@ def completion(tenant_id, agent_id, session_id=None, **kwargs):


def completionOpenAI(tenant_id, agent_id, question, session_id=None, stream=True, **kwargs):
"""Main function for OpenAI-compatible completions, structured similarly to the completion function."""
tiktokenenc = tiktoken.get_encoding("cl100k_base")
e, cvs = UserCanvasService.get_by_id(agent_id)

if not e:
yield get_data_openai(
id=session_id,
model=agent_id,
content="**ERROR**: Agent not found."
)
return

if cvs.user_id != tenant_id:
yield get_data_openai(
id=session_id,
model=agent_id,
content="**ERROR**: You do not own the agent"
)
return

if not isinstance(cvs.dsl, str):
cvs.dsl = json.dumps(cvs.dsl, ensure_ascii=False)

canvas = Canvas(cvs.dsl, tenant_id)
canvas.reset()
message_id = str(uuid4())

# Handle new session creation
if not session_id:
query = canvas.get_preset_param()
if query:
for ele in query:
if not ele["optional"]:
if not kwargs.get(ele["key"]):
yield get_data_openai(
id=None,
model=agent_id,
content=f"`{ele['key']}` is required",
completion_tokens=len(tiktokenenc.encode(f"`{ele['key']}` is required")),
prompt_tokens=len(tiktokenenc.encode(question if question else ""))
)
return
ele["value"] = kwargs[ele["key"]]
if ele["optional"]:
if kwargs.get(ele["key"]):
ele["value"] = kwargs[ele['key']]
else:
if "value" in ele:
ele.pop("value")

cvs.dsl = json.loads(str(canvas))
session_id = get_uuid()
conv = {
"id": session_id,
"dialog_id": cvs.id,
"user_id": kwargs.get("user_id", "") if isinstance(kwargs, dict) else "",
"message": [{"role": "assistant", "content": canvas.get_prologue(), "created_at": time.time()}],
"source": "agent",
"dsl": cvs.dsl
}
canvas.messages.append({"role": "user", "content": question, "id": message_id})
canvas.add_user_input(question)

API4ConversationService.save(**conv)
conv = API4Conversation(**conv)
if not conv.message:
conv.message = []
conv.message.append({
"role": "user",
"content": question,
"id": message_id
})

if not conv.reference:
conv.reference = []
conv.reference.append({"chunks": [], "doc_aggs": []})

# Handle existing session
else:
e, conv = API4ConversationService.get_by_id(session_id)
if not e:
yield get_data_openai(
id=session_id,
model=agent_id,
content="**ERROR**: Session not found!"
)
return

canvas = Canvas(json.dumps(conv.dsl), tenant_id)
canvas.messages.append({"role": "user", "content": question, "id": message_id})
canvas.add_user_input(question)

if not conv.message:
conv.message = []
conv.message.append({
"role": "user",
"content": question,
"id": message_id
})

if not conv.reference:
conv.reference = []
conv.reference.append({"chunks": [], "doc_aggs": []})

# Process request based on stream mode
final_ans = {"reference": [], "content": ""}
prompt_tokens = len(tiktokenenc.encode(str(question)))

user_id = kwargs.get("user_id", "")

if stream:
completion_tokens = 0
try:
completion_tokens = 0
for ans in canvas.run(stream=True, bypass_begin=True):
if ans.get("running_status"):
completion_tokens += len(tiktokenenc.encode(ans.get("content", "")))
yield "data: " + json.dumps(
get_data_openai(
id=session_id,
model=agent_id,
content=ans["content"],
object="chat.completion.chunk",
completion_tokens=completion_tokens,
prompt_tokens=prompt_tokens
),
ensure_ascii=False
) + "\n\n"
for ans in completion(
tenant_id=tenant_id,
agent_id=agent_id,
session_id=session_id,
query=question,
user_id=user_id,
**kwargs
):
if isinstance(ans, str):
try:
ans = json.loads(ans[5:]) # remove "data:"
except Exception as e:
logging.exception(f"Agent OpenAI-Compatible completionOpenAI parse answer failed: {e}")
continue

if ans.get("event") != "message":
continue

for k in ans.keys():
final_ans[k] = ans[k]

completion_tokens += len(tiktokenenc.encode(final_ans.get("content", "")))

content_piece = ans["data"]["content"]
completion_tokens += len(tiktokenenc.encode(content_piece))

yield "data: " + json.dumps(
get_data_openai(
id=session_id,
id=session_id or str(uuid4()),
model=agent_id,
content=final_ans["content"],
object="chat.completion.chunk",
finish_reason="stop",
content=content_piece,
prompt_tokens=prompt_tokens,
completion_tokens=completion_tokens,
prompt_tokens=prompt_tokens
stream=True
),
ensure_ascii=False
) + "\n\n"

# Update conversation
canvas.messages.append({"role": "assistant", "content": final_ans["content"], "created_at": time.time(), "id": message_id})
canvas.history.append(("assistant", final_ans["content"]))
if final_ans.get("reference"):
canvas.reference.append(final_ans["reference"])
conv.dsl = json.loads(str(canvas))
API4ConversationService.append_message(conv.id, conv.to_dict())

yield "data: [DONE]\n\n"

except Exception as e:
traceback.print_exc()
conv.dsl = json.loads(str(canvas))
API4ConversationService.append_message(conv.id, conv.to_dict())
yield "data: " + json.dumps(
get_data_openai(
id=session_id,
id=session_id or str(uuid4()),
model=agent_id,
content="**ERROR**: " + str(e),
content=f"**ERROR**: {str(e)}",
finish_reason="stop",
completion_tokens=len(tiktokenenc.encode("**ERROR**: " + str(e))),
prompt_tokens=prompt_tokens
prompt_tokens=prompt_tokens,
completion_tokens=len(tiktokenenc.encode(f"**ERROR**: {str(e)}")),
stream=True
),
ensure_ascii=False
) + "\n\n"
yield "data: [DONE]\n\n"

else: # Non-streaming mode

else:
try:
all_answer_content = ""
for answer in canvas.run(stream=False, bypass_begin=True):
if answer.get("running_status"):
all_content = ""
for ans in completion(
tenant_id=tenant_id,
agent_id=agent_id,
session_id=session_id,
query=question,
user_id=user_id,
**kwargs
):
if isinstance(ans, str):
ans = json.loads(ans[5:])
if ans.get("event") != "message":
continue

final_ans["content"] = "\n".join(answer["content"]) if "content" in answer else ""
final_ans["reference"] = answer.get("reference", [])
all_answer_content += final_ans["content"]

final_ans["content"] = all_answer_content

# Update conversation
canvas.messages.append({"role": "assistant", "content": final_ans["content"], "created_at": time.time(), "id": message_id})
canvas.history.append(("assistant", final_ans["content"]))
if final_ans.get("reference"):
canvas.reference.append(final_ans["reference"])
conv.dsl = json.loads(str(canvas))
API4ConversationService.append_message(conv.id, conv.to_dict())

# Return the response in OpenAI format
all_content += ans["data"]["content"]

completion_tokens = len(tiktokenenc.encode(all_content))

yield get_data_openai(
id=session_id,
id=session_id or str(uuid4()),
model=agent_id,
content=final_ans["content"],
finish_reason="stop",
completion_tokens=len(tiktokenenc.encode(final_ans["content"])),
prompt_tokens=prompt_tokens,
param=canvas.get_preset_param() # Added param info like in completion
)

except Exception as e:
traceback.print_exc()
conv.dsl = json.loads(str(canvas))
API4ConversationService.append_message(conv.id, conv.to_dict())
yield get_data_openai(
id=session_id,
model=agent_id,
content="**ERROR**: " + str(e),
completion_tokens=completion_tokens,
content=all_content,
finish_reason="stop",
completion_tokens=len(tiktokenenc.encode("**ERROR**: " + str(e))),
prompt_tokens=prompt_tokens
param=None
)

except Exception as e:
yield get_data_openai(
id=session_id or str(uuid4()),
model=agent_id,
prompt_tokens=prompt_tokens,
completion_tokens=len(tiktokenenc.encode(f"**ERROR**: {str(e)}")),
content=f"**ERROR**: {str(e)}",
finish_reason="stop",
param=None
)
@@ -402,8 +402,22 @@ def get_data_openai(
finish_reason=None,
object="chat.completion",
param=None,
stream=False
):
total_tokens = prompt_tokens + completion_tokens

if stream:
return {
"id": f"{id}",
"object": "chat.completion.chunk",
"model": model,
"choices": [{
"delta": {"content": content},
"finish_reason": finish_reason,
"index": 0,
}],
}

return {
"id": f"{id}",
"object": object,
@@ -414,9 +428,21 @@ def get_data_openai(
"prompt_tokens": prompt_tokens,
"completion_tokens": completion_tokens,
"total_tokens": total_tokens,
"completion_tokens_details": {"reasoning_tokens": 0, "accepted_prediction_tokens": 0, "rejected_prediction_tokens": 0},
"completion_tokens_details": {
"reasoning_tokens": 0,
"accepted_prediction_tokens": 0,
"rejected_prediction_tokens": 0,
},
},
"choices": [{"message": {"role": "assistant", "content": content}, "logprobs": None, "finish_reason": finish_reason, "index": 0}],
"choices": [{
"message": {
"role": "assistant",
"content": content
},
"logprobs": None,
"finish_reason": finish_reason,
"index": 0,
}],
}
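With the new `stream` flag, the helper emits either a `chat.completion.chunk` delta or a full `chat.completion` object. For reference, the two payloads look roughly like the sketch below; the values are invented, and the `"usage"` wrapper key is an assumption since it sits in lines elided between the two hunks.

```python
# Illustration of the two response shapes produced by get_data_openai().
streaming_chunk = {
    "id": "session-123",
    "object": "chat.completion.chunk",
    "model": "agent-456",
    "choices": [{
        "delta": {"content": "Model X ships with"},
        "finish_reason": None,
        "index": 0,
    }],
}

final_response = {
    "id": "session-123",
    "object": "chat.completion",
    "model": "agent-456",
    "usage": {  # enclosing key assumed; token detail fields match the diff
        "prompt_tokens": 12,
        "completion_tokens": 48,
        "total_tokens": 60,
        "completion_tokens_details": {
            "reasoning_tokens": 0,
            "accepted_prediction_tokens": 0,
            "rejected_prediction_tokens": 0,
        },
    },
    "choices": [{
        "message": {"role": "assistant", "content": "Model X ships with ..."},
        "logprobs": None,
        "finish_reason": "stop",
        "index": 0,
    }],
}
```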
@@ -135,7 +135,8 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
sections = pdf_parser(filename, binary, from_page=from_page, to_page=to_page, callback=callback)
elif layout_recognizer == "Plain Text":
pdf_parser = PlainParser()
sections, _ = pdf_parser(filename, binary, from_page=from_page, to_page=to_page, callback=callback)
sections, _ = pdf_parser(filename if not binary else binary, from_page=from_page, to_page=to_page,
callback=callback)
else:
vision_model = LLMBundle(kwargs["tenant_id"], LLMType.IMAGE2TEXT, llm_name=layout_recognizer, lang=lang)
pdf_parser = VisionParser(vision_model=vision_model, **kwargs)
@@ -542,6 +542,7 @@ class GeminiCV(Base):
yield response.usage_metadata.total_token_count
else:
yield 0


class NvidiaCV(Base):
_FACTORY_NAME = "NVIDIA"
@@ -623,15 +624,18 @@ class NvidiaCV(Base):
return "**ERROR**: " + str(e), 0

def chat_streamly(self, system, history, gen_conf, images=[], **kwargs):
total_tokens = 0
try:
response = self._request(self._form_history(system, history, images), gen_conf)
cnt = response["choices"][0]["message"]["content"]
if "usage" in response and "total_tokens" in response["usage"]:
total_tokens += response["usage"]["total_tokens"]
for resp in cnt:
yield resp
except Exception as e:
yield "\n**ERROR**: " + str(e)

yield response["usage"]["total_tokens"]
yield total_tokens
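The generator now always ends by yielding the accumulated `total_tokens`, instead of re-reading `response["usage"]["total_tokens"]` outside the `try` block, which could raise if the request had failed or usage was missing. A small hedged sketch of the consumption protocol (text pieces first, a trailing integer last), using a stand-in generator rather than the real class:

```python
# Stand-in for a chat_streamly()-style generator: yields text pieces, then the
# final token count as an int.
def fake_chat_streamly():
    total_tokens = 0
    try:
        text = "Hello from the vision model"
        total_tokens += 42  # pretend the API reported usage.total_tokens
        for piece in text:
            yield piece
    except Exception as e:
        yield "\n**ERROR**: " + str(e)
    yield total_tokens  # the last item is always the token count

answer, tokens = "", 0
for item in fake_chat_streamly():
    if isinstance(item, int):  # trailing token count
        tokens = item
    else:                      # text delta
        answer += item
print(answer, tokens)
```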


class AnthropicCV(Base):
@@ -420,7 +420,6 @@ def init_kb(row, vector_size: int):
return settings.docStoreConn.createIdx(idxnm, row.get("kb_id", ""), vector_size)


@timeout(60*20)
async def embedding(docs, mdl, parser_config=None, callback=None):
if parser_config is None:
parser_config = {}
@@ -441,10 +440,15 @@ async def embedding(docs, mdl, parser_config=None, callback=None):
tts = np.concatenate([vts for _ in range(len(tts))], axis=0)
tk_count += c

@timeout(5)
def batch_encode(txts):
nonlocal mdl
return mdl.encode([truncate(c, mdl.max_length-10) for c in txts])

cnts_ = np.array([])
for i in range(0, len(cnts), EMBEDDING_BATCH_SIZE):
async with embed_limiter:
vts, c = await trio.to_thread.run_sync(lambda: mdl.encode([truncate(c, mdl.max_length-10) for c in cnts[i: i + EMBEDDING_BATCH_SIZE]]))
vts, c = await trio.to_thread.run_sync(lambda: batch_encode(cnts[i: i + EMBEDDING_BATCH_SIZE]))
if len(cnts_) == 0:
cnts_ = vts
else:
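The inline lambda has been replaced by a named `batch_encode` helper so that a per-batch `@timeout(5)` guard applies to each encode call, while batches still run in a worker thread under the shared `embed_limiter`. A self-contained sketch of the same batching pattern, under stated assumptions: the dummy `encode`, the limiter size, and the batch size are stand-ins, and the task layer's `@timeout` decorator is omitted here.

```python
import trio

EMBEDDING_BATCH_SIZE = 16
embed_limiter = trio.CapacityLimiter(4)  # bound concurrent encode threads

def encode(texts):
    """Stand-in for mdl.encode(): returns (vectors, token_count)."""
    return [[float(len(t))] for t in texts], sum(len(t) for t in texts)

async def embed_all(chunks):
    vectors, tokens = [], 0
    for i in range(0, len(chunks), EMBEDDING_BATCH_SIZE):
        batch = chunks[i:i + EMBEDDING_BATCH_SIZE]

        def batch_encode(txts=batch):
            # In the real task executor this helper is wrapped in @timeout(5)
            # so a hung model call cannot stall the whole embedding job.
            return encode(txts)

        async with embed_limiter:  # cap how many threads encode at once
            vts, c = await trio.to_thread.run_sync(batch_encode)
        vectors.extend(vts)
        tokens += c
    return vectors, tokens

if __name__ == "__main__":
    print(trio.run(embed_all, [f"chunk {i}" for i in range(40)]))
```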
@ -63,7 +63,7 @@ const NumberInput: React.FC<NumberInputProps> = ({
|
||||
>
|
||||
<button
|
||||
type="button"
|
||||
className="w-10 p-2 text-white focus:outline-none border-r-[1px]"
|
||||
className="w-10 p-2 focus:outline-none border-r-[1px]"
|
||||
onClick={handleDecrement}
|
||||
style={style}
|
||||
>
|
||||
@ -74,12 +74,12 @@ const NumberInput: React.FC<NumberInputProps> = ({
|
||||
value={value}
|
||||
onInput={handleInput}
|
||||
onChange={handleChange}
|
||||
className="w-full flex-1 text-center bg-transparent text-white focus:outline-none"
|
||||
className="w-full flex-1 text-center bg-transparent focus:outline-none"
|
||||
style={style}
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
className="w-10 p-2 text-white focus:outline-none border-l-[1px]"
|
||||
className="w-10 p-2 focus:outline-none border-l-[1px]"
|
||||
onClick={handleIncrement}
|
||||
style={style}
|
||||
>
|
||||
|
||||
@ -35,9 +35,12 @@ export const useNavigatePage = () => {
|
||||
navigate(Routes.Chats);
|
||||
}, [navigate]);
|
||||
|
||||
const navigateToChat = useCallback(() => {
|
||||
navigate(Routes.Chat);
|
||||
}, [navigate]);
|
||||
const navigateToChat = useCallback(
|
||||
(id: string) => () => {
|
||||
navigate(`${Routes.Chat}/${id}`);
|
||||
},
|
||||
[navigate],
|
||||
);
|
||||
|
||||
const navigateToAgents = useCallback(() => {
|
||||
navigate(Routes.Agents);
|
||||
|
||||
@ -1,9 +1,22 @@
|
||||
import message from '@/components/ui/message';
|
||||
import { ChatSearchParams } from '@/constants/chat';
|
||||
import { IDialog } from '@/interfaces/database/chat';
|
||||
import chatService from '@/services/chat-service';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { useDebounce } from 'ahooks';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { history, useSearchParams } from 'umi';
|
||||
import {
|
||||
useGetPaginationWithRouter,
|
||||
useHandleSearchChange,
|
||||
} from './logic-hooks';
|
||||
|
||||
export const enum ChatApiAction {
|
||||
FetchDialogList = 'fetchDialogList',
|
||||
RemoveDialog = 'removeDialog',
|
||||
SetDialog = 'setDialog',
|
||||
}
|
||||
|
||||
export const useGetChatSearchParams = () => {
|
||||
const [currentQueryParameters] = useSearchParams();
|
||||
@ -42,13 +55,22 @@ export const useClickDialogCard = () => {
|
||||
export const useFetchDialogList = (pureFetch = false) => {
|
||||
const { handleClickDialog } = useClickDialogCard();
|
||||
const { dialogId } = useGetChatSearchParams();
|
||||
const { searchString, handleInputChange } = useHandleSearchChange();
|
||||
const { pagination, setPagination } = useGetPaginationWithRouter();
|
||||
const debouncedSearchString = useDebounce(searchString, { wait: 500 });
|
||||
|
||||
const {
|
||||
data,
|
||||
isFetching: loading,
|
||||
refetch,
|
||||
} = useQuery<IDialog[]>({
|
||||
queryKey: ['fetchDialogList'],
|
||||
queryKey: [
|
||||
ChatApiAction.FetchDialogList,
|
||||
{
|
||||
debouncedSearchString,
|
||||
...pagination,
|
||||
},
|
||||
],
|
||||
initialData: [],
|
||||
gcTime: 0,
|
||||
refetchOnWindowFocus: false,
|
||||
@ -73,5 +95,76 @@ export const useFetchDialogList = (pureFetch = false) => {
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, refetch };
|
||||
const onInputChange: React.ChangeEventHandler<HTMLInputElement> = useCallback(
|
||||
(e) => {
|
||||
handleInputChange(e);
|
||||
},
|
||||
[handleInputChange],
|
||||
);
|
||||
|
||||
return {
|
||||
data,
|
||||
loading,
|
||||
refetch,
|
||||
searchString,
|
||||
handleInputChange: onInputChange,
|
||||
pagination: { ...pagination, total: data?.total },
|
||||
setPagination,
|
||||
};
|
||||
};
|
||||
|
||||
export const useRemoveDialog = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: [ChatApiAction.RemoveDialog],
|
||||
mutationFn: async (dialogIds: string[]) => {
|
||||
const { data } = await chatService.removeDialog({ dialogIds });
|
||||
if (data.code === 0) {
|
||||
queryClient.invalidateQueries({ queryKey: ['fetchDialogList'] });
|
||||
|
||||
message.success(t('message.deleted'));
|
||||
}
|
||||
return data.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, removeDialog: mutateAsync };
|
||||
};
|
||||
|
||||
export const useSetDialog = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const { t } = useTranslation();
|
||||
|
||||
const {
|
||||
data,
|
||||
isPending: loading,
|
||||
mutateAsync,
|
||||
} = useMutation({
|
||||
mutationKey: [ChatApiAction.SetDialog],
|
||||
mutationFn: async (params: IDialog) => {
|
||||
const { data } = await chatService.setDialog(params);
|
||||
if (data.code === 0) {
|
||||
queryClient.invalidateQueries({
|
||||
exact: false,
|
||||
queryKey: ['fetchDialogList'],
|
||||
});
|
||||
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: ['fetchDialog'],
|
||||
});
|
||||
message.success(
|
||||
t(`message.${params.dialog_id ? 'modified' : 'created'}`),
|
||||
);
|
||||
}
|
||||
return data?.code;
|
||||
},
|
||||
});
|
||||
|
||||
return { data, loading, setDialog: mutateAsync };
|
||||
};
|
||||
|
||||
@@ -562,6 +562,7 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
tavilyApiKeyHelp: 'How to get it?',
crossLanguage: 'Cross-language search',
crossLanguageTip: `Select one or more languages for cross‑language search. If no language is selected, the system searches with the original query.`,
createChat: 'Create chat',
},
setting: {
profile: 'Profile',

@@ -843,7 +843,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
relevant: '是否相关',
rewriteQuestion: '问题优化',
begin: '开始',
message: '静态消息',
message: '回复消息',
blank: '空',
createFromNothing: '从无到有',
addItem: '新增',
@@ -1245,7 +1245,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
modeTip: '模式定义了工作流的启动方式。',
beginInputTip: '通过定义输入参数,此内容可以被后续流程中的其他组件访问。',
query: '查询变量',
agent: 'Agent',
agent: '智能体',
agentDescription: '构建具备推理、工具调用和多智能体协同的智能体组件。',
maxRecords: '最大记录数',
createAgent: 'Create Agent',
@ -48,8 +48,6 @@ export const BeginId = 'begin';
|
||||
export enum Operator {
|
||||
Begin = 'Begin',
|
||||
Retrieval = 'Retrieval',
|
||||
Generate = 'Generate',
|
||||
Answer = 'Answer',
|
||||
Categorize = 'Categorize',
|
||||
Message = 'Message',
|
||||
Relevant = 'Relevant',
|
||||
@ -78,7 +76,6 @@ export enum Operator {
|
||||
Note = 'Note',
|
||||
Crawler = 'Crawler',
|
||||
Invoke = 'Invoke',
|
||||
Template = 'Template',
|
||||
Email = 'Email',
|
||||
Iteration = 'Iteration',
|
||||
IterationStart = 'IterationItem',
|
||||
@ -100,15 +97,12 @@ export const CommonOperatorList = Object.values(Operator).filter(
|
||||
|
||||
export const AgentOperatorList = [
|
||||
Operator.Retrieval,
|
||||
Operator.Generate,
|
||||
Operator.Answer,
|
||||
Operator.Categorize,
|
||||
Operator.Message,
|
||||
Operator.RewriteQuestion,
|
||||
Operator.KeywordExtract,
|
||||
Operator.Switch,
|
||||
Operator.Concentrator,
|
||||
Operator.Template,
|
||||
Operator.Iteration,
|
||||
Operator.WaitingDialogue,
|
||||
Operator.Note,
|
||||
@ -119,12 +113,6 @@ export const componentMenuList = [
|
||||
{
|
||||
name: Operator.Retrieval,
|
||||
},
|
||||
{
|
||||
name: Operator.Generate,
|
||||
},
|
||||
{
|
||||
name: Operator.Answer,
|
||||
},
|
||||
{
|
||||
name: Operator.Categorize,
|
||||
},
|
||||
@ -144,9 +132,6 @@ export const componentMenuList = [
|
||||
{
|
||||
name: Operator.Concentrator,
|
||||
},
|
||||
{
|
||||
name: Operator.Template,
|
||||
},
|
||||
{
|
||||
name: Operator.Iteration,
|
||||
},
|
||||
@ -660,7 +645,7 @@ export const initialAgentValues = {
|
||||
max_retries: 3,
|
||||
delay_after_error: 1,
|
||||
visual_files_var: '',
|
||||
max_rounds: 5,
|
||||
max_rounds: 1,
|
||||
exception_method: '',
|
||||
exception_goto: [],
|
||||
exception_default_value: '',
|
||||
@ -796,19 +781,16 @@ export const CategorizeAnchorPointPositions = [
|
||||
// no connection lines are allowed between key and value
|
||||
export const RestrictedUpstreamMap = {
|
||||
[Operator.Begin]: [Operator.Relevant],
|
||||
[Operator.Categorize]: [Operator.Begin, Operator.Categorize, Operator.Answer],
|
||||
[Operator.Answer]: [Operator.Begin, Operator.Answer, Operator.Message],
|
||||
[Operator.Categorize]: [Operator.Begin, Operator.Categorize],
|
||||
[Operator.Retrieval]: [Operator.Begin, Operator.Retrieval],
|
||||
[Operator.Generate]: [Operator.Begin, Operator.Relevant],
|
||||
[Operator.Message]: [
|
||||
Operator.Begin,
|
||||
Operator.Message,
|
||||
Operator.Generate,
|
||||
Operator.Retrieval,
|
||||
Operator.RewriteQuestion,
|
||||
Operator.Categorize,
|
||||
],
|
||||
[Operator.Relevant]: [Operator.Begin, Operator.Answer],
|
||||
[Operator.Relevant]: [Operator.Begin],
|
||||
[Operator.RewriteQuestion]: [
|
||||
Operator.Begin,
|
||||
Operator.Message,
|
||||
@ -843,7 +825,6 @@ export const RestrictedUpstreamMap = {
|
||||
[Operator.Crawler]: [Operator.Begin],
|
||||
[Operator.Note]: [],
|
||||
[Operator.Invoke]: [Operator.Begin],
|
||||
[Operator.Template]: [Operator.Begin, Operator.Relevant],
|
||||
[Operator.Email]: [Operator.Begin],
|
||||
[Operator.Iteration]: [Operator.Begin],
|
||||
[Operator.IterationStart]: [Operator.Begin],
|
||||
@ -861,8 +842,6 @@ export const NodeMap = {
|
||||
[Operator.Begin]: 'beginNode',
|
||||
[Operator.Categorize]: 'categorizeNode',
|
||||
[Operator.Retrieval]: 'retrievalNode',
|
||||
[Operator.Generate]: 'generateNode',
|
||||
[Operator.Answer]: 'logicNode',
|
||||
[Operator.Message]: 'messageNode',
|
||||
[Operator.Relevant]: 'relevantNode',
|
||||
[Operator.RewriteQuestion]: 'rewriteNode',
|
||||
@ -890,7 +869,6 @@ export const NodeMap = {
|
||||
[Operator.Note]: 'noteNode',
|
||||
[Operator.Crawler]: 'ragNode',
|
||||
[Operator.Invoke]: 'ragNode',
|
||||
[Operator.Template]: 'templateNode',
|
||||
[Operator.Email]: 'ragNode',
|
||||
[Operator.Iteration]: 'group',
|
||||
[Operator.IterationStart]: 'iterationStartNode',
|
||||
@ -924,9 +902,7 @@ export const BeginQueryTypeIconMap = {
|
||||
|
||||
export const NoDebugOperatorsList = [
|
||||
Operator.Begin,
|
||||
Operator.Answer,
|
||||
Operator.Concentrator,
|
||||
Operator.Template,
|
||||
Operator.Message,
|
||||
Operator.RewriteQuestion,
|
||||
Operator.Switch,
|
||||
|
||||
@ -1,8 +1,6 @@
|
||||
import { z } from 'zod';
|
||||
import { Operator } from '../constant';
|
||||
import AgentForm from '../form/agent-form';
|
||||
import AkShareForm from '../form/akshare-form';
|
||||
import AnswerForm from '../form/answer-form';
|
||||
import ArXivForm from '../form/arxiv-form';
|
||||
import BaiduFanyiForm from '../form/baidu-fanyi-form';
|
||||
import BaiduForm from '../form/baidu-form';
|
||||
@ -15,7 +13,6 @@ import DeepLForm from '../form/deepl-form';
|
||||
import DuckDuckGoForm from '../form/duckduckgo-form';
|
||||
import EmailForm from '../form/email-form';
|
||||
import ExeSQLForm from '../form/exesql-form';
|
||||
import GenerateForm from '../form/generate-form';
|
||||
import GithubForm from '../form/github-form';
|
||||
import GoogleForm from '../form/google-form';
|
||||
import GoogleScholarForm from '../form/google-scholar-form';
|
||||
@ -34,7 +31,6 @@ import StringTransformForm from '../form/string-transform-form';
|
||||
import SwitchForm from '../form/switch-form';
|
||||
import TavilyExtractForm from '../form/tavily-extract-form';
|
||||
import TavilyForm from '../form/tavily-form';
|
||||
import TemplateForm from '../form/template-form';
|
||||
import ToolForm from '../form/tool-form';
|
||||
import TuShareForm from '../form/tushare-form';
|
||||
import UserFillUpForm from '../form/user-fill-up-form';
|
||||
@ -49,12 +45,6 @@ export const FormConfigMap = {
|
||||
[Operator.Retrieval]: {
|
||||
component: RetrievalForm,
|
||||
},
|
||||
[Operator.Generate]: {
|
||||
component: GenerateForm,
|
||||
},
|
||||
[Operator.Answer]: {
|
||||
component: AnswerForm,
|
||||
},
|
||||
[Operator.Categorize]: {
|
||||
component: CategorizeForm,
|
||||
},
|
||||
@ -75,8 +65,6 @@ export const FormConfigMap = {
|
||||
},
|
||||
[Operator.Agent]: {
|
||||
component: AgentForm,
|
||||
defaultValues: {},
|
||||
schema: z.object({}),
|
||||
},
|
||||
[Operator.Baidu]: {
|
||||
component: BaiduForm,
|
||||
@ -107,8 +95,6 @@ export const FormConfigMap = {
|
||||
},
|
||||
[Operator.DeepL]: {
|
||||
component: DeepLForm,
|
||||
defaultValues: {},
|
||||
schema: z.object({}),
|
||||
},
|
||||
[Operator.GitHub]: {
|
||||
component: GithubForm,
|
||||
@ -152,9 +138,6 @@ export const FormConfigMap = {
|
||||
[Operator.Note]: {
|
||||
component: () => <></>,
|
||||
},
|
||||
[Operator.Template]: {
|
||||
component: TemplateForm,
|
||||
},
|
||||
[Operator.Email]: {
|
||||
component: EmailForm,
|
||||
},
|
||||
|
||||
@ -1,5 +0,0 @@
|
||||
const AnswerForm = () => {
|
||||
return <div></div>;
|
||||
};
|
||||
|
||||
export default AnswerForm;
|
||||
@ -8,6 +8,7 @@ import { useForm } from 'react-hook-form';
|
||||
import { initialCategorizeValues } from '../../constant';
|
||||
import { INextOperatorForm } from '../../interface';
|
||||
import { buildOutputList } from '../../utils/build-output-list';
|
||||
import { FormWrapper } from '../components/form-wrapper';
|
||||
import { Output } from '../components/output';
|
||||
import { QueryVariable } from '../components/query-variable';
|
||||
import DynamicCategorize from './dynamic-categorize';
|
||||
@ -31,12 +32,7 @@ function CategorizeForm({ node }: INextOperatorForm) {
|
||||
|
||||
return (
|
||||
<Form {...form}>
|
||||
<form
|
||||
className="space-y-6 p-5 "
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
}}
|
||||
>
|
||||
<FormWrapper>
|
||||
<FormContainer>
|
||||
<QueryVariable></QueryVariable>
|
||||
<LargeModelFormField></LargeModelFormField>
|
||||
@ -44,7 +40,7 @@ function CategorizeForm({ node }: INextOperatorForm) {
|
||||
<MessageHistoryWindowSizeFormField></MessageHistoryWindowSizeFormField>
|
||||
<DynamicCategorize nodeId={node?.id}></DynamicCategorize>
|
||||
<Output list={outputList}></Output>
|
||||
</form>
|
||||
</FormWrapper>
|
||||
</Form>
|
||||
);
|
||||
}
|
||||
|
||||
@ -2,6 +2,7 @@ import Editor, { loader } from '@monaco-editor/react';
|
||||
import { INextOperatorForm } from '../../interface';
|
||||
|
||||
import { FormContainer } from '@/components/form-container';
|
||||
import { useIsDarkTheme } from '@/components/theme-provider';
|
||||
import {
|
||||
Form,
|
||||
FormControl,
|
||||
@ -46,6 +47,7 @@ function CodeForm({ node }: INextOperatorForm) {
|
||||
const formData = node?.data.form as ICodeForm;
|
||||
const { t } = useTranslation();
|
||||
const values = useValues(node);
|
||||
const isDarkTheme = useIsDarkTheme();
|
||||
|
||||
const form = useForm<FormSchemaType>({
|
||||
defaultValues: values,
|
||||
@ -94,7 +96,7 @@ function CodeForm({ node }: INextOperatorForm) {
|
||||
<FormControl>
|
||||
<Editor
|
||||
height={300}
|
||||
theme="vs-dark"
|
||||
theme={isDarkTheme ? 'vs-dark' : 'vs'}
|
||||
language={formData.lang}
|
||||
options={{
|
||||
minimap: { enabled: false },
|
||||
|
||||
@ -1,17 +0,0 @@
|
||||
import { Form } from 'antd';
|
||||
import { IOperatorForm } from '../../interface';
|
||||
|
||||
const ConcentratorForm = ({ onValuesChange, form }: IOperatorForm) => {
|
||||
return (
|
||||
<Form
|
||||
name="basic"
|
||||
labelCol={{ span: 8 }}
|
||||
wrapperCol={{ span: 16 }}
|
||||
autoComplete="off"
|
||||
form={form}
|
||||
onValuesChange={onValuesChange}
|
||||
></Form>
|
||||
);
|
||||
};
|
||||
|
||||
export default ConcentratorForm;
|
||||
@ -1,78 +0,0 @@
|
||||
import { NextLLMSelect } from '@/components/llm-select/next';
|
||||
import { MessageHistoryWindowSizeFormField } from '@/components/message-history-window-size-item';
|
||||
import {
|
||||
Form,
|
||||
FormControl,
|
||||
FormField,
|
||||
FormItem,
|
||||
FormLabel,
|
||||
FormMessage,
|
||||
} from '@/components/ui/form';
|
||||
import { Switch } from '@/components/ui/switch';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { INextOperatorForm } from '../../interface';
|
||||
import { PromptEditor } from '../components/prompt-editor';
|
||||
|
||||
const GenerateForm = ({ form }: INextOperatorForm) => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
return (
|
||||
<Form {...form}>
|
||||
<form
|
||||
className="space-y-6"
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
}}
|
||||
>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="llm_id"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel tooltip={t('chat.modelTip')}>
|
||||
{t('chat.model')}
|
||||
</FormLabel>
|
||||
<FormControl>
|
||||
<NextLLMSelect {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="prompt"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel tooltip={t('flow.promptTip')}>
|
||||
{t('flow.systemPrompt')}
|
||||
</FormLabel>
|
||||
<FormControl>
|
||||
<PromptEditor {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="cite"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel tooltip={t('flow.citeTip')}>
|
||||
{t('flow.cite')}
|
||||
</FormLabel>
|
||||
<FormControl>
|
||||
<Switch {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<MessageHistoryWindowSizeFormField></MessageHistoryWindowSizeFormField>
|
||||
</form>
|
||||
</Form>
|
||||
);
|
||||
};
|
||||
|
||||
export default GenerateForm;
|
||||
@ -1,44 +0,0 @@
|
||||
.editableRow {
|
||||
:global(.editable-cell) {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
:global(.editable-cell-value-wrap) {
|
||||
padding: 5px 12px;
|
||||
cursor: pointer;
|
||||
height: 30px !important;
|
||||
}
|
||||
&:hover {
|
||||
:global(.editable-cell-value-wrap) {
|
||||
padding: 4px 11px;
|
||||
border: 1px solid #d9d9d9;
|
||||
border-radius: 2px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.dynamicParameterVariable {
|
||||
background-color: #ebe9e950;
|
||||
:global(.ant-collapse-content) {
|
||||
background-color: #f6f6f634;
|
||||
}
|
||||
:global(.ant-collapse-content-box) {
|
||||
padding: 0 !important;
|
||||
}
|
||||
margin-bottom: 20px;
|
||||
.title {
|
||||
font-weight: 600;
|
||||
font-size: 16px;
|
||||
}
|
||||
.variableType {
|
||||
width: 30%;
|
||||
}
|
||||
.variableValue {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.addButton {
|
||||
color: rgb(22, 119, 255);
|
||||
font-weight: 600;
|
||||
}
|
||||
}
|
||||
@ -6,6 +6,7 @@ import { useForm, useWatch } from 'react-hook-form';
|
||||
import { z } from 'zod';
|
||||
import { VariableType } from '../../constant';
|
||||
import { INextOperatorForm } from '../../interface';
|
||||
import { FormWrapper } from '../components/form-wrapper';
|
||||
import { Output } from '../components/output';
|
||||
import { QueryVariable } from '../components/query-variable';
|
||||
import { DynamicOutput } from './dynamic-output';
|
||||
@ -39,12 +40,7 @@ function IterationForm({ node }: INextOperatorForm) {
|
||||
|
||||
return (
|
||||
<Form {...form}>
|
||||
<form
|
||||
className="space-y-6 p-4"
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
}}
|
||||
>
|
||||
<FormWrapper>
|
||||
<FormContainer>
|
||||
<QueryVariable
|
||||
name="items_ref"
|
||||
@ -53,7 +49,7 @@ function IterationForm({ node }: INextOperatorForm) {
|
||||
</FormContainer>
|
||||
<DynamicOutput node={node}></DynamicOutput>
|
||||
<Output list={outputList}></Output>
|
||||
</form>
|
||||
</FormWrapper>
|
||||
</Form>
|
||||
);
|
||||
}
|
||||
|
||||
@ -15,6 +15,7 @@ import { useFieldArray, useForm } from 'react-hook-form';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { z } from 'zod';
|
||||
import { INextOperatorForm } from '../../interface';
|
||||
import { FormWrapper } from '../components/form-wrapper';
|
||||
import { PromptEditor } from '../components/prompt-editor';
|
||||
import { useValues } from './use-values';
|
||||
import { useWatchFormChange } from './use-watch-change';
|
||||
@ -48,13 +49,7 @@ function MessageForm({ node }: INextOperatorForm) {
|
||||
|
||||
return (
|
||||
<Form {...form}>
|
||||
<form
|
||||
className="space-y-5 px-5 "
|
||||
autoComplete="off"
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
}}
|
||||
>
|
||||
<FormWrapper>
|
||||
<FormContainer>
|
||||
<FormItem>
|
||||
<FormLabel tooltip={t('flow.msgTip')}>{t('flow.msg')}</FormLabel>
|
||||
@ -98,7 +93,7 @@ function MessageForm({ node }: INextOperatorForm) {
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
</FormContainer>
|
||||
</form>
|
||||
</FormWrapper>
|
||||
</Form>
|
||||
);
|
||||
}
|
||||
|
||||
@ -20,6 +20,7 @@ import {
|
||||
initialStringTransformValues,
|
||||
} from '../../constant';
|
||||
import { INextOperatorForm } from '../../interface';
|
||||
import { FormWrapper } from '../components/form-wrapper';
|
||||
import { Output, transferOutputs } from '../components/output';
|
||||
import { PromptEditor } from '../components/prompt-editor';
|
||||
import { QueryVariable } from '../components/query-variable';
|
||||
@ -76,13 +77,7 @@ function StringTransformForm({ node }: INextOperatorForm) {
|
||||
|
||||
return (
|
||||
<Form {...form}>
|
||||
<form
|
||||
className="space-y-5 px-5 "
|
||||
autoComplete="off"
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
}}
|
||||
>
|
||||
<FormWrapper>
|
||||
<FormContainer>
|
||||
<FormField
|
||||
control={form.control}
|
||||
@ -157,7 +152,7 @@ function StringTransformForm({ node }: INextOperatorForm) {
|
||||
render={() => <div></div>}
|
||||
/>
|
||||
</FormContainer>
|
||||
</form>
|
||||
</FormWrapper>
|
||||
<div className="p-5">
|
||||
<Output list={outputList}></Output>
|
||||
</div>
|
||||
|
||||
@ -28,6 +28,7 @@ import {
|
||||
} from '../../constant';
|
||||
import { useBuildQueryVariableOptions } from '../../hooks/use-get-begin-query';
|
||||
import { IOperatorForm } from '../../interface';
|
||||
import { FormWrapper } from '../components/form-wrapper';
|
||||
import { useValues } from './use-values';
|
||||
import { useWatchFormChange } from './use-watch-change';
|
||||
|
||||
@ -249,12 +250,7 @@ function SwitchForm({ node }: IOperatorForm) {
|
||||
|
||||
return (
|
||||
<Form {...form}>
|
||||
<form
|
||||
className="space-y-6 p-5 "
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
}}
|
||||
>
|
||||
<FormWrapper>
|
||||
{fields.map((field, index) => {
|
||||
const name = `${ConditionKey}.${index}`;
|
||||
const conditions: Array<any> = form.getValues(`${name}.${ItemKey}`);
|
||||
@ -323,7 +319,7 @@ function SwitchForm({ node }: IOperatorForm) {
|
||||
>
|
||||
Add
|
||||
</BlockButton>
|
||||
</form>
|
||||
</FormWrapper>
|
||||
</Form>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,24 +0,0 @@
import { Form } from 'antd';
import { useTranslation } from 'react-i18next';
import { IOperatorForm } from '../../interface';
import { PromptEditor } from '../components/prompt-editor';

const TemplateForm = ({ onValuesChange, form }: IOperatorForm) => {
  const { t } = useTranslation();

  return (
    <Form
      name="basic"
      autoComplete="off"
      form={form}
      onValuesChange={onValuesChange}
      layout={'vertical'}
    >
      <Form.Item name={['content']} label={t('flow.content')}>
        <PromptEditor></PromptEditor>
      </Form.Item>
    </Form>
  );
};

export default TemplateForm;
@@ -11,6 +11,7 @@ import { zodResolver } from '@hookform/resolvers/zod';
import { useForm } from 'react-hook-form';
import { z } from 'zod';
import { DescriptionField } from '../../components/description-field';
import { FormWrapper } from '../../components/form-wrapper';
import {
  EmptyResponseField,
  RetrievalPartialSchema,
@@ -35,12 +36,7 @@ const RetrievalForm = () => {

  return (
    <Form {...form}>
      <form
        className="space-y-6 p-4"
        onSubmit={(e) => {
          e.preventDefault();
        }}
      >
      <FormWrapper>
        <FormContainer>
          <DescriptionField></DescriptionField>
          <KnowledgeBaseFormField showVariable></KnowledgeBaseFormField>
@@ -58,7 +54,7 @@ const RetrievalForm = () => {
          <UseKnowledgeGraphFormField name="use_kg"></UseKnowledgeGraphFormField>
        </FormContainer>
      </Collapse>
      </form>
      </FormWrapper>
    </Form>
  );
};

@@ -1,6 +1,7 @@
import {
  Connection,
  Edge,
  getOutgoers,
  Node,
  Position,
  ReactFlowInstance,
@@ -15,9 +16,6 @@ import { get, lowerFirst, omit } from 'lodash';
import { UseFormReturn } from 'react-hook-form';
import { useTranslation } from 'react-i18next';
import {
  NodeMap,
  Operator,
  RestrictedUpstreamMap,
  initialAgentValues,
  initialAkShareValues,
  initialArXivValues,
@@ -33,7 +31,6 @@ import {
  initialDuckValues,
  initialEmailValues,
  initialExeSqlValues,
  initialGenerateValues,
  initialGithubValues,
  initialGoogleScholarValues,
  initialGoogleValues,
@@ -48,15 +45,19 @@ import {
  initialRelevantValues,
  initialRetrievalValues,
  initialRewriteQuestionValues,
  initialStringTransformValues,
  initialSwitchValues,
  initialTavilyExtractValues,
  initialTavilyValues,
  initialTemplateValues,
  initialTuShareValues,
  initialUserFillUpValues,
  initialWaitingDialogueValues,
  initialWenCaiValues,
  initialWikipediaValues,
  initialYahooFinanceValues,
  NodeMap,
  Operator,
  RestrictedUpstreamMap,
} from './constant';
import useGraphStore, { RFState } from './store';
import {
@@ -92,8 +93,6 @@ export const useInitializeOperatorParams = () => {
  return {
    [Operator.Begin]: initialBeginValues,
    [Operator.Retrieval]: initialRetrievalValues,
    [Operator.Generate]: { ...initialGenerateValues, llm_id: llmId },
    [Operator.Answer]: {},
    [Operator.Categorize]: { ...initialCategorizeValues, llm_id: llmId },
    [Operator.Relevant]: { ...initialRelevantValues, llm_id: llmId },
    [Operator.RewriteQuestion]: {
@@ -128,7 +127,6 @@ export const useInitializeOperatorParams = () => {
    [Operator.Note]: initialNoteValues,
    [Operator.Crawler]: initialCrawlerValues,
    [Operator.Invoke]: initialInvokeValues,
    [Operator.Template]: initialTemplateValues,
    [Operator.Email]: initialEmailValues,
    [Operator.Iteration]: initialIterationValues,
    [Operator.IterationStart]: initialIterationValues,
@@ -137,6 +135,9 @@ export const useInitializeOperatorParams = () => {
    [Operator.Agent]: { ...initialAgentValues, llm_id: llmId },
    [Operator.TavilySearch]: initialTavilyValues,
    [Operator.TavilyExtract]: initialTavilyExtractValues,
    [Operator.Tool]: {},
    [Operator.UserFillUp]: initialUserFillUpValues,
    [Operator.StringTransform]: initialStringTransformValues,
  };
}, [llmId]);

@@ -333,9 +334,8 @@ export const useHandleFormValuesChange = (
};

export const useValidateConnection = () => {
  const { getOperatorTypeFromId, getParentIdById } = useGraphStore(
    (state) => state,
  );
  const { getOperatorTypeFromId, getParentIdById, edges, nodes } =
    useGraphStore((state) => state);

  const isSameNodeChild = useCallback(
    (connection: Connection | Edge) => {
@@ -349,6 +349,27 @@ export const useValidateConnection = () => {
    [getParentIdById],
  );

  const hasCanvasCycle = useCallback(
    (connection: Connection | Edge) => {
      const target = nodes.find((node) => node.id === connection.target);
      const hasCycle = (node: RAGFlowNodeType, visited = new Set()) => {
        if (visited.has(node.id)) return false;

        visited.add(node.id);

        for (const outgoer of getOutgoers(node, nodes, edges)) {
          if (outgoer.id === connection.source) return true;
          if (hasCycle(outgoer, visited)) return true;
        }
      };

      if (target?.id === connection.source) return false;

      return target ? !hasCycle(target) : false;
    },
    [edges, nodes],
  );

  // restricted lines cannot be connected successfully.
  const isValidConnection = useCallback(
    (connection: Connection | Edge) => {
@@ -365,10 +386,11 @@ export const useValidateConnection = () => {
        RestrictedUpstreamMap[
          getOperatorTypeFromId(connection.source) as Operator
        ]?.every((x) => x !== getOperatorTypeFromId(connection.target)) &&
        isSameNodeChild(connection);
        isSameNodeChild(connection) &&
        hasCanvasCycle(connection);
      return ret;
    },
    [getOperatorTypeFromId, isSameNodeChild],
    [getOperatorTypeFromId, hasCanvasCycle, isSameNodeChild],
  );

  return isValidConnection;

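For context, the connection guard added above can be read as a standalone helper. The sketch below restates the same depth-first check in isolation; it assumes the '@xyflow/react' package's getOutgoers(node, nodes, edges) helper and its generic Node/Edge/Connection types (the diff's actual import source and RAGFlowNodeType are not visible here), so treat it as an illustration rather than the project's exact code.

// Hypothetical standalone version of the cycle guard used by useValidateConnection.
// Returns true when adding `connection` would NOT close a loop in the canvas graph.
import { getOutgoers, type Connection, type Edge, type Node } from '@xyflow/react'; // assumed package

export function connectionKeepsGraphAcyclic(
  connection: Connection,
  nodes: Node[],
  edges: Edge[],
): boolean {
  const target = nodes.find((node) => node.id === connection.target);
  // A missing target or a direct self-loop is rejected outright.
  if (!target || target.id === connection.source) return false;

  const visited = new Set<string>();
  const reachesSource = (node: Node): boolean => {
    if (visited.has(node.id)) return false; // this branch was already explored
    visited.add(node.id);
    // Walk outgoing edges; if any path leads back to the source, the new edge would form a cycle.
    return getOutgoers(node, nodes, edges).some(
      (outgoer) => outgoer.id === connection.source || reachesSource(outgoer),
    );
  };

  return !reachesSource(target);
}
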
@@ -23,7 +23,6 @@ import {
  initialDuckValues,
  initialEmailValues,
  initialExeSqlValues,
  initialGenerateValues,
  initialGithubValues,
  initialGoogleScholarValues,
  initialGoogleValues,
@@ -43,7 +42,6 @@ import {
  initialSwitchValues,
  initialTavilyExtractValues,
  initialTavilyValues,
  initialTemplateValues,
  initialTuShareValues,
  initialUserFillUpValues,
  initialWaitingDialogueValues,
@@ -70,8 +68,6 @@ export const useInitializeOperatorParams = () => {
  return {
    [Operator.Begin]: initialBeginValues,
    [Operator.Retrieval]: initialRetrievalValues,
    [Operator.Generate]: { ...initialGenerateValues, llm_id: llmId },
    [Operator.Answer]: {},
    [Operator.Categorize]: { ...initialCategorizeValues, llm_id: llmId },
    [Operator.Relevant]: { ...initialRelevantValues, llm_id: llmId },
    [Operator.RewriteQuestion]: {
@@ -106,7 +102,6 @@ export const useInitializeOperatorParams = () => {
    [Operator.Note]: initialNoteValues,
    [Operator.Crawler]: initialCrawlerValues,
    [Operator.Invoke]: initialInvokeValues,
    [Operator.Template]: initialTemplateValues,
    [Operator.Email]: initialEmailValues,
    [Operator.Iteration]: initialIterationValues,
    [Operator.IterationStart]: initialIterationStartValues,

@@ -10,7 +10,7 @@ import { useCallback } from 'react';
import { AgentCard } from './agent-card';
import { useRenameAgent } from './use-rename-agent';

export default function Agent() {
export default function Agents() {
  const { data, pagination, setPagination, searchString, handleInputChange } =
    useFetchAgentListByPage();
  const { navigateToAgentTemplates } = useNavigatePage();

@@ -53,7 +53,7 @@ export default function Datasets() {
  );

  return (
    <section className="py-4 text-foreground">
    <section className="py-4 flex-1 flex flex-col">
      <ListFilterBar
        title={t('header.knowledgeBase')}
        searchString={searchString}
@@ -69,16 +69,18 @@ export default function Datasets() {
          {t('knowledgeList.createKnowledgeBase')}
        </Button>
      </ListFilterBar>
      <div className="flex flex-wrap gap-4 max-h-[78vh] overflow-auto px-8">
        {kbs.map((dataset) => {
          return (
            <DatasetCard
              dataset={dataset}
              key={dataset.id}
              showDatasetRenameModal={showDatasetRenameModal}
            ></DatasetCard>
          );
        })}
      <div className="flex-1">
        <div className="flex flex-wrap gap-4 max-h-[78vh] overflow-auto px-8">
          {kbs.map((dataset) => {
            return (
              <DatasetCard
                dataset={dataset}
                key={dataset.id}
                showDatasetRenameModal={showDatasetRenameModal}
              ></DatasetCard>
            );
          })}
        </div>
      </div>
      <div className="mt-8 px-8">
        <RAGFlowPagination

@@ -1,52 +1,45 @@
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar';
import { Button } from '@/components/ui/button';
import { MoreButton } from '@/components/more-button';
import { RAGFlowAvatar } from '@/components/ragflow-avatar';
import { Card, CardContent } from '@/components/ui/card';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { IDialog } from '@/interfaces/database/chat';
import { formatPureDate } from '@/utils/date';
import { ChevronRight, Trash2 } from 'lucide-react';
import { formatDate } from '@/utils/date';
import { ChatDropdown } from './chat-dropdown';
import { useRenameChat } from './hooks/use-rename-chat';

interface IProps {
export type IProps = {
  data: IDialog;
}
} & Pick<ReturnType<typeof useRenameChat>, 'showChatRenameModal'>;

export function ChatCard({ data }: IProps) {
export function ChatCard({ data, showChatRenameModal }: IProps) {
  const { navigateToChat } = useNavigatePage();

  return (
    <Card className="bg-colors-background-inverse-weak border-colors-outline-neutral-standard">
      <CardContent className="p-4">
        <div className="flex justify-between mb-4">
          {data.icon ? (
            <div
              className="w-[70px] h-[70px] rounded-xl bg-cover"
              style={{ backgroundImage: `url(${data.icon})` }}
            />
          ) : (
            <Avatar className="w-[70px] h-[70px]">
              <AvatarImage src="https://github.com/shadcn.png" />
              <AvatarFallback>CN</AvatarFallback>
            </Avatar>
          )}
        </div>
        <h3 className="text-xl font-bold mb-2">{data.name}</h3>
        <p>An app that does things An app that does things</p>
        <section className="flex justify-between pt-3">
          <div>
            Search app
            <p className="text-sm opacity-80">
              {formatPureDate(data.update_time)}
    <Card key={data.id} className="w-40" onClick={navigateToChat(data.id)}>
      <CardContent className="p-2.5 pt-2 group">
        <section className="flex justify-between mb-2">
          <div className="flex gap-2 items-center">
            <RAGFlowAvatar
              className="size-6 rounded-lg"
              avatar={data.icon}
              name={data.name || 'CN'}
            ></RAGFlowAvatar>
          </div>
          <ChatDropdown chat={data} showChatRenameModal={showChatRenameModal}>
            <MoreButton></MoreButton>
          </ChatDropdown>
        </section>
        <div className="flex justify-between items-end">
          <div className="w-full">
            <h3 className="text-lg font-semibold mb-2 line-clamp-1">
              {data.name}
            </h3>
            <p className="text-xs text-text-sub-title">{data.description}</p>
            <p className="text-xs text-text-sub-title">
              {formatDate(data.update_time)}
            </p>
          </div>
          <div className="space-x-2">
            <Button variant="icon" size="icon" onClick={navigateToChat}>
              <ChevronRight className="h-6 w-6" />
            </Button>
            <Button variant="icon" size="icon">
              <Trash2 />
            </Button>
          </div>
        </section>
        </div>
      </CardContent>
    </Card>
  );

web/src/pages/next-chats/chat-dropdown.tsx (new file, 64 lines)
@@ -0,0 +1,64 @@
import { ConfirmDeleteDialog } from '@/components/confirm-delete-dialog';
import {
  DropdownMenu,
  DropdownMenuContent,
  DropdownMenuItem,
  DropdownMenuSeparator,
  DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu';
import { useRemoveDialog } from '@/hooks/use-chat-request';
import { IDialog } from '@/interfaces/database/chat';
import { PenLine, Trash2 } from 'lucide-react';
import { MouseEventHandler, PropsWithChildren, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import { useRenameChat } from './hooks/use-rename-chat';

export function ChatDropdown({
  children,
  showChatRenameModal,
  chat,
}: PropsWithChildren &
  Pick<ReturnType<typeof useRenameChat>, 'showChatRenameModal'> & {
    chat: IDialog;
  }) {
  const { t } = useTranslation();
  const { removeDialog } = useRemoveDialog();

  const handleShowChatRenameModal: MouseEventHandler<HTMLDivElement> =
    useCallback(
      (e) => {
        e.stopPropagation();
        showChatRenameModal(chat);
      },
      [chat, showChatRenameModal],
    );

  const handleDelete: MouseEventHandler<HTMLDivElement> = useCallback(() => {
    removeDialog([chat.id]);
  }, [chat.id, removeDialog]);

  return (
    <DropdownMenu>
      <DropdownMenuTrigger asChild>{children}</DropdownMenuTrigger>
      <DropdownMenuContent>
        <DropdownMenuItem onClick={handleShowChatRenameModal}>
          {t('common.rename')} <PenLine />
        </DropdownMenuItem>
        <DropdownMenuSeparator />
        <ConfirmDeleteDialog onOk={handleDelete}>
          <DropdownMenuItem
            className="text-text-delete-red"
            onSelect={(e) => {
              e.preventDefault();
            }}
            onClick={(e) => {
              e.stopPropagation();
            }}
          >
            {t('common.delete')} <Trash2 />
          </DropdownMenuItem>
        </ConfirmDeleteDialog>
      </DropdownMenuContent>
    </DropdownMenu>
  );
}
web/src/pages/next-chats/hooks/use-rename-chat.ts (new file, 45 lines)
@@ -0,0 +1,45 @@
import { useSetModalState } from '@/hooks/common-hooks';
import { useSetDialog } from '@/hooks/use-chat-request';
import { IDialog } from '@/interfaces/database/chat';
import { useCallback, useState } from 'react';

export const useRenameChat = () => {
  const [chat, setChat] = useState<IDialog>({} as IDialog);
  const {
    visible: chatRenameVisible,
    hideModal: hideChatRenameModal,
    showModal: showChatRenameModal,
  } = useSetModalState();
  const { setDialog, loading } = useSetDialog();

  const onChatRenameOk = useCallback(
    async (name: string) => {
      const ret = await setDialog({
        ...chat,
        name,
      });

      if (ret === 0) {
        hideChatRenameModal();
      }
    },
    [setDialog, chat, hideChatRenameModal],
  );

  const handleShowChatRenameModal = useCallback(
    async (record: IDialog) => {
      setChat(record);
      showChatRenameModal();
    },
    [showChatRenameModal],
  );

  return {
    chatRenameLoading: loading,
    initialChatName: chat?.name,
    onChatRenameOk,
    chatRenameVisible,
    hideChatRenameModal,
    showChatRenameModal: handleShowChatRenameModal,
  };
};
@@ -1,25 +1,72 @@
import ListFilterBar from '@/components/list-filter-bar';
import { RenameDialog } from '@/components/rename-dialog';
import { Button } from '@/components/ui/button';
import { useFetchChatAppList } from '@/hooks/chat-hooks';
import { RAGFlowPagination } from '@/components/ui/ragflow-pagination';
import { useFetchDialogList } from '@/hooks/use-chat-request';
import { pick } from 'lodash';
import { Plus } from 'lucide-react';
import { useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import { ChatCard } from './chat-card';
import { useRenameChat } from './hooks/use-rename-chat';

export default function ChatList() {
  const { data: chatList } = useFetchChatAppList();
  const { data: chatList, setPagination, pagination } = useFetchDialogList();
  const { t } = useTranslation();
  const {
    initialChatName,
    chatRenameVisible,
    showChatRenameModal,
    hideChatRenameModal,
    onChatRenameOk,
    chatRenameLoading,
  } = useRenameChat();

  const handlePageChange = useCallback(
    (page: number, pageSize?: number) => {
      setPagination({ page, pageSize });
    },
    [setPagination],
  );

  return (
    <section className="p-8">
      <ListFilterBar title="Chat apps">
        <Button variant={'tertiary'} size={'sm'}>
          <Plus className="mr-2 h-4 w-4" />
          Create app
        </Button>
      </ListFilterBar>
      <div className="grid gap-6 sm:grid-cols-1 md:grid-cols-2 lg:grid-cols-4 xl:grid-cols-6 2xl:grid-cols-8">
        {chatList.map((x) => {
          return <ChatCard key={x.id} data={x}></ChatCard>;
        })}
    <section className="flex flex-col w-full flex-1">
      <div className="px-8 pt-8">
        <ListFilterBar title="Chat apps">
          <Button>
            <Plus className="size-2.5" />
            {t('chat.createChat')}
          </Button>
        </ListFilterBar>
      </div>
      <div className="flex-1 overflow-auto">
        <div className="flex flex-wrap gap-4 px-8">
          {chatList.map((x) => {
            return (
              <ChatCard
                key={x.id}
                data={x}
                showChatRenameModal={showChatRenameModal}
              ></ChatCard>
            );
          })}
        </div>
      </div>
      <div className="mt-8 px-8 pb-8">
        <RAGFlowPagination
          {...pick(pagination, 'current', 'pageSize')}
          total={pagination.total}
          onChange={handlePageChange}
        ></RAGFlowPagination>
      </div>
      {chatRenameVisible && (
        <RenameDialog
          hideModal={hideChatRenameModal}
          onOk={onChatRenameOk}
          initialName={initialChatName}
          loading={chatRenameLoading}
        ></RenameDialog>
      )}
    </section>
  );
}

@@ -214,7 +214,7 @@ const routes = [
    ],
  },
  {
    path: Routes.Chat,
    path: Routes.Chat + '/:id',
    layout: false,
    component: `@/pages${Routes.Chats}/chat`,
  },

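Since Routes.Chat now carries an ':id' segment, the chat page is expected to read the dialog id from the URL. A minimal sketch, assuming the project's Umi/react-router setup exposes useParams (not shown in this diff) and that the segment is named id as above; the helper name is hypothetical:

// Hypothetical helper: reads the dialog id from the /chat/:id route.
import { useParams } from 'umi'; // assumed import source for the router hook

export function useChatRouteId() {
  const { id } = useParams(); // `id` matches the ':id' segment appended to Routes.Chat
  return id;
}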