Mirror of https://github.com/infiniflow/ragflow.git, synced 2026-01-04 03:25:30 +08:00
Compare commits: v0.20.1...da5cef0686 (40 commits)
| SHA1 |
|---|
| da5cef0686 |
| 9098efb8aa |
| 421657f64b |
| 7ee5e0d152 |
| 22915223d4 |
| d7b4e84cda |
| e845d5f9f8 |
| 3d18284dd6 |
| 96783aa82c |
| a0c2da1219 |
| 79e2edc835 |
| 57b87fa9d9 |
| 153e430b00 |
| 3ccaa06031 |
| 569ab011c4 |
| 96b1538b3e |
| 735570486f |
| da68f541b6 |
| 83771e500c |
| a6d2119498 |
| 57b9f8cf52 |
| 5c3577c4c9 |
| 76118000c1 |
| 9433f64fe2 |
| d7c9611d45 |
| 79399f7f25 |
| 23522f1ea8 |
| 46dc3f1c48 |
| c9b156fa6d |
| 83939b1a63 |
| 7f08ba47d7 |
| ce3dd019c3 |
| 476c56868d |
| b9c4954c2f |
| a060672b31 |
| f022504ef9 |
| 1a78b8b295 |
| 017dd85ccf |
| 4c7b2ef46e |
| 597d88bf9a |
@@ -165,7 +165,7 @@ class Agent(LLM, ToolBase):
        _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
        use_tools = []
        ans = ""
-       for delta_ans, tk in self._react_with_tools_streamly(msg, use_tools):
+       for delta_ans, tk in self._react_with_tools_streamly(prompt, msg, use_tools):
            ans += delta_ans

        if ans.find("**ERROR**") >= 0:
@@ -185,7 +185,7 @@ class Agent(LLM, ToolBase):
        _, msg = message_fit_in([{"role": "system", "content": prompt}, *msg], int(self.chat_mdl.max_length * 0.97))
        answer_without_toolcall = ""
        use_tools = []
-       for delta_ans,_ in self._react_with_tools_streamly(msg, use_tools):
+       for delta_ans,_ in self._react_with_tools_streamly(prompt, msg, use_tools):
            if delta_ans.find("**ERROR**") >= 0:
                if self.get_exception_default_value():
                    self.set_output("content", self.get_exception_default_value())
@@ -208,7 +208,7 @@ class Agent(LLM, ToolBase):
        ]):
            yield delta_ans

-   def _react_with_tools_streamly(self, history: list[dict], use_tools):
+   def _react_with_tools_streamly(self, prompt, history: list[dict], use_tools):
        token_count = 0
        tool_metas = self.tool_meta
        hist = deepcopy(history)
@@ -221,7 +221,7 @@ class Agent(LLM, ToolBase):

        def use_tool(name, args):
            nonlocal hist, use_tools, token_count,last_calling,user_request
-           print(f"{last_calling=} == {name=}", )
+           logging.info(f"{last_calling=} == {name=}")
            # Summarize of function calling
            #if all([
            #    isinstance(self.toolcall_session.get_tool_obj(name), Agent),
@@ -275,7 +275,7 @@ class Agent(LLM, ToolBase):
        else:
            hist.append({"role": "user", "content": content})

-       task_desc = analyze_task(self.chat_mdl, user_request, tool_metas)
+       task_desc = analyze_task(self.chat_mdl, prompt, user_request, tool_metas)
        self.callback("analyze_task", {}, task_desc)
        for _ in range(self._param.max_rounds + 1):
            response, tk = next_step(self.chat_mdl, hist, tool_metas, task_desc)
@@ -39,7 +39,10 @@ class Begin(UserFillUp):
    def _invoke(self, **kwargs):
        for k, v in kwargs.get("inputs", {}).items():
            if isinstance(v, dict) and v.get("type", "").lower().find("file") >=0:
-               v = self._canvas.get_files([v["value"]])
+               if v.get("optional") and v.get("value", None) is None:
+                   v = None
+               else:
+                   v = self._canvas.get_files([v["value"]])
            else:
                v = v.get("value")
            self.set_output(k, v)
@@ -57,7 +57,7 @@ class Invoke(ComponentBase, ABC):
    def _invoke(self, **kwargs):
        args = {}
        for para in self._param.variables:
-           if para.get("value") is not None:
+           if para.get("value"):
                args[para["key"]] = para["value"]
            else:
                args[para["key"]] = self._canvas.get_variable_value(para["ref"])
@@ -139,4 +139,4 @@ class Invoke(ComponentBase, ABC):
        assert False, self.output()

    def thoughts(self) -> str:
-       return "Waiting for the server respond..."
+       return "Waiting for the server respond..."
@@ -17,7 +17,7 @@ import json
import logging
import os
import re
-from typing import Any
+from typing import Any, Generator

import json_repair
from copy import deepcopy
@@ -154,7 +154,7 @@ class LLM(ComponentBase):
-           return self.chat_mdl.chat(msg[0]["content"], msg[1:], self._param.gen_conf(), **kwargs)
+           return self.chat_mdl.chat(msg[0]["content"], msg[1:], self._param.gen_conf(), images=self.imgs, **kwargs)

-   def _generate_streamly(self, msg:list[dict], **kwargs) -> str:
+   def _generate_streamly(self, msg:list[dict], **kwargs) -> Generator[str, None, None]:
        ans = ""
        last_idx = 0
        endswith_think = False
@@ -17,7 +17,7 @@ import base64
import logging
import os
from abc import ABC
-from enum import StrEnum
+from strenum import StrEnum
from typing import Optional
from pydantic import BaseModel, Field, field_validator
from agent.tools.base import ToolParamBase, ToolBase, ToolMeta
@@ -14,6 +14,7 @@
# limitations under the License.
#
import os
+import re
from abc import ABC
import pandas as pd
import pymysql
@@ -109,7 +110,7 @@ class ExeSQL(ToolBase, ABC):
            single_sql = single_sql.replace('```','')
            if not single_sql:
                continue

+           single_sql = re.sub(r"\[ID:[0-9]+\]", "", single_sql)
            cursor.execute(single_sql)
            if cursor.rowcount == 0:
                sql_res.append({"content": "No record in the database!"})
@@ -90,7 +90,7 @@ def save():
                data=False, message='Only owner of canvas authorized for this operation.',
                code=RetCode.OPERATING_ERROR)
        UserCanvasService.update_by_id(req["id"], req)
-       # save version
+       # save version
        UserCanvasVersionService.insert( user_canvas_id=req["id"], dsl=req["dsl"], title="{0}_{1}".format(req["title"], time.strftime("%Y_%m_%d_%H_%M_%S")))
        UserCanvasVersionService.delete_all_versions(req["id"])
        return get_json_result(data=req)
@@ -347,7 +347,7 @@ def test_db_connect():
        if req["db_type"] != 'mssql':
            db.connect()
        db.close()

        return get_json_result(data="Database Connection Successful!")
    except Exception as e:
        return server_error_response(e)
@@ -369,7 +369,7 @@ def getlistversion(canvas_id):
@login_required
def getversion( version_id):
    try:

        e, version = UserCanvasVersionService.get_by_id(version_id)
        if version:
            return get_json_result(data=version.to_dict())
@@ -379,7 +379,7 @@ def getversion( version_id):

@manager.route('/listteam', methods=['GET'])  # noqa: F821
@login_required
-def list_kbs():
+def list_canvas():
    keywords = request.args.get("keywords", "")
    page_number = int(request.args.get("page", 1))
    items_per_page = int(request.args.get("page_size", 150))
@@ -387,10 +387,10 @@ def list_kbs():
    desc = request.args.get("desc", True)
    try:
        tenants = TenantService.get_joined_tenants_by_user_id(current_user.id)
-       kbs, total = UserCanvasService.get_by_tenant_ids(
+       canvas, total = UserCanvasService.get_by_tenant_ids(
            [m["tenant_id"] for m in tenants], current_user.id, page_number,
            items_per_page, orderby, desc, keywords)
-       return get_json_result(data={"kbs": kbs, "total": total})
+       return get_json_result(data={"canvas": canvas, "total": total})
    except Exception as e:
        return server_error_response(e)
@@ -67,7 +67,7 @@ def set_conversation():
        if not e:
            return get_data_error_result(message="Dialog not found")
        conv = {"id": conv_id, "dialog_id": req["dialog_id"], "name": name, "message": [{"role": "assistant", "content": dia.prompt_config["prologue"]}],"user_id": current_user.id,
-           "reference":[{}],}
+           "reference":[],}
        ConversationService.save(**conv)
        return get_json_result(data=conv)
    except Exception as e:
@@ -187,14 +187,7 @@ def completion():

-   if not conv.reference:
-       conv.reference = []
-   else:
-       for ref in conv.reference:
-           if isinstance(ref, list):
-               continue
-           ref["chunks"] = chunks_format(ref)

+   if not conv.reference:
+       conv.reference = []
+   conv.reference = [r for r in conv.reference if r]
    conv.reference.append({"chunks": [], "doc_aggs": []})

    def stream():
@@ -51,6 +51,7 @@ def set_dialog():
    similarity_threshold = req.get("similarity_threshold", 0.1)
    vector_similarity_weight = req.get("vector_similarity_weight", 0.3)
    llm_setting = req.get("llm_setting", {})
+   meta_data_filter = req.get("meta_data_filter", {})
    prompt_config = req["prompt_config"]

    if not is_create:
@@ -85,6 +86,7 @@ def set_dialog():
        "llm_id": llm_id,
        "llm_setting": llm_setting,
        "prompt_config": prompt_config,
+       "meta_data_filter": meta_data_filter,
        "top_n": top_n,
        "top_k": top_k,
        "rerank_id": rerank_id,
@@ -681,6 +681,11 @@ def set_meta():
        return get_json_result(data=False, message="No authorization.", code=settings.RetCode.AUTHENTICATION_ERROR)
    try:
        meta = json.loads(req["meta"])
+       if not isinstance(meta, dict):
+           return get_json_result(data=False, message="Only dictionary type supported.", code=settings.RetCode.ARGUMENT_ERROR)
+       for k,v in meta.items():
+           if not isinstance(v, str) and not isinstance(v, int) and not isinstance(v, float):
+               return get_json_result(data=False, message=f"The type is not supported: {v}", code=settings.RetCode.ARGUMENT_ERROR)
    except Exception as e:
        return get_json_result(data=False, message=f"Json syntax error: {e}", code=settings.RetCode.ARGUMENT_ERROR)
-   if not isinstance(meta, dict):
@@ -351,6 +351,7 @@ def knowledge_graph(kb_id):
    obj["graph"]["edges"] = sorted(filtered_edges, key=lambda x: x.get("weight", 0), reverse=True)[:128]
    return get_json_result(data=obj)


@manager.route('/<kb_id>/knowledge_graph', methods=['DELETE'])  # noqa: F821
@login_required
def delete_knowledge_graph(kb_id):
@@ -364,3 +365,17 @@ def delete_knowledge_graph(kb_id):
    settings.docStoreConn.delete({"knowledge_graph_kwd": ["graph", "subgraph", "entity", "relation"]}, search.index_name(kb.tenant_id), kb_id)

    return get_json_result(data=True)
+
+
+@manager.route("/get_meta", methods=["GET"])  # noqa: F821
+@login_required
+def get_meta():
+    kb_ids = request.args.get("kb_ids", "").split(",")
+    for kb_id in kb_ids:
+        if not KnowledgebaseService.accessible(kb_id, current_user.id):
+            return get_json_result(
+                data=False,
+                message='No authorization.',
+                code=settings.RetCode.AUTHENTICATION_ERROR
+            )
+    return get_json_result(data=DocumentService.get_meta_by_kbs(kb_ids))
@@ -57,6 +57,7 @@ def set_api_key():
    # test if api key works
    chat_passed, embd_passed, rerank_passed = False, False, False
    factory = req["llm_factory"]
+   extra = {"provider": factory}
    msg = ""
    for llm in LLMService.query(fid=factory):
        if not embd_passed and llm.model_type == LLMType.EMBEDDING.value:
@@ -73,7 +74,7 @@ def set_api_key():
        elif not chat_passed and llm.model_type == LLMType.CHAT.value:
            assert factory in ChatModel, f"Chat model from {factory} is not supported yet."
            mdl = ChatModel[factory](
-               req["api_key"], llm.llm_name, base_url=req.get("base_url"))
+               req["api_key"], llm.llm_name, base_url=req.get("base_url"), **extra)
            try:
                m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}],
                    {"temperature": 0.9, 'max_tokens': 50})
@@ -204,6 +205,7 @@ def add_llm():

    msg = ""
    mdl_nm = llm["llm_name"].split("___")[0]
+   extra = {"provider": factory}
    if llm["model_type"] == LLMType.EMBEDDING.value:
        assert factory in EmbeddingModel, f"Embedding model from {factory} is not supported yet."
        mdl = EmbeddingModel[factory](
@@ -221,7 +223,8 @@ def add_llm():
        mdl = ChatModel[factory](
            key=llm['api_key'],
            model_name=mdl_nm,
-           base_url=llm["api_base"]
+           base_url=llm["api_base"],
+           **extra,
        )
        try:
            m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}], {
@@ -312,12 +315,12 @@ def delete_factory():
def my_llms():
    try:
        include_details = request.args.get('include_details', 'false').lower() == 'true'

        if include_details:
            res = {}
            objs = TenantLLMService.query(tenant_id=current_user.id)
            factories = LLMFactoriesService.query(status=StatusEnum.VALID.value)

            for o in objs:
                o_dict = o.to_dict()
                factory_tags = None
@@ -325,13 +328,13 @@ def my_llms():
                    if f.name == o_dict["llm_factory"]:
                        factory_tags = f.tags
                        break

                if o_dict["llm_factory"] not in res:
                    res[o_dict["llm_factory"]] = {
                        "tags": factory_tags,
                        "llm": []
                    }

                res[o_dict["llm_factory"]]["llm"].append({
                    "type": o_dict["model_type"],
                    "name": o_dict["llm_name"],
@@ -352,7 +355,7 @@ def my_llms():
                    "name": o["llm_name"],
                    "used_token": o["used_tokens"]
                })

        return get_json_result(data=res)
    except Exception as e:
        return server_error_response(e)
@@ -139,7 +139,7 @@ def create(tenant_id):
    res["llm"] = res.pop("llm_setting")
    res["llm"]["model_name"] = res.pop("llm_id")
    del res["kb_ids"]
-   res["dataset_ids"] = req["dataset_ids"]
+   res["dataset_ids"] = req.get("dataset_ids", [])
    res["avatar"] = res.pop("icon")
    return get_result(data=res)
@@ -589,14 +589,22 @@ def list_agent_session(tenant_id, agent_id):
        if "prompt" in info:
            info.pop("prompt")
        conv["agent_id"] = conv.pop("dialog_id")
+       # Fix for session listing endpoint
        if conv["reference"]:
            messages = conv["messages"]
            message_num = 0
            chunk_num = 0
+           # Ensure reference is a list type to prevent KeyError
+           if not isinstance(conv["reference"], list):
+               conv["reference"] = []
            while message_num < len(messages):
                if message_num != 0 and messages[message_num]["role"] != "user":
                    chunk_list = []
-                   if "chunks" in conv["reference"][chunk_num]:
+                   # Add boundary and type checks to prevent KeyError
+                   if (chunk_num < len(conv["reference"]) and
+                           conv["reference"][chunk_num] is not None and
+                           isinstance(conv["reference"][chunk_num], dict) and
+                           "chunks" in conv["reference"][chunk_num]):
                        chunks = conv["reference"][chunk_num]["chunks"]
                        for chunk in chunks:
                            new_chunk = {
@@ -620,18 +620,35 @@ def user_register(user_id, user):
        "location": "",
    }
    tenant_llm = []
-   for llm in LLMService.query(fid=settings.LLM_FACTORY):
-       tenant_llm.append(
-           {
-               "tenant_id": user_id,
-               "llm_factory": settings.LLM_FACTORY,
-               "llm_name": llm.llm_name,
-               "model_type": llm.model_type,
-               "api_key": settings.API_KEY,
-               "api_base": settings.LLM_BASE_URL,
-               "max_tokens": llm.max_tokens if llm.max_tokens else 8192,
-           }
-       )

+   seen = set()
+   factory_configs = []
+   for factory_config in [
+       settings.CHAT_CFG,
+       settings.EMBEDDING_CFG,
+       settings.ASR_CFG,
+       settings.IMAGE2TEXT_CFG,
+       settings.RERANK_CFG,
+   ]:
+       factory_name = factory_config["factory"]
+       if factory_name not in seen:
+           seen.add(factory_name)
+           factory_configs.append(factory_config)
+
+   for factory_config in factory_configs:
+       for llm in LLMService.query(fid=factory_config["factory"]):
+           tenant_llm.append(
+               {
+                   "tenant_id": user_id,
+                   "llm_factory": factory_config["factory"],
+                   "llm_name": llm.llm_name,
+                   "model_type": llm.model_type,
+                   "api_key": factory_config["api_key"],
+                   "api_base": factory_config["base_url"],
+                   "max_tokens": llm.max_tokens if llm.max_tokens else 8192,
+               }
+           )

    if settings.LIGHTEN != 1:
        for buildin_embedding_model in settings.BUILTIN_EMBEDDING_MODELS:
            mdlnm, fid = TenantLLMService.split_model_name_and_factory(buildin_embedding_model)
@@ -647,6 +664,13 @@ def user_register(user_id, user):
            }
        )

+   unique = {}
+   for item in tenant_llm:
+       key = (item["tenant_id"], item["llm_factory"], item["llm_name"])
+       if key not in unique:
+           unique[key] = item
+   tenant_llm = list(unique.values())
+
    if not UserService.save(**user):
        return
    TenantService.insert(**tenant)
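A minimal sketch of the dedup idiom the new registration code relies on, with hypothetical entries (the real ones come from `LLMService.query`): the first occurrence of each composite key wins, so duplicates produced by overlapping factory configs are dropped.

```python
tenant_llm = [
    {"tenant_id": "t1", "llm_factory": "OpenAI", "llm_name": "gpt-4o", "api_key": "k1"},
    {"tenant_id": "t1", "llm_factory": "OpenAI", "llm_name": "gpt-4o", "api_key": "k2"},  # same key, later
]
unique = {}
for item in tenant_llm:
    key = (item["tenant_id"], item["llm_factory"], item["llm_name"])
    if key not in unique:
        unique[key] = item
print(list(unique.values()))  # only the "k1" entry survives
```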
@@ -744,6 +744,7 @@ class Dialog(DataBaseModel):
        null=False,
        default={"system": "", "prologue": "Hi! I'm your assistant, what can I do for you?", "parameters": [], "empty_response": "Sorry! No relevant content was found in the knowledge base!"},
    )
+   meta_data_filter = JSONField(null=True, default={})
    similarity_threshold = FloatField(default=0.2)
    vector_similarity_weight = FloatField(default=0.3)
@@ -1015,4 +1016,8 @@ def migrate_db():
        migrate(migrator.add_column("api_4_conversation", "errors", TextField(null=True, help_text="errors")))
    except Exception:
        pass
+   try:
+       migrate(migrator.add_column("dialog", "meta_data_filter", JSONField(null=True, default={})))
+   except Exception:
+       pass
    logging.disable(logging.NOTSET)
@@ -63,12 +63,44 @@ def init_superuser():
        "invited_by": user_info["id"],
        "role": UserTenantRole.OWNER
    }

+   user_id = user_info
    tenant_llm = []
-   for llm in LLMService.query(fid=settings.LLM_FACTORY):
-       tenant_llm.append(
-           {"tenant_id": user_info["id"], "llm_factory": settings.LLM_FACTORY, "llm_name": llm.llm_name,
-            "model_type": llm.model_type,
-            "api_key": settings.API_KEY, "api_base": settings.LLM_BASE_URL})

+   seen = set()
+   factory_configs = []
+   for factory_config in [
+       settings.CHAT_CFG["factory"],
+       settings.EMBEDDING_CFG["factory"],
+       settings.ASR_CFG["factory"],
+       settings.IMAGE2TEXT_CFG["factory"],
+       settings.RERANK_CFG["factory"],
+   ]:
+       factory_name = factory_config["factory"]
+       if factory_name not in seen:
+           seen.add(factory_name)
+           factory_configs.append(factory_config)
+
+   for factory_config in factory_configs:
+       for llm in LLMService.query(fid=factory_config["factory"]):
+           tenant_llm.append(
+               {
+                   "tenant_id": user_id,
+                   "llm_factory": factory_config["factory"],
+                   "llm_name": llm.llm_name,
+                   "model_type": llm.model_type,
+                   "api_key": factory_config["api_key"],
+                   "api_base": factory_config["base_url"],
+                   "max_tokens": llm.max_tokens if llm.max_tokens else 8192,
+               }
+           )
+
+   unique = {}
+   for item in tenant_llm:
+       key = (item["tenant_id"], item["llm_factory"], item["llm_name"])
+       if key not in unique:
+           unique[key] = item
+   tenant_llm = list(unique.values())

    if not UserService.save(**user_info):
        logging.error("can't init admin.")
@@ -103,7 +135,7 @@ def init_llm_factory():
    except Exception:
        pass

-   factory_llm_infos = settings.FACTORY_LLM_INFOS
+   factory_llm_infos = settings.FACTORY_LLM_INFOS
    for factory_llm_info in factory_llm_infos:
        info = deepcopy(factory_llm_info)
        llm_infos = info.pop("llm")
@@ -30,6 +30,7 @@ from api import settings
from api.db import LLMType, ParserType, StatusEnum
from api.db.db_models import DB, Dialog
from api.db.services.common_service import CommonService
+from api.db.services.document_service import DocumentService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.langfuse_service import TenantLangfuseService
from api.db.services.llm_service import LLMBundle, TenantLLMService
@@ -38,6 +39,7 @@ from rag.app.resume import forbidden_select_fields4resume
from rag.app.tag import label_question
from rag.nlp.search import index_name
from rag.prompts import chunks_format, citation_prompt, cross_languages, full_question, kb_prompt, keyword_extraction, message_fit_in
+from rag.prompts.prompts import gen_meta_filter
from rag.utils import num_tokens_from_string, rmSpace
from rag.utils.tavily_conn import Tavily

@@ -119,6 +121,7 @@ class DialogService(CommonService):
            cls.model.do_refer,
            cls.model.rerank_id,
            cls.model.kb_ids,
            cls.model.icon,
            cls.model.status,
            User.nickname,
            User.avatar.alias("tenant_avatar"),
@@ -250,6 +253,46 @@ def repair_bad_citation_formats(answer: str, kbinfos: dict, idx: set):
    return answer, idx


+def meta_filter(metas: dict, filters: list[dict]):
+    doc_ids = []
+    def filter_out(v2docs, operator, value):
+        nonlocal doc_ids
+        for input,docids in v2docs.items():
+            try:
+                input = float(input)
+                value = float(value)
+            except Exception:
+                input = str(input)
+                value = str(value)
+
+            for conds in [
+                (operator == "contains", str(value).lower() in str(input).lower()),
+                (operator == "not contains", str(value).lower() not in str(input).lower()),
+                (operator == "start with", str(input).lower().startswith(str(value).lower())),
+                (operator == "end with", str(input).lower().endswith(str(value).lower())),
+                (operator == "empty", not input),
+                (operator == "not empty", input),
+                (operator == "=", input == value),
+                (operator == "≠", input != value),
+                (operator == ">", input > value),
+                (operator == "<", input < value),
+                (operator == "≥", input >= value),
+                (operator == "≤", input <= value),
+            ]:
+                try:
+                    if all(conds):
+                        doc_ids.extend(docids)
+                except Exception:
+                    pass
+
+    for k, v2docs in metas.items():
+        for f in filters:
+            if k != f["key"]:
+                continue
+            filter_out(v2docs, f["op"], f["value"])
+    return doc_ids


def chat(dialog, messages, stream=True, **kwargs):
    assert messages[-1]["role"] == "user", "The last content of this conversation is not from user."
    if not dialog.kb_ids and not dialog.prompt_config.get("tavily_api_key"):
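For orientation, a minimal usage sketch of the new `meta_filter` helper with hypothetical data. The `metas` shape matches what `DocumentService.get_meta_by_kbs` returns, and the filters follow the schema in `rag/prompts/meta_filter.md` further below.

```python
metas = {
    "color": {"red": ["doc1"], "blue": ["doc2"]},
    "listing_date": {"2025-07-11": ["doc1"], "2025-08-01": ["doc2"]},
}
filters = [
    {"key": "listing_date", "value": "2025-07-01", "op": "≥"},
    {"key": "listing_date", "value": "2025-08-01", "op": "<"},
    {"key": "color", "value": "blue", "op": "≠"},
]
# Each filter contributes the doc IDs of every metadata value satisfying it, so the
# result is a union that may contain duplicates: here doc1 appears three times
# (color ≠ blue, date ≥ start, date < end) and doc2 once (date ≥ start).
print(meta_filter(metas, filters))
```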
@@ -287,9 +330,10 @@ def chat(dialog, messages, stream=True, **kwargs):

    retriever = settings.retrievaler
    questions = [m["content"] for m in messages if m["role"] == "user"][-3:]
-   attachments = kwargs["doc_ids"].split(",") if "doc_ids" in kwargs else None
+   attachments = kwargs["doc_ids"].split(",") if "doc_ids" in kwargs else []
    if "doc_ids" in messages[-1]:
        attachments = messages[-1]["doc_ids"]

    prompt_config = dialog.prompt_config
    field_map = KnowledgebaseService.get_field_map(dialog.kb_ids)
    # try to use sql if field mapping is good to go
@@ -316,6 +360,14 @@ def chat(dialog, messages, stream=True, **kwargs):
    if prompt_config.get("cross_languages"):
        questions = [cross_languages(dialog.tenant_id, dialog.llm_id, questions[0], prompt_config["cross_languages"])]

+   if dialog.meta_data_filter:
+       metas = DocumentService.get_meta_by_kbs(dialog.kb_ids)
+       if dialog.meta_data_filter.get("method") == "auto":
+           filters = gen_meta_filter(chat_mdl, metas, questions[-1])
+           attachments.extend(meta_filter(metas, filters))
+       elif dialog.meta_data_filter.get("method") == "manual":
+           attachments.extend(meta_filter(metas, dialog.meta_data_filter["manual"]))
+
    if prompt_config.get("keyword", False):
        questions[-1] += keyword_extraction(chat_mdl, questions[-1])
@@ -243,7 +243,7 @@ class DocumentService(CommonService):
        from api.db.services.task_service import TaskService
        cls.clear_chunk_num(doc.id)
        try:
-           TaskService.filter_delete(Task.doc_id == doc.id)
+           TaskService.filter_delete([Task.doc_id == doc.id])
            page = 0
            page_size = 1000
            all_chunk_ids = []
@@ -574,6 +574,25 @@ class DocumentService(CommonService):
    def update_meta_fields(cls, doc_id, meta_fields):
        return cls.update_by_id(doc_id, {"meta_fields": meta_fields})

+   @classmethod
+   @DB.connection_context()
+   def get_meta_by_kbs(cls, kb_ids):
+       fields = [
+           cls.model.id,
+           cls.model.meta_fields,
+       ]
+       meta = {}
+       for r in cls.model.select(*fields).where(cls.model.kb_id.in_(kb_ids)):
+           doc_id = r.id
+           for k,v in r.meta_fields.items():
+               if k not in meta:
+                   meta[k] = {}
+               v = str(v)
+               if v not in meta[k]:
+                   meta[k][v] = []
+               meta[k][v].append(doc_id)
+       return meta
+
    @classmethod
    @DB.connection_context()
    def update_progress(cls):
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import inspect
import logging
import re
from functools import partial
@@ -141,6 +142,7 @@ class TenantLLMService(CommonService):
    @DB.connection_context()
    def model_instance(cls, tenant_id, llm_type, llm_name=None, lang="Chinese", **kwargs):
        model_config = TenantLLMService.get_model_config(tenant_id, llm_type, llm_name)
+       kwargs.update({"provider": model_config["llm_factory"]})
        if llm_type == LLMType.EMBEDDING.value:
            if model_config["llm_factory"] not in EmbeddingModel:
                return
@@ -376,7 +378,24 @@ class LLMBundle:
            return txt

        return txt[last_think_end + len("</think>") :]

+   @staticmethod
+   def _clean_param(chat_partial, **kwargs):
+       func = chat_partial.func
+       sig = inspect.signature(func)
+       keyword_args = []
+       support_var_args = False
+       for param in sig.parameters.values():
+           if param.kind == inspect.Parameter.VAR_KEYWORD or param.kind == inspect.Parameter.VAR_POSITIONAL:
+               support_var_args = True
+           elif param.kind == inspect.Parameter.KEYWORD_ONLY:
+               keyword_args.append(param.name)
+
+       use_kwargs = kwargs
+       if not support_var_args:
+           use_kwargs = {k: v for k, v in kwargs.items() if k in keyword_args}
+       return use_kwargs
+
    def chat(self, system: str, history: list, gen_conf: dict = {}, **kwargs) -> str:
        if self.langfuse:
            generation = self.langfuse.start_generation(trace_context=self.trace_context, name="chat", model=self.llm_name, input={"system": system, "history": history})
@@ -384,8 +403,9 @@ class LLMBundle:
        chat_partial = partial(self.mdl.chat, system, history, gen_conf)
        if self.is_tools and self.mdl.is_tools:
            chat_partial = partial(self.mdl.chat_with_tools, system, history, gen_conf)

-       txt, used_tokens = chat_partial(**kwargs)

+       use_kwargs = self._clean_param(chat_partial, **kwargs)
+       txt, used_tokens = chat_partial(**use_kwargs)
        txt = self._remove_reasoning_content(txt)

        if not self.verbose_tool_use:
@@ -409,8 +429,8 @@ class LLMBundle:
        total_tokens = 0
        if self.is_tools and self.mdl.is_tools:
            chat_partial = partial(self.mdl.chat_streamly_with_tools, system, history, gen_conf)

-       for txt in chat_partial(**kwargs):
+       use_kwargs = self._clean_param(chat_partial, **kwargs)
+       for txt in chat_partial(**use_kwargs):
            if isinstance(txt, int):
                total_tokens = txt
                if self.langfuse:
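A small self-contained sketch of the signature-inspection idea behind the new `LLMBundle._clean_param`: keyword arguments are only forwarded when the target chat function declares them keyword-only (or accepts `*args`/`**kwargs`). The names below are illustrative, not from the repository.

```python
import inspect
from functools import partial

def _fake_chat(system, history, gen_conf, *, images=None):  # hypothetical model API
    return f"images={images}"

chat_partial = partial(_fake_chat, "sys", [], {})
sig = inspect.signature(chat_partial.func)
keyword_only = {p.name for p in sig.parameters.values() if p.kind == inspect.Parameter.KEYWORD_ONLY}
kwargs = {"images": ["img1"], "stop": ["\n"]}  # "stop" is not declared, so it is filtered out
use_kwargs = {k: v for k, v in kwargs.items() if k in keyword_only}
print(chat_partial(**use_kwargs))  # only images=... is forwarded
```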
@@ -38,6 +38,11 @@ EMBEDDING_MDL = ""
RERANK_MDL = ""
ASR_MDL = ""
IMAGE2TEXT_MDL = ""
+CHAT_CFG = ""
+EMBEDDING_CFG = ""
+RERANK_CFG = ""
+ASR_CFG = ""
+IMAGE2TEXT_CFG = ""
API_KEY = None
PARSERS = None
HOST_IP = None
@@ -74,23 +79,22 @@ STRONG_TEST_COUNT = int(os.environ.get("STRONG_TEST_COUNT", "8"))

BUILTIN_EMBEDDING_MODELS = ["BAAI/bge-large-zh-v1.5@BAAI", "maidalun1020/bce-embedding-base_v1@Youdao"]


def get_or_create_secret_key():
    secret_key = os.environ.get("RAGFLOW_SECRET_KEY")
    if secret_key and len(secret_key) >= 32:
        return secret_key

    # Check if there's a configured secret key
    configured_key = get_base_config(RAG_FLOW_SERVICE_NAME, {}).get("secret_key")
    if configured_key and configured_key != str(date.today()) and len(configured_key) >= 32:
        return configured_key

    # Generate a new secure key and warn about it
    import logging

    new_key = secrets.token_hex(32)
-   logging.warning(
-       "SECURITY WARNING: Using auto-generated SECRET_KEY. "
-       f"Generated key: {new_key}"
-   )
+   logging.warning(f"SECURITY WARNING: Using auto-generated SECRET_KEY. Generated key: {new_key}")
    return new_key
@@ -99,10 +103,10 @@ def init_settings():
    LIGHTEN = int(os.environ.get("LIGHTEN", "0"))
    DATABASE_TYPE = os.getenv("DB_TYPE", "mysql")
    DATABASE = decrypt_database_config(name=DATABASE_TYPE)
-   LLM = get_base_config("user_default_llm", {})
-   LLM_DEFAULT_MODELS = LLM.get("default_models", {})
-   LLM_FACTORY = LLM.get("factory")
-   LLM_BASE_URL = LLM.get("base_url")
+   LLM = get_base_config("user_default_llm", {}) or {}
+   LLM_DEFAULT_MODELS = LLM.get("default_models", {}) or {}
+   LLM_FACTORY = LLM.get("factory", "") or ""
+   LLM_BASE_URL = LLM.get("base_url", "") or ""
    try:
        REGISTER_ENABLED = int(os.environ.get("REGISTER_ENABLED", "1"))
    except Exception:
@@ -115,29 +119,34 @@ def init_settings():
        FACTORY_LLM_INFOS = []

    global CHAT_MDL, EMBEDDING_MDL, RERANK_MDL, ASR_MDL, IMAGE2TEXT_MDL
+   global CHAT_CFG, EMBEDDING_CFG, RERANK_CFG, ASR_CFG, IMAGE2TEXT_CFG
    if not LIGHTEN:
        EMBEDDING_MDL = BUILTIN_EMBEDDING_MODELS[0]

-   if LLM_DEFAULT_MODELS:
-       CHAT_MDL = LLM_DEFAULT_MODELS.get("chat_model", CHAT_MDL)
-       EMBEDDING_MDL = LLM_DEFAULT_MODELS.get("embedding_model", EMBEDDING_MDL)
-       RERANK_MDL = LLM_DEFAULT_MODELS.get("rerank_model", RERANK_MDL)
-       ASR_MDL = LLM_DEFAULT_MODELS.get("asr_model", ASR_MDL)
-       IMAGE2TEXT_MDL = LLM_DEFAULT_MODELS.get("image2text_model", IMAGE2TEXT_MDL)
-
-       # factory can be specified in the config name with "@". LLM_FACTORY will be used if not specified
-       CHAT_MDL = CHAT_MDL + (f"@{LLM_FACTORY}" if "@" not in CHAT_MDL and CHAT_MDL != "" else "")
-       EMBEDDING_MDL = EMBEDDING_MDL + (f"@{LLM_FACTORY}" if "@" not in EMBEDDING_MDL and EMBEDDING_MDL != "" else "")
-       RERANK_MDL = RERANK_MDL + (f"@{LLM_FACTORY}" if "@" not in RERANK_MDL and RERANK_MDL != "" else "")
-       ASR_MDL = ASR_MDL + (f"@{LLM_FACTORY}" if "@" not in ASR_MDL and ASR_MDL != "" else "")
-       IMAGE2TEXT_MDL = IMAGE2TEXT_MDL + (f"@{LLM_FACTORY}" if "@" not in IMAGE2TEXT_MDL and IMAGE2TEXT_MDL != "" else "")

    global API_KEY, PARSERS, HOST_IP, HOST_PORT, SECRET_KEY
    API_KEY = LLM.get("api_key")
    PARSERS = LLM.get(
        "parsers", "naive:General,qa:Q&A,resume:Resume,manual:Manual,table:Table,paper:Paper,book:Book,laws:Laws,presentation:Presentation,picture:Picture,one:One,audio:Audio,email:Email,tag:Tag"
    )

+   chat_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("chat_model", CHAT_MDL))
+   embedding_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("embedding_model", EMBEDDING_MDL))
+   rerank_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("rerank_model", RERANK_MDL))
+   asr_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("asr_model", ASR_MDL))
+   image2text_entry = _parse_model_entry(LLM_DEFAULT_MODELS.get("image2text_model", IMAGE2TEXT_MDL))
+
+   CHAT_CFG = _resolve_per_model_config(chat_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
+   EMBEDDING_CFG = _resolve_per_model_config(embedding_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
+   RERANK_CFG = _resolve_per_model_config(rerank_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
+   ASR_CFG = _resolve_per_model_config(asr_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
+   IMAGE2TEXT_CFG = _resolve_per_model_config(image2text_entry, LLM_FACTORY, API_KEY, LLM_BASE_URL)
+
+   CHAT_MDL = CHAT_CFG.get("model", "") or ""
+   EMBEDDING_MDL = EMBEDDING_CFG.get("model", "") or ""
+   RERANK_MDL = RERANK_CFG.get("model", "") or ""
+   ASR_MDL = ASR_CFG.get("model", "") or ""
+   IMAGE2TEXT_MDL = IMAGE2TEXT_CFG.get("model", "") or ""

    HOST_IP = get_base_config(RAG_FLOW_SERVICE_NAME, {}).get("host", "127.0.0.1")
    HOST_PORT = get_base_config(RAG_FLOW_SERVICE_NAME, {}).get("http_port")
@@ -170,6 +179,7 @@ def init_settings():

    retrievaler = search.Dealer(docStoreConn)
    from graphrag import search as kg_search

    kg_retrievaler = kg_search.KGSearch(docStoreConn)

    if int(os.environ.get("SANDBOX_ENABLED", "0")):
@@ -210,3 +220,34 @@ class RetCode(IntEnum, CustomEnum):
    SERVER_ERROR = 500
    FORBIDDEN = 403
    NOT_FOUND = 404
+
+
+def _parse_model_entry(entry):
+    if isinstance(entry, str):
+        return {"name": entry, "factory": None, "api_key": None, "base_url": None}
+    if isinstance(entry, dict):
+        name = entry.get("name") or entry.get("model") or ""
+        return {
+            "name": name,
+            "factory": entry.get("factory"),
+            "api_key": entry.get("api_key"),
+            "base_url": entry.get("base_url"),
+        }
+    return {"name": "", "factory": None, "api_key": None, "base_url": None}
+
+
+def _resolve_per_model_config(entry_dict, backup_factory, backup_api_key, backup_base_url):
+    name = (entry_dict.get("name") or "").strip()
+    m_factory = entry_dict.get("factory") or backup_factory or ""
+    m_api_key = entry_dict.get("api_key") or backup_api_key or ""
+    m_base_url = entry_dict.get("base_url") or backup_base_url or ""
+
+    if name and "@" not in name and m_factory:
+        name = f"{name}@{m_factory}"
+
+    return {
+        "model": name,
+        "factory": m_factory,
+        "api_key": m_api_key,
+        "base_url": m_base_url,
+    }
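To see how the two new helpers compose, a hedged example of resolving a plain string entry against the tenant-level fallbacks (all values hypothetical):

```python
entry = _parse_model_entry("qwen2.5-7b-instruct")
cfg = _resolve_per_model_config(entry, "Tongyi-Qianwen", "sk-backup", "https://example.invalid/v1")
# The factory suffix is appended because the name carries no "@":
# {'model': 'qwen2.5-7b-instruct@Tongyi-Qianwen', 'factory': 'Tongyi-Qianwen',
#  'api_key': 'sk-backup', 'base_url': 'https://example.invalid/v1'}
print(cfg)
```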
@@ -64,9 +64,21 @@ redis:
#   config:
#     oss_table: 'opendal_storage'
# user_default_llm:
-#   factory: 'Tongyi-Qianwen'
-#   api_key: 'sk-xxxxxxxxxxxxx'
-#   base_url: ''
+#   factory: 'BAAI'
+#   api_key: 'backup'
+#   base_url: 'backup_base_url'
+#   default_models:
+#     chat_model:
+#       name: 'qwen2.5-7b-instruct'
+#       factory: 'xxxx'
+#       api_key: 'xxxx'
+#       base_url: 'https://api.xx.com'
+#     embedding_model:
+#       name: 'bge-m3'
+#     rerank_model: 'bge-reranker-v2'
+#     asr_model:
+#       model: 'whisper-large-v3'  # alias of name
+#     image2text_model: ''
# oauth:
#   oauth2:
#     display_name: "OAuth2"
@@ -12,6 +12,7 @@
#

import logging
+import re
import sys
from io import BytesIO

@@ -20,6 +21,8 @@ from openpyxl import Workbook, load_workbook

from rag.nlp import find_codec

+# copied from `/openpyxl/cell/cell.py`
+ILLEGAL_CHARACTERS_RE = re.compile(r'[\000-\010]|[\013-\014]|[\016-\037]')

class RAGFlowExcelParser:

@@ -50,13 +53,29 @@ class RAGFlowExcelParser:
            logging.info(f"openpyxl load error: {e}, try pandas instead")
            try:
                file_like_object.seek(0)
-               df = pd.read_excel(file_like_object)
-               return RAGFlowExcelParser._dataframe_to_workbook(df)
+               try:
+                   df = pd.read_excel(file_like_object)
+                   return RAGFlowExcelParser._dataframe_to_workbook(df)
+               except Exception as ex:
+                   logging.info(f"pandas with default engine load error: {ex}, try calamine instead")
+                   file_like_object.seek(0)
+                   df = pd.read_excel(file_like_object, engine='calamine')
+                   return RAGFlowExcelParser._dataframe_to_workbook(df)
            except Exception as e_pandas:
                raise Exception(f"pandas.read_excel error: {e_pandas}, original openpyxl error: {e}")

    @staticmethod
+   def _clean_dataframe(df: pd.DataFrame):
+       def clean_string(s):
+           if isinstance(s, str):
+               return ILLEGAL_CHARACTERS_RE.sub(" ", s)
+           return s
+
+       return df.apply(lambda col: col.map(clean_string))
+
+   @staticmethod
    def _dataframe_to_workbook(df):
+       df = RAGFlowExcelParser._clean_dataframe(df)
        wb = Workbook()
        ws = wb.active
        ws.title = "Data"
@@ -63,7 +63,7 @@ docker build -t sandbox-executor-manager:latest ./executor_manager
3. Add the following entry to your /etc/hosts file to resolve the executor manager service:

   ```bash
-  127.0.0.1 sandbox-executor-manager
+  127.0.0.1 es01 infinity mysql minio redis sandbox-executor-manager
   ```

4. Start the RAGFlow service as usual.
@@ -9,8 +9,8 @@ Key features, improvements and bug fixes in the latest releases.

:::info
Each RAGFlow release is available in two editions:
-- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.19.1-slim`
-- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.19.1`
+- **Slim edition**: excludes built-in embedding models and is identified by a **-slim** suffix added to the version name. Example: `infiniflow/ragflow:v0.20.1-slim`
+- **Full edition**: includes built-in embedding models and has no suffix added to the version name. Example: `infiniflow/ragflow:v0.20.1`
:::

:::danger IMPORTANT
@@ -33,10 +33,10 @@ Released on August 8, 2025.

### Added Models

-- ChatGPT 5
+- GPT-5
- Claude 4.1

-### New agent Templates (both workflow and agentic)
+### New agent templates (both workflow and agentic)

- SQL Assistant Workflow: Empowers non-technical teams (e.g., operations, product) to independently query business data.
- Choose Your Knowledge Base Workflow: Lets users select a knowledge base to query during conversations. [#9325](https://github.com/infiniflow/ragflow/pull/9325)
|
||||
return [
|
||||
types.Tool(
|
||||
name="ragflow_retrieval",
|
||||
description="Retrieve relevant chunks from the RAGFlow retrieve interface based on the question, using the specified dataset_ids and optionally document_ids. Below is the list of all available datasets, including their descriptions and IDs. If you're unsure which datasets are relevant to the question, simply pass all dataset IDs to the function."
|
||||
description="Retrieve relevant chunks from the RAGFlow retrieve interface based on the question. You can optionally specify dataset_ids to search only specific datasets, or omit dataset_ids entirely to search across ALL available datasets. You can also optionally specify document_ids to search within specific documents. When dataset_ids is not provided or is empty, the system will automatically search across all available datasets. Below is the list of all available datasets, including their descriptions and IDs:"
|
||||
+ dataset_description,
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
@ -188,14 +188,16 @@ async def list_tools(*, connector) -> list[types.Tool]:
|
||||
"dataset_ids": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"description": "Optional array of dataset IDs to search. If not provided or empty, all datasets will be searched."
|
||||
},
|
||||
"document_ids": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"description": "Optional array of document IDs to search within."
|
||||
},
|
||||
"question": {"type": "string"},
|
||||
"question": {"type": "string", "description": "The question or query to search for."},
|
||||
},
|
||||
"required": ["dataset_ids", "question"],
|
||||
"required": ["question"],
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -206,8 +208,26 @@ async def list_tools(*, connector) -> list[types.Tool]:
async def call_tool(name: str, arguments: dict, *, connector) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
    if name == "ragflow_retrieval":
        document_ids = arguments.get("document_ids", [])
+       dataset_ids = arguments.get("dataset_ids", [])
+
+       # If no dataset_ids provided or empty list, get all available dataset IDs
+       if not dataset_ids:
+           dataset_list_str = connector.list_datasets()
+           dataset_ids = []
+
+           # Parse the dataset list to extract IDs
+           if dataset_list_str:
+               for line in dataset_list_str.strip().split('\n'):
+                   if line.strip():
+                       try:
+                           dataset_info = json.loads(line.strip())
+                           dataset_ids.append(dataset_info["id"])
+                       except (json.JSONDecodeError, KeyError):
+                           # Skip malformed lines
+                           continue
+
        return connector.retrieval(
-           dataset_ids=arguments["dataset_ids"],
+           dataset_ids=dataset_ids,
            document_ids=document_ids,
            question=arguments["question"],
        )
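A hedged illustration of what the relaxed schema permits: a retrieval call supplying only `question`, leaving the server to enumerate datasets itself (the `connector` instance is assumed to be configured elsewhere):

```python
import asyncio

async def demo(connector):
    # dataset_ids omitted: call_tool falls back to connector.list_datasets()
    return await call_tool(
        "ragflow_retrieval",
        {"question": "What is RAGFlow?"},
        connector=connector,
    )

# asyncio.run(demo(connector))  # run against a configured RAGFlow connector
```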
@@ -24,7 +24,7 @@ dependencies = [
    "chardet==5.2.0",
    "cn2an==0.5.22",
    "cohere==5.6.2",
-   "Crawl4AI==0.3.8",
+   "Crawl4AI>=0.3.8",
    "dashscope==1.20.11",
    "deepl==1.18.0",
    "demjson3==3.0.6",
@@ -43,7 +43,7 @@ dependencies = [
    "groq==0.9.0",
    "hanziconv==0.3.2",
    "html-text==0.6.2",
-   "httpx==0.27.0",
+   "httpx==0.27.2",
    "huggingface-hub>=0.25.0,<0.26.0",
    "infinity-sdk==0.6.0-dev4",
    "infinity-emb>=0.0.66,<0.0.67",
@@ -58,7 +58,7 @@ dependencies = [
    "ollama==0.2.1",
    "onnxruntime==1.19.2; sys_platform == 'darwin' or platform_machine != 'x86_64'",
    "onnxruntime-gpu==1.19.2; sys_platform != 'darwin' and platform_machine == 'x86_64'",
-   "openai==1.45.0",
+   "openai>=1.45.0",
    "opencv-python==4.10.0.84",
    "opencv-python-headless==4.10.0.84",
    "openpyxl>=3.1.0,<4.0.0",
@@ -128,6 +128,8 @@ dependencies = [
    "opensearch-py==2.7.1",
    "pluginlib==0.9.4",
    "click>=8.1.8",
+   "python-calamine>=0.4.0",
+   "litellm>=1.74.15.post1",
]

[project.optional-dependencies]
@@ -226,17 +226,20 @@ class Docx(DocxParser):
            for r in tb.rows:
                html += "<tr>"
                i = 0
-               while i < len(r.cells):
-                   span = 1
-                   c = r.cells[i]
-                   for j in range(i + 1, len(r.cells)):
-                       if c.text == r.cells[j].text:
-                           span += 1
-                           i = j
-                       else:
-                           break
-                   i += 1
-                   html += f"<td>{c.text}</td>" if span == 1 else f"<td colspan='{span}'>{c.text}</td>"
+               try:
+                   while i < len(r.cells):
+                       span = 1
+                       c = r.cells[i]
+                       for j in range(i + 1, len(r.cells)):
+                           if c.text == r.cells[j].text:
+                               span += 1
+                               i = j
+                           else:
+                               break
+                       i += 1
+                       html += f"<td>{c.text}</td>" if span == 1 else f"<td colspan='{span}'>{c.text}</td>"
+               except Exception as e:
+                   logging.warning(f"Error parsing table, ignore: {e}")
                html += "</tr>"
            html += "</table>"
            tbls.append(((None, html), ""))
rag/app/table.py (194 lines changed)
@@ -40,7 +40,6 @@ class Excel(ExcelParser):
        total = 0
        for sheetname in wb.sheetnames:
            total += len(list(wb[sheetname].rows))

        res, fails, done = [], [], 0
        rn = 0
        for sheetname in wb.sheetnames:
@@ -48,31 +47,204 @@ class Excel(ExcelParser):
            rows = list(ws.rows)
            if not rows:
                continue
-           headers = [cell.value for cell in rows[0]]
-           missed = set([i for i, h in enumerate(headers) if h is None])
-           headers = [cell.value for i, cell in enumerate(rows[0]) if i not in missed]
+           headers, header_rows = self._parse_headers(ws, rows)
            if not headers:
                continue
            data = []
-           for i, r in enumerate(rows[1:]):
+           for i, r in enumerate(rows[header_rows:]):
                rn += 1
                if rn - 1 < from_page:
                    continue
                if rn - 1 >= to_page:
                    break
-               row = [cell.value for ii, cell in enumerate(r) if ii not in missed]
-               if len(row) != len(headers):
+               row_data = self._extract_row_data(ws, r, header_rows + i, len(headers))
+               if row_data is None:
                    fails.append(str(i))
                    continue
-               data.append(row)
+               if self._is_empty_row(row_data):
+                   continue
+               data.append(row_data)
                done += 1
-           if np.array(data).size == 0:
+           if len(data) == 0:
                continue
-           res.append(pd.DataFrame(np.array(data), columns=headers))

+           df = pd.DataFrame(data, columns=headers)
+           res.append(df)
        callback(0.3, ("Extract records: {}~{}".format(from_page + 1, min(to_page, from_page + rn)) + (f"{len(fails)} failure, line: %s..." % (",".join(fails[:3])) if fails else "")))
        return res

+   def _parse_headers(self, ws, rows):
+       if len(rows) == 0:
+           return [], 0
+       has_complex_structure = self._has_complex_header_structure(ws, rows)
+       if has_complex_structure:
+           return self._parse_multi_level_headers(ws, rows)
+       else:
+           return self._parse_simple_headers(rows)
+
+   def _has_complex_header_structure(self, ws, rows):
+       if len(rows) < 1:
+           return False
+       merged_ranges = list(ws.merged_cells.ranges)
+       # Check whether the first two rows involve merged cells
+       for rng in merged_ranges:
+           if rng.min_row <= 2:  # any merged range touching row 1 or row 2 counts
+               return True
+       return False
+
+   def _row_looks_like_header(self, row):
+       header_like_cells = 0
+       data_like_cells = 0
+       non_empty_cells = 0
+       for cell in row:
+           if cell.value is not None:
+               non_empty_cells += 1
+               val = str(cell.value).strip()
+               if self._looks_like_header(val):
+                   header_like_cells += 1
+               elif self._looks_like_data(val):
+                   data_like_cells += 1
+       if non_empty_cells == 0:
+           return False
+       return header_like_cells >= data_like_cells
+
+   def _parse_simple_headers(self, rows):
+       if not rows:
+           return [], 0
+       header_row = rows[0]
+       headers = []
+       for cell in header_row:
+           if cell.value is not None:
+               header_value = str(cell.value).strip()
+               if header_value:
+                   headers.append(header_value)
+           else:
+               pass
+       final_headers = []
+       for i, cell in enumerate(header_row):
+           if cell.value is not None:
+               header_value = str(cell.value).strip()
+               if header_value:
+                   final_headers.append(header_value)
+               else:
+                   final_headers.append(f"Column_{i + 1}")
+           else:
+               final_headers.append(f"Column_{i + 1}")
+       return final_headers, 1
+
+   def _parse_multi_level_headers(self, ws, rows):
+       if len(rows) < 2:
+           return [], 0
+       header_rows = self._detect_header_rows(rows)
+       if header_rows == 1:
+           return self._parse_simple_headers(rows)
+       else:
+           return self._build_hierarchical_headers(ws, rows, header_rows), header_rows
+
+   def _detect_header_rows(self, rows):
+       if len(rows) < 2:
+           return 1
+       header_rows = 1
+       max_check_rows = min(5, len(rows))
+       for i in range(1, max_check_rows):
+           row = rows[i]
+           if self._row_looks_like_header(row):
+               header_rows = i + 1
+           else:
+               break
+       return header_rows
+
+   def _looks_like_header(self, value):
+       if len(value) < 1:
+           return False
+       if any(ord(c) > 127 for c in value):
+           return True
+       if len([c for c in value if c.isalpha()]) >= 2:
+           return True
+       if any(c in value for c in ["(", ")", ":", "：", "（", "）", "_", "-"]):
+           return True
+       return False
+
+   def _looks_like_data(self, value):
+       if len(value) == 1 and value.upper() in ["Y", "N", "M", "X", "/", "-"]:
+           return True
+       if value.replace(".", "").replace("-", "").replace(",", "").isdigit():
+           return True
+       if value.startswith("0x") and len(value) <= 10:
+           return True
+       return False
+
+   def _build_hierarchical_headers(self, ws, rows, header_rows):
+       headers = []
+       max_col = max(len(row) for row in rows[:header_rows]) if header_rows > 0 else 0
+       merged_ranges = list(ws.merged_cells.ranges)
+       for col_idx in range(max_col):
+           header_parts = []
+           for row_idx in range(header_rows):
+               if col_idx < len(rows[row_idx]):
+                   cell_value = rows[row_idx][col_idx].value
+                   merged_value = self._get_merged_cell_value(ws, row_idx + 1, col_idx + 1, merged_ranges)
+                   if merged_value is not None:
+                       cell_value = merged_value
+                   if cell_value is not None:
+                       cell_value = str(cell_value).strip()
+                       if cell_value and cell_value not in header_parts and self._is_valid_header_part(cell_value):
+                           header_parts.append(cell_value)
+           if header_parts:
+               header = "-".join(header_parts)
+               headers.append(header)
+           else:
+               headers.append(f"Column_{col_idx + 1}")
+       final_headers = [h for h in headers if h and h != "-"]
+       return final_headers
+
+   def _is_valid_header_part(self, value):
+       if len(value) == 1 and value.upper() in ["Y", "N", "M", "X"]:
+           return False
+       if value.replace(".", "").replace("-", "").replace(",", "").isdigit():
+           return False
+       if value in ["/", "-", "+", "*", "="]:
+           return False
+       return True
+
+   def _get_merged_cell_value(self, ws, row, col, merged_ranges):
+       for merged_range in merged_ranges:
+           if merged_range.min_row <= row <= merged_range.max_row and merged_range.min_col <= col <= merged_range.max_col:
+               return ws.cell(merged_range.min_row, merged_range.min_col).value
+       return None
+
+   def _extract_row_data(self, ws, row, absolute_row_idx, expected_cols):
+       row_data = []
+       merged_ranges = list(ws.merged_cells.ranges)
+       actual_row_num = absolute_row_idx + 1
+       for col_idx in range(expected_cols):
+           cell_value = None
+           actual_col_num = col_idx + 1
+           try:
+               cell_value = ws.cell(row=actual_row_num, column=actual_col_num).value
+           except ValueError:
+               if col_idx < len(row):
+                   cell_value = row[col_idx].value
+           if cell_value is None:
+               merged_value = self._get_merged_cell_value(ws, actual_row_num, actual_col_num, merged_ranges)
+               if merged_value is not None:
+                   cell_value = merged_value
+               else:
+                   cell_value = self._get_inherited_value(ws, actual_row_num, actual_col_num, merged_ranges)
+           row_data.append(cell_value)
+       return row_data
+
+   def _get_inherited_value(self, ws, row, col, merged_ranges):
+       for merged_range in merged_ranges:
+           if merged_range.min_row <= row <= merged_range.max_row and merged_range.min_col <= col <= merged_range.max_col:
+               return ws.cell(merged_range.min_row, merged_range.min_col).value
+       return None
+
+   def _is_empty_row(self, row_data):
+       for val in row_data:
+           if val is not None and str(val).strip() != "":
+               return False
+       return True


def trans_datatime(s):
    try:
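As a rough illustration of the header flattening these new helpers perform, the sketch below mimics `_build_hierarchical_headers` on a two-row header whose first row has a merged "Price" cell (plain lists stand in for openpyxl rows; in the real parser the merged value is recovered via `_get_merged_cell_value`):

```python
header_rows = [
    ["Price", "Price", "Stock"],  # a merged "Price" cell repeats across its range
    ["Min", "Max", None],
]
headers = []
for col in range(3):
    parts = []
    for row in header_rows:
        v = row[col]
        if v is not None:
            v = str(v).strip()
            if v and v not in parts:  # dedupe repeated merged values
                parts.append(v)
    headers.append("-".join(parts) if parts else f"Column_{col + 1}")
print(headers)  # ['Price-Min', 'Price-Max', 'Stock']
```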
@@ -19,6 +19,48 @@
import importlib
import inspect

+from strenum import StrEnum
+
+
+class SupportedLiteLLMProvider(StrEnum):
+    Tongyi_Qianwen = "Tongyi-Qianwen"
+    Dashscope = "Dashscope"
+    Bedrock = "Bedrock"
+    Moonshot = "Moonshot"
+    xAI = "xAI"
+    DeepInfra = "DeepInfra"
+    Groq = "Groq"
+    Cohere = "Cohere"
+    Gemini = "Gemini"
+    DeepSeek = "DeepSeek"
+    Nvidia = "NVIDIA"
+    TogetherAI = "TogetherAI"
+    Anthropic = "Anthropic"
+
+
+FACTORY_DEFAULT_BASE_URL = {
+    SupportedLiteLLMProvider.Tongyi_Qianwen: "https://dashscope.aliyuncs.com/compatible-mode/v1",
+    SupportedLiteLLMProvider.Dashscope: "https://dashscope.aliyuncs.com/compatible-mode/v1",
+    SupportedLiteLLMProvider.Moonshot: "https://api.moonshot.cn/v1",
+}
+
+
+LITELLM_PROVIDER_PREFIX = {
+    SupportedLiteLLMProvider.Tongyi_Qianwen: "dashscope/",
+    SupportedLiteLLMProvider.Dashscope: "dashscope/",
+    SupportedLiteLLMProvider.Bedrock: "bedrock/",
+    SupportedLiteLLMProvider.Moonshot: "moonshot/",
+    SupportedLiteLLMProvider.xAI: "xai/",
+    SupportedLiteLLMProvider.DeepInfra: "deepinfra/",
+    SupportedLiteLLMProvider.Groq: "groq/",
+    SupportedLiteLLMProvider.Cohere: "",  # don't need a prefix
+    SupportedLiteLLMProvider.Gemini: "gemini/",
+    SupportedLiteLLMProvider.DeepSeek: "deepseek/",
+    SupportedLiteLLMProvider.Nvidia: "nvidia_nim/",
+    SupportedLiteLLMProvider.TogetherAI: "together_ai/",
+    SupportedLiteLLMProvider.Anthropic: "",  # don't need a prefix
+}

ChatModel = globals().get("ChatModel", {})
CvModel = globals().get("CvModel", {})
EmbeddingModel = globals().get("EmbeddingModel", {})
@@ -26,6 +68,7 @@ RerankModel = globals().get("RerankModel", {})
Seq2txtModel = globals().get("Seq2txtModel", {})
TTSModel = globals().get("TTSModel", {})

+
MODULE_MAPPING = {
    "chat_model": ChatModel,
    "cv_model": CvModel,
@@ -42,20 +85,30 @@ for module_name, mapping_dict in MODULE_MAPPING.items():
    module = importlib.import_module(full_module_name)

    base_class = None
+   lite_llm_base_class = None
    for name, obj in inspect.getmembers(module):
-       if inspect.isclass(obj) and name == "Base":
-           base_class = obj
-           break
-   if base_class is None:
-       continue
+       if inspect.isclass(obj):
+           if name == "Base":
+               base_class = obj
+           elif name == "LiteLLMBase":
+               lite_llm_base_class = obj
+               assert hasattr(obj, "_FACTORY_NAME"), "LiteLLMbase should have _FACTORY_NAME field."
+               if hasattr(obj, "_FACTORY_NAME"):
+                   if isinstance(obj._FACTORY_NAME, list):
+                       for factory_name in obj._FACTORY_NAME:
+                           mapping_dict[factory_name] = obj
+                   else:
+                       mapping_dict[obj._FACTORY_NAME] = obj

+   if base_class is not None:
+       for _, obj in inspect.getmembers(module):
+           if inspect.isclass(obj) and issubclass(obj, base_class) and obj is not base_class and hasattr(obj, "_FACTORY_NAME"):
+               if isinstance(obj._FACTORY_NAME, list):
+                   for factory_name in obj._FACTORY_NAME:
+                       mapping_dict[factory_name] = obj
+               else:
+                   mapping_dict[obj._FACTORY_NAME] = obj

-   for _, obj in inspect.getmembers(module):
-       if inspect.isclass(obj) and issubclass(obj, base_class) and obj is not base_class and hasattr(obj, "_FACTORY_NAME"):
-           if isinstance(obj._FACTORY_NAME, list):
-               for factory_name in obj._FACTORY_NAME:
-                   mapping_dict[factory_name] = obj
-           else:
-               mapping_dict[obj._FACTORY_NAME] = obj

__all__ = [
    "ChatModel",
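Presumably these tables are consumed by the suppressed model-file diff below to build LiteLLM-style model identifiers; a hedged sketch of that mapping (the helper name is hypothetical):

```python
def litellm_model_id(provider: str, model_name: str) -> str:
    # Prepend the provider's LiteLLM routing prefix, if any.
    prefix = LITELLM_PROVIDER_PREFIX.get(provider, "")
    return f"{prefix}{model_name}"

print(litellm_model_id(SupportedLiteLLMProvider.Gemini, "gemini-1.5-pro"))      # gemini/gemini-1.5-pro
print(litellm_model_id(SupportedLiteLLMProvider.Anthropic, "claude-3-5-sonnet"))  # no prefix needed
```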
File diff suppressed because it is too large.
@ -268,7 +268,7 @@ class LocalAIRerank(Base):
        max_rank = np.max(rank)

        # Avoid division by zero if all ranks are identical
        if max_rank - min_rank != 0:
        if not np.isclose(min_rank, max_rank, atol=1e-3):
            rank = (rank - min_rank) / (max_rank - min_rank)
        else:
            rank = np.zeros_like(rank)
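The replaced check matters because scores that differ only by floating-point noise used to produce a huge scale factor; np.isclose treats them as identical instead. A standalone sketch of the same normalization (function name illustrative):

import numpy as np

def minmax_normalize(rank: np.ndarray) -> np.ndarray:
    lo, hi = np.min(rank), np.max(rank)
    if not np.isclose(lo, hi, atol=1e-3):   # scores genuinely spread out
        return (rank - lo) / (hi - lo)
    return np.zeros_like(rank)              # near-identical scores: no signal

print(minmax_normalize(np.array([0.2, 0.5, 0.8])))     # [0.  0.5 1. ]
print(minmax_normalize(np.array([0.5, 0.5004, 0.5])))  # [0. 0. 0.]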
@ -383,8 +383,6 @@ class Dealer:
        vector_column = f"q_{dim}_vec"
        zero_vector = [0.0] * dim
        sim_np = np.array(sim)
        if doc_ids:
            similarity_threshold = 0
        filtered_count = (sim_np >= similarity_threshold).sum()
        ranks["total"] = int(filtered_count)  # convert np.int64 to Python int; otherwise JSON serialization fails
        for i in idx:
@ -4,6 +4,9 @@ Task: {{ task }}

Context: {{ context }}

**Agent Prompt**
{{ agent_prompt }}

**Analysis Requirements:**
1. Is it just small talk? (If yes, no further plan or analysis is needed)
2. What is the core objective of the task?
rag/prompts/meta_filter.md (new file, 53 lines)
@ -0,0 +1,53 @@
You are a metadata filtering condition generator. Analyze the user's question and available document metadata to output a JSON array of filter objects. Follow these rules:

1. **Metadata Structure**:
   - Metadata is provided as JSON where keys are attribute names (e.g., "color"), and values are objects mapping attribute values to document IDs.
   - Example:
     {
       "color": {"red": ["doc1"], "blue": ["doc2"]},
       "listing_date": {"2025-07-11": ["doc1"], "2025-08-01": ["doc2"]}
     }

2. **Output Requirements**:
   - Always output a JSON array of filter objects
   - Each object must have:
     "key": (metadata attribute name),
     "value": (string value to compare),
     "op": (operator from allowed list)

3. **Operator Guide**:
   - Use these operators only: ["contains", "not contains", "start with", "end with", "empty", "not empty", "=", "≠", ">", "<", "≥", "≤"]
   - Date ranges: Break into two conditions (≥ start_date AND < next_month_start)
   - Negations: Always use "≠" for exclusion terms ("not", "except", "exclude", "≠")
   - Implicit logic: Derive unstated filters (e.g., "July" → [≥ YYYY-07-01, < YYYY-08-01])

4. **Processing Steps**:
   a) Identify ALL filterable attributes in the query (both explicit and implicit)
   b) For dates:
      - Infer missing year from current date if needed
      - Always format dates as "YYYY-MM-DD"
      - Convert ranges: [≥ start, < end]
   c) For values: Match EXACTLY to metadata's value keys
   d) Skip conditions if:
      - Attribute doesn't exist in metadata
      - Value has no match in metadata

5. **Example**:
   - User query: "上市日期七月份的有哪些商品,不要蓝色的" (i.e., "Which products were listed in July? Exclude blue ones.")
   - Metadata: { "color": {...}, "listing_date": {...} }
   - Output:
     [
       {"key": "listing_date", "value": "2025-07-01", "op": "≥"},
       {"key": "listing_date", "value": "2025-08-01", "op": "<"},
       {"key": "color", "value": "blue", "op": "≠"}
     ]

6. **Final Output**:
   - ONLY output valid JSON array
   - NO additional text/explanations

**Current Task**:
- Today's date: {{current_date}}
- Available metadata keys: {{metadata_keys}}
- User query: "{{user_question}}"
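To make the contract above concrete, here is a hedged sketch of an evaluator that applies such a filter array to the metadata structure from rule 1. It is illustrative only (not part of the commit) and handles just a few of the allowed operators; ISO dates compare correctly as plain strings, so one comparator table suffices:

def apply_filters(metadata: dict, filters: list[dict]) -> set[str]:
    ops = {
        "=": lambda v, t: v == t,
        "≠": lambda v, t: v != t,
        ">": lambda v, t: v > t,
        "<": lambda v, t: v < t,
        "≥": lambda v, t: v >= t,
        "≤": lambda v, t: v <= t,
        "contains": lambda v, t: t in v,
    }
    surviving = None
    for f in filters:
        matched = set()
        for value, doc_ids in metadata.get(f["key"], {}).items():
            if ops[f["op"]](value, f["value"]):
                matched.update(doc_ids)
        surviving = matched if surviving is None else surviving & matched
    return surviving if surviving is not None else set()

metadata = {
    "color": {"red": ["doc1"], "blue": ["doc2"]},
    "listing_date": {"2025-07-11": ["doc1"], "2025-08-01": ["doc2"]},
}
filters = [
    {"key": "listing_date", "value": "2025-07-01", "op": "≥"},
    {"key": "listing_date", "value": "2025-08-01", "op": "<"},
    {"key": "color", "value": "blue", "op": "≠"},
]
print(apply_filters(metadata, filters))  # {'doc1'}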
@ -149,6 +149,7 @@ NEXT_STEP = load_prompt("next_step")
|
||||
REFLECT = load_prompt("reflect")
|
||||
SUMMARY4MEMORY = load_prompt("summary4memory")
|
||||
RANK_MEMORY = load_prompt("rank_memory")
|
||||
META_FILTER = load_prompt("meta_filter")
|
||||
|
||||
PROMPT_JINJA_ENV = jinja2.Environment(autoescape=False, trim_blocks=True, lstrip_blocks=True)
|
||||
|
||||
@ -335,13 +336,13 @@ def form_history(history, limit=-6):
    return context


def analyze_task(chat_mdl, task_name, tools_description: list[dict]):
def analyze_task(chat_mdl, prompt, task_name, tools_description: list[dict]):
    tools_desc = tool_schema(tools_description)
    context = ""

    template = PROMPT_JINJA_ENV.from_string(ANALYZE_TASK_USER)

    kwd = chat_mdl.chat(ANALYZE_TASK_SYSTEM, [{"role": "user", "content": template.render(task=task_name, context=context, tools_desc=tools_desc)}], {})
    context = template.render(task=task_name, context=context, agent_prompt=prompt, tools_desc=tools_desc)
    kwd = chat_mdl.chat(ANALYZE_TASK_SYSTEM, [{"role": "user", "content": context}], {})
    if isinstance(kwd, tuple):
        kwd = kwd[0]
    kwd = re.sub(r"^.*</think>", "", kwd, flags=re.DOTALL)
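The change threads the agent's own system prompt into the task-analysis template as agent_prompt (see the new **Agent Prompt** block in analyze_task_user.md above). A minimal illustration of that render step, with the template string abbreviated:

import jinja2

env = jinja2.Environment(autoescape=False, trim_blocks=True, lstrip_blocks=True)
template = env.from_string("Task: {{ task }}\n\nContext: {{ context }}\n\n**Agent Prompt**\n{{ agent_prompt }}")
print(template.render(task="compare Q2 revenue", context="", agent_prompt="You are a financial analyst."))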
@ -413,3 +414,20 @@ def rank_memories(chat_mdl, goal:str, sub_goal:str, tool_call_summaries: list[st
    ans = chat_mdl.chat(msg[0]["content"], msg[1:], stop="<|stop|>")
    return re.sub(r"^.*</think>", "", ans, flags=re.DOTALL)


def gen_meta_filter(chat_mdl, meta_data: dict, query: str) -> list:
    sys_prompt = PROMPT_JINJA_ENV.from_string(META_FILTER).render(
        current_date=datetime.datetime.today().strftime('%Y-%m-%d'),
        metadata_keys=json.dumps(meta_data),
        user_question=query
    )
    user_prompt = "Generate filters:"
    ans = chat_mdl.chat(sys_prompt, [{"role": "user", "content": user_prompt}])
    ans = re.sub(r"(^.*</think>|```json\n|```\n*$)", "", ans, flags=re.DOTALL)
    try:
        ans = json_repair.loads(ans)
        assert isinstance(ans, list), ans
        return ans
    except Exception:
        logging.exception(f"Loading json failure: {ans}")
        return []
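A hedged usage sketch for gen_meta_filter, runnable inside rag.prompts where the function and its dependencies live. The stub stands in for the repo's chat-model wrapper, whose .chat(system, messages) interface is assumed from the code above:

import json

class StubChatModel:
    def chat(self, system, messages, *args, **kwargs):
        # A real model would derive this from the system prompt; hard-coded here
        return json.dumps([
            {"key": "listing_date", "value": "2025-07-01", "op": "≥"},
            {"key": "listing_date", "value": "2025-08-01", "op": "<"},
            {"key": "color", "value": "blue", "op": "≠"},
        ])

meta_data = {
    "color": {"red": ["doc1"], "blue": ["doc2"]},
    "listing_date": {"2025-07-11": ["doc1"], "2025-08-01": ["doc2"]},
}
print(gen_meta_filter(StubChatModel(), meta_data, "July listings, excluding blue"))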
@ -304,7 +304,11 @@ async def build_chunks(task, progress_callback):
                converted_image = d["image"].convert("RGB")
                d["image"].close()  # Close original image
                d["image"] = converted_image
            d["image"].save(output_buffer, format='JPEG')
            try:
                d["image"].save(output_buffer, format='JPEG')
            except OSError as e:
                logging.warning(
                    "Saving image of chunk {}/{}/{} got exception, ignore: {}".format(task["location"], task["name"], d["id"], str(e)))

        async with minio_limiter:
            await trio.to_thread.run_sync(lambda: STORAGE_IMPL.put(task["kb_id"], d["id"], output_buffer.getvalue()))
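Pillow raises OSError both for pixel modes JPEG cannot represent and for truncated source data, so the guard above downgrades an encode failure to a warning instead of failing the whole chunk. A self-contained sketch of the same pattern (names illustrative):

import io
import logging
from PIL import Image

def to_jpeg_bytes(img: Image.Image) -> bytes | None:
    if img.mode != "RGB":            # JPEG cannot store RGBA/P pixels
        img = img.convert("RGB")
    buf = io.BytesIO()
    try:
        img.save(buf, format="JPEG")
    except OSError as e:             # e.g. truncated source data
        logging.warning("JPEG encode failed, skipping: %s", e)
        return None
    return buf.getvalue()

print(len(to_jpeg_bytes(Image.new("RGBA", (8, 8), (255, 0, 0, 128)))))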
@ -440,7 +444,7 @@ async def embedding(docs, mdl, parser_config=None, callback=None):
            tts = np.concatenate([vts for _ in range(len(tts))], axis=0)
            tk_count += c

    @timeout(5)
    @timeout(60)
    def batch_encode(txts):
        nonlocal mdl
        return mdl.encode([truncate(c, mdl.max_length - 10) for c in txts])
@ -190,3 +190,17 @@ class RAGFlowS3:
                self.__open__()
                time.sleep(1)
        return

    @use_prefix_path
    @use_default_bucket
    def rm_bucket(self, bucket, *args, **kwargs):
        for conn in self.conn:
            try:
                if not conn.bucket_exists(bucket):
                    continue
                for o in conn.list_objects_v2(Bucket=bucket):
                    conn.delete_object(bucket, o.object_name)
                conn.delete_bucket(Bucket=bucket)
                return
            except Exception as e:
                logging.error(f"Fail rm {bucket}: " + str(e))
@ -24,7 +24,6 @@ class Chat(Base):
        self.id = ""
        self.name = "assistant"
        self.avatar = "path/to/avatar"
        self.dataset_ids = ["kb1"]
        self.llm = Chat.LLM(rag, {})
        self.prompt = Chat.Prompt(rag, {})
        super().__init__(rag, res_dict)
@ -44,6 +44,7 @@ class Document(Base):
        self.process_duration = 0.0
        self.run = "0"
        self.status = "1"
        self.meta_fields = {}
        for k in list(res_dict.keys()):
            if k not in self.__dict__:
                res_dict.pop(k)
@ -245,4 +245,4 @@ class TestUpdatedChunk:
        delete_documents(HttpApiAuth, dataset_id, {"ids": [document_id]})
        res = update_chunk(HttpApiAuth, dataset_id, document_id, chunk_ids[0])
        assert res["code"] == 102
        assert res["message"] == f"Can't find this chunk {chunk_ids[0]}"
        assert res["message"] == f"You don't own the document {document_id}."
@ -163,9 +163,9 @@ class TestDatasetsList:
        [
            {"orderby": ""},
            {"orderby": "unknown"},
            ({"orderby": "CREATE_TIME"}, lambda r: (is_sorted(r["data"], "create_time", True))),
            ({"orderby": "UPDATE_TIME"}, lambda r: (is_sorted(r["data"], "update_time", True))),
            ({"orderby": " create_time "}, lambda r: (is_sorted(r["data"], "update_time", True))),
            {"orderby": "CREATE_TIME"},
            {"orderby": "UPDATE_TIME"},
            {"orderby": " create_time "},
        ],
        ids=["empty", "unknown", "orderby_create_time_upper", "orderby_update_time_upper", "whitespace"],
    )
@ -151,4 +151,4 @@ class TestUpdatedChunk:

        with pytest.raises(Exception) as excinfo:
            chunks[0].update({})
        assert f"Can't find this chunk {chunks[0].id}" in str(excinfo.value), str(excinfo.value)
        assert f"You don't own the document {chunks[0].document_id}" in str(excinfo.value), str(excinfo.value)
@ -693,6 +693,7 @@ class TestDatasetUpdate:
            client,
            {
                "raptor": {"use_raptor": False},
                "graphrag": {"use_graphrag": False},
            },
        )
        dataset.update({"chunk_method": "qa"})
@ -708,6 +709,7 @@ class TestDatasetUpdate:
            client,
            {
                "raptor": {"use_raptor": False},
                "graphrag": {"use_graphrag": False},
            },
        )
        dataset.update({"chunk_method": "qa", "parser_config": None})
uv.lock (generated, 278 lines)
@ -1,4 +1,5 @@
version = 1
revision = 1
requires-python = ">=3.10, <3.13"
resolution-markers = [
    "python_full_version >= '3.12' and sys_platform == 'darwin'",
@ -30,6 +31,15 @@ wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/9f/1c/a17fb513aeb684fb83bef5f395910f53103ab30308bbdd77fd66d6698c46/accelerate-1.9.0-py3-none-any.whl", hash = "sha256:c24739a97ade1d54af4549a65f8b6b046adc87e2b3e4d6c66516e32c53d5a8f1" },
]

[[package]]
name = "aiofiles"
version = "24.1.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5" },
]

[[package]]
name = "aiohappyeyeballs"
version = "2.6.1"
@ -1028,24 +1038,29 @@ wheels = [

[[package]]
name = "crawl4ai"
version = "0.3.8"
version = "0.3.745"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "aiofiles" },
    { name = "aiosqlite" },
    { name = "beautifulsoup4" },
    { name = "colorama" },
    { name = "html2text" },
    { name = "litellm" },
    { name = "lxml" },
    { name = "numpy" },
    { name = "pillow" },
    { name = "playwright" },
    { name = "playwright-stealth" },
    { name = "python-dotenv" },
    { name = "rank-bm25" },
    { name = "requests" },
    { name = "snowballstemmer" },
    { name = "tf-playwright-stealth" },
    { name = "xxhash" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/1c/31/327598a0c2cc3cd13dcb786ab41e9638c4c100db1940c9345b1e4d953f39/crawl4ai-0.3.8.tar.gz", hash = "sha256:bacc97509ddbfa5e328e299538a27a4c7fc2317e3fd5ad707b04677e4fc23fc6" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/02/5a/919e64ff2977d7aa1b2cda4d45f16ff8996cd2c2dc1f55936fb6cd214222/crawl4ai-0.3.745.tar.gz", hash = "sha256:990396d57e10ae7ccabf35c34a317dbd8c59a3ceca475eac75320a8808334438" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/af/03/4d69b8d64b39096a721808a349199ca5d7989acf2177e270d15e6f82c356/Crawl4AI-0.3.8-py3-none-any.whl", hash = "sha256:aa19165440c32b667b7325c166d68b00a99375b09e3a7db929d3873064d5ef4f" },
    { url = "https://mirrors.aliyun.com/pypi/packages/ed/7e/ebe351a457140330b20b6d8289b8f243b21de6e6bce505cd15b230a83bcb/Crawl4AI-0.3.745-py3-none-any.whl", hash = "sha256:763e6aba80959e60e1fe70cb9d954a4cf257eb230af30f51fcd99ff641a7a88d" },
]

[[package]]
@ -1175,9 +1190,6 @@ name = "datrie"
version = "0.8.2"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/9d/fe/db74bd405d515f06657f11ad529878fd389576dca4812bea6f98d9b31574/datrie-0.8.2.tar.gz", hash = "sha256:525b08f638d5cf6115df6ccd818e5a01298cd230b2dac91c8ff2e6499d18765d" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/44/02/53f0cf0bf0cd629ba6c2cc13f2f9db24323459e9c19463783d890a540a96/datrie-0.8.2-pp273-pypy_73-win32.whl", hash = "sha256:b07bd5fdfc3399a6dab86d6e35c72b1dbd598e80c97509c7c7518ab8774d3fda" },
]

[[package]]
name = "debugpy"
@ -1423,6 +1435,14 @@ wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10" },
]

[[package]]
name = "fake-http-header"
version = "0.3.5"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/e3/0b/2849c87d9f13766e29c0a2f4d31681aa72e035016b251ab19d99bde7b592/fake_http_header-0.3.5-py3-none-any.whl", hash = "sha256:cd05f4bebf1b7e38b5f5c03d7fb820c0c17e87d9614fbee0afa39c32c7a2ad3c" },
]

[[package]]
name = "fake-useragent"
version = "1.5.1"
@ -1486,17 +1506,17 @@ name = "fastembed-gpu"
version = "0.3.6"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "huggingface-hub" },
    { name = "loguru" },
    { name = "mmh3" },
    { name = "numpy" },
    { name = "onnxruntime-gpu" },
    { name = "pillow" },
    { name = "pystemmer" },
    { name = "requests" },
    { name = "snowballstemmer" },
    { name = "tokenizers" },
    { name = "tqdm" },
    { name = "huggingface-hub", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "loguru", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "mmh3", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "numpy", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "onnxruntime-gpu", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "pillow", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "pystemmer", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "requests", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "snowballstemmer", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "tokenizers", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "tqdm", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/da/07/7336c7f3d7ee47f33b407eeb50f5eeb152889de538a52a8f1cc637192816/fastembed_gpu-0.3.6.tar.gz", hash = "sha256:ee2de8918b142adbbf48caaffec0c492f864d73c073eea5a3dcd0e8c1041c50d" }
wheels = [
@ -2142,37 +2162,37 @@ wheels = [

[[package]]
name = "greenlet"
version = "3.0.3"
version = "3.2.3"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/17/14/3bddb1298b9a6786539ac609ba4b7c9c0842e12aa73aaa4d8d73ec8f8185/greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/a6/64/bea53c592e3e45799f7c8039a8ee7d6883c518eafef1fcae60beb776070f/greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a6/d6/408ad9603339db28ce334021b1403dfcfbcb7501a435d49698408d928de7/greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881" },
    { url = "https://mirrors.aliyun.com/pypi/packages/6c/90/5b14670653f7363fb3e1665f8da6d64bd4c31d53a796d09ef69f48be7273/greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b" },
    { url = "https://mirrors.aliyun.com/pypi/packages/ef/17/e8e72cabfb5a906c0d976d7fbcc88310df292beea0f816efbefdaf694284/greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a" },
    { url = "https://mirrors.aliyun.com/pypi/packages/1c/2f/64628f6ae48e05f585e0eb3fb7399b52e240ef99f602107b445bf6be23ef/greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83" },
    { url = "https://mirrors.aliyun.com/pypi/packages/24/35/945d5b10648fec9b20bcc6df8952d20bb3bba76413cd71c1fdbee98f5616/greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405" },
    { url = "https://mirrors.aliyun.com/pypi/packages/74/00/27e2da76b926e9b5a2c97d3f4c0baf1b7d8181209d3026c0171f621ae6c0/greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f" },
    { url = "https://mirrors.aliyun.com/pypi/packages/e1/65/506e0a80931170b0dac1a03d36b7fc299f3fa3576235b916718602fff2c3/greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a6/76/e1ee9f290bb0d46b09704c2fb0e609cae329eb308ad404c0ee6fa1ecb8a5/greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9" },
    { url = "https://mirrors.aliyun.com/pypi/packages/6e/20/68a278a6f93fa36e21cfc3d7599399a8a831225644eb3b6b18755cd3d6fc/greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61" },
    { url = "https://mirrors.aliyun.com/pypi/packages/21/b4/90e06e07c78513ab03855768200bdb35c8e764e805b3f14fb488e56f82dc/greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f6/a2/0ed21078039072f9dc738bbf3af12b103a84106b1385ac4723841f846ce7/greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e" },
    { url = "https://mirrors.aliyun.com/pypi/packages/42/11/42ad6b1104c357826bbee7d7b9e4f24dbd9fde94899a03efb004aab62963/greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33" },
    { url = "https://mirrors.aliyun.com/pypi/packages/bb/6b/384dee7e0121cbd1757bdc1824a5ee28e43d8d4e3f99aa59521f629442fe/greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379" },
    { url = "https://mirrors.aliyun.com/pypi/packages/c6/1f/12d5a6cc26e8b483c2e7975f9c22e088ac735c0d8dcb8a8f72d31a4e5f04/greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22" },
    { url = "https://mirrors.aliyun.com/pypi/packages/c7/ec/85b647e59e0f137c7792a809156f413e38379cf7f3f2e1353c37f4be4026/greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3" },
    { url = "https://mirrors.aliyun.com/pypi/packages/94/ed/1e5f4bca691a81700e5a88e86d6f0e538acb10188cd2cc17140e523255ef/greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d" },
    { url = "https://mirrors.aliyun.com/pypi/packages/47/79/26d54d7d700ef65b689fc2665a40846d13e834da0486674a8d4f0f371a47/greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a2/2f/461615adc53ba81e99471303b15ac6b2a6daa8d2a0f7f77fd15605e16d5b/greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be" },
    { url = "https://mirrors.aliyun.com/pypi/packages/e9/55/2c3cfa3cdbb940cf7321fbcf544f0e9c74898eed43bf678abf416812d132/greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e" },
    { url = "https://mirrors.aliyun.com/pypi/packages/38/77/efb21ab402651896c74f24a172eb4d7479f9f53898bd5e56b9e20bb24ffd/greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676" },
    { url = "https://mirrors.aliyun.com/pypi/packages/74/3a/92f188ace0190f0066dca3636cf1b09481d0854c46e92ec5e29c7cefe5b1/greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc" },
    { url = "https://mirrors.aliyun.com/pypi/packages/63/0f/847ed02cdfce10f0e6e3425cd054296bddb11a17ef1b34681fa01a055187/greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230" },
    { url = "https://mirrors.aliyun.com/pypi/packages/bd/37/56b0da468a85e7704f3b2bc045015301bdf4be2184a44868c71f6dca6fe2/greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf" },
    { url = "https://mirrors.aliyun.com/pypi/packages/7c/68/b5f4084c0a252d7e9c0d95fc1cfc845d08622037adb74e05be3a49831186/greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a4/fa/31e22345518adcd69d1d6ab5087a12c178aa7f3c51103f6d5d702199d243/greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6" },
    { url = "https://mirrors.aliyun.com/pypi/packages/53/80/3d94d5999b4179d91bcc93745d1b0815b073d61be79dd546b840d17adb18/greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2" },
    { url = "https://mirrors.aliyun.com/pypi/packages/92/db/b4c12cff13ebac2786f4f217f06588bccd8b53d260453404ef22b121fc3a/greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be" },
    { url = "https://mirrors.aliyun.com/pypi/packages/52/61/75b4abd8147f13f70986df2801bf93735c1bd87ea780d70e3b3ecda8c165/greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac" },
    { url = "https://mirrors.aliyun.com/pypi/packages/35/aa/6894ae299d059d26254779a5088632874b80ee8cf89a88bca00b0709d22f/greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392" },
    { url = "https://mirrors.aliyun.com/pypi/packages/30/64/e01a8261d13c47f3c082519a5e9dbf9e143cc0498ed20c911d04e54d526c/greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/47/48/ff9ca8ba9772d083a4f5221f7b4f0ebe8978131a9ae0909cf202f94cd879/greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db" },
    { url = "https://mirrors.aliyun.com/pypi/packages/e9/45/626e974948713bc15775b696adb3eb0bd708bec267d6d2d5c47bb47a6119/greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b" },
    { url = "https://mirrors.aliyun.com/pypi/packages/b1/8e/8b6f42c67d5df7db35b8c55c9a850ea045219741bb14416255616808c690/greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712" },
    { url = "https://mirrors.aliyun.com/pypi/packages/05/46/ab58828217349500a7ebb81159d52ca357da747ff1797c29c6023d79d798/greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00" },
    { url = "https://mirrors.aliyun.com/pypi/packages/68/7f/d1b537be5080721c0f0089a8447d4ef72839039cdb743bdd8ffd23046e9a/greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302" },
    { url = "https://mirrors.aliyun.com/pypi/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822" },
    { url = "https://mirrors.aliyun.com/pypi/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf" },
    { url = "https://mirrors.aliyun.com/pypi/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b" },
    { url = "https://mirrors.aliyun.com/pypi/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147" },
    { url = "https://mirrors.aliyun.com/pypi/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5" },
    { url = "https://mirrors.aliyun.com/pypi/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc" },
    { url = "https://mirrors.aliyun.com/pypi/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba" },
    { url = "https://mirrors.aliyun.com/pypi/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d" },
    { url = "https://mirrors.aliyun.com/pypi/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b" },
    { url = "https://mirrors.aliyun.com/pypi/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d" },
    { url = "https://mirrors.aliyun.com/pypi/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264" },
    { url = "https://mirrors.aliyun.com/pypi/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb" },
    { url = "https://mirrors.aliyun.com/pypi/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163" },
    { url = "https://mirrors.aliyun.com/pypi/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849" },
]

[[package]]
@ -2375,7 +2395,7 @@ wheels = [

[[package]]
name = "httpx"
version = "0.27.0"
version = "0.27.2"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "anyio" },
@ -2384,9 +2404,9 @@ dependencies = [
    { name = "idna" },
    { name = "sniffio" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/5c/2d/3da5bdf4408b8b2800061c339f240c1802f2e82d55e50bd39c5a881f47f0/httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/41/7b/ddacf6dcebb42466abd03f368782142baa82e08fc0c1f8eaa05b4bae87d5/httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5" },
    { url = "https://mirrors.aliyun.com/pypi/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0" },
]

[[package]]
@ -2857,24 +2877,24 @@ wheels = [

[[package]]
name = "litellm"
version = "1.48.0"
version = "1.75.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "aiohttp" },
    { name = "click" },
    { name = "httpx" },
    { name = "importlib-metadata" },
    { name = "jinja2" },
    { name = "jsonschema" },
    { name = "openai" },
    { name = "pydantic" },
    { name = "python-dotenv" },
    { name = "requests" },
    { name = "tiktoken" },
    { name = "tokenizers" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/85/cf/ec69c348c6f16148a55657f3bd63215e965028441c0f322ae8edf9c1210a/litellm-1.48.0.tar.gz", hash = "sha256:31a9b8a25a9daf44c24ddc08bf74298da920f2c5cea44135e5061278d0aa6fc9" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/1b/28/50837cb0246c42a8caac45610572883de7f478543cf4d143e84f099c0234/litellm-1.75.0.tar.gz", hash = "sha256:ec7fbfe79e1b9cd4a2b36ca9e71e71959d8fc43305b222e5f257aced1a0d1d63" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/37/2b/6a42747557dc557e71d1e0664c4d5a814b08cda0589213921bb51c64c5e4/litellm-1.48.0-py3-none-any.whl", hash = "sha256:7765e8a92069778f5fc66aacfabd0e2f8ec8d74fb117f5e475567d89b0d376b9" },
    { url = "https://mirrors.aliyun.com/pypi/packages/db/43/e10905870d42e927de3b095a9248f2764156c7eb45ec172d72be35cd2bb4/litellm-1.75.0-py3-none-any.whl", hash = "sha256:1657472f37d291b366050dd2035e3640eebd96142d6fa0f935ceb290a0e1d5ad" },
]

[[package]]
@ -3765,12 +3785,12 @@ name = "onnxruntime-gpu"
version = "1.19.2"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "coloredlogs" },
    { name = "flatbuffers" },
    { name = "numpy" },
    { name = "packaging" },
    { name = "protobuf" },
    { name = "sympy" },
    { name = "coloredlogs", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "flatbuffers", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "numpy", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "packaging", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "protobuf", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
    { name = "sympy", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
]
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/d0/9c/3fa310e0730643051eb88e884f19813a6c8b67d0fbafcda610d960e589db/onnxruntime_gpu-1.19.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a49740e079e7c5215830d30cde3df792e903df007aa0b0fd7aa797937061b27a" },
@ -3783,7 +3803,7 @@ wheels = [

[[package]]
name = "openai"
version = "1.45.0"
version = "1.99.1"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "anyio" },
@ -3795,9 +3815,9 @@ dependencies = [
    { name = "tqdm" },
    { name = "typing-extensions" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/70/cd/5ec65b9a56999370c032af7933433143f78239d44a8c03a5ba34159af945/openai-1.45.0.tar.gz", hash = "sha256:731207d10637335413aa3c0955f8f8df30d7636a4a0f9c381f2209d32cf8de97" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/03/30/f0fb7907a77e733bb801c7bdcde903500b31215141cdb261f04421e6fbec/openai-1.99.1.tar.gz", hash = "sha256:2c9d8e498c298f51bb94bcac724257a3a6cac6139ccdfc1186c6708f7a93120f" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/d4/2a/97e80a4551346efc9cd937e11adb640207acc5045fdf4e06786eac55bfb1/openai-1.45.0-py3-none-any.whl", hash = "sha256:2f1f7b7cf90f038a9f1c24f0d26c0f1790c102ec5acd07ffd70a9b7feac1ff4e" },
    { url = "https://mirrors.aliyun.com/pypi/packages/54/15/9c85154ffd283abfc43309ff3aaa63c3fd02f7767ee684e73670f6c5ade2/openai-1.99.1-py3-none-any.whl", hash = "sha256:8eeccc69e0ece1357b51ca0d9fb21324afee09b20c3e5b547d02445ca18a4e03" },
]

[[package]]
@ -4242,32 +4262,21 @@ wheels = [

[[package]]
name = "playwright"
version = "1.47.0"
version = "1.54.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "greenlet" },
    { name = "pyee" },
]
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/f8/70/01cad1d41861cd939fe66bff725771dd03f2de39b7c25b4479de2f583ce0/playwright-1.47.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:f205df24edb925db1a4ab62f1ab0da06f14bb69e382efecfb0deedc4c7f4b8cd" },
    { url = "https://mirrors.aliyun.com/pypi/packages/42/17/2300e578b434b56ebfc3d56a5e0fe6dc5e99d6ff43a88fa492b881f3b7e3/playwright-1.47.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7fc820faf6885f69a52ba4ec94124e575d3c4a4003bf29200029b4a4f2b2d0ab" },
    { url = "https://mirrors.aliyun.com/pypi/packages/5a/6a/3cff2abfa4b4c52e1fa34fa8b71bf09cc2a89b03b7417733e5138f1be61d/playwright-1.47.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:8e212dc472ff19c7d46ed7e900191c7a786ce697556ac3f1615986ec3aa00341" },
    { url = "https://mirrors.aliyun.com/pypi/packages/80/a6/c5152c817db664d75c439c2bd99d51f906a31c1df4a04e673ef51008b12f/playwright-1.47.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:a1935672531963e4b2a321de5aa59b982fb92463ee6e1032dd7326378e462955" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d6/50/b573c13d3748a1ab94ed45f2faeb868c63263df0055f57028c4cc775419f/playwright-1.47.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0a1b61473d6f7f39c5d77d4800b3cbefecb03344c90b98f3fbcae63294ad249" },
    { url = "https://mirrors.aliyun.com/pypi/packages/7d/6c/34225ee5707db5e34bffa77f05d152c797c0e0b9bf3d3a5b426d99160f8f/playwright-1.47.0-py3-none-win32.whl", hash = "sha256:1b977ed81f6bba5582617684a21adab9bad5676d90a357ebf892db7bdf4a9974" },
    { url = "https://mirrors.aliyun.com/pypi/packages/cb/88/9a3c77025702e506fe04275e677676246ff0b2e6964de5d2527dfdab3416/playwright-1.47.0-py3-none-win_amd64.whl", hash = "sha256:0ec1056042d2e86088795a503347407570bffa32cbe20748e5d4c93dba085280" },
]

[[package]]
name = "playwright-stealth"
version = "1.0.6"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "playwright" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/e5/dc/4e88b517e4c9cfb63f1b0b67d59adddcef2dc2fe0883b90e07119d15895a/playwright-stealth-1.0.6.tar.gz", hash = "sha256:b504d951d00fac755c7d13665a29611d415180510bd7d23f14ebc89439ba2043" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/34/10/60981cb8d8e22487061b98a0803313c4fb519cc95ab1421516304a0cfcd0/playwright_stealth-1.0.6-py3-none-any.whl", hash = "sha256:b1b2bcf58eb6859aa53d42c49b91c4e27b74a6d13fc3d0c85eea513dd55efda3" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f3/09/33d5bfe393a582d8dac72165a9e88b274143c9df411b65ece1cc13f42988/playwright-1.54.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:bf3b845af744370f1bd2286c2a9536f474cc8a88dc995b72ea9a5be714c9a77d" },
    { url = "https://mirrors.aliyun.com/pypi/packages/e1/7b/51882dc584f7aa59f446f2bb34e33c0e5f015de4e31949e5b7c2c10e54f0/playwright-1.54.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:780928b3ca2077aea90414b37e54edd0c4bbb57d1aafc42f7aa0b3fd2c2fac02" },
    { url = "https://mirrors.aliyun.com/pypi/packages/73/a1/7aa8ae175b240c0ec8849fcf000e078f3c693f9aa2ffd992da6550ea0dff/playwright-1.54.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:81d0b6f28843b27f288cfe438af0a12a4851de57998009a519ea84cee6fbbfb9" },
    { url = "https://mirrors.aliyun.com/pypi/packages/34/a9/45084fd23b6206f954198296ce39b0acf50debfdf3ec83a593e4d73c9c8a/playwright-1.54.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:09919f45cc74c64afb5432646d7fef0d19fff50990c862cb8d9b0577093f40cc" },
    { url = "https://mirrors.aliyun.com/pypi/packages/02/d4/6a692f4c6db223adc50a6e53af405b45308db39270957a6afebddaa80ea2/playwright-1.54.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13ae206c55737e8e3eae51fb385d61c0312eeef31535643bb6232741b41b6fdc" },
    { url = "https://mirrors.aliyun.com/pypi/packages/72/7a/4ee60a1c3714321db187bebbc40d52cea5b41a856925156325058b5fca5a/playwright-1.54.0-py3-none-win32.whl", hash = "sha256:0b108622ffb6906e28566f3f31721cd57dda637d7e41c430287804ac01911f56" },
    { url = "https://mirrors.aliyun.com/pypi/packages/aa/77/8f8fae05a242ef639de963d7ae70a69d0da61d6d72f1207b8bbf74ffd3e7/playwright-1.54.0-py3-none-win_amd64.whl", hash = "sha256:9e5aee9ae5ab1fdd44cd64153313a2045b136fcbcfb2541cc0a3d909132671a2" },
    { url = "https://mirrors.aliyun.com/pypi/packages/33/ff/99a6f4292a90504f2927d34032a4baf6adb498dc3f7cf0f3e0e22899e310/playwright-1.54.0-py3-none-win_arm64.whl", hash = "sha256:a975815971f7b8dca505c441a4c56de1aeb56a211290f8cc214eeef5524e8d75" },
]

[[package]]
@ -4653,8 +4662,6 @@ wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/59/fe/aae679b64363eb78326c7fdc9d06ec3de18bac68be4b612fc1fe8902693c/pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886" },
    { url = "https://mirrors.aliyun.com/pypi/packages/54/2f/e97a1b8294db0daaa87012c24a7bb714147c7ade7656973fd6c736b484ff/pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2" },
    { url = "https://mirrors.aliyun.com/pypi/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/9f/7c/f5b0556590e7b4e710509105e668adb55aa9470a9f0e4dea9c40a4a11ce1/pycryptodome-3.23.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:350ebc1eba1da729b35ab7627a833a1a355ee4e852d8ba0447fafe7b14504d56" },
    { url = "https://mirrors.aliyun.com/pypi/packages/33/38/dcc795578d610ea1aaffef4b148b8cafcfcf4d126b1e58231ddc4e475c70/pycryptodome-3.23.0-pp27-pypy_73-win32.whl", hash = "sha256:93837e379a3e5fd2bb00302a47aee9fdf7940d83595be3915752c74033d17ca7" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d9/12/e33935a0709c07de084d7d58d330ec3f4daf7910a18e77937affdb728452/pycryptodome-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ddb95b49df036ddd264a0ad246d1be5b672000f12d6961ea2c267083a5e19379" },
    { url = "https://mirrors.aliyun.com/pypi/packages/22/0b/aa8f9419f25870889bebf0b26b223c6986652bdf071f000623df11212c90/pycryptodome-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e95564beb8782abfd9e431c974e14563a794a4944c29d6d3b7b5ea042110b4" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d4/5e/63f5cbde2342b7f70a39e591dbe75d9809d6338ce0b07c10406f1a140cdc/pycryptodome-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14e15c081e912c4b0d75632acd8382dfce45b258667aa3c67caf7a4d4c13f630" },
@ -4678,8 +4685,6 @@ wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/48/7d/0f2b09490b98cc6a902ac15dda8760c568b9c18cfe70e0ef7a16de64d53a/pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a7a8f33a1f1fb762ede6cc9cbab8f2a9ba13b196bfaf7bc6f0b39d2ba315a43" },
    { url = "https://mirrors.aliyun.com/pypi/packages/b0/1c/375adb14b71ee1c8d8232904e928b3e7af5bbbca7c04e4bec94fe8e90c3d/pycryptodomex-3.20.0-cp35-abi3-win32.whl", hash = "sha256:c39778fd0548d78917b61f03c1fa8bfda6cfcf98c767decf360945fe6f97461e" },
    { url = "https://mirrors.aliyun.com/pypi/packages/b2/e8/1b92184ab7e5595bf38000587e6f8cf9556ebd1bf0a583619bee2057afbd/pycryptodomex-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc" },
    { url = "https://mirrors.aliyun.com/pypi/packages/e7/c5/9140bb867141d948c8e242013ec8a8011172233c898dfdba0a2417c3169a/pycryptodomex-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1be97461c439a6af4fe1cf8bf6ca5936d3db252737d2f379cc6b2e394e12a458" },
    { url = "https://mirrors.aliyun.com/pypi/packages/5e/6a/04acb4978ce08ab16890c70611ebc6efd251681341617bbb9e53356dee70/pycryptodomex-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:19764605feea0df966445d46533729b645033f134baeb3ea26ad518c9fdf212c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/eb/df/3f1ea084e43b91e6d2b6b3493cc948864c17ea5d93ff1261a03812fbfd1a/pycryptodomex-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e497413560e03421484189a6b65e33fe800d3bd75590e6d78d4dfdb7accf3b" },
    { url = "https://mirrors.aliyun.com/pypi/packages/c9/f3/83ffbdfa0c8f9154bcd8866895f6cae5a3ec749da8b0840603cf936c4412/pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48217c7901edd95f9f097feaa0388da215ed14ce2ece803d3f300b4e694abea" },
    { url = "https://mirrors.aliyun.com/pypi/packages/c9/9d/c113e640aaf02af5631ae2686b742aac5cd0e1402b9d6512b1c7ec5ef05d/pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d00fe8596e1cc46b44bf3907354e9377aa030ec4cd04afbbf6e899fc1e2a7781" },
@ -4792,14 +4797,14 @@ wheels = [

[[package]]
name = "pyee"
version = "12.0.0"
version = "13.0.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "typing-extensions" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/d2/a7/8faaa62a488a2a1e0d56969757f087cbd2729e9bcfa508c230299f366b4c/pyee-12.0.0.tar.gz", hash = "sha256:c480603f4aa2927d4766eb41fa82793fe60a82cbfdb8d688e0d08c55a534e145" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/95/03/1fd98d5841cd7964a27d729ccf2199602fe05eb7a405c1462eb7277945ed/pyee-13.0.0.tar.gz", hash = "sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/1d/0d/95993c08c721ec68892547f2117e8f9dfbcef2ca71e098533541b4a54d5f/pyee-12.0.0-py3-none-any.whl", hash = "sha256:7b14b74320600049ccc7d0e0b1becd3b4bd0a03c745758225e31a59f4095c990" },
    { url = "https://mirrors.aliyun.com/pypi/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498" },
]

[[package]]
@ -5026,6 +5031,63 @@ wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820" },
]

[[package]]
name = "python-calamine"
version = "0.4.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "packaging" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/cc/03/269f96535705b2f18c8977fa58e76763b4e4727a9b3ae277a9468c8ffe05/python_calamine-0.4.0.tar.gz", hash = "sha256:94afcbae3fec36d2d7475095a59d4dc6fae45829968c743cb799ebae269d7bbf" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/8e/06/885c73fd472cb76af4c4650174a7c11b77a8bc40585044bc445ac694e5e6/python_calamine-0.4.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:06011f11fd8d2dbfe0bc9bd8bd135c191aafe66f2d0c9eecf0ae3cb38f42f888" },
    { url = "https://mirrors.aliyun.com/pypi/packages/2d/76/28c704d875046cc1ca92d8c6e680f6bc38d83735397fee821929691fd57f/python_calamine-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12e350e5967bf3206a8b472d9b6c348ff37ae791dba1a1715e076b2c39328557" },
    { url = "https://mirrors.aliyun.com/pypi/packages/ff/71/dc00e6d9187044d15c85cd0875d8aba2b3e0d3051ceabd47d859f40f69c8/python_calamine-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35be298f69006e86b0311a538c1c9694ce3012237c33572d3dfe2bea6b5b9820" },
    { url = "https://mirrors.aliyun.com/pypi/packages/4b/f6/be5e35263ceec21e77810d7900235124a9a83fd3c0afbbbb79da658d535c/python_calamine-0.4.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7abb10367aea435ca473b9b698636db912f2ab164f19a6c9675710ed926f33ac" },
    { url = "https://mirrors.aliyun.com/pypi/packages/9c/61/0068297ec0000b2c0755a608c8068a1070b8193675d2ff603d390291aa45/python_calamine-0.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:58c2c4440982ec6db64c826136661f84f84bc0d8ee0cdd64a38128cd217797eb" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d1/1d/8a9c7d491d31db1def335f4d90b85ea29940d84c59a27a88a442b840fda7/python_calamine-0.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e58cd89154fd1b5ef77c609f63dce108d390ece5a5f3225ca3ebedc8d343e9d5" },
    { url = "https://mirrors.aliyun.com/pypi/packages/40/87/a104c11b320d3fb7f1997d97207addce0fe5e1d41e5fd2e8adb0fb8b1325/python_calamine-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f90f85e04c281d96c6dc5551176fc4e32c95257c3a2d384a947b3e68275c7d6" },
    { url = "https://mirrors.aliyun.com/pypi/packages/8c/a5/9580de7758950b39c5048787909b639771c92ad6ea7f909a48dca1dbc6fe/python_calamine-0.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f8408b01d8097b2e662d0205ca09695788fb5f3492ade27de4ad4160cb6bd4" },
    { url = "https://mirrors.aliyun.com/pypi/packages/25/a1/2b8282ec4acdf1879f9d46d84a6907843d2a331639719a2cfc90356345b5/python_calamine-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3d61957c10d37e6bf508fafdf52e6bb3112db8196e30bca8bc4b4560db2cc5f5" },
    { url = "https://mirrors.aliyun.com/pypi/packages/95/03/ae56667a2a2eb273eea5f754212454c7073f8abe8e0fdca0edfbe5c0cf37/python_calamine-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5a58bbfcad9c1192dada189e367ed46e72037fcaec585e970fa919b92e07a57" },
    { url = "https://mirrors.aliyun.com/pypi/packages/52/60/d7aaa39977ab401de33ae975dd704805ec9c76117f0fe3a53e45b718822c/python_calamine-0.4.0-cp310-cp310-win32.whl", hash = "sha256:f06415096bcd9218b6c15d39ee2006ec0f32282e3d08605391d2a8a52187f9ca" },
    { url = "https://mirrors.aliyun.com/pypi/packages/6a/f5/fdfeccd7d66e5c9c834288df6a657858a046d94a6e4cd624418cb5bb96dd/python_calamine-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:e457d1e07acb2798b72e70bd4e88f07cd486ca5129a19fadc6aa19a2cd4e76e8" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d4/a5/bcd82326d0ff1ab5889e7a5e13c868b483fc56398e143aae8e93149ba43b/python_calamine-0.4.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d1687f8c4d7852920c7b4e398072f183f88dd273baf5153391edc88b7454b8c0" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f6/1a/a681f1d2f28164552e91ef47bcde6708098aa64a5f5fe3952f22362d340a/python_calamine-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:258d04230bebbbafa370a15838049d912d6a0a2c4da128943d8160ca4b6db58e" },
    { url = "https://mirrors.aliyun.com/pypi/packages/3d/92/2fc911431733739d4e7a633cefa903fa49a6b7a61e8765bad29a4a7c47b1/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686e491634934f059553d55f77ac67ca4c235452d5b444f98fe79b3579f1ea5" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f4/f0/48bfae6802eb360028ca6c15e9edf42243aadd0006b6ac3e9edb41a57119/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4480af7babcc2f919c638a554b06b7b145d9ab3da47fd696d68c2fc6f67f9541" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a4/dc/f8c956e15bac9d5d1e05cd1b907ae780e40522d2fd103c8c6e2f21dff4ed/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e405b87a8cd1e90a994e570705898634f105442029f25bab7da658ee9cbaa771" },
    { url = "https://mirrors.aliyun.com/pypi/packages/54/3f/e69ab97c7734fb850fba2f506b775912fd59f04e17488582c8fbf52dbc72/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a831345ee42615f0dfcb0ed60a3b1601d2f946d4166edae64fd9a6f9bbd57fc1" },
    { url = "https://mirrors.aliyun.com/pypi/packages/79/03/b4c056b468908d87a3de94389166e0f4dba725a70bc39e03bc039ba96f6b/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9951b8e4cafb3e1623bb5dfc31a18d38ef43589275f9657e99dfcbe4c8c4b33e" },
    { url = "https://mirrors.aliyun.com/pypi/packages/86/4f/b9092f7c970894054083656953184e44cb2dadff8852425e950d4ca419af/python_calamine-0.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6619fe3b5c9633ed8b178684605f8076c9d8d85b29ade15f7a7713fcfdee2d0" },
    { url = "https://mirrors.aliyun.com/pypi/packages/64/da/137239027bf253aabe7063450950085ec9abd827d0cbc5170f585f38f464/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2cc45b8e76ee331f6ea88ca23677be0b7a05b502cd4423ba2c2bc8dad53af1be" },
    { url = "https://mirrors.aliyun.com/pypi/packages/80/96/74c38bcf6b6825d5180c0e147b85be8c52dbfba11848b1e98ba358e32a64/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1b2cfb7ced1a7c80befa0cfddfe4aae65663eb4d63c4ae484b9b7a80ebe1b528" },
    { url = "https://mirrors.aliyun.com/pypi/packages/33/95/9d7b8fe8b32d99a6c79534df3132cfe40e9df4a0f5204048bf5e66ddbd93/python_calamine-0.4.0-cp311-cp311-win32.whl", hash = "sha256:04f4e32ee16814fc1fafc49300be8eeb280d94878461634768b51497e1444bd6" },
    { url = "https://mirrors.aliyun.com/pypi/packages/7c/e3/1c6cd9fd499083bea6ff1c30033ee8215b9f64e862babf5be170cacae190/python_calamine-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:a8543f69afac2213c0257bb56215b03dadd11763064a9d6b19786f27d1bef586" },
    { url = "https://mirrors.aliyun.com/pypi/packages/94/1c/3105d19fbab6b66874ce8831652caedd73b23b72e88ce18addf8ceca8c12/python_calamine-0.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:54622e35ec7c3b6f07d119da49aa821731c185e951918f152c2dbf3bec1e15d6" },
    { url = "https://mirrors.aliyun.com/pypi/packages/63/60/f951513aaaa470b3a38a87d65eca45e0a02bc329b47864f5a17db563f746/python_calamine-0.4.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:74bca5d44a73acf3dcfa5370820797fcfd225c8c71abcddea987c5b4f5077e98" },
    { url = "https://mirrors.aliyun.com/pypi/packages/76/3f/789955bbc77831c639890758f945eb2b25d6358065edf00da6751226cf31/python_calamine-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf80178f5d1b0ee2ccfffb8549c50855f6249e930664adc5807f4d0d6c2b269c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/00/4c/f87d17d996f647030a40bfd124fe45fe893c002bee35ae6aca9910a923ae/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65cfef345386ae86f7720f1be93495a40fd7e7feabb8caa1df5025d7fbc58a1f" },
    { url = "https://mirrors.aliyun.com/pypi/packages/47/d2/3269367303f6c0488cf1bfebded3f9fe968d118a988222e04c9b2636bf2e/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f23e6214dbf9b29065a5dcfd6a6c674dd0e251407298c9138611c907d53423ff" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f9/6d/c7ac35f5c7125e8bd07eb36773f300fda20dd2da635eae78a8cebb0b6ab7/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d792d304ee232ab01598e1d3ab22e074a32c2511476b5fb4f16f4222d9c2a265" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f0/81/5ea8792a2e9ab5e2a05872db3a4d3ed3538ad5af1861282c789e2f13a8cf/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf813425918fd68f3e991ef7c4b5015be0a1a95fc4a8ab7e73c016ef1b881bb4" },
    { url = "https://mirrors.aliyun.com/pypi/packages/cc/6e/989e56e6f073fc0981a74ba7a393881eb351bb143e5486aa629b5e5d6a8b/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbe2a0ccb4d003635888eea83a995ff56b0748c8c76fc71923544f5a4a7d4cd7" },
    { url = "https://mirrors.aliyun.com/pypi/packages/5d/92/2c9bd64277c6fe4be695d7d5a803b38d953ec8565037486be7506642c27c/python_calamine-0.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7b3bb5f0d910b9b03c240987560f843256626fd443279759df4e91b717826d2" },
    { url = "https://mirrors.aliyun.com/pypi/packages/64/fa/fc758ca37701d354a6bc7d63118699f1c73788a1f2e1b44d720824992764/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bd2c0fc2b5eabd08ceac8a2935bffa88dbc6116db971aa8c3f244bad3fd0f644" },
    { url = "https://mirrors.aliyun.com/pypi/packages/65/52/40d7e08ae0ddba331cdc9f7fb3e92972f8f38d7afbd00228158ff6d1fceb/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:85b547cb1c5b692a0c2406678d666dbc1cec65a714046104683fe4f504a1721d" },
    { url = "https://mirrors.aliyun.com/pypi/packages/16/de/e8a071c0adfda73285d891898a24f6e99338328c404f497ff5b0e6bc3d45/python_calamine-0.4.0-cp312-cp312-win32.whl", hash = "sha256:4c2a1e3a0db4d6de4587999a21cc35845648c84fba81c03dd6f3072c690888e4" },
    { url = "https://mirrors.aliyun.com/pypi/packages/5e/f2/7fdfada13f80db12356853cf08697ff4e38800a1809c2bdd26ee60962e7a/python_calamine-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b193c89ffcc146019475cd121c552b23348411e19c04dedf5c766a20db64399a" },
    { url = "https://mirrors.aliyun.com/pypi/packages/20/66/d37412ad854480ce32f50d9f74f2a2f88b1b8a6fbc32f70aabf3211ae89e/python_calamine-0.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:43a0f15e0b60c75a71b21a012b911d5d6f5fa052afad2a8edbc728af43af0fcf" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a1/8c/120a1128ff422dba43d6f2d1d19520bca95abf1e5135bbe3b84a782d3927/python_calamine-0.4.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4f9a44015de9e19a876babf707dc55708881930024220c8ae926ea0255f705fe" },
    { url = "https://mirrors.aliyun.com/pypi/packages/db/62/6062945b8d5fc73d0a0c44b25ee5f4037cc32d62b57688a0d0ca6763006d/python_calamine-0.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2c10bb42e0d0810368e78ee9359902f999a1f09bcc2391b060f91f981f75ae21" },
{ url = "https://mirrors.aliyun.com/pypi/packages/db/62/6062945b8d5fc73d0a0c44b25ee5f4037cc32d62b57688a0d0ca6763006d/python_calamine-0.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2c10bb42e0d0810368e78ee9359902f999a1f09bcc2391b060f91f981f75ae21" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/9f/7b/597470e5349056a1dd7b0e3a7e838da53cba9186771ca5501a264446f4ad/python_calamine-0.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:494c5dd1dcee25935ff9d7a9eef6b0f629266d1670aef3ee5e0d38370dcb3352" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/4b/e9/fabdd376713409e6ae6a8fee41293304798d794a64c9d407ba90f765f3d4/python_calamine-0.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f04fb24e70ab4403fc367b9b779eaa3bf61c140908d9115ddfe1e221372d5d4" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/80/8e/cc09cc14276662cea1f161a20bdce46d8bfd409e84027a0a0515a00b63f5/python_calamine-0.4.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:49dce56dbd1efc024b63b913595f1a9bef6f66a6467aefad7dcd548654fedb5b" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/7b/b4/44f560b0ab4dcb49845f5c5361641885f8dc2b7e9b35fbf59242191c2eeb/python_calamine-0.4.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7dc1755cd0b10ce5e2d80e77e9f19c13ed405b354178c3547ba5a11d34fce6ea" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/9b/4b/ee4ac45d500bd2ae189f84adf3338dbd32579fa2198a05ad180666578575/python_calamine-0.4.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:9d981efa53ddfd8733555ad4c9368c8c1254bd8d1e162c93b8d341ead6acc5a9" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/74/99/835a9cd3ed503cb51fd15039b6404c536d201a6f3d16a6e069ce1079c5e4/python_calamine-0.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b370998567de0cd7a36a8ac73acabefea8397ad2d9aad3cf245b5d35f74cb990" },
|
||||
]
|
||||
|
||||
[[package]]
name = "python-dateutil"
version = "2.8.2"
@ -5239,6 +5301,7 @@ dependencies = [
{ name = "itsdangerous" },
{ name = "json-repair" },
{ name = "langfuse" },
{ name = "litellm" },
{ name = "markdown" },
{ name = "markdown-to-json" },
{ name = "mcp" },
@ -5271,6 +5334,7 @@ dependencies = [
{ name = "pyodbc" },
{ name = "pypdf" },
{ name = "pypdf2" },
{ name = "python-calamine" },
{ name = "python-dateutil" },
{ name = "python-docx" },
{ name = "python-dotenv" },
@ -5357,7 +5421,7 @@ requires-dist = [
{ name = "click", specifier = ">=8.1.8" },
{ name = "cn2an", specifier = "==0.5.22" },
{ name = "cohere", specifier = "==5.6.2" },
{ name = "crawl4ai", specifier = "==0.3.8" },
{ name = "crawl4ai", specifier = ">=0.3.8" },
{ name = "dashscope", specifier = "==1.20.11" },
{ name = "datrie", specifier = "==0.8.2" },
{ name = "debugpy", specifier = ">=1.8.13" },
@ -5384,13 +5448,14 @@ requires-dist = [
{ name = "groq", specifier = "==0.9.0" },
{ name = "hanziconv", specifier = "==0.3.2" },
{ name = "html-text", specifier = "==0.6.2" },
{ name = "httpx", specifier = "==0.27.0" },
{ name = "httpx", specifier = "==0.27.2" },
{ name = "huggingface-hub", specifier = ">=0.25.0,<0.26.0" },
{ name = "infinity-emb", specifier = ">=0.0.66,<0.0.67" },
{ name = "infinity-sdk", specifier = "==0.6.0.dev4" },
{ name = "itsdangerous", specifier = "==2.1.2" },
{ name = "json-repair", specifier = "==0.35.0" },
{ name = "langfuse", specifier = ">=2.60.0" },
{ name = "litellm", specifier = ">=1.74.15.post1" },
{ name = "markdown", specifier = "==3.6" },
{ name = "markdown-to-json", specifier = "==2.1.1" },
{ name = "mcp", specifier = ">=1.9.4" },
@ -5402,7 +5467,7 @@ requires-dist = [
{ name = "ollama", specifier = "==0.2.1" },
{ name = "onnxruntime", marker = "platform_machine != 'x86_64' or sys_platform == 'darwin'", specifier = "==1.19.2" },
{ name = "onnxruntime-gpu", marker = "platform_machine == 'x86_64' and sys_platform != 'darwin'", specifier = "==1.19.2" },
{ name = "openai", specifier = "==1.45.0" },
{ name = "openai", specifier = ">=1.45.0" },
{ name = "opencv-python", specifier = "==4.10.0.84" },
{ name = "opencv-python-headless", specifier = "==4.10.0.84" },
{ name = "opendal", specifier = ">=0.45.0,<0.46.0" },
@ -5423,6 +5488,7 @@ requires-dist = [
{ name = "pyodbc", specifier = ">=5.2.0,<6.0.0" },
{ name = "pypdf", specifier = ">=5.0.0,<6.0.0" },
{ name = "pypdf2", specifier = ">=3.0.1,<4.0.0" },
{ name = "python-calamine", specifier = ">=0.4.0" },
{ name = "python-dateutil", specifier = "==2.8.2" },
{ name = "python-docx", specifier = ">=1.1.2,<2.0.0" },
{ name = "python-dotenv", specifier = "==1.0.1" },
@ -5467,6 +5533,7 @@ requires-dist = [
{ name = "yfinance", specifier = "==0.2.65" },
{ name = "zhipuai", specifier = "==2.0.1" },
]
provides-extras = ["full"]

[package.metadata.requires-dev]
test = [
@ -5481,6 +5548,18 @@ test = [
{ name = "requests-toolbelt", specifier = ">=1.0.0" },
]

[[package]]
name = "rank-bm25"
version = "0.2.2"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/fc/0a/f9579384aa017d8b4c15613f86954b92a95a93d641cc849182467cf0bb3b/rank_bm25-0.2.2.tar.gz", hash = "sha256:096ccef76f8188563419aaf384a02f0ea459503fdf77901378d4fd9d87e5e51d" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/2a/21/f691fb2613100a62b3fa91e9988c991e9ca5b89ea31c0d3152a3210344f9/rank_bm25-0.2.2-py3-none-any.whl", hash = "sha256:7bd4a95571adadfc271746fa146a4bcfd89c0cf731e49c3d1ad863290adbe8ae" },
]

[[package]]
name = "ranx"
version = "0.3.20"
@ -6423,6 +6502,19 @@ wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/55/08/98090d1a139e8995053ed22e099b43aa4dea8cffe056f8f0bc5178aeecbd/tencentcloud_sdk_python-3.0.1215-py2.py3-none-any.whl", hash = "sha256:899ced749baf74846f1eabf452f74aa0e48d1965f0ca7828a8b73b446f76f5f2" },
]

[[package]]
name = "tf-playwright-stealth"
version = "1.2.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
{ name = "fake-http-header" },
{ name = "playwright" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/d6/6b/32bb58c65991f91aeaaf7473b650175d9d4af5dd383983d177d49ccba08d/tf_playwright_stealth-1.2.0.tar.gz", hash = "sha256:7bb8d32d3e60324fbf6b9eeae540b8cd9f3b9e07baeb33b025dbc98ad47658ba" }
wheels = [
{ url = "https://mirrors.aliyun.com/pypi/packages/11/3d/2653f4cf49660bb44eeac8270617cc4c0287d61716f249f55053f0af0724/tf_playwright_stealth-1.2.0-py3-none-any.whl", hash = "sha256:26ee47ee89fa0f43c606fe37c188ea3ccd36f96ea90c01d167b768df457e7886" },
]

[[package]]
name = "threadpoolctl"
version = "3.6.0"

@ -1,5 +1,43 @@
// .eslintrc.js
module.exports = {
// Umi project
extends: [require.resolve('umi/eslint'), 'plugin:react-hooks/recommended'],
plugins: ['check-file'],
rules: {
'@typescript-eslint/no-use-before-define': [
'warn',
{
functions: false,
variables: true,
},
],
'check-file/filename-naming-convention': [
'error',
{
'**/*.{jsx,tsx}': 'KEBAB_CASE',
'**/*.{js,ts}': 'KEBAB_CASE',
},
],
'check-file/folder-naming-convention': [
'error',
{
'src/**/': 'KEBAB_CASE',
'mocks/*/': 'KEBAB_CASE',
},
],
'react/no-unescaped-entities': [
'warn',
{
forbid: [
{
char: "'",
alternatives: ['‘', '’'],
},
{
char: '"',
alternatives: ['“', '”'],
},
],
},
],
},
};

@ -3,6 +3,7 @@ import TerserPlugin from 'terser-webpack-plugin';
import { defineConfig } from 'umi';
import { appName } from './src/conf.json';
import routes from './src/routes';
const ESLintPlugin = require('eslint-webpack-plugin');

export default defineConfig({
title: appName,
@ -52,6 +53,15 @@ export default defineConfig({

memo.optimization.minimizer('terser').use(TerserPlugin); // Fixed the issue that the page displayed an error after packaging lexical with terser

// memo.plugin('eslint').use(ESLintPlugin, [
// {
// extensions: ['js', 'ts', 'tsx'],
// failOnError: true,
// exclude: ['**/node_modules/**', '**/mfsu**', '**/mfsu-virtual-entry**'],
// files: ['src/**/*.{js,ts,tsx}'],
// },
// ]);

return memo;
},
tailwindcss: {},

179 web/package-lock.json generated
@ -121,6 +121,8 @@
"@umijs/plugins": "^4.1.0",
"@welldone-software/why-did-you-render": "^8.0.3",
"cross-env": "^7.0.3",
"eslint-plugin-check-file": "^2.8.0",
"eslint-webpack-plugin": "^4.1.0",
"html-loader": "^5.1.0",
"husky": "^9.0.11",
"jest": "^29.7.0",
@ -9147,9 +9149,10 @@
}
},
"node_modules/@types/eslint": {
"version": "8.56.1",
"resolved": "https://registry.npmmirror.com/@types/eslint/-/eslint-8.56.1.tgz",
"integrity": "sha512-18PLWRzhy9glDQp3+wOgfLYRWlhgX0azxgJ63rdpoUHyrC9z0f5CkFburjQx4uD7ZCruw85ZtMt6K+L+R8fLJQ==",
"version": "9.6.1",
"resolved": "https://registry.npmmirror.com/@types/eslint/-/eslint-9.6.1.tgz",
"integrity": "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==",
"license": "MIT",
"peer": true,
"dependencies": {
"@types/estree": "*",
@ -16334,6 +16337,27 @@
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}
},
"node_modules/eslint-plugin-check-file": {
"version": "2.8.0",
"resolved": "https://registry.npmmirror.com/eslint-plugin-check-file/-/eslint-plugin-check-file-2.8.0.tgz",
"integrity": "sha512-FvvafMTam2WJYH9uj+FuMxQ1y+7jY3Z6P9T4j2214cH0FBxNzTcmeCiGTj1Lxp3mI6kbbgsXvmgewvf+llKYyw==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"is-glob": "^4.0.3",
"micromatch": "^4.0.5"
},
"engines": {
"node": ">=18"
},
"funding": {
"type": "ko_fi",
"url": "https://ko-fi.com/huanluo"
},
"peerDependencies": {
"eslint": ">=7.28.0"
}
},
"node_modules/eslint-plugin-jest": {
"version": "27.2.3",
"resolved": "https://registry.npmmirror.com/eslint-plugin-jest/-/eslint-plugin-jest-27.2.3.tgz",
@ -16437,6 +16461,141 @@
"node": ">=10"
}
},
"node_modules/eslint-webpack-plugin": {
"version": "4.1.0",
"resolved": "https://registry.npmmirror.com/eslint-webpack-plugin/-/eslint-webpack-plugin-4.1.0.tgz",
"integrity": "sha512-C3wAG2jyockIhN0YRLuKieKj2nx/gnE/VHmoHemD5ifnAtY6ZU+jNPfzPoX4Zd6RIbUyWTiZUh/ofUlBhoAX7w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/eslint": "^8.56.5",
"jest-worker": "^29.7.0",
"micromatch": "^4.0.5",
"normalize-path": "^3.0.0",
"schema-utils": "^4.2.0"
},
"engines": {
"node": ">= 14.15.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/webpack"
},
"peerDependencies": {
"eslint": "^8.0.0",
"webpack": "^5.0.0"
}
},
"node_modules/eslint-webpack-plugin/node_modules/@types/eslint": {
"version": "8.56.12",
"resolved": "https://registry.npmmirror.com/@types/eslint/-/eslint-8.56.12.tgz",
"integrity": "sha512-03ruubjWyOHlmljCVoxSuNDdmfZDzsrrz0P2LeJsOXr+ZwFQ+0yQIwNCwt/GYhV7Z31fgtXJTAEs+FYlEL851g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/estree": "*",
"@types/json-schema": "*"
}
},
"node_modules/eslint-webpack-plugin/node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmmirror.com/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
"json-schema-traverse": "^1.0.0",
"require-from-string": "^2.0.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/eslint-webpack-plugin/node_modules/ajv-keywords": {
"version": "5.1.0",
"resolved": "https://registry.npmmirror.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz",
"integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==",
"dev": true,
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.3"
},
"peerDependencies": {
"ajv": "^8.8.2"
}
},
"node_modules/eslint-webpack-plugin/node_modules/has-flag": {
"version": "4.0.0",
"resolved": "https://registry.npmmirror.com/has-flag/-/has-flag-4.0.0.tgz",
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/eslint-webpack-plugin/node_modules/jest-worker": {
"version": "29.7.0",
"resolved": "https://registry.npmmirror.com/jest-worker/-/jest-worker-29.7.0.tgz",
"integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"jest-util": "^29.7.0",
"merge-stream": "^2.0.0",
"supports-color": "^8.0.0"
},
"engines": {
"node": "^14.15.0 || ^16.10.0 || >=18.0.0"
}
},
"node_modules/eslint-webpack-plugin/node_modules/json-schema-traverse": {
"version": "1.0.0",
"resolved": "https://registry.npmmirror.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"dev": true,
"license": "MIT"
},
"node_modules/eslint-webpack-plugin/node_modules/schema-utils": {
"version": "4.3.2",
"resolved": "https://registry.npmmirror.com/schema-utils/-/schema-utils-4.3.2.tgz",
"integrity": "sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/json-schema": "^7.0.9",
"ajv": "^8.9.0",
"ajv-formats": "^2.1.1",
"ajv-keywords": "^5.1.0"
},
"engines": {
"node": ">= 10.13.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/webpack"
}
},
"node_modules/eslint-webpack-plugin/node_modules/supports-color": {
"version": "8.1.1",
"resolved": "https://registry.npmmirror.com/supports-color/-/supports-color-8.1.1.tgz",
"integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^4.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/supports-color?sponsor=1"
}
},
"node_modules/eslint/node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-4.3.0.tgz",
@ -17222,9 +17381,10 @@
}
},
"node_modules/flatted": {
"version": "3.2.9",
"resolved": "https://registry.npmmirror.com/flatted/-/flatted-3.2.9.tgz",
"integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==",
"version": "3.3.3",
"resolved": "https://registry.npmmirror.com/flatted/-/flatted-3.3.3.tgz",
"integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==",
"license": "ISC",
"peer": true
},
"node_modules/flatten": {
@ -24121,9 +24281,10 @@
"integrity": "sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w=="
},
"node_modules/micromatch": {
"version": "4.0.7",
"resolved": "https://registry.npmmirror.com/micromatch/-/micromatch-4.0.7.tgz",
"integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==",
"version": "4.0.8",
"resolved": "https://registry.npmmirror.com/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"license": "MIT",
"dependencies": {
"braces": "^3.0.3",
"picomatch": "^2.3.1"

@ -132,6 +132,8 @@
"@umijs/plugins": "^4.1.0",
"@welldone-software/why-did-you-render": "^8.0.3",
"cross-env": "^7.0.3",
"eslint-plugin-check-file": "^2.8.0",
"eslint-webpack-plugin": "^4.1.0",
"html-loader": "^5.1.0",
"husky": "^9.0.11",
"jest": "^29.7.0",

@ -80,7 +80,6 @@ export function ChunkMethodDialog({
hideModal,
onOk,
parserId,
documentId,
documentExtension,
visible,
parserConfig,

@ -1,4 +1,5 @@
import { Form, FormInstance, Input, InputRef, Typography } from 'antd';
import { omit } from 'lodash';
import React, { useContext, useEffect, useRef, useState } from 'react';

const EditableContext = React.createContext<FormInstance<any> | null>(null);
@ -15,15 +16,12 @@ interface Item {
address: string;
}

export const EditableRow: React.FC<EditableRowProps> = ({
index,
...props
}) => {
export const EditableRow: React.FC<EditableRowProps> = ({ ...props }) => {
const [form] = Form.useForm();
return (
<Form form={form} component={false}>
<EditableContext.Provider value={form}>
<tr {...props} />
<tr {...omit(props, 'index')} />
</EditableContext.Provider>
</Form>
);

@ -31,7 +31,7 @@ const HightLightMarkdown = ({
components={
{
code(props: any) {
const { children, className, node, ...rest } = props;
const { children, className, ...rest } = props;
const match = /language-(\w+)/.exec(className || '');
return match ? (
<SyntaxHighlighter

@ -30,15 +30,16 @@ interface LlmSettingFieldItemsProps {

export const LlmSettingSchema = {
llm_id: z.string(),
temperature: z.coerce.number(),
top_p: z.string(),
presence_penalty: z.coerce.number(),
frequency_penalty: z.coerce.number(),
temperatureEnabled: z.boolean(),
topPEnabled: z.boolean(),
presencePenaltyEnabled: z.boolean(),
frequencyPenaltyEnabled: z.boolean(),
maxTokensEnabled: z.boolean(),
temperature: z.coerce.number().optional(),
top_p: z.number().optional(),
presence_penalty: z.coerce.number().optional(),
frequency_penalty: z.coerce.number().optional(),
temperatureEnabled: z.boolean().optional(),
topPEnabled: z.boolean().optional(),
presencePenaltyEnabled: z.boolean().optional(),
frequencyPenaltyEnabled: z.boolean().optional(),
maxTokensEnabled: z.boolean().optional(),
max_tokens: z.number().optional(),
};

export function LlmSettingFieldItems({
@ -53,13 +54,6 @@ export function LlmSettingFieldItems({
LlmModelType.Image2text,
]);

const handleChange = useHandleFreedomChange();

const parameterOptions = Object.values(ModelVariableType).map((x) => ({
label: t(camelCase(x)),
value: x,
}));

const getFieldWithPrefix = useCallback(
(name: string) => {
return prefix ? `${prefix}.${name}` : name;
@ -67,6 +61,13 @@ export function LlmSettingFieldItems({
[prefix],
);

const handleChange = useHandleFreedomChange(getFieldWithPrefix);

const parameterOptions = Object.values(ModelVariableType).map((x) => ({
label: t(camelCase(x)),
value: x,
}));

return (
<div className="space-y-5">
<FormField
@ -77,6 +78,7 @@ export function LlmSettingFieldItems({
<FormLabel>{t('model')}</FormLabel>
<FormControl>
<SelectWithSearch
allowClear
options={options || modelOptions}
{...field}
></SelectWithSearch>

@ -9,7 +9,7 @@ import {
FormLabel,
FormMessage,
} from '../ui/form';
import { Input } from '../ui/input';
import { NumberInput } from '../ui/input';
import { Switch } from '../ui/switch';

type SliderInputSwitchFormFieldProps = {
@ -73,15 +73,14 @@ export function SliderInputSwitchFormField({
></SingleFormSlider>
</FormControl>
<FormControl>
<Input
<NumberInput
disabled={disabled}
type={'number'}
className="h-7 w-20"
max={max}
min={min}
step={step}
{...field}
></Input>
></NumberInput>
</FormControl>
</div>
<FormMessage />

@ -4,7 +4,9 @@ import useGraphStore from '@/pages/agent/store';
import { useCallback, useContext } from 'react';
import { useFormContext } from 'react-hook-form';

export function useHandleFreedomChange() {
export function useHandleFreedomChange(
getFieldWithPrefix: (name: string) => string,
) {
const form = useFormContext();
const node = useContext(AgentFormContext);
const updateNodeForm = useGraphStore((state) => state.updateNodeForm);
@ -25,13 +27,14 @@ export function useHandleFreedomChange() {

for (const key in values) {
if (Object.prototype.hasOwnProperty.call(values, key)) {
const element = values[key];
const realKey = getFieldWithPrefix(key);
const element = values[key as keyof typeof values];

form.setValue(key, element);
form.setValue(realKey, element);
}
}
},
[form, node, updateNodeForm],
[form, getFieldWithPrefix, node?.id, updateNodeForm],
);

return handleChange;

@ -1,4 +1,4 @@
import { PromptIcon } from '@/assets/icon/Icon';
import { PromptIcon } from '@/assets/icon/next-icon';
import CopyToClipboard from '@/components/copy-to-clipboard';
import { useSetModalState } from '@/hooks/common-hooks';
import { IRemoveMessageById } from '@/hooks/logic-hooks';

@ -27,6 +27,7 @@ import {
import { cn } from '@/lib/utils';
import { currentReg, replaceTextByOldReg } from '@/pages/chat/utils';
import classNames from 'classnames';
import { omit } from 'lodash';
import { pipe } from 'lodash/fp';
import { CircleAlert } from 'lucide-react';
import { Button } from '../ui/button';
@ -256,11 +257,12 @@ function MarkdownContent({
'custom-typography': ({ children }: { children: string }) =>
renderReference(children),
code(props: any) {
const { children, className, node, ...rest } = props;
const { children, className, ...rest } = props;
const restProps = omit(rest, 'node');
const match = /language-(\w+)/.exec(className || '');
return match ? (
<SyntaxHighlighter
{...rest}
{...restProps}
PreTag="div"
language={match[1]}
wrapLongLines
@ -268,7 +270,10 @@ function MarkdownContent({
{String(children).replace(/\n$/, '')}
</SyntaxHighlighter>
) : (
<code {...rest} className={classNames(className, 'text-wrap')}>
<code
{...restProps}
className={classNames(className, 'text-wrap')}
>
{children}
</code>
);

@ -1,4 +1,4 @@
import { PromptIcon } from '@/assets/icon/Icon';
import { PromptIcon } from '@/assets/icon/next-icon';
import CopyToClipboard from '@/components/copy-to-clipboard';
import { useSetModalState } from '@/hooks/common-hooks';
import { IRemoveMessageById } from '@/hooks/logic-hooks';

@ -17,7 +17,7 @@ import { IRegenerateMessage, IRemoveMessageById } from '@/hooks/logic-hooks';
import { INodeEvent, MessageEventType } from '@/hooks/use-send-message';
import { cn } from '@/lib/utils';
import { AgentChatContext } from '@/pages/agent/context';
import { WorkFlowTimeline } from '@/pages/agent/log-sheet/workFlowTimeline';
import { WorkFlowTimeline } from '@/pages/agent/log-sheet/workflow-timeline';
import { IMessage } from '@/pages/chat/interface';
import { isEmpty } from 'lodash';
import { Atom, ChevronDown, ChevronUp } from 'lucide-react';

@ -124,7 +124,7 @@ interface TimelineIndicatorProps extends React.HTMLAttributes<HTMLDivElement> {
}

function TimelineIndicator({
asChild = false,
// asChild = false,
className,
children,
...props

@ -1,7 +1,6 @@
import { Input } from '@/components/originui/input';
import { useTranslate } from '@/hooks/common-hooks';
import { EyeIcon, EyeOffIcon } from 'lucide-react';
import { ChangeEvent, LegacyRef, forwardRef, useId, useState } from 'react';
import { ChangeEvent, forwardRef, useId, useState } from 'react';

type PropType = {
name: string;
@ -10,17 +9,12 @@ type PropType = {
onChange: (event: ChangeEvent<HTMLInputElement>) => void;
};

function PasswordInput(
props: PropType,
ref: LegacyRef<HTMLInputElement> | undefined,
) {
function PasswordInput(props: PropType) {
const id = useId();
const [isVisible, setIsVisible] = useState<boolean>(false);

const toggleVisibility = () => setIsVisible((prevState) => !prevState);

const { t } = useTranslate('setting');

return (
<div className="*:not-first:mt-2 w-full">
{/* <Label htmlFor={id}>Show/hide password input</Label> */}

@ -23,8 +23,8 @@ const getColorForName = (name: string): { from: string; to: string } => {
const hue = hash % 360;

return {
from: `hsl(${hue}, 70%, 80%)`,
to: `hsl(${hue}, 60%, 30%)`,
to: `hsl(${hue}, 70%, 80%)`,
from: `hsl(${hue}, 60%, 30%)`,
};
};
export const RAGFlowAvatar = memo(

@ -5,6 +5,7 @@ import { Select as AntSelect, Form, message, Slider } from 'antd';
import { useCallback } from 'react';
import { useFormContext } from 'react-hook-form';
import { z } from 'zod';
import { SelectWithSearch } from './originui/select-with-search';
import { SliderInputFormField } from './slider-input-form-field';
import {
FormControl,
@ -13,7 +14,6 @@ import {
FormLabel,
FormMessage,
} from './ui/form';
import { RAGFlowSelect } from './ui/select';

type FieldType = {
rerank_id?: string;
@ -109,11 +109,11 @@ function RerankFormField() {
<FormItem>
<FormLabel tooltip={t('rerankTip')}>{t('rerankModel')}</FormLabel>
<FormControl>
<RAGFlowSelect
<SelectWithSearch
allowClear
{...field}
options={options}
></RAGFlowSelect>
></SelectWithSearch>
</FormControl>
<FormMessage />
</FormItem>
@ -122,6 +122,11 @@ function RerankFormField() {
);
}

export const rerankFormSchema = {
[RerankId]: z.string().optional(),
top_k: z.coerce.number().optional(),
};

export function RerankFormFields() {
const { watch } = useFormContext();
const { t } = useTranslate('knowledgeDetails');

@ -61,11 +61,20 @@ export const keywordsSimilarityWeightSchema = {
keywords_similarity_weight: z.number(),
};

export const vectorSimilarityWeightSchema = {
vector_similarity_weight: z.number(),
};

export const initialVectorSimilarityWeightValue = {
vector_similarity_weight: 0.3,
};

export function SimilaritySliderFormField({
vectorSimilarityWeightName = 'vector_similarity_weight',
isTooltipShown,
}: SimilaritySliderFormFieldProps) {
const { t } = useTranslate('knowledgeDetails');
const isVector = vectorSimilarityWeightName === 'vector_similarity_weight';

return (
<>
@ -78,10 +87,19 @@ export function SimilaritySliderFormField({
></SliderInputFormField>
<SliderInputFormField
name={vectorSimilarityWeightName}
label={t('vectorSimilarityWeight')}
label={t(
isVector ? 'vectorSimilarityWeight' : 'keywordSimilarityWeight',
)}
max={1}
step={0.01}
tooltip={isTooltipShown && t('vectorSimilarityWeightTip')}
tooltip={
isTooltipShown &&
t(
isVector
? 'vectorSimilarityWeightTip'
: 'keywordSimilarityWeightTip',
)
}
></SliderInputFormField>
</>
);

@ -10,7 +10,7 @@ import {
FormLabel,
FormMessage,
} from './ui/form';
import { Input } from './ui/input';
import { NumberInput } from './ui/input';

export type FormLayoutType = {
layout?: FormLayout;
@ -79,19 +79,14 @@ export function SliderInputFormField({
></SingleFormSlider>
</FormControl>
<FormControl>
<Input
type={'number'}
<NumberInput
className="h-7 w-20"
max={max}
min={min}
step={step}
{...field}
onChange={(ev) => {
const value = ev.target.value;
field.onChange(value === '' ? 0 : Number(value)); // convert to number
}}
// defaultValue={defaultValue}
></Input>
></NumberInput>
</FormControl>
</div>
<FormMessage />

@ -1,5 +1,6 @@
import { useTranslate } from '@/hooks/common-hooks';
import { Form, Slider } from 'antd';
import { z } from 'zod';
import { SliderInputFormField } from './slider-input-form-field';

type FieldType = {
@ -32,6 +33,10 @@ interface SimilaritySliderFormFieldProps {
max?: number;
}

export const topnSchema = {
top_n: z.number().optional(),
};

export function TopNFormField({ max = 30 }: SimilaritySliderFormFieldProps) {
const { t } = useTranslate('chat');


@ -39,7 +39,7 @@ const CommandInput = React.forwardRef<
React.ElementRef<typeof CommandPrimitive.Input>,
React.ComponentPropsWithoutRef<typeof CommandPrimitive.Input>
>(({ className, ...props }, ref) => (
<div className="flex items-center border-b px-3" cmdk-input-wrapper="">
<div className="flex items-center border-b px-3" data-cmdk-input-wrapper="">
<Search className="mr-2 h-4 w-4 shrink-0 opacity-50" />
<CommandPrimitive.Input
ref={ref}

@ -34,6 +34,7 @@ export type MultiSelectOptionType = {
label: React.ReactNode;
value: string;
disabled?: boolean;
suffix?: React.ReactNode;
icon?: React.ComponentType<{ className?: string }>;
};

@ -54,23 +55,41 @@ function MultiCommandItem({
return (
<CommandItem
key={option.value}
onSelect={() => toggleOption(option.value)}
className="cursor-pointer"
onSelect={() => {
if (option.disabled) return false;
toggleOption(option.value);
}}
className={cn('cursor-pointer', {
'cursor-not-allowed text-text-disabled': option.disabled,
})}
>
<div
className={cn(
'mr-2 flex h-4 w-4 items-center justify-center rounded-sm border border-primary',
isSelected
? 'bg-primary text-primary-foreground'
: 'opacity-50 [&_svg]:invisible',
isSelected ? 'bg-primary ' : 'opacity-50 [&_svg]:invisible',

{ 'text-primary-foreground': !option.disabled },
{ 'text-text-disabled': option.disabled },
)}
>
<CheckIcon className="h-4 w-4" />
</div>
{option.icon && (
<option.icon className="mr-2 h-4 w-4 text-muted-foreground" />
<option.icon
className={cn('mr-2 h-4 w-4 ', {
'text-text-disabled': option.disabled,
'text-muted-foreground': !option.disabled,
})}
/>
)}
<span className={cn({ 'text-text-disabled': option.disabled })}>
{option.label}
</span>
{option.suffix && (
<span className={cn({ 'text-text-disabled': option.disabled })}>
{option.suffix}
</span>
)}
<span>{option.label}</span>
</CommandItem>
);
}
@ -156,6 +175,11 @@ interface MultiSelectProps
* Optional, can be used to add custom styles.
*/
className?: string;

/**
* If true, renders the multi-select component with a select all option.
*/
showSelectAll?: boolean;
}

export const MultiSelect = React.forwardRef<
@ -172,8 +196,9 @@
animation = 0,
maxCount = 3,
modalPopover = false,
asChild = false,
// asChild = false,
className,
showSelectAll = true,
...props
},
ref,
@ -183,6 +208,10 @@
const [isPopoverOpen, setIsPopoverOpen] = React.useState(false);
const [isAnimating, setIsAnimating] = React.useState(false);

React.useEffect(() => {
setSelectedValues(defaultValue);
}, [defaultValue]);

const flatOptions = React.useMemo(() => {
return options.flatMap((option) =>
'options' in option ? option.options : [option],
@ -340,23 +369,25 @@
<CommandList>
<CommandEmpty>No results found.</CommandEmpty>
<CommandGroup>
<CommandItem
key="all"
onSelect={toggleAll}
className="cursor-pointer"
>
<div
className={cn(
'mr-2 flex h-4 w-4 items-center justify-center rounded-sm border border-primary',
selectedValues.length === flatOptions.length
? 'bg-primary text-primary-foreground'
: 'opacity-50 [&_svg]:invisible',
)}
{showSelectAll && (
<CommandItem
key="all"
onSelect={toggleAll}
className="cursor-pointer"
>
<CheckIcon className="h-4 w-4" />
</div>
<span>(Select All)</span>
</CommandItem>
<div
className={cn(
'mr-2 flex h-4 w-4 items-center justify-center rounded-sm border border-primary',
selectedValues.length === flatOptions.length
? 'bg-primary text-primary-foreground'
: 'opacity-50 [&_svg]:invisible',
)}
>
<CheckIcon className="h-4 w-4" />
</div>
<span>(Select All)</span>
</CommandItem>
)}
{!options.some((x) => 'options' in x) &&
(options as unknown as MultiSelectOptionType[]).map(
(option) => {

@ -26,7 +26,7 @@ const SIDEBAR_WIDTH_MOBILE = '18rem';
const SIDEBAR_WIDTH_ICON = '3rem';
const SIDEBAR_KEYBOARD_SHORTCUT = 'b';

type SidebarContext = {
type SidebarContextType = {
state: 'expanded' | 'collapsed';
open: boolean;
setOpen: (open: boolean) => void;
@ -36,7 +36,7 @@ type SidebarContext = {
toggleSidebar: () => void;
};

const SidebarContext = React.createContext<SidebarContext | null>(null);
const SidebarContext = React.createContext<SidebarContextType | null>(null);

function useSidebar() {
const context = React.useContext(SidebarContext);
@ -116,7 +116,7 @@ const SidebarProvider = React.forwardRef<
// This makes it easier to style the sidebar with Tailwind classes.
const state = open ? 'expanded' : 'collapsed';

const contextValue = React.useMemo<SidebarContext>(
const contextValue = React.useMemo<SidebarContextType>(
() => ({
state,
open,
@ -580,11 +580,8 @@ const SidebarMenuButton = React.forwardRef<
return button;
}

if (typeof tooltip === 'string') {
tooltip = {
children: tooltip,
};
}
const tooltipContent =
typeof tooltip === 'string' ? { children: tooltip } : tooltip;

return (
<Tooltip>
@ -593,7 +590,7 @@
side="right"
align="center"
hidden={state !== 'collapsed' || isMobile}
{...tooltip}
{...tooltipContent}
/>
</Tooltip>
);

@ -4,8 +4,8 @@ import isEqual from 'lodash/isEqual';
import { ReactNode, useCallback, useEffect, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';

export const useSetModalState = () => {
const [visible, setVisible] = useState(false);
export const useSetModalState = (initialVisible = false) => {
const [visible, setVisible] = useState(initialVisible);

const showModal = useCallback(() => {
setVisible(true);

@ -27,7 +27,7 @@ import { useTranslate } from './common-hooks';
import { useSetPaginationParams } from './route-hook';
import { useFetchTenantInfo, useSaveSetting } from './user-setting-hooks';

function usePrevious<T>(value: T) {
export function usePrevious<T>(value: T) {
const ref = useRef<T>();
useEffect(() => {
ref.current = value;

@ -72,9 +72,12 @@ export const useNavigatePage = () => {
navigate(Routes.Searches);
}, [navigate]);

const navigateToSearch = useCallback(() => {
navigate(Routes.Search);
}, [navigate]);
const navigateToSearch = useCallback(
(id: string) => {
navigate(`${Routes.Search}/${id}`);
},
[navigate],
);

const navigateToChunkParsedResult = useCallback(
(id: string, knowledgeId?: string) => () => {

@ -2,11 +2,13 @@ import { FileUploadProps } from '@/components/file-upload';
import message from '@/components/ui/message';
import { AgentGlobals } from '@/constants/agent';
import {
DSL,
IAgentLogsRequest,
IAgentLogsResponse,
IFlow,
IFlowTemplate,
ITraceData,
} from '@/interfaces/database/agent';
import { DSL, IFlow, IFlowTemplate } from '@/interfaces/database/flow';
import { IDebugSingleRequestBody } from '@/interfaces/request/agent';
import i18n from '@/locales/config';
import { BeginId } from '@/pages/agent/constant';
@ -122,7 +124,7 @@ export const useFetchAgentListByPage = () => {
const debouncedSearchString = useDebounce(searchString, { wait: 500 });

const { data, isFetching: loading } = useQuery<{
kbs: IFlow[];
canvas: IFlow[];
total: number;
}>({
queryKey: [
@ -132,7 +134,7 @@ export const useFetchAgentListByPage = () => {
...pagination,
},
],
initialData: { kbs: [], total: 0 },
initialData: { canvas: [], total: 0 },
gcTime: 0,
queryFn: async () => {
const { data } = await agentService.listCanvasTeam(
@ -146,7 +148,7 @@ export const useFetchAgentListByPage = () => {
true,
);

return data?.data ?? [];
return data?.data;
},
});

@ -159,7 +161,7 @@ export const useFetchAgentListByPage = () => {
);

return {
data: data.kbs,
data: data.canvas,
loading,
searchString,
handleInputChange: onInputChange,
@ -366,16 +368,7 @@ export const useUploadCanvasFileWithProgress = (
{
url: api.uploadAgentFile(identifier || id),
data: formData,
onUploadProgress: ({
loaded,
total,
progress,
bytes,
estimated,
rate,
upload,
lengthComputable,
}) => {
onUploadProgress: ({ progress }) => {
files.forEach((file) => {
onProgress(file, (progress || 0) * 100);
});

@ -5,7 +5,7 @@ import { IAskRequestBody } from '@/interfaces/request/chat';
import { IClientConversation } from '@/pages/next-chats/chat/interface';
import { useGetSharedChatSearchParams } from '@/pages/next-chats/hooks/use-send-shared-message';
import { isConversationIdExist } from '@/pages/next-chats/utils';
import chatService from '@/services/next-chat-service ';
import chatService from '@/services/next-chat-service';
import { buildMessageListWithUuid, getConversationId } from '@/utils/chat';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { useDebounce } from 'ahooks';

@ -28,6 +28,8 @@ export const enum KnowledgeApiAction {
DeleteKnowledge = 'deleteKnowledge',
SaveKnowledge = 'saveKnowledge',
FetchKnowledgeDetail = 'fetchKnowledgeDetail',
FetchKnowledgeGraph = 'fetchKnowledgeGraph',
FetchMetadata = 'fetchMetadata',
}

export const useKnowledgeBaseId = (): string => {
@ -263,7 +265,7 @@ export function useFetchKnowledgeGraph() {
const knowledgeBaseId = useKnowledgeBaseId();

const { data, isFetching: loading } = useQuery<IKnowledgeGraph>({
queryKey: ['fetchKnowledgeGraph', knowledgeBaseId],
queryKey: [KnowledgeApiAction.FetchKnowledgeGraph, knowledgeBaseId],
initialData: { graph: {}, mind_map: {} } as IKnowledgeGraph,
enabled: !!knowledgeBaseId,
gcTime: 0,
@ -275,3 +277,20 @@

return { data, loading };
}

export function useFetchKnowledgeMetadata(kbIds: string[] = []) {
const { data, isFetching: loading } = useQuery<
Record<string, Record<string, string[]>>
>({
queryKey: [KnowledgeApiAction.FetchMetadata, kbIds],
initialData: {},
enabled: kbIds.length > 0,
gcTime: 0,
queryFn: async () => {
const { data } = await kbService.getMeta({ kb_ids: kbIds.join(',') });
return data?.data ?? {};
},
});

return { data, loading };
}

@ -73,6 +73,7 @@ export declare interface IFlow {
user_id: string;
permission: string;
nickname: string;
operator_permission: number;
}

export interface IFlowTemplate {

@ -6,6 +6,10 @@ export interface PromptConfig {
prologue: string;
system: string;
tts?: boolean;
quote: boolean;
keyword: boolean;
refine_multiturn: boolean;
use_kg: boolean;
}

export interface Parameter {
@ -26,6 +30,7 @@ export interface Variable {
presence_penalty?: number;
temperature?: number;
top_p?: number;
llm_id?: string;
}

export interface IDialog {
@ -50,6 +55,8 @@ export interface IDialog {
update_time: number;
vector_similarity_weight: number;
similarity_threshold: number;
top_k: number;
top_n: number;
}

export interface IConversation {

@ -13,8 +13,7 @@ import { LanguageList, LanguageMap, ThemeEnum } from '@/constants/common';
import { useChangeLanguage } from '@/hooks/logic-hooks';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { useNavigateWithFromState } from '@/hooks/route-hook';
import { useFetchUserInfo, useListTenant } from '@/hooks/user-setting-hooks';
import { TenantRole } from '@/pages/user-setting/constants';
import { useFetchUserInfo } from '@/hooks/user-setting-hooks';
import { Routes } from '@/routes';
import { camelCase } from 'lodash';
import {
@ -55,11 +54,11 @@ export function Header() {
changeLanguage(key);
};

const { data } = useListTenant();
// const { data } = useListTenant();

const showBell = useMemo(() => {
return data.some((x) => x.role === TenantRole.Invite);
}, [data]);
// const showBell = useMemo(() => {
// return data.some((x) => x.role === TenantRole.Invite);
// }, [data]);

const items = LanguageList.map((x) => ({
key: x,
@ -70,9 +69,9 @@ export function Header() {
setTheme(theme === ThemeEnum.Dark ? ThemeEnum.Light : ThemeEnum.Dark);
}, [setTheme, theme]);

const handleBellClick = useCallback(() => {
navigate('/user-setting/team');
}, [navigate]);
// const handleBellClick = useCallback(() => {
// navigate('/user-setting/team');
// }, [navigate]);

const tagsData = useMemo(
() => [

@ -10,6 +10,7 @@ import translation_fr from './fr';
import translation_id from './id';
import translation_ja from './ja';
import translation_pt_br from './pt-br';
import translation_ru from './ru';
import { createTranslationTable, flattenObject } from './until';
import translation_vi from './vi';
import translation_zh from './zh';
@ -23,12 +24,14 @@ const resources = {
[LanguageAbbreviation.Ja]: translation_ja,
[LanguageAbbreviation.Es]: translation_es,
[LanguageAbbreviation.Vi]: translation_vi,
[LanguageAbbreviation.Ru]: translation_ru,
[LanguageAbbreviation.PtBr]: translation_pt_br,
[LanguageAbbreviation.De]: translation_de,
[LanguageAbbreviation.Fr]: translation_fr,
};
const enFlattened = flattenObject(translation_en);
const viFlattened = flattenObject(translation_vi);
const ruFlattened = flattenObject(translation_ru);
const esFlattened = flattenObject(translation_es);
const zhFlattened = flattenObject(translation_zh);
const jaFlattened = flattenObject(translation_ja);
@ -40,6 +43,7 @@ export const translationTable = createTranslationTable(
[
enFlattened,
viFlattened,
ruFlattened,
esFlattened,
zhFlattened,
zh_traditionalFlattened,
@ -51,6 +55,7 @@
[
'English',
'Vietnamese',
'Rus',
'Spanish',
'zh',
'zh-TRADITIONAL',

@ -124,9 +124,11 @@ export default {
similarityThreshold: 'Similarity threshold',
similarityThresholdTip:
'RAGFlow employs either a combination of weighted keyword similarity and weighted vector cosine similarity, or a combination of weighted keyword similarity and weighted reranking score during retrieval. This parameter sets the threshold for similarities between the user query and chunks. Any chunk with a similarity score below this threshold will be excluded from the results. By default, the threshold is set to 0.2. This means that only chunks with hybrid similarity score of 20 or higher will be retrieved.',
vectorSimilarityWeight: 'Keyword similarity weight',
vectorSimilarityWeight: 'Vector similarity weight',
vectorSimilarityWeightTip:
'This sets the weight of keyword similarity in the combined similarity score, either used with vector cosine similarity or with reranking score. The total of the two weights must equal 1.0.',
keywordSimilarityWeight: 'Keyword similarity weight',
keywordSimilarityWeightTip: '',
testText: 'Test text',
testTextPlaceholder: 'Input your question here!',
testingLabel: 'Testing',
@ -252,7 +254,7 @@ export default {
book: `<p>Supported file formats are <b>DOCX</b>, <b>PDF</b>, <b>TXT</b>.</p><p>
For each book in PDF, please set the <i>page ranges</i> to remove unwanted information and reduce analysis time.</p>`,
laws: `<p>Supported file formats are <b>DOCX</b>, <b>PDF</b>, <b>TXT</b>.</p><p>
Legal documents typically follow a rigorous writing format. We use text feature to identify split point.
Legal documents typically follow a rigorous writing format. We use text feature to identify split point.
</p><p>
The chunk has a granularity consistent with 'ARTICLE', ensuring all upper level text is included in the chunk.
</p>`,
@ -266,7 +268,7 @@ export default {
<li>Then, combine adjacent segments until the token count exceeds the threshold specified by 'Chunk token number for text', at which point a chunk is created.</li></p>`,
paper: `<p>Only <b>PDF</b> file is supported.</p><p>
Papers will be split by section, such as <i>abstract, 1.1, 1.2</i>. </p><p>
This approach enables the LLM to summarize the paper more effectively and to provide more comprehensive, understandable responses.
This approach enables the LLM to summarize the paper more effectively and to provide more comprehensive, understandable responses.
However, it also increases the context for AI conversations and adds to the computational cost for the LLM. So during a conversation, consider reducing the value of ‘<b>topN</b>’.</p>`,
presentation: `<p>Supported file formats are <b>PDF</b>, <b>PPTX</b>.</p><p>
Every page in the slides is treated as a chunk, with its thumbnail image stored.</p><p>
@ -471,7 +473,7 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
modelEnabledTools: 'Enabled tools',
modelEnabledToolsTip:
'Please select one or more tools for the chat model to use. It takes no effect for models not supporting tool call.',
freedom: 'Freedom',
freedom: 'Creativity',
improvise: 'Improvise',
precise: 'Precise',
balance: 'Balance',
@ -563,6 +565,16 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
crossLanguage: 'Cross-language search',
crossLanguageTip: `Select one or more languages for cross‑language search. If no language is selected, the system searches with the original query.`,
createChat: 'Create chat',
metadata: 'Meta Data',
metadataTip:
'Metadata filtering is the process of using metadata attributes (such as tags, categories, or access permissions) to refine and control the retrieval of relevant information within a system.',
conditions: 'Conditions',
addCondition: 'Add Condition',
meta: {
disabled: 'Disabled',
automatic: 'Automatic',
manual: 'Manual',
},
},
setting: {
profile: 'Profile',
@ -670,13 +682,41 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
|
||||
bedrockSKMessage: 'Please input your SECRET KEY',
|
||||
bedrockRegion: 'AWS Region',
|
||||
bedrockRegionMessage: 'Please select!',
|
||||
'us-east-2': 'US East (Ohio)',
|
||||
'us-east-1': 'US East (N. Virginia)',
|
||||
'us-west-1': 'US West (N. California)',
|
||||
'us-west-2': 'US West (Oregon)',
|
||||
'af-south-1': 'Africa (Cape Town)',
|
||||
'ap-east-1': 'Asia Pacific (Hong Kong)',
|
||||
'ap-south-2': 'Asia Pacific (Hyderabad)',
|
||||
'ap-southeast-3': 'Asia Pacific (Jakarta)',
|
||||
'ap-southeast-5': 'Asia Pacific (Malaysia)',
|
||||
'ap-southeast-4': 'Asia Pacific (Melbourne)',
|
||||
'ap-south-1': 'Asia Pacific (Mumbai)',
|
||||
'ap-northeast-3': 'Asia Pacific (Osaka)',
|
||||
'ap-northeast-2': 'Asia Pacific (Seoul)',
|
||||
'ap-southeast-1': 'Asia Pacific (Singapore)',
|
||||
'ap-northeast-1': 'Asia Pacific (Tokyo)',
|
||||
'eu-central-1': 'Europe (Frankfurt)',
|
||||
'us-gov-west-1': 'AWS GovCloud (US-West)',
|
||||
'ap-southeast-2': 'Asia Pacific (Sydney)',
'ap-east-2': 'Asia Pacific (Taipei)',
'ap-southeast-7': 'Asia Pacific (Thailand)',
'ap-northeast-1': 'Asia Pacific (Tokyo)',
'ca-central-1': 'Canada (Central)',
'ca-west-1': 'Canada West (Calgary)',
'eu-central-1': 'Europe (Frankfurt)',
'eu-west-1': 'Europe (Ireland)',
'eu-west-2': 'Europe (London)',
'eu-south-1': 'Europe (Milan)',
'eu-west-3': 'Europe (Paris)',
'eu-south-2': 'Europe (Spain)',
'eu-north-1': 'Europe (Stockholm)',
'eu-central-2': 'Europe (Zurich)',
'il-central-1': 'Israel (Tel Aviv)',
'mx-central-1': 'Mexico (Central)',
'me-south-1': 'Middle East (Bahrain)',
'me-central-1': 'Middle East (UAE)',
'sa-east-1': 'South America (São Paulo)',
'us-gov-east-1': 'AWS GovCloud (US-East)',
'us-gov-west-1': 'AWS GovCloud (US-West)',
addHunyuanSID: 'Hunyuan Secret ID',
HunyuanSIDMessage: 'Please input your Secret ID',
addHunyuanSK: 'Hunyuan Secret Key',
@ -203,7 +203,7 @@ export default {
Karena buku panjang dan tidak semua bagian berguna, jika itu adalah PDF,
silakan atur <i>rentang halaman</i> untuk setiap buku untuk menghilangkan efek negatif dan menghemat waktu komputasi untuk analisis.</p>`,
laws: `<p>Format file yang didukung adalah <b>DOCX</b>, <b>PDF</b>, <b>TXT</b>.</p><p>
Dokumen hukum memiliki format penulisan yang sangat ketat. Kami menggunakan fitur teks untuk mendeteksi titik pemisah.
</p><p>
Granularitas potongan konsisten dengan 'ARTIKEL', dan semua teks tingkat atas akan disertakan dalam potongan.
</p>`,

@ -218,9 +218,9 @@ export default {
<li>Selanjutnya, potongan berturut-turut ini digabungkan menjadi potongan yang jumlah tokennya tidak lebih dari 'Jumlah token'.</li></p>`,
paper: `<p>Hanya file <b>PDF</b> yang didukung.</p><p>
Jika model kami bekerja dengan baik, makalah akan dipotong berdasarkan bagiannya, seperti <i>abstrak, 1.1, 1.2</i>, dll. </p><p>
Manfaat dari melakukan ini adalah LLM dapat lebih baik merangkum konten bagian yang relevan dalam makalah,
menghasilkan jawaban yang lebih komprehensif yang membantu pembaca lebih memahami makalah.
Kelemahannya adalah meningkatkan konteks percakapan LLM dan menambah biaya komputasi,
jadi selama percakapan, Anda dapat mempertimbangkan untuk mengurangi pengaturan ‘<b>topN</b>’.</p>`,
presentation: `<p>Format file yang didukung adalah <b>PDF</b>, <b>PPTX</b>.</p><p>
Setiap halaman akan diperlakukan sebagai potongan. Dan thumbnail setiap halaman akan disimpan.</p><p>

@ -249,7 +249,7 @@ export default {
</p><p>
Resume datang dalam berbagai format, seperti kepribadian seseorang, tetapi kita sering harus mengaturnya menjadi data terstruktur yang memudahkan pencarian.
</p><p>
Alih-alih memotong resume, kami memparsing resume menjadi data terstruktur. Sebagai HR, Anda dapat membuang semua resume yang Anda miliki,
maka Anda dapat mencantumkan semua kandidat yang memenuhi kualifikasi hanya dengan berbicara dengan <i>'assistxsuite'</i>.
</p>
`,

@ -283,11 +283,11 @@ export default {
Jika Anda ingin merangkum sesuatu yang membutuhkan semua konteks dari sebuah artikel dan panjang konteks LLM yang dipilih mencakup panjang dokumen, Anda dapat mencoba metode ini.
</p>`,
knowledgeGraph: `<p>Format file yang didukung adalah <b>DOCX, EXCEL, PPT, IMAGE, PDF, TXT, MD, JSON, EML</b>
<p>Setelah file dipotong, digunakan potongan untuk mengekstrak grafik pengetahuan dan peta pikiran dari seluruh dokumen. Metode ini menerapkan cara naif untuk memotong file:
Teks berturut-turut akan dipotong menjadi potongan masing-masing yang berjumlah sekitar 512 token.</p>
<p>Selanjutnya, potongan akan dikirim ke LLM untuk mengekstrak node dan hubungan dari grafik pengetahuan, dan peta pikiran.</p>
Perhatikan jenis entitas yang perlu Anda tentukan.</p>`,
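This knowledgeGraph tip (mirrored in the other locales) describes a two-stage pipeline: naive chunking into roughly 512-token pieces, then an LLM pass that extracts entities and relations constrained to the configured entity types. A rough sketch of the orchestration; `extractGraph` is a stand-in for the LLM call, which actually runs in RAGFlow's Python backend:

```ts
interface GraphFragment {
  entities: { name: string; type: string }[];
  relations: { source: string; target: string; description: string }[];
}

// Hypothetical LLM wrapper, stubbed here: the real call would prompt a chat
// model with the chunk and the user-configured entity types, then parse JSON.
async function extractGraph(
  chunk: string,
  entityTypes: string[],
): Promise<GraphFragment> {
  return { entities: [], relations: [] };
}

// Extract per chunk, then merge the fragments into one graph.
async function buildKnowledgeGraph(
  chunks: string[],
  entityTypes: string[],
): Promise<GraphFragment> {
  const fragments = await Promise.all(
    chunks.map((chunk) => extractGraph(chunk, entityTypes)),
  );
  return {
    entities: fragments.flatMap((f) => f.entities),
    relations: fragments.flatMap((f) => f.relations),
  };
}
```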
useRaptor: 'Gunakan RAPTOR untuk meningkatkan pengambilan',
useRaptorTip:

@ -558,13 +558,41 @@ export default {
bedrockSKMessage: 'Silakan masukkan SECRET KEY Anda',
bedrockRegion: 'Wilayah AWS',
bedrockRegionMessage: 'Silakan pilih!',
'us-east-2': 'US East (Ohio)',
'us-east-1': 'US East (N. Virginia)',
'us-west-1': 'US West (N. California)',
'us-west-2': 'US West (Oregon)',
'af-south-1': 'Africa (Cape Town)',
'ap-east-1': 'Asia Pacific (Hong Kong)',
'ap-south-2': 'Asia Pacific (Hyderabad)',
'ap-southeast-3': 'Asia Pacific (Jakarta)',
'ap-southeast-5': 'Asia Pacific (Malaysia)',
'ap-southeast-4': 'Asia Pacific (Melbourne)',
'ap-south-1': 'Asia Pacific (Mumbai)',
'ap-northeast-3': 'Asia Pacific (Osaka)',
'ap-northeast-2': 'Asia Pacific (Seoul)',
'ap-southeast-1': 'Asia Pacific (Singapore)',
'ap-southeast-2': 'Asia Pacific (Sydney)',
'ap-east-2': 'Asia Pacific (Taipei)',
'ap-southeast-7': 'Asia Pacific (Thailand)',
'ap-northeast-1': 'Asia Pacific (Tokyo)',
'ca-central-1': 'Canada (Central)',
'ca-west-1': 'Canada West (Calgary)',
'eu-central-1': 'Europe (Frankfurt)',
'eu-west-1': 'Europe (Ireland)',
'eu-west-2': 'Europe (London)',
'eu-south-1': 'Europe (Milan)',
'eu-west-3': 'Europe (Paris)',
'eu-south-2': 'Europe (Spain)',
'eu-north-1': 'Europe (Stockholm)',
'eu-central-2': 'Europe (Zurich)',
'il-central-1': 'Israel (Tel Aviv)',
'mx-central-1': 'Mexico (Central)',
'me-south-1': 'Middle East (Bahrain)',
'me-central-1': 'Middle East (UAE)',
'sa-east-1': 'South America (São Paulo)',
'us-gov-east-1': 'AWS GovCloud (US-East)',
'us-gov-west-1': 'AWS GovCloud (US-West)',
addHunyuanSID: 'Hunyuan Secret ID',
HunyuanSIDMessage: 'Silakan masukkan Secret ID Anda',
addHunyuanSK: 'Hunyuan Secret Key',
web/src/locales/ru.ts (new file, 1365 lines): file diff suppressed because it is too large.
@ -220,7 +220,7 @@ export default {
book: `<p>Các định dạng tệp được hỗ trợ là <b>DOCX</b>, <b>PDF</b>, <b>TXT</b>.</p><p>
Đối với mỗi sách trong PDF, vui lòng đặt <i>phạm vi trang</i> để loại bỏ thông tin không mong muốn và giảm thời gian phân tích.</p>`,
laws: `<p>Các định dạng tệp được hỗ trợ là <b>DOCX</b>, <b>PDF</b>, <b>TXT</b>.</p><p>
Các tài liệu pháp lý thường tuân theo định dạng viết nghiêm ngặt. Chúng tôi sử dụng tính năng văn bản để xác định điểm phân chia.
</p><p>
Khối có độ chi tiết nhất quán với 'ARTICLE', đảm bảo tất cả văn bản cấp trên được bao gồm trong khối.
</p>`,

@ -234,7 +234,7 @@ export default {
<p>Các định dạng tệp được hỗ trợ là <b>MD, MDX, DOCX, XLSX, XLS (Excel 97-2003), PPT, PDF, TXT, JPEG, JPG, PNG, TIF, GIF, CSV, JSON, EML, HTML</b>.</p>`,
paper: `<p>Chỉ hỗ trợ tệp <b>PDF</b>.</p><p>
Bài báo sẽ được chia theo các phần, chẳng hạn như <i>tóm tắt, 1.1, 1.2</i>. </p><p>
Cách tiếp cận này cho phép LLM tóm tắt bài báo hiệu quả hơn và cung cấp các phản hồi toàn diện, dễ hiểu hơn.
Tuy nhiên, nó cũng làm tăng ngữ cảnh cho các cuộc hội thoại AI và tăng thêm chi phí tính toán cho LLM. Vì vậy, trong quá trình trò chuyện, hãy cân nhắc giảm giá trị của '<b>topN</b>'.</p>`,
presentation: `<p>Các định dạng tệp được hỗ trợ là <b>PDF</b>, <b>PPTX</b>.</p><p>
Mỗi trang trong slide được coi là một khối, với hình thu nhỏ của nó được lưu trữ.</p><p>

@ -290,7 +290,7 @@ export default {
Áp dụng khi bạn yêu cầu LLM tóm tắt toàn bộ tài liệu, với điều kiện nó có thể xử lý được lượng ngữ cảnh đó.
</p>`,
knowledgeGraph: `<p>Các định dạng tệp được hỗ trợ là <b>DOCX, EXCEL, PPT, IMAGE, PDF, TXT, MD, JSON, EML</b>
<p>Cách tiếp cận này phân đoạn tệp bằng phương pháp 'ngây thơ'/'Tổng hợp'. Nó chia tài liệu thành các phân đoạn và sau đó kết hợp các phân đoạn liền kề cho đến khi số lượng token vượt quá ngưỡng được chỉ định bởi 'Số token khối', tại thời điểm đó, một khối được tạo.</p>
<p>Các khối sau đó được đưa vào LLM để trích xuất các thực thể và mối quan hệ cho biểu đồ tri thức và sơ đồ tư duy.</p>
<p>Đảm bảo bạn đã đặt <b>Loại thực thể</b>.</p>`,
@ -603,13 +603,41 @@ export default {
bedrockSKMessage: 'Vui lòng nhập KHÓA BÍ MẬT của bạn',
bedrockRegion: 'Vùng AWS',
bedrockRegionMessage: 'Vui lòng chọn!',
'us-east-2': 'US East (Ohio)',
'us-east-1': 'US East (N. Virginia)',
'us-west-1': 'US West (N. California)',
'us-west-2': 'US West (Oregon)',
'af-south-1': 'Africa (Cape Town)',
'ap-east-1': 'Asia Pacific (Hong Kong)',
'ap-south-2': 'Asia Pacific (Hyderabad)',
'ap-southeast-3': 'Asia Pacific (Jakarta)',
'ap-southeast-5': 'Asia Pacific (Malaysia)',
'ap-southeast-4': 'Asia Pacific (Melbourne)',
'ap-south-1': 'Asia Pacific (Mumbai)',
'ap-northeast-3': 'Asia Pacific (Osaka)',
'ap-northeast-2': 'Asia Pacific (Seoul)',
'ap-southeast-1': 'Asia Pacific (Singapore)',
'ap-southeast-2': 'Asia Pacific (Sydney)',
'ap-east-2': 'Asia Pacific (Taipei)',
'ap-southeast-7': 'Asia Pacific (Thailand)',
'ap-northeast-1': 'Asia Pacific (Tokyo)',
'ca-central-1': 'Canada (Central)',
'ca-west-1': 'Canada West (Calgary)',
'eu-central-1': 'Europe (Frankfurt)',
'eu-west-1': 'Europe (Ireland)',
'eu-west-2': 'Europe (London)',
'eu-south-1': 'Europe (Milan)',
'eu-west-3': 'Europe (Paris)',
'eu-south-2': 'Europe (Spain)',
'eu-north-1': 'Europe (Stockholm)',
'eu-central-2': 'Europe (Zurich)',
'il-central-1': 'Israel (Tel Aviv)',
'mx-central-1': 'Mexico (Central)',
'me-south-1': 'Middle East (Bahrain)',
'me-central-1': 'Middle East (UAE)',
'sa-east-1': 'South America (São Paulo)',
'us-gov-east-1': 'AWS GovCloud (US-East)',
'us-gov-west-1': 'AWS GovCloud (US-West)',
addHunyuanSID: 'Hunyuan Secret ID',
HunyuanSIDMessage: 'Vui lòng nhập ID bí mật của bạn',
addHunyuanSK: 'Hunyuan Secret Key',
@ -115,7 +115,7 @@ export default {
similarityThreshold: '相似度阈值',
similarityThresholdTip:
'我们使用混合相似度得分来评估两行文本之间的距离。 它是加权关键词相似度和向量余弦相似度。 如果查询和块之间的相似度小于此阈值,则该块将被过滤掉。默认设置为 0.2,也就是说文本块的混合相似度得分至少 20 才会被召回。',
vectorSimilarityWeight: '关键字相似度权重',
vectorSimilarityWeight: '相似度相似度权重',
vectorSimilarityWeightTip:
'我们使用混合相似性评分来评估两行文本之间的距离。它是加权关键字相似性和矢量余弦相似性或rerank得分(0〜1)。两个权重的总和为1.0。',
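Taken together, the two tips above define the retrieval score: a weighted sum of keyword similarity and vector cosine (or rerank) similarity, the two weights summing to 1.0, with chunks scoring below the threshold (default 0.2) filtered out. A worked sketch; the helper names and the 0.7 default keyword weight are assumptions for illustration:

```ts
// hybrid = w * keyword + (1 - w) * vector, per the tips above.
function hybridScore(
  keywordSim: number, // 0..1
  vectorSim: number, // 0..1 cosine or rerank score
  keywordWeight: number,
): number {
  return keywordWeight * keywordSim + (1 - keywordWeight) * vectorSim;
}

interface ScoredCandidate<T> {
  item: T;
  keywordSim: number;
  vectorSim: number;
}

// Drop candidates below the similarity threshold, then rank the rest.
function recall<T>(
  candidates: ScoredCandidate<T>[],
  keywordWeight = 0.7,
  threshold = 0.2,
) {
  return candidates
    .map((c) => ({
      item: c.item,
      score: hybridScore(c.keywordSim, c.vectorSim, keywordWeight),
    }))
    .filter((c) => c.score >= threshold)
    .sort((a, b) => b.score - a.score);
}
```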
testText: '测试文本',

@ -558,6 +558,16 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
tavilyApiKeyHelp: '如何获取?',
crossLanguage: '跨语言搜索',
crossLanguageTip: `选择一种或多种语言进行跨语言搜索。如果未选择任何语言,系统将使用原始查询进行搜索。`,
metadata: '元数据',
metadataTip:
'元数据过滤是使用元数据属性(例如标签、类别或访问权限)来优化和控制系统内相关信息检索的过程。',
conditions: '条件',
addCondition: '增加条件',
meta: {
disabled: '禁用',
automatic: '自动',
manual: '手动',
},
},
setting: {
profile: '概要',

@ -147,7 +147,7 @@ export const useHandleUploadDocument = () => {
const [fileList, setFileList] = useState<UploadFile[]>([]);
const [uploadProgress, setUploadProgress] = useState<number>(0);
const { uploadDocument, loading } = useUploadNextDocument();
const { runDocumentByIds, loading: _ } = useRunNextDocument();
const { runDocumentByIds } = useRunNextDocument();

const onDocumentUploadOk = useCallback(
async ({

@ -168,7 +168,7 @@ const KnowledgeFile = () => {
),
dataIndex: 'run',
key: 'run',
filters: Object.entries(RunningStatus).map(([key, value]) => ({
filters: Object.values(RunningStatus).map((value) => ({
text: t(`runningStatus${value}`),
value: value,
})),
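The hunk above swaps `Object.entries` for `Object.values` because the mapped key was unused: the filter options only need the enum's values. A self-contained illustration (the enum's members are assumed for illustration, not copied from the repo):

```ts
// Assumed shape of the status enum, for illustration only.
enum RunningStatus {
  UNSTART = '0',
  RUNNING = '1',
  CANCEL = '2',
  DONE = '3',
  FAIL = '4',
}

// For a string enum, Object.values yields just the values, which is all the
// table filter needs once the key binding is dropped.
const filters = Object.values(RunningStatus).map((value) => ({
  text: `runningStatus${value}`, // the real component resolves this via t()
  value,
}));

console.log(filters); // [{ text: 'runningStatus0', value: '0' }, ...]
```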
@ -27,7 +27,6 @@ const KnowledgeSidebar = () => {
const { knowledgeId } = useGetKnowledgeSearchParams();
const [windowWidth, setWindowWidth] = useState(getWidth());
const [collapsed, setCollapsed] = useState(false);
const { t } = useTranslation();
const { data: knowledgeDetails } = useFetchKnowledgeBaseConfiguration();

@ -92,14 +91,6 @@ const KnowledgeSidebar = () => {
return list;
}, [data, getItem]);

useEffect(() => {
if (windowWidth.width > 957) {
setCollapsed(false);
} else {
setCollapsed(true);
}
}, [windowWidth.width]);

useEffect(() => {
const widthSize = () => {
const width = getWidth();
@ -4,7 +4,7 @@ import { IRetrievalNode } from '@/interfaces/database/flow';
import { NodeProps, Position } from '@xyflow/react';
import classNames from 'classnames';
import { get } from 'lodash';
import { memo, useMemo } from 'react';
import { memo } from 'react';
import { NodeHandleId } from '../../constant';
import { useGetVariableLabelByValue } from '../../hooks/use-get-begin-query';
import { CommonHandle } from './handle';

@ -21,18 +21,7 @@ function InnerRetrievalNode({
selected,
}: NodeProps<IRetrievalNode>) {
const knowledgeBaseIds: string[] = get(data, 'form.kb_ids', []);
console.log('🚀 ~ InnerRetrievalNode ~ knowledgeBaseIds:', knowledgeBaseIds);
const { list: knowledgeList } = useFetchKnowledgeList(true);
const knowledgeBases = useMemo(() => {
return knowledgeBaseIds.map((x) => {
const item = knowledgeList.find((y) => x === y.id);
return {
name: item?.name,
avatar: item?.avatar,
id: x,
};
});
}, [knowledgeList, knowledgeBaseIds]);

const getLabel = useGetVariableLabelByValue(id);

@ -1,8 +1,7 @@
import { ModelVariableType } from '@/constants/knowledge';
import { RAGFlowNodeType } from '@/interfaces/database/flow';
import { get, isEmpty, isPlainObject } from 'lodash';
import { isEmpty, isPlainObject } from 'lodash';
import { useMemo } from 'react';
import { buildCategorizeListFromObject } from '../../utils';

const defaultValues = {
parameter: ModelVariableType.Precise,

@ -21,9 +20,6 @@ export function useValues(node?: RAGFlowNodeType) {
if (isEmpty(formData)) {
return defaultValues;
}
const items = buildCategorizeListFromObject(
get(node, 'data.form.category_description', {}),
);
if (isPlainObject(formData)) {
// const nextValues = {
// ...omit(formData, 'category_description'),
@ -1,6 +1,6 @@
'use client';

import { SideDown } from '@/assets/icon/Icon';
import { SideDown } from '@/assets/icon/next-icon';
import { Button } from '@/components/ui/button';
import {
Collapsible,

@ -1,4 +1,3 @@
import { Edge } from '@xyflow/react';
import pick from 'lodash/pick';
import { useCallback, useEffect } from 'react';
import { IOperatorForm } from '../../interface';

@ -21,16 +20,12 @@ export const useBuildRelevantOptions = () => {
return buildRelevantOptions;
};

const getTargetOfEdge = (edges: Edge[], sourceHandle: string) =>
edges.find((x) => x.sourceHandle === sourceHandle)?.target;

/**
* monitor changes in the connection and synchronize the target to the yes and no fields of the form
* similar to the categorize-form's useHandleFormValuesChange method
* @param param0
*/
export const useWatchConnectionChanges = ({ nodeId, form }: IOperatorForm) => {
const edges = useGraphStore((state) => state.edges);
const getNode = useGraphStore((state) => state.getNode);
const node = getNode(nodeId);

@ -40,13 +35,6 @@ export const useWatchConnectionChanges = ({ nodeId, form }: IOperatorForm) => {
}
}, [node, form]);

const watchConnectionChanges = useCallback(() => {
const edgeList = edges.filter((x) => x.source === nodeId);
const yes = getTargetOfEdge(edgeList, 'yes');
const no = getTargetOfEdge(edgeList, 'no');
form?.setFieldsValue({ yes, no });
}, [edges, nodeId, form]);

useEffect(() => {
watchFormChanges();
}, [watchFormChanges]);

@ -59,7 +59,7 @@ export const LogicalOperatorIcon = function OperatorIcon({
return icon;
};

function useBuildSwitchOperatorOptions() {
export function useBuildSwitchOperatorOptions() {
const { t } = useTranslation();

const switchOperatorOptions = useMemo(() => {
@ -8,7 +8,7 @@ import { IModalProps } from '@/interfaces/common';
import { NotebookText } from 'lucide-react';
import 'react18-json-view/src/style.css';
import { useCacheChatLog } from '../hooks/use-cache-chat-log';
import { WorkFlowTimeline } from './workFlowTimeline';
import { WorkFlowTimeline } from './workflow-timeline';

type LogSheetProps = IModalProps<any> &
Pick<

@ -19,7 +19,7 @@ import {
JsonViewer,
toLowerCaseStringAndDeleteChar,
typeMap,
} from './workFlowTimeline';
} from './workflow-timeline';
type IToolIcon =
| Operator.ArXiv
| Operator.GitHub

@ -28,7 +28,7 @@ import JsonView from 'react18-json-view';
import { Operator } from '../constant';
import { useCacheChatLog } from '../hooks/use-cache-chat-log';
import OperatorIcon from '../operator-icon';
import ToolTimelineItem from './toolTimelineItem';
import ToolTimelineItem from './tool-timeline-item';
type LogFlowTimelineProps = Pick<
ReturnType<typeof useCacheChatLog>,
'currentEventListWithoutMessage' | 'currentMessageId'

@ -3,7 +3,7 @@ import { RAGFlowAvatar } from '@/components/ragflow-avatar';
import { SharedBadge } from '@/components/shared-badge';
import { Card, CardContent } from '@/components/ui/card';
import { useNavigatePage } from '@/hooks/logic-hooks/navigate-hooks';
import { IFlow } from '@/interfaces/database/flow';
import { IFlow } from '@/interfaces/database/agent';
import { formatDate } from '@/utils/date';
import { AgentDropdown } from './agent-dropdown';
import { useRenameAgent } from './use-rename-agent';

@ -7,7 +7,7 @@ import {
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu';
import { useDeleteAgent } from '@/hooks/use-agent-request';
import { IFlow } from '@/interfaces/database/flow';
import { IFlow } from '@/interfaces/database/agent';
import { PenLine, Trash2 } from 'lucide-react';
import { MouseEventHandler, PropsWithChildren, useCallback } from 'react';
import { useTranslation } from 'react-i18next';

@ -1,6 +1,6 @@
import { useSetModalState } from '@/hooks/common-hooks';
import { useUpdateAgentSetting } from '@/hooks/use-agent-request';
import { IFlow } from '@/interfaces/database/flow';
import { IFlow } from '@/interfaces/database/agent';
import { pick } from 'lodash';
import { useCallback, useState } from 'react';

@ -8,7 +8,9 @@ import classNames from 'classnames';
import { useCallback } from 'react';
import { ISegmentedContentProps } from '../interface';

import { DatasetMetadata } from '../constants';
import styles from './index.less';
import { MetadataFilterConditions } from './metadata-filter-conditions';

const emptyResponseField = ['prompt_config', 'empty_response'];
@ -20,6 +22,18 @@ const AssistantSetting = ({
const { t } = useTranslate('chat');
const { data } = useFetchTenantInfo(true);

const MetadataOptions = Object.values(DatasetMetadata).map((x) => {
return {
value: x,
label: t(`meta.${x}`),
};
});

const metadata = Form.useWatch(['meta_data_filter', 'method'], form);
const kbIds = Form.useWatch(['kb_ids'], form);

const hasKnowledge = Array.isArray(kbIds) && kbIds.length > 0;

const handleChange = useCallback(() => {
const kbIds = form.getFieldValue('kb_ids');
const emptyResponse = form.getFieldValue(emptyResponseField);

@ -153,6 +167,25 @@ const AssistantSetting = ({
required={false}
onChange={handleChange}
></KnowledgeBaseItem>
{hasKnowledge && (
<Form.Item
label={t('metadata')}
name={['meta_data_filter', 'method']}
tooltip={t('metadataTip')}
initialValue={DatasetMetadata.Disabled}
>
<Select options={MetadataOptions} />
</Form.Item>
)}
{hasKnowledge && metadata === DatasetMetadata.Manual && (
<Form.Item
label={t('conditions')}
tooltip={t('ttsTip')}
initialValue={false}
>
<MetadataFilterConditions kbIds={kbIds}></MetadataFilterConditions>
</Form.Item>
)}
</section>
);
};
Some files were not shown because too many files have changed in this diff.