Compare commits

...

6 Commits

Author SHA1 Message Date
3fcf2ee54c feat: add new LLM provider Jiekou.AI (#11300)
### What problem does this PR solve?

Adds Jiekou.AI as a new LLM provider, covering its chat, embedding, and rerank models.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

Co-authored-by: Jason <ggbbddjm@gmail.com>
2025-11-17 19:47:46 +08:00
d8f413a885 Feat: Construct a dynamic variable assignment form #10427 (#11316)
### What problem does this PR solve?

Feat: Construct a dynamic variable assignment form #10427

### Type of change


- [x] New Feature (non-breaking change which adds functionality)
2025-11-17 19:45:58 +08:00
7264fb6978 Fix: concat images in word document. (#11310)
### What problem does this PR solve?

Fixes image concatenation in Word documents; partially addresses the issues in #11063.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
2025-11-17 19:38:26 +08:00
bd4bc57009 Refactor: move mcp connection utilities to common (#11304)
### What problem does this PR solve?

As the title states: moves the MCP connection utilities from `rag.utils.mcp_tool_call_conn` to `common.mcp_tool_call_conn`.

### Type of change

- [x] Refactoring

---------

Signed-off-by: Jin Hai <haijin.chn@gmail.com>
2025-11-17 15:34:17 +08:00
0569b50fed Fix: create dataset return type inconsistent (#11272)
### What problem does this PR solve?

Fixes the inconsistent return type when creating a dataset (#11167).
 
### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
2025-11-17 15:27:19 +08:00
6b64641042 Fix: default model base url extraction logic (#11263)
### What problem does this PR solve?

Fixes an issue where default models that used the same factory but
different base URLs would all be initialised with the default chat
model's base URL, ignoring e.g. the embedding model's base URL
config.

For example, with the following service config, the embedding and
reranker models would end up using the base URL for the default chat
model (i.e. `llm1.example.com`):

```yaml
ragflow:
  service_conf:
    user_default_llm:
      factory: OpenAI-API-Compatible
      api_key: not-used
      default_models:
        chat_model:
          name: llm1
          base_url: https://llm1.example.com/v1
        embedding_model:
          name: llm2
          base_url: https://llm2.example.com/v1
        rerank_model:
          name: llm3
          base_url: https://llm3.example.com/v1/rerank

  llm_factories:
    factory_llm_infos:
    - name: OpenAI-API-Compatible
      logo: ""
      tags: "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION"
      status: "1"
      llm:
        - llm_name: llm1
          base_url: 'https://llm1.example.com/v1'
          api_key: not-used
          tags: "LLM,CHAT,IMAGE2TEXT"
          max_tokens: 100000
          model_type: chat
          is_tools: false

        - llm_name: llm2
          base_url: https://llm2.example.com/v1
          api_key: not-used
          tags: "TEXT EMBEDDING"
          max_tokens: 10000
          model_type: embedding

        - llm_name: llm3
          base_url: https://llm3.example.com/v1/rerank
          api_key: not-used
          tags: "RERANK,1k"
          max_tokens: 10000
          model_type: rerank
```
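
The fix (see the `get_init_tenant_llm` diff below) resolves `api_key` and `base_url` per model type, falling back to the factory-level values only when no per-type override exists. A minimal sketch of that lookup, using plain dicts in place of the real `LLMType` enum and `common.settings` objects:

```python
# Hedged sketch of per-model-type default resolution; stand-in dicts replace
# the real LLMType keys and settings.*_CFG objects.
factory_config = {"api_key": "not-used", "base_url": "https://factory.example.com/v1"}

# Per-type overrides, as parsed from user_default_llm.default_models above.
model_configs = {
    "chat": {"base_url": "https://llm1.example.com/v1"},
    "embedding": {"base_url": "https://llm2.example.com/v1"},
    "rerank": {"base_url": "https://llm3.example.com/v1/rerank"},
}

def resolve(model_type: str) -> dict:
    # Fall back to the factory default only when the type has no override.
    cfg = model_configs.get(model_type, {})
    return {
        "api_key": cfg.get("api_key", factory_config["api_key"]),
        "api_base": cfg.get("base_url", factory_config["base_url"]),
    }

assert resolve("embedding")["api_base"] == "https://llm2.example.com/v1"
assert resolve("speech2text")["api_base"] == "https://factory.example.com/v1"
```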

### Type of change

- [X] Bug Fix (non-breaking change which fixes an issue)
2025-11-17 14:21:27 +08:00
37 changed files with 2009 additions and 379 deletions

View File

@ -30,7 +30,7 @@ from api.db.services.mcp_server_service import MCPServerService
from common.connection_utils import timeout
from rag.prompts.generator import next_step, COMPLETE_TASK, analyze_task, \
citation_prompt, reflect, rank_memories, kb_prompt, citation_plus, full_question, message_fit_in
from rag.utils.mcp_tool_call_conn import MCPToolCallSession, mcp_tool_metadata_to_openai_tool
from common.mcp_tool_call_conn import MCPToolCallSession, mcp_tool_metadata_to_openai_tool
from agent.component.llm import LLMParam, LLM

View File

@ -21,9 +21,8 @@ from functools import partial
from typing import TypedDict, List, Any
from agent.component.base import ComponentParamBase, ComponentBase
from common.misc_utils import hash_str2int
from rag.llm.chat_model import ToolCallSession
from rag.prompts.generator import kb_prompt
from rag.utils.mcp_tool_call_conn import MCPToolCallSession
from common.mcp_tool_call_conn import MCPToolCallSession, ToolCallSession
from timeit import default_timer as timer

View File

@ -55,6 +55,10 @@ def create():
**req
)
code = req.get("code")
if code:
return get_data_error_result(code=code, message=req.get("message"))
try:
if not KnowledgebaseService.save(**req):
return get_data_error_result()

View File

@ -25,7 +25,7 @@ from common.misc_utils import get_uuid
from api.utils.api_utils import get_data_error_result, get_json_result, server_error_response, validate_request, \
get_mcp_tools
from api.utils.web_utils import get_float, safe_json_parse
from rag.utils.mcp_tool_call_conn import MCPToolCallSession, close_multiple_mcp_toolcall_sessions
from common.mcp_tool_call_conn import MCPToolCallSession, close_multiple_mcp_toolcall_sessions
@manager.route("/list", methods=["POST"]) # noqa: F821

View File

@ -41,12 +41,12 @@ def list_agents(tenant_id):
return get_error_data_result("The agent doesn't exist.")
page_number = int(request.args.get("page", 1))
items_per_page = int(request.args.get("page_size", 30))
orderby = request.args.get("orderby", "update_time")
order_by = request.args.get("orderby", "update_time")
if request.args.get("desc") == "False" or request.args.get("desc") == "false":
desc = False
else:
desc = True
canvas = UserCanvasService.get_list(tenant_id, page_number, items_per_page, orderby, desc, id, title)
canvas = UserCanvasService.get_list(tenant_id, page_number, items_per_page, order_by, desc, id, title)
return get_result(data=canvas)

View File

@ -24,9 +24,9 @@ from common.time_utils import current_timestamp, datetime_format
from api.db.services import duplicate_name
from api.db.services.user_service import TenantService
from common.misc_utils import get_uuid
from common.constants import StatusEnum
from common.constants import StatusEnum, RetCode
from api.constants import DATASET_NAME_LIMIT
from api.utils.api_utils import get_parser_config, get_data_error_result
from api.utils.api_utils import get_parser_config
class KnowledgebaseService(CommonService):
"""Service class for managing knowledge base operations.
@ -391,12 +391,12 @@ class KnowledgebaseService(CommonService):
"""
# Validate name
if not isinstance(name, str):
return get_data_error_result(message="Dataset name must be string.")
return {"code": RetCode.DATA_ERROR, "message": "Dataset name must be string."}
dataset_name = name.strip()
if dataset_name == "":
return get_data_error_result(message="Dataset name can't be empty.")
if len(dataset_name) == 0:
return {"code": RetCode.DATA_ERROR, "message": "Dataset name can't be empty."}
if len(dataset_name.encode("utf-8")) > DATASET_NAME_LIMIT:
return get_data_error_result(message=f"Dataset name length is {len(dataset_name)} which is larger than {DATASET_NAME_LIMIT}")
return {"code": RetCode.DATA_ERROR, "message": f"Dataset name length is {len(dataset_name)} which is larger than {DATASET_NAME_LIMIT}"}
# Deduplicate name within tenant
dataset_name = duplicate_name(
@ -409,7 +409,7 @@ class KnowledgebaseService(CommonService):
# Verify tenant exists
ok, _t = TenantService.get_by_id(tenant_id)
if not ok:
return False, "Tenant not found."
return {"code": RetCode.DATA_ERROR, "message": "Tenant does not exist."}
# Build payload
kb_id = get_uuid()
@ -419,10 +419,10 @@ class KnowledgebaseService(CommonService):
"tenant_id": tenant_id,
"created_by": tenant_id,
"parser_id": (parser_id or "naive"),
**kwargs
**kwargs # Includes optional fields such as description, language, permission, avatar, parser_config, etc.
}
# Default parser_config (align with kb_app.create) — do not accept external overrides
# Update parser_config (always override with validated default/merged config)
payload["parser_config"] = get_parser_config(parser_id, kwargs.get("parser_config"))
return payload
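
This hunk switches the service layer to returning plain dicts, leaving HTTP response construction to the route handler (see the `kb_app` create hunk above). A minimal sketch of the contract, assuming `RetCode.DATA_ERROR` equals 102, which the updated test assertions below suggest:

```python
# Sketch of the dict-based error contract between service and route layers.
from enum import IntEnum

class RetCode(IntEnum):
    DATA_ERROR = 102  # assumed value, consistent with the updated tests

def validate_name(name) -> dict | None:
    # Error dict on failure, None when the name is acceptable.
    if not isinstance(name, str):
        return {"code": RetCode.DATA_ERROR, "message": "Dataset name must be string."}
    if len(name.strip()) == 0:
        return {"code": RetCode.DATA_ERROR, "message": "Dataset name can't be empty."}
    return None

# Route-level check, mirroring the create() hunk above:
err = validate_name("")
if err and err.get("code"):
    print(f"error {int(err['code'])}: {err['message']}")  # error 102: Dataset name can't be empty.
```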

View File

@ -19,6 +19,7 @@ import re
from common.token_utils import num_tokens_from_string
from functools import partial
from typing import Generator
from common.constants import LLMType
from api.db.db_models import LLM
from api.db.services.common_service import CommonService
from api.db.services.tenant_llm_service import LLM4Tenant, TenantLLMService
@ -32,6 +33,14 @@ def get_init_tenant_llm(user_id):
from common import settings
tenant_llm = []
model_configs = {
LLMType.CHAT: settings.CHAT_CFG,
LLMType.EMBEDDING: settings.EMBEDDING_CFG,
LLMType.SPEECH2TEXT: settings.ASR_CFG,
LLMType.IMAGE2TEXT: settings.IMAGE2TEXT_CFG,
LLMType.RERANK: settings.RERANK_CFG,
}
seen = set()
factory_configs = []
for factory_config in [
@ -54,8 +63,8 @@ def get_init_tenant_llm(user_id):
"llm_factory": factory_config["factory"],
"llm_name": llm.llm_name,
"model_type": llm.model_type,
"api_key": factory_config["api_key"],
"api_base": factory_config["base_url"],
"api_key": model_configs.get(llm.model_type, {}).get("api_key", factory_config["api_key"]),
"api_base": model_configs.get(llm.model_type, {}).get("base_url", factory_config["base_url"]),
"max_tokens": llm.max_tokens if llm.max_tokens else 8192,
}
)
@ -80,8 +89,8 @@ class LLMBundle(LLM4Tenant):
def encode(self, texts: list):
if self.langfuse:
generation = self.langfuse.start_generation(trace_context=self.trace_context, name="encode", model=self.llm_name, input={"texts": texts})
generation = self.langfuse.start_generation(trace_context=self.trace_context, name="encode", model=self.llm_name, input={"texts": texts})
safe_texts = []
for text in texts:
token_size = num_tokens_from_string(text)
@ -90,7 +99,7 @@ class LLMBundle(LLM4Tenant):
safe_texts.append(text[:target_len])
else:
safe_texts.append(text)
embeddings, used_tokens = self.mdl.encode(safe_texts)
llm_name = getattr(self, "llm_name", None)

View File

@ -41,7 +41,7 @@ from api.db.db_models import init_database_tables as init_web_db
from api.db.init_data import init_web_data
from common.versions import get_ragflow_version
from common.config_utils import show_configs
from rag.utils.mcp_tool_call_conn import shutdown_all_mcp_sessions
from common.mcp_tool_call_conn import shutdown_all_mcp_sessions
from rag.utils.redis_conn import RedisDistributedLock
stop_event = threading.Event()

View File

@ -37,7 +37,7 @@ from peewee import OperationalError
from common.constants import ActiveEnum
from api.db.db_models import APIToken
from api.utils.json_encode import CustomJSONEncoder
from rag.utils.mcp_tool_call_conn import MCPToolCallSession, close_multiple_mcp_toolcall_sessions
from common.mcp_tool_call_conn import MCPToolCallSession, close_multiple_mcp_toolcall_sessions
from api.db.services.tenant_llm_service import LLMFactoriesService
from common.connection_utils import timeout
from common.constants import RetCode

View File

@ -69,7 +69,7 @@ class SlimConnectorWithPermSync(ABC):
class CheckpointedConnectorWithPermSync(ABC):
"""Checkpointed connector interface (with permission sync)"""
"""Checkpoint connector interface (with permission sync)"""
@abstractmethod
def load_from_checkpoint(
@ -143,7 +143,7 @@ class CredentialsProviderInterface(abc.ABC, Generic[T]):
@abc.abstractmethod
def is_dynamic(self) -> bool:
"""If dynamic, the credentials may change during usage ... maening the client
"""If dynamic, the credentials may change during usage ... meaning the client
needs to use the locking features of the credentials provider to operate
correctly.

View File

@ -21,7 +21,7 @@ import weakref
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import TimeoutError as FuturesTimeoutError
from string import Template
from typing import Any, Literal
from typing import Any, Literal, Protocol
from typing_extensions import override
@ -30,12 +30,15 @@ from mcp.client.session import ClientSession
from mcp.client.sse import sse_client
from mcp.client.streamable_http import streamablehttp_client
from mcp.types import CallToolResult, ListToolsResult, TextContent, Tool
from rag.llm.chat_model import ToolCallSession
MCPTaskType = Literal["list_tools", "tool_call"]
MCPTask = tuple[MCPTaskType, dict[str, Any], asyncio.Queue[Any]]
class ToolCallSession(Protocol):
def tool_call(self, name: str, arguments: dict[str, Any]) -> str: ...
class MCPToolCallSession(ToolCallSession):
_ALL_INSTANCES: weakref.WeakSet["MCPToolCallSession"] = weakref.WeakSet()
@ -106,7 +109,8 @@ class MCPToolCallSession(ToolCallSession):
await self._process_mcp_tasks(None, msg)
else:
await self._process_mcp_tasks(None, f"Unsupported MCP server type: {self._mcp_server.server_type}, id: {self._mcp_server.id}")
await self._process_mcp_tasks(None,
f"Unsupported MCP server type: {self._mcp_server.server_type}, id: {self._mcp_server.id}")
async def _process_mcp_tasks(self, client_session: ClientSession | None, error_message: str | None = None) -> None:
while not self._close:
@ -164,7 +168,8 @@ class MCPToolCallSession(ToolCallSession):
raise
async def _call_mcp_tool(self, name: str, arguments: dict[str, Any], timeout: float | int = 10) -> str:
result: CallToolResult = await self._call_mcp_server("tool_call", name=name, arguments=arguments, timeout=timeout)
result: CallToolResult = await self._call_mcp_server("tool_call", name=name, arguments=arguments,
timeout=timeout)
if result.isError:
return f"MCP server error: {result.content}"
@ -283,7 +288,8 @@ def close_multiple_mcp_toolcall_sessions(sessions: list[MCPToolCallSession]) ->
except Exception:
logging.exception("Exception during MCP session cleanup thread management")
logging.info(f"{len(sessions)} MCP sessions has been cleaned up. {len(list(MCPToolCallSession._ALL_INSTANCES))} in global context.")
logging.info(
f"{len(sessions)} MCP sessions has been cleaned up. {len(list(MCPToolCallSession._ALL_INSTANCES))} in global context.")
def shutdown_all_mcp_sessions():
@ -298,7 +304,7 @@ def shutdown_all_mcp_sessions():
logging.info("All MCPToolCallSession instances have been closed.")
def mcp_tool_metadata_to_openai_tool(mcp_tool: Tool|dict) -> dict[str, Any]:
def mcp_tool_metadata_to_openai_tool(mcp_tool: Tool | dict) -> dict[str, Any]:
if isinstance(mcp_tool, dict):
return {
"type": "function",

View File

@ -4839,6 +4839,639 @@
"is_tools": false
}
]
},
{
"name": "JieKou.AI",
"logo": "",
"tags": "LLM,TEXT EMBEDDING,TEXT RE-RANK",
"status": "1",
"llm": [
{
"llm_name": "Sao10K/L3-8B-Stheno-v3.2",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "baichuan/baichuan-m2-32b",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "baidu/ernie-4.5-300b-a47b-paddle",
"tags": "LLM,CHAT,123K",
"max_tokens": 123000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "baidu/ernie-4.5-vl-424b-a47b",
"tags": "LLM,CHAT,123K",
"max_tokens": 123000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-5-haiku-20241022",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-5-sonnet-20241022",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-7-sonnet-20250219",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-haiku-20240307",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-haiku-4-5-20251001",
"tags": "LLM,CHAT,20K,IMAGE2TEXT",
"max_tokens": 20000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "claude-opus-4-1-20250805",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-opus-4-20250514",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-sonnet-4-20250514",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-sonnet-4-5-20250929",
"tags": "LLM,CHAT,200K,IMAGE2TEXT",
"max_tokens": 200000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "deepseek/deepseek-r1-0528",
"tags": "LLM,CHAT,163K",
"max_tokens": 163840,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek/deepseek-v3-0324",
"tags": "LLM,CHAT,163K",
"max_tokens": 163840,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek/deepseek-v3.1",
"tags": "LLM,CHAT,163K",
"max_tokens": 163840,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "doubao-1-5-pro-32k-250115",
"tags": "LLM,CHAT,128K",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "doubao-1.5-pro-32k-character-250715",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.0-flash-20250609",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.0-flash-lite",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash-lite",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash-lite-preview-06-17",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash-lite-preview-09-2025",
"tags": "LLM,CHAT,1M,IMAGE2TEXT",
"max_tokens": 1048576,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash-preview-05-20",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-pro",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-pro-preview-06-05",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "google/gemma-3-12b-it",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "google/gemma-3-27b-it",
"tags": "LLM,CHAT,32K",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4.1",
"tags": "LLM,CHAT,1M",
"max_tokens": 1047576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4.1-mini",
"tags": "LLM,CHAT,1M",
"max_tokens": 1047576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4.1-nano",
"tags": "LLM,CHAT,1M",
"max_tokens": 1047576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4o",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4o-mini",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5-chat-latest",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5-codex",
"tags": "LLM,CHAT,400K,IMAGE2TEXT",
"max_tokens": 400000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "gpt-5-mini",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5-nano",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5-pro",
"tags": "LLM,CHAT,400K,IMAGE2TEXT",
"max_tokens": 400000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "gpt-5.1",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5.1-chat-latest",
"tags": "LLM,CHAT,128K",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5.1-codex",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "grok-3",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "grok-3-mini",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "grok-4-0709",
"tags": "LLM,CHAT,256K",
"max_tokens": 256000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "grok-4-fast-non-reasoning",
"tags": "LLM,CHAT,2M,IMAGE2TEXT",
"max_tokens": 2000000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "grok-4-fast-reasoning",
"tags": "LLM,CHAT,2M,IMAGE2TEXT",
"max_tokens": 2000000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "grok-code-fast-1",
"tags": "LLM,CHAT,256K",
"max_tokens": 256000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gryphe/mythomax-l2-13b",
"tags": "LLM,CHAT,4K",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "meta-llama/llama-3.1-8b-instruct",
"tags": "LLM,CHAT,16K",
"max_tokens": 16384,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "meta-llama/llama-3.2-3b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "meta-llama/llama-3.3-70b-instruct",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "meta-llama/llama-4-maverick-17b-128e-instruct-fp8",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "meta-llama/llama-4-scout-17b-16e-instruct",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "minimaxai/minimax-m1-80k",
"tags": "LLM,CHAT,1M",
"max_tokens": 1000000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "mistralai/mistral-7b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "mistralai/mistral-nemo",
"tags": "LLM,CHAT,60K",
"max_tokens": 60288,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "moonshotai/kimi-k2-0905",
"tags": "LLM,CHAT,262K",
"max_tokens": 262144,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "moonshotai/kimi-k2-instruct",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "o1",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "o1-mini",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "o3",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "o3-mini",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "openai/gpt-oss-120b",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "openai/gpt-oss-20b",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen-2.5-72b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen-mt-plus",
"tags": "LLM,CHAT,4K",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen2.5-7b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen2.5-vl-72b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-235b-a22b-fp8",
"tags": "LLM,CHAT,40K",
"max_tokens": 40960,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-235b-a22b-instruct-2507",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen3-235b-a22b-thinking-2507",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen3-30b-a3b-fp8",
"tags": "LLM,CHAT,40K",
"max_tokens": 40960,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-32b-fp8",
"tags": "LLM,CHAT,40K",
"max_tokens": 40960,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-8b-fp8",
"tags": "LLM,CHAT,128K",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-coder-480b-a35b-instruct",
"tags": "LLM,CHAT,262K",
"max_tokens": 262144,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen3-next-80b-a3b-instruct",
"tags": "LLM,CHAT,65K",
"max_tokens": 65536,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen3-next-80b-a3b-thinking",
"tags": "LLM,CHAT,65K",
"max_tokens": 65536,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "sao10k/l3-70b-euryale-v2.1",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "sao10k/l3-8b-lunaris",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "sao10k/l31-70b-euryale-v2.2",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "thudm/glm-4.1v-9b-thinking",
"tags": "LLM,CHAT,65K",
"max_tokens": 65536,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "zai-org/glm-4.5",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "zai-org/glm-4.5v",
"tags": "LLM,CHAT,65K",
"max_tokens": 65536,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "baai/bge-m3",
"tags": "TEXT EMBEDDING,8K",
"max_tokens": 8192,
"model_type": "embedding"
},
{
"llm_name": "qwen/qwen3-embedding-0.6b",
"tags": "TEXT EMBEDDING,32K",
"max_tokens": 32768,
"model_type": "embedding"
},
{
"llm_name": "qwen/qwen3-embedding-8b",
"tags": "TEXT EMBEDDING,32K",
"max_tokens": 32768,
"model_type": "embedding"
},
{
"llm_name": "baai/bge-reranker-v2-m3",
"tags": "RE-RANK,8K",
"max_tokens": 8000,
"model_type": "reranker"
},
{
"llm_name": "qwen/qwen3-reranker-8b",
"tags": "RE-RANK,32K",
"max_tokens": 32768,
"model_type": "reranker"
}
]
}
]
}

View File

@ -67,6 +67,7 @@ A complete list of models supported by RAGFlow, which will continue to expand.
| 302.AI | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | |
| CometAPI | :heavy_check_mark: | :heavy_check_mark: | | | | |
| DeerAPI | :heavy_check_mark: | :heavy_check_mark: | | :heavy_check_mark: | | :heavy_check_mark: |
| Jiekou.AI | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | | |
```mdx-code-block
</APITable>

View File

@ -693,7 +693,7 @@ Released on August 26, 2024.
- Incorporates monitoring for the task executor.
- Introduces Agent tools **GitHub**, **DeepL**, **BaiduFanyi**, **QWeather**, and **GoogleScholar**.
- Supports chunking of EML files.
- Supports more LLMs or model services: **GPT-4o-mini**, **PerfXCloud**, **TogetherAI**, **Upstage**, **Novita AI**, **01.AI**, **SiliconFlow**, **PPIO**, **XunFei Spark**, **Baidu Yiyan**, and **Tencent Hunyuan**.
- Supports more LLMs or model services: **GPT-4o-mini**, **PerfXCloud**, **TogetherAI**, **Upstage**, **Novita AI**, **01.AI**, **SiliconFlow**, **PPIO**, **XunFei Spark**, **Jiekou.AI**, **Baidu Yiyan**, and **Tencent Hunyuan**.
## v0.9.0

View File

@ -49,6 +49,7 @@ class SupportedLiteLLMProvider(StrEnum):
Lingyi_AI = "01.AI"
GiteeAI = "GiteeAI"
AI_302 = "302.AI"
JiekouAI = "Jiekou.AI"
FACTORY_DEFAULT_BASE_URL = {
@ -69,6 +70,7 @@ FACTORY_DEFAULT_BASE_URL = {
SupportedLiteLLMProvider.GiteeAI: "https://ai.gitee.com/v1/",
SupportedLiteLLMProvider.AI_302: "https://api.302.ai/v1",
SupportedLiteLLMProvider.Anthropic: "https://api.anthropic.com/",
SupportedLiteLLMProvider.JiekouAI: "https://api.jiekou.ai/openai",
}
@ -99,6 +101,7 @@ LITELLM_PROVIDER_PREFIX = {
SupportedLiteLLMProvider.Lingyi_AI: "openai/",
SupportedLiteLLMProvider.GiteeAI: "openai/",
SupportedLiteLLMProvider.AI_302: "openai/",
SupportedLiteLLMProvider.JiekouAI: "openai/",
}
ChatModel = globals().get("ChatModel", {})
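
The two tables above presumably compose at call time: the prefix selects litellm's OpenAI-compatible driver and the default base URL fills in when the user supplies none. A hedged sketch of that composition (the helper name is hypothetical):

```python
# Hypothetical helper illustrating how the prefix and default URL combine.
FACTORY_DEFAULT_BASE_URL = {"Jiekou.AI": "https://api.jiekou.ai/openai"}
LITELLM_PROVIDER_PREFIX = {"Jiekou.AI": "openai/"}

def build_call_kwargs(provider: str, llm_name: str, base_url: str | None = None) -> dict:
    return {
        # e.g. "openai/gpt-4o", routed through litellm's OpenAI-compatible adapter
        "model": LITELLM_PROVIDER_PREFIX[provider] + llm_name,
        "api_base": base_url or FACTORY_DEFAULT_BASE_URL[provider],
    }

print(build_call_kwargs("Jiekou.AI", "gpt-4o"))
# {'model': 'openai/gpt-4o', 'api_base': 'https://api.jiekou.ai/openai'}
```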

View File

@ -22,7 +22,6 @@ import re
import time
from abc import ABC
from copy import deepcopy
from typing import Any, Protocol
from urllib.parse import urljoin
import json_repair
@ -65,10 +64,6 @@ LENGTH_NOTIFICATION_CN = "······\n由于大模型的上下文窗口大小
LENGTH_NOTIFICATION_EN = "...\nThe answer is truncated by your chosen LLM due to its limitation on context length."
class ToolCallSession(Protocol):
def tool_call(self, name: str, arguments: dict[str, Any]) -> str: ...
class Base(ABC):
def __init__(self, key, model_name, base_url, **kwargs):
timeout = int(os.environ.get("LM_TIMEOUT_SECONDS", 600))
@ -1402,6 +1397,7 @@ class LiteLLMBase(ABC):
"01.AI",
"GiteeAI",
"302.AI",
"Jiekou.AI",
]
def __init__(self, key, model_name, base_url=None, **kwargs):

View File

@ -931,3 +931,12 @@ class DeerAPIEmbed(OpenAIEmbed):
if not base_url:
base_url = "https://api.deerapi.com/v1"
super().__init__(key, model_name, base_url)
class JiekouAIEmbed(OpenAIEmbed):
_FACTORY_NAME = "Jiekou.AI"
def __init__(self, key, model_name, base_url="https://api.jiekou.ai/openai/v1/embeddings"):
if not base_url:
base_url = "https://api.jiekou.ai/openai/v1/embeddings"
super().__init__(key, model_name, base_url)
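
The `if not base_url` guard keeps the default even when a caller passes `base_url=None` or an empty string explicitly, which the keyword default alone would not catch. A minimal sketch of the pattern with a stand-in base class:

```python
# Stand-in for OpenAIEmbed, just to show the default-preserving guard.
class Embed:
    def __init__(self, key, model_name, base_url):
        self.base_url = base_url

class JiekouAIEmbed(Embed):
    def __init__(self, key, model_name, base_url="https://api.jiekou.ai/openai/v1/embeddings"):
        if not base_url:  # catches an explicit base_url=None or "" from config
            base_url = "https://api.jiekou.ai/openai/v1/embeddings"
        super().__init__(key, model_name, base_url)

assert JiekouAIEmbed("key", "baai/bge-m3", base_url=None).base_url.endswith("/v1/embeddings")
```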

View File

@ -489,3 +489,12 @@ class Ai302Rerank(Base):
if not base_url:
base_url = "https://api.302.ai/v1/rerank"
super().__init__(key, model_name, base_url)
class JiekouAIRerank(JinaRerank):
_FACTORY_NAME = "Jiekou.AI"
def __init__(self, key, model_name, base_url="https://api.jiekou.ai/openai/v1/rerank"):
if not base_url:
base_url = "https://api.jiekou.ai/openai/v1/rerank"
super().__init__(key, model_name, base_url)

View File

@ -155,13 +155,13 @@ def qbullets_category(sections):
if re.match(pro, sec) and not not_bullet(sec):
hits[i] += 1
break
maxium = 0
maximum = 0
res = -1
for i, h in enumerate(hits):
if h <= maxium:
if h <= maximum:
continue
res = i
maxium = h
maximum = h
return res, QUESTION_PATTERN[res]
@ -222,13 +222,13 @@ def bullets_category(sections):
if re.match(p, sec) and not not_bullet(sec):
hits[i] += 1
break
maxium = 0
maximum = 0
res = -1
for i, h in enumerate(hits):
if h <= maxium:
if h <= maximum:
continue
res = i
maxium = h
maximum = h
return res
@ -723,47 +723,40 @@ def naive_merge_docx(sections, chunk_token_num=128, delimiter="\n。"):
if not sections:
return [], []
cks = [""]
images = [None]
tk_nums = [0]
cks = []
images = []
tk_nums = []
def add_chunk(t, image, pos=""):
nonlocal cks, tk_nums, delimiter
nonlocal cks, images, tk_nums
tnum = num_tokens_from_string(t)
if tnum < 8:
pos = ""
if cks[-1] == "" or tk_nums[-1] > chunk_token_num:
if t.find(pos) < 0:
if not cks or tk_nums[-1] > chunk_token_num:
# new chunk
if pos and t.find(pos) < 0:
t += pos
cks.append(t)
images.append(image)
tk_nums.append(tnum)
else:
if cks[-1].find(pos) < 0:
# add to last chunk
if pos and cks[-1].find(pos) < 0:
t += pos
cks[-1] += t
images[-1] = concat_img(images[-1], image)
tk_nums[-1] += tnum
dels = get_delimiters(delimiter)
line = ""
for sec, image in sections:
if not image:
line += sec + "\n"
continue
split_sec = re.split(r"(%s)" % dels, line + sec)
for sub_sec in split_sec:
if re.match(f"^{dels}$", sub_sec):
continue
add_chunk("\n"+sub_sec, image,"")
line = ""
pattern = r"(%s)" % dels
if line:
split_sec = re.split(r"(%s)" % dels, line)
for sec, image in sections:
split_sec = re.split(pattern, sec)
for sub_sec in split_sec:
if re.match(f"^{dels}$", sub_sec):
if not sub_sec or re.match(f"^{dels}$", sub_sec):
continue
add_chunk("\n"+sub_sec, image,"")
add_chunk("\n" + sub_sec, image, "")
return cks, images
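
The rewrite drops the old line-buffering path and the placeholder first chunk: every sub-section now goes through a single accumulate-or-append decision, and images are merged per chunk. A hedged standalone model of the loop, with word counts standing in for `num_tokens_from_string` and string concatenation for `concat_img`:

```python
# Simplified model of the merge loop above; words stand in for tokens and
# string concatenation stands in for concat_img.
import re

def merge(sections, chunk_token_num=4, delimiter="。"):
    cks, images, tk_nums = [], [], []

    def add_chunk(t, image):
        tnum = len(t.split())
        if not cks or tk_nums[-1] > chunk_token_num:  # start a new chunk
            cks.append(t); images.append(image); tk_nums.append(tnum)
        else:                                         # grow the last chunk
            cks[-1] += t
            images[-1] = (images[-1] or "") + (image or "")
            tk_nums[-1] += tnum

    for sec, image in sections:
        for sub in re.split(r"(%s)" % delimiter, sec):
            if not sub or re.match(f"^{delimiter}$", sub):
                continue
            add_chunk("\n" + sub, image)
    return cks, images

cks, images = merge([("one two three。four five", "img1"), ("six seven", "img2")])
assert len(cks) == 2 and images[0] == "img1img1"
```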

View File

@ -104,7 +104,7 @@ def test_invalid_name_dataset(get_auth):
assert res['code'] == 100
res = create_dataset(get_auth, "")
assert res['code'] == 100
assert res['code'] == 102
long_string = ""
@ -112,7 +112,7 @@ def test_invalid_name_dataset(get_auth):
long_string += random.choice(string.ascii_letters + string.digits)
res = create_dataset(get_auth, long_string)
assert res['code'] == 100
assert res['code'] == 102
print(res)

web/package-lock.json (generated): 1077 changes; diff suppressed because it is too large.

View File

@ -119,6 +119,7 @@
"zustand": "^4.5.2"
},
"devDependencies": {
"@hookform/devtools": "^4.4.0",
"@react-dev-inspector/umi4-plugin": "^2.0.1",
"@redux-devtools/extension": "^3.3.0",
"@storybook/addon-docs": "^9.1.4",

View File

@ -0,0 +1,3 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M16 4H4V20H16C18.2091 20 20 18.2091 20 16V8H24V16C24 20.4183 20.4183 24 16 24H4C1.79086 24 1.61064e-08 22.2091 0 20V0H16V4ZM24 4H20V0H24V4Z" fill="#7C3AED"/>
</svg>

New SVG icon asset, 270 B.

View File

@ -58,6 +58,7 @@ export enum LLMFactory {
Longcat = 'LongCat',
CometAPI = 'CometAPI',
DeerAPI = 'DeerAPI',
JiekouAI = 'Jiekou.AI',
Builtin = 'Builtin',
}
@ -122,5 +123,6 @@ export const IconMap = {
[LLMFactory.Longcat]: 'longcat',
[LLMFactory.CometAPI]: 'cometapi',
[LLMFactory.DeerAPI]: 'deerapi',
[LLMFactory.JiekouAI]: 'jiekouai',
[LLMFactory.Builtin]: 'builtin',
};

View File

@ -13,7 +13,7 @@ export const LogicalOperatorIcon = function OperatorIcon({
<IconFont
name={icon}
className={cn('size-4', {
'rotate-180': value === '>',
'rotate-180': value === ComparisonOperator.GreatThan,
})}
></IconFont>
);

View File

@ -79,8 +79,9 @@ export function AccordionOperators({
Operator.Code,
Operator.StringTransform,
Operator.DataOperations,
Operator.VariableAssigner,
Operator.ListOperations,
// Operator.VariableAssigner,
Operator.VariableAssigner,
Operator.VariableAggregator,
]}
isCustomDropdown={isCustomDropdown}

View File

@ -55,8 +55,8 @@ function InnerRetrievalNode({
<div className="flex items-center gap-1.5">
<RAGFlowAvatar
className="size-6 rounded-lg"
avatar={id}
name={item?.name || (label as string) || 'CN'}
avatar={item?.avatar}
name={item ? item?.name : id}
/>
<div className={'truncate flex-1'}>{label || item?.name}</div>

View File

@ -847,6 +847,31 @@ export enum JsonSchemaDataType {
Object = 'object',
}
export enum VariableAssignerLogicalOperator {
Overwrite = 'overwrite',
Clear = 'clear',
Set = 'set',
}
export enum VariableAssignerLogicalNumberOperator {
Overwrite = VariableAssignerLogicalOperator.Overwrite,
Clear = VariableAssignerLogicalOperator.Clear,
Set = VariableAssignerLogicalOperator.Set,
Add = '+=',
Subtract = '-=',
Multiply = '*=',
Divide = '/=',
}
export enum VariableAssignerLogicalArrayOperator {
Overwrite = VariableAssignerLogicalOperator.Overwrite,
Clear = VariableAssignerLogicalOperator.Clear,
Append = 'append',
Extend = 'extend',
RemoveFirst = 'remove_first',
RemoveLast = 'remove_last',
}
export enum ExportFileType {
PDF = 'pdf',
HTML = 'html',

View File

@ -19,6 +19,8 @@ type QueryVariableProps = {
hideLabel?: boolean;
className?: string;
onChange?: (value: string) => void;
pureQuery?: boolean;
value?: string;
};
export function QueryVariable({
@ -28,12 +30,34 @@ export function QueryVariable({
hideLabel = false,
className,
onChange,
pureQuery = false,
value,
}: QueryVariableProps) {
const { t } = useTranslation();
const form = useFormContext();
const finalOptions = useFilterQueryVariableOptionsByTypes(types);
const renderWidget = (
value?: string,
handleChange?: (value: string) => void,
) => (
<GroupedSelectWithSecondaryMenu
options={finalOptions}
value={value}
onChange={(val) => {
handleChange?.(val);
onChange?.(val);
}}
// allowClear
types={types}
></GroupedSelectWithSecondaryMenu>
);
if (pureQuery) {
renderWidget(value, onChange);
}
return (
<FormField
control={form.control}
@ -45,18 +69,7 @@ export function QueryVariable({
{t('flow.query')}
</FormLabel>
)}
<FormControl>
<GroupedSelectWithSecondaryMenu
options={finalOptions}
value={field.value}
onChange={(val) => {
field.onChange(val);
onChange?.(val);
}}
// allowClear
types={types}
></GroupedSelectWithSecondaryMenu>
</FormControl>
<FormControl>{renderWidget(field.value, field.onChange)}</FormControl>
<FormMessage />
</FormItem>
)}

View File

@ -10,10 +10,7 @@ import { PropsWithChildren, ReactNode, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import { JsonSchemaDataType } from '../../constant';
import { useGetStructuredOutputByValue } from '../../hooks/use-build-structured-output';
import {
hasJsonSchemaChild,
hasSpecificTypeChild,
} from '../../utils/filter-agent-structured-output';
import { hasSpecificTypeChild } from '../../utils/filter-agent-structured-output';
type DataItem = { label: ReactNode; value: string; parentLabel?: ReactNode };
@ -101,8 +98,9 @@ export function StructuredOutputSecondaryMenu({
);
if (
!hasJsonSchemaChild(structuredOutput) ||
(!isEmpty(types) && !hasSpecificTypeChild(structuredOutput, types))
!isEmpty(types) &&
!hasSpecificTypeChild(structuredOutput, types) &&
!types.some((x) => x === JsonSchemaDataType.Object)
) {
return null;
}

View File

@ -0,0 +1,290 @@
import { SelectWithSearch } from '@/components/originui/select-with-search';
import { RAGFlowFormItem } from '@/components/ragflow-form';
import { useIsDarkTheme } from '@/components/theme-provider';
import { Button } from '@/components/ui/button';
import { Input } from '@/components/ui/input';
import { Label } from '@/components/ui/label';
import { RadioGroup, RadioGroupItem } from '@/components/ui/radio-group';
import { Separator } from '@/components/ui/separator';
import { Textarea } from '@/components/ui/textarea';
import { Editor } from '@monaco-editor/react';
import * as RadioGroupPrimitive from '@radix-ui/react-radio-group';
import { X } from 'lucide-react';
import { ReactNode, useCallback } from 'react';
import { useFieldArray, useFormContext } from 'react-hook-form';
import {
JsonSchemaDataType,
VariableAssignerLogicalArrayOperator,
VariableAssignerLogicalNumberOperator,
VariableAssignerLogicalOperator,
} from '../../constant';
import { useGetVariableLabelOrTypeByValue } from '../../hooks/use-get-begin-query';
import { DynamicFormHeader } from '../components/dynamic-fom-header';
import { QueryVariable } from '../components/query-variable';
import { useBuildLogicalOptions } from './use-build-logical-options';
type SelectKeysProps = {
name: string;
label: ReactNode;
tooltip?: string;
keyField?: string;
valueField?: string;
operatorField?: string;
};
type RadioGroupProps = React.ComponentProps<typeof RadioGroupPrimitive.Root>;
type RadioButtonProps = Partial<
Omit<RadioGroupProps, 'onValueChange'> & {
onChange: RadioGroupProps['onValueChange'];
}
>;
function RadioButton({ value, onChange }: RadioButtonProps) {
return (
<RadioGroup
defaultValue="yes"
className="flex"
value={value}
onValueChange={onChange}
>
<div className="flex items-center gap-3">
<RadioGroupItem value="yes" id="r1" />
<Label htmlFor="r1">Yes</Label>
</div>
<div className="flex items-center gap-3">
<RadioGroupItem value="no" id="r2" />
<Label htmlFor="r2">No</Label>
</div>
</RadioGroup>
);
}
const EmptyFields = [
VariableAssignerLogicalOperator.Clear,
VariableAssignerLogicalArrayOperator.RemoveFirst,
VariableAssignerLogicalArrayOperator.RemoveLast,
];
const EmptyValueMap = {
[JsonSchemaDataType.String]: '',
[JsonSchemaDataType.Number]: 0,
[JsonSchemaDataType.Boolean]: 'yes',
[JsonSchemaDataType.Object]: {},
[JsonSchemaDataType.Array]: [],
};
export function DynamicVariables({
name,
label,
tooltip,
keyField = 'variable',
valueField = 'parameter',
operatorField = 'operator',
}: SelectKeysProps) {
const form = useFormContext();
const { getType } = useGetVariableLabelOrTypeByValue();
const isDarkTheme = useIsDarkTheme();
const { fields, remove, append, update } = useFieldArray({
name: name,
control: form.control,
});
const { buildLogicalOptions } = useBuildLogicalOptions();
const getVariableType = useCallback(
(keyFieldName: string) => {
const key = form.getValues(keyFieldName);
return getType(key);
},
[form, getType],
);
const renderParameter = useCallback(
(
keyFieldName: string,
operatorFieldName: string,
valueFieldAlias: string,
) => {
console.log(
'🚀 ~ DynamicVariables ~ valueFieldAlias:',
form.getValues(valueFieldAlias),
);
const logicalOperator = form.getValues(operatorFieldName);
const type = getVariableType(keyFieldName);
if (EmptyFields.includes(logicalOperator)) {
return null;
} else if (
logicalOperator === VariableAssignerLogicalOperator.Overwrite ||
VariableAssignerLogicalArrayOperator.Extend === logicalOperator
) {
return (
<QueryVariable types={[type]} hideLabel pureQuery></QueryVariable>
);
} else if (logicalOperator === VariableAssignerLogicalOperator.Set) {
if (type === JsonSchemaDataType.Boolean) {
return <RadioButton></RadioButton>;
}
if (type === JsonSchemaDataType.Number) {
return <Input className="w-full" type="number"></Input>;
}
if (type === JsonSchemaDataType.Object) {
return (
<Editor
height={300}
theme={isDarkTheme ? 'vs-dark' : 'vs'}
language={'json'}
options={{
minimap: { enabled: false },
automaticLayout: true,
}}
/>
);
}
if (type === JsonSchemaDataType.String) {
return <Textarea></Textarea>;
}
} else if (
Object.values(VariableAssignerLogicalNumberOperator).some(
(x) => logicalOperator === x,
)
) {
return <Input className="w-full" type="number"></Input>;
} else if (
logicalOperator === VariableAssignerLogicalArrayOperator.Append
) {
const subType = type.match(/<([^>]+)>/).at(1);
return (
<QueryVariable types={[subType]} hideLabel pureQuery></QueryVariable>
);
}
},
[form, getVariableType, isDarkTheme],
);
const handleVariableChange = useCallback(
(operatorFieldAlias: string, valueFieldAlias: string) => {
console.log(
'🚀 ~ DynamicVariables ~ operatorFieldAlias:',
operatorFieldAlias,
);
return () => {
form.setValue(
operatorFieldAlias,
VariableAssignerLogicalOperator.Overwrite,
{ shouldDirty: true, shouldValidate: true },
);
form.setValue(valueFieldAlias, '', {
shouldDirty: true,
shouldValidate: true,
});
};
},
[form],
);
const handleOperatorChange = useCallback(
(
valueFieldAlias: string,
keyFieldAlias: string,
value: string,
index: number,
) => {
const type = getVariableType(keyFieldAlias);
console.log('🚀 ~ DynamicVariables ~ type:', type);
let parameter = EmptyValueMap[type as keyof typeof EmptyValueMap];
if (value === VariableAssignerLogicalOperator.Overwrite) {
parameter = '';
}
if (value !== VariableAssignerLogicalOperator.Clear) {
form.setValue(valueFieldAlias, parameter, {
shouldDirty: true,
shouldValidate: true,
});
// form.trigger(valueFieldAlias);
// update(index, { [valueField]: parameter });
}
},
[form, getVariableType],
);
return (
<section className="space-y-2">
<DynamicFormHeader
label={label}
tooltip={tooltip}
onClick={() => append({ [keyField]: '', [valueField]: '' })}
></DynamicFormHeader>
<div className="space-y-5">
{fields.map((field, index) => {
const keyFieldAlias = `${name}.${index}.${keyField}`;
const valueFieldAlias = `${name}.${index}.${valueField}`;
const operatorFieldAlias = `${name}.${index}.${operatorField}`;
return (
<section key={field.id} className="flex gap-2">
<div className="flex-1 space-y-3 min-w-0">
<div className="flex items-center">
<QueryVariable
name={keyFieldAlias}
hideLabel
className="flex-1 min-w-0"
onChange={handleVariableChange(
operatorFieldAlias,
valueFieldAlias,
)}
></QueryVariable>
<Separator className="w-2" />
<RAGFlowFormItem name={operatorFieldAlias} className="w-1/3">
{({ onChange, value }) => (
<SelectWithSearch
value={value}
onChange={(val) => {
handleOperatorChange(
valueFieldAlias,
keyFieldAlias,
val,
index,
);
onChange(val);
}}
options={buildLogicalOptions(
getVariableType(keyFieldAlias),
)}
></SelectWithSearch>
)}
</RAGFlowFormItem>
</div>
<RAGFlowFormItem name={valueFieldAlias} className="w-full">
{renderParameter(
keyFieldAlias,
operatorFieldAlias,
valueFieldAlias,
)}
</RAGFlowFormItem>
</div>
<Button variant={'ghost'} onClick={() => remove(index)}>
<X />
</Button>
</section>
);
})}
</div>
</section>
);
}

View File

@ -1,98 +1,51 @@
import { SelectWithSearch } from '@/components/originui/select-with-search';
import { RAGFlowFormItem } from '@/components/ragflow-form';
import { Form } from '@/components/ui/form';
import { Separator } from '@/components/ui/separator';
import { buildOptions } from '@/utils/form';
import { zodResolver } from '@hookform/resolvers/zod';
import { memo } from 'react';
import { useForm } from 'react-hook-form';
import { useTranslation } from 'react-i18next';
import { z } from 'zod';
import {
JsonSchemaDataType,
Operations,
initialDataOperationsValues,
} from '../../constant';
import { initialDataOperationsValues } from '../../constant';
import { useFormValues } from '../../hooks/use-form-values';
import { useWatchFormChange } from '../../hooks/use-watch-form-change';
import { INextOperatorForm } from '../../interface';
import { buildOutputList } from '../../utils/build-output-list';
import { FormWrapper } from '../components/form-wrapper';
import { Output, OutputSchema } from '../components/output';
import { QueryVariableList } from '../components/query-variable-list';
import { DynamicVariables } from './dynamic-variables';
export const RetrievalPartialSchema = {
query: z.array(z.object({ input: z.string().optional() })),
operations: z.string(),
select_keys: z.array(z.object({ name: z.string().optional() })).optional(),
remove_keys: z.array(z.object({ name: z.string().optional() })).optional(),
updates: z
.array(
z.object({ key: z.string().optional(), value: z.string().optional() }),
)
.optional(),
rename_keys: z
.array(
z.object({
old_key: z.string().optional(),
new_key: z.string().optional(),
}),
)
.optional(),
filter_values: z
.array(
z.object({
key: z.string().optional(),
value: z.string().optional(),
operator: z.string().optional(),
}),
)
.optional(),
...OutputSchema,
export const VariableAssignerSchema = {
variables: z.array(
z.object({
variable: z.string().optional(),
operator: z.string().optional(),
parameter: z.string().or(z.number()).or(z.boolean()).optional(),
}),
),
};
export const FormSchema = z.object(RetrievalPartialSchema);
export const FormSchema = z.object(VariableAssignerSchema);
export type DataOperationsFormSchemaType = z.infer<typeof FormSchema>;
export type VariableAssignerFormSchemaType = z.infer<typeof FormSchema>;
const outputList = buildOutputList(initialDataOperationsValues.outputs);
// const outputList = buildOutputList(initialVariableAssignerValues.outputs);
function VariableAssignerForm({ node }: INextOperatorForm) {
const { t } = useTranslation();
const defaultValues = useFormValues(initialDataOperationsValues, node);
const form = useForm<DataOperationsFormSchemaType>({
const form = useForm<VariableAssignerFormSchemaType>({
defaultValues: defaultValues,
mode: 'onChange',
resolver: zodResolver(FormSchema),
shouldUnregister: true,
});
const OperationsOptions = buildOptions(
Operations,
t,
`flow.operationsOptions`,
true,
);
useWatchFormChange(node?.id, form, true);
return (
<Form {...form}>
<FormWrapper>
<QueryVariableList
tooltip={t('flow.queryTip')}
label={t('flow.query')}
types={[JsonSchemaDataType.Array, JsonSchemaDataType.Object]}
></QueryVariableList>
<Separator />
<RAGFlowFormItem name="operations" label={t('flow.operations')}>
<SelectWithSearch options={OperationsOptions} allowClear />
</RAGFlowFormItem>
<Output list={outputList} isFormRequired></Output>
<DynamicVariables name="variables" label="Variables"></DynamicVariables>
{/* <Output list={outputList} isFormRequired></Output> */}
</FormWrapper>
{/* <DevTool control={form.control} placement="top-left" /> */}
{/* set up the dev tool */}
</Form>
);
}

View File

@ -0,0 +1,28 @@
import { buildOptions } from '@/utils/form';
import { useCallback } from 'react';
import {
JsonSchemaDataType,
VariableAssignerLogicalArrayOperator,
VariableAssignerLogicalNumberOperator,
VariableAssignerLogicalOperator,
} from '../../constant';
export function useBuildLogicalOptions() {
const buildLogicalOptions = useCallback((type: string) => {
if (
type?.toLowerCase().startsWith(JsonSchemaDataType.Array.toLowerCase())
) {
return buildOptions(VariableAssignerLogicalArrayOperator);
}
if (type === JsonSchemaDataType.Number) {
return buildOptions(VariableAssignerLogicalNumberOperator);
}
return buildOptions(VariableAssignerLogicalOperator);
}, []);
return {
buildLogicalOptions,
};
}

View File

@ -7,8 +7,11 @@ import {
} from '../constant';
import useGraphStore from '../store';
function splitValue(value?: string) {
return typeof value === 'string' ? value?.split('@') : [];
}
function getNodeId(value: string) {
return value.split('@').at(0);
return splitValue(value).at(0);
}
export function useShowSecondaryMenu() {
@ -63,7 +66,7 @@ export function useFindAgentStructuredOutputLabel() {
}>,
) => {
// agent structured output
const fields = value.split('@');
const fields = splitValue(value);
if (
getOperatorTypeFromId(fields.at(0)) === Operator.Agent &&
fields.at(1)?.startsWith(AgentStructuredOutputField)
@ -130,7 +133,7 @@ export function useFindAgentStructuredOutputTypeByValue() {
if (!value) {
return;
}
const fields = value.split('@');
const fields = splitValue(value);
const nodeId = fields.at(0);
const jsonSchema = filterStructuredOutput(value);
@ -163,7 +166,7 @@ export function useFindAgentStructuredOutputLabelByValue() {
const operatorName = getNode(getNodeId(value ?? ''))?.data.name;
if (operatorName) {
return operatorName + ' / ' + value?.split('@').at(1);
return operatorName + ' / ' + splitValue(value).at(1);
}
}

View File

@ -232,8 +232,11 @@ export function useFilterQueryVariableOptionsByTypes(
...x,
options: x.options.filter(
(y) =>
types?.some((x) => toLower(y.type).includes(x)) ||
y.type === undefined, // agent structured output
types?.some((x) =>
toLower(x).startsWith('array')
? toLower(y.type).includes(toLower(x))
: toLower(y.type) === toLower(x),
) || y.type === undefined, // agent structured output
),
};
})

View File

@ -1,14 +1,18 @@
import { JSONSchema } from '@/components/jsonjoy-builder';
import { get, isPlainObject } from 'lodash';
import { get, isPlainObject, toLower } from 'lodash';
import { JsonSchemaDataType } from '../constant';
function predicate(types: string[], type: string) {
return types.some((x) => toLower(x) === toLower(type));
}
export function hasSpecificTypeChild(
data: Record<string, any> | Array<any>,
types: string[] = [],
) {
if (Array.isArray(data)) {
for (const value of data) {
if (isPlainObject(value) && types.some((x) => x === value.type)) {
if (isPlainObject(value) && predicate(types, value.type)) {
return true;
}
if (hasSpecificTypeChild(value, types)) {
@ -19,7 +23,7 @@ export function hasSpecificTypeChild(
if (isPlainObject(data)) {
for (const value of Object.values(data)) {
if (isPlainObject(value) && types.some((x) => x === value.type)) {
if (isPlainObject(value) && predicate(types, value.type)) {
return true;
}

View File

@ -46,6 +46,7 @@ const orderFactoryList = [
LLMFactory.Ai302,
LLMFactory.CometAPI,
LLMFactory.DeerAPI,
LLMFactory.JiekouAI,
];
export const sortLLmFactoryListBySpecifiedOrder = (list: IFactory[]) => {