Mirror of https://github.com/infiniflow/ragflow.git, synced 2025-12-08 20:42:30 +08:00
Feat: add primitive support for function calls (#6840)
### What problem does this PR solve?

This PR introduces **primitive support for function calls**, enabling the system to handle basic function-call capabilities. However, this feature is currently experimental and **not yet enabled for general use**, as it is only supported by a subset of models, namely Qwen and OpenAI models.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
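For illustration, here is a minimal caller-side sketch of how the new kwargs could be used. Only the `toolcall_session`/`tools` kwargs and `chat_mdl.bind_tools(...)` appear in this PR; the import path, the `SimpleToolCallSession` class, its `tool_call` interface, and the `get_weather` tool are assumptions made for the example.

```python
# Hypothetical usage sketch -- not part of this PR.
# Assumptions: chat() lives in api/db/services/dialog_service.py, the bound
# session exposes a tool_call(name, arguments) method, and tools follow the
# OpenAI-style function schema accepted by Qwen and OpenAI models.
from api.db.services.dialog_service import chat


class SimpleToolCallSession:
    """Resolves a model-issued function call into a plain-text result."""

    def tool_call(self, name: str, arguments: dict) -> str:
        if name == "get_weather":
            return f"Sunny in {arguments.get('city', 'unknown city')}"
        return f"Unknown tool: {name}"


tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get the current weather for a city.",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}]


def ask(dialog, messages):
    # chat() binds the tools to the model only when both kwargs are present.
    for delta in chat(dialog, messages, stream=True,
                      toolcall_session=SimpleToolCallSession(), tools=tools):
        yield delta["answer"]
```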
@@ -145,6 +145,9 @@ def chat(dialog, messages, stream=True, **kwargs):
         chat_mdl = LLMBundle(dialog.tenant_id, LLMType.IMAGE2TEXT, dialog.llm_id)
     else:
         chat_mdl = LLMBundle(dialog.tenant_id, LLMType.CHAT, dialog.llm_id)
+    toolcall_session, tools = kwargs.get("toolcall_session"), kwargs.get("tools")
+    if toolcall_session and tools:
+        chat_mdl.bind_tools(toolcall_session, tools)
 
     bind_llm_ts = timer()
 
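The new lines bind the tool session to the chat model only when both kwargs are supplied. `LLMBundle.bind_tools` itself is not shown in this hunk; the sketch below is only an assumed illustration of the binding pattern (store the session and the tool schemas, then route model-issued calls back through the session), not ragflow's actual implementation.

```python
# Illustrative sketch of the binding pattern -- NOT ragflow's LLMBundle code.
class ToolAwareChatModel:
    def __init__(self):
        self.toolcall_session = None
        self.tools = []

    def bind_tools(self, toolcall_session, tools):
        # Remember the session and the tool schemas for subsequent requests.
        self.toolcall_session = toolcall_session
        self.tools = tools

    def run_tool(self, name, arguments):
        # Delegate a function call emitted by the model to the bound session.
        return self.toolcall_session.tool_call(name, arguments)
```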
@@ -338,7 +341,7 @@ def chat(dialog, messages, stream=True, **kwargs):
     langfuse_output = {"time_elapsed:": re.sub(r"\n", " \n", langfuse_output), "created_at": time.time()}
 
     # Add a condition check to call the end method only if langfuse_tracer exists
-    if langfuse_tracer and 'langfuse_generation' in locals():
+    if langfuse_tracer and "langfuse_generation" in locals():
         langfuse_generation.end(output=langfuse_output)
 
     return {"answer": think + answer, "reference": refs, "prompt": re.sub(r"\n", " \n", prompt), "created_at": time.time()}