Mirror of https://github.com/infiniflow/ragflow.git (synced 2026-02-02 08:35:08 +08:00)
Fix: migrate deprecated Langfuse API from v2 to v3 (#9204)
### What problem does this PR solve?

Fix:

```bash
'Langfuse' object has no attribute 'trace'
```

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
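For context, a minimal sketch of the failing call, assuming the Langfuse Python SDK v3 (credentials and host below are placeholders, not values from this repo). The v2 `trace` entry point no longer exists on the `Langfuse` client, which is what produces the error above:

```python
# Minimal sketch reproducing the reported error (placeholder keys/host).
from langfuse import Langfuse

langfuse = Langfuse(public_key="pk-...", secret_key="sk-...", host="https://cloud.langfuse.com")

# v2-style call: on the v3 SDK the client has no `trace` attribute, so this raises
# AttributeError: 'Langfuse' object has no attribute 'trace'
langfuse.trace(name="chat-session")
```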
```diff
@@ -208,12 +208,14 @@ def chat(dialog, messages, stream=True, **kwargs):
     check_llm_ts = timer()
 
     langfuse_tracer = None
+    trace_context = {}
     langfuse_keys = TenantLangfuseService.filter_by_tenant(tenant_id=dialog.tenant_id)
     if langfuse_keys:
         langfuse = Langfuse(public_key=langfuse_keys.public_key, secret_key=langfuse_keys.secret_key, host=langfuse_keys.host)
         if langfuse.auth_check():
             langfuse_tracer = langfuse
-            langfuse.trace = langfuse_tracer.trace(name=f"{dialog.name}-{llm_model_config['llm_name']}")
+            trace_id = langfuse_tracer.create_trace_id()
+            trace_context = {"trace_id": trace_id}
 
     check_langfuse_tracer_ts = timer()
     kbs, embd_mdl, rerank_mdl, chat_mdl, tts_mdl = get_models(dialog)
```
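As a standalone illustration of the new trace setup in the hunk above, a minimal sketch assuming the Langfuse v3 SDK (keys and host are placeholders):

```python
# Sketch of the v3 trace setup used above (placeholder credentials).
from langfuse import Langfuse

langfuse = Langfuse(public_key="pk-...", secret_key="sk-...", host="https://cloud.langfuse.com")

trace_context = {}
if langfuse.auth_check():                   # verify the keys before enabling tracing
    trace_id = langfuse.create_trace_id()   # v3: create an explicit trace id instead of calling langfuse.trace(...)
    trace_context = {"trace_id": trace_id}  # handed to later observations via trace_context=...
```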
```diff
@@ -400,17 +402,19 @@ def chat(dialog, messages, stream=True, **kwargs):
             f" - Token speed: {int(tk_num / (generate_result_time_cost / 1000.0))}/s"
         )
 
-        langfuse_output = "\n" + re.sub(r"^.*?(### Query:.*)", r"\1", prompt, flags=re.DOTALL)
-        langfuse_output = {"time_elapsed:": re.sub(r"\n", " \n", langfuse_output), "created_at": time.time()}
-
         # Add a condition check to call the end method only if langfuse_tracer exists
         if langfuse_tracer and "langfuse_generation" in locals():
-            langfuse_generation.end(output=langfuse_output)
+            langfuse_output = "\n" + re.sub(r"^.*?(### Query:.*)", r"\1", prompt, flags=re.DOTALL)
+            langfuse_output = {"time_elapsed:": re.sub(r"\n", " \n", langfuse_output), "created_at": time.time()}
+            langfuse_generation.update(output=langfuse_output)
+            langfuse_generation.end()
 
         return {"answer": think + answer, "reference": refs, "prompt": re.sub(r"\n", " \n", prompt), "created_at": time.time()}
 
     if langfuse_tracer:
-        langfuse_generation = langfuse_tracer.trace.generation(name="chat", model=llm_model_config["llm_name"], input={"prompt": prompt, "prompt4citation": prompt4citation, "messages": msg})
+        langfuse_generation = langfuse_tracer.start_generation(
+            trace_context=trace_context, name="chat", model=llm_model_config["llm_name"], input={"prompt": prompt, "prompt4citation": prompt4citation, "messages": msg}
+        )
 
     if stream:
         last_ans = ""
```
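Taken together, the v3 generation lifecycle used in the second hunk looks roughly like the sketch below; the model name and payloads are placeholders, not values from the repo:

```python
# Sketch of the v3 generation lifecycle shown above (placeholder values).
from langfuse import Langfuse

langfuse = Langfuse(public_key="pk-...", secret_key="sk-...", host="https://cloud.langfuse.com")
trace_context = {"trace_id": langfuse.create_trace_id()}

generation = langfuse.start_generation(
    trace_context=trace_context,             # ties the generation to the trace id created above
    name="chat",
    model="example-model",                   # placeholder; the PR passes llm_model_config["llm_name"]
    input={"prompt": "..."},
)
# ... run the LLM call and collect the answer ...
generation.update(output={"answer": "..."})  # v3: attach the output via update()
generation.end()                             # then close the generation; the PR no longer passes output= to end()
```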