Feat: user defined prompt. (#9972)

### What problem does this PR solve?

Adds a `/prompts` endpoint that returns the built-in agent prompt templates (task analysis, plan generation, reflection, context summary and ranking, and citation guidelines) so they can be inspected and user defined.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
Kevin Hu, 2025-09-08 14:05:01 +08:00 (committed by GitHub)
parent cf18231713 · commit e9ee9269f5
11 changed files with 203 additions and 66 deletions


@@ -470,3 +470,16 @@ def sessions(canvas_id):
    except Exception as e:
        return server_error_response(e)

@manager.route('/prompts', methods=['GET'])  # noqa: F821
@login_required
def prompts():
    from rag.prompts.prompts import ANALYZE_TASK_SYSTEM, ANALYZE_TASK_USER, NEXT_STEP, REFLECT, SUMMARY4MEMORY, RANK_MEMORY, CITATION_PROMPT_TEMPLATE
    return get_json_result(data={
        "task_analysis": ANALYZE_TASK_SYSTEM + ANALYZE_TASK_USER,
        "plan_generation": NEXT_STEP,
        "reflection": REFLECT,
        "context_summary": SUMMARY4MEMORY,
        "context_ranking": RANK_MEMORY,
        "citation_guidelines": CITATION_PROMPT_TEMPLATE
    })
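
For reference, a minimal client-side sketch of how the new route might be consumed. The base URL, the `/v1/canvas/prompts` path prefix, and the session cookie name are assumptions, not taken from this diff; the route itself is behind `@login_required`, so some authenticated session is required.

```python
import requests  # assumed available: pip install requests

BASE_URL = "http://localhost:9380"             # assumed default server address
PROMPTS_URL = f"{BASE_URL}/v1/canvas/prompts"  # path prefix is an assumption


def fetch_default_prompts(session_cookie: str) -> dict:
    """Fetch the built-in prompt templates from the new /prompts route."""
    resp = requests.get(
        PROMPTS_URL,
        cookies={"session": session_cookie},  # hypothetical auth cookie name
        timeout=10,
    )
    resp.raise_for_status()
    # get_json_result wraps the payload in a "data" field
    return resp.json()["data"]


if __name__ == "__main__":
    prompts = fetch_default_prompts("YOUR_SESSION_COOKIE")
    print(sorted(prompts))  # task_analysis, plan_generation, reflection, ...
```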


@@ -400,6 +400,8 @@ def related_questions():
    chat_mdl = LLMBundle(current_user.id, LLMType.CHAT, chat_id)
    gen_conf = search_config.get("llm_setting", {"temperature": 0.9})
    if "parameter" in gen_conf:
        del gen_conf["parameter"]
    prompt = load_prompt("related_question")
    ans = chat_mdl.chat(
        prompt,
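
The two lines added in this hunk strip a `parameter` entry from the generation settings before they reach the model, presumably because the chat backend does not accept it. A minimal, self-contained sketch of that sanitization pattern; the helper name and the "Creative" value are illustrative, not from this diff:

```python
def sanitize_gen_conf(gen_conf: dict, disallowed=("parameter",)) -> dict:
    """Return a copy of gen_conf without keys the chat backend rejects."""
    return {k: v for k, v in gen_conf.items() if k not in disallowed}


# The diff mutates gen_conf in place; returning a copy keeps the original intact.
print(sanitize_gen_conf({"temperature": 0.9, "parameter": "Creative"}))
# -> {'temperature': 0.9}
```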