'Load LLM information from a JSON file and add support for OpenRouter' (#1533)

### What problem does this PR solve?

#1467 

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
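
Below is a minimal sketch of what loading LLM factory metadata from a JSON file could look like. The file name, path, and schema (`factory_llm_infos`, `name`, `llm`) are assumptions for illustration, not necessarily the exact layout introduced by this PR.

```python
# Hedged sketch: read LLM factory/model descriptors from a JSON config
# instead of hard-coding them. Path and schema are assumed, not confirmed.
import json
import os

CONF_PATH = os.path.join(os.path.dirname(__file__), "llm_factories.json")  # assumed location


def load_llm_factories(path: str = CONF_PATH) -> dict:
    """Return a mapping of factory name -> list of model descriptors."""
    with open(path, "r", encoding="utf-8") as f:
        data = json.load(f)
    # Assumed schema: {"factory_llm_infos": [{"name": ..., "llm": [...]}, ...]}
    return {f["name"]: f.get("llm", []) for f in data.get("factory_llm_infos", [])}
```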

---------

Co-authored-by: Zhedong Cen <cenzhedong2@126.com>
Committed by 黄腾 on 2024-07-16 15:19:43 +08:00 (via GitHub)
parent 3657b1f2a2
commit 75086f41a9
8 changed files with 2000 additions and 904 deletions


```diff
@@ -57,8 +57,8 @@ def set_api_key():
     mdl = ChatModel[factory](
         req["api_key"], llm.llm_name, base_url=req.get("base_url"))
     try:
-        m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}], {
-            "temperature": 0.9})
+        m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}],
+                         {"temperature": 0.9,'max_tokens':50})
         if not tc:
             raise Exception(m)
     except Exception as e:
```
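
The hunk above caps the API-key probe at 50 tokens so the "Hello! How are you doing!" check stays cheap. For the OpenRouter side of the change, the sketch below shows one plausible shape of an OpenAI-compatible chat wrapper that such a probe could exercise; the class name, default base URL, and error convention are assumptions based on OpenRouter's public API, not necessarily the code added in this commit.

```python
# Hedged sketch of an OpenAI-compatible OpenRouter chat wrapper (assumed names).
from openai import OpenAI


class OpenRouterChat:
    def __init__(self, key, model_name, base_url="https://openrouter.ai/api/v1"):
        # OpenRouter exposes an OpenAI-compatible endpoint, so the stock client works.
        self.client = OpenAI(api_key=key, base_url=base_url)
        self.model_name = model_name

    def chat(self, system, history, gen_conf):
        # Prepend the system prompt if one is supplied; the probe in
        # set_api_key() above passes None and a single user message.
        if system:
            history = [{"role": "system", "content": system}] + history
        try:
            resp = self.client.chat.completions.create(
                model=self.model_name, messages=history, **gen_conf)
            ans = resp.choices[0].message.content.strip()
            return ans, resp.usage.total_tokens
        except Exception as e:
            # Returning a zero token count lets the caller's `if not tc` check fail fast.
            return "**ERROR**: " + str(e), 0
```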