Double-check the error when adding an LLM. (#5237)

### What problem does this PR solve?

#5227

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
Author: Kevin Hu
Date: 2025-02-21 19:09:49 +08:00
Committed by: GitHub
Parent: d78010c376
Commit: ef8847eda7


@@ -230,7 +230,7 @@ def add_llm():
 try:
     m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}], {
         "temperature": 0.9})
-    if not tc:
+    if not tc and m.find("**ERROR**:") >= 0:
         raise Exception(m)
 except Exception as e:
     msg += f"\nFail to access model({mdl_nm})." + str(e)
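
For context, here is a minimal sketch of how the tightened check behaves in isolation. It assumes the `mdl.chat(system, history, gen_conf) -> (answer, token_count)` interface implied by the hunk above, and assumes (as the new condition suggests) that the chat wrapper embeds an `**ERROR**:` prefix in the reply when the call fails. `probe_chat_model` is a hypothetical helper written for illustration, not code from the repository:

```python
# Hypothetical helper for illustration only; it mirrors the condition changed
# in this commit. Assumes `mdl.chat(system, history, gen_conf)` returns a
# tuple of (answer_text, token_count), as implied by the diff.

def probe_chat_model(mdl, mdl_nm: str) -> str:
    """Send a tiny prompt to check that the model is reachable.

    Returns an empty string on success, otherwise an error message.
    """
    msg = ""
    try:
        m, tc = mdl.chat(
            None,
            [{"role": "user", "content": "Hello! How are you doing!"}],
            {"temperature": 0.9},
        )
        # Before the fix: `if not tc:` -- a missing or zero token count alone
        # was treated as failure. After the fix the reply must also contain
        # the "**ERROR**:" marker, so providers that return no usage stats
        # are not rejected by mistake.
        if not tc and m.find("**ERROR**:") >= 0:
            raise Exception(m)
    except Exception as e:
        msg += f"\nFail to access model({mdl_nm})." + str(e)
    return msg
```

The "double check" in the title refers to this combined condition: the probe now fails only when the reply both lacks a token count and carries the explicit error marker.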