fix sequence2txt error and usage total token issue (#2961)

### What problem does this PR solve?

#1363

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
Kevin Hu
2024-10-22 11:38:37 +08:00
committed by GitHub
parent 6a4858a7ee
commit b2524eec49
5 changed files with 16 additions and 11 deletions

View File

@@ -26,7 +26,6 @@ from api.db.services.dialog_service import DialogService, ConversationService, c
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import LLMBundle, TenantService, TenantLLMService
from api.settings import RetCode, retrievaler
from api.utils import get_uuid
from api.utils.api_utils import get_json_result
from api.utils.api_utils import server_error_response, get_data_error_result, validate_request
from graphrag.mind_map_extractor import MindMapExtractor
@@ -187,6 +186,7 @@ def completion():
yield "data:" + json.dumps({"retcode": 0, "retmsg": "", "data": ans}, ensure_ascii=False) + "\n\n"
ConversationService.update_by_id(conv.id, conv.to_dict())
except Exception as e:
traceback.print_exc()
yield "data:" + json.dumps({"retcode": 500, "retmsg": str(e),
"data": {"answer": "**ERROR**: " + str(e), "reference": []}},
ensure_ascii=False) + "\n\n"

View File

@@ -133,7 +133,8 @@ class TenantLLMService(CommonService):
if model_config["llm_factory"] not in Seq2txtModel:
return
return Seq2txtModel[model_config["llm_factory"]](
model_config["api_key"], model_config["llm_name"], lang,
key=model_config["api_key"], model_name=model_config["llm_name"],
lang=lang,
base_url=model_config["api_base"]
)
if llm_type == LLMType.TTS:

View File

@@ -197,6 +197,7 @@ def thumbnail_img(filename, blob):
pass
return None
def thumbnail(filename, blob):
img = thumbnail_img(filename, blob)
if img is not None:
@@ -205,6 +206,7 @@ def thumbnail(filename, blob):
else:
return ''
def traversal_files(base):
for root, ds, fs in os.walk(base):
for f in fs: