let's load model from local (#163)

KevinHuSh
2024-03-28 16:10:47 +08:00
committed by GitHub
parent f3477202fe
commit a5384446e3
7 changed files with 31 additions and 48 deletions

@@ -28,16 +28,13 @@ from api.utils.file_utils import get_project_base_directory
 from rag.utils import num_tokens_from_string
 try:
-    model_dir = snapshot_download(
-        repo_id="BAAI/bge-large-zh-v1.5",
-        local_dir=os.path.join(
+    flag_model = FlagModel(os.path.join(
             get_project_base_directory(),
             "rag/res/bge-large-zh-v1.5"),
-        local_files_only=True)
+        query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
+        use_fp16=torch.cuda.is_available())
 except Exception as e:
-    model_dir = snapshot_download(repo_id="BAAI/bge-large-zh-v1.5")
-    flag_model = FlagModel(model_dir,
+    flag_model = FlagModel("BAAI/bge-large-zh-v1.5",
                            query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                            use_fp16=torch.cuda.is_available())
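
In effect, the patch drops the up-front snapshot_download call and points FlagModel directly at the copy of bge-large-zh-v1.5 bundled under rag/res/ in the project tree, falling back to the "BAAI/bge-large-zh-v1.5" repo id (and thus a Hugging Face download) only if the local load fails. A minimal standalone sketch of the resulting loading logic, assuming the FlagEmbedding package and the repo's get_project_base_directory helper are available; the QUERY_INSTRUCTION constant is introduced here only for readability:

import os

import torch
from FlagEmbedding import FlagModel

from api.utils.file_utils import get_project_base_directory

# BGE retrieval query instruction, roughly: "Generate a representation for
# this sentence to retrieve related articles:"
QUERY_INSTRUCTION = "为这个句子生成表示以用于检索相关文章:"

try:
    # Prefer the weights shipped inside the project tree; no network access needed.
    flag_model = FlagModel(
        os.path.join(get_project_base_directory(), "rag/res/bge-large-zh-v1.5"),
        query_instruction_for_retrieval=QUERY_INSTRUCTION,
        use_fp16=torch.cuda.is_available())
except Exception:
    # Fall back to the repo id, which lets FlagEmbedding resolve and download
    # the model from the Hugging Face hub.
    flag_model = FlagModel(
        "BAAI/bge-large-zh-v1.5",
        query_instruction_for_retrieval=QUERY_INSTRUCTION,
        use_fp16=torch.cuda.is_available())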