Mirror of https://github.com/infiniflow/ragflow.git
let's load model from local (#163)
@@ -28,16 +28,13 @@ from api.utils.file_utils import get_project_base_directory
 from rag.utils import num_tokens_from_string
 
 try:
-    model_dir = snapshot_download(
-        repo_id="BAAI/bge-large-zh-v1.5",
-        local_dir=os.path.join(
+    flag_model = FlagModel(os.path.join(
         get_project_base_directory(),
         "rag/res/bge-large-zh-v1.5"),
-        local_files_only=True)
+        query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
+        use_fp16=torch.cuda.is_available())
 except Exception as e:
-    model_dir = snapshot_download(repo_id="BAAI/bge-large-zh-v1.5")
-
-    flag_model = FlagModel(model_dir,
+    flag_model = FlagModel("BAAI/bge-large-zh-v1.5",
         query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
         use_fp16=torch.cuda.is_available())
 
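For reference, a minimal, self-contained sketch of what the loading logic looks like after this change. This is an illustration under assumptions, not a verbatim copy of the file: it assumes the FlagEmbedding package provides FlagModel with these keyword arguments, and that get_project_base_directory from api.utils.file_utils resolves the project root where rag/res/ lives.

# Sketch: prefer the copy of BAAI/bge-large-zh-v1.5 bundled under rag/res/,
# and only fall back to resolving the model by name (which downloads it from
# the Hugging Face hub) when the local copy is missing or unreadable.
import os

import torch
from FlagEmbedding import FlagModel

from api.utils.file_utils import get_project_base_directory

try:
    # No network access needed when the model ships with the project.
    flag_model = FlagModel(
        os.path.join(get_project_base_directory(), "rag/res/bge-large-zh-v1.5"),
        query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
        use_fp16=torch.cuda.is_available())
except Exception:
    # Local copy unavailable: let FlagEmbedding fetch the model by name.
    flag_model = FlagModel(
        "BAAI/bge-large-zh-v1.5",
        query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
        use_fp16=torch.cuda.is_available())

In both branches the resulting flag_model exposes the same encode / encode_queries interface, so callers are unaffected by where the weights came from.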