let's load model from local (#163)

Author: KevinHuSh
Date: 2024-03-28 16:10:47 +08:00
Committed by: GitHub
Parent: f3477202fe
Commit: a5384446e3
7 changed files with 31 additions and 48 deletions


@@ -18,7 +18,7 @@ from api.utils.file_utils import get_project_base_directory
 from deepdoc.vision import OCR, Recognizer, LayoutRecognizer, TableStructureRecognizer
 from rag.nlp import huqie
 from copy import deepcopy
-from huggingface_hub import hf_hub_download, snapshot_download
+from huggingface_hub import snapshot_download
 logging.getLogger("pdfminer").setLevel(logging.WARNING)
@@ -36,18 +36,18 @@ class HuParser:
         if torch.cuda.is_available():
             self.updown_cnt_mdl.set_param({"device": "cuda"})
         try:
-            model_dir = snapshot_download(
-                repo_id="InfiniFlow/text_concat_xgb_v1.0",
-                local_dir=os.path.join(
+            model_dir = os.path.join(
                     get_project_base_directory(),
-                    "rag/res/deepdoc"),
-                local_files_only=True)
+                    "rag/res/deepdoc")
+            self.updown_cnt_mdl.load_model(os.path.join(
+                model_dir, "updown_concat_xgb.model"))
         except Exception as e:
             model_dir = snapshot_download(
                 repo_id="InfiniFlow/text_concat_xgb_v1.0")
-        self.updown_cnt_mdl.load_model(os.path.join(
-            model_dir, "updown_concat_xgb.model"))
+            self.updown_cnt_mdl.load_model(os.path.join(
+                model_dir, "updown_concat_xgb.model"))
         self.page_from = 0
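The net effect of this hunk, as a minimal sketch: the xgboost model is loaded from the project's bundled rag/res/deepdoc directory first, and snapshot_download is used only when that fails. The loader callable and LOCAL_MODEL_DIR constant below are illustrative stand-ins for self.updown_cnt_mdl.load_model and get_project_base_directory(); they are not part of the diff.

import os

from huggingface_hub import snapshot_download

# Illustrative stand-in for get_project_base_directory() + "rag/res/deepdoc".
LOCAL_MODEL_DIR = os.path.join(os.getcwd(), "rag/res/deepdoc")


def load_updown_model(loader):
    """Prefer the model file bundled with the project; download only as a fallback."""
    try:
        # Local copy shipped in rag/res/deepdoc -- no network access needed.
        loader(os.path.join(LOCAL_MODEL_DIR, "updown_concat_xgb.model"))
    except Exception:
        # Local file missing or unreadable: fetch the snapshot from the Hugging Face Hub.
        model_dir = snapshot_download(repo_id="InfiniFlow/text_concat_xgb_v1.0")
        loader(os.path.join(model_dir, "updown_concat_xgb.model"))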
"""
If you have trouble downloading HuggingFace models, -_^ this might help!!
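The docstring is truncated here. As a hedged illustration of the kind of workaround such a note usually points to (not taken from this diff): huggingface_hub honours the HF_ENDPOINT environment variable, so downloads can be routed through a mirror when the default endpoint is unreachable. The mirror URL below is an example, not a project recommendation.

import os

# Set the endpoint before importing huggingface_hub; the value is read at import time.
os.environ.setdefault("HF_ENDPOINT", "https://hf-mirror.com")  # example mirror URL

from huggingface_hub import snapshot_download

# Repo id taken from the diff above; the download now goes through the mirror endpoint.
model_dir = snapshot_download(repo_id="InfiniFlow/text_concat_xgb_v1.0")
print(model_dir)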