Mirror of https://github.com/infiniflow/ragflow.git, synced 2025-12-08 20:42:30 +08:00
Fix HuggingFace model error. (#3870)
### What problem does this PR solve?

#3865

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
```diff
@@ -22,7 +22,7 @@ from abc import ABC
 from openai import OpenAI
 import openai
 from ollama import Client
-from rag.nlp import is_chinese
+from rag.nlp import is_chinese, is_english
 from rag.utils import num_tokens_from_string
 from groq import Groq
 import os
@@ -123,7 +123,7 @@ class HuggingFaceChat(Base):
             raise ValueError("Local llm url cannot be None")
         if base_url.split("/")[-1] != "v1":
             base_url = os.path.join(base_url, "v1")
-        super().__init__(key, model_name, base_url)
+        super().__init__(key, model_name.split("___")[0], base_url)
 
 
 class DeepSeekChat(Base):
```
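For readers skimming the patch, here is a minimal, self-contained sketch of the normalization the changed `__init__` performs. The helper name `build_hf_client` and the example values are illustrative, not part of RAGFlow; the sketch assumes the HuggingFace endpoint is served through an OpenAI-compatible API (e.g. TGI or vLLM) and that anything after `"___"` in the stored model name is a RAGFlow-internal suffix the endpoint would not recognize.

```python
import os

from openai import OpenAI


def build_hf_client(model_name: str, base_url: str, api_key: str = "x"):
    """Sketch of the URL and model-name normalization applied by the patch.

    Assumptions (not from the patch itself): the endpoint speaks the OpenAI
    API, and any "___" suffix on the stored model name is RAGFlow-internal.
    """
    if not base_url:
        raise ValueError("Local llm url cannot be None")
    # Append "v1" unless the URL already ends with it, mirroring the patch.
    if base_url.split("/")[-1] != "v1":
        base_url = os.path.join(base_url, "v1")
    # Keep only the part before "___" so the server sees the raw model id.
    clean_name = model_name.split("___")[0]
    return OpenAI(api_key=api_key, base_url=base_url), clean_name


# Hypothetical values: the suffix is dropped and the client targets .../v1.
client, name = build_hf_client(
    "meta-llama/Llama-3-8B-Instruct___HuggingFace",
    "http://127.0.0.1:8080",
)
# name == "meta-llama/Llama-3-8B-Instruct"
```

Stripping the suffix before calling `super().__init__` is the substance of the fix: previously the full stored name, suffix included, was passed through, which is presumably what triggered the model error reported in #3865.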