Support downloading models from ModelScope Community. (#5073)

This PR supports downloading models from ModelScope. The main
modifications are as follows:
- New Feature (non-breaking change which adds functionality)
- Documentation Update

---------

Co-authored-by: Kevin Hu <kevinhu.sh@gmail.com>
This commit is contained in:
yrk111222
2025-02-24 10:12:20 +08:00
committed by GitHub
parent 217caecfda
commit 7ce675030b
9 changed files with 30 additions and 3 deletions

View File

@@ -143,6 +143,16 @@ class HuggingFaceChat(Base):
super().__init__(key, model_name.split("___")[0], base_url)
class ModelScopeChat(Base):
    """OpenAI-compatible chat client for models served locally via ModelScope.

    Normalizes the server URL so requests target the ``/v1`` API root, then
    defers to ``Base`` (the shared OpenAI-style client) for everything else.

    Args:
        key: API key; may be None for a local, unauthenticated server.
        model_name: ModelScope model id; anything after a ``___`` marker is
            an internal suffix and is stripped before use.
        base_url: Required server URL; raises ValueError when empty.
    """
    def __init__(self, key=None, model_name="", base_url=""):
        if not base_url:
            raise ValueError("Local llm url cannot be None")
        # Drop trailing slashes, then ensure the URL ends with the /v1 path
        # segment expected by OpenAI-compatible servers.
        base_url = base_url.rstrip('/')
        if base_url.split("/")[-1] != "v1":
            # Plain string join: os.path.join is for filesystem paths and
            # would emit a backslash separator on Windows, corrupting the URL.
            base_url = base_url + "/v1"
        super().__init__(key, model_name.split("___")[0], base_url)
class DeepSeekChat(Base):
def __init__(self, key, model_name="deepseek-chat", base_url="https://api.deepseek.com/v1"):
if not base_url:
@@ -1537,4 +1547,4 @@ class GPUStackChat(Base):
raise ValueError("Local llm url cannot be None")
if base_url.split("/")[-1] != "v1-openai":
base_url = os.path.join(base_url, "v1-openai")
super().__init__(key, model_name, base_url)
super().__init__(key, model_name, base_url)