Feat: add gitee as LLM provider. (#8545)
### What problem does this PR solve?

Adds Gitee AI as an LLM provider, covering chat, embedding, image-to-text, speech-to-text, and rerank models.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
@@ -3287,6 +3287,420 @@
             }
         ]
     },
+    {
+        "name": "GiteeAI",
+        "logo": "",
+        "tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT,SPEECH2TEXT,TEXT RE-RANK",
+        "status": "1",
+        "llm": [
+            {
+                "llm_name": "ERNIE-4.5-Turbo",
+                "tags": "LLM,CHAT",
+                "max_tokens": 32768,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "ERNIE-X1-Turbo",
+                "tags": "LLM,CHAT",
+                "max_tokens": 4096,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "DeepSeek-R1",
+                "tags": "LLM,CHAT",
+                "max_tokens": 65792,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "DeepSeek-V3",
+                "tags": "LLM,CHAT",
+                "max_tokens": 65792,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-235B-A22B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-30B-A3B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-32B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-8B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-4B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-0.6B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 32000,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "QwQ-32B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 131072,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "DeepSeek-R1-Distill-Qwen-32B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 65792,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "DeepSeek-R1-Distill-Qwen-14B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 65792,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "DeepSeek-R1-Distill-Qwen-7B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 65792,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "DeepSeek-R1-Distill-Qwen-1.5B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 65792,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen2.5-72B-Instruct",
+                "tags": "LLM,CHAT",
+                "max_tokens": 4096,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "Qwen2.5-32B-Instruct",
+                "tags": "LLM,CHAT",
+                "max_tokens": 4096,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen2.5-14B-Instruct",
+                "tags": "LLM,CHAT",
+                "max_tokens": 4096,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "Qwen2.5-7B-Instruct",
+                "tags": "LLM,CHAT",
+                "max_tokens": 131072,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "Qwen2-72B-Instruct",
+                "tags": "LLM,CHAT",
+                "max_tokens": 131072,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen2-7B-Instruct",
+                "tags": "LLM,CHAT",
+                "max_tokens": 131072,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "GLM-4-32B",
+                "tags": "LLM,CHAT",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "GLM-4-9B-0414",
+                "tags": "LLM,CHAT",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "glm-4-9b-chat",
+                "tags": "LLM,CHAT",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "internlm3-8b-instruct",
+                "tags": "LLM,CHAT",
+                "max_tokens": 4096,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Yi-34B-Chat",
+                "tags": "LLM,CHAT",
+                "max_tokens": 32768,
+                "model_type": "chat",
+                "is_tools": false
+            },
+            {
+                "llm_name": "ERNIE-4.5-Turbo-VL",
+                "tags": "LLM,IMAGE2TEXT",
+                "max_tokens": 4096,
+                "model_type": "image2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen2.5-VL-32B-Instruct",
+                "tags": "LLM,IMAGE2TEXT",
+                "max_tokens": 32768,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "Qwen2-VL-72B",
+                "tags": "LLM,IMAGE2TEXT",
+                "max_tokens": 4096,
+                "model_type": "image2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Align-DS-V",
+                "tags": "LLM,IMAGE2TEXT",
+                "max_tokens": 4096,
+                "model_type": "image2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "InternVL3-78B",
+                "tags": "LLM,IMAGE2TEXT",
+                "max_tokens": 32768,
+                "model_type": "image2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "InternVL3-38B",
+                "tags": "LLM,IMAGE2TEXT",
+                "max_tokens": 32768,
+                "model_type": "image2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "InternVL2.5-78B",
+                "tags": "LLM,IMAGE2TEXT",
+                "max_tokens": 32768,
+                "model_type": "image2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "InternVL2.5-26B",
+                "tags": "LLM,IMAGE2TEXT",
+                "max_tokens": 16384,
+                "model_type": "image2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "InternVL2-8B",
+                "tags": "LLM,IMAGE2TEXT",
+                "max_tokens": 8192,
+                "model_type": "image2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen2-Audio-7B-Instruct",
+                "tags": "LLM,SPEECH2TEXT,IMAGE2TEXT",
+                "max_tokens": 8192,
+                "model_type": "speech2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "whisper-base",
+                "tags": "SPEECH2TEXT",
+                "max_tokens": 512,
+                "model_type": "speech2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "whisper-large",
+                "tags": "SPEECH2TEXT",
+                "max_tokens": 512,
+                "model_type": "speech2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "whisper-large-v3-turbo",
+                "tags": "SPEECH2TEXT",
+                "max_tokens": 512,
+                "model_type": "speech2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "whisper-large-v3",
+                "tags": "SPEECH2TEXT",
+                "max_tokens": 512,
+                "model_type": "speech2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "SenseVoiceSmall",
+                "tags": "SPEECH2TEXT",
+                "max_tokens": 512,
+                "model_type": "speech2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-Reranker-8B",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 32768,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-Reranker-4B",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 32768,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-Reranker-0.6B",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 32768,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-Embedding-8B",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 8192,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-Embedding-4B",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 4096,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "Qwen3-Embedding-0.6B",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 4096,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "jina-clip-v1",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 512,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "jina-clip-v2",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 8192,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "jina-reranker-m0",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 10240,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "bce-embedding-base_v1",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 512,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "bce-reranker-base_v1",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 512,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "bge-m3",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 8192,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "bge-reranker-v2-m3",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 8192,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "bge-large-zh-v1.5",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 1024,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "bge-small-zh-v1.5",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 512,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "nomic-embed-code",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 512,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "all-mpnet-base-v2",
+                "tags": "TEXT EMBEDDING,TEXT RE-RANK",
+                "max_tokens": 512,
+                "model_type": "embedding",
+                "is_tools": false
+            }
+        ]
+    },
     {
         "name": "Google Cloud",
         "logo": "",
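Each model entry in the factory definition above carries the same five fields (`llm_name`, `tags`, `max_tokens`, `model_type`, `is_tools`), and `model_type` is what routes a model to the chat, embedding, image2text, or speech2text code paths. Below is a minimal sketch of consuming such an entry, assuming the JSON lives in the factory configuration file (presumably `conf/llm_factories.json`) and using a trimmed-down literal here instead of loading the full file:

```python
# Minimal sketch (not part of the PR): group a factory's models by model_type,
# e.g. to list what GiteeAI offers per capability. The dict below is a trimmed
# copy of the entry added above; loading conf/llm_factories.json is assumed.
from collections import defaultdict

gitee_ai = {
    "name": "GiteeAI",
    "tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT,SPEECH2TEXT,TEXT RE-RANK",
    "status": "1",
    "llm": [
        {"llm_name": "DeepSeek-V3", "tags": "LLM,CHAT", "max_tokens": 65792, "model_type": "chat", "is_tools": False},
        {"llm_name": "Qwen2.5-VL-32B-Instruct", "tags": "LLM,IMAGE2TEXT", "max_tokens": 32768, "model_type": "image2text", "is_tools": True},
        {"llm_name": "bge-m3", "tags": "TEXT EMBEDDING,TEXT RE-RANK", "max_tokens": 8192, "model_type": "embedding", "is_tools": False},
        {"llm_name": "whisper-large-v3", "tags": "SPEECH2TEXT", "max_tokens": 512, "model_type": "speech2text", "is_tools": False},
    ],
}

by_type = defaultdict(list)
for model in gitee_ai["llm"]:
    by_type[model["model_type"]].append(model["llm_name"])

for model_type, names in sorted(by_type.items()):
    print(f"{model_type}: {', '.join(names)}")
```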
@@ -45,7 +45,8 @@ from .embedding_model import (
     HuggingFaceEmbed,
     VolcEngineEmbed,
     GPUStackEmbed,
-    NovitaEmbed
+    NovitaEmbed,
+    GiteeEmbed
 )
 from .chat_model import (
     GptTurbo,
@@ -87,6 +88,7 @@ from .chat_model import (
     HuggingFaceChat,
     GPUStackChat,
     ModelScopeChat,
+    GiteeChat
 )

 from .cv_model import (
@@ -129,7 +131,8 @@ from .rerank_model import (
     QWenRerank,
     GPUStackRerank,
     HuggingfaceRerank,
-    NovitaRerank
+    NovitaRerank,
+    GiteeRerank
 )

 from .sequence2txt_model import (
@@ -139,6 +142,7 @@ from .sequence2txt_model import (
     XinferenceSeq2txt,
     TencentCloudSeq2txt,
     GPUStackSeq2txt,
+    GiteeSeq2txt
 )

 from .tts_model import (
@@ -182,7 +186,8 @@ EmbeddingModel = {
     "HuggingFace": HuggingFaceEmbed,
     "VolcEngine": VolcEngineEmbed,
     "GPUStack": GPUStackEmbed,
-    "NovitaAI": NovitaEmbed
+    "NovitaAI": NovitaEmbed,
+    "GiteeAI": GiteeEmbed
 }

 CvModel = {
@@ -206,7 +211,7 @@ CvModel = {
     "Tencent Hunyuan": HunyuanCV,
     "Anthropic": AnthropicCV,
     "SILICONFLOW": SILICONFLOWCV,
-    "GPUStack": GPUStackCV,
+    "GPUStack": GPUStackCV
 }

 ChatModel = {
@@ -250,6 +255,7 @@ ChatModel = {
     "HuggingFace": HuggingFaceChat,
     "GPUStack": GPUStackChat,
     "ModelScope":ModelScopeChat,
+    "GiteeAI": GiteeChat
 }

 RerankModel = {
@@ -270,7 +276,8 @@ RerankModel = {
     "Tongyi-Qianwen": QWenRerank,
     "GPUStack": GPUStackRerank,
     "HuggingFace": HuggingfaceRerank,
-    "NovitaAI": NovitaRerank
+    "NovitaAI": NovitaRerank,
+    "GiteeAI": GiteeRerank
 }

 Seq2txtModel = {
@@ -280,6 +287,7 @@ Seq2txtModel = {
     "Xinference": XinferenceSeq2txt,
     "Tencent Cloud": TencentCloudSeq2txt,
     "GPUStack": GPUStackSeq2txt,
+    "GiteeAI": GiteeSeq2txt
 }

 TTSModel = {
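The hunks above wire the new classes into the per-capability registries (`EmbeddingModel`, `ChatModel`, `RerankModel`, `Seq2txtModel`), keyed by the factory name `"GiteeAI"` used in the JSON entry. A rough sketch of how such a registry is typically consumed follows; it uses simplified stand-in classes and a hypothetical `build_model` helper so the snippet runs on its own, rather than the actual package (presumably `rag/llm/__init__.py`):

```python
# Illustrative sketch only: a simplified stand-in for the registries edited above,
# showing how a factory name plus model name selects and instantiates a model class.
# The stub classes and build_model helper are placeholders, not the real code.

class GiteeChat:
    def __init__(self, key, model_name, base_url="https://ai.gitee.com/v1/"):
        self.key, self.model_name, self.base_url = key, model_name, base_url

class GiteeEmbed:
    def __init__(self, key, model_name, base_url="https://ai.gitee.com/v1/embeddings"):
        self.key, self.model_name, self.base_url = key, model_name, base_url

ChatModel = {"GiteeAI": GiteeChat}
EmbeddingModel = {"GiteeAI": GiteeEmbed}

def build_model(registry, factory, key, model_name):
    """Resolve the class registered for `factory` and construct it."""
    try:
        cls = registry[factory]
    except KeyError:
        raise ValueError(f"Unsupported provider: {factory}")
    return cls(key, model_name)

chat = build_model(ChatModel, "GiteeAI", key="YOUR_GITEE_AI_TOKEN", model_name="DeepSeek-V3")
print(type(chat).__name__, chat.base_url)
```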
@@ -1253,6 +1253,13 @@ class YiChat(Base):
         super().__init__(key, model_name, base_url, **kwargs)


+class GiteeChat(Base):
+    def __init__(self, key, model_name, base_url="https://ai.gitee.com/v1/", **kwargs):
+        if not base_url:
+            base_url = "https://ai.gitee.com/v1/"
+        super().__init__(key, model_name, base_url, **kwargs)
+
+
 class ReplicateChat(Base):
     def __init__(self, key, model_name, base_url=None, **kwargs):
         super().__init__(key, model_name, base_url=base_url, **kwargs)
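`GiteeChat` only overrides the default `base_url`; everything else is inherited from the OpenAI-style `Base` chat class, which implies `https://ai.gitee.com/v1/` speaks the OpenAI chat-completions protocol. A hedged sketch of hitting that endpoint directly with the `openai` SDK, under that assumption (token and model name are placeholders):

```python
# Hedged sketch: call the same endpoint GiteeChat points at, using the openai SDK.
# Assumes https://ai.gitee.com/v1/ is OpenAI-compatible (which subclassing the
# OpenAI-style Base implies); the token is a placeholder.
from openai import OpenAI

client = OpenAI(api_key="YOUR_GITEE_AI_TOKEN", base_url="https://ai.gitee.com/v1/")

response = client.chat.completions.create(
    model="DeepSeek-V3",  # any chat model from the GiteeAI factory entry above
    messages=[{"role": "user", "content": "Say hello in one short sentence."}],
    max_tokens=64,
)
print(response.choices[0].message.content)
```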
@@ -912,3 +912,8 @@ class GPUStackEmbed(OpenAIEmbed):
 class NovitaEmbed(SILICONFLOWEmbed):
     def __init__(self, key, model_name, base_url="https://api.novita.ai/v3/openai/embeddings"):
         super().__init__(key, model_name, base_url)
+
+
+class GiteeEmbed(SILICONFLOWEmbed):
+    def __init__(self, key, model_name, base_url="https://ai.gitee.com/v1/embeddings"):
+        super().__init__(key, model_name, base_url)
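`GiteeEmbed` reuses `SILICONFLOWEmbed` and only swaps the endpoint to `https://ai.gitee.com/v1/embeddings`, so the request and response are expected to follow the familiar OpenAI-style embeddings shape. A hedged sketch under that assumption (token is a placeholder, `bge-m3` picked from the factory list above):

```python
# Hedged sketch: call the embeddings endpoint GiteeEmbed wraps, assuming it accepts
# an OpenAI-style /v1/embeddings payload (as reusing SILICONFLOWEmbed suggests).
from openai import OpenAI

client = OpenAI(api_key="YOUR_GITEE_AI_TOKEN", base_url="https://ai.gitee.com/v1/")

result = client.embeddings.create(
    model="bge-m3",  # an embedding model from the GiteeAI factory entry
    input=["RAGFlow is a retrieval-augmented generation engine."],
)
print(len(result.data[0].embedding))  # embedding dimension
```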
@@ -630,3 +630,8 @@ class GPUStackRerank(Base):
 class NovitaRerank(JinaRerank):
     def __init__(self, key, model_name, base_url="https://api.novita.ai/v3/openai/rerank"):
         super().__init__(key, model_name, base_url)
+
+
+class GiteeRerank(JinaRerank):
+    def __init__(self, key, model_name, base_url="https://ai.gitee.com/v1/rerank"):
+        super().__init__(key, model_name, base_url)
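`GiteeRerank` inherits `JinaRerank` and only changes the endpoint to `https://ai.gitee.com/v1/rerank`, so the payload is expected to follow the Jina-style rerank convention. A hedged sketch of a raw call in that format; the field names (`model`/`query`/`documents`, results with `index` and `relevance_score`) are an assumption about the Gitee endpoint, not a documented contract, and the token is a placeholder:

```python
# Hedged sketch: a raw Jina-style rerank request against the endpoint GiteeRerank uses.
import requests

resp = requests.post(
    "https://ai.gitee.com/v1/rerank",
    headers={"Authorization": "Bearer YOUR_GITEE_AI_TOKEN"},
    json={
        "model": "bge-reranker-v2-m3",
        "query": "What is RAGFlow?",
        "documents": [
            "RAGFlow is an open-source RAG engine.",
            "Gitee is a code hosting platform.",
        ],
    },
    timeout=30,
)
resp.raise_for_status()
for item in resp.json().get("results", []):
    print(item.get("index"), item.get("relevance_score"))
```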
@@ -203,3 +203,11 @@ class GPUStackSeq2txt(Base):
         self.base_url = base_url
         self.model_name = model_name
         self.key = key
+
+
+class GiteeSeq2txt(Base):
+    def __init__(self, key, model_name="whisper-1", base_url="https://ai.gitee.com/v1/"):
+        if not base_url:
+            base_url = "https://ai.gitee.com/v1/"
+        self.client = OpenAI(api_key=key, base_url=base_url)
+        self.model_name = model_name
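`GiteeSeq2txt` constructs an OpenAI client against `https://ai.gitee.com/v1/`, which suggests transcription goes through the OpenAI-style audio route. A hedged sketch under that assumption (the audio path and token are placeholders, `whisper-large-v3` taken from the speech2text models listed above):

```python
# Hedged sketch: transcription through the OpenAI-style audio API that the
# GiteeSeq2txt constructor sets up. Assumes the endpoint mirrors the OpenAI
# transcriptions route; the file path and token are placeholders.
from openai import OpenAI

client = OpenAI(api_key="YOUR_GITEE_AI_TOKEN", base_url="https://ai.gitee.com/v1/")

with open("meeting.wav", "rb") as audio_file:
    transcript = client.audio.transcriptions.create(
        model="whisper-large-v3",  # a speech2text model from the GiteeAI factory entry
        file=audio_file,
    )
print(transcript.text)
```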