Feat: add CometAPI to LLMFactory and update related mappings (#10119)
### Related issues

#10078

### What problem does this PR solve?

Integrate the CometAPI provider.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
- [x] Documentation Update
```diff
@@ -689,7 +689,7 @@
             },
             {
                 "llm_name": "glm-4",
-                "tags":"LLM,CHAT,128K",
+                "tags": "LLM,CHAT,128K",
                 "max_tokens": 128000,
                 "model_type": "chat",
                 "is_tools": true
```
```diff
@@ -4541,6 +4541,273 @@
             }
         ]
     },
+    {
+        "name": "CometAPI",
+        "logo": "",
+        "tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT",
+        "status": "1",
+        "llm": [
+            {
+                "llm_name": "gpt-5-chat-latest",
+                "tags": "LLM,CHAT,400k",
+                "max_tokens": 400000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "chatgpt-4o-latest",
+                "tags": "LLM,CHAT,128k",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gpt-5-mini",
+                "tags": "LLM,CHAT,400k",
+                "max_tokens": 400000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gpt-5-nano",
+                "tags": "LLM,CHAT,400k",
+                "max_tokens": 400000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gpt-5",
+                "tags": "LLM,CHAT,400k",
+                "max_tokens": 400000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gpt-4.1-mini",
+                "tags": "LLM,CHAT,1M",
+                "max_tokens": 1047576,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gpt-4.1-nano",
+                "tags": "LLM,CHAT,1M",
+                "max_tokens": 1047576,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gpt-4.1",
+                "tags": "LLM,CHAT,1M",
+                "max_tokens": 1047576,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gpt-4o-mini",
+                "tags": "LLM,CHAT,128k",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "o4-mini-2025-04-16",
+                "tags": "LLM,CHAT,200k",
+                "max_tokens": 200000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "o3-pro-2025-06-10",
+                "tags": "LLM,CHAT,200k",
+                "max_tokens": 200000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "claude-opus-4-1-20250805",
+                "tags": "LLM,CHAT,200k,IMAGE2TEXT",
+                "max_tokens": 200000,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "claude-opus-4-1-20250805-thinking",
+                "tags": "LLM,CHAT,200k,IMAGE2TEXT",
+                "max_tokens": 200000,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "claude-sonnet-4-20250514",
+                "tags": "LLM,CHAT,200k,IMAGE2TEXT",
+                "max_tokens": 200000,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "claude-sonnet-4-20250514-thinking",
+                "tags": "LLM,CHAT,200k,IMAGE2TEXT",
+                "max_tokens": 200000,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "claude-3-7-sonnet-latest",
+                "tags": "LLM,CHAT,200k",
+                "max_tokens": 200000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "claude-3-5-haiku-latest",
+                "tags": "LLM,CHAT,200k",
+                "max_tokens": 200000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gemini-2.5-pro",
+                "tags": "LLM,CHAT,1M,IMAGE2TEXT",
+                "max_tokens": 1000000,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gemini-2.5-flash",
+                "tags": "LLM,CHAT,1M,IMAGE2TEXT",
+                "max_tokens": 1000000,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gemini-2.5-flash-lite",
+                "tags": "LLM,CHAT,1M,IMAGE2TEXT",
+                "max_tokens": 1000000,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "gemini-2.0-flash",
+                "tags": "LLM,CHAT,1M,IMAGE2TEXT",
+                "max_tokens": 1000000,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "grok-4-0709",
+                "tags": "LLM,CHAT,131k",
+                "max_tokens": 131072,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "grok-3",
+                "tags": "LLM,CHAT,131k",
+                "max_tokens": 131072,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "grok-3-mini",
+                "tags": "LLM,CHAT,131k",
+                "max_tokens": 131072,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "grok-2-image-1212",
+                "tags": "LLM,CHAT,32k,IMAGE2TEXT",
+                "max_tokens": 32768,
+                "model_type": "image2text",
+                "is_tools": true
+            },
+            {
+                "llm_name": "deepseek-v3.1",
+                "tags": "LLM,CHAT,64k",
+                "max_tokens": 64000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "deepseek-v3",
+                "tags": "LLM,CHAT,64k",
+                "max_tokens": 64000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "deepseek-r1-0528",
+                "tags": "LLM,CHAT,164k",
+                "max_tokens": 164000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "deepseek-chat",
+                "tags": "LLM,CHAT,32k",
+                "max_tokens": 32000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "deepseek-reasoner",
+                "tags": "LLM,CHAT,64k",
+                "max_tokens": 64000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "qwen3-30b-a3b",
+                "tags": "LLM,CHAT,128k",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "qwen3-coder-plus-2025-07-22",
+                "tags": "LLM,CHAT,128k",
+                "max_tokens": 128000,
+                "model_type": "chat",
+                "is_tools": true
+            },
+            {
+                "llm_name": "text-embedding-ada-002",
+                "tags": "TEXT EMBEDDING,8K",
+                "max_tokens": 8191,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "text-embedding-3-small",
+                "tags": "TEXT EMBEDDING,8K",
+                "max_tokens": 8191,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "text-embedding-3-large",
+                "tags": "TEXT EMBEDDING,8K",
+                "max_tokens": 8191,
+                "model_type": "embedding",
+                "is_tools": false
+            },
+            {
+                "llm_name": "whisper-1",
+                "tags": "SPEECH2TEXT",
+                "max_tokens": 26214400,
+                "model_type": "speech2text",
+                "is_tools": false
+            },
+            {
+                "llm_name": "tts-1",
+                "tags": "TTS",
+                "max_tokens": 2048,
+                "model_type": "tts",
+                "is_tools": false
+            }
+        ]
+    },
     {
         "name": "Meituan",
         "logo": "",
```
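If you want to sanity-check the new catalog entries locally, a short script such as the sketch below is enough. It is a hypothetical helper, not part of this PR: the `conf/llm_factories.json` path and the top-level `factory_llm_infos` key are assumptions based on the RAGFlow repository layout, since the truncated diff above does not show the file name.

```python
import json

# Hypothetical sanity check for the new CometAPI entries. The catalog path and
# the "factory_llm_infos" key are assumptions; adjust them to your checkout.
CATALOG = "conf/llm_factories.json"
REQUIRED_KEYS = {"llm_name", "tags", "max_tokens", "model_type", "is_tools"}
ALLOWED_TYPES = {"chat", "embedding", "image2text", "speech2text", "tts", "rerank"}

with open(CATALOG, encoding="utf-8") as f:
    factories = json.load(f).get("factory_llm_infos", [])

comet = next(fac for fac in factories if fac["name"] == "CometAPI")
for model in comet["llm"]:
    missing = REQUIRED_KEYS - model.keys()
    assert not missing, f"{model.get('llm_name')}: missing keys {missing}"
    assert model["model_type"] in ALLOWED_TYPES, model["llm_name"]
    assert isinstance(model["max_tokens"], int) and model["max_tokens"] > 0

print(f"CometAPI entry looks consistent: {len(comet['llm'])} models")
```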
````diff
@@ -65,6 +65,7 @@ A complete list of models supported by RAGFlow, which will continue to expand.
 | 01.AI | :heavy_check_mark: | | | | | |
 | DeepInfra | :heavy_check_mark: | :heavy_check_mark: | | | :heavy_check_mark: | :heavy_check_mark: |
 | 302.AI | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | |
+| CometAPI | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | |
 
 ```mdx-code-block
 </APITable>
````
```diff
@@ -1356,6 +1356,15 @@ class Ai302Chat(Base):
         super().__init__(key, model_name, base_url, **kwargs)
 
 
+class CometChat(Base):
+    _FACTORY_NAME = "CometAPI"
+
+    def __init__(self, key, model_name, base_url="https://api.cometapi.com/v1", **kwargs):
+        if not base_url:
+            base_url = "https://api.cometapi.com/v1"
+        super().__init__(key, model_name, base_url, **kwargs)
+
+
 class TokenPonyChat(Base):
     _FACTORY_NAME = "TokenPony"
 
```
```diff
@@ -958,3 +958,12 @@ class Ai302Embed(Base):
         if not base_url:
             base_url = "https://api.302.ai/v1/embeddings"
         super().__init__(key, model_name, base_url)
+
+
+class CometEmbed(Base):
+    _FACTORY_NAME = "CometAPI"
+
+    def __init__(self, key, model_name, base_url="https://api.cometapi.com/v1/embeddings"):
+        if not base_url:
+            base_url = "https://api.cometapi.com/v1/embeddings"
+        super().__init__(key, model_name, base_url)
```
```diff
@@ -234,3 +234,13 @@ class DeepInfraSeq2txt(Base):
 
         self.client = OpenAI(api_key=key, base_url=base_url)
         self.model_name = model_name
+
+
+class CometSeq2txt(Base):
+    _FACTORY_NAME = "CometAPI"
+
+    def __init__(self, key, model_name="whisper-1", base_url="https://api.cometapi.com/v1", **kwargs):
+        if not base_url:
+            base_url = "https://api.cometapi.com/v1"
+        self.client = OpenAI(api_key=key, base_url=base_url)
+        self.model_name = model_name
```
```diff
@@ -394,3 +394,11 @@ class DeepInfraTTS(OpenAITTS):
         if not base_url:
             base_url = "https://api.deepinfra.com/v1/openai"
         super().__init__(key, model_name, base_url, **kwargs)
+
+class CometAPITTS(OpenAITTS):
+    _FACTORY_NAME = "CometAPI"
+
+    def __init__(self, key, model_name, base_url="https://api.cometapi.com/v1", **kwargs):
+        if not base_url:
+            base_url = "https://api.cometapi.com/v1"
+        super().__init__(key, model_name, base_url, **kwargs)
```
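The four backend classes above (CometChat, CometEmbed, CometSeq2txt, CometAPITTS) only pin CometAPI's OpenAI-compatible endpoint and otherwise reuse the existing OpenAI-style wrappers. Outside RAGFlow, the equivalent raw calls look roughly like this sketch; the model names come from the catalog added earlier, the API key is a placeholder, and the snippet is illustrative rather than the code path RAGFlow itself executes.

```python
from openai import OpenAI

# Illustrative only: CometAPI speaks the OpenAI wire protocol, so the plain
# OpenAI SDK works once base_url is overridden. The key is a placeholder.
client = OpenAI(api_key="YOUR_COMETAPI_KEY", base_url="https://api.cometapi.com/v1")

# Chat completion (what CometChat ultimately delegates to)
chat = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
)
print(chat.choices[0].message.content)

# Embeddings (CometEmbed defaults to the /embeddings endpoint)
emb = client.embeddings.create(model="text-embedding-3-small", input=["hello world"])
print(len(emb.data[0].embedding))
```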
New file: web/src/assets/svg/llm/cometapi.svg (6 lines, 96 KiB). File diff suppressed because one or more lines are too long.
```diff
@@ -56,6 +56,7 @@ export enum LLMFactory {
   XAI = 'xAI',
   TokenPony = 'TokenPony',
   Meituan = 'Meituan',
+  CometAPI = 'CometAPI',
 }
 
 // Please lowercase the file name
@@ -117,4 +118,5 @@ export const IconMap = {
   [LLMFactory.XAI]: 'xai',
   [LLMFactory.TokenPony]: 'token-pony',
   [LLMFactory.Meituan]: 'longcat',
+  [LLMFactory.CometAPI]: 'cometapi',
 };
```
```diff
@@ -44,6 +44,7 @@ const orderFactoryList = [
   LLMFactory.Ollama,
   LLMFactory.Xinference,
   LLMFactory.Ai302,
+  LLMFactory.CometAPI,
 ];
 
 export const sortLLmFactoryListBySpecifiedOrder = (list: IFactory[]) => {
```