feat: Add n1n provider (#12680)
This PR adds n1n as an LLM provider to RAGFlow.

Co-authored-by: Qun <qun@ip-10-5-5-38.us-west-2.compute.internal>
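Note: n1n is wired through RAGFlow's OpenAI-compatible path in this PR (see the "openai/" LiteLLM prefix and the https://api.n1n.ai/v1 default base URL in the hunks below). A minimal sketch of what that implies for end users, assuming n1n exposes an OpenAI-style chat-completions endpoint; the API key placeholder and the model choice are illustrative only:

# Sketch only: exercises the n1n endpoint the same way RAGFlow's
# OpenAI-compatible providers do. Assumes an OpenAI-style API;
# "N1N_API_KEY" is a placeholder, the model is one registered below.
from openai import OpenAI

client = OpenAI(api_key="N1N_API_KEY", base_url="https://api.n1n.ai/v1")

resp = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "Hello from RAGFlow"}],
)
print(resp.choices[0].message.content)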
@@ -5539,6 +5539,43 @@
             "status": "1",
             "rank": "910",
             "llm": []
+        },
+        {
+            "name": "n1n",
+            "logo": "",
+            "tags": "LLM",
+            "status": "1",
+            "rank": "900",
+            "llm": [
+                {
+                    "llm_name": "gpt-4o-mini",
+                    "tags": "LLM,CHAT,128K,IMAGE2TEXT",
+                    "max_tokens": 128000,
+                    "model_type": "chat",
+                    "is_tools": true
+                },
+                {
+                    "llm_name": "gpt-4o",
+                    "tags": "LLM,CHAT,128K,IMAGE2TEXT",
+                    "max_tokens": 128000,
+                    "model_type": "chat",
+                    "is_tools": true
+                },
+                {
+                    "llm_name": "gpt-3.5-turbo",
+                    "tags": "LLM,CHAT,4K",
+                    "max_tokens": 4096,
+                    "model_type": "chat",
+                    "is_tools": false
+                },
+                {
+                    "llm_name": "deepseek-chat",
+                    "tags": "LLM,CHAT,128K",
+                    "max_tokens": 128000,
+                    "model_type": "chat",
+                    "is_tools": true
+                }
+            ]
         }
     ]
 }
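The registry entry above is plain JSON, so its shape is easy to mirror in code. An illustrative sketch (not part of this PR) of reading such an entry and listing its tool-capable chat models; the file path and the "factory_llm_infos" top-level key are assumptions based on the visible diff, not a documented RAGFlow API:

# Illustrative only: mirrors the structure of the factory entry added above.
# The path and the "factory_llm_infos" key are assumptions for this example.
import json

with open("conf/llm_factories.json", encoding="utf-8") as f:
    factories = json.load(f)["factory_llm_infos"]

n1n = next(x for x in factories if x["name"] == "n1n")
tool_models = [m["llm_name"] for m in n1n["llm"] if m.get("is_tools")]
print(tool_models)  # expected: ['gpt-4o-mini', 'gpt-4o', 'deepseek-chat']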
@@ -56,6 +56,7 @@ class SupportedLiteLLMProvider(StrEnum):
     GPUStack = "GPUStack"
     OpenAI = "OpenAI"
     Azure_OpenAI = "Azure-OpenAI"
+    n1n = "n1n"


 FACTORY_DEFAULT_BASE_URL = {
@@ -81,6 +82,7 @@ FACTORY_DEFAULT_BASE_URL = {
     SupportedLiteLLMProvider.MiniMax: "https://api.minimaxi.com/v1",
     SupportedLiteLLMProvider.DeerAPI: "https://api.deerapi.com/v1",
     SupportedLiteLLMProvider.OpenAI: "https://api.openai.com/v1",
+    SupportedLiteLLMProvider.n1n: "https://api.n1n.ai/v1",
 }


@@ -118,6 +120,7 @@ LITELLM_PROVIDER_PREFIX = {
     SupportedLiteLLMProvider.GPUStack: "openai/",
     SupportedLiteLLMProvider.OpenAI: "openai/",
     SupportedLiteLLMProvider.Azure_OpenAI: "azure/",
+    SupportedLiteLLMProvider.n1n: "openai/",
 }

 ChatModel = globals().get("ChatModel", {})
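The two maps above are what make n1n routable through LiteLLM: the prefix selects the "openai/" adapter and the default base URL points it at n1n. A rough sketch of how such a lookup could translate into a LiteLLM call, assuming this is roughly how RAGFlow's LiteLLMBase consumes these tables (the actual wiring is outside this diff):

# Sketch only: combines the provider prefix and default base URL added
# above into a LiteLLM call; the surrounding RAGFlow wiring is assumed.
import litellm

prefix = "openai/"                    # LITELLM_PROVIDER_PREFIX for n1n
base_url = "https://api.n1n.ai/v1"    # FACTORY_DEFAULT_BASE_URL for n1n

resp = litellm.completion(
    model=prefix + "gpt-4o-mini",     # -> "openai/gpt-4o-mini"
    api_key="N1N_API_KEY",            # placeholder
    api_base=base_url,
    messages=[{"role": "user", "content": "ping"}],
)
print(resp.choices[0].message.content)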
@@ -1165,6 +1165,15 @@ class TokenPonyChat(Base):
         super().__init__(key, model_name, base_url, **kwargs)
+
+
+class N1nChat(Base):
+    _FACTORY_NAME = "n1n"
+
+    def __init__(self, key, model_name, base_url="https://api.n1n.ai/v1", **kwargs):
+        if not base_url:
+            base_url = "https://api.n1n.ai/v1"
+        super().__init__(key, model_name, base_url, **kwargs)
 
 
 class LiteLLMBase(ABC):
     _FACTORY_NAME = [
         "Tongyi-Qianwen",
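N1nChat adds nothing beyond the default base URL; everything else is inherited from the OpenAI-compatible Base class. A hedged usage sketch, assuming Base exposes the chat(system, history, gen_conf) interface used by the other chat wrappers in this module (that signature and its (answer, tokens) return value are assumptions, not shown in this diff):

# Sketch only: instantiation mirrors the constructor added above; the
# chat(...) call assumes the Base class interface used by RAGFlow's other
# OpenAI-compatible chat models and is not part of this diff.
model = N1nChat(key="N1N_API_KEY", model_name="gpt-4o-mini", base_url="")
# An empty base_url falls back to https://api.n1n.ai/v1 (see __init__ above).

answer, token_count = model.chat(
    system="You are a helpful assistant.",
    history=[{"role": "user", "content": "Summarize this PR in one line."}],
    gen_conf={"temperature": 0.2},
)
print(answer, token_count)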
web/src/assets/svg/llm/n1n.svg (new file, 4 lines, 441 B)
@@ -0,0 +1,4 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+  <rect x="2" y="2" width="20" height="20" rx="5" ry="5" fill="#000000" stroke="none" />
+  <text x="50%" y="54%" dominant-baseline="middle" text-anchor="middle" font-family="Arial, sans-serif" font-size="8" fill="#ffffff" font-weight="bold">n1n</text>
+</svg>
@@ -83,6 +83,7 @@ const svgIcons = [
   LLMFactory.StepFun,
   LLMFactory.MinerU,
   LLMFactory.PaddleOCR,
+  LLMFactory.N1n,
   // LLMFactory.DeerAPI,
 ];

@@ -62,6 +62,7 @@ export enum LLMFactory {
   Builtin = 'Builtin',
   MinerU = 'MinerU',
   PaddleOCR = 'PaddleOCR',
+  N1n = 'n1n',
 }

 // Please lowercase the file name
@@ -129,6 +130,7 @@ export const IconMap = {
   [LLMFactory.Builtin]: 'builtin',
   [LLMFactory.MinerU]: 'mineru',
   [LLMFactory.PaddleOCR]: 'paddleocr',
+  [LLMFactory.N1n]: 'n1n',
 };

 export const APIMapUrl = {
@@ -181,4 +183,5 @@ export const APIMapUrl = {
   [LLMFactory.TokenPony]: 'https://www.tokenpony.cn/#/user/keys',
   [LLMFactory.DeepInfra]: 'https://deepinfra.com/dash/api_keys',
   [LLMFactory.PaddleOCR]: 'https://www.paddleocr.ai/latest/',
+  [LLMFactory.N1n]: 'https://docs.n1n.ai',
 };