feat: Add n1n provider (#12680)

This PR adds n1n as an LLM provider to RAGFlow: it registers the n1n factory and an initial set of chat models in the model configuration, routes requests through the LiteLLM layer using the `openai/` prefix with a default base URL of https://api.n1n.ai/v1, adds an `N1nChat` wrapper class, and exposes the provider in the web UI (icon, factory enum, icon map, and API-key documentation link).
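
For quick verification, here is a minimal sketch (not part of this change) of how the registered endpoint can be exercised directly, assuming n1n's API is OpenAI-compatible as the `openai/` prefix and base URL in the diff suggest; the API key value is a placeholder.

```python
# Minimal sketch: call the n1n endpoint directly with the OpenAI client.
# The base URL and model name come from this PR; the API key is a placeholder.
from openai import OpenAI

client = OpenAI(
    api_key="YOUR_N1N_API_KEY",        # placeholder, see https://docs.n1n.ai
    base_url="https://api.n1n.ai/v1",  # default base URL registered in this PR
)

resp = client.chat.completions.create(
    model="gpt-4o-mini",  # one of the models listed in the new factory entry
    messages=[{"role": "user", "content": "Hello from RAGFlow!"}],
)
print(resp.choices[0].message.content)
```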

Co-authored-by: Qun <qun@ip-10-5-5-38.us-west-2.compute.internal>
Author: n1n.ai
Date: 2026-01-19 13:12:42 +08:00
Committed by: GitHub
Parent: 9da48ab0bd
Commit: f3d347f55f
6 changed files with 58 additions and 1 deletion

@@ -5539,6 +5539,43 @@
            "status": "1",
            "rank": "910",
            "llm": []
        },
        {
            "name": "n1n",
            "logo": "",
            "tags": "LLM",
            "status": "1",
            "rank": "900",
            "llm": [
                {
                    "llm_name": "gpt-4o-mini",
                    "tags": "LLM,CHAT,128K,IMAGE2TEXT",
                    "max_tokens": 128000,
                    "model_type": "chat",
                    "is_tools": true
                },
                {
                    "llm_name": "gpt-4o",
                    "tags": "LLM,CHAT,128K,IMAGE2TEXT",
                    "max_tokens": 128000,
                    "model_type": "chat",
                    "is_tools": true
                },
                {
                    "llm_name": "gpt-3.5-turbo",
                    "tags": "LLM,CHAT,4K",
                    "max_tokens": 4096,
                    "model_type": "chat",
                    "is_tools": false
                },
                {
                    "llm_name": "deepseek-chat",
                    "tags": "LLM,CHAT,128K",
                    "max_tokens": 128000,
                    "model_type": "chat",
                    "is_tools": true
                }
            ]
        }
    ]
}

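The factory entry above is plain registry data. As a rough illustration only (assuming it lands in RAGFlow's usual registry file, `conf/llm_factories.json`, with the standard `factory_llm_infos` layout), it can be inspected like this:

```python
# Sketch: list the chat models registered for n1n.
# Assumes the entry above is part of conf/llm_factories.json (RAGFlow's model registry).
import json

with open("conf/llm_factories.json", encoding="utf-8") as f:
    factories = json.load(f)["factory_llm_infos"]

n1n = next(entry for entry in factories if entry["name"] == "n1n")
for llm in n1n["llm"]:
    print(llm["llm_name"], llm["max_tokens"], "tools" if llm["is_tools"] else "no tools")
```
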
@@ -56,6 +56,7 @@ class SupportedLiteLLMProvider(StrEnum):
    GPUStack = "GPUStack"
    OpenAI = "OpenAI"
    Azure_OpenAI = "Azure-OpenAI"
    n1n = "n1n"
FACTORY_DEFAULT_BASE_URL = {
@@ -81,6 +82,7 @@ FACTORY_DEFAULT_BASE_URL = {
    SupportedLiteLLMProvider.MiniMax: "https://api.minimaxi.com/v1",
    SupportedLiteLLMProvider.DeerAPI: "https://api.deerapi.com/v1",
    SupportedLiteLLMProvider.OpenAI: "https://api.openai.com/v1",
    SupportedLiteLLMProvider.n1n: "https://api.n1n.ai/v1",
}
@@ -118,6 +120,7 @@ LITELLM_PROVIDER_PREFIX = {
    SupportedLiteLLMProvider.GPUStack: "openai/",
    SupportedLiteLLMProvider.OpenAI: "openai/",
    SupportedLiteLLMProvider.Azure_OpenAI: "azure/",
    SupportedLiteLLMProvider.n1n: "openai/",
}
ChatModel = globals().get("ChatModel", {})

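Together, the three additions above make n1n resolve to LiteLLM's OpenAI adapter with n1n's default base URL. The sketch below is illustrative only, showing how those values translate into a plain `litellm` call rather than RAGFlow's exact internal call path:

```python
# Illustrative only: how the new mapping entries translate into a litellm call.
# "openai/" corresponds to LITELLM_PROVIDER_PREFIX for n1n;
# the api_base corresponds to FACTORY_DEFAULT_BASE_URL for n1n.
import litellm

resp = litellm.completion(
    model="openai/gpt-4o-mini",        # provider prefix + model name
    api_base="https://api.n1n.ai/v1",  # n1n default base URL
    api_key="YOUR_N1N_API_KEY",        # placeholder
    messages=[{"role": "user", "content": "ping"}],
)
print(resp.choices[0].message.content)
```
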
@@ -1165,6 +1165,15 @@ class TokenPonyChat(Base):
        super().__init__(key, model_name, base_url, **kwargs)


class N1nChat(Base):
    _FACTORY_NAME = "n1n"

    def __init__(self, key, model_name, base_url="https://api.n1n.ai/v1", **kwargs):
        if not base_url:
            base_url = "https://api.n1n.ai/v1"
        super().__init__(key, model_name, base_url, **kwargs)


class LiteLLMBase(ABC):
    _FACTORY_NAME = [
        "Tongyi-Qianwen",

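A short usage sketch for the new class follows; the `chat(system, history, gen_conf)` call and its `(answer, token_count)` return are assumed to match RAGFlow's other chat wrappers rather than guaranteed by this diff, and the import path is the usual chat_model module.

```python
# Sketch: instantiate the new wrapper and send one message.
# The chat() signature below is assumed from RAGFlow's other chat_model classes.
from rag.llm.chat_model import N1nChat  # assumed module path

# base_url falls back to https://api.n1n.ai/v1 when not provided
mdl = N1nChat(key="YOUR_N1N_API_KEY", model_name="gpt-4o-mini")

answer, used_tokens = mdl.chat(
    system="You are a helpful assistant.",
    history=[{"role": "user", "content": "Hello"}],
    gen_conf={"temperature": 0.7},
)
print(answer, used_tokens)
```
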
@@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" width="48" height="48" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<rect x="2" y="2" width="20" height="20" rx="5" ry="5" fill="#000000" stroke="none" />
<text x="50%" y="54%" dominant-baseline="middle" text-anchor="middle" font-family="Arial, sans-serif" font-size="8" fill="#ffffff" font-weight="bold">n1n</text>
</svg>

@@ -83,6 +83,7 @@ const svgIcons = [
  LLMFactory.StepFun,
  LLMFactory.MinerU,
  LLMFactory.PaddleOCR,
  LLMFactory.N1n,
  // LLMFactory.DeerAPI,
];

@@ -62,6 +62,7 @@ export enum LLMFactory {
  Builtin = 'Builtin',
  MinerU = 'MinerU',
  PaddleOCR = 'PaddleOCR',
  N1n = 'n1n',
}
// Please lowercase the file name
@@ -129,6 +130,7 @@ export const IconMap = {
  [LLMFactory.Builtin]: 'builtin',
  [LLMFactory.MinerU]: 'mineru',
  [LLMFactory.PaddleOCR]: 'paddleocr',
  [LLMFactory.N1n]: 'n1n',
};
export const APIMapUrl = {
@@ -181,4 +183,5 @@ export const APIMapUrl = {
  [LLMFactory.TokenPony]: 'https://www.tokenpony.cn/#/user/keys',
  [LLMFactory.DeepInfra]: 'https://deepinfra.com/dash/api_keys',
  [LLMFactory.PaddleOCR]: 'https://www.paddleocr.ai/latest/',
  [LLMFactory.N1n]: 'https://docs.n1n.ai',
};