feat: Add n1n provider (#12680)

This PR adds n1n as an LLM provider to RAGFlow, registering it as an OpenAI-compatible endpoint (default base URL https://api.n1n.ai/v1) routed through LiteLLM.

Co-authored-by: Qun <qun@ip-10-5-5-38.us-west-2.compute.internal>
Author: n1n.ai
Date: 2026-01-19 13:12:42 +08:00
Committed by: GitHub
Parent: 9da48ab0bd
Commit: f3d347f55f
6 changed files with 58 additions and 1 deletion


@@ -56,6 +56,7 @@ class SupportedLiteLLMProvider(StrEnum):
    GPUStack = "GPUStack"
    OpenAI = "OpenAI"
    Azure_OpenAI = "Azure-OpenAI"
    n1n = "n1n"


FACTORY_DEFAULT_BASE_URL = {
@@ -81,6 +82,7 @@ FACTORY_DEFAULT_BASE_URL = {
    SupportedLiteLLMProvider.MiniMax: "https://api.minimaxi.com/v1",
    SupportedLiteLLMProvider.DeerAPI: "https://api.deerapi.com/v1",
    SupportedLiteLLMProvider.OpenAI: "https://api.openai.com/v1",
    SupportedLiteLLMProvider.n1n: "https://api.n1n.ai/v1",
}
@@ -118,6 +120,7 @@ LITELLM_PROVIDER_PREFIX = {
    SupportedLiteLLMProvider.GPUStack: "openai/",
    SupportedLiteLLMProvider.OpenAI: "openai/",
    SupportedLiteLLMProvider.Azure_OpenAI: "azure/",
    SupportedLiteLLMProvider.n1n: "openai/",
}

ChatModel = globals().get("ChatModel", {})
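
For context, the three tables above work together when a chat request is dispatched through LiteLLM: the enum registers the provider name, FACTORY_DEFAULT_BASE_URL supplies an endpoint when the user leaves the base URL blank, and LITELLM_PROVIDER_PREFIX routes n1n through LiteLLM's OpenAI-compatible path. Below is a minimal sketch of that resolution step; resolve_litellm_target is a hypothetical helper for illustration (not RAGFlow's actual dispatch code) and assumes it lives alongside the tables defined above.

def resolve_litellm_target(provider: SupportedLiteLLMProvider, model_name: str, user_base_url: str | None = None):
    # Fall back to the provider's factory default when no base URL is configured.
    base_url = user_base_url or FACTORY_DEFAULT_BASE_URL.get(provider)
    # Prepend the LiteLLM routing prefix, e.g. "openai/" for n1n, so the request
    # goes through LiteLLM's OpenAI-compatible code path.
    prefix = LITELLM_PROVIDER_PREFIX.get(provider, "")
    return f"{prefix}{model_name}", base_url

# Example: resolves to ("openai/gpt-4o-mini", "https://api.n1n.ai/v1").
# The model name is a placeholder; n1n's actual catalogue may differ.
print(resolve_litellm_target(SupportedLiteLLMProvider.n1n, "gpt-4o-mini"))
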


@@ -1165,6 +1165,15 @@ class TokenPonyChat(Base):
        super().__init__(key, model_name, base_url, **kwargs)


class N1nChat(Base):
    _FACTORY_NAME = "n1n"

    def __init__(self, key, model_name, base_url="https://api.n1n.ai/v1", **kwargs):
        if not base_url:
            base_url = "https://api.n1n.ai/v1"
        super().__init__(key, model_name, base_url, **kwargs)
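
The new class only pins the default endpoint and defers everything else to the inherited Base wrapper. A minimal usage sketch follows (not part of the diff), assuming Base exposes the same chat(system, history, gen_conf) interface as the other OpenAI-compatible wrappers in this file and that N1nChat is importable from the chat model module; the API key and model name are placeholders.

# Illustrative only, under the assumptions stated above.
n1n_chat = N1nChat(key="YOUR_N1N_API_KEY", model_name="gpt-4o-mini")
answer, used_tokens = n1n_chat.chat(
    system="You are a helpful assistant.",
    history=[{"role": "user", "content": "Say hello."}],
    gen_conf={"temperature": 0.7},
)
print(answer)
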
class LiteLLMBase(ABC):
    _FACTORY_NAME = [
        "Tongyi-Qianwen",