Feat: Add GPT-5.2 & pro (#11929)

### What problem does this PR solve?

Feat: Add GPT-5.2 & pro

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
Author: Magicbook1108
Date: 2025-12-12 17:35:08 +08:00
Committed by: GitHub
Parent: 0f0fb53256
Commit: 948bc93786
3 changed files with 19 additions and 9 deletions

#### File 1 of 3: model factory registry (JSON)

```diff
@@ -7,6 +7,20 @@
   "status": "1",
   "rank": "999",
   "llm": [
+    {
+      "llm_name": "gpt-5.2-pro",
+      "tags": "LLM,CHAT,400k,IMAGE2TEXT",
+      "max_tokens": 400000,
+      "model_type": "chat",
+      "is_tools": true
+    },
+    {
+      "llm_name": "gpt-5.2",
+      "tags": "LLM,CHAT,400k,IMAGE2TEXT",
+      "max_tokens": 400000,
+      "model_type": "chat",
+      "is_tools": true
+    },
     {
       "llm_name": "gpt-5.1",
       "tags": "LLM,CHAT,400k,IMAGE2TEXT",
```

#### File 2 of 3: LiteLLM provider constants

```diff
@@ -54,6 +54,7 @@ class SupportedLiteLLMProvider(StrEnum):
     MiniMax = "MiniMax"
     DeerAPI = "DeerAPI"
     GPUStack = "GPUStack"
+    OpenAI = "OpenAI"


 FACTORY_DEFAULT_BASE_URL = {
@@ -78,6 +79,7 @@ FACTORY_DEFAULT_BASE_URL = {
     SupportedLiteLLMProvider.ZHIPU_AI: "https://open.bigmodel.cn/api/paas/v4",
     SupportedLiteLLMProvider.MiniMax: "https://api.minimaxi.com/v1",
     SupportedLiteLLMProvider.DeerAPI: "https://api.deerapi.com/v1",
+    SupportedLiteLLMProvider.OpenAI: "https://api.openai.com/v1",
 }
@@ -113,6 +115,8 @@ LITELLM_PROVIDER_PREFIX = {
     SupportedLiteLLMProvider.MiniMax: "openai/",
     SupportedLiteLLMProvider.DeerAPI: "openai/",
     SupportedLiteLLMProvider.GPUStack: "openai/",
+    SupportedLiteLLMProvider.OpenAI: "openai/",
 }

 ChatModel = globals().get("ChatModel", {})
```
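Registering `OpenAI` in the provider enum and the two lookup tables is what lets the LiteLLM path address OpenAI models directly. The sketch below (the helper function is hypothetical; the enum member, default base URL, and prefix values are taken from the diff) shows how these tables typically combine into a litellm model id and endpoint:

```python
from enum import StrEnum

class SupportedLiteLLMProvider(StrEnum):
    # Only the member added by this PR is reproduced here.
    OpenAI = "OpenAI"

# Values added by this PR.
FACTORY_DEFAULT_BASE_URL = {SupportedLiteLLMProvider.OpenAI: "https://api.openai.com/v1"}
LITELLM_PROVIDER_PREFIX = {SupportedLiteLLMProvider.OpenAI: "openai/"}

def resolve_litellm_target(provider: SupportedLiteLLMProvider, model_name: str, base_url: str | None = None):
    """Hypothetical helper: build the prefixed litellm model id and pick a base URL."""
    model = LITELLM_PROVIDER_PREFIX.get(provider, "") + model_name
    return model, base_url or FACTORY_DEFAULT_BASE_URL.get(provider)

# ('openai/gpt-5.2', 'https://api.openai.com/v1')
print(resolve_litellm_target(SupportedLiteLLMProvider.OpenAI, "gpt-5.2"))
```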

#### File 3 of 3: chat model classes

```diff
@@ -487,15 +487,6 @@ class Base(ABC):
         assert False, "Shouldn't be here."

-
-class GptTurbo(Base):
-    _FACTORY_NAME = "OpenAI"
-
-    def __init__(self, key, model_name="gpt-3.5-turbo", base_url="https://api.openai.com/v1", **kwargs):
-        if not base_url:
-            base_url = "https://api.openai.com/v1"
-        super().__init__(key, model_name, base_url, **kwargs)
-

 class XinferenceChat(Base):
     _FACTORY_NAME = "Xinference"
@@ -1227,6 +1218,7 @@ class LiteLLMBase(ABC):
         "MiniMax",
         "DeerAPI",
         "GPUStack",
+        "OpenAI",
     ]

     def __init__(self, key, model_name, base_url=None, **kwargs):
```
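With `"OpenAI"` added to the factories supported by `LiteLLMBase`, the dedicated `GptTurbo` wrapper becomes redundant and is removed; OpenAI chat requests now go through litellm like the other providers in that list. A rough sketch of the resulting call (model name, key, and base URL below are placeholders, not values from this diff):

```python
import litellm

# Placeholder credentials and model; the "openai/" prefix and default base URL
# correspond to the tables added in the previous file.
response = litellm.completion(
    model="openai/gpt-5.2",
    messages=[{"role": "user", "content": "Hello"}],
    api_key="sk-...",
    api_base="https://api.openai.com/v1",
)
print(response.choices[0].message.content)
```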