Feat: add LongCat-Flash-Chat (#9973)

### What problem does this PR solve?

Add LongCat-Flash-Chat from Meituan, DeepSeek-V3.1 from SiliconFlow, and
kimi-k2-0905-preview and kimi-k2-turbo-preview from Moonshot.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
This commit is contained in:
Yongteng Lei
2025-09-08 19:00:52 +08:00
committed by GitHub
parent 2616f651c9
commit 936f27e9e5
4 changed files with 65 additions and 1 deletion

View File

@ -155,7 +155,10 @@ class Base(ABC):
def _chat_streamly(self, history, gen_conf, **kwargs):
logging.info("[HISTORY STREAMLY]" + json.dumps(history, ensure_ascii=False, indent=4))
reasoning_start = False
response = self.client.chat.completions.create(model=self.model_name, messages=history, stream=True, **gen_conf, stop=kwargs.get("stop"))
if kwargs.get("stop") or "stop" in gen_conf:
response = self.client.chat.completions.create(model=self.model_name, messages=history, stream=True, **gen_conf, stop=kwargs.get("stop"))
else:
response = self.client.chat.completions.create(model=self.model_name, messages=history, stream=True, **gen_conf)
for resp in response:
if not resp.choices:
continue
@ -1353,6 +1356,15 @@ class Ai302Chat(Base):
super().__init__(key, model_name, base_url, **kwargs)
class MeituanChat(Base):
    """Chat client for Meituan's LongCat models, served via an
    OpenAI-compatible endpoint.

    Delegates all request handling to :class:`Base`; this subclass only
    registers the factory name and pins the default API endpoint.
    """

    _FACTORY_NAME = "Meituan"

    def __init__(self, key, model_name, base_url="https://api.longcat.chat/openai", **kwargs):
        # Callers may pass None or "" explicitly; fall back to the
        # default LongCat endpoint in that case.
        super().__init__(key, model_name, base_url or "https://api.longcat.chat/openai", **kwargs)
class LiteLLMBase(ABC):
_FACTORY_NAME = ["Tongyi-Qianwen", "Bedrock", "Moonshot", "xAI", "DeepInfra", "Groq", "Cohere", "Gemini", "DeepSeek", "NVIDIA", "TogetherAI", "Anthropic", "Ollama"]