add model: qwen3-max and qwen3-vl series (#10256)
### What problem does this PR solve?

Adds the qwen3-max and qwen3-vl series models to the model list.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
```diff
@@ -402,7 +402,7 @@
         "is_tools": true
     },
     {
-        "llm_name": "qwen3-max-preview",
+        "llm_name": "qwen3-max",
         "tags": "LLM,CHAT,256k",
         "max_tokens": 256000,
         "model_type": "chat",
@@ -478,6 +478,27 @@
         "model_type": "chat",
         "is_tools": true
     },
+    {
+        "llm_name": "qwen3-vl-plus",
+        "tags": "LLM,CHAT,IMAGE2TEXT,256k",
+        "max_tokens": 256000,
+        "model_type": "image2text",
+        "is_tools": true
+    },
+    {
+        "llm_name": "qwen3-vl-235b-a22b-instruct",
+        "tags": "LLM,CHAT,IMAGE2TEXT,128k",
+        "max_tokens": 128000,
+        "model_type": "image2text",
+        "is_tools": true
+    },
+    {
+        "llm_name": "qwen3-vl-235b-a22b-thinking",
+        "tags": "LLM,CHAT,IMAGE2TEXT,128k",
+        "max_tokens": 128000,
+        "model_type": "image2text",
+        "is_tools": true
+    },
     {
         "llm_name": "qwen3-235b-a22b-instruct-2507",
         "tags": "LLM,CHAT,128k",
@@ -499,6 +520,20 @@
         "model_type": "chat",
         "is_tools": true
     },
+    {
+        "llm_name": "qwen3-next-80b-a3b-instruct",
+        "tags": "LLM,CHAT,128k",
+        "max_tokens": 128000,
+        "model_type": "chat",
+        "is_tools": true
+    },
+    {
+        "llm_name": "qwen3-next-80b-a3b-thinking",
+        "tags": "LLM,CHAT,128k",
+        "max_tokens": 128000,
+        "model_type": "chat",
+        "is_tools": true
+    },
     {
         "llm_name": "qwen3-0.6b",
         "tags": "LLM,CHAT,32k",
```
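For reviewers who want to double-check the edit locally, here is a minimal sketch that loads the model catalogue and confirms the new names are present. The file path `conf/llm_factories.json`, the top-level `factory_llm_infos` key, and the `Tongyi-Qianwen` factory name are assumptions about the repository layout, not taken from this diff; adjust them if your checkout differs.

```python
# Sanity-check sketch for the entries added in this commit.
# Assumptions (not confirmed by the diff itself): the catalogue lives at
# conf/llm_factories.json, its top-level key is "factory_llm_infos", and the
# Qwen models belong to the "Tongyi-Qianwen" factory.
import json

EXPECTED = {
    "qwen3-max",
    "qwen3-vl-plus",
    "qwen3-vl-235b-a22b-instruct",
    "qwen3-vl-235b-a22b-thinking",
    "qwen3-next-80b-a3b-instruct",
    "qwen3-next-80b-a3b-thinking",
}

with open("conf/llm_factories.json", encoding="utf-8") as f:
    factories = json.load(f)["factory_llm_infos"]

# Collect every llm_name declared under the assumed factory.
names = {
    llm["llm_name"]
    for factory in factories
    if factory.get("name") == "Tongyi-Qianwen"
    for llm in factory.get("llm", [])
}

missing = EXPECTED - names
print("missing entries:", sorted(missing) if missing else "none")
```

Running it from the repository root should print `missing entries: none` once the diff above is applied; any other output points at an entry that was dropped or misspelled.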