Merge pull request 'feature/ai-agent-openrouter' (#44) from feature/ai-agent-openrouter into release/v9.2.0

Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/desktop-sdk/pulls/44
Oleg Korshul
2025-11-17 16:01:02 +00:00
12 changed files with 800 additions and 121 deletions

View File

@@ -15,6 +15,7 @@
"@codemirror/lang-json": "^6.0.2",
"@codemirror/state": "^6.5.2",
"@codemirror/view": "^6.38.2",
"@openrouter/ai-sdk-provider": "^1.2.3",
"@radix-ui/react-avatar": "^1.1.10",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",
@@ -1444,6 +1445,47 @@
"node": ">= 8"
}
},
"node_modules/@openrouter/ai-sdk-provider": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@openrouter/ai-sdk-provider/-/ai-sdk-provider-1.2.3.tgz",
"integrity": "sha512-a6Nc8dPRHakRH9966YJ/HZJhLOds7DuPTscNZDoAr+Aw+tEFUlacSJMvb/b3gukn74mgbuaJRji9YOn62ipfVg==",
"license": "Apache-2.0",
"dependencies": {
"@openrouter/sdk": "^0.1.8"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"ai": "^5.0.0",
"zod": "^3.24.1 || ^v4"
}
},
"node_modules/@openrouter/sdk": {
"version": "0.1.11",
"resolved": "https://registry.npmjs.org/@openrouter/sdk/-/sdk-0.1.11.tgz",
"integrity": "sha512-OuPc8qqidL/PUM8+9WgrOfSR9+b6rKIWiezGcUJ54iPTdh+Gye5Qjut6hrLWlOCMZE7Z853gN90r1ft4iChj7Q==",
"license": "Apache-2.0",
"dependencies": {
"zod": "^3.25.0 || ^4.0.0"
},
"peerDependencies": {
"@tanstack/react-query": "^5",
"react": "^18 || ^19",
"react-dom": "^18 || ^19"
},
"peerDependenciesMeta": {
"@tanstack/react-query": {
"optional": true
},
"react": {
"optional": true
},
"react-dom": {
"optional": true
}
}
},
"node_modules/@opentelemetry/api": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
@@ -8303,7 +8345,6 @@
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
"license": "MIT",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}

View File

@@ -19,6 +19,7 @@
"@codemirror/lang-json": "^6.0.2",
"@codemirror/state": "^6.5.2",
"@codemirror/view": "^6.38.2",
"@openrouter/ai-sdk-provider": "^1.2.3",
"@radix-ui/react-avatar": "^1.1.10",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",

View File

@@ -1,6 +1,5 @@
import { useRef, useState } from "react";
import { Item } from "@radix-ui/react-dropdown-menu";
import { ReactSVG } from "react-svg";
import { cn } from "@/lib/utils";
@@ -117,10 +116,12 @@ const DropDownItem = ({
) : (
icon ?? null
)}
<span className={cn(
<span
className={cn(
"truncate font-normal text-[14px] leading-[20px] text-[var(--drop-down-menu-item-color)]",
withSpace ? "ms-[28px]" : ""
)}>
)}
>
{text}
</span>
</div>
@@ -145,7 +146,9 @@
dropdownRef={submenuRef}
/>
) : null}
{checked ? <ReactSVG src={CheckedIconUrl} /> : null}
{checked ? (
<IconButton iconName={CheckedIconUrl} size={16} disableHover isStroke />
) : null}
{withToggle && onToggleChange ? (
<div onClick={(e) => e.stopPropagation()}>
<ToggleButton

View File

@@ -11,7 +11,12 @@ export type Thread = {
lastEditDate?: number;
};
export type ProviderType = "anthropic" | "ollama" | "openai" | "together";
export type ProviderType =
| "anthropic"
| "ollama"
| "openai"
| "together"
| "openrouter";
export type Model = {
id: string;

View File

@@ -43,6 +43,7 @@ const ServersSettings = () => {
servers["web-search"][0].name,
!webSearchEnabled
);
window.dispatchEvent(new CustomEvent("tools-changed"));
},
},
{ text: "", onClick: () => {}, isSeparator: true },

View File

@@ -8,6 +8,7 @@ import { anthropicProvider, AnthropicProvider } from "./anthropic";
import { ollamaProvider, OllamaProvider } from "./ollama";
import { openaiProvider, OpenAIProvider } from "./openai";
import { togetherProvider, TogetherProvider } from "./together";
import { openrouterProvider, OpenRouterProvider } from "./openrouter";
import { SYSTEM_PROMPT } from "./Providers.utils";
@@ -24,7 +25,8 @@ class Provider {
| AnthropicProvider
| OllamaProvider
| OpenAIProvider
| TogetherProvider;
| TogetherProvider
| OpenRouterProvider;
currentProviderInfo?: TProvider;
currentProviderType?: ProviderType;
@@ -32,12 +34,14 @@
ollamaProvider: OllamaProvider;
openaiProvider: OpenAIProvider;
togetherProvider: TogetherProvider;
openrouterProvider: OpenRouterProvider;
constructor() {
this.anthropicProvider = anthropicProvider;
this.ollamaProvider = ollamaProvider;
this.openaiProvider = openaiProvider;
this.togetherProvider = togetherProvider;
this.openrouterProvider = openrouterProvider;
}
setCurrentProvider = (provider?: TProvider) => {
@@ -71,6 +75,11 @@
this.currentProviderType = "together";
break;
case "openrouter":
this.currentProvider = openrouterProvider;
this.currentProviderType = "openrouter";
break;
default:
this.currentProvider = undefined;
this.currentProviderType = undefined;
@@ -161,7 +170,13 @@
baseUrl: this.togetherProvider.getBaseUrl(),
};
return [anthropic, ollama, openai, together];
const openrouter = {
type: "openrouter" as ProviderType,
name: this.openrouterProvider.getName(),
baseUrl: this.openrouterProvider.getBaseUrl(),
};
return [anthropic, ollama, openai, together, openrouter];
};
getProviderInfo = (type: ProviderType) => {
@@ -193,6 +208,13 @@
baseUrl: this.togetherProvider.getBaseUrl(),
};
if (type === "openrouter")
return {
type,
name: this.openrouterProvider.getName(),
baseUrl: this.openrouterProvider.getBaseUrl(),
};
return {
name: "",
baseUrl: "",
@@ -208,6 +230,9 @@
if (type === "together") return this.togetherProvider.checkProvider(data);
if (type === "openrouter")
return this.openrouterProvider.checkProvider(data);
return false;
};
@@ -240,6 +265,12 @@
apiKey: p.key,
});
if (p.type === "openrouter")
return this.openrouterProvider.getProviderModels({
url: p.baseUrl,
apiKey: p.key,
});
return null; // Explicitly return null for unsupported types
})
.filter((action): action is Promise<Model[]> => action !== null); // Filter out null values
@@ -253,7 +284,8 @@
provider.type === "anthropic" ||
provider.type === "ollama" ||
provider.type === "openai" ||
provider.type === "together"
provider.type === "together" ||
provider.type === "openrouter"
) {
const model = fetchedModels[actionIndex];
if (

View File

@@ -126,6 +126,7 @@ class OpenAIProvider
model: this.modelKey,
tools: this.tools,
stream: true,
temperature: 0,
});
this.prevMessages.push(...convertedMessage);

View File

@@ -0,0 +1,115 @@
import type {
ThreadMessageLike,
ToolCallMessagePart,
} from "@assistant-ui/react";
import cloneDeep from "lodash.clonedeep";
import type { ChatCompletionChunk } from "openai/resources/chat/completions";
export const handleTextMessage = (
responseMessage: ThreadMessageLike,
chunk: ChatCompletionChunk.Choice,
afterToolCall?: boolean
) => {
// Return early if no content in delta
if (!chunk.delta.content) return responseMessage;
let newResponseMessage = cloneDeep(responseMessage);
const content = newResponseMessage.content;
if (typeof content === "string") return newResponseMessage;
const lstContent = content[content.length - 1];
if (
(!lstContent || afterToolCall) &&
typeof responseMessage.content !== "string"
) {
newResponseMessage = {
...newResponseMessage,
content: [
...content,
{
type: "text",
text: chunk.delta.content,
},
],
};
}
if (
lstContent &&
typeof lstContent !== "string" &&
typeof content !== "string"
) {
if (lstContent.type === "text") {
const text = lstContent.text + chunk.delta.content;
const newLstContent = cloneDeep({ ...lstContent, text });
newResponseMessage = {
...newResponseMessage,
content: [...content.slice(0, -1), newLstContent],
};
}
}
return newResponseMessage;
};
export const handleToolCall = (
responseMessage: ThreadMessageLike,
chunk: ChatCompletionChunk.Choice
) => {
let newResponseMessage = cloneDeep(responseMessage);
if (!chunk.delta.tool_calls || typeof newResponseMessage.content === "string")
return newResponseMessage;
const toolCallDelta = chunk.delta.tool_calls[0];
const lastContent =
newResponseMessage.content[newResponseMessage.content.length - 1];
if (!lastContent || lastContent?.type !== "tool-call") {
const toolCall: ToolCallMessagePart = {
type: "tool-call",
args: {},
argsText: toolCallDelta?.function?.arguments ?? "",
toolName: toolCallDelta?.function?.name ?? "",
toolCallId: toolCallDelta?.id ?? "",
};
newResponseMessage = {
...newResponseMessage,
content: [...newResponseMessage.content, toolCall],
};
} else {
const toolCall = lastContent;
const argsText =
toolCall.argsText + (toolCallDelta?.function?.arguments ?? "");
const name = toolCall.toolName || toolCallDelta?.function?.name || "";
const toolCallId = toolCall.toolCallId || toolCallDelta?.id || "";
let parsedArgs = {};
try {
parsedArgs = JSON.parse(argsText ?? "{}");
} catch {
// ignore
}
const newToolCall: ToolCallMessagePart = {
...toolCall,
args: parsedArgs,
argsText,
toolName: name,
toolCallId,
};
newResponseMessage = {
...newResponseMessage,
content: [...newResponseMessage.content.slice(0, -1), newToolCall],
};
}
return newResponseMessage;
};
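
For reference, a sketch of how these two reducers are meant to be driven; the real loop lives in the OpenRouter provider's `sendMessage` below, and the `choices` input here is a made-up sample:

import type { ThreadMessageLike } from "@assistant-ui/react";
import type { ChatCompletionChunk } from "openai/resources/chat/completions";
import { handleTextMessage, handleToolCall } from "./handlers";

// Fold a sequence of streamed choices into a single assistant message.
function foldChoices(choices: ChatCompletionChunk.Choice[]): ThreadMessageLike {
  let msg: ThreadMessageLike = { role: "assistant", content: [] };
  for (const choice of choices) {
    if (choice.delta.content) msg = handleTextMessage(msg, choice);
    if (choice.delta.tool_calls) msg = handleToolCall(msg, choice);
  }
  return msg;
}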

View File

@@ -0,0 +1,354 @@
import OpenAI from "openai";
import type {
ChatCompletionChunk,
ChatCompletionMessageParam,
ChatCompletionSystemMessageParam,
ChatCompletionTool,
ChatCompletionToolMessageParam,
} from "openai/resources/chat/completions";
import cloneDeep from "lodash.clonedeep";
import type { Model as OpenAIModel } from "openai/resources/models";
import type { ThreadMessageLike } from "@assistant-ui/react";
import type { Model, TMCPItem, TProvider } from "@/lib/types";
import type { BaseProvider } from "../base";
import type { SettingsProvider, TData, TErrorData } from "../settings";
import {
convertToolsToModelFormat,
convertMessagesToModelFormat,
} from "./utils";
import { handleTextMessage, handleToolCall } from "./handlers";
import { CREATE_TITLE_SYSTEM_PROMPT } from "../Providers.utils";
class OpenRouterProvider
implements
BaseProvider<ChatCompletionTool, ChatCompletionMessageParam, OpenAI>,
SettingsProvider
{
modelKey: string = "";
systemPrompt: string = "";
apiKey?: string;
url?: string;
provider?: TProvider;
prevMessages: ChatCompletionMessageParam[] = [];
tools: ChatCompletionTool[] = [];
client?: OpenAI;
stopStream = false;
constructor() {}
setProvider = (provider: TProvider) => {
this.provider = provider;
this.client = new OpenAI({
apiKey: provider.key,
baseURL: provider.baseUrl,
dangerouslyAllowBrowser: true,
});
if (provider.key) this.setApiKey(provider.key);
if (provider.baseUrl) this.setUrl(provider.baseUrl);
};
setModelKey = (modelKey: string) => {
this.modelKey = modelKey;
};
setSystemPrompt = (systemPrompt: string) => {
this.systemPrompt = systemPrompt;
};
setApiKey = (apiKey: string) => {
this.apiKey = apiKey;
if (this.client) this.client.apiKey = apiKey;
};
setUrl = (url: string) => {
this.url = url;
if (this.client) this.client.baseURL = url;
};
setPrevMessages = (prevMessages: ThreadMessageLike[]) => {
this.prevMessages = convertMessagesToModelFormat(prevMessages);
};
setTools = (tools: TMCPItem[]) => {
this.tools = convertToolsToModelFormat(tools);
};
async createChatName(message: string) {
try {
if (!this.client) return "";
const systemMessage: ChatCompletionSystemMessageParam = {
role: "system",
content: CREATE_TITLE_SYSTEM_PROMPT,
};
const response = await this.client.chat.completions.create({
messages: [systemMessage, { role: "user", content: message }],
model: this.modelKey,
stream: false,
});
const title = response.choices[0].message.content;
return title ?? message.substring(0, 25);
} catch {
return "";
}
}
async *sendMessage(
messages: ThreadMessageLike[],
afterToolCall?: boolean,
message?: ThreadMessageLike
): AsyncGenerator<
ThreadMessageLike | { isEnd: true; responseMessage: ThreadMessageLike }
> {
try {
if (!this.client) return;
const convertedMessage = convertMessagesToModelFormat(messages);
const systemMessage: ChatCompletionSystemMessageParam = {
role: "system",
content: this.systemPrompt,
};
const stream = await this.client.chat.completions.create({
messages: [systemMessage, ...this.prevMessages, ...convertedMessage],
model: this.modelKey,
tools: this.tools,
stream: true,
temperature: 0,
});
this.prevMessages.push(...convertedMessage);
let responseMessage: ThreadMessageLike =
afterToolCall && message
? cloneDeep(message)
: {
role: "assistant",
content: [],
};
let stop = false;
for await (const messageStreamEvent of stream) {
const chunks: ChatCompletionChunk["choices"] =
messageStreamEvent.choices;
chunks.forEach((chunk) => {
if (stop) return;
if (chunk.finish_reason) {
stop = true;
const curMsg = afterToolCall
? {
...responseMessage,
content:
typeof responseMessage.content === "string"
? responseMessage.content
: responseMessage.content.filter((part, index) => {
// Keep tool-call parts and new text parts added after tool execution
if (part.type === "tool-call") return true;
// Only keep text parts that were added after the original message
const originalLength = message?.content.length ?? 0;
return index >= originalLength;
}),
}
: responseMessage;
const providerMsg = convertMessagesToModelFormat([curMsg]);
this.prevMessages.push(...providerMsg);
return;
}
if (chunk.delta.content) {
responseMessage = handleTextMessage(
responseMessage,
chunk,
afterToolCall
);
}
if (
chunk.delta.tool_calls &&
typeof responseMessage.content !== "string"
) {
responseMessage = handleToolCall(responseMessage, chunk);
}
});
if (this.stopStream) {
const providerMsg = convertMessagesToModelFormat([responseMessage]);
this.prevMessages.push(...providerMsg);
stream.controller.abort();
this.stopStream = false;
yield {
isEnd: true,
responseMessage,
};
continue;
}
if (stop) {
yield {
isEnd: true,
responseMessage,
};
continue;
} else {
yield responseMessage;
}
}
} catch (e) {
console.log(e);
yield {
isEnd: true,
responseMessage: {
role: "assistant",
content: "",
status: {
type: "incomplete",
reason: "error",
error: e,
},
} as ThreadMessageLike,
};
}
}
async *sendMessageAfterToolCall(
message: ThreadMessageLike
): AsyncGenerator<
ThreadMessageLike | { isEnd: true; responseMessage: ThreadMessageLike }
> {
if (typeof message.content === "string") return message;
const result = message.content
.filter((c) => c.type === "tool-call")
.reverse()[0];
if (!result) return message;
const toolResult: ChatCompletionToolMessageParam = {
role: "tool",
content: result.result,
tool_call_id: result.toolCallId!,
};
this.prevMessages.push(toolResult);
yield* this.sendMessage([], true, message);
return message;
}
stopMessage = () => {
this.stopStream = true;
};
getName = () => {
return "OpenRouter";
};
getBaseUrl = () => {
return "https://openrouter.ai/api/v1";
};
checkProvider = async (data: TData): Promise<boolean | TErrorData> => {
try {
const response = await fetch(`${data.url}/models/user`, {
headers: {
Authorization: `Bearer ${data.apiKey}`,
},
});
if (!response.ok) {
if (response.status === 401 || !data.apiKey) {
return {
field: "key",
message: "Invalid API Key",
};
}
return {
field: "url",
message: "Invalid URL",
};
}
return true;
} catch (error) {
console.log(error);
return {
field: "url",
message: "Failed to connect",
};
}
};
getProviderModels = async (data: TData): Promise<Model[]> => {
const newClient = new OpenAI({
baseURL: data.url,
apiKey: data.apiKey,
dangerouslyAllowBrowser: true,
});
const response: OpenAIModel[] = (await newClient.models.list()).data;
return response
.filter(
(model) =>
model.id === "openai/gpt-5.1" ||
model.id === "anthropic/claude-haiku-4.5" ||
model.id === "anthropic/claude-sonnet-4.5" ||
model.id === "x-ai/grok-4" ||
// model.id === "google/gemini-2.5-flash" ||
// model.id === "google/gemini-2.5-pro" ||
model.id === "deepseek/deepseek-chat-v3.1" ||
model.id === "qwen/qwen3-235b-a22b-2507"
)
.map((model) => ({
id: model.id,
name:
model.id === "openai/gpt-5.1"
? "GPT-5.1"
: model.id === "anthropic/claude-haiku-4.5"
? "Claude Haiku 4.5"
: model.id === "anthropic/claude-sonnet-4.5"
? "Claude Sonnet 4.5"
: model.id === "x-ai/grok-4"
? "Grok 4"
: model.id === "google/gemini-2.5-flash"
? "Gemini 2.5 Flash"
: model.id === "google/gemini-2.5-pro"
? "Gemini 2.5 Pro"
: model.id === "deepseek/deepseek-chat-v3.1"
? "DeepSeek V3.1"
: model.id === "qwen/qwen3-235b-a22b-2507"
? "Qwen 3"
: model.id.toUpperCase(),
provider: "openrouter" as const,
}));
};
}
const openrouterProvider = new OpenRouterProvider();
export { OpenRouterProvider, openrouterProvider };
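
A rough usage sketch for the new provider (placeholder key and values; the exact `TProvider` shape lives in `@/lib/types` and is not shown in this diff, hence the cast):

import type { TProvider } from "@/lib/types";
import { openrouterProvider } from "./openrouter";

// Placeholder provider record; cast because TProvider's full shape
// is not visible in this diff.
openrouterProvider.setProvider({
  type: "openrouter",
  name: "OpenRouter",
  baseUrl: "https://openrouter.ai/api/v1",
  key: "sk-or-...",
} as TProvider);
openrouterProvider.setModelKey("anthropic/claude-sonnet-4.5");

// Stream one user message and stop at the terminal chunk.
for await (const part of openrouterProvider.sendMessage([
  { role: "user", content: [{ type: "text", text: "Hello" }] },
])) {
  if ("isEnd" in part) break;
}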

View File

@@ -0,0 +1,126 @@
import type {
ChatCompletionMessageParam,
ChatCompletionTool,
ChatCompletionToolMessageParam,
ChatCompletionMessageFunctionToolCall,
ChatCompletionAssistantMessageParam,
ChatCompletionContentPart,
} from "openai/resources/chat/completions";
import type { ThreadMessageLike } from "@assistant-ui/react";
import type { TMCPItem } from "@/lib/types";
export const convertToolsToModelFormat = (
tools: TMCPItem[]
): ChatCompletionTool[] => {
return tools.map((tool) => {
const t: ChatCompletionTool = {
type: "function",
function: {
name: tool.name,
description: tool.description,
parameters: {
...tool.inputSchema,
},
},
};
return t;
});
};
export const convertMessagesToModelFormat = (
messages: ThreadMessageLike[]
): ChatCompletionMessageParam[] => {
const convertedMessages: ChatCompletionMessageParam[] = [];
messages.forEach((message) => {
if (message.role === "user" || message.role === "system") {
const content: string | ChatCompletionContentPart[] =
typeof message.content === "string"
? message.content
: message.content.map((part): ChatCompletionContentPart => {
if (part.type === "text") {
return { type: "text", text: part.text };
}
if (part.type === "file") {
return {
type: "text",
text: JSON.stringify({
file_data: part.data,
filename: JSON.parse(part.mimeType).path,
file_id: JSON.parse(part.mimeType).path,
}),
};
}
return { type: "text", text: "" };
});
// if (message.attachments?.length) {
// const imageContent = convertImageAttachmentsToContent(
// message.attachments
// );
// if (Array.isArray(content)) {
// content.push(...imageContent);
// }
// }
convertedMessages.push({
role: "user",
content,
} as ChatCompletionMessageParam);
} else {
const content: ChatCompletionAssistantMessageParam["content"] =
typeof message.content === "string" ? message.content : [];
const toolsResults: ChatCompletionToolMessageParam[] = [];
const toolsCalls: ChatCompletionMessageFunctionToolCall[] = [];
const msgContent = message.content;
if (typeof msgContent !== "string" && typeof content !== "string") {
msgContent.forEach((part) => {
if (!Array.isArray(content)) return;
if (part.type === "text") {
content.push({ type: "text", text: part.text });
}
if (part.type === "tool-call") {
if (part.result) {
toolsResults.push({
role: "tool",
content: part.result,
tool_call_id: part.toolCallId!,
});
}
toolsCalls.push({
id: part.toolCallId!,
type: "function",
function: { arguments: part.argsText ?? "", name: part.toolName },
});
return;
}
});
}
const msg: ChatCompletionAssistantMessageParam = {
role: "assistant",
content,
};
if (toolsCalls.length) msg.tool_calls = toolsCalls;
convertedMessages.push(msg);
if (toolsResults.length) {
convertedMessages.push(...toolsResults);
}
}
});
return convertedMessages;
};
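
To illustrate the assistant branch above, a small self-contained example (all message values are made up):

import { convertMessagesToModelFormat } from "./utils";

// An invented assistant message carrying one completed tool call.
const converted = convertMessagesToModelFormat([
  {
    role: "assistant",
    content: [
      { type: "text", text: "Searching..." },
      {
        type: "tool-call",
        toolCallId: "call_1",
        toolName: "web-search",
        args: { query: "openrouter" },
        argsText: '{"query":"openrouter"}',
        result: "top result ...",
      },
    ],
  },
]);
// Produces an assistant message with a tool_calls entry, followed by a
// role:"tool" message carrying the result, in chat-completions format.
console.log(JSON.stringify(converted, null, 2));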

View File

@@ -325,7 +325,7 @@ class TogetherProvider
.map((model) => ({
id: model.id,
name: model.display_name ?? model.id,
provider: "openai" as const,
provider: "together" as const,
}));
};
}