Add proxy support, Groq provider & fix bugs

Oleg Korshul
2025-01-13 21:58:00 +03:00
parent 6494f58cff
commit 836827dd9f
5 changed files with 81 additions and 16 deletions

View File

@@ -126,8 +126,8 @@
function sendMessage(text) {
createTyping();
window.Asc.plugin.sendToPlugin("onChatMessage", text);
settings.messages.push({role: 'Me', content: text});
settings.messages.push({role: 'user', content: text});
window.Asc.plugin.sendToPlugin("onChatMessage", settings.messages);
};
function createTyping() {
@@ -207,7 +207,7 @@
};
window.Asc.plugin.attachEvent("onChatReply", function(reply) {
settings.messages.push({role: "AI", content: reply});
settings.messages.push({role: "assistant", content: reply});
createMessage(reply, 0);
removeTyping();
document.getElementById('message').focus();
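With the history stored under OpenAI-style roles, the panel can forward settings.messages to the engine unchanged. A minimal sketch of the shape the array now takes; the message texts are illustrative, the identifiers come from the hunks above:

// Sketch only: roles follow the OpenAI chat schema, so this array can be used
// directly as the "messages" field of a chat/completions request body.
settings.messages = [
    { role : "user",      content : "Summarize the selected paragraph." },
    { role : "assistant", content : "Here is a short summary ..." },
    { role : "user",      content : "Now make it one sentence." }
];
window.Asc.plugin.sendToPlugin("onChatMessage", settings.messages);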

View File

@@ -41,7 +41,7 @@
resolve(new TextResponse(this.response, false));
};
xhr.onerror = function() {
reject(new TextResponse(this.response, false));
reject(new TextResponse(this.response || "Failed to fetch.", false));
};
xhr.send(obj.body);
@@ -53,7 +53,7 @@
{
async function requestWrapper(message) {
return new Promise(function (resolve, reject) {
if (AI.isLocalDesktop && AI.isLocalUrl(message.url)) {
if (AI.isLocalDesktop && (AI.isLocalUrl(message.url) || message.isUseProxy)) {
window.AscSimpleRequest.createRequest({
url: message.url,
method: message.method,
@@ -73,9 +73,23 @@
method: message.method,
headers: message.headers
};
if (request.method != "GET")
if (request.method != "GET") {
request.body = message.isBlob ? message.body : (message.body ? JSON.stringify(message.body) : "");
if (message.isUseProxy) {
request = {
"method" : request.method,
"body" : JSON.stringify({
"target" : message.url,
"method" : request.method,
"headers" : request.headers,
"data" : request.body
})
}
message.url = AI.PROXY_URL;
}
}
fetch(message.url, request)
.then(function(response) {
return response.json()
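When a provider opts into the proxy, the request above is not sent to the provider's host at all: it is wrapped into a JSON envelope and POSTed to AI.PROXY_URL, which replays it server-side. A sketch of the resulting call for a single chat request, assuming an OpenAI-compatible provider; the target URL, key and model id are placeholders, not values from this commit:

// Illustrative envelope, matching the fields built above (target/method/headers/data).
fetch(AI.PROXY_URL, {
    method : "POST",
    body : JSON.stringify({
        target  : "https://provider.example/v1/chat/completions",
        method  : "POST",
        headers : { "Content-Type" : "application/json", "Authorization" : "Bearer <key>" },
        data    : JSON.stringify({ model : "<model-id>", messages : [{ role : "user", content : "Hello" }] })
    })
});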
@@ -95,6 +109,8 @@
AI.TmpProviderForModels = null;
AI.PROXY_URL = "https://plugins-services.onlyoffice.com/proxy";
AI._getHeaders = function(_provider) {
let provider = _provider.createInstance ? _provider : AI.Storage.getProvider(_provider.name);
if (!provider) provider = new AI.Provider();
@@ -106,12 +122,15 @@
if (!provider) provider = new AI.Provider();
let bodyPr = provider.getRequestBodyOptions();
if (provider.isUseProxy())
bodyPr.target = provider.url;
for (let i in bodyPr) {
if (!body[i])
body[i] = bodyPr[i];
}
return;
return provider.isUseProxy();
};
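_extendBody now does two jobs: it still merges the provider's default body options into the request body (adding the real endpoint as body.target when proxied), and it reports whether the provider wants the proxy so the transport layer can wrap the request. A sketch of how the returned flag is meant to be consumed, mirroring the chat request change later in this commit; the provider name and model id are placeholders:

// Sketch: building a request object for requestWrapper with the proxy flag attached.
let provider = AI.Storage.getProvider("OpenAI");          // any configured provider
let body = { model : "<model-id>" };
let isUseProxy = AI._extendBody(provider, body);          // fills provider options, sets body.target when proxied
let objRequest = {
    url        : AI._getEndpointUrl(provider, AI.Endpoints.Types.v1.Chat_Completions),
    method     : "POST",
    headers    : AI._getHeaders(provider),
    body       : body,
    isUseProxy : isUseProxy                               // requestWrapper routes this through AI.PROXY_URL
};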
AI._getEndpointUrl = function(_provider, endpoint) {
@@ -300,9 +319,19 @@
let headers = AI._getHeaders(provider);
let input_len = content.length;
let input_tokens = Asc.OpenAIEncode(content).length;
let isMessages = Array.isArray(content);
if (isUseCompletionsInsteadChat && isMessages) {
content = content[content.length - 1].content;
isMessages = false;
}
if (isMessages)
isNoSplit = true;
let input_len = content.length;
let input_tokens = isMessages ? 0 : Asc.OpenAIEncode(content).length;
let messages = [];
if (input_tokens < max_input_tokens || isNoSplit) {
messages.push(content);
@@ -334,7 +363,7 @@
model : this.modelUI.id
};
AI._extendBody(provider, objRequest.body);
objRequest.isUseProxy = AI._extendBody(provider, objRequest.body);
let processResult = function(data) {
let arrResult = data.data.choices || data.data.content;
@@ -359,7 +388,7 @@
if (1 === messages.length) {
if (!isUseCompletionsInsteadChat) {
objRequest.body.messages = [{role:"user",content:messages[0]}];
objRequest.body.messages = isMessages ? messages[0] : [{role:"user",content:messages[0]}];
} else {
objRequest.body.prompt = messages[0];
}
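The chat entry point now accepts either a plain prompt string or a ready-made messages array: arrays are sent as-is (no token splitting), and providers that only expose a completions endpoint fall back to the text of the last message. A sketch of both call shapes inside an async handler, assuming a request engine as used elsewhere in the plugin; the prompt texts are illustrative:

// Plain prompt: encoded, split by the token budget and sent as before.
let replyA = await requestEngine.chatRequest("Explain this formula.");

// Full chat history: forwarded unchanged as body.messages, no splitting.
let replyB = await requestEngine.chatRequest([
    { role : "user",      content : "Explain this formula." },
    { role : "assistant", content : "It computes the average of ..." },
    { role : "user",      content : "Shorter, please." }
]);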

View File

@@ -16,10 +16,6 @@
return new AI.ProviderTogetherAI(name, url, key);
};
AI.ProviderTogetherAI.prototype.getUrlAddon = function(endpoint) {
return undefined;
};
AI.ProviderTogetherAI.prototype.checkModelCapability = function(model) {
if (model.context_length)
model.options.max_input_tokens = AI.InputMaxTokens.getFloor(model.context_length);
@@ -70,6 +66,10 @@
return AI.CapabilitiesUI.Chat;
};
AI.ProviderTogetherAI.prototype.isUseProxy = function() {
return true;
};
// OpenAI
AI.ProviderOpenAI = function(name, url, key) {
AI.Provider.call(this, name || "OpenAI", url || "https://api.openai.com", key || "", "v1");
@@ -288,6 +288,38 @@
return headers;
};
// Groq AI
AI.ProviderGroqAI = function(name, url, key) {
AI.Provider.call(this, name || "Groq", url || "https://api.groq.com/openai", key || "", "v1");
};
AI.ProviderGroqAI.prototype = Object.create(AI.Provider.prototype);
AI.ProviderGroqAI.prototype.constructor = AI.ProviderGroqAI;
AI.ProviderGroqAI.prototype.createInstance = function(name, url, key) {
return new AI.ProviderGroqAI(name, url, key);
};
AI.ProviderGroqAI.prototype.checkModelCapability = function(model) {
if (model.context_length)
model.options.max_input_tokens = AI.InputMaxTokens.getFloor(model.context_length);
if (-1 !== model.id.toLowerCase().indexOf("vision")) {
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
model.endpoints.push(AI.Endpoints.Types.v1.Vision);
return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
}
if (-1 !== model.id.toLowerCase().indexOf("whisper")) {
model.endpoints.push(AI.Endpoints.Types.v1.Audio_Transcriptions);
model.endpoints.push(AI.Endpoints.Types.v1.Audio_Translations);
return AI.CapabilitiesUI.Audio;
}
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
return AI.CapabilitiesUI.Chat;
};
// Register internal providers
AI.Storage.InternalProviders = [];
@@ -302,6 +334,8 @@
if (window["AscDesktopEditor"])
AI.Storage.InternalProviders.push(new AI.ProviderGpt4All());
AI.Storage.InternalProviders.push(new AI.ProviderGroqAI());
AI.Storage.isInternalProvider = function(name) {
for (let i = 0, len = AI.Storage.InternalProviders.length; i < len; i++) {
if (name === AI.Storage.InternalProviders[i].name)
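Groq serves an OpenAI-compatible API under /openai/v1, so the new provider reuses the base request flow and only classifies models by id (vision, whisper, otherwise chat). A minimal usage sketch; the API key is a placeholder:

// Illustrative: the provider registered above can also be constructed directly with a key.
let groq = new AI.ProviderGroqAI("Groq", "https://api.groq.com/openai", "<api-key>");
console.log(AI.Storage.isInternalProvider("Groq"));   // true once the provider is registered
console.log(groq.isUseProxy());                        // false: Groq requests go to the provider directly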

View File

@@ -47,7 +47,6 @@
if (!requestEngine)
return;
let content = await Asc.Library.GetSelectedText();
let result = await requestEngine.chatRequest(message);
if (!result) result = "";

View File

@@ -211,6 +211,9 @@
headers["Authorization"] = "Bearer " + key;
return headers;
};
AI.Provider.prototype.isUseProxy = function() {
return false;
};
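Providers go through the plugins-services proxy only when they override this hook, as ProviderTogetherAI does above; everything else keeps talking to its own endpoint. A hypothetical custom provider opting in (the class name and URL are made up for illustration):

// Hypothetical provider that always routes its requests through AI.PROXY_URL.
AI.ProviderMyService = function(name, url, key) {
    AI.Provider.call(this, name || "MyService", url || "https://api.myservice.example", key || "", "v1");
};
AI.ProviderMyService.prototype = Object.create(AI.Provider.prototype);
AI.ProviderMyService.prototype.constructor = AI.ProviderMyService;
AI.ProviderMyService.prototype.isUseProxy = function() {
    return true;
};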
AI.InputMaxTokens = {
"4k" : 4096,