Implement a working (runnable) version

This commit is contained in:
Oleg Korshul
2025-02-10 18:54:08 +03:00
parent 4cadf4acd6
commit c601ab9ee2
5 changed files with 38 additions and 355 deletions

View File

@ -25,8 +25,9 @@
<script type="text/javascript" src="vendor/GPT-3-Encoder/encoder.js"></script>
<script type="text/javascript" src="scripts/engine/storage.js"></script>
<script type="text/javascript" src="scripts/engine/internal_providers.js"></script>
<script type="text/javascript" src="scripts/engine/local_storage.js"></script>
<script type="text/javascript" src="scripts/engine/providers/base.js"></script>
<script type="text/javascript" src="scripts/engine/providers/provider.js"></script>
<script type="text/javascript" src="scripts/engine/engine.js"></script>
<script type="text/javascript" src="scripts/engine/buttons.js"></script>

View File

@ -1,347 +0,0 @@
(function(exports, undefined)
{
// This script must be added after storage.js
var AI = exports.AI;
// Together AI provider (OpenAI-compatible API under /v1).
AI.ProviderTogetherAI = function(name, url, key)
{
	AI.Provider.call(this, name || "Together AI", url || "https://api.together.xyz", key || "", "v1");
};
AI.ProviderTogetherAI.prototype = Object.create(AI.Provider.prototype);
AI.ProviderTogetherAI.prototype.constructor = AI.ProviderTogetherAI;
// Factory: build a fresh instance with user-supplied overrides.
AI.ProviderTogetherAI.prototype.createInstance = function(name, url, key)
{
	let instance = new AI.ProviderTogetherAI(name, url, key);
	return instance;
};
// Map a Together AI model descriptor onto plugin endpoints and a
// CapabilitiesUI bitmask. Matching endpoint types are pushed into
// model.endpoints as a side effect; the returned mask drives the UI.
AI.ProviderTogetherAI.prototype.checkModelCapability = function(model)
{
	if (model.context_length)
		model.options.max_input_tokens = AI.InputMaxTokens.getFloor(model.context_length);

	switch (model.type)
	{
	case "chat":
	{
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		let capabilities = AI.CapabilitiesUI.Chat;
		// "vision" in the id marks multimodal chat models.
		if (model.id.toLowerCase().indexOf("vision") !== -1)
		{
			model.endpoints.push(AI.Endpoints.Types.v1.Vision);
			capabilities |= AI.CapabilitiesUI.Vision;
		}
		return capabilities;
	}
	case "image":
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Edits);
		// (sic: "Variarions" matches the enum key declared in the engine)
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Variarions);
		return AI.CapabilitiesUI.Image;
	case "moderation":
		model.endpoints.push(AI.Endpoints.Types.v1.Moderations);
		return AI.CapabilitiesUI.Moderations;
	case "embedding":
		model.endpoints.push(AI.Endpoints.Types.v1.Embeddings);
		return AI.CapabilitiesUI.Embeddings;
	case "language":
		model.endpoints.push(AI.Endpoints.Types.v1.Language);
		return AI.CapabilitiesUI.Language;
	case "code":
		model.endpoints.push(AI.Endpoints.Types.v1.Code);
		return AI.CapabilitiesUI.Code | AI.CapabilitiesUI.Chat;
	case "rerank":
		// Rerank models are not surfaced in the UI.
		return AI.CapabilitiesUI.None;
	default:
		// Unknown types are treated as plain chat models.
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		return AI.CapabilitiesUI.Chat;
	}
};
// Requests to Together AI are always routed through the plugin proxy.
AI.ProviderTogetherAI.prototype.isUseProxy = function()
{
	const useProxy = true;
	return useProxy;
};
// OpenAI provider (api.openai.com, API version v1).
AI.ProviderOpenAI = function(name, url, key)
{
	AI.Provider.call(this, name || "OpenAI", url || "https://api.openai.com", key || "", "v1");
};
AI.ProviderOpenAI.prototype = Object.create(AI.Provider.prototype);
AI.ProviderOpenAI.prototype.constructor = AI.ProviderOpenAI;
// Factory: build a fresh instance with user-supplied overrides.
AI.ProviderOpenAI.prototype.createInstance = function(name, url, key)
{
	let instance = new AI.ProviderOpenAI(name, url, key);
	return instance;
};
// Hide legacy base models from the model list; returns true to exclude.
AI.ProviderOpenAI.prototype.checkExcludeModel = function(model)
{
	const legacyIds = ["babbage-002", "davinci-002"];
	return legacyIds.some(function(legacy) {
		return model.id.indexOf(legacy) !== -1;
	});
};
// Classify an OpenAI model by its id: push the matching endpoint types
// into model.endpoints, set the input-token limit where known, and
// return the CapabilitiesUI bitmask shown in the plugin UI.
// Fix: the gpt-3.5-turbo-instruct check used loose inequality (!=);
// switched to strict (!==) to match every other comparison in the file.
AI.ProviderOpenAI.prototype.checkModelCapability = function(model)
{
	// Speech-to-text.
	if (-1 !== model.id.indexOf("whisper-1"))
	{
		model.endpoints.push(AI.Endpoints.Types.v1.Audio_Transcriptions);
		model.endpoints.push(AI.Endpoints.Types.v1.Audio_Translations);
		return AI.CapabilitiesUI.Audio;
	}
	// Text-to-speech.
	if (-1 !== model.id.indexOf("tts-1"))
	{
		model.endpoints.push(AI.Endpoints.Types.v1.Audio_Speech);
		return AI.CapabilitiesUI.Audio;
	}
	// Legacy base models: completions endpoint only.
	if (-1 !== model.id.indexOf("babbage-002") ||
		-1 !== model.id.indexOf("davinci-002"))
	{
		model.options.max_input_tokens = AI.InputMaxTokens["16k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Completions);
		return AI.CapabilitiesUI.Chat;
	}
	if (-1 !== model.id.indexOf("embedding"))
	{
		model.endpoints.push(AI.Endpoints.Types.v1.Embeddings);
		return AI.CapabilitiesUI.Embeddings;
	}
	if (-1 !== model.id.indexOf("moderation"))
	{
		model.endpoints.push(AI.Endpoints.Types.v1.Moderations);
		return AI.CapabilitiesUI.Moderations;
	}
	if (-1 !== model.id.indexOf("realtime"))
	{
		model.endpoints.push(AI.Endpoints.Types.v1.Realtime);
		return AI.CapabilitiesUI.Realtime;
	}
	if ("dall-e-2" === model.id)
	{
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Edits);
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Variarions);
		return AI.CapabilitiesUI.Image;
	}
	if ("dall-e-3" === model.id)
	{
		// dall-e-3 only gets the generation endpoint (no edits/variations).
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
		return AI.CapabilitiesUI.Image;
	}
	// Chat family: choose the context window by model generation.
	if (0 === model.id.indexOf("gpt-4o") ||
		0 === model.id.indexOf("o1-") ||
		0 === model.id.indexOf("gpt-4-turbo"))
		model.options.max_input_tokens = AI.InputMaxTokens["128k"];
	else if (0 === model.id.indexOf("gpt-4"))
		model.options.max_input_tokens = AI.InputMaxTokens["8k"];
	else if (-1 !== model.id.indexOf("gpt-3.5-turbo-instruct"))
	{
		// Instruct variant uses the legacy completions endpoint.
		model.options.max_input_tokens = AI.InputMaxTokens["4k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Completions);
		return AI.CapabilitiesUI.Chat;
	}
	else if (0 === model.id.indexOf("gpt-3.5-turbo"))
		model.options.max_input_tokens = AI.InputMaxTokens["16k"];
	model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
	return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
};
// GPT4All: local OpenAI-compatible server, default on localhost:4891.
AI.ProviderGpt4All = function(name, url, key)
{
	AI.Provider.call(this, name || "GPT4All", url || "http://localhost:4891", key || "", "v1");
};
AI.ProviderGpt4All.prototype = Object.create(AI.Provider.prototype);
AI.ProviderGpt4All.prototype.constructor = AI.ProviderGpt4All;
// Factory: build a fresh instance with user-supplied overrides.
AI.ProviderGpt4All.prototype.createInstance = function(name, url, key)
{
	let instance = new AI.ProviderGpt4All(name, url, key);
	return instance;
};
// Extra body fields merged into every request payload.
AI.ProviderGpt4All.prototype.getRequestBodyOptions = function()
{
	const options = { max_tokens : 4096 };
	return options;
};
// Mistral provider (api.mistral.ai, API version v1).
AI.ProviderMistral = function(name, url, key)
{
	AI.Provider.call(this, name || "Mistral", url || "https://api.mistral.ai", key || "", "v1");
};
AI.ProviderMistral.prototype = Object.create(AI.Provider.prototype);
AI.ProviderMistral.prototype.constructor = AI.ProviderMistral;
// Factory: build a fresh instance with user-supplied overrides.
AI.ProviderMistral.prototype.createInstance = function(name, url, key)
{
	let instance = new AI.ProviderMistral(name, url, key);
	return instance;
};
// Classify a Mistral model by id substrings: set the input-token limit,
// register the matching endpoints and return the CapabilitiesUI bitmask.
AI.ProviderMistral.prototype.checkModelCapability = function(model)
{
	if (-1 !== model.id.indexOf("mistral-embed"))
	{
		model.options.max_input_tokens = AI.InputMaxTokens["8k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Embeddings);
		return AI.CapabilitiesUI.Embeddings;
	}
	if (-1 !== model.id.indexOf("mistral-moderation"))
	{
		model.options.max_input_tokens = AI.InputMaxTokens["8k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Moderations);
		return AI.CapabilitiesUI.Moderations;
	}
	if (-1 !== model.id.indexOf("pixtral"))
	{
		model.options.max_input_tokens = AI.InputMaxTokens["128k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Edits);
		model.endpoints.push(AI.Endpoints.Types.v1.Images_Variarions);
		return AI.CapabilitiesUI.Image;
	}
	if (-1 !== model.id.indexOf("mistral-small"))
	{
		model.options.max_input_tokens = AI.InputMaxTokens["32k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		return AI.CapabilitiesUI.Chat;
	}
	if (-1 !== model.id.indexOf("mistral-medium"))
	{
		model.options.max_input_tokens = AI.InputMaxTokens["32k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		return AI.CapabilitiesUI.Chat;
	}
	if (-1 !== model.id.indexOf("codestral"))
	{
		model.options.max_input_tokens = AI.InputMaxTokens["256k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Code);
		return AI.CapabilitiesUI.Code | AI.CapabilitiesUI.Chat;
	}
	// Default: generic chat model with a 128k context window.
	model.options.max_input_tokens = AI.InputMaxTokens["128k"];
	model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
	let capUI = AI.CapabilitiesUI.Chat;
	// Fix: combine Vision with Chat instead of replacing it. The original
	// "capUI = Vision" dropped the Chat bit, while every other provider
	// here returns Chat | Vision for vision-capable chat models.
	if (model.capabilities && model.capabilities.vision)
		capUI |= AI.CapabilitiesUI.Vision;
	return capUI;
};
// Anthropic provider (api.anthropic.com, API version v1).
AI.ProviderAnthropic = function(name, url, key)
{
	AI.Provider.call(this, name || "Anthropic", url || "https://api.anthropic.com", key || "", "v1");
};
AI.ProviderAnthropic.prototype = Object.create(AI.Provider.prototype);
AI.ProviderAnthropic.prototype.constructor = AI.ProviderAnthropic;
// Factory: build a fresh instance with user-supplied overrides.
AI.ProviderAnthropic.prototype.createInstance = function(name, url, key)
{
	let instance = new AI.ProviderAnthropic(name, url, key);
	return instance;
};
// Classify an Anthropic (Claude) model: claude-2 gets a 100k window and
// text-only chat; claude-3-5-haiku gets 200k text-only chat; everything
// else is treated as a 200k chat model with vision support.
// Fix: replaced loose equality (==) with strict (===), consistent with
// every other comparison in this file.
AI.ProviderAnthropic.prototype.checkModelCapability = function(model)
{
	if (0 === model.id.indexOf("claude-2"))
	{
		model.options.max_input_tokens = AI.InputMaxTokens["100k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		return AI.CapabilitiesUI.Chat;
	}
	if (0 === model.id.indexOf("claude-3-5-haiku"))
	{
		model.options.max_input_tokens = AI.InputMaxTokens["200k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		return AI.CapabilitiesUI.Chat;
	}
	model.options.max_input_tokens = AI.InputMaxTokens["200k"];
	model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
	return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
};
// Anthropic exposes chat under /messages instead of the OpenAI-style
// /chat/completions path; undefined keeps the default URL.
AI.ProviderAnthropic.prototype.overrideEndpointUrl = function(endpoint)
{
	return (AI.Endpoints.Types.v1.Chat_Completions === endpoint) ? "/messages" : undefined;
};
// Extra body fields merged into every request payload.
AI.ProviderAnthropic.prototype.getRequestBodyOptions = function()
{
	const options = { "max_tokens": 4096 };
	return options;
};
// Build the HTTP headers for Anthropic requests; the x-api-key header
// is only attached when a key is provided.
AI.ProviderAnthropic.prototype.getRequestHeaderOptions = function(key)
{
	const headers = {};
	headers["Content-Type"] = "application/json";
	headers["anthropic-version"] = "2023-06-01";
	// Opt-in header required for requests issued directly from a browser.
	headers["anthropic-dangerous-direct-browser-access"] = "true";
	if (key)
		headers["x-api-key"] = key;
	return headers;
};
// Groq provider (OpenAI-compatible API under /openai/v1).
AI.ProviderGroqAI = function(name, url, key)
{
	AI.Provider.call(this, name || "Groq", url || "https://api.groq.com/openai", key || "", "v1");
};
AI.ProviderGroqAI.prototype = Object.create(AI.Provider.prototype);
// Fix: constructor previously pointed at AI.ProviderTogetherAI
// (copy-paste error); it must reference this provider's own constructor.
AI.ProviderGroqAI.prototype.constructor = AI.ProviderGroqAI;
// Factory: build a fresh instance with user-supplied overrides.
AI.ProviderGroqAI.prototype.createInstance = function(name, url, key)
{
	return new AI.ProviderGroqAI(name, url, key);
};
// Classify a Groq model by id substrings; anything that is neither a
// vision nor a whisper model is treated as plain chat.
AI.ProviderGroqAI.prototype.checkModelCapability = function(model)
{
	if (model.context_length)
		model.options.max_input_tokens = AI.InputMaxTokens.getFloor(model.context_length);
	const id = model.id.toLowerCase();
	if (id.indexOf("vision") !== -1)
	{
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		model.endpoints.push(AI.Endpoints.Types.v1.Vision);
		return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
	}
	if (id.indexOf("whisper") !== -1)
	{
		model.endpoints.push(AI.Endpoints.Types.v1.Audio_Transcriptions);
		model.endpoints.push(AI.Endpoints.Types.v1.Audio_Translations);
		return AI.CapabilitiesUI.Audio;
	}
	model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
	return AI.CapabilitiesUI.Chat;
};
// Register the built-in providers (push order defines listing order).
AI.Storage.InternalProviders = [];
const internal = AI.Storage.InternalProviders;
internal.push(new AI.ProviderOpenAI());
internal.push(new AI.ProviderTogetherAI());
internal.push(new AI.ProviderMistral());
// Desktop builds before 8.3 mishandle the simple request Anthropic needs.
if (!AI.isLocalDesktop || AI.getDesktopLocalVersion() >= 8003000)
	internal.push(new AI.ProviderAnthropic());
// GPT4All runs locally, so only offer it inside the desktop editor.
if (window["AscDesktopEditor"])
	internal.push(new AI.ProviderGpt4All());
internal.push(new AI.ProviderGroqAI());
// True when the given name belongs to one of the built-in providers.
AI.Storage.isInternalProvider = function(name)
{
	return AI.Storage.InternalProviders.some(function(provider) {
		return provider.name === name;
	});
};
})(window);

View File

@ -6,11 +6,7 @@
// NOTE(review): presumably the window.localStorage key under which the
// plugin persists its settings — confirm against the load/save code.
var localStorageKey = "onlyoffice_ai_plugin_storage_key";
// Registry of available providers, keyed by provider name.
AI.Providers = {};
// Seed the registry with the built-in providers.
for (let i = 0, len = AI.Storage.InternalProviders.length; i < len; i++) {
let pr = AI.Storage.InternalProviders[i];
AI.Providers[pr.name] = pr;
}
AI.serializeProviders = function() {
let result = [];
for (let i in AI.Providers) {
@ -192,6 +188,12 @@
return null;
};
AI.Storage.load();
// Registers the internal providers from AI.InternalProviders into the
// AI.Providers registry (keyed by name), then reloads stored settings.
// NOTE(review): relies on AI.InternalProviders being populated by the
// caller before this runs — confirm against the loader in engine.js.
// Fix: terminate the function-expression assignment with ";" instead of
// relying on automatic semicolon insertion.
AI.onLoadInternalProviders = function()
{
	for (let i = 0, len = AI.InternalProviders.length; i < len; i++)
	{
		let pr = AI.InternalProviders[i];
		AI.Providers[pr.name] = pr;
	}
	AI.Storage.load();
};
})(window);

View File

@ -153,6 +153,8 @@
}
} catch(err) {
}
AI.onLoadInternalProviders();
};
})();

View File

@ -42,5 +42,30 @@
return ret;
};
// Fetch a resource as text via XMLHttpRequest. Resolves with the response
// body on HTTP 200 (or on any status when the page runs from file://,
// where XHR reports status 0), and with "" on any error or exception —
// this helper never rejects.
// Fixes: removed the pointless IIFE wrapped around the executor body,
// removed the dead "if (xhr)" guard (its false branch would have left the
// promise forever pending), switched to strict comparisons, and added the
// missing statement terminator after the onerror handler.
AI.loadResourceAsText = async function(url)
{
	return new Promise(function(resolve) {
		try
		{
			var xhr = new XMLHttpRequest();
			xhr.open('GET', url, true);
			xhr.onload = function() {
				if (200 === xhr.status || 0 === location.href.indexOf("file:"))
					resolve(xhr.responseText);
				else
					resolve("");
			};
			xhr.onerror = function() {
				resolve("");
			};
			xhr.send('');
		}
		catch (e)
		{
			resolve("");
		}
	});
};
})(window);