mirror of
https://github.com/ONLYOFFICE/onlyoffice.github.io.git
synced 2026-04-07 14:04:30 +08:00
Developing...
This commit is contained in:
@ -1,67 +0,0 @@
|
||||
{
|
||||
"name" : "AI",
|
||||
"nameLocale": {
|
||||
"ru": "ИИ",
|
||||
"fr": "AI",
|
||||
"es": "AI",
|
||||
"de": "AI",
|
||||
"cs": "AI",
|
||||
"zh": "AI",
|
||||
"pt-BR": "AI"
|
||||
},
|
||||
|
||||
"guid" : "asc.{9DC93CDB-B576-4F0C-B55E-FCC9C48DD007}",
|
||||
"version": "2.1.0",
|
||||
"minVersion" : "7.4.0",
|
||||
|
||||
"variations" : [
|
||||
{
|
||||
"description": "Use the AI chatbot to perform tasks which involve understanding or generating natural language or code.",
|
||||
"descriptionLocale": {
|
||||
"ru": "Используйте чат-бот AI для выполнения задач, связанных с пониманием или генерацией естественного языка или кода.",
|
||||
"fr": "Utilisez le chatbot AI pour effectuer des tâches qui impliquent la compréhension ou la génération de langage naturel ou de code.",
|
||||
"es": "Utilice el chatbot AI para realizar tareas que impliquen la comprensión o generación de lenguaje natural o de código.",
|
||||
"pt-BR": "Use o chatbot AI para realizar tarefas que envolvam compreensão ou geração de linguagem ou código natural.",
|
||||
"de": "Verwenden Sie den AI-Chatbot, um Aufgaben auszuführen, die das Verstehen oder Generieren von natürlicher Sprache oder Code beinhalten.",
|
||||
"cs": "AI API lze použít prakticky na jakýkoli úkol, který zahrnuje porozumění nebo generování přirozeného jazyka nebo kódu.",
|
||||
"zh": "使用 AI 聊天机器人完成有关理解、生成自然语言或代码的任务。"
|
||||
},
|
||||
|
||||
"url" : "index.html",
|
||||
|
||||
"icons": "resources/%theme-type%(light|dark)/icon%scale%(default).%extension%(png)",
|
||||
|
||||
"isViewer" : false,
|
||||
"EditorsSupport" : ["word", "cell", "slide"],
|
||||
"type" : "background",
|
||||
"initDataType" : "none",
|
||||
"buttons" : [],
|
||||
"events" : ["onContextMenuShow", "onContextMenuClick", "onToolbarMenuClick"],
|
||||
|
||||
"store": {
|
||||
"background": {
|
||||
"light" : "linear-gradient(90deg, #F9B6FF 0%, #E370EE 102.01%)",
|
||||
"dark" : "linear-gradient(90deg, #F9B6FF 0%, #E370EE 102.01%)"
|
||||
},
|
||||
"screenshots" :
|
||||
[
|
||||
"resources/store/screenshots/screen_1.png",
|
||||
"resources/store/screenshots/screen_2.png",
|
||||
"resources/store/screenshots/screen_3.png",
|
||||
"resources/store/screenshots/screen_4.png",
|
||||
"resources/store/screenshots/screen_5.png",
|
||||
"resources/store/screenshots/screen_6.png",
|
||||
"resources/store/screenshots/screen_7.png",
|
||||
"resources/store/screenshots/screen_8.png",
|
||||
"resources/store/screenshots/screen_9.png",
|
||||
"resources/store/screenshots/screen_10.png"
|
||||
],
|
||||
"icons" : {
|
||||
"light" : "resources/store/icons",
|
||||
"dark" : "resources/store/icons"
|
||||
},
|
||||
"categories": ["specAbilities", "work", "recommended"]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
158
sdkjs-plugins/content/ai/scripts/engine/providers/base.js
Normal file
158
sdkjs-plugins/content/ai/scripts/engine/providers/base.js
Normal file
@ -0,0 +1,158 @@
|
||||
"use strict";

(function () {

	window.AI = window.AI || {};
	var AI = window.AI;

	// Tokens
	// Known context-window sizes, keyed by human-readable label.
	AI.InputMaxTokens = {
		"4k"   : 4096,
		"8k"   : 8192,
		"16k"  : 16384,
		"32k"  : 32768,
		"64k"  : 65536,
		"128k" : 131072,
		"200k" : 204800,
		"256k" : 262144
	};

	// Snapshot the labels BEFORE helper members are attached below,
	// so the list contains only the numeric entries.
	let keys = [];
	for (let i in AI.InputMaxTokens)
		keys.push(i);

	AI.InputMaxTokens.keys = keys;

	/**
	 * Return the largest known token size that is <= value,
	 * or undefined when value is below the smallest table entry.
	 * @param {number} value
	 * @returns {number|undefined}
	 */
	AI.InputMaxTokens.getFloor = function (value) {
		let result = undefined;
		for (let i = 0, len = AI.InputMaxTokens.keys.length; i < len; i++) {
			if (AI.InputMaxTokens[AI.InputMaxTokens.keys[i]] <= value)
				result = AI.InputMaxTokens[AI.InputMaxTokens.keys[i]];
		}
		return result;
	};

	// UI
	AI.UI = AI.UI || {};

	/** Record describing a model entry shown in the models list. */
	AI.UI.Model = function (name, id, provider, capabilities) {
		this.capabilities = capabilities || AI.CapabilitiesUI.None;
		this.provider = provider || "";
		this.name = name || "";
		this.id = id || "";
	};

	/** Record describing a provider entry shown in the UI. */
	AI.UI.Provider = function (name, key, url) {
		this.name = name || "";
		this.key = key || "";
		this.url = url || "";
	};

	/** Record describing a user-visible action bound to a model. */
	AI.UI.Action = function (name, icon, model) {
		this.name = name || "";
		this.icon = icon || "";
		this.model = model || "";
	};

	// Endpoints
	AI.Endpoints = {

		Types : {

			Undefined : -1,

			v1 : {

				Models : 0x00,

				Chat_Completions : 0x01,
				Completions : 0x02,

				Images_Generations : 0x11,
				Images_Edits : 0x12,
				// NOTE(review): misspelling of "Variations" kept intentionally —
				// other provider files reference this exact member name.
				Images_Variarions : 0x13,

				Embeddings : 0x21,

				Audio_Transcriptions : 0x31,
				Audio_Translations : 0x32,
				Audio_Speech : 0x33,

				Moderations : 0x41,

				Realtime : 0x51,

				Language : 0x61,
				Code : 0x62
			}

		}
	};

	// Bit flags describing what a model can do (used for UI filtering).
	AI.CapabilitiesUI = {

		None : 0x00,

		Chat : 0x01,

		Image : 0x02,

		Embeddings : 0x04,

		Audio : 0x08,

		Moderations : 0x10,

		Realtime : 0x20,

		Code : 0x40,

		Vision : 0x80

	};

	// "All" = OR of every flag above; computed before it is attached
	// so the loop does not see it.
	let capabilitiesAll = 0;
	for (let item in AI.CapabilitiesUI)
		capabilitiesAll |= AI.CapabilitiesUI[item];
	AI.CapabilitiesUI.All = capabilitiesAll;

	AI.InternalProviders = [];

	/**
	 * Create a provider instance: a bundled (internal) provider when `name`
	 * matches one, otherwise a plain base provider.
	 * @param {string} name
	 * @param {string} url
	 * @param {string} key
	 */
	AI.createProviderInstance = function (name, url, key) {
		for (let i = 0, len = window.AI.InternalProviders.length; i < len; i++) {
			if (name === AI.InternalProviders[i].name)
				return AI.InternalProviders[i].createInstance(name, url, key);
		}
		// BUGFIX: the original used the bare identifier `Provider`, which is
		// not defined in this scope (ReferenceError at runtime). The base
		// class is registered on the namespace as AI.Provider.
		return new AI.Provider(name, url, key);
	};

	/**
	 * @param {string} name
	 * @returns {boolean} true when name belongs to a bundled provider.
	 */
	AI.isInternalProvider = function (name) {
		for (let i = 0, len = AI.InternalProviders.length; i < len; i++) {
			if (name === AI.InternalProviders[i].name)
				return true;
		}
		return false;
	};

	/**
	 * Load bundled provider scripts listed in providers/config.json and
	 * register an instance of each, skipping desktop-only providers when not
	 * running inside the desktop editor.
	 */
	AI.loadInternalProviders = async function () {
		let providersText = await AI.loadResourceAsText("./scripts/engine/providers/config.json");
		if ("" === providersText)
			return;

		try {
			let providers = JSON.parse(providersText);
			for (let i = 0, len = providers.length; i < len; i++) {
				let providerContent = await AI.loadResourceAsText("./scripts/engine/providers/internal/" + providers[i] + ".js");
				if (providerContent !== "") {
					// SECURITY NOTE: eval() here executes bundled plugin
					// resources, not user-supplied data. Do not feed external
					// content through this path.
					let content = "(function(){\n" + providerContent + "\nreturn new Provider();})();";
					let provider = eval(content);

					if (provider.isOnlyDesktop() && (-1 === navigator.userAgent.indexOf("AscDesktopEditor")))
						continue;

					window.AI.InternalProviders.push(provider);
				}
			}
		} catch (err) {
			// BUGFIX: errors were silently swallowed, making a broken provider
			// script undetectable; keep the best-effort behavior but log it.
			console.error("AI.loadInternalProviders failed:", err);
		}
	};

})();
|
||||
@ -0,0 +1,11 @@
|
||||
[
|
||||
"openai",
|
||||
"anthropic",
|
||||
"google-gemini",
|
||||
"deepseek",
|
||||
"together.ai",
|
||||
"groq",
|
||||
"ollama",
|
||||
"mistral",
|
||||
"gpt4all"
|
||||
]
|
||||
@ -0,0 +1,46 @@
|
||||
"use strict";

/**
 * Anthropic (Claude) provider.
 * Uses Anthropic's native "/messages" endpoint in place of the OpenAI-style
 * "/chat/completions" path, and x-api-key auth instead of a Bearer token.
 */
class Provider extends AI.Provider {

	constructor() {
		super("Anthropic", "https://api.anthropic.com", "", "v1");
	}

	// Instance field (not a prototype method) so it reliably overrides the
	// base implementation regardless of how the base class declares it.
	checkModelCapability = function (model) {
		if (0 == model.id.indexOf("claude-2"))
		{
			// BUGFIX: the original read AI.InputMaxTokens["100k"], but the
			// shared table has no "100k" entry, so the value was undefined.
			// Claude 2's 100k context window is stated explicitly instead.
			model.options.max_input_tokens = 102400;
			model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
			return AI.CapabilitiesUI.Chat;
		}

		if (0 == model.id.indexOf("claude-3-5-haiku"))
		{
			// Text-only model: chat capability without vision.
			model.options.max_input_tokens = AI.InputMaxTokens["200k"];
			model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
			return AI.CapabilitiesUI.Chat;
		}

		// All remaining Claude models: 200k context, chat + vision.
		model.options.max_input_tokens = AI.InputMaxTokens["200k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
	}

	getEndpointUrl(endpoint) {
		if (AI.Endpoints.Types.v1.Chat_Completions === endpoint)
			return "/messages";
		// BUGFIX: the endpoint argument was not forwarded to the base class,
		// so every non-chat endpoint resolved to "".
		return super.getEndpointUrl(endpoint);
	}

	// Declared as an instance field so it shadows the base class's
	// per-instance getRequestHeaderOptions (a field in the base class would
	// otherwise mask a prototype-method override declared here).
	getRequestHeaderOptions = function (key) {
		let headers = {
			"Content-Type" : "application/json",
			"anthropic-version" : "2023-06-01",
			// Required by Anthropic to allow direct browser (CORS) requests.
			"anthropic-dangerous-direct-browser-access": "true"
		};
		if (key)
			headers["x-api-key"] = key;
		return headers;
	}

}
|
||||
@ -0,0 +1,9 @@
|
||||
"use strict";

/**
 * DeepSeek provider.
 * Inherits all behavior from the base class; only the service identity
 * (name and endpoint base URL) is specialized. No URL addon is used.
 */
class Provider extends AI.Provider {

	constructor() {
		super("Deepseek", "https://api.deepseek.com", "", "");
	}

}
|
||||
@ -0,0 +1,9 @@
|
||||
"use strict";

/**
 * Google Gemini provider.
 * Inherits all behavior from the base class; the v1beta API version is part
 * of the base URL itself, so no separate URL addon is passed.
 */
class Provider extends AI.Provider {

	constructor() {
		super("Google-Gemini", "https://generativelanguage.googleapis.com/v1beta", "", "");
	}

}
|
||||
@ -0,0 +1,19 @@
|
||||
"use strict";

/**
 * GPT4All provider: a locally hosted, OpenAI-compatible server.
 * Only available from the desktop editor (localhost access).
 */
class Provider extends AI.Provider {

	constructor() {
		super("GPT4All", "http://localhost:4891", "", "v1");
	}

	// BUGFIX: declared as an instance field rather than a prototype method.
	// The base class installs getRequestBodyOptions as a per-instance field,
	// which shadows any prototype-method override declared here — so the
	// max_tokens cap below was silently ignored. A derived-class instance
	// field is initialized after the base's and therefore wins.
	getRequestBodyOptions = function () {
		return {
			// Cap the response size for the local model server.
			max_tokens : 4096
		};
	}

	isOnlyDesktop() {
		return true;
	}

}
|
||||
@ -0,0 +1,29 @@
|
||||
"use strict";

/**
 * Groq provider: OpenAI-compatible API under /openai/v1.
 */
class Provider extends AI.Provider {

	constructor() {
		super("Groq", "https://api.groq.com/openai", "", "v1");
	}

	// Instance field so it overrides the base implementation per instance.
	checkModelCapability = function (model) {
		// Groq's /models response carries context_length; map it onto the
		// nearest known token-table entry when present.
		if (model.context_length)
			model.options.max_input_tokens = AI.InputMaxTokens.getFloor(model.context_length);

		if (-1 !== model.id.toLowerCase().indexOf("vision")) {
			model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
			// BUGFIX: the original also pushed AI.Endpoints.Types.v1.Vision,
			// which does not exist in the endpoint table — that appended
			// `undefined` to model.endpoints. Vision is expressed only through
			// the capability flag returned below.
			return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
		}

		if (-1 !== model.id.toLowerCase().indexOf("whisper")) {
			model.endpoints.push(AI.Endpoints.Types.v1.Audio_Transcriptions);
			model.endpoints.push(AI.Endpoints.Types.v1.Audio_Translations);
			return AI.CapabilitiesUI.Audio;
		}

		// Default: plain chat model.
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		return AI.CapabilitiesUI.Chat;
	}

}
|
||||
@ -0,0 +1,58 @@
|
||||
"use strict";

/**
 * Mistral provider.
 * Classifies models by id substring: embeddings, moderation, image (pixtral),
 * sized chat families, and codestral; everything else defaults to chat.
 */
class Provider extends AI.Provider {

	constructor() {
		super("Mistral", "https://api.mistral.ai", "", "v1");
	}

	// Instance field so it overrides the base implementation per instance.
	checkModelCapability = function (model) {
		if (-1 !== model.id.indexOf("mistral-embed"))
		{
			model.options.max_input_tokens = AI.InputMaxTokens["8k"];
			model.endpoints.push(AI.Endpoints.Types.v1.Embeddings);
			return AI.CapabilitiesUI.Embeddings;
		}
		if (-1 !== model.id.indexOf("mistral-moderation"))
		{
			model.options.max_input_tokens = AI.InputMaxTokens["8k"];
			model.endpoints.push(AI.Endpoints.Types.v1.Moderations);
			return AI.CapabilitiesUI.Moderations;
		}
		if (-1 !== model.id.indexOf("pixtral"))
		{
			// NOTE(review): pixtral is routed to the image endpoints here —
			// confirm this is intended rather than vision-enabled chat.
			model.options.max_input_tokens = AI.InputMaxTokens["128k"];
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Edits);
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Variarions);
			return AI.CapabilitiesUI.Image;
		}
		if (-1 !== model.id.indexOf("mistral-small"))
		{
			model.options.max_input_tokens = AI.InputMaxTokens["32k"];
			model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
			return AI.CapabilitiesUI.Chat;
		}
		if (-1 !== model.id.indexOf("mistral-medium"))
		{
			model.options.max_input_tokens = AI.InputMaxTokens["32k"];
			model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
			return AI.CapabilitiesUI.Chat;
		}
		if (-1 !== model.id.indexOf("codestral"))
		{
			model.options.max_input_tokens = AI.InputMaxTokens["256k"];
			model.endpoints.push(AI.Endpoints.Types.v1.Code);
			return AI.CapabilitiesUI.Code | AI.CapabilitiesUI.Chat;
		}

		// Default: chat model with a 128k context.
		model.options.max_input_tokens = AI.InputMaxTokens["128k"];
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);

		let capUI = AI.CapabilitiesUI.Chat;
		if (model.capabilities && model.capabilities.vision)
			// BUGFIX: was `capUI = ...Vision`, which dropped the Chat flag for
			// vision-capable chat models; OR the flag in instead (matching the
			// `result |= Vision` pattern used by the Together AI provider).
			capUI |= AI.CapabilitiesUI.Vision;
		return capUI;
	}

}
|
||||
@ -0,0 +1,9 @@
|
||||
"use strict";

/**
 * Ollama provider: a locally hosted model server.
 * Inherits all behavior from the base class; only the service identity is
 * specialized. The /api path is part of the base URL, so no addon is used.
 */
class Provider extends AI.Provider {

	constructor() {
		super("Ollama", "http://localhost:11434/api", "", "");
	}

}
|
||||
@ -0,0 +1,81 @@
|
||||
"use strict";

/**
 * OpenAI provider: the standard OpenAI REST API under /v1.
 */
class Provider extends AI.Provider {

	constructor() {
		super("OpenAI", "https://api.openai.com", "", "v1");
	}

	/**
	 * Hide legacy completion-only base models from the models list.
	 * @param {Object} model
	 * @returns {boolean} true to exclude the model.
	 */
	checkExcludeModel(model) {
		if (-1 !== model.id.indexOf("babbage-002") ||
			-1 !== model.id.indexOf("davinci-002"))
			return true;
		return false;
	}

	/**
	 * Classify a model by its id: fill model.endpoints and, where known,
	 * model.options.max_input_tokens, then return the AI.CapabilitiesUI
	 * bit mask used by the UI filter.
	 * @param {Object} model
	 * @returns {number}
	 */
	checkModelCapability(model) {
		if (-1 !== model.id.indexOf("whisper-1"))
		{
			model.endpoints.push(AI.Endpoints.Types.v1.Audio_Transcriptions);
			model.endpoints.push(AI.Endpoints.Types.v1.Audio_Translations);
			return AI.CapabilitiesUI.Audio;
		}
		if (-1 !== model.id.indexOf("tts-1"))
		{
			model.endpoints.push(AI.Endpoints.Types.v1.Audio_Speech);
			return AI.CapabilitiesUI.Audio;
		}
		// NOTE(review): these ids are also filtered by checkExcludeModel
		// above; kept as a safety net in case the caller skips exclusion.
		if (-1 !== model.id.indexOf("babbage-002") ||
			-1 !== model.id.indexOf("davinci-002"))
		{
			model.options.max_input_tokens = AI.InputMaxTokens["16k"];
			model.endpoints.push(AI.Endpoints.Types.v1.Completions);
			return AI.CapabilitiesUI.Chat;
		}
		if (-1 !== model.id.indexOf("embedding"))
		{
			model.endpoints.push(AI.Endpoints.Types.v1.Embeddings);
			return AI.CapabilitiesUI.Embeddings;
		}
		if (-1 !== model.id.indexOf("moderation"))
		{
			model.endpoints.push(AI.Endpoints.Types.v1.Moderations);
			return AI.CapabilitiesUI.Moderations;
		}
		if (-1 !== model.id.indexOf("realtime"))
		{
			model.endpoints.push(AI.Endpoints.Types.v1.Realtime);
			return AI.CapabilitiesUI.Realtime;
		}
		if ("dall-e-2" === model.id)
		{
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Edits);
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Variarions);
			return AI.CapabilitiesUI.Image;
		}
		if ("dall-e-3" === model.id)
		{
			// dall-e-3 supports generation only (no edits/variations).
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
			return AI.CapabilitiesUI.Image;
		}

		// Chat families: pick a context size by model-id prefix.
		if (0 === model.id.indexOf("gpt-4o") ||
			0 === model.id.indexOf("o1-") ||
			0 === model.id.indexOf("gpt-4-turbo"))
			model.options.max_input_tokens = AI.InputMaxTokens["128k"];
		else if (0 === model.id.indexOf("gpt-4"))
			model.options.max_input_tokens = AI.InputMaxTokens["8k"];
		else if (-1 != model.id.indexOf("gpt-3.5-turbo-instruct")) {
			// Instruct variant is a legacy completions model, not chat.
			model.options.max_input_tokens = AI.InputMaxTokens["4k"];
			model.endpoints.push(AI.Endpoints.Types.v1.Completions);
			return AI.CapabilitiesUI.Chat;
		}
		else if (0 === model.id.indexOf("gpt-3.5-turbo"))
			model.options.max_input_tokens = AI.InputMaxTokens["16k"];

		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
	}

}
|
||||
@ -0,0 +1,63 @@
|
||||
"use strict";

/**
 * Together AI provider.
 * Together's /models response includes a `type` field; models are classified
 * by that type. Requests go through the proxy (CORS is not allowed directly).
 */
class Provider extends AI.Provider {

	constructor() {
		super("Together AI", "https://api.together.xyz", "", "v1");
	}

	// Instance field so it overrides the base implementation per instance.
	checkModelCapability = function (model) {
		if (model.context_length)
			model.options.max_input_tokens = AI.InputMaxTokens.getFloor(model.context_length);

		if ("chat" === model.type) {
			model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
			let result = AI.CapabilitiesUI.Chat;

			if (-1 !== model.id.toLowerCase().indexOf("vision")) {
				// BUGFIX: the original pushed AI.Endpoints.Types.v1.Vision,
				// which does not exist in the endpoint table (it appended
				// `undefined`); vision is expressed via the capability flag.
				result |= AI.CapabilitiesUI.Vision;
			}
			return result;
		}

		if ("image" === model.type) {
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Edits);
			model.endpoints.push(AI.Endpoints.Types.v1.Images_Variarions);
			return AI.CapabilitiesUI.Image;
		}

		if ("moderation" === model.type) {
			model.endpoints.push(AI.Endpoints.Types.v1.Moderations);
			return AI.CapabilitiesUI.Moderations;
		}

		if ("embedding" === model.type) {
			model.endpoints.push(AI.Endpoints.Types.v1.Embeddings);
			return AI.CapabilitiesUI.Embeddings;
		}

		if ("language" === model.type) {
			model.endpoints.push(AI.Endpoints.Types.v1.Language);
			// BUGFIX: the original returned AI.CapabilitiesUI.Language, which
			// does not exist (undefined), so language models were effectively
			// hidden. The Language endpoint maps to /completions, so Chat is
			// the closest existing capability flag.
			// NOTE(review): confirm Chat is the intended UI bucket here.
			return AI.CapabilitiesUI.Chat;
		}

		if ("code" === model.type) {
			model.endpoints.push(AI.Endpoints.Types.v1.Code);
			return AI.CapabilitiesUI.Code | AI.CapabilitiesUI.Chat;
		}

		if ("rerank" === model.type) {
			// Reranking is not surfaced in the UI.
			return AI.CapabilitiesUI.None;
		}

		// Unknown type: treat as a plain chat model.
		model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
		return AI.CapabilitiesUI.Chat;
	}

	isUseProxy() {
		return true;
	}

}
|
||||
159
sdkjs-plugins/content/ai/scripts/engine/providers/provider.js
Normal file
159
sdkjs-plugins/content/ai/scripts/engine/providers/provider.js
Normal file
@ -0,0 +1,159 @@
|
||||
"use strict";

(async function () {

	class Provider {
		/**
		 * Provider base class.
		 * @param {string} name Provider name.
		 * @param {string} url Url to service.
		 * @param {string} key Key for service. This is an optional field. Some providers may require a key for access.
		 * @param {string} addon Addon for url. For example: v1 for many providers.
		 */
		constructor(name, url, key, addon) {
			this.name = name || "";
			this.url = url || "";
			this.key = key || "";
			this.addon = addon || "";

			this.models = [];
			this.modelsUI = [];
		}

		/**
		 * Return *true* if you do not want to work with a specific model (model.id).
		 * The model will not be presented in the combo box with the list of models.
		 * @returns {boolean}
		 */
		checkExcludeModel(model) {
			return false;
		}

		/**
		 * Return enumeration with capabilities for this model (model.id). (Some providers does not get the information for this functionalities).
		 * Example: AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Image;
		 * @returns {number}
		 */
		checkModelCapability(model) {
			return AI.CapabilitiesUI.All;
		}

		/**
		 * Url for a specific endpoint.
		 * @returns {string}
		 */
		getEndpointUrl(endpoint) {
			let Types = AI.Endpoints.Types;
			switch (endpoint)
			{
				case Types.v1.Models:
					return "/models";

				case Types.v1.Chat_Completions:
					return "/chat/completions";
				case Types.v1.Completions:
					return "/completions";

				case Types.v1.Images_Generations:
					return "/images/generations";
				case Types.v1.Images_Edits:
					return "/images/edits";
				case Types.v1.Images_Variarions:
					return "/images/variations";

				case Types.v1.Embeddings:
					return "/embeddings";

				case Types.v1.Audio_Transcriptions:
					return "/audio/transcriptions";
				case Types.v1.Audio_Translations:
					return "/audio/translations";
				case Types.v1.Audio_Speech:
					return "/audio/speech";

				case Types.v1.Moderations:
					return "/moderations";

				case Types.v1.Language:
					return "/completions";
				case Types.v1.Code:
					return "/completions";

				case Types.v1.Realtime:
					return "/realtime";

				default:
					break;
			}

			return "";
		}

		/**
		 * An object-addition to the model. It is used, among other things, to configure the model parameters.
		 * Don't override this method unless you know what you're doing.
		 *
		 * BUGFIX: this and getRequestHeaderOptions below were declared as
		 * instance fields (`name = function(...)`). Class fields are installed
		 * on every instance and therefore SHADOW prototype-method overrides
		 * declared by subclasses (e.g. GPT4All's getRequestBodyOptions and
		 * Anthropic's getRequestHeaderOptions never took effect). They are
		 * ordinary prototype methods now, so subclass overrides work.
		 * @returns {Object}
		 */
		getRequestBodyOptions() {
			return {};
		}

		/**
		 * The returned object is an enumeration of all the headers for the requests.
		 * @returns {Object}
		 */
		getRequestHeaderOptions(key) {
			let headers = {
				"Content-Type" : "application/json"
			};
			if (key)
				headers["Authorization"] = "Bearer " + key;
			return headers;
		}

		/**
		 * This method returns whether a proxy server needs to be used to work with this provider.
		 * Don't override this method unless you know what you're doing.
		 * @returns {boolean}
		 */
		isUseProxy() {
			return false;
		}

		/**
		 * This method returns whether this provider is only supported in the desktop application.
		 * Don't override this method unless you know what you're doing.
		 * @returns {boolean}
		 */
		isOnlyDesktop() {
			return false;
		}

		/**
		 * ========================================================================================
		 * The following are methods for internal work. There is no need to overload these methods.
		 * ========================================================================================
		 */
		createInstance(name, url, key, addon) {
			// Clone via the prototype so derived-class behavior is preserved
			// without re-running the constructor.
			let inst = Object.create(Object.getPrototypeOf(this));
			inst.name = name;
			inst.url = url;
			inst.key = key;
			inst.addon = addon;
			return inst;
		}

		/** Build the UI model list from this.models via checkModelCapability. */
		checkModelsUI() {
			for (let i = 0, len = this.models.length; i < len; i++) {
				let model = this.models[i];
				let modelUI = new window.AI.UI.Model(model.name, model.id, model.provider);
				modelUI.capabilities = this.checkModelCapability(model);
				this.modelsUI.push(modelUI);
			}
		}
	}

	window.AI.Provider = Provider;
	await AI.loadInternalProviders();

})();
|
||||
@ -42,224 +42,5 @@
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
AI.Endpoints = {
|
||||
|
||||
Types : {
|
||||
|
||||
Undefined : -1,
|
||||
|
||||
v1 : {
|
||||
|
||||
Models : 0x00,
|
||||
|
||||
Chat_Completions : 0x01,
|
||||
Completions : 0x02,
|
||||
|
||||
Images_Generations : 0x11,
|
||||
Images_Edits : 0x12,
|
||||
Images_Variarions : 0x13,
|
||||
|
||||
Embeddings : 0x21,
|
||||
|
||||
Audio_Transcriptions : 0x31,
|
||||
Audio_Translations : 0x32,
|
||||
Audio_Speech : 0x33,
|
||||
|
||||
Moderations : 0x41,
|
||||
|
||||
Realtime : 0x51,
|
||||
|
||||
Language : 0x61,
|
||||
Code : 0x62
|
||||
}
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
AI.CapabilitiesUI = {
|
||||
|
||||
None : 0x00,
|
||||
|
||||
Chat : 0x01,
|
||||
|
||||
Image : 0x02,
|
||||
|
||||
Embeddings : 0x04,
|
||||
|
||||
Audio : 0x08,
|
||||
|
||||
Moderations : 0x10,
|
||||
|
||||
Realtime : 0x20,
|
||||
|
||||
Code : 0x40,
|
||||
|
||||
Vision : 0x80
|
||||
|
||||
};
|
||||
|
||||
let capabilitiesAll = 0;
|
||||
for (let item in AI.CapabilitiesUI)
|
||||
capabilitiesAll |= AI.CapabilitiesUI[item];
|
||||
AI.CapabilitiesUI.All = capabilitiesAll;
|
||||
|
||||
AI.Endpoints.getUrl = function(type) {
|
||||
|
||||
let Types = AI.Endpoints.Types;
|
||||
switch (type)
|
||||
{
|
||||
case Types.v1.Models:
|
||||
return "/models";
|
||||
|
||||
case Types.v1.Chat_Completions:
|
||||
return "/chat/completions";
|
||||
case Types.v1.Completions:
|
||||
return "/completions";
|
||||
|
||||
case Types.v1.Images_Generations:
|
||||
return "/images/generations";
|
||||
case Types.v1.Images_Edits:
|
||||
return "/images/edits";
|
||||
case Types.v1.Images_Variarions:
|
||||
return "/images/variations";
|
||||
|
||||
case Types.v1.Embeddings:
|
||||
return "/embeddings";
|
||||
|
||||
case Types.v1.Audio_Transcriptions:
|
||||
return "/audio/transcriptions";
|
||||
case Types.v1.Audio_Translations:
|
||||
return "/audio/translations";
|
||||
case Types.v1.Audio_Speech:
|
||||
return "/audio/speech";
|
||||
|
||||
case Types.v1.Moderations:
|
||||
return "/moderations";
|
||||
|
||||
case Types.v1.Language:
|
||||
return "/completions";
|
||||
case Types.v1.Code:
|
||||
return "/completions";
|
||||
|
||||
case Types.v1.Realtime:
|
||||
return "/realtime";
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return "";
|
||||
|
||||
};
|
||||
|
||||
/**
|
||||
* PROVIDER BASE CLASS
|
||||
*/
|
||||
|
||||
AI.Provider = function(name, url, key, addon) {
|
||||
this.name = name || "";
|
||||
this.url = url || "";
|
||||
this.key = key || "";
|
||||
this.addon = addon || "";
|
||||
|
||||
this.models = [];
|
||||
this.modelsUI = [];
|
||||
};
|
||||
|
||||
AI.Provider.createInstance = function(name, url, key) {
|
||||
for (let i = 0, len = AI.Storage.InternalProviders.length; i < len; i++) {
|
||||
if (name === AI.Storage.InternalProviders[i].name)
|
||||
return AI.Storage.InternalProviders[i].createInstance(name, url, key);
|
||||
}
|
||||
return AI.Provider.prototype.createInstance(name, url, key);
|
||||
};
|
||||
|
||||
AI.Provider.prototype.createInstance = function(name, url, key) {
|
||||
return new AI.Provider(name, url, key);
|
||||
};
|
||||
AI.Provider.prototype.checkExcludeModel = function(model) {
|
||||
return false;
|
||||
};
|
||||
AI.Provider.prototype.checkModelCapability = function(model) {
|
||||
return AI.CapabilitiesUI.All;
|
||||
};
|
||||
AI.Provider.prototype.checkModelsUI = function() {
|
||||
for (let i = 0, len = this.models.length; i < len; i++) {
|
||||
let model = this.models[i];
|
||||
let modelUI = new AI.UI.Model(model.name, model.id, model.provider);
|
||||
modelUI.capabilities = this.checkModelCapability(model);
|
||||
this.modelsUI.push(modelUI);
|
||||
}
|
||||
};
|
||||
|
||||
AI.Provider.prototype.getUrlAddon = function(endpoint) {
|
||||
return undefined;
|
||||
};
|
||||
AI.Provider.prototype.overrideEndpointUrl = function(endpoint) {
|
||||
return undefined;
|
||||
};
|
||||
|
||||
AI.Provider.prototype.getRequestBodyOptions = function() {
|
||||
return {};
|
||||
};
|
||||
AI.Provider.prototype.getRequestHeaderOptions = function(key) {
|
||||
let headers = {
|
||||
"Content-Type" : "application/json"
|
||||
};
|
||||
if (key)
|
||||
headers["Authorization"] = "Bearer " + key;
|
||||
return headers;
|
||||
};
|
||||
AI.Provider.prototype.isUseProxy = function() {
|
||||
return false;
|
||||
};
|
||||
|
||||
AI.InputMaxTokens = {
|
||||
"4k" : 4096,
|
||||
"8k" : 8192,
|
||||
"16k" : 16384,
|
||||
"32k" : 32768,
|
||||
"64k" : 65536,
|
||||
"128k" : 131072,
|
||||
"200k" : 204800,
|
||||
"256k" : 262144
|
||||
};
|
||||
|
||||
let keys = [];
|
||||
for (let i in AI.InputMaxTokens)
|
||||
keys.push(i);
|
||||
|
||||
AI.InputMaxTokens.keys = keys;
|
||||
AI.InputMaxTokens.getFloor = function(value) {
|
||||
let result = undefined;
|
||||
for (let i = 0, len = AI.InputMaxTokens.keys.length; i < len; i++) {
|
||||
if (AI.InputMaxTokens[AI.InputMaxTokens.keys[i]] <= value)
|
||||
result = AI.InputMaxTokens[AI.InputMaxTokens.keys[i]];
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* UI
|
||||
*/
|
||||
AI.UI.Model = function(name, id, provider, capabilities) {
|
||||
this.capabilities = capabilities || AI.CapabilitiesUI.None;
|
||||
this.provider = provider || "";
|
||||
this.name = name || "";
|
||||
this.id = id || "";
|
||||
};
|
||||
|
||||
AI.UI.Provider = function(name, key, url) {
|
||||
this.name = name || "";
|
||||
this.key = key || "";
|
||||
this.url = url || "";
|
||||
};
|
||||
|
||||
AI.UI.Action = function(name, icon, model) {
|
||||
this.name = name || "";
|
||||
this.icon = icon || "";
|
||||
this.model = model || "";
|
||||
};
|
||||
|
||||
|
||||
})(window);
|
||||
|
||||
33
sdkjs-plugins/content/internal_providers.js
Normal file
33
sdkjs-plugins/content/internal_providers.js
Normal file
@ -0,0 +1,33 @@
|
||||
(function (exports, undefined)
{
	// This script must be added after storage.js

	var AI = exports.AI;

	// Register internal providers (order determines list order in the UI).
	AI.Storage.InternalProviders = [];

	AI.Storage.InternalProviders.push(new AI.ProviderOpenAI());
	AI.Storage.InternalProviders.push(new AI.ProviderTogetherAI());
	AI.Storage.InternalProviders.push(new AI.ProviderMistral());

	// bug in desktop with simple request
	if (!AI.isLocalDesktop || AI.getDesktopLocalVersion() >= 8003000)
		AI.Storage.InternalProviders.push(new AI.ProviderAnthropic());

	// GPT4All talks to a localhost server, so it is desktop-only.
	if (window["AscDesktopEditor"])
		AI.Storage.InternalProviders.push(new AI.ProviderGpt4All());

	AI.Storage.InternalProviders.push(new AI.ProviderGroqAI());

	/**
	 * @param {string} name
	 * @returns {boolean} true when name matches a registered internal provider.
	 */
	AI.Storage.isInternalProvider = function (name) {
		return AI.Storage.InternalProviders.some(function (provider) {
			return provider.name === name;
		});
	};

})(window);
|
||||
Reference in New Issue
Block a user