mirror of
https://github.com/ONLYOFFICE/server.git
synced 2026-04-07 14:04:35 +08:00
[feature] Add aiEngineWrapper.js
This commit is contained in:
157
DocService/sources/ai/aiEngineWrapper.js
Normal file
157
DocService/sources/ai/aiEngineWrapper.js
Normal file
@ -0,0 +1,157 @@
|
||||
/*
|
||||
* (c) Copyright Ascensio System SIA 2010-2024
|
||||
*
|
||||
* This program is a free software product. You can redistribute it and/or
|
||||
* modify it under the terms of the GNU Affero General Public License (AGPL)
|
||||
* version 3 as published by the Free Software Foundation. In accordance with
|
||||
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
|
||||
* that Ascensio System SIA expressly excludes the warranty of non-infringement
|
||||
* of any third-party rights.
|
||||
*
|
||||
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
|
||||
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
|
||||
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
|
||||
*
|
||||
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
|
||||
* street, Riga, Latvia, EU, LV-1050.
|
||||
*
|
||||
* The interactive user interfaces in modified source and object code versions
|
||||
* of the Program must display Appropriate Legal Notices, as required under
|
||||
* Section 5 of the GNU AGPL version 3.
|
||||
*
|
||||
* Pursuant to Section 7(b) of the License you must retain the original Product
|
||||
* logo when distributing the program. Pursuant to Section 7(e) we decline to
|
||||
* grant you any rights under trademark law for use of our trademarks.
|
||||
*
|
||||
* All the Product's GUI elements, including illustrations and icon sets, as
|
||||
* well as technical writing content are licensed under the terms of the
|
||||
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
|
||||
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
|
||||
*
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { buffer } = require('node:stream/consumers');
|
||||
const config = require('config');
|
||||
const utils = require('../../../Common/sources/utils');
|
||||
const operationContext = require('../../../Common/sources/operationContext');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const vm = require('vm');
|
||||
|
||||
// Configuration constants
|
||||
const cfgAiApiTimeout = config.get('ai-api.timeout');
|
||||
|
||||
/**
 * Binds an operation context to the shared sandbox so that code executed
 * inside it logs through the context's logger.
 *
 * @param {Object} ctx - Operation context; its `logger` becomes the sandbox console
 */
function setCtx(ctx) {
  Object.assign(sandbox, {ctx, console: ctx.logger});
}
|
||||
|
||||
// Set up the environment for the client-side engine.js
const sandbox = {
  ctx: null,
  window: {AI: {}},

  /**
   * Implementation of fetch that delegates to utils.httpRequest
   *
   * @param {string} url - The URL to fetch
   * @param {Object} options - Fetch options (method, headers, body)
   * @returns {Promise<Object>} - A promise that resolves to a response-like object
   */
  fetch: function(url, options = {}) {
    const method = options.method || 'GET';

    // Configure timeout options for the request
    const timeoutOptions = {
      connectionAndInactivity: cfgAiApiTimeout,
      wholeCycle: cfgAiApiTimeout
    };
    return utils.httpRequest(
      sandbox.ctx,
      method,
      url,
      options.headers || {},
      options.body || null,
      timeoutOptions,
      10 * 1024 * 1024, // 10MB response size limit
      false
    )
      .then(async (result) => {
        const responseBuffer = await buffer(result.stream);
        const text = responseBuffer.toString('utf8');

        return {
          status: result.response.status,
          statusText: result.response.statusText,
          ok: result.response.status >= 200 && result.response.status < 300,
          headers: result.response.headers,
          text: () => Promise.resolve(text),
          json: () => Promise.resolve(JSON.parse(text)),
          // Slice out exactly this buffer's bytes: Node Buffers may be views
          // into a larger shared pool, so returning `responseBuffer.buffer`
          // directly could expose unrelated pooled data.
          arrayBuffer: () => Promise.resolve(
            responseBuffer.buffer.slice(responseBuffer.byteOffset, responseBuffer.byteOffset + responseBuffer.byteLength)
          )
        };
      });
  }
};

// Initialize minimal AI object with required functionality
sandbox.AI = sandbox.window.AI;
setCtx(operationContext.global);
|
||||
|
||||
/**
 * Loads every provider script from engine/providers/internal, evaluates it
 * inside the sandbox, and registers the resulting Provider instances.
 * Errors in a single provider are logged and do not stop the others.
 */
function loadInternalProviders() {
  const enginePath = path.join(__dirname, 'engine', 'providers', 'internal');

  try {
    // Read providers directory, keeping only JavaScript sources
    const scripts = fs.readdirSync(enginePath).filter((name) => name.endsWith('.js'));

    // Load each provider
    for (const file of scripts) {
      const providerCode = fs.readFileSync(path.join(enginePath, file), 'utf8');

      try {
        sandbox.ctx.logger.debug(`Loading provider ${file}:`);
        // Wrap the script so it evaluates to a fresh Provider instance
        const content = "(function(){\n" + providerCode + "\nreturn new Provider();})();";
        // Execute provider code in sandbox
        const provider = vm.runInNewContext(content, sandbox, {filename: file, timeout: 5000});
        sandbox.AI.InternalProviders.push(provider);
      } catch (error) {
        sandbox.ctx.logger.error(`Error loading provider ${file}:`, error);
      }
    }

    // Notify the engine that all internal providers are available
    sandbox.AI.onLoadInternalProviders();
  } catch (error) {
    sandbox.ctx.logger.error('Error loading internal providers:', error);
  }
}
|
||||
|
||||
// Load engine.js and its dependencies in a fixed order and evaluate the
// concatenated source inside the sandbox.
const engineParts = [
  ['engine', 'storage.js'],
  ['engine', 'local_storage.js'],
  ['engine', 'providers', 'base.js'],
  ['engine', 'providers', 'provider.js'],
  ['engine', 'engine.js']
];
const engineCode = engineParts
  .map((parts) => fs.readFileSync(path.join(__dirname, ...parts), 'utf8'))
  .join('');
vm.runInNewContext(engineCode, sandbox);

// Expose the server-side provider loader to the engine and run it once
sandbox.AI.loadInternalProviders = loadInternalProviders;
loadInternalProviders();

exports.setCtx = setCtx;
exports.AI = sandbox.AI;
|
||||
368
DocService/sources/ai/aiProxyHandler.js
Normal file
368
DocService/sources/ai/aiProxyHandler.js
Normal file
@ -0,0 +1,368 @@
|
||||
/*
|
||||
* (c) Copyright Ascensio System SIA 2010-2024
|
||||
*
|
||||
* This program is a free software product. You can redistribute it and/or
|
||||
* modify it under the terms of the GNU Affero General Public License (AGPL)
|
||||
* version 3 as published by the Free Software Foundation. In accordance with
|
||||
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
|
||||
* that Ascensio System SIA expressly excludes the warranty of non-infringement
|
||||
* of any third-party rights.
|
||||
*
|
||||
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
|
||||
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
|
||||
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
|
||||
*
|
||||
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
|
||||
* street, Riga, Latvia, EU, LV-1050.
|
||||
*
|
||||
* The interactive user interfaces in modified source and object code versions
|
||||
* of the Program must display Appropriate Legal Notices, as required under
|
||||
* Section 5 of the GNU AGPL version 3.
|
||||
*
|
||||
* Pursuant to Section 7(b) of the License you must retain the original Product
|
||||
* logo when distributing the program. Pursuant to Section 7(e) we decline to
|
||||
* grant you any rights under trademark law for use of our trademarks.
|
||||
*
|
||||
* All the Product's GUI elements, including illustrations and icon sets, as
|
||||
* well as technical writing content are licensed under the terms of the
|
||||
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
|
||||
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
|
||||
*
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { pipeline } = require('stream/promises');
|
||||
const { buffer } = require('node:stream/consumers');
|
||||
const config = require('config');
|
||||
const utils = require('./../../../Common/sources/utils');
|
||||
const operationContext = require('./../../../Common/sources/operationContext');
|
||||
const commonDefines = require('./../../../Common/sources/commondefines');
|
||||
const docsCoServer = require('./../DocsCoServer');
|
||||
|
||||
// Import the new aiEngineWrapper module
|
||||
const aiEngineWrapper = require('./aiEngineWrapper');
|
||||
|
||||
const cfgAiApiAllowedOrigins = config.get('ai-api.allowedCorsOrigins');
|
||||
const cfgAiApiTimeout = config.get('ai-api.timeout');
|
||||
const cfgTokenEnableBrowser = config.get('services.CoAuthoring.token.enable.browser');
|
||||
|
||||
/**
 * Helper function to set CORS headers if the request origin is allowed
 *
 * @param {object} req - Express request object
 * @param {object} res - Express response object
 * @param {object} ctx - Operation context for logging
 * @param {boolean} handleOptions - Whether to handle OPTIONS requests (default: true)
 * @returns {boolean} - True if this was an OPTIONS request that was handled
 */
function handleCorsHeaders(req, res, ctx, handleOptions = true) {
  const requestOrigin = req.headers.origin;

  // No Origin header, empty allow-list, or origin not allowed: nothing to do
  if (!requestOrigin || cfgAiApiAllowedOrigins.length === 0 || !cfgAiApiAllowedOrigins.includes(requestOrigin)) {
    return false;
  }

  res.setHeader('Access-Control-Allow-Origin', requestOrigin);
  res.setHeader('Access-Control-Allow-Credentials', 'true');
  res.setHeader('Vary', 'Origin'); // Important when using dynamic origin

  // If debug logging is available
  if (ctx && ctx.logger) {
    ctx.logger.debug('CORS headers set for origin: %s (matched allowed list)', requestOrigin);
  }

  // Only preflight OPTIONS requests need further handling
  if (!handleOptions || req.method !== 'OPTIONS') {
    return false; // Not an OPTIONS request or origin not allowed
  }

  res.setHeader('Access-Control-Allow-Methods', 'DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT');
  // Allow all headers with wildcard
  res.setHeader('Access-Control-Allow-Headers', '*');

  // For preflight request, we should also set non-CORS headers to match the API
  res.setHeader('Allow', 'OPTIONS, HEAD, GET, POST, PUT, DELETE, PATCH');
  res.setHeader('Content-Length', '0');
  res.setHeader('Content-Type', 'text/html; charset=utf-8');

  // Return 204 which is standard for OPTIONS preflight
  res.sendStatus(204); // No Content response for OPTIONS
  return true; // Signal that we handled an OPTIONS request
}
|
||||
|
||||
/**
 * Makes an HTTP request to an AI API endpoint using the provided request and response objects
 *
 * The incoming body is a JSON string describing the real request
 * ({target, method, headers, data}); the server injects the provider API key
 * so keys never reach the client.
 *
 * @param {object} req - Express request object
 * @param {object} res - Express response object
 * @returns {Promise<void>} - Promise resolving when the request is complete
 */
async function proxyRequest(req, res) {
  // Create operation context for logging
  const ctx = new operationContext.Context();
  ctx.initFromRequest(req);

  try {
    ctx.logger.info('Start proxyRequest');

    // 1. Handle CORS preflight (OPTIONS) requests BEFORE auth: preflight
    // requests carry no Authorization header, so running the JWT check first
    // rejected every preflight with 403 and broke CORS whenever browser
    // tokens are enabled.
    if (handleCorsHeaders(req, res, ctx) === true) {
      return; // OPTIONS request handled, stop further processing
    }

    const tenTokenEnableBrowser = ctx.getCfg('services.CoAuthoring.token.enable.browser', cfgTokenEnableBrowser);

    if (tenTokenEnableBrowser) {
      let checkJwtRes = await docsCoServer.checkJwtHeader(ctx, req, 'Authorization', 'Bearer ', commonDefines.c_oAscSecretType.Session);
      if (checkJwtRes.err) {
        ctx.logger.error('checkJwtHeader error: %s', checkJwtRes.err);
        res.sendStatus(403);
        return;
      }
    }

    // req.body is the raw JSON string describing the proxied request
    let body = JSON.parse(req.body);

    // Configure timeout options for the request
    const timeoutOptions = {
      connectionAndInactivity: cfgAiApiTimeout,
      wholeCycle: cfgAiApiTimeout
    };

    // Get request size limit if configured
    const sizeLimit = 10 * 1024 * 1024; // Default to 10MB

    // Create a copy of the headers from the request
    const headers = { ...body.headers };

    // Get API key based on the target URL
    const aiApi = config.get('ai-api');
    let apiKey;

    // Determine which API key to use based on the target URL
    if (body.target) {
      // Find the provider that matches the target URL
      const matchedProvider = aiApi.providers.find(provider =>
        body.target.includes(provider.url));

      if (matchedProvider) {
        apiKey = matchedProvider.key;
      }
    }

    // Inject the key using whichever convention the request already uses:
    // x-api-key header, "key=" query parameter, or Bearer authorization.
    if (apiKey) {
      if (headers['x-api-key']) {
        headers['x-api-key'] = apiKey;
      } else if (body.target.includes('key=')) {
        body.target = body.target.replace('key=', `key=${apiKey}&`);
      } else {
        headers['Authorization'] = `Bearer ${apiKey}`;
      }
    } else {
      throw new Error('No API key found for the target URL');
    }

    // Create request parameters object
    const requestParams = {
      method: body.method,
      uri: body.target,
      headers,
      body: body.data,
      timeout: timeoutOptions,
      limit: sizeLimit,
      filterPrivate: false
    };

    // Create a safe copy for logging without sensitive info: the injected
    // API key must never be written to the logs (the original left this
    // redaction commented out and logged the key).
    const safeLogParams = { ...requestParams };
    if (safeLogParams.headers) {
      safeLogParams.headers = { ...safeLogParams.headers };
      if (safeLogParams.headers.Authorization) {
        safeLogParams.headers.Authorization = '[REDACTED]';
      }
      if (safeLogParams.headers['x-api-key']) {
        safeLogParams.headers['x-api-key'] = '[REDACTED]';
      }
    }
    // Keys passed via the query string ("key=...") are redacted as well
    safeLogParams.uri = safeLogParams.uri.replace(/key=[^&]+/, 'key=[REDACTED]');

    // Log the sanitized request parameters
    ctx.logger.debug(`Proxying request: %j`, safeLogParams);

    // Use utils.httpRequest to make the request
    const result = await utils.httpRequest(
      ctx, // Operation context
      requestParams.method, // HTTP method
      requestParams.uri, // Target URL
      requestParams.headers, // Request headers
      requestParams.body, // Request body
      requestParams.timeout, // Timeout configuration
      requestParams.limit, // Size limit
      requestParams.filterPrivate // Filter private requests
    );

    // Set the response headers to match the target response
    res.set(result.response.headers);

    // Use pipeline to pipe the response data to the client
    await pipeline(result.stream, res);

  } catch (error) {
    ctx.logger.error(`AI API request error: %s`, error);
    if (error.response) {
      // Set the response headers to match the target response
      res.set(error.response.headers);

      // Use pipeline to pipe the response data to the client
      await pipeline(error.response.data, res);
    } else {
      res.status(500).json({
        "error": {
          "message": "AI API request error",
          "code": "500"
        }
      });
    }
  } finally {
    ctx.logger.info('End proxyRequest');
  }
}
||||
|
||||
/**
 * Process AI actions from configuration
 *
 * @param {Object} ctx - Operation context
 * @param {Object} actions - The actions from configuration
 * @returns {Object} Processed actions object
 */
function processActions(ctx, actions) {
  const logger = ctx.logger;

  if (!actions || typeof actions !== 'object') {
    return {};
  }

  try {
    // Normalize each truthy entry, filling missing fields with defaults
    const processedActions = {};
    for (const [key, value] of Object.entries(actions)) {
      if (!value) {
        continue;
      }
      processedActions[key] = {
        name: value.name || key,
        icon: value.icon || '',
        model: value.model || '',
        capabilities: Array.isArray(value.capabilities) ? value.capabilities : []
      };
    }

    logger.info(`Processed ${Object.keys(processedActions).length} AI actions`);
    return processedActions;
  } catch (error) {
    logger.error('Error processing AI actions:', error);
    return {};
  }
}
|
||||
|
||||
/**
 * Process a single AI provider and its models
 *
 * @param {Object} ctx - Operation context
 * @param {Object} provider - Provider configuration
 * @param {boolean} includeDisabled - Whether to include disabled models
 * @returns {Promise<Object|null>} Processed provider with models or null if provider is invalid
 */
async function processProvider(ctx, provider, includeDisabled) {
  const logger = ctx.logger;

  // A provider without an endpoint or a key cannot be queried
  if (!provider.url || !provider.key) {
    return null;
  }

  let engineModels = [];
  try {
    aiEngineWrapper.setCtx(ctx);
    // Hand the configured key to the engine-side provider object
    aiEngineWrapper.AI.Providers[provider.name].key = provider.key;
    // Call getModels from engine.js
    const result = await aiEngineWrapper.AI.getModels(provider);
    logger.info(`Got ${JSON.stringify(result)} from AI.getModels for ${provider.name}`);
    // Process result
    if (!result.error && Array.isArray(result.models)) {
      engineModels = result.models;
    }
  } catch (error) {
    logger.error(`Error processing provider ${provider.name}:`, error);
  }

  // Return provider with any models we were able to get; the key is blanked
  // so it is never exposed to callers/clients.
  return {
    name: provider.name,
    url: provider.url,
    key: "",
    models: engineModels
  };
}
|
||||
|
||||
/**
 * Retrieves all AI models from the configuration and dynamically from providers
 *
 * @param {Object} ctx - Operation context
 * @param {boolean} [includeDisabled=false] - Whether to include disabled providers in the result
 * @returns {Promise<Object>} Object containing providers and their models along with action configurations
 */
async function getPluginSettings(ctx, includeDisabled = false) {
  const logger = ctx.logger;
  logger.info('Starting getPluginSettings');
  const result = {
    actions: {},
    providers: {},
    models: []
  };
  try {
    // Get AI API configuration
    const aiApi = config.get('ai-api');
    // Process providers and their models if configuration exists
    if (aiApi?.providers && Array.isArray(aiApi.providers)) {
      // Query every enabled provider in parallel. Explicitly disabled
      // providers are skipped unless includeDisabled is set. (The original
      // filter also let through disabled providers missing key/url; those
      // were only discarded later because processProvider returns null for
      // them, so the simplified condition is behavior-equivalent.)
      const providerPromises = aiApi.providers
        .filter(provider => includeDisabled || provider.enable !== false)
        .map(provider => processProvider(ctx, provider, includeDisabled));

      try {
        let providers = await Promise.allSettled(providerPromises);
        // Keep only fulfilled providers that actually produced models
        providers = providers.filter(provider => provider.status === 'fulfilled' && provider.value && provider.value.name && provider.value.models?.length > 0);

        const providerCount = providers.length;
        let totalModels = 0;
        // Convert providers array to object by provider name
        result.providers = {};
        for (let i = 0; i < providers.length; i++) {
          const provider = providers[i].value;
          totalModels += provider.models.length;
          result.providers[provider.name] = provider;
          result.models.push(...provider.models);
        }

        logger.info(`Successfully processed ${providerCount} providers with a total of ${totalModels} models`);
      } catch (error) {
        logger.error('Error resolving provider promises:', error);
      }
    }

    // Process AI actions
    if (aiApi?.actions && typeof aiApi.actions === 'object') {
      result.actions = processActions(ctx, aiApi.actions);
    }

    logger.info('Completed getPluginSettings successfully');
  } catch (error) {
    logger.error('Error retrieving AI models from config:', error);
  }
  return result;
}
|
||||
|
||||
// Public API of the AI proxy handler
exports.proxyRequest = proxyRequest;
exports.getPluginSettings = getPluginSettings;
|
||||
359
DocService/sources/ai/engine/buttons.js
Normal file
359
DocService/sources/ai/engine/buttons.js
Normal file
@ -0,0 +1,359 @@
|
||||
(function(window, undefined)
|
||||
{
|
||||
function generateGuid()
|
||||
{
|
||||
if (!window.crypto || !window.crypto.getRandomValues)
|
||||
{
|
||||
function s4() {
|
||||
return Math.floor((1 + Math.random()) * 0x10000).toString(16).substring(1);
|
||||
}
|
||||
return s4() + s4() + '-' + s4() + '-' + s4() + '-' + s4() + '-' + s4() + s4() + s4();
|
||||
}
|
||||
|
||||
var array = new Uint16Array(8);
|
||||
window.crypto.getRandomValues(array);
|
||||
var index = 0;
|
||||
function s4() {
|
||||
var value = 0x10000 + array[index++];
|
||||
return value.toString(16).substring(1);
|
||||
}
|
||||
return s4() + s4() + '-' + s4() + '-' + s4() + '-' + s4() + '-' + s4() + s4() + s4();
|
||||
}
|
||||
|
||||
function translateItem(text) {
|
||||
return window.Asc.plugin.tr(text);
|
||||
};
|
||||
|
||||
window.Asc = window.Asc || {};
|
||||
var Asc = window.Asc;
|
||||
|
||||
Asc.Buttons = {};
|
||||
Asc.Buttons.ButtonsContextMenu = [];
|
||||
Asc.Buttons.ButtonsToolbar = [];
|
||||
|
||||
Asc.Buttons.registerContextMenu = function()
|
||||
{
|
||||
window.Asc.plugin.attachEvent("onContextMenuShow", function(options) {
|
||||
if (!options)
|
||||
return;
|
||||
|
||||
let items = {
|
||||
guid: window.Asc.plugin.guid,
|
||||
};
|
||||
for (let i = 0, len = Asc.Buttons.ButtonsContextMenu.length; i < len; i++)
|
||||
{
|
||||
let button = Asc.Buttons.ButtonsContextMenu[i];
|
||||
if (button.parent === null)
|
||||
{
|
||||
button.onContextMenuShow(options, items);
|
||||
}
|
||||
}
|
||||
|
||||
if (items.items)
|
||||
window.Asc.plugin.executeMethod("AddContextMenuItem", [items]);
|
||||
});
|
||||
};
|
||||
|
||||
Asc.Buttons.registerToolbarMenu = function()
|
||||
{
|
||||
let items = {
|
||||
guid : window.Asc.plugin.guid,
|
||||
tabs : []
|
||||
};
|
||||
|
||||
for (let i = 0, len = Asc.Buttons.ButtonsToolbar.length; i < len; i++)
|
||||
{
|
||||
let button = Asc.Buttons.ButtonsToolbar[i];
|
||||
if (button.parent === null)
|
||||
{
|
||||
button.toToolbar(items);
|
||||
}
|
||||
|
||||
if (!!button.menu) {
|
||||
for (item of button.menu) {
|
||||
if (!!item.onclick) {
|
||||
window.Asc.plugin.attachToolbarMenuClickEvent(item.id, item.onclick);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (items.tabs.length > 0)
|
||||
window.Asc.plugin.executeMethod("AddToolbarMenuItem", [items]);
|
||||
};
|
||||
|
||||
Asc.Buttons.updateToolbarMenu = function(id, name, buttons)
|
||||
{
|
||||
let buttonMainToolbar = new Asc.ButtonToolbar(null, id);
|
||||
buttonMainToolbar.text = name;
|
||||
|
||||
let items = {
|
||||
guid : window.Asc.plugin.guid,
|
||||
tabs : []
|
||||
};
|
||||
|
||||
buttonMainToolbar.childs = buttons;
|
||||
for (let i = 0, len = buttons.length; i < len; i++)
|
||||
buttons[i].parent = buttonMainToolbar;
|
||||
|
||||
buttonMainToolbar.toToolbar(items);
|
||||
|
||||
if (items.tabs.length > 0)
|
||||
window.Asc.plugin.executeMethod("UpdateToolbarMenuItem", [items]);
|
||||
};
|
||||
|
||||
var ToolbarButtonType = {
|
||||
Button : "button",
|
||||
BigButton : "big-button"
|
||||
};
|
||||
|
||||
var ItemType = {
|
||||
None : 0,
|
||||
ContextMenu : 1,
|
||||
Toolbar : 2
|
||||
};
|
||||
|
||||
function Button(parent, id)
|
||||
{
|
||||
this.itemType = ItemType.None;
|
||||
this.editors = ["word", "cell", "slide"];
|
||||
|
||||
this.id = (id === undefined) ? generateGuid() : id;
|
||||
|
||||
this.icons = null;
|
||||
|
||||
this.text = "";
|
||||
this.hint = null;
|
||||
this.data = "";
|
||||
|
||||
this.separator = false;
|
||||
this.lockInViewMode = true;
|
||||
this.enableToggle = false;
|
||||
this.disabled = false;
|
||||
this.removed = false;
|
||||
|
||||
this.parent = parent ? parent : null;
|
||||
this.childs = null;
|
||||
|
||||
if (this.parent)
|
||||
{
|
||||
if (!this.parent.childs)
|
||||
this.parent.childs = [];
|
||||
this.parent.childs.push(this);
|
||||
}
|
||||
}
|
||||
|
||||
Button.prototype.toItem = function()
|
||||
{
|
||||
let item = {
|
||||
id : this.id,
|
||||
text : translateItem(this.text)
|
||||
};
|
||||
|
||||
if (this.hint !== null)
|
||||
item.hint = translateItem(this.hint === "" ? this.hint : this.text);
|
||||
|
||||
if (this.separator)
|
||||
item.separator = true;
|
||||
|
||||
if (this.data)
|
||||
item.data = this.data;
|
||||
|
||||
if (this.lockInViewMode)
|
||||
item.lockInViewMode = true;
|
||||
|
||||
if (this.enableToggle)
|
||||
item.enableToggle = true;
|
||||
|
||||
if (this.disabled)
|
||||
item.disabled = true;
|
||||
else
|
||||
item.disabled = false;
|
||||
|
||||
if (this.removed)
|
||||
item.removed = true;
|
||||
|
||||
if (this.icons)
|
||||
item.icons = this.icons;
|
||||
|
||||
if (this.itemType === ItemType.Toolbar)
|
||||
item.type = this.type;
|
||||
|
||||
if (this.menu)
|
||||
item.items = this.menu.map(function(menuItem) {
|
||||
menuItem.text = translateItem(menuItem.text);
|
||||
return menuItem;
|
||||
});
|
||||
|
||||
if (this.split)
|
||||
item.split = true;
|
||||
|
||||
return item;
|
||||
};
|
||||
|
||||
Button.prototype.attachOnClick = function(handler)
|
||||
{
|
||||
};
|
||||
|
||||
Button.prototype.onClick = function()
|
||||
{
|
||||
console.log("BUTTON: " + this.text);
|
||||
};
|
||||
|
||||
function ButtonContextMenu(parent, id)
|
||||
{
|
||||
Button.call(this, parent, id);
|
||||
|
||||
this.itemType = ItemType.ContextMenu;
|
||||
this.showOnOptionsType = [];
|
||||
|
||||
Asc.Buttons.ButtonsContextMenu.push(this);
|
||||
}
|
||||
|
||||
ButtonContextMenu.prototype = Object.create(Button.prototype);
|
||||
ButtonContextMenu.prototype.constructor = ButtonContextMenu;
|
||||
|
||||
ButtonContextMenu.prototype.copy = function()
|
||||
{
|
||||
let ret = new ButtonContextMenu(this.parent, this.id);
|
||||
ret.editors = this.editors;
|
||||
|
||||
ret.separator = this.separator;
|
||||
ret.lockInViewMode = this.lockInViewMode;
|
||||
ret.enableToggle = this.enableToggle;
|
||||
ret.disabled = this.disabled;
|
||||
ret.showOnOptionsType = this.showOnOptionsType.slice();
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
ButtonContextMenu.prototype.addCheckers = function()
|
||||
{
|
||||
let len = arguments.length;
|
||||
this.showOnOptionsType = new Array(len);
|
||||
for (let i = 0; i < len; i++)
|
||||
this.showOnOptionsType[i] = arguments[i];
|
||||
};
|
||||
|
||||
ButtonContextMenu.prototype.attachOnClick = function(handler)
|
||||
{
|
||||
window.Asc.plugin.attachContextMenuClickEvent(this.id, handler);
|
||||
};
|
||||
|
||||
ButtonContextMenu.prototype.onContextMenuShowAnalyze = function(options, parent)
|
||||
{
|
||||
return false;
|
||||
};
|
||||
|
||||
ButtonContextMenu.prototype.onContextMenuShowExtendItem = function(options, item)
|
||||
{
|
||||
};
|
||||
|
||||
ButtonContextMenu.prototype.onContextMenuShow = function(options, parent)
|
||||
{
|
||||
if (this.onContextMenuShowAnalyze(options, parent))
|
||||
return;
|
||||
|
||||
let isSupport = false;
|
||||
for (let i = 0, len = this.editors.length; i < len; i++)
|
||||
{
|
||||
if (Asc.plugin.info.editorType === this.editors[i])
|
||||
{
|
||||
isSupport = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!isSupport)
|
||||
return;
|
||||
|
||||
for (let i = 0, len = this.showOnOptionsType.length; i < len; i++)
|
||||
{
|
||||
if (options.type === this.showOnOptionsType[i] || this.showOnOptionsType[i] === "All")
|
||||
{
|
||||
if (!parent.items)
|
||||
parent.items = [];
|
||||
|
||||
let curItem = this.toItem();
|
||||
this.onContextMenuShowExtendItem(options, curItem);
|
||||
|
||||
if (this.childs)
|
||||
{
|
||||
for (let j = 0, childsLen = this.childs.length; j < childsLen; j++)
|
||||
{
|
||||
this.childs[j].onContextMenuShow(options, curItem);
|
||||
}
|
||||
}
|
||||
|
||||
parent.items.push(curItem);
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function ButtonToolbar(parent, id)
|
||||
{
|
||||
Button.call(this, parent, id);
|
||||
|
||||
this.itemType = ItemType.Toolbar;
|
||||
this.type = ToolbarButtonType.BigButton;
|
||||
this.tab = "";
|
||||
|
||||
Asc.Buttons.ButtonsToolbar.push(this);
|
||||
}
|
||||
|
||||
ButtonToolbar.prototype = Object.create(Button.prototype);
|
||||
ButtonToolbar.prototype.constructor = ButtonToolbar;
|
||||
|
||||
ButtonToolbar.prototype.attachOnClick = function(handler)
|
||||
{
|
||||
window.Asc.plugin.attachToolbarMenuClickEvent(this.id, handler);
|
||||
};
|
||||
|
||||
ButtonToolbar.prototype.toItem = function(items)
|
||||
{
|
||||
let item = Button.prototype.toItem.call(this);
|
||||
item.type = this.type;
|
||||
return item;
|
||||
};
|
||||
|
||||
ButtonToolbar.prototype.toToolbar = function(items)
|
||||
{
|
||||
let currentItem = null;
|
||||
if (this.parent === null)
|
||||
{
|
||||
let tab = {
|
||||
id : this.id,
|
||||
text : translateItem(this.text),
|
||||
items : []
|
||||
};
|
||||
if (this.hint !== null)
|
||||
tab.hint = translateItem(this.hint === "" ? this.hint : this.text);
|
||||
|
||||
items.tabs.push(tab);
|
||||
|
||||
currentItem = tab;
|
||||
}
|
||||
else
|
||||
{
|
||||
currentItem = this.toItem();
|
||||
|
||||
if (!items.items)
|
||||
items.items = [];
|
||||
|
||||
items.items.push(currentItem);
|
||||
}
|
||||
|
||||
if (this.childs)
|
||||
{
|
||||
for (let j = 0, childsLen = this.childs.length; j < childsLen; j++)
|
||||
{
|
||||
this.childs[j].toToolbar(currentItem);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Asc.ToolbarButtonType = ToolbarButtonType;
|
||||
Asc.ButtonContextMenu = ButtonContextMenu;
|
||||
Asc.ButtonToolbar = ButtonToolbar;
|
||||
})(window);
|
||||
528
DocService/sources/ai/engine/engine.js
Normal file
528
DocService/sources/ai/engine/engine.js
Normal file
@ -0,0 +1,528 @@
|
||||
(function(window, undefined)
{
	window.AI = window.AI || {};
	var AI = window.AI;

	// Only the local desktop build replaces fetch with this XHR shim.
	if (!AI.isLocalDesktop)
		return;

	/**
	 * fetch-compatible replacement built on XMLHttpRequest.
	 * Resolves with a minimal Response look-alike exposing text()/json()/ok.
	 */
	window.fetch = function(url, obj) {
		function TextResponse(text, isOk) {
			if (isOk)
				this.textResponse = text;
			else
				this.message = text;

			this.text = function() {
				return new Promise(function(resolve) {
					resolve(text);
				});
			};
			this.json = function() {
				return new Promise(function(resolve, reject) {
					try {
						resolve(JSON.parse(text));
					} catch (error) {
						reject(error);
					}
				});
			};
			this.ok = isOk;
		}

		return new Promise(function (resolve, reject) {
			var xhr = new XMLHttpRequest();
			xhr.open(obj.method, url, true);

			for (let h in obj.headers)
				if (obj.headers.hasOwnProperty(h))
					xhr.setRequestHeader(h, obj.headers[h]);

			xhr.onload = function() {
				// Status 0 is treated as success — presumably local/file
				// scheme responses in the desktop shell (preserved as-is).
				var isOk = (this.status == 200 || this.status == 0);
				resolve(new TextResponse(this.response, isOk));
			};
			xhr.onerror = function() {
				reject(new TextResponse(this.response || "Failed to fetch.", false));
			};

			xhr.send(obj.body);
		});
	};
})(window);
|
||||
|
||||
(function(window, undefined)
|
||||
{
|
||||
	// Send one AI request and always RESOLVE with either
	// {error: 0, data} or {error: <code>, message} — rejection is never used,
	// so callers only need to inspect `error`.
	// On desktop, local/proxied URLs go through AscSimpleRequest; everywhere
	// else the (possibly shimmed) fetch is used.
	async function requestWrapper(message) {
		return new Promise(function (resolve, reject) {
			if (AI.isLocalDesktop && (AI.isLocalUrl(message.url) || message.isUseProxy)) {
				window.AscSimpleRequest.createRequest({
					url: message.url,
					method: message.method,
					headers: message.headers,
					body: message.isBlob ? message.body : (message.body ? JSON.stringify(message.body) : ""),
					complete: function(e, status) {
						// NOTE(review): JSON.parse is not guarded here; a
						// non-JSON response would throw inside the callback.
						let data = JSON.parse(e.responseText);
						resolve({error: 0, data: data.data ? data.data : data});
					},
					error: function(e, status, error) {
						// -102 is mapped to a plain 404 — desktop-specific
						// error code, presumably "connection refused"; TODO confirm.
						if ( e.statusCode == -102 ) e.statusCode = 404;
						resolve({error: e.statusCode, message: "Internal error"});
					}
				});
			} else {
				let request = {
					method: message.method,
					headers: message.headers
				};
				if (request.method != "GET") {
					request.body = message.isBlob ? message.body : (message.body ? JSON.stringify(message.body) : "");

					// Proxy mode: wrap the whole request as JSON and redirect
					// it to the proxy endpoint. NOTE(review): this only
					// happens for non-GET requests — confirm GET requests are
					// never meant to be proxied.
					if (message.isUseProxy) {
						request = {
							"method" : request.method,
							"body" : JSON.stringify({
								"target" : message.url,
								"method" : request.method,
								"headers" : request.headers,
								"data" : request.body
							})
						}
						if (proxyUrlParam){
							message.url = proxyUrlParam;
							request["headers"] = {
								"Authorization" : "Bearer " + Asc.plugin.info.jwt,
							}
						} else {
							message.url = AI.PROXY_URL;
						}

					}
				}

				fetch(message.url, request)
				.then(function(response) {
					return response.json()
				})
				.then(function(data) {
					if (data.error)
						resolve({error: 1, message: data.error.message ? data.error.message : ""});
					else
						resolve({error: 0, data: data.data ? data.data : data});
				})
				.catch(function(error) {
					// Any transport/parse failure is normalized into the same
					// {error, message} shape instead of rejecting.
					resolve({error: 1, message: error.message ? error.message : ""});
				});
			}
		});
	}
|
||||
|
||||
	// Scratch provider instance populated by AI.getModels while a model list
	// request is in flight (null between requests).
	AI.TmpProviderForModels = null;

	// Fallback proxy endpoint when no explicit proxy URL is configured.
	// NOTE(review): both values are hard-coded to localhost:8000, and a truthy
	// proxyUrlParam forces proxy mode for every request in _chatRequest —
	// confirm these are meant to ship as constants rather than configuration.
	AI.PROXY_URL = "http://localhost:8000/ai-proxy";
	const proxyUrlParam = "http://localhost:8000/ai-proxy";
|
||||
|
||||
AI._getHeaders = function(_provider) {
|
||||
let provider = _provider.createInstance ? _provider : AI.Storage.getProvider(_provider.name);
|
||||
if (!provider) provider = new AI.Provider();
|
||||
return provider.getRequestHeaderOptions();
|
||||
};
|
||||
|
||||
AI._getModelsSync = function(_provider) {
|
||||
let provider = _provider.createInstance ? _provider : AI.Storage.getProvider(_provider.name);
|
||||
if (!provider) provider = new AI.Provider();
|
||||
return provider.getModels();
|
||||
};
|
||||
|
||||
AI._extendBody = function(_provider, body) {
|
||||
let provider = _provider.createInstance ? _provider : AI.Storage.getProvider(_provider.name);
|
||||
if (!provider) provider = new AI.Provider();
|
||||
let bodyPr = provider.getRequestBodyOptions();
|
||||
|
||||
if (provider.isUseProxy())
|
||||
bodyPr.target = provider.url;
|
||||
|
||||
for (let i in bodyPr) {
|
||||
if (!body[i])
|
||||
body[i] = bodyPr[i];
|
||||
}
|
||||
|
||||
return provider.isUseProxy();
|
||||
};
|
||||
|
||||
AI._getEndpointUrl = function(_provider, endpoint, model) {
|
||||
let provider = _provider.createInstance ? _provider : AI.Storage.getProvider(_provider.name);
|
||||
if (!provider) provider = new AI.Provider(_provider.name, _provider.url, _provider.key);
|
||||
|
||||
if (_provider.key)
|
||||
provider.key = _provider.key;
|
||||
|
||||
let url = provider.url;
|
||||
if (url.endsWith("/"))
|
||||
url = url.substring(0, url.length - 1);
|
||||
if ("" !== provider.addon)
|
||||
{
|
||||
let plus = "/" + provider.addon;
|
||||
let pos = url.lastIndexOf(plus);
|
||||
if (pos === -1 || pos !== (url.length - plus.length))
|
||||
url += plus;
|
||||
}
|
||||
|
||||
return url + provider.getEndpointUrl(endpoint, model);
|
||||
};
|
||||
|
||||
	// Fetch the model list for a provider (synchronously when the provider
	// knows it, otherwise via the /models endpoint) and resolve with
	// {error, message, models}. Side effect: (re)builds the global
	// AI.TmpProviderForModels instance that holds the fetched models.
	AI.getModels = async function(provider)
	{
		AI.TmpProviderForModels = null;
		return new Promise(function (resolve, reject) {

			function resolveRequest(data) {
				if (data.error)
					resolve({
						error : 1,
						message : data.message,
						models : []
					});
				else {
					AI.TmpProviderForModels = AI.createProviderInstance(provider.name, provider.url, provider.key);
					// Some providers return {models: [...]}, others a bare array.
					let models = data.data;
					if (data.data.models)
						models = data.data.models;
					for (let i = 0, len = models.length; i < len; i++)
					{
						let model = models[i];
						AI.TmpProviderForModels.correctModelInfo(model);

						if (!model.id)
							continue;

						// Reset per-model metadata before capability detection.
						model.endpoints = [];
						model.options = {};

						if (AI.TmpProviderForModels.checkExcludeModel(model))
							continue;

						let modelUI = new AI.UI.Model(model.name, model.id,
							provider.name, AI.TmpProviderForModels.checkModelCapability(model));
						AI.TmpProviderForModels.models.push(model);
						AI.TmpProviderForModels.modelsUI.push(modelUI);
					}

					resolve({
						error : 0,
						message : "",
						models : AI.TmpProviderForModels.modelsUI
					});
				}
			}

			// Fast path: provider exposes its model list without a request.
			let syncModels = AI._getModelsSync(provider);
			if (Array.isArray(syncModels))
			{
				resolveRequest({
					error : 0,
					data : syncModels
				});
				return;
			}

			let headers = AI._getHeaders(provider);
			requestWrapper({
				url : AI._getEndpointUrl(provider, AI.Endpoints.Types.v1.Models),
				headers : headers,
				method : "GET"
			}).then(function(data) {
				resolveRequest(data);
			});
		});
	};
|
||||
|
||||
AI.Request = function(model) {
|
||||
this.modelUI = model;
|
||||
this.model = null;
|
||||
this.errorHandler = null;
|
||||
|
||||
if ("" !== model.provider) {
|
||||
let provider = null;
|
||||
for (let i in AI.Providers) {
|
||||
if (model.provider === AI.Providers[i].name) {
|
||||
provider = AI.Providers[i];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (provider) {
|
||||
for (let i = 0, len = provider.models.length; i < len; i++) {
|
||||
if (model.id === provider.models[i].id ||
|
||||
model.id === provider.models[i].name)
|
||||
{
|
||||
this.model = provider.models[i];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
AI.Request.create = function(action) {
|
||||
let model = AI.Storage.getModelById(AI.Actions[action].model);
|
||||
if (!model) {
|
||||
onOpenSettingsModal();
|
||||
return null;
|
||||
}
|
||||
return new AI.Request(model);
|
||||
};
|
||||
|
||||
AI.Request.prototype.setErrorHandler = function(callback) {
|
||||
this.errorHandler = callback;
|
||||
};
|
||||
|
||||
AI.Request.prototype.chatRequest = async function(content, block) {
|
||||
return await this._wrapRequest(this._chatRequest, content, block !== false);
|
||||
};
|
||||
|
||||
	// Run `func` under an optional editor-blocking action, routing AI-shaped
	// errors ({error, message}) to the registered handler or the default
	// error popup. Returns the function's result, or undefined on error.
	AI.Request.prototype._wrapRequest = async function(func, data, block) {
		if (block)
			await Asc.Editor.callMethod("StartAction", ["Block", "AI (" + this.modelUI.name + ")"]);
		let result = undefined;
		try {
			result = await func.call(this, data);
		} catch (err) {
			if (err.error) {
				if (block)
					await Asc.Editor.callMethod("EndAction", ["Block", "AI (" + this.modelUI.name + ")"]);
				if (this.errorHandler)
					this.errorHandler(err);
				else {
					// NOTE(review): `if (true)` keeps the SendError path and
					// makes the ShowError branch dead code — presumably kept
					// until editors older than 8.3.0 are dropped; confirm.
					if (true) {
						await Asc.Library.SendError(err.message, -1);
					} else {
						// since 8.3.0!!!
						await Asc.Editor.callMethod("ShowError", [err.message, -1]);
					}
				}
				return;
			}
			// NOTE(review): exceptions WITHOUT an `error` field are silently
			// swallowed here and the function falls through to return
			// undefined — confirm this is intended rather than a rethrow.
		}
		if (block)
			await Asc.Editor.callMethod("EndAction", ["Block", "AI (" + this.modelUI.name + ")"]);
		return result;
	};
|
||||
|
||||
	// Core chat/completions call. Handles: chat-vs-completions endpoint
	// selection, token-budget chunking of oversized plain-text prompts
	// (multi-part "START PART i/n" protocol), proxy routing, and stripping of
	// <think> blocks from the answer. Throws {error, message} on failure.
	AI.Request.prototype._chatRequest = async function(content) {
		let provider = null;
		if (this.modelUI)
			provider = AI.Storage.getProvider(this.modelUI.provider);

		if (!provider) {
			throw {
				error : 1,
				message : "Please select the correct model for action."
			};
			return;
		}

		// Fall back to the legacy /completions endpoint only when the model
		// advertises Completions but not Chat_Completions.
		// NOTE(review): the loop breaks on the FIRST endpoint that matches
		// either type, so a Chat_Completions entry listed after a Completions
		// entry would be missed — confirm endpoint ordering guarantees this.
		let isUseCompletionsInsteadChat = false;
		if (this.model) {
			let isFoundChatCompletions = false;
			let isFoundCompletions = false;
			for (let i = 0, len = this.model.endpoints.length; i < len; i++) {
				if (this.model.endpoints[i] === AI.Endpoints.Types.v1.Chat_Completions) {
					isFoundChatCompletions = true;
					break;
				}
				if (this.model.endpoints[i] === AI.Endpoints.Types.v1.Completions) {
					isFoundCompletions = true;
					break;
				}
			}

			if (isFoundCompletions && !isFoundChatCompletions)
				isUseCompletionsInsteadChat = true;
		}

		let isNoSplit = false;
		let max_input_tokens = AI.InputMaxTokens["32k"];
		if (this.model && this.model.options && undefined !== this.model.options.max_input_tokens)
			max_input_tokens = this.model.options.max_input_tokens;

		// Token budget reserved for the multi-part header/footer wrappers.
		let header_footer_overhead = 500;
		// for test chunks: (dead debug branch, intentionally disabled)
		if (false) {
			max_input_tokens = 50;
			let header_footer_overhead = 0;
		}

		if (max_input_tokens < header_footer_overhead)
			max_input_tokens = header_footer_overhead + 1000;

		let headers = AI._getHeaders(provider);

		let isMessages = Array.isArray(content);

		// The legacy completions endpoint takes plain text: collapse a
		// messages array to its final user message.
		if (isUseCompletionsInsteadChat && isMessages) {
			content = content[content.length - 1].content;
			isMessages = false;
		}

		// Pre-built message arrays are never chunked.
		if (isMessages)
			isNoSplit = true;

		let input_len = content.length;
		let input_tokens = isMessages ? 0 : Asc.OpenAIEncode(content).length;

		let messages = [];
		if (input_tokens < max_input_tokens || isNoSplit) {
			messages.push(content);
		} else {
			// Split by characters, sized from the tokens-per-char ratio.
			let chunkLen = (((max_input_tokens - header_footer_overhead) / input_tokens) * input_len) >> 0;
			let currentLen = 0;
			while (currentLen != input_len) {
				let endSymbol = currentLen + chunkLen;
				if (endSymbol >= input_len)
					endSymbol = undefined;
				messages.push(content.substring(currentLen, endSymbol));
				if (undefined === endSymbol)
					currentLen = input_len;
				else
					currentLen = endSymbol;
			}
		}

		let objRequest = {
			headers : headers,
			method : "POST"
		};

		let endpointType = isUseCompletionsInsteadChat ? AI.Endpoints.Types.v1.Completions :
			AI.Endpoints.Types.v1.Chat_Completions;
		objRequest.url = AI._getEndpointUrl(provider, endpointType, this.model);

		let requestBody = {};
		// Extract the first answer string and strip a leading <think>…</think>
		// reasoning block (emitted by some reasoning models).
		// NOTE(review): called as a plain function below, so `this` is not the
		// request — `this.model` here is likely undefined at runtime; confirm
		// getChatCompletionsResult tolerates that.
		let processResult = function(data) {
			let result = provider.getChatCompletionsResult(data, this.model);
			if (result.content.length === 0)
				return "";

			if (0 === result.content[0].indexOf("<think>")) {
				let end = result.content[0].indexOf("</think>");
				if (end !== -1)
					result.content[0] = result.content[0].substring(end + 8);
			}

			return result.content[0];
		};

		if (1 === messages.length) {
			// Single-shot request.
			if (!isUseCompletionsInsteadChat) {
				if (isMessages)
					requestBody.messages = messages[0];
				else
					requestBody.messages = [{role:"user",content:messages[0]}];
				objRequest.body = provider.getChatCompletions(requestBody, this.model);
			} else {
				objRequest.body = provider.getCompletions({ text : messages[0] });
			}

			objRequest.isUseProxy = AI._extendBody(provider, objRequest.body);
			// NOTE(review): a truthy proxyUrlParam forces proxying for every
			// request regardless of provider settings — confirm intended.
			if (proxyUrlParam) {
				objRequest.body.target = provider.url;
				objRequest.isUseProxy = true;
			}


			let result = await requestWrapper(objRequest);
			if (result.error) {
				throw {
					error : result.error,
					message : result.message
				};
				return;
			} else {
				return processResult(result);
			}

		} else {

			// Multi-part protocol: chunks are sent one by one; the model is
			// told to only acknowledge until the final part arrives.
			// For "<task>: \"...\"" prompts, repeat the task after the last part.
			let lastFooterForOldModels = "";
			let indexTask = content.indexOf(": \"");
			if (-1 != indexTask && indexTask < 100) {
				lastFooterForOldModels = content.substring(0, indexTask);
			}

			function getHeader(part, partsCount) {
				let header = "[START PART " + part + "/" + partsCount + "]\n";
				if (part != partsCount) {
					header = "Do not answer yet. This is just another part of the text I want to send you. Just receive and acknowledge as \"Part " + part + "/" + partsCount + " received\" and wait for the next part.\n" + header;
				}
				return header;
			}

			function getFooter(part, partsCount) {
				let footer = "\n[END PART " + part + "/" + partsCount + "]\n";
				if (part != partsCount) {
					footer += "Remember not answering yet. Just acknowledge you received this part with the message \"Part " + part + "/" + partsCount + " received\" and wait for the next part.";
				} else {
					footer += "ALL PARTS SENT. Now you can continue processing the request." + lastFooterForOldModels;
				}
				return footer;
			}

			for (let i = 0, len = messages.length; i < len; i++) {

				let message = getHeader(i + 1, len) + messages[i] + getFooter(i + 1, len);
				if (!isUseCompletionsInsteadChat) {
					objRequest.body = provider.getChatCompletions({ messages : [{role:"user",content:message}] });
				} else {
					objRequest.body = provider.getCompletions( { text : message });
				}

				objRequest.isUseProxy = AI._extendBody(provider, objRequest.body);
				if (proxyUrlParam) {
					objRequest.body.target = provider.url;
					objRequest.isUseProxy = true;
				}

				let result = await requestWrapper(objRequest);
				if (result.error) {
					throw {
						error : result.error,
						message : result.message
					};
					return;
				} else if (i === (len - 1)) {
					// Only the final part's answer is returned; intermediate
					// acknowledgements are discarded.
					return processResult(result);
				}

			}
		}
	};
|
||||
|
||||
function normalizeImageSize(size) {
|
||||
let width = 0, height = 0;
|
||||
if (size.width > 750 || size.height > 750)
|
||||
width = height = 1024;
|
||||
else if (size.width > 375 || size.height > 350)
|
||||
width = height = 512;
|
||||
else
|
||||
width = height = 256;
|
||||
|
||||
return {width: width, height: height, str: width + 'x' + height}
|
||||
};
|
||||
|
||||
async function getImageBlob(base64)
|
||||
{
|
||||
return new Promise(function(resolve) {
|
||||
const image = new Image();
|
||||
image.onload = function() {
|
||||
const img_size = {width: image.width, height: image.height};
|
||||
const canvas_size = normalizeImageSize(img_size);
|
||||
const draw_size = canvas_size.width > image.width ? img_size : canvas_size;
|
||||
let canvas = document.createElement('canvas');
|
||||
canvas.width = canvas_size.width;
|
||||
canvas.height = canvas_size.height;
|
||||
canvas.getContext('2d').drawImage(image, 0, 0, draw_size.width, draw_size.height*image.height/image.width);
|
||||
canvas.toBlob(function(blob) {resolve({blob: blob, size: canvas_size, image_size :img_size})}, 'image/png');
|
||||
};
|
||||
image.src = img.src;
|
||||
});
|
||||
}
|
||||
|
||||
})(window);
|
||||
512
DocService/sources/ai/engine/library.js
Normal file
512
DocService/sources/ai/engine/library.js
Normal file
@ -0,0 +1,512 @@
|
||||
(function(exports, undefined)
|
||||
{
|
||||
let Editor = {};
|
||||
|
||||
Editor.callMethod = async function(name, args)
|
||||
{
|
||||
return new Promise(resolve => (function(){
|
||||
Asc.plugin.executeMethod(name, args || [], function(returnValue){
|
||||
resolve(returnValue);
|
||||
});
|
||||
})());
|
||||
};
|
||||
|
||||
Editor.callCommand = async function(func)
|
||||
{
|
||||
return new Promise(resolve => (function(){
|
||||
Asc.plugin.callCommand(func, false, true, function(returnValue){
|
||||
resolve(returnValue);
|
||||
});
|
||||
})());
|
||||
};
|
||||
|
||||
Editor.pause = async function(msec)
|
||||
{
|
||||
return new Promise(resolve => (function(){
|
||||
setTimeout(function(){
|
||||
resolve();
|
||||
}, msec);
|
||||
})());
|
||||
};
|
||||
|
||||
Editor.getType = function() {
|
||||
if (Asc.plugin.info.editorSubType === "pdf")
|
||||
return "pdf";
|
||||
return window.Asc.plugin.info.editorType;
|
||||
};
|
||||
|
||||
	exports.Asc = exports.Asc || {};
	exports.Asc.Editor = Editor;

	// High-level editor helpers shared by the AI plugin (exported below as
	// the Asc.Library singleton).
	function Library() {
		// Cached numeric editor version; 0 = not queried yet
		// (see GetEditorVersion).
		this.version = 0;
	}
|
||||
|
||||
	// markdown-it plugins used when rendering model answers.
	exports.Asc.PluginsMD = {
		// Adds LaTeX support: inline $...$ and block $$...$$ are turned into
		// <span> elements with oo-latex-inline / oo-latex classes that the
		// editor converts to equations on paste.
		latex: function(md) {
			// Inline: $...$
			md.inline.ruler.after("escape", "latex_inline", function(state, silent) {
				let start = state.pos;
				if (state.src[start] !== '$')
					return false;
				// "$$" at this position belongs to the block rule.
				if (state.src[start + 1] === '$')
					return false;

				// Scan for the closing '$', skipping escaped "\$".
				let content = "";
				let end = start + 1;
				while ((end = state.src.indexOf('$', end)) !== -1) {
					if (state.src.charCodeAt(end - 1) === 92/*\\*/) {
						end++;
						continue;
					}
					content = state.src.slice(start + 1, end);
					content = content.trim();
					break;
				}

				// Unclosed or empty formula: not a latex span.
				if (!content)
					return false;

				if (!silent) {
					let token = state.push("latex_inline", "span", 0);
					token.content = content;
					token.attrs = [["class", "oo-latex-inline"]];
				}

				state.pos = end + 1;
				return true;
			});
			md.renderer.rules.latex_inline = function(tokens, idx) {
				return `<span class="oo-latex-inline">${tokens[idx].content}</span>`;
			};

			// Block: $$...$$
			md.block.ruler.before("fence", "latex_block", function(state, startLine, endLine, silent) {
				let startPos = state.bMarks[startLine] + state.tShift[startLine];
				let maxPos = state.eMarks[startLine];
				let line = state.src.slice(startPos, maxPos).trim();

				if (!line.startsWith("$$"))
					return false;
				if (silent)
					return true;

				// Accumulate lines until a lone "$$" terminator.
				let content = "";
				let found = false;

				for (let i = startLine + 1; i < endLine; i++) {
					let pos = state.bMarks[i] + state.tShift[i];
					let max = state.eMarks[i];
					let nextLine = state.src.slice(pos, max).trim();

					if (nextLine === "$$") {
						found = true;
						state.line = i + 1;
						break;
					}

					content += nextLine + "\n";
				}

				if (!found) return false;

				const token = state.push("latex_block", "span", 0);
				token.block = true;
				token.content = content.trim();
				token.attrs = [["class", "oo-latex"]];
				token.map = [startLine, state.line];

				return true;
			});
			md.renderer.rules.latex_block = function(tokens, idx) {
				return `<span class="oo-latex">${tokens[idx].content}</span>\n`;
			};
		}
	};
|
||||
|
||||
function decodeHtmlText(text) {
|
||||
return text
|
||||
.replace(/"/g, '"')
|
||||
.replace(/'/g, "'")
|
||||
.replace(/&/g, '&')
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/ /g, ' ');
|
||||
}
|
||||
|
||||
Library.prototype.GetEditorVersion = async function()
|
||||
{
|
||||
if (this.version !== 0)
|
||||
return this.version;
|
||||
|
||||
let version = await Editor.callMethod("GetVersion");
|
||||
if ("develop" == version)
|
||||
version = "99.99.99";
|
||||
|
||||
let arrVer = version.split(".");
|
||||
while (3 > arrVer.length)
|
||||
arrVer.push("0");
|
||||
|
||||
this.version = 1000000 * parseInt(arrVer[0]) + 1000 * parseInt(arrVer[1]) + parseInt(arrVer[2]);
|
||||
return this.version;
|
||||
};
|
||||
|
||||
Library.prototype.GetCurrentWord = async function()
|
||||
{
|
||||
return await Editor.callMethod("GetCurrentWord");
|
||||
};
|
||||
|
||||
Library.prototype.GetSelectedText = async function()
|
||||
{
|
||||
let result = await Editor.callMethod("GetSelectedText");
|
||||
if (result !== "")
|
||||
return result;
|
||||
|
||||
return this.GetSelectedContent("text");
|
||||
};
|
||||
|
||||
Library.prototype.GetSelectedContent = async function(type) {
|
||||
return await Editor.callMethod("GetSelectedContent", [{ type : type }]);
|
||||
};
|
||||
|
||||
Library.prototype.GetSelectedImage = async function(type) {
|
||||
let res = await Editor.callMethod("GetSelectedContent", [{ type : "html" }]);
|
||||
let index1 = res.indexOf("src=\"data:image/");
|
||||
if (-1 === index1)
|
||||
return "";
|
||||
index1 += 5;
|
||||
let index2 = res.indexOf("\"", index1);
|
||||
if (-1 === index2)
|
||||
return "";
|
||||
return res.substring(index1, index2);
|
||||
};
|
||||
|
||||
Library.prototype.ReplaceTextSmart = async function(text)
|
||||
{
|
||||
return await Editor.callMethod("ReplaceTextSmart", [text]);
|
||||
};
|
||||
|
||||
	// Append the text to the document as plain paragraphs, one per
	// double-newline-separated chunk. Data travels via Asc.scope because the
	// callCommand body is serialized and executed in the editor context.
	Library.prototype.InsertAsText = async function(text)
	{
		Asc.scope.data = (text || "").split("\n\n");
		return await Editor.callCommand(function() {
			let oDocument = Api.GetDocument();
			for (let ind = 0; ind < Asc.scope.data.length; ind++) {
				let text = Asc.scope.data[ind];
				// Empty chunks (consecutive blank lines) are skipped.
				if (text.length) {
					let oParagraph = Api.CreateParagraph();
					oParagraph.AddText(text);
					oDocument.Push(oParagraph);
				}
			}
		});
	};
|
||||
|
||||
Library.prototype.InsertAsMD = async function(data, plugins)
|
||||
{
|
||||
let htmlContent = Asc.Library.ConvertMdToHTML(data, plugins)
|
||||
return await Asc.Library.InsertAsHTML(htmlContent);
|
||||
};
|
||||
|
||||
Library.prototype.ConvertMdToHTML = function(data, plugins)
|
||||
{
|
||||
let c = window.markdownit();
|
||||
if (plugins) {
|
||||
for (let i = 0, len = plugins.length; i < len; i++)
|
||||
c.use(plugins[i]);
|
||||
}
|
||||
return c.render(this.getMarkdownResult(data));
|
||||
};
|
||||
|
||||
	// Paste HTML at the cursor. In text documents the selection is removed
	// first so the paste does not replace selected content.
	Library.prototype.InsertAsHTML = async function(data)
	{
		switch (Asc.Editor.getType()) {
			// NOTE(review): this case intentionally(?) falls through to
			// `default: break` — harmless, but confirm a `break` wasn't meant.
			case "word": {
				// NOTE(review): `if (true)` keeps the remove-selection path;
				// the else branch (move cursor to document end) is dead code —
				// presumably an alternative insertion mode kept for reference.
				if (true) {
					await Editor.callCommand(function() {
						let document = Api.GetDocument();
						document.RemoveSelection();
					}, false);
				} else {
					await Editor.callCommand(function() {
						let doc = Api.GetDocument();
						let paras = doc.GetAllParagraphs();
						if (paras.length)
						{
							let lastPara = paras[paras.length - 1];
							let lastElement = lastPara.GetElement(lastPara.GetElementsCount() - 1);
							if (lastElement && lastElement.MoveCursorToPos)
							{
								lastElement.MoveCursorToPos(100000);
							}
						}
					});
				}
			}
			default:
				break;
		}
		return await Editor.callMethod("PasteHtml", [data]);
	};
|
||||
|
||||
Library.prototype.InsertAsComment = async function(text)
|
||||
{
|
||||
return await Editor.callMethod("AddComment", [{
|
||||
UserName : "AI",
|
||||
Text : decodeHtmlText(text),
|
||||
Time: Date.now(),
|
||||
Solver: false
|
||||
}]);
|
||||
};
|
||||
|
||||
Library.prototype.InsertAsHyperlink = async function(content, hint)
|
||||
{
|
||||
let text = content;
|
||||
start = text.indexOf('htt');
|
||||
end = text.indexOf(' ', start);
|
||||
if (end == -1)
|
||||
end = text.length;
|
||||
|
||||
Asc.scope.link = text.slice(start, end);
|
||||
return await Editor.callCommand(function(){
|
||||
let oDocument = Api.GetDocument();
|
||||
let oRange = oDocument.GetRangeBySelect();
|
||||
oRange.AddHyperlink(Asc.scope.link, "Meaning of the word");
|
||||
});
|
||||
};
|
||||
|
||||
	// Paste content as tracked changes: temporarily enables local track
	// revisions, pastes, then restores the previous setting.
	Library.prototype.InsertAsReview = async function(content, isHtml)
	{
		// Read the current track-revisions flag and switch it on atomically
		// inside the editor context.
		let isTrackRevisions = await Editor.callCommand(function(){
			let res = Api.asc_GetLocalTrackRevisions();
			Api.asc_SetLocalTrackRevisions(true);
			return res;
		});

		// Stash for the (serialized) restore command below.
		Asc.scope.localTrackRevisions = isTrackRevisions;

		await Editor.callMethod(isHtml ? "PasteHtml" : "PasteText", [content.trim()]);

		// Restore only when tracking wasn't already on.
		if (true !== isTrackRevisions)
		{
			await Editor.callCommand(function(){
				Api.asc_SetLocalTrackRevisions(Asc.scope.localTrackRevisions);
			});
		}
	};
|
||||
|
||||
Library.prototype.PasteText = async function(text)
|
||||
{
|
||||
return await Editor.callMethod("PasteText", [text]);
|
||||
};
|
||||
|
||||
Library.prototype.SendError = async function(text, errorLevel)
|
||||
{
|
||||
Asc.scope.errorText = text;
|
||||
Asc.scope.errorLevel = errorLevel;
|
||||
return await Editor.callCommand(function(){
|
||||
Api.sendEvent("asc_onError", Asc.scope.errorText, Asc.scope.errorLevel);
|
||||
});
|
||||
};
|
||||
|
||||
Library.prototype.GetLocalImagePath = async function(url) {
|
||||
return await Editor.callMethod("getLocalImagePath", [url]);
|
||||
};
|
||||
|
||||
Library.prototype.AddGeneratedImage = async function(base64) {
|
||||
let editorVersion = await Asc.Library.GetEditorVersion();
|
||||
|
||||
if (Asc.Editor.getType() === "pdf") {
|
||||
return await Editor.callMethod("PasteHtml", ["<img src=\"" + base64 + "\" />"]);
|
||||
}
|
||||
|
||||
if (editorVersion >= 9000000) {
|
||||
let urlLocal = await this.GetLocalImagePath(base64);
|
||||
if (urlLocal.error === true)
|
||||
return;
|
||||
|
||||
Asc.scope.url = urlLocal.url;
|
||||
} else {
|
||||
Asc.scope.url = url;
|
||||
}
|
||||
|
||||
switch (window.Asc.plugin.info.editorType) {
|
||||
case "word": {
|
||||
return await Editor.callCommand(function() {
|
||||
let document = Api.GetDocument();
|
||||
let paragraph = Api.CreateParagraph();
|
||||
let drawing = Api.CreateImage(Asc.scope.url, 100 * 36000, 100 * 36000);
|
||||
paragraph.AddDrawing(drawing);
|
||||
document.RemoveSelection();
|
||||
document.InsertContent([paragraph], true);
|
||||
}, false);
|
||||
}
|
||||
case "cell": {
|
||||
return await Editor.callCommand(function() {
|
||||
let worksheet = Api.GetActiveSheet();
|
||||
worksheet.AddImage(Asc.scope.url, 100 * 36000, 100 * 36000, 0, 2 * 36000, 2, 3 * 36000);
|
||||
}, false);
|
||||
}
|
||||
case "slide": {
|
||||
return await Editor.callCommand(function() {
|
||||
let presentation = Api.GetPresentation();
|
||||
let slide = presentation.GetCurrentSlide();
|
||||
let image = Api.CreateImage(Asc.scope.url, 150 * 36000, 150 * 36000);
|
||||
slide.AddObject(image);
|
||||
}, false);
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
Library.prototype.AddOleObject = async function(imageUrl, data) {
|
||||
switch (window.Asc.plugin.info.editorType) {
|
||||
case "word": {
|
||||
await Editor.callCommand(function(){
|
||||
let document = Api.GetDocument();
|
||||
document.RemoveSelection();
|
||||
});
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
let W = 100;
|
||||
let H = 100;
|
||||
|
||||
let info = window.Asc.plugin.info;
|
||||
var obj = {
|
||||
guid : info.guid,
|
||||
widthPix : info.mmToPx * W,
|
||||
heightPix : info.mmToPx * H,
|
||||
width : W,
|
||||
height : H,
|
||||
imgSrc : imageUrl,
|
||||
data : data
|
||||
};
|
||||
|
||||
return await Editor.callMethod("AddOleObject", [obj]);
|
||||
};
|
||||
|
||||
Library.prototype.trimResult = function(data, posStart, isSpaces, extraCharacters) {
|
||||
let pos = posStart || 0;
|
||||
if (-1 != pos) {
|
||||
let trimC = ["\"", "'", "\n", "\r", "`"];
|
||||
if (true === isSpaces)
|
||||
trimC.push(" ");
|
||||
while (pos < data.length && trimC.includes(data[pos]))
|
||||
pos++;
|
||||
|
||||
let posEnd = data.length - 1;
|
||||
while (posEnd > 0 && trimC.includes(data[posEnd]))
|
||||
posEnd--;
|
||||
|
||||
if (posEnd > pos)
|
||||
return data.substring(pos, posEnd + 1);
|
||||
}
|
||||
return data;
|
||||
};
|
||||
|
||||
Library.prototype.getTranslateResult = function(data, dataSrc) {
|
||||
data = this.trimResult(data, 0, true);
|
||||
let trimC = ["\"", "'", "\n", "\r", " "];
|
||||
if (dataSrc.length > 0 && trimC.includes(dataSrc[0])) {
|
||||
data = dataSrc[0] + data;
|
||||
}
|
||||
if (dataSrc.length > 1 && trimC.includes(dataSrc[dataSrc.length - 1])) {
|
||||
data = data + dataSrc[dataSrc.length - 1];
|
||||
}
|
||||
return data;
|
||||
};
|
||||
|
||||
Library.prototype.getMarkdownResult = function(data) {
|
||||
let markdownEscape = data.indexOf("```md");
|
||||
if (-1 !== markdownEscape && markdownEscape < 5)
|
||||
data = data.substring(markdownEscape + 5);
|
||||
return this.trimResult(data);
|
||||
};
|
||||
|
||||
	exports.Asc = exports.Asc || {};
	// Singleton instance; the rest of the plugin calls Asc.Library directly.
	exports.Asc.Library = new Library();
|
||||
|
||||
	// Canned prompt builders for the built-in AI actions. The strings are
	// runtime behavior and are kept byte-for-byte.
	// NOTE(review): several builders concatenate sentences without a
	// separating space/newline (e.g. "...to <language>Return only..." and
	// "...resulting text.Text: ...") — confirm whether separators were
	// intended before changing any prompt text.
	exports.Asc.Prompts = {
		getFixAndSpellPrompt(content) {
			let prompt = `I want you to act as an editor and proofreader. \
I will provide you with some text that needs to be checked for spelling and grammar errors. \
Your task is to carefully review the text and correct any mistakes, \
ensuring that the corrected text is free of errors and maintains the original meaning. \
Only return the corrected text. \
Here is the text that needs revision: \"${content}\"`;
			return prompt;
		},
		getSummarizationPrompt(content, language) {
			let prompt = "Summarize the following text. ";
			if (language) {
				prompt += "and translate the result to " + language;
				prompt += "Return only the resulting translated text.";
			} else {
				prompt += "Return only the resulting text.";
			}
			prompt += "Text: \"\"\"\n";
			prompt += content;
			prompt += "\n\"\"\"";
			return prompt;
		},
		getTranslatePrompt(content, language) {
			let prompt = "Translate the following text to " + language;
			prompt += ". Return only the resulting text.";
			prompt += "Text: \"\"\"\n";
			prompt += content;
			prompt += "\n\"\"\"";
			return prompt;
		},
		getExplainPrompt(content) {
			let prompt = "Explain what the following text means. Return only the resulting text.";
			prompt += "Text: \"\"\"\n";
			prompt += content;
			prompt += "\n\"\"\"";
			return prompt;
		},
		getTextLongerPrompt(content) {
			let prompt = "Make the following text longer. Return only the resulting text.";
			prompt += "Text: \"\"\"\n";
			prompt += content;
			prompt += "\n\"\"\"";
			return prompt;
		},
		// NOTE(review): named "Shorter" but the prompt asks for "simpler" —
		// confirm which behavior is intended.
		getTextShorterPrompt(content) {
			let prompt = "Make the following text simpler. Return only the resulting text.";
			prompt += "Text: \"\"\"\n";
			prompt += content;
			prompt += "\n\"\"\"";
			return prompt;
		},
		getTextRewritePrompt(content) {
			let prompt = "Rewrite the following text differently. Return only the resulting text.";
			prompt += "Text: \"\"\"\n";
			prompt += content;
			prompt += "\n\"\"\"";
			return prompt;
		},
		getTextKeywordsPrompt(content) {
			let prompt = `Get Key words from this text: "${content}"`;
			return prompt;
		},
		getExplainAsLinkPrompt(content) {
			let prompt = "Give a link to the explanation of the following text. Return only the resulting link.";
			prompt += "Text: \"\"\"\n";
			prompt += content;
			prompt += "\n\"\"\"";
			return prompt;
		},
		getImageDescription() {
			return "Describe in detail everything you see in this image. Mention the objects, their appearance, colors, arrangement, background, and any noticeable actions or interactions. Be as specific and accurate as possible. Avoid making assumptions about things that are not clearly visible."
		},
		getImagePromptOCR() {
			return "Extract all text from this image as accurately as possible. Preserve original reading order and formatting if possible. Recognize tables and images if possible. Do not add or remove any content. Output recognized objects in md format if possible. If not, return plain text.";
		}
	};
|
||||
|
||||
})(window);
|
||||
--- new file: DocService/sources/ai/engine/local_storage.js (224 lines) ---
|
||||
// Persistent provider/model storage for the AI engine.
// Runs as an IIFE over `window` (closed by `})(window);` at the end of the
// file); the `undefined` parameter is the classic guaranteed-undefined idiom.
(function(exports, undefined)
{
// Shared AI namespace.
exports.AI = exports.AI || {};
var AI = exports.AI;

// Optional server-supplied defaults used when localStorage cannot be read.
AI.DEFAULT_SERVER_SETTINGS = null;

// localStorage key under which all engine settings are persisted.
var localStorageKey = "onlyoffice_ai_plugin_storage_key";

// Map: provider name -> provider instance.
AI.Providers = {};
|
||||
|
||||
// Returns a plain-object snapshot (name/url/key/models) of every named
// provider, suitable for JSON serialization. Unnamed entries are skipped.
AI.serializeProviders = function() {
  const result = [];
  for (const key in AI.Providers) {
    const provider = AI.Providers[key];
    if (!provider.name)
      continue;
    result.push({
      name   : provider.name,
      url    : provider.url,
      key    : provider.key,
      models : provider.models
    });
  }
  return result;
};
|
||||
|
||||
// Flat list of configured models (AI.UI.Model records), persisted by AI.Storage.
AI.Models = [];
|
||||
|
||||
// Persists providers, models and custom-provider sources to localStorage and
// fires this.onChangeStorage on success.
// Returns true on success, false if serialization or storage access fails.
// NOTE(review): AI.Storage (and AI.Storage.Version) must be defined elsewhere
// before this file runs — verify load order.
AI.Storage.save = function() {
  try {
    const snapshot = {
      version         : AI.Storage.Version,
      providers       : {},
      models          : AI.Models,
      customProviders : AI.InternalCustomProvidersSources
    };

    for (const key in AI.Providers) {
      const source = AI.Providers[key];
      snapshot.providers[key] = {
        name   : source.name,
        url    : source.url,
        key    : source.key,
        models : source.models
      };
    }

    window.localStorage.setItem(localStorageKey, JSON.stringify(snapshot));

    if (this.onChangeStorage)
      this.onChangeStorage();
    return true;
  }
  catch (e) {
    // Storage may be unavailable (e.g. privacy mode); failure is reported
    // via the return value rather than thrown.
  }
  return false;
};
|
||||
|
||||
// Restores providers/models from localStorage, migrating old storage versions.
// Returns true if a usable settings object was found, false otherwise.
AI.Storage.load = function() {
  let obj = null;
  try {
    // NOTE(review): getItem() returning null yields JSON.parse(null) === null
    // without throwing, so the DEFAULT_SERVER_SETTINGS fallback below only
    // runs on malformed JSON or storage access errors — confirm that is the
    // intended first-launch behavior.
    obj = JSON.parse(window.localStorage.getItem(localStorageKey));
  } catch (e) {
    obj = AI.DEFAULT_SERVER_SETTINGS;

    if (obj) {
      // Stamp server defaults with the current storage version.
      AI.DEFAULT_SERVER_SETTINGS.version = AI.Storage.Version;
    }
  }

  if (obj) {
    let fixVersion2 = false;
    // Storage-format migration: v1 (and unversioned) data is discarded;
    // v2 data gets the "/v1" url addon fix applied below.
    switch (obj.version)
    {
      case undefined:
      case 1:
        obj = null;
        break;
      case 2:
        // redesign provider url: add /v1
        fixVersion2 = true;
        break;
      case 3:
      default:
        break;
    }

    if (obj) {
      let oldProviders = AI.Providers;
      AI.Providers = {};

      // Re-instantiate stored custom providers first.
      AI.InternalCustomProvidersSources = obj.customProviders || {};
      AI.loadCustomProviders();

      for (let i = 0, len = AI.InternalCustomProviders.length; i < len; i++) {
        let pr = AI.InternalCustomProviders[i];
        oldProviders[pr.name] = pr;
      }

      // NOTE(review): `name` is not declared in this function, so it resolves
      // to the global window.name here — this splice loop looks like leftover
      // copy-paste from add/removeCustomProvider; verify intent.
      for (let i = 0, len = AI.InternalCustomProviders.length; i < len; i++) {
        if (AI.InternalCustomProviders[i].name === name) {
          AI.InternalCustomProviders.splice(i, 1);
          break;
        }
      }

      // Recreate provider instances from the stored plain objects.
      for (let i in obj.providers) {
        let pr = obj.providers[i];
        AI.Providers[i] = AI.createProviderInstance(pr.name, pr.url, pr.key, pr.addon);
        AI.Providers[i].models = pr.models || [];

        if (fixVersion2) {
          // v2 -> v3 migration: external providers gain the "v1" url addon.
          if (!AI.isInternalProvider(pr.name))
            AI.Providers[i].addon = "v1";
        }
      }

      // Keep any pre-existing providers that were not present in storage.
      for (let pr in oldProviders)
      {
        if (!AI.Providers[pr])
          AI.Providers[pr] = oldProviders[pr];
      }

      AI.Models = obj.models;
    }

    return true;
  }
  return false;
};
|
||||
|
||||
// Registers or updates a model (and its provider) and persists the result.
// model: { id, name, capabilities, provider: { name, url, key } }.
AI.Storage.addModel = function(model) {

  // Create the provider if unknown, otherwise refresh its credentials/url.
  if (AI.Providers[model.provider.name]) {
    AI.Providers[model.provider.name].name = model.provider.name;
    AI.Providers[model.provider.name].url = model.provider.url;
    AI.Providers[model.provider.name].key = model.provider.key;
  } else {
    AI.Providers[model.provider.name] =
      AI.createProviderInstance(model.provider.name, model.provider.url, model.provider.key);
  }

  // Adopt the model list fetched into the temporary provider, if it matches.
  // NOTE(review): AI.TmpProviderForModels is populated elsewhere — verify.
  if (AI.TmpProviderForModels &&
    model.provider.name === AI.TmpProviderForModels.name &&
    AI.TmpProviderForModels.models.length > 0) {
    AI.Providers[model.provider.name].models = AI.TmpProviderForModels.models;
  }

  // Update every existing entry with this id, or append a new one.
  let isFoundModel = false;
  for (let i = 0, len = AI.Models.length; i < len; i++)
  {
    if (AI.Models[i].id === model.id)
    {
      AI.Models[i].provider = model.provider.name;
      AI.Models[i].name = model.name;
      AI.Models[i].capabilities = model.capabilities;
      isFoundModel = true;
    }
  }

  if (!isFoundModel)
    // Unspecified capabilities default to "everything enabled".
    AI.Models.push(new AI.UI.Model(model.name, model.id, model.provider.name,
      model.capabilities === undefined ? AI.CapabilitiesUI.All : model.capabilities));

  this.save();
};
|
||||
|
||||
// Removes the first model whose id matches modelId and persists the change.
// No-op (and no save) when the id is unknown.
AI.Storage.removeModel = function(modelId) {
  const index = AI.Models.findIndex(function(m) { return m.id === modelId; });
  if (index !== -1) {
    AI.Models.splice(index, 1);
    this.save();
  }
};
|
||||
|
||||
// Returns the first registered model with the given display name, or null.
AI.Storage.getModelByName = function(name) {
  for (const model of AI.Models) {
    if (model.name === name)
      return model;
  }
  return null;
};
|
||||
|
||||
// Returns the first registered model with the given id, or null.
AI.Storage.getModelById = function(id) {
  for (const model of AI.Models) {
    if (model.id === id)
      return model;
  }
  return null;
};
|
||||
|
||||
// Returns plain-object snapshots (name/id/provider/capabilities) of every
// model that has an id, suitable for JSON serialization.
AI.Storage.serializeModels = function() {
  const result = [];
  for (const model of AI.Models) {
    if (!model.id)
      continue;
    result.push({
      name         : model.name,
      id           : model.id,
      provider     : model.provider,
      capabilities : model.capabilities,
    });
  }
  return result;
};
|
||||
|
||||
// Returns the provider registered under the given name, or null if absent.
AI.Storage.getProvider = function(name) {
  return AI.Providers[name] || null;
};
|
||||
|
||||
// Registers every built-in provider into the provider map, then restores
// persisted settings (which may update or extend that map).
AI.onLoadInternalProviders = function() {
  AI.InternalProviders.forEach(function(provider) {
    AI.Providers[provider.name] = provider;
  });
  AI.Storage.load();
};
|
||||
|
||||
})(window);
|
||||
--- new file: DocService/sources/ai/engine/providers/base.js (247 lines) ---
|
||||
"use strict";
|
||||
|
||||
(function(){
|
||||
|
||||
window.AI = window.AI || {};
|
||||
var AI = window.AI;
|
||||
|
||||
// Tokens
// Named context-window sizes (in tokens) selectable in the UI.
// NOTE(review): the Anthropic provider references AI.InputMaxTokens["100k"],
// which is not defined here and therefore evaluates to undefined — verify.
AI.InputMaxTokens = {
  "4k" : 4096,
  "8k" : 8192,
  "16k" : 16384,
  "32k" : 32768,
  "64k" : 65536,
  "128k" : 131072,
  "200k" : 204800,
  "256k" : 262144
};

// Capture the size names before extra members are attached below, so `keys`
// lists only the real sizes.
let keys = [];
for (let i in AI.InputMaxTokens)
  keys.push(i);

AI.InputMaxTokens.keys = keys;
// Returns the largest predefined size <= value, or undefined when value is
// below the smallest size. Relies on `keys` being in ascending numeric order.
AI.InputMaxTokens.getFloor = function(value) {
  let result = undefined;
  for (let i = 0, len = AI.InputMaxTokens.keys.length; i < len; i++) {
    if (AI.InputMaxTokens[AI.InputMaxTokens.keys[i]] <= value)
      result = AI.InputMaxTokens[AI.InputMaxTokens.keys[i]];
  }
  return result;
};
|
||||
|
||||
// UI
AI.UI = AI.UI || {};

// Record describing a model shown in the UI.
// capabilities: bitmask of AI.CapabilitiesUI flags; provider: provider name.
AI.UI.Model = function(name, id, provider, capabilities) {
  this.capabilities = capabilities || AI.CapabilitiesUI.None;
  this.provider = provider || "";
  this.name = name || "";
  this.id = id || "";
};

// Record describing a provider's connection settings (API key and base url).
AI.UI.Provider = function(name, key, url) {
  this.name = name || "";
  this.key = key || "";
  this.url = url || "";
};

// Record describing a UI action bound to a model.
AI.UI.Action = function(name, icon, model) {
  this.name = name || "";
  this.icon = icon || "";
  this.model = model || "";
};
|
||||
|
||||
// Endpoints
// Numeric identifiers for provider REST endpoints, grouped by API generation.
AI.Endpoints = {

  Types : {

    Undefined : -1,

    v1 : {

      Models : 0x00,

      Chat_Completions : 0x01,
      Completions : 0x02,

      Images_Generations : 0x11,
      Images_Edits : 0x12,
      // NOTE: the "Variarions" misspelling is part of the public identifier
      // used by provider classes — do not rename without updating all callers.
      Images_Variarions : 0x13,

      Embeddings : 0x21,

      Audio_Transcriptions : 0x31,
      Audio_Translations : 0x32,
      Audio_Speech : 0x33,

      Moderations : 0x41,

      Realtime : 0x51,

      Language : 0x61,
      Code : 0x62,

      OCR : 0x70
    }

  }
};
|
||||
|
||||
// Bit flags describing which UI features a model supports.
AI.CapabilitiesUI = {

  None : 0x00,

  Chat : 0x01,

  Image : 0x02,

  Embeddings : 0x04,

  Audio : 0x08,

  Moderations : 0x10,

  Realtime : 0x20,

  Code : 0x40,

  Vision : 0x80

};

// "All" is the OR of every flag above (None contributes nothing).
let capabilitiesAll = 0;
for (let item in AI.CapabilitiesUI)
  capabilitiesAll |= AI.CapabilitiesUI[item];
AI.CapabilitiesUI.All = capabilitiesAll;
|
||||
|
||||
// Built-in provider instances (filled by loadInternalProviders).
AI.InternalProviders = [];
// Creates a provider instance by name: custom providers take precedence over
// built-in ones; unknown names fall back to a generic AI.Provider.
// addon (optional) overrides the provider's default url path addon.
AI.createProviderInstance = function(name, url, key, addon) {
  const registries = [AI.InternalCustomProviders, AI.InternalProviders];
  for (const registry of registries) {
    for (const candidate of registry) {
      if (candidate.name === name)
        return candidate.createInstance(name, url, key, addon || candidate.addon);
    }
  }
  return new AI.Provider(name, url, key);
};
|
||||
|
||||
// Returns true when the given name belongs to a built-in provider.
AI.isInternalProvider = function(name) {
  return AI.InternalProviders.some(function(provider) {
    return provider.name === name;
  });
};
|
||||
|
||||
// Loads the built-in provider scripts listed in config.json, instantiates
// each, and finishes via AI.onLoadInternalProviders().
// SECURITY NOTE: provider sources are executed via eval(); they must only
// ever come from the bundled ./scripts/engine/providers directory.
AI.loadInternalProviders = async function() {
  let providersText = await AI.loadResourceAsText("./scripts/engine/providers/config.json");
  if ("" === providersText)
    return;

  try {
    let providers = JSON.parse(providersText);
    for (let i = 0, len = providers.length; i < len; i++) {
      let providerContent = await AI.loadResourceAsText("./scripts/engine/providers/internal/" + providers[i] + ".js");
      if (providerContent !== "") {
        // Wrap the source so its `Provider` class stays scoped, then instantiate.
        let content = "(function(){\n" + providerContent + "\nreturn new Provider();})();";
        let provider = eval(content);

        // Desktop-only providers are skipped outside AscDesktopEditor.
        if (provider.isOnlyDesktop() && (-1 === navigator.userAgent.indexOf("AscDesktopEditor")))
          continue;

        window.AI.InternalProviders.push(provider);
      }
    }
  } catch(err) {
    // NOTE(review): parse/eval failures are silently ignored — consider logging.
  }

  AI.onLoadInternalProviders();
};
|
||||
|
||||
// Source text of user-added custom providers, keyed by provider name (persisted).
AI.InternalCustomProvidersSources = {};
// Instantiated custom provider objects, rebuilt from the sources above.
AI.InternalCustomProviders = [];

// Re-instantiates every stored custom provider source (isRegister=true skips
// re-saving storage while loading).
AI.loadCustomProviders = function() {

  AI.InternalCustomProviders = [];
  for (let name in AI.InternalCustomProvidersSources) {
    AI.addCustomProvider(AI.InternalCustomProvidersSources[name], true);
  }

};
|
||||
|
||||
// Compiles and registers a custom provider from its source text.
// providerContent: JS source defining a `Provider` class.
// isRegister: true while re-loading persisted sources (skips save/load cycle).
// Returns true on success; false if the source is invalid, unnamed, or
// desktop-only outside the desktop editor.
// SECURITY NOTE: the source is executed via eval(); callers must pass only
// trusted provider code.
AI.addCustomProvider = function(providerContent, isRegister) {

  try {
    // Wrap the source so its `Provider` class stays scoped, then instantiate.
    let content = "(function(){\n" + providerContent + "\nreturn new Provider();})();";
    let provider = eval(content);

    if (!provider.name)
      return false;

    // Desktop-only providers are rejected outside AscDesktopEditor.
    if (provider.isOnlyDesktop() && (-1 === navigator.userAgent.indexOf("AscDesktopEditor")))
      return false;

    AI.InternalCustomProvidersSources[provider.name] = providerContent;

    // Replace any previously registered custom provider with the same name.
    for (let i = 0, len = AI.InternalCustomProviders.length; i < len; i++) {
      if (AI.InternalCustomProviders[i].name === provider.name) {
        AI.InternalCustomProviders.splice(i, 1);
        break;
      }
    }

    AI.InternalCustomProviders.push(provider);

    // Persist and re-apply settings unless called from loadCustomProviders.
    if (!isRegister)
    {
      AI.Storage.save();
      AI.Storage.load();
    }

    return true;

  } catch(err) {
    // NOTE(review): compile errors are silently ignored — consider surfacing them.
  }

  return false;

};
|
||||
|
||||
// Unregisters the custom provider with the given name: drops its stored
// source, removes its instance (and its provider-map entry when it does not
// shadow a built-in provider), then persists and re-applies settings.
AI.removeCustomProvider = function(name) {

  if (AI.InternalCustomProvidersSources[name])
    delete AI.InternalCustomProvidersSources[name];

  const index = AI.InternalCustomProviders.findIndex(function(provider) {
    return provider.name === name;
  });
  if (index === -1)
    return;

  AI.InternalCustomProviders.splice(index, 1);

  if (!AI.isInternalProvider(name) && AI.Providers[name]) {
    delete AI.Providers[name];
  }

  AI.Storage.save();
  AI.Storage.load();

};
|
||||
|
||||
// Returns the names of all registered custom providers.
AI.getCustomProviders = function() {
  return AI.InternalCustomProviders.map(function(provider) {
    return provider.name;
  });
};
|
||||
|
||||
})();
|
||||
--- new file: DocService/sources/ai/engine/providers/config.json (13 lines) ---
|
||||
[
|
||||
"openai",
|
||||
"anthropic",
|
||||
"google-gemini",
|
||||
"deepseek",
|
||||
"together.ai",
|
||||
"groq",
|
||||
"ollama",
|
||||
"mistral",
|
||||
"gpt4all",
|
||||
"xAI",
|
||||
"stabilityai"
|
||||
]
|
||||
--- new file (38 lines): Anthropic model-list fixture (JSON); filename not shown in this view ---
|
||||
[
|
||||
{
|
||||
"type": "model",
|
||||
"id": "claude-3-7-sonnet-20250219",
|
||||
"display_name": "Claude 3.7 Sonnet",
|
||||
"created_at": "2025-02-24T00:00:00Z"
|
||||
},
|
||||
{
|
||||
"type": "model",
|
||||
"id": "claude-3-5-sonnet-20241022",
|
||||
"display_name": "Claude 3.5 Sonnet (New)",
|
||||
"created_at": "2024-10-22T00:00:00Z"
|
||||
},
|
||||
{
|
||||
"type": "model",
|
||||
"id": "claude-3-5-haiku-20241022",
|
||||
"display_name": "Claude 3.5 Haiku",
|
||||
"created_at": "2024-10-22T00:00:00Z"
|
||||
},
|
||||
{
|
||||
"type": "model",
|
||||
"id": "claude-3-5-sonnet-20240620",
|
||||
"display_name": "Claude 3.5 Sonnet (Old)",
|
||||
"created_at": "2024-06-20T00:00:00Z"
|
||||
},
|
||||
{
|
||||
"type": "model",
|
||||
"id": "claude-3-haiku-20240307",
|
||||
"display_name": "Claude 3 Haiku",
|
||||
"created_at": "2024-03-07T00:00:00Z"
|
||||
},
|
||||
{
|
||||
"type": "model",
|
||||
"id": "claude-3-opus-20240229",
|
||||
"display_name": "Claude 3 Opus",
|
||||
"created_at": "2024-02-29T00:00:00Z"
|
||||
}
|
||||
]
|
||||
--- new file: DocService/sources/ai/engine/providers/internal/anthropic.js (100 lines) ---
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {
|
||||
|
||||
constructor() {
|
||||
super("Anthropic", "https://api.anthropic.com", "", "v1");
|
||||
}
|
||||
|
||||
checkModelCapability = function(model) {
|
||||
if (0 == model.id.indexOf("claude-2"))
|
||||
{
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["100k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
|
||||
return AI.CapabilitiesUI.Chat;
|
||||
}
|
||||
|
||||
if (0 == model.id.indexOf("claude-3-5-haiku"))
|
||||
{
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["200k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
|
||||
return AI.CapabilitiesUI.Chat;
|
||||
}
|
||||
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["200k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
|
||||
return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
|
||||
}
|
||||
|
||||
getEndpointUrl(endpoint, model) {
|
||||
switch (endpoint)
|
||||
{
|
||||
case AI.Endpoints.Types.v1.Chat_Completions:
|
||||
case AI.Endpoints.Types.v1.Images_Generations:
|
||||
case AI.Endpoints.Types.v1.Images_Edits:
|
||||
case AI.Endpoints.Types.v1.Images_Variarions:
|
||||
{
|
||||
return "/messages";
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
return super.getEndpointUrl(endpoint, model);
|
||||
}
|
||||
|
||||
getRequestBodyOptions() {
|
||||
return {
|
||||
max_tokens : 4096
|
||||
};
|
||||
}
|
||||
|
||||
getRequestHeaderOptions() {
|
||||
let headers = {
|
||||
"Content-Type" : "application/json",
|
||||
"anthropic-version" : "2023-06-01",
|
||||
"anthropic-dangerous-direct-browser-access": "true"
|
||||
};
|
||||
if (this.key)
|
||||
headers["x-api-key"] = this.key;
|
||||
return headers;
|
||||
}
|
||||
|
||||
getChatCompletions(message, model) {
|
||||
let systemPrompt = this.getSystemMessage(message, true);
|
||||
let result = super.getChatCompletions(message, model);
|
||||
if (systemPrompt !== "") {
|
||||
result.system = systemPrompt;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
getImageGeneration(message, model) {
|
||||
return this.getImageGenerationWithChat(message, model, "Image must be in svg format. ");
|
||||
}
|
||||
|
||||
async getImageVision(message, model) {
|
||||
return {
|
||||
model : model.id,
|
||||
messages : [
|
||||
{
|
||||
role: "user",
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: message.prompt
|
||||
},
|
||||
{
|
||||
type: "image",
|
||||
source: {
|
||||
type: "base64",
|
||||
media_type: AI.ImageEngine.getMimeTypeFromBase64(message.image),
|
||||
data: AI.ImageEngine.getContentFromBase64(message.image)
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
--- new file (15 lines): Deepseek model-list fixture (JSON); filename not shown in this view ---
|
||||
{
|
||||
"object": "list",
|
||||
"data": [
|
||||
{
|
||||
"id": "deepseek-chat",
|
||||
"object": "model",
|
||||
"owned_by": "deepseek"
|
||||
},
|
||||
{
|
||||
"id": "deepseek-reasoner",
|
||||
"object": "model",
|
||||
"owned_by": "deepseek"
|
||||
}
|
||||
]
|
||||
}
|
||||
--- new file (9 lines): DocService/sources/ai/engine/providers/internal/deepseek.js ---
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {
|
||||
|
||||
constructor() {
|
||||
super("Deepseek", "https://api.deepseek.com", "", "");
|
||||
}
|
||||
|
||||
}
|
||||
--- new file: DocService/sources/ai/engine/providers/internal/google-gemini.js (140 lines) ---
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {
|
||||
|
||||
constructor() {
|
||||
super("Google-Gemini", "https://generativelanguage.googleapis.com", "", "v1beta");
|
||||
}
|
||||
|
||||
correctModelInfo(model) {
|
||||
model.id = model.name;
|
||||
let index = model.name.indexOf("models/");
|
||||
if (index === 0)
|
||||
model.name = model.name.substring(7);
|
||||
}
|
||||
|
||||
checkExcludeModel(model) {
|
||||
if (model.id === "models/chat-bison-001" ||
|
||||
model.id === "models/text-bison-001")
|
||||
return true;
|
||||
|
||||
if (-1 !== model.id.indexOf("gemini-1.0"))
|
||||
return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
checkModelCapability(model) {
|
||||
if (model.inputTokenLimit)
|
||||
model.options.max_input_tokens = model.inputTokenLimit;
|
||||
|
||||
if (Array.isArray(model.supportedGenerationMethods) &&
|
||||
model.supportedGenerationMethods.includes("generateContent"))
|
||||
{
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
|
||||
let caps = AI.CapabilitiesUI.Chat;
|
||||
if (-1 !== model.id.indexOf("vision"))
|
||||
caps |= AI.CapabilitiesUI.Vision;
|
||||
|
||||
return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
|
||||
}
|
||||
|
||||
if (Array.isArray(model.supportedGenerationMethods) &&
|
||||
model.supportedGenerationMethods.includes("embedContent"))
|
||||
{
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Embeddings);
|
||||
return AI.CapabilitiesUI.Embeddings;
|
||||
}
|
||||
|
||||
return AI.CapabilitiesUI.All;
|
||||
}
|
||||
|
||||
getEndpointUrl(endpoint, model) {
|
||||
let Types = AI.Endpoints.Types;
|
||||
let url = "";
|
||||
switch (endpoint)
|
||||
{
|
||||
case Types.v1.Models:
|
||||
url = "/models";
|
||||
break;
|
||||
default:
|
||||
let addon = ":generateContent";
|
||||
if (endpoint === Types.v1.Images_Generations) {
|
||||
if (-1 != model.id.indexOf("imagen-3"))
|
||||
addon = ":predict";
|
||||
}
|
||||
url = "/" + model.id + addon;
|
||||
break;
|
||||
}
|
||||
if (this.key)
|
||||
url += "?key=" + this.key;
|
||||
return url;
|
||||
}
|
||||
|
||||
getRequestHeaderOptions() {
|
||||
let headers = {
|
||||
"Content-Type" : "application/json"
|
||||
};
|
||||
return headers;
|
||||
}
|
||||
|
||||
getChatCompletions(message, model) {
|
||||
let body = { contents : [] };
|
||||
for (let i = 0, len = message.messages.length; i < len; i++) {
|
||||
let rec = {
|
||||
role : message.messages[i].role,
|
||||
parts : [ { text : message.messages[i].content } ]
|
||||
};
|
||||
if (rec.role === "assistant")
|
||||
rec.role = "model";
|
||||
else if (rec.role === "system") {
|
||||
body.system_instruction = rec;
|
||||
continue;
|
||||
}
|
||||
body.contents.push(rec);
|
||||
}
|
||||
return body;
|
||||
}
|
||||
|
||||
getImageGeneration(message, model) {
|
||||
if (-1 != model.id.indexOf("flash")) {
|
||||
let result = this.getImageGenerationWithChat(message, model);
|
||||
result.generationConfig = {"responseModalities":["TEXT","IMAGE"]};
|
||||
return result;
|
||||
}
|
||||
if (-1 != model.id.indexOf("imagen-3")) {
|
||||
return {
|
||||
instances: [
|
||||
{
|
||||
prompt: message.prompt
|
||||
}
|
||||
],
|
||||
parameters: {
|
||||
"sampleCount": 1
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
async getImageVision(message, model) {
|
||||
return {
|
||||
contents : [
|
||||
{
|
||||
role: "user",
|
||||
parts: [
|
||||
{ text: message.prompt },
|
||||
{
|
||||
inline_data: {
|
||||
mime_type: AI.ImageEngine.getMimeTypeFromBase64(message.image),
|
||||
data: AI.ImageEngine.getContentFromBase64(message.image)
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
--- new file: DocService/sources/ai/engine/providers/internal/gpt4all.js (19 lines) ---
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {
|
||||
|
||||
constructor() {
|
||||
super("GPT4All", "http://localhost:4891", "", "v1");
|
||||
}
|
||||
|
||||
getRequestBodyOptions() {
|
||||
return {
|
||||
max_tokens : 4096
|
||||
};
|
||||
}
|
||||
|
||||
isOnlyDesktop() {
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
--- new file: DocService/sources/ai/engine/providers/internal/groq.js (29 lines) ---
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {
|
||||
|
||||
constructor() {
|
||||
super("Groq", "https://api.groq.com/openai", "", "v1");
|
||||
}
|
||||
|
||||
checkModelCapability = function(model) {
|
||||
if (model.context_length)
|
||||
model.options.max_input_tokens = AI.InputMaxTokens.getFloor(model.context_length);
|
||||
|
||||
if (-1 !== model.id.toLowerCase().indexOf("vision")) {
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Vision);
|
||||
return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
|
||||
}
|
||||
|
||||
if (-1 !== model.id.toLowerCase().indexOf("whisper")) {
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Audio_Transcriptions);
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Audio_Translations);
|
||||
return AI.CapabilitiesUI.Audio;
|
||||
}
|
||||
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
|
||||
return AI.CapabilitiesUI.Chat;
|
||||
}
|
||||
|
||||
}
|
||||
--- new file: DocService/sources/ai/engine/providers/internal/mistral.js (115 lines) ---
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {
|
||||
|
||||
constructor() {
|
||||
super("Mistral", "https://api.mistral.ai", "", "v1");
|
||||
}
|
||||
|
||||
checkModelCapability = function(model) {
|
||||
if (-1 !== model.id.indexOf("mistral-embed"))
|
||||
{
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["8k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Embeddings);
|
||||
return AI.CapabilitiesUI.Embeddings;
|
||||
}
|
||||
if (-1 !== model.id.indexOf("mistral-moderation"))
|
||||
{
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["8k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Moderations);
|
||||
return AI.CapabilitiesUI.Moderations;
|
||||
}
|
||||
if (-1 !== model.id.indexOf("pixtral"))
|
||||
{
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["128k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Images_Edits);
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Images_Variarions);
|
||||
return AI.CapabilitiesUI.Image;
|
||||
}
|
||||
if (-1 !== model.id.indexOf("mistral-small"))
|
||||
{
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["32k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
|
||||
return AI.CapabilitiesUI.Chat;
|
||||
}
|
||||
if (-1 !== model.id.indexOf("mistral-medium"))
|
||||
{
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["32k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
|
||||
return AI.CapabilitiesUI.Chat;
|
||||
}
|
||||
if (-1 !== model.id.indexOf("codestral"))
|
||||
{
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["256k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Code);
|
||||
return AI.CapabilitiesUI.Code | AI.CapabilitiesUI.Chat;
|
||||
}
|
||||
|
||||
model.options.max_input_tokens = AI.InputMaxTokens["128k"];
|
||||
model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
|
||||
|
||||
let capUI = AI.CapabilitiesUI.Chat;
|
||||
if (model.capabilities && model.capabilities.vision)
|
||||
capUI = AI.CapabilitiesUI.Vision;
|
||||
return capUI;
|
||||
}
|
||||
|
||||
getEndpointUrl(endpoint, model) {
|
||||
let Types = AI.Endpoints.Types;
|
||||
let url = "";
|
||||
switch (endpoint)
|
||||
{
|
||||
case Types.v1.OCR:
|
||||
url = "/ocr";
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (!url)
|
||||
return super.getEndpointUrl(endpoint, model);
|
||||
return url;
|
||||
}
|
||||
|
||||
async getImageOCR(message, model) {
|
||||
let result = {
|
||||
model: model.id,
|
||||
document: {
|
||||
type: "image_url",
|
||||
image_url: message.image
|
||||
}
|
||||
};
|
||||
//result.output_format = "markdown";
|
||||
result.include_image_base64 = true;
|
||||
return result;
|
||||
}
|
||||
|
||||
getImageOCRResult(messageInput, model) {
|
||||
let message = messageInput.data ? messageInput.data : messageInput;
|
||||
let images = [];
|
||||
let markdownContent = "";
|
||||
if (!message.pages)
|
||||
return markdownContent;
|
||||
|
||||
for (let i = 0, len = message.pages.length; i < len; i++) {
|
||||
let page = message.pages[i];
|
||||
|
||||
let images = page.images;
|
||||
let md = page.markdown;
|
||||
|
||||
for (let j = 0, imagesCount = images.length; j < imagesCount; j++) {
|
||||
let src = "](" + images[j].id + ")";
|
||||
let dst = "](" + images[j].image_base64 + ")";
|
||||
|
||||
src = src.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
md = md.replace(new RegExp(src, "g"), dst);
|
||||
}
|
||||
|
||||
markdownContent += md;
|
||||
markdownContent += "\n\n";
|
||||
}
|
||||
|
||||
return markdownContent;
|
||||
}
|
||||
|
||||
}
|
||||
--- new file: DocService/sources/ai/engine/providers/internal/ollama.js (22 lines) ---
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {
|
||||
|
||||
constructor() {
|
||||
super("Ollama", "http://localhost:11434", "", "v1");
|
||||
}
|
||||
|
||||
getImageGeneration(message, model) {
|
||||
let result = super.getImageGeneration(message, model);
|
||||
result.options = {};
|
||||
if (result.width)
|
||||
result.options.width = result.width;
|
||||
if (result.height)
|
||||
result.options.height = result.height;
|
||||
delete result.width;
|
||||
delete result.height;
|
||||
delete result.n;
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,431 @@
|
||||
{
|
||||
"object": "list",
|
||||
"data": [
|
||||
{
|
||||
"id": "gpt-4o-audio-preview-2024-12-17",
|
||||
"object": "model",
|
||||
"created": 1734034239,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "dall-e-3",
|
||||
"object": "model",
|
||||
"created": 1698785189,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "dall-e-2",
|
||||
"object": "model",
|
||||
"created": 1698798177,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-audio-preview-2024-10-01",
|
||||
"object": "model",
|
||||
"created": 1727389042,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "text-embedding-3-small",
|
||||
"object": "model",
|
||||
"created": 1705948997,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o4-mini",
|
||||
"object": "model",
|
||||
"created": 1744225351,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4.1-nano",
|
||||
"object": "model",
|
||||
"created": 1744321707,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4.1-nano-2025-04-14",
|
||||
"object": "model",
|
||||
"created": 1744321025,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-realtime-preview-2024-10-01",
|
||||
"object": "model",
|
||||
"created": 1727131766,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o4-mini-2025-04-16",
|
||||
"object": "model",
|
||||
"created": 1744133506,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-realtime-preview",
|
||||
"object": "model",
|
||||
"created": 1727659998,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "babbage-002",
|
||||
"object": "model",
|
||||
"created": 1692634615,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4",
|
||||
"object": "model",
|
||||
"created": 1687882411,
|
||||
"owned_by": "openai"
|
||||
},
|
||||
{
|
||||
"id": "text-embedding-ada-002",
|
||||
"object": "model",
|
||||
"created": 1671217299,
|
||||
"owned_by": "openai-internal"
|
||||
},
|
||||
{
|
||||
"id": "text-embedding-3-large",
|
||||
"object": "model",
|
||||
"created": 1705953180,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini-audio-preview",
|
||||
"object": "model",
|
||||
"created": 1734387424,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-audio-preview",
|
||||
"object": "model",
|
||||
"created": 1727460443,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o1-preview-2024-09-12",
|
||||
"object": "model",
|
||||
"created": 1725648865,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini-realtime-preview",
|
||||
"object": "model",
|
||||
"created": 1734387380,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4.1-mini",
|
||||
"object": "model",
|
||||
"created": 1744318173,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini-realtime-preview-2024-12-17",
|
||||
"object": "model",
|
||||
"created": 1734112601,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-3.5-turbo-instruct-0914",
|
||||
"object": "model",
|
||||
"created": 1694122472,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini-search-preview",
|
||||
"object": "model",
|
||||
"created": 1741391161,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4.1-mini-2025-04-14",
|
||||
"object": "model",
|
||||
"created": 1744317547,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "chatgpt-4o-latest",
|
||||
"object": "model",
|
||||
"created": 1723515131,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "davinci-002",
|
||||
"object": "model",
|
||||
"created": 1692634301,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-3.5-turbo-1106",
|
||||
"object": "model",
|
||||
"created": 1698959748,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-search-preview",
|
||||
"object": "model",
|
||||
"created": 1741388720,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4-turbo",
|
||||
"object": "model",
|
||||
"created": 1712361441,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-realtime-preview-2024-12-17",
|
||||
"object": "model",
|
||||
"created": 1733945430,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-3.5-turbo-instruct",
|
||||
"object": "model",
|
||||
"created": 1692901427,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-3.5-turbo",
|
||||
"object": "model",
|
||||
"created": 1677610602,
|
||||
"owned_by": "openai"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4-turbo-preview",
|
||||
"object": "model",
|
||||
"created": 1706037777,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini-search-preview-2025-03-11",
|
||||
"object": "model",
|
||||
"created": 1741390858,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4-0125-preview",
|
||||
"object": "model",
|
||||
"created": 1706037612,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-2024-11-20",
|
||||
"object": "model",
|
||||
"created": 1739331543,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "whisper-1",
|
||||
"object": "model",
|
||||
"created": 1677532384,
|
||||
"owned_by": "openai-internal"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-2024-05-13",
|
||||
"object": "model",
|
||||
"created": 1715368132,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4-turbo-2024-04-09",
|
||||
"object": "model",
|
||||
"created": 1712601677,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-3.5-turbo-16k",
|
||||
"object": "model",
|
||||
"created": 1683758102,
|
||||
"owned_by": "openai-internal"
|
||||
},
|
||||
{
|
||||
"id": "o1-preview",
|
||||
"object": "model",
|
||||
"created": 1725648897,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4-0613",
|
||||
"object": "model",
|
||||
"created": 1686588896,
|
||||
"owned_by": "openai"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4.5-preview",
|
||||
"object": "model",
|
||||
"created": 1740623059,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4.5-preview-2025-02-27",
|
||||
"object": "model",
|
||||
"created": 1740623304,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-search-preview-2025-03-11",
|
||||
"object": "model",
|
||||
"created": 1741388170,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "omni-moderation-2024-09-26",
|
||||
"object": "model",
|
||||
"created": 1732734466,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o3-mini-2025-01-31",
|
||||
"object": "model",
|
||||
"created": 1738010200,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o3-mini",
|
||||
"object": "model",
|
||||
"created": 1737146383,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "tts-1-hd",
|
||||
"object": "model",
|
||||
"created": 1699046015,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o",
|
||||
"object": "model",
|
||||
"created": 1715367049,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "tts-1-hd-1106",
|
||||
"object": "model",
|
||||
"created": 1699053533,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini",
|
||||
"object": "model",
|
||||
"created": 1721172741,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-2024-08-06",
|
||||
"object": "model",
|
||||
"created": 1722814719,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4.1",
|
||||
"object": "model",
|
||||
"created": 1744316542,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-transcribe",
|
||||
"object": "model",
|
||||
"created": 1742068463,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4.1-2025-04-14",
|
||||
"object": "model",
|
||||
"created": 1744315746,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o1-2024-12-17",
|
||||
"object": "model",
|
||||
"created": 1734326976,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini-2024-07-18",
|
||||
"object": "model",
|
||||
"created": 1721172717,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini-transcribe",
|
||||
"object": "model",
|
||||
"created": 1742068596,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o1-mini",
|
||||
"object": "model",
|
||||
"created": 1725649008,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini-audio-preview-2024-12-17",
|
||||
"object": "model",
|
||||
"created": 1734115920,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-3.5-turbo-0125",
|
||||
"object": "model",
|
||||
"created": 1706048358,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o1-mini-2024-09-12",
|
||||
"object": "model",
|
||||
"created": 1725648979,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "tts-1",
|
||||
"object": "model",
|
||||
"created": 1681940951,
|
||||
"owned_by": "openai-internal"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4-1106-preview",
|
||||
"object": "model",
|
||||
"created": 1698957206,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o-mini-tts",
|
||||
"object": "model",
|
||||
"created": 1742403959,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "tts-1-1106",
|
||||
"object": "model",
|
||||
"created": 1699053241,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o1",
|
||||
"object": "model",
|
||||
"created": 1734375816,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o1-pro",
|
||||
"object": "model",
|
||||
"created": 1742251791,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "o1-pro-2025-03-19",
|
||||
"object": "model",
|
||||
"created": 1742251504,
|
||||
"owned_by": "system"
|
||||
},
|
||||
{
|
||||
"id": "omni-moderation-latest",
|
||||
"object": "model",
|
||||
"created": 1731689265,
|
||||
"owned_by": "system"
|
||||
}
|
||||
]
|
||||
}
|
||||
89
DocService/sources/ai/engine/providers/internal/openai.js
Normal file
89
DocService/sources/ai/engine/providers/internal/openai.js
Normal file
@ -0,0 +1,89 @@
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {

  constructor() {
    super("OpenAI", "https://api.openai.com", "", "v1");
  }

  /**
   * Hide legacy base completion models from the model list.
   * @param {Object} model Model descriptor from the /models endpoint.
   * @returns {boolean} true when the model must not be shown.
   */
  checkExcludeModel(model) {
    return (-1 !== model.id.indexOf("babbage-002") ||
            -1 !== model.id.indexOf("davinci-002"));
  }

  /**
   * Register the endpoints a model supports (matched by id pattern) and
   * return its UI capabilities mask.
   * @param {Object} model Model descriptor; `endpoints`/`options` are mutated.
   * @returns {number} Bitmask of AI.CapabilitiesUI flags.
   */
  checkModelCapability(model) {
    // Uses strict comparisons throughout (the original mixed != and !==)
    // and drops the stray ';' that followed this method.
    const Types = AI.Endpoints.Types.v1;

    if (-1 !== model.id.indexOf("whisper-1")) {
      model.endpoints.push(Types.Audio_Transcriptions);
      model.endpoints.push(Types.Audio_Translations);
      return AI.CapabilitiesUI.Audio;
    }
    if (-1 !== model.id.indexOf("tts-1")) {
      model.endpoints.push(Types.Audio_Speech);
      return AI.CapabilitiesUI.Audio;
    }
    if (-1 !== model.id.indexOf("babbage-002") ||
        -1 !== model.id.indexOf("davinci-002")) {
      model.options.max_input_tokens = AI.InputMaxTokens["16k"];
      model.endpoints.push(Types.Completions);
      return AI.CapabilitiesUI.Chat;
    }
    if (-1 !== model.id.indexOf("embedding")) {
      model.endpoints.push(Types.Embeddings);
      return AI.CapabilitiesUI.Embeddings;
    }
    if (-1 !== model.id.indexOf("moderation")) {
      model.endpoints.push(Types.Moderations);
      return AI.CapabilitiesUI.Moderations;
    }
    if (-1 !== model.id.indexOf("realtime")) {
      model.endpoints.push(Types.Realtime);
      return AI.CapabilitiesUI.Realtime;
    }
    if ("dall-e-2" === model.id) {
      model.endpoints.push(Types.Images_Generations);
      model.endpoints.push(Types.Images_Edits);
      model.endpoints.push(Types.Images_Variarions);
      return AI.CapabilitiesUI.Image;
    }
    if ("dall-e-3" === model.id) {
      model.endpoints.push(Types.Images_Generations);
      return AI.CapabilitiesUI.Image;
    }

    // Context-window sizing for the chat families; the order matters
    // (gpt-4o / gpt-4-turbo must win over the plain gpt-4 prefix).
    if (0 === model.id.indexOf("gpt-4o") ||
        0 === model.id.indexOf("o1-") ||
        0 === model.id.indexOf("gpt-4-turbo")) {
      model.options.max_input_tokens = AI.InputMaxTokens["128k"];
    } else if (0 === model.id.indexOf("gpt-4")) {
      model.options.max_input_tokens = AI.InputMaxTokens["8k"];
    } else if (-1 !== model.id.indexOf("gpt-3.5-turbo-instruct")) {
      // Instruct models use the legacy completions endpoint.
      model.options.max_input_tokens = AI.InputMaxTokens["4k"];
      model.endpoints.push(Types.Completions);
      return AI.CapabilitiesUI.Chat;
    } else if (0 === model.id.indexOf("gpt-3.5-turbo")) {
      model.options.max_input_tokens = AI.InputMaxTokens["16k"];
    }

    model.endpoints.push(Types.Chat_Completions);
    return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
  }

  /**
   * OpenAI expects a single "WxH" size string instead of width/height.
   */
  getImageGeneration(message, model) {
    const result = super.getImageGeneration(message, model);
    result.size = result.width + "x" + result.height;
    delete result.width;
    delete result.height;
    return result;
  }

}
|
||||
62
DocService/sources/ai/engine/providers/internal/proxy.js
Normal file
62
DocService/sources/ai/engine/providers/internal/proxy.js
Normal file
@ -0,0 +1,62 @@
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {

  constructor() {
    super("Proxy", "http://localhost:8000", "", "ai-proxy");
  }

  /**
   * Classify a model by its `type` field, register the endpoints it
   * supports and return the UI capabilities mask.
   * NOTE: kept as an instance-field function to preserve the original shape.
   */
  checkModelCapability = function(model) {
    const Types = AI.Endpoints.Types.v1;

    if (model.context_length)
      model.options.max_input_tokens = AI.InputMaxTokens.getFloor(model.context_length);

    switch (model.type) {
      case "chat": {
        model.endpoints.push(Types.Chat_Completions);
        let capabilities = AI.CapabilitiesUI.Chat;
        if (-1 !== model.id.toLowerCase().indexOf("vision")) {
          model.endpoints.push(Types.Vision);
          capabilities |= AI.CapabilitiesUI.Vision;
        }
        return capabilities;
      }
      case "image":
        model.endpoints.push(Types.Images_Generations);
        model.endpoints.push(Types.Images_Edits);
        model.endpoints.push(Types.Images_Variarions);
        return AI.CapabilitiesUI.Image;
      case "moderation":
        model.endpoints.push(Types.Moderations);
        return AI.CapabilitiesUI.Moderations;
      case "embedding":
        model.endpoints.push(Types.Embeddings);
        return AI.CapabilitiesUI.Embeddings;
      case "language":
        model.endpoints.push(Types.Language);
        return AI.CapabilitiesUI.Language;
      case "code":
        model.endpoints.push(Types.Code);
        return AI.CapabilitiesUI.Code | AI.CapabilitiesUI.Chat;
      case "rerank":
        // Rerank models are not exposed in the UI.
        return AI.CapabilitiesUI.None;
      default:
        model.endpoints.push(Types.Chat_Completions);
        return AI.CapabilitiesUI.Chat;
    }
  }

  isUseProxy() {
    return true;
  }
}
|
||||
@ -0,0 +1,64 @@
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {

  constructor() {
    super("Stability AI", "https://api.stability.ai", "", "");
  }

  /** Stability AI exposes no /models endpoint; the list is hard-coded. */
  getModels() {
    return [
      { id: "Stable Diffusion" },
      { id: "Stable Image Core" },
      { id: "Stable Image Ultra" }
    ];
  }

  /** Every Stability model is an image generator. */
  checkModelCapability(model) {
    model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
    return AI.CapabilitiesUI.Image;
  }

  /**
   * The generation endpoint consumes multipart form data, not JSON.
   */
  getImageGeneration(message, model) {
    const body = new FormData();
    body.append("prompt", message.prompt);
    body.append("output_format", "png");
    return body;
  }

  /** Map each model to its dedicated v2beta generation route. */
  getEndpointUrl(endpoint, model) {
    if (AI.Endpoints.Types.v1.Images_Generations === endpoint) {
      switch (model.id) {
        case "Stable Diffusion":
          return "/v2beta/stable-image/generate/sd3";
        case "Stable Image Core":
          return "/v2beta/stable-image/generate/core";
        case "Stable Image Ultra":
          return "/v2beta/stable-image/generate/ultra";
        default:
          break;
      }
    }
    return super.getEndpointUrl(endpoint, model);
  }

  /**
   * No Content-Type here: the multipart boundary must be derived from
   * the FormData body by the transport layer.
   */
  getRequestHeaderOptions() {
    const headers = {
      "Accept": "application/json"
    };
    if (this.key)
      headers["Authorization"] = "Bearer " + this.key;
    return headers;
  }

}
|
||||
File diff suppressed because one or more lines are too long
@ -0,0 +1,63 @@
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {

  constructor() {
    super("Together AI", "https://api.together.xyz", "", "v1");
  }

  /**
   * Classify a model by its `type` field, register endpoints and return
   * the UI capabilities mask.
   * NOTE: kept as an instance-field function to preserve the original shape.
   */
  checkModelCapability = function(model) {
    const Types = AI.Endpoints.Types.v1;

    if (model.context_length)
      model.options.max_input_tokens = AI.InputMaxTokens.getFloor(model.context_length);

    switch (model.type) {
      case "chat": {
        model.endpoints.push(Types.Chat_Completions);
        let capabilities = AI.CapabilitiesUI.Chat;
        if (-1 !== model.id.toLowerCase().indexOf("vision")) {
          model.endpoints.push(Types.Vision);
          capabilities |= AI.CapabilitiesUI.Vision;
        }
        return capabilities;
      }
      case "image":
        model.endpoints.push(Types.Images_Generations);
        model.endpoints.push(Types.Images_Edits);
        model.endpoints.push(Types.Images_Variarions);
        return AI.CapabilitiesUI.Image;
      case "moderation":
        model.endpoints.push(Types.Moderations);
        return AI.CapabilitiesUI.Moderations;
      case "embedding":
        model.endpoints.push(Types.Embeddings);
        return AI.CapabilitiesUI.Embeddings;
      case "language":
        model.endpoints.push(Types.Language);
        return AI.CapabilitiesUI.Language;
      case "code":
        model.endpoints.push(Types.Code);
        return AI.CapabilitiesUI.Code | AI.CapabilitiesUI.Chat;
      case "rerank":
        // Rerank models are not exposed in the UI.
        return AI.CapabilitiesUI.None;
      default:
        model.endpoints.push(Types.Chat_Completions);
        return AI.CapabilitiesUI.Chat;
    }
  }

  isUseProxy() {
    return true;
  }

}
|
||||
34
DocService/sources/ai/engine/providers/internal/xAI.js
Normal file
34
DocService/sources/ai/engine/providers/internal/xAI.js
Normal file
@ -0,0 +1,34 @@
|
||||
"use strict";
|
||||
|
||||
class Provider extends AI.Provider {

  constructor() {
    super("xAI", "https://api.x.ai", "", "v1");
  }

  /** Hide beta-suffixed models from the model list. */
  checkExcludeModel(model) {
    return -1 !== model.id.indexOf("-beta");
  }

  /**
   * Register the endpoints a model supports (matched by id pattern) and
   * return its UI capabilities mask.
   * NOTE: kept as an instance-field function to preserve the original shape.
   */
  checkModelCapability = function(model) {
    if (-1 !== model.id.indexOf("vision")) {
      model.options.max_input_tokens = AI.InputMaxTokens["32k"];
      model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
      return AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Vision;
    }

    if (-1 !== model.id.indexOf("image")) {
      // Bug fix: endpoints are registered one by one everywhere else; the
      // original pushed a bitwise OR of two endpoint ids (and referenced a
      // nonexistent `Image_Generation` constant), producing one bogus entry.
      model.endpoints.push(AI.Endpoints.Types.v1.Images_Generations);
      model.endpoints.push(AI.Endpoints.Types.v1.Images_Edits);
      return AI.CapabilitiesUI.Image;
    }

    model.options.max_input_tokens = AI.InputMaxTokens["128k"];
    model.endpoints.push(AI.Endpoints.Types.v1.Chat_Completions);
    return AI.CapabilitiesUI.Chat;
  }

}
|
||||
163
DocService/sources/ai/engine/providers/preinstall-example.json
Normal file
163
DocService/sources/ai/engine/providers/preinstall-example.json
Normal file
@ -0,0 +1,163 @@
|
||||
{
|
||||
"actions": {
|
||||
"Chat": {
|
||||
"name": "Ask AI",
|
||||
"icon": "ask-ai",
|
||||
"model": "llama-3.2-90b-vision-preview",
|
||||
"capabilities": 1
|
||||
},
|
||||
"Summarization": {
|
||||
"name": "Summarization",
|
||||
"icon": "summarization",
|
||||
"model": "llama3.2:latest",
|
||||
"capabilities": 1
|
||||
},
|
||||
"Translation": {
|
||||
"name": "Translation",
|
||||
"icon": "translation",
|
||||
"model": "gemini-1.5-pro-latest",
|
||||
"capabilities": 1
|
||||
},
|
||||
"TextAnalyze": {
|
||||
"name": "Text analysis",
|
||||
"icon": "",
|
||||
"model": "claude-3-sonnet-20240229",
|
||||
"capabilities": 1
|
||||
}
|
||||
},
|
||||
|
||||
"providers": {
|
||||
"OpenAI": {
|
||||
"name": "OpenAI",
|
||||
"url": "https://api.openai.com",
|
||||
"key": "OPEN-AI-KEY",
|
||||
"models": [
|
||||
{
|
||||
"id": "chatgpt-4o-latest",
|
||||
"object": "model",
|
||||
"created": 1723515131,
|
||||
"owned_by": "system",
|
||||
"name": "chatgpt-4o-latest",
|
||||
"endpoints": [
|
||||
1
|
||||
],
|
||||
"options": {}
|
||||
},
|
||||
{
|
||||
"id": "gpt-4o",
|
||||
"object": "model",
|
||||
"created": 1715367049,
|
||||
"owned_by": "system",
|
||||
"name": "gpt-4o",
|
||||
"endpoints": [
|
||||
1
|
||||
],
|
||||
"options": {
|
||||
"max_input_tokens": 131072
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"Together AI": {
|
||||
"name": "Together AI",
|
||||
"url": "https://api.together.xyz",
|
||||
"key": "",
|
||||
"models": []
|
||||
},
|
||||
"Mistral": {
|
||||
"name": "Mistral",
|
||||
"url": "https://api.mistral.ai",
|
||||
"key": "",
|
||||
"models": []
|
||||
},
|
||||
"Deepseek": {
|
||||
"name": "Deepseek",
|
||||
"url": "https://api.deepseek.com",
|
||||
"key": "",
|
||||
"models": [
|
||||
{
|
||||
"id": "deepseek-chat",
|
||||
"object": "model",
|
||||
"owned_by": "deepseek",
|
||||
"name": "deepseek-chat",
|
||||
"endpoints": [],
|
||||
"options": {}
|
||||
},
|
||||
{
|
||||
"id": "deepseek-reasoner",
|
||||
"object": "model",
|
||||
"owned_by": "deepseek",
|
||||
"name": "deepseek-reasoner",
|
||||
"endpoints": [],
|
||||
"options": {}
|
||||
}
|
||||
]
|
||||
},
|
||||
"Ollama": {
|
||||
"name": "Ollama",
|
||||
"url": "http://localhost:11434",
|
||||
"key": "",
|
||||
"models": [
|
||||
{
|
||||
"id": "llama3.2:latest",
|
||||
"object": "model",
|
||||
"created": 1739120925,
|
||||
"owned_by": "library",
|
||||
"name": "llama3.2:latest",
|
||||
"endpoints": [],
|
||||
"options": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"models": [
|
||||
{
|
||||
"capabilities": 129,
|
||||
"provider": "Groq",
|
||||
"name": "Groq [llama-3.2-90b-vision-preview]",
|
||||
"id": "llama-3.2-90b-vision-preview"
|
||||
},
|
||||
{
|
||||
"capabilities": 1,
|
||||
"provider": "Together AI",
|
||||
"name": "Together AI [mistralai/Mistral-7B-v0.1]",
|
||||
"id": "mistralai/Mistral-7B-v0.1"
|
||||
},
|
||||
{
|
||||
"capabilities": 1,
|
||||
"provider": "Together AI",
|
||||
"name": "Together AI [deepseek-ai/DeepSeek-V3]",
|
||||
"id": "deepseek-ai/DeepSeek-V3"
|
||||
},
|
||||
{
|
||||
"capabilities": 129,
|
||||
"provider": "OpenAI",
|
||||
"name": "OpenAI [chatgpt-4o-latest]",
|
||||
"id": "chatgpt-4o-latest"
|
||||
},
|
||||
{
|
||||
"capabilities": 129,
|
||||
"provider": "Anthropic",
|
||||
"name": "Anthropic [claude-3-sonnet-20240229]",
|
||||
"id": "claude-3-sonnet-20240229"
|
||||
},
|
||||
{
|
||||
"capabilities": 129,
|
||||
"provider": "Google-Gemini",
|
||||
"name": "Google-Gemini [gemini-1.5-pro-latest]",
|
||||
"id": "gemini-1.5-pro-latest"
|
||||
},
|
||||
{
|
||||
"capabilities": 255,
|
||||
"provider": "Ollama",
|
||||
"name": "Ollama [llama3.2:latest]",
|
||||
"id": "llama3.2:latest"
|
||||
},
|
||||
{
|
||||
"capabilities": 255,
|
||||
"provider": "Deepseek",
|
||||
"name": "Deepseek [deepseek-chat]",
|
||||
"id": "deepseek-chat"
|
||||
}
|
||||
]
|
||||
}
|
||||
532
DocService/sources/ai/engine/providers/provider.js
Normal file
532
DocService/sources/ai/engine/providers/provider.js
Normal file
@ -0,0 +1,532 @@
|
||||
"use strict";
|
||||
|
||||
(async function(){
|
||||
|
||||
class Provider {
|
||||
/**
|
||||
* Provider base class.
|
||||
* @param {string} name Provider name.
|
||||
* @param {string} url Url to service.
|
||||
* @param {string} key Key for service. This is an optional field. Some providers may require a key for access.
|
||||
* @param {string} addon Addon for url. For example: v1 for many providers.
|
||||
*/
|
||||
constructor(name, url, key, addon) {
|
||||
this.name = name || "";
|
||||
this.url = url || "";
|
||||
this.key = key || "";
|
||||
this.addon = addon || "";
|
||||
|
||||
this.models = [];
|
||||
this.modelsUI = [];
|
||||
}
|
||||
|
||||
/**
|
||||
* If you add an implementation here, then no request will be made to the service.
|
||||
* @returns {Object[] | undefined}
|
||||
*/
|
||||
getModels() {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Correct received (*models* endpoint) model object.
|
||||
*/
|
||||
correctModelInfo(model) {
|
||||
if (undefined === model.id && model.name) {
|
||||
model.id = model.name;
|
||||
return;
|
||||
}
|
||||
model.name = model.id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return *true* if you do not want to work with a specific model (model.id).
|
||||
* The model will not be presented in the combo box with the list of models.
|
||||
* @returns {boolean}
|
||||
*/
|
||||
checkExcludeModel(model) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return enumeration with capabilities for this model (model.id). (Some providers does not get the information for this functionalities).
|
||||
* Example: AI.CapabilitiesUI.Chat | AI.CapabilitiesUI.Image;
|
||||
* @returns {number}
|
||||
*/
|
||||
checkModelCapability(model) {
|
||||
return AI.CapabilitiesUI.All;
|
||||
}
|
||||
|
||||
/**
|
||||
* Url for a specific endpoint.
|
||||
* @returns {string}
|
||||
*/
|
||||
getEndpointUrl(endpoint, model) {
|
||||
let Types = AI.Endpoints.Types;
|
||||
switch (endpoint)
|
||||
{
|
||||
case Types.v1.Models:
|
||||
return "/models";
|
||||
|
||||
case Types.v1.Chat_Completions:
|
||||
return "/chat/completions";
|
||||
case Types.v1.Completions:
|
||||
return "/completions";
|
||||
|
||||
case Types.v1.Images_Generations:
|
||||
return "/images/generations";
|
||||
case Types.v1.Images_Edits:
|
||||
return "/images/edits";
|
||||
case Types.v1.Images_Variarions:
|
||||
return "/images/variations";
|
||||
|
||||
case Types.v1.Embeddings:
|
||||
return "/embeddings";
|
||||
|
||||
case Types.v1.Audio_Transcriptions:
|
||||
return "/audio/transcriptions";
|
||||
case Types.v1.Audio_Translations:
|
||||
return "/audio/translations";
|
||||
case Types.v1.Audio_Speech:
|
||||
return "/audio/speech";
|
||||
|
||||
case Types.v1.Moderations:
|
||||
return "/moderations";
|
||||
|
||||
case Types.v1.Language:
|
||||
return "/completions";
|
||||
case Types.v1.Code:
|
||||
return "/completions";
|
||||
|
||||
case Types.v1.Realtime:
|
||||
return "/realtime";
|
||||
|
||||
case Types.v1.OCR:
|
||||
return "/chat/completions";
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
|
||||
/**
|
||||
* An object-addition to the model. It is used, among other things, to configure the model parameters.
|
||||
* Don't override this method unless you know what you're doing.
|
||||
* @returns {Object}
|
||||
*/
|
||||
getRequestBodyOptions() {
|
||||
return {};
|
||||
}
|
||||
|
||||
/**
|
||||
* The returned object is an enumeration of all the headers for the requests.
|
||||
* @returns {Object}
|
||||
*/
|
||||
getRequestHeaderOptions() {
|
||||
let headers = {
|
||||
"Content-Type" : "application/json"
|
||||
};
|
||||
if (this.key)
|
||||
headers["Authorization"] = "Bearer " + this.key;
|
||||
return headers;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method returns whether a proxy server needs to be used to work with this provider.
|
||||
* Don't override this method unless you know what you're doing.
|
||||
* @returns {boolean}
|
||||
*/
|
||||
isUseProxy() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method returns whether this provider is only supported in the desktop application.
|
||||
* Don't override this method unless you know what you're doing.
|
||||
* @returns {boolean}
|
||||
*/
|
||||
isOnlyDesktop() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get request body object by message.
|
||||
* @param {Object} message
|
||||
* *message* is in folowing format:
|
||||
* {
|
||||
* messages: [
|
||||
* { role: "developer", content: "You are a helpful assistant." },
|
||||
* { role: "system", content: "You are a helpful assistant." },
|
||||
* { role: "user", content: "Hello" },
|
||||
* { role: "assistant", content: "Hey!" },
|
||||
* { role: "user", content: "Hello" },
|
||||
* { role: "assistant", content: "Hey again!" }
|
||||
* ]
|
||||
* }
|
||||
*/
|
||||
getChatCompletions(message, model) {
|
||||
return {
|
||||
model : model.id,
|
||||
messages : message.messages
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get request body object by message.
|
||||
* @param {Object} message
|
||||
* *message* is in folowing format:
|
||||
* {
|
||||
* text: "Please, calculate 2+2."
|
||||
* }
|
||||
*/
|
||||
getCompletions(message, model) {
|
||||
return {
|
||||
model : model.id,
|
||||
prompt : message.text
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert *getChatCompletions* and *getCompletions* answer to result simple message.
|
||||
* @returns {Object} result
|
||||
* *result* is in folowing format:
|
||||
* {
|
||||
* content: ["Hello", "Hi"]
|
||||
* }
|
||||
*/
|
||||
getChatCompletionsResult(message, model) {
|
||||
let result = {
|
||||
content : []
|
||||
};
|
||||
|
||||
let arrResult = message.data.choices || message.data.content || message.data.candidates;
|
||||
if (!arrResult)
|
||||
return result;
|
||||
|
||||
let choice = arrResult[0];
|
||||
if (!choice)
|
||||
return result;
|
||||
|
||||
if (choice.message && choice.message.content)
|
||||
result.content.push(choice.message.content);
|
||||
if (choice.text)
|
||||
result.content.push(choice.text);
|
||||
if (choice.content) {
|
||||
if (typeof(choice.content) === "string")
|
||||
result.content.push(choice.content);
|
||||
else if (Array.isArray(choice.content.parts)) {
|
||||
for (let i = 0, len = choice.content.parts.length; i < len; i++) {
|
||||
result.content.push(choice.content.parts[i].text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let trimArray = ["\n".charCodeAt(0)];
|
||||
for (let i = 0, len = result.content.length; i < len; i++) {
|
||||
let iEnd = result.content[i].length - 1;
|
||||
let iStart = 0;
|
||||
while (iStart < iEnd && trimArray.includes(result.content[i].charCodeAt(iStart)))
|
||||
iStart++;
|
||||
while (iEnd > iStart && trimArray.includes(result.content[i].charCodeAt(iEnd)))
|
||||
iEnd--;
|
||||
|
||||
if (iEnd > iStart && ((0 !== iStart) || ((result.content[i].length - 1) !== iEnd)))
|
||||
result.content[i] = result.content[i].substring(iStart, iEnd + 1);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get available sizes for input images.
|
||||
* @returns {Array.<Object>} sizes
|
||||
*/
|
||||
getImageSizesInput(model) {
|
||||
return [
|
||||
{ w: 256, h: 256 },
|
||||
{ w: 512, h: 512 },
|
||||
{ w: 1024, h: 1024 }
|
||||
];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get available sizes for outpit images.
|
||||
* @returns {Array.<Object>} sizes
|
||||
*/
|
||||
getImageSizesOutput(model) {
|
||||
return [
|
||||
{ w: 256, h: 256 },
|
||||
{ w: 512, h: 512 },
|
||||
{ w: 1024, h: 1024 }
|
||||
];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get request body object by message.
|
||||
* @param {Object} message
|
||||
* *message* is in folowing format:
|
||||
* {
|
||||
* prompt: "",
|
||||
* width:1024,
|
||||
* height:1024,
|
||||
* background: "transparent",
|
||||
* quality: "high"
|
||||
* }
|
||||
*/
|
||||
getImageGeneration(message, model) {
|
||||
let sizes = this.getImageSizesOutput(model);
|
||||
let index = sizes.length - 1;
|
||||
|
||||
return {
|
||||
model : model.id,
|
||||
width : message.width || sizes[index].w,
|
||||
height : message.width || sizes[index].h,
|
||||
n : 1,
|
||||
response_format : "b64_json",
|
||||
prompt : message.prompt
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Convert *getImageGeneration* answer to result base64 image.
 * Probes the response for every answer shape the supported providers are
 * known to return, in a fixed priority order, and hands the first match to
 * AI.ImageEngine.getBase64FromUrl for normalization.
 * @param {Object} message - raw provider response (or an envelope with .data).
 * @param {Object} model - model descriptor (unused here).
 * @returns {String} Image in base64 format
 */
async getImageGenerationResult(message, model) {
	let imageUrl = "";
	// Read a field either from the response root or from its .data envelope.
	let getProp = function(name) {
		if (message[name])
			return message[name];
		if (message.data && message.data[name])
			return message.data[name];
		return undefined;
	};

	// OpenAI-style answer: data[0].b64_json.
	if (!imageUrl) {
		let data = getProp("data");
		if (data && data[0] && data[0].b64_json)
			imageUrl = data[0].b64_json;
	}

	// Stability-style answer: artifacts[0].base64.
	if (!imageUrl) {
		let artifacts = getProp("artifacts");
		if (artifacts && artifacts[0] && artifacts[0].base64)
			imageUrl = artifacts[0].base64;
	}

	// result.imageUrl shape.
	if (!imageUrl) {
		let result = getProp("result");
		if (result && result.imageUrl)
			imageUrl = result.imageUrl;
	}

	// generations[0].url shape.
	if (!imageUrl) {
		let generations = getProp("generations");
		if (generations && generations[0] && generations[0].url)
			imageUrl = generations[0].url;
	}

	// candidates[0].content shape (NOTE(review): content is used verbatim as
	// the image source here — confirm the provider puts a URL/data-URI there).
	if (!imageUrl) {
		let candidates = getProp("candidates");
		if (candidates && candidates[0] && candidates[0].content)
			imageUrl = candidates[0].content;
	}

	// Plain "image" field.
	if (!imageUrl) {
		let image = getProp("image");
		if (image)
			imageUrl = image;
	}

	// Free-text "response" field: extract an embedded data:image/...;base64 URI.
	if (!imageUrl) {
		let response = getProp("response");
		if (response) {
			let matches = response.match(/data:image\/[^;]+;base64,([^"'\s]+)/);
			if (matches && matches[1])
				imageUrl = matches[1];
		}
	}

	// Chat-style "content" parts: look for inline SVG markup in text parts.
	if (!imageUrl) {
		let content = getProp("content");
		if (content) {
			for (let i = 0, len = content.length; i < len; i++) {
				if (content[i].type === 'text') {
					let svgMatch = content[i].text.match(/<svg[\s\S]*?<\/svg>/);
					if (svgMatch) {
						imageUrl = svgMatch[0];
						break;
					}
				}
			}
		}

		// Wrap the raw SVG as a data URI.
		// NOTE(review): btoa throws on characters outside Latin-1; an SVG with
		// non-ASCII text would raise here — confirm whether that can occur.
		if (imageUrl) {
			imageUrl = "data:image/svg+xml;base64," + btoa(imageUrl);
		}
	}

	if (!imageUrl)
		return "";

	return await AI.ImageEngine.getBase64FromUrl(imageUrl);
}
|
||||
|
||||
/**
|
||||
* Get request body object by message.
|
||||
* @param {Object} message
|
||||
* *message* is in folowing format:
|
||||
* {
|
||||
* image: "base64...",
|
||||
* prompt: "text"
|
||||
* }
|
||||
*/
|
||||
async getImageVision(message, model) {
|
||||
return {
|
||||
model : model.id,
|
||||
messages : [
|
||||
{
|
||||
role: "user",
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: message.prompt
|
||||
},
|
||||
{
|
||||
type: "image_url",
|
||||
image_url: {
|
||||
url: message.image
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
getImageVisionResult(message, model) {
|
||||
let result = this.getChatCompletionsResult(message, model);
|
||||
|
||||
if (result.content.length === 0)
|
||||
return "";
|
||||
|
||||
if (0 === result.content[0].indexOf("<think>")) {
|
||||
let end = result.content[0].indexOf("</think>");
|
||||
if (end !== -1)
|
||||
result.content[0] = result.content[0].substring(end + 8);
|
||||
}
|
||||
|
||||
return result.content[0];
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Get request body object by message.
|
||||
* @param {Object} message
|
||||
* *message* is in folowing format:
|
||||
* {
|
||||
* image: "base64..."
|
||||
* }
|
||||
*/
|
||||
async getImageOCR(message, model) {
|
||||
return await this.getImageVision({
|
||||
image : message.image,
|
||||
prompt : Asc.Prompts.getImagePromptOCR()
|
||||
}, model);
|
||||
}
|
||||
|
||||
getImageOCRResult(message, model) {
|
||||
return this.getImageVisionResult(message, model);
|
||||
}
|
||||
|
||||
/**
|
||||
* ========================================================================================
|
||||
* The following are methods for internal work. There is no need to overload these methods.
|
||||
* ========================================================================================
|
||||
*/
|
||||
createInstance(name, url, key, addon) {
|
||||
//let inst = Object.create(Object.getPrototypeOf(this));
|
||||
let inst = new this.constructor();
|
||||
inst.name = name;
|
||||
inst.url = url;
|
||||
inst.key = key;
|
||||
inst.addon = addon || "";
|
||||
return inst;
|
||||
}
|
||||
|
||||
checkModelsUI() {
|
||||
for (let i = 0, len = this.models.length; i < len; i++) {
|
||||
let model = this.models[i];
|
||||
let modelUI = new window.AI.UI.Model(model.name, model.id, model.provider);
|
||||
modelUI.capabilities = this.checkModelCapability(model);
|
||||
this.modelsUI.push(modelUI);
|
||||
}
|
||||
}
|
||||
|
||||
getSystemMessage(message, isRemove) {
|
||||
let messages = message.messages;
|
||||
let isFound = false;
|
||||
if (!messages)
|
||||
return "";
|
||||
let result = "";
|
||||
for (let i = 0; i < messages.length; ++i) {
|
||||
if (messages[i].role === "system") {
|
||||
if (isFound) {
|
||||
messages.splice(i, 1);
|
||||
} else {
|
||||
isFound = true;
|
||||
result = messages[i].content;
|
||||
if (isRemove === true) {
|
||||
messages.splice(i, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
getImageGenerationWithChat(message, model, addon) {
|
||||
let prompt = "Please generate image. ";
|
||||
if (addon)
|
||||
prompt += addon;
|
||||
// TODO: sizes
|
||||
prompt += "Here is the description for the image content:\"";
|
||||
prompt += message.prompt;
|
||||
prompt += "\"";
|
||||
|
||||
let data = {
|
||||
messages : [
|
||||
{
|
||||
role: "user",
|
||||
content: prompt
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
return this.getChatCompletions(data, model);
|
||||
}
|
||||
|
||||
getImageVisionWithChat(message, model) {
|
||||
let prompt = "Please generate image. ";
|
||||
if (addon)
|
||||
prompt += addon;
|
||||
|
||||
let data = {
|
||||
messages : [
|
||||
{
|
||||
role: "user",
|
||||
content: message.prompt
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
return this.getChatCompletions(data, model);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
window.AI.Provider = Provider;
|
||||
await AI.loadInternalProviders();
|
||||
|
||||
})();
|
||||
661
DocService/sources/ai/engine/register.js
Normal file
661
DocService/sources/ai/engine/register.js
Normal file
@ -0,0 +1,661 @@
|
||||
function registerButtons(window, undefined)
|
||||
{
|
||||
// Build the %-templated resource path for a big (toolbar) icon by name.
function getToolBarButtonIcons(icon) {
	const prefix = "resources/icons/%theme-type%(light|dark)/big/";
	const suffix = "%scale%(default).png";
	return prefix + icon + suffix;
}
|
||||
|
||||
// Build the %-templated resource path for a regular (context menu) icon.
function getContextMenuButtonIcons(icon) {
	return ["resources/icons/%theme-type%(light|dark)/", icon, "%scale%(default).png"].join("");
}
|
||||
|
||||
// register contextmenu buttons
// Root "AI" context-menu entry; every submenu below is parented to it.
let buttonMain = new Asc.ButtonContextMenu();
buttonMain.text = "AI";
buttonMain.icons = getContextMenuButtonIcons("general-ai");
// "All" checker: shown for every context-menu invocation.
buttonMain.addCheckers("All");
|
||||
|
||||
/**
 * Open (or re-activate) the singleton AI chat window.
 * @param {String} [attachedText] - optional selection text forwarded to the
 *   chat once, on the first onWindowReady.
 * NOTE(review): window.chatWindow is never reset here on close — confirm the
 * window teardown clears it, otherwise a closed chat can never be reopened.
 */
function chatWindowShow(attachedText)
{
	// Singleton: re-activate the existing window instead of opening another.
	if (window.chatWindow) {
		window.chatWindow.activate();
		return;
	}

	// Guard: only open the UI if a Chat engine is actually configured.
	let requestEngine = AI.Request.create(AI.ActionType.Chat);
	if (!requestEngine)
		return;

	let variation = {
		url : "chat.html",
		description : window.Asc.plugin.tr("Chatbot"),
		isVisual : true,
		buttons : [],
		icons: "resources/icons/%theme-name%(theme-default|theme-system|theme-classic-light)/%theme-type%(light|dark)/ask-ai%state%(normal|active)%scale%(default).png",
		isModal : false,
		isCanDocked: true,
		// Restore the user's last docking choice; default to a floating window.
		type: window.localStorage.getItem("onlyoffice_ai_chat_placement") || "window",
		EditorsSupport : ["word", "slide", "cell", "pdf"],
		size : [ 400, 400 ]
	};

	// Ensures the attached selection text is only delivered once.
	let hasOpenedOnce = false;

	var chatWindow = new window.Asc.PluginWindow();
	chatWindow.attachEvent("onWindowReady", function() {
		Asc.Editor.callMethod("ResizeWindow", [chatWindow.id, [400, 400], [400, 400], [0, 0]]);
		// Forward the selection that triggered the chat, first open only.
		if(!hasOpenedOnce && attachedText && attachedText.trim()) {
			chatWindow.command("onAttachedText", attachedText);
		}
		hasOpenedOnce = true;
	});
	// Chat round-trip: user message in, engine reply back to the window.
	chatWindow.attachEvent("onChatMessage", async function(message) {
		// Re-resolve the engine per message: settings may change while open.
		let requestEngine = AI.Request.create(AI.ActionType.Chat);
		if (!requestEngine)
			return;

		let result = await requestEngine.chatRequest(message);
		if (!result) result = "";

		//result = result.replace(/\n\n/g, '\n');
		chatWindow.command("onChatReply", result);
	});
	// Apply a chat answer to the document in the way the user picked.
	chatWindow.attachEvent("onChatReplace", async function(data) {
		switch (data.type) {
			case "review": {
				// Review markup only exists in the word editor; fall back to a comment.
				if (Asc.plugin.info.editorType === "word")
					await Asc.Library.InsertAsReview(data.data, true);
				else
					await Asc.Library.InsertAsComment(data.data);
				break;
			}
			case "comment": {
				await Asc.Library.InsertAsComment(data.data);
				break;
			}
			case "insert": {
				await Asc.Library.InsertAsHTML(data.data);
				break;
			}
			case "replace": {
				await Asc.Library.ReplaceTextSmart([data.data]);
				break;
			}
		}
	});
	// Persist dock/window placement and let the editor re-layout afterwards.
	chatWindow.attachEvent("onDockedChanged", async function(type) {
		window.localStorage.setItem("onlyoffice_ai_chat_placement", type);

		// Round-trip to the chat frame so it saves its state before the dock
		// change is finalized.
		// NOTE(review): this attaches a fresh onUpdateState listener on every
		// dock change — confirm attachEvent replaces rather than accumulates.
		async function waitSaveSettings()
		{
			return new Promise(resolve => (function(){
				chatWindow.attachEvent("onUpdateState", function(type) {
					resolve();
				});
				chatWindow.command("onUpdateState");
			})());
		};

		await waitSaveSettings();
		Asc.Editor.callMethod("OnWindowDockChangedCallback", [chatWindow.id]);
	});
	chatWindow.show(variation);

	// Cache the singleton for the activate() path above.
	window.chatWindow = chatWindow;
}
|
||||
|
||||
// Submenu summarize:
// Summarization of the current selection; not offered in the PDF editor.
if (Asc.Editor.getType() !== "pdf")
{
	let button = new Asc.ButtonContextMenu(buttonMain);
	button.text = "Summarization";
	button.icons = getContextMenuButtonIcons("summarization");
	button.editors = ["word"];
	button.addCheckers("Selection");
	button.attachOnClick(async function(data){
		let requestEngine = AI.Request.create(AI.ActionType.Summarization);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedText();
		let prompt = Asc.Prompts.getSummarizationPrompt(content);
		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		// Insert the summary after the selection with a fixed heading.
		result = "Summary:\n\n" + result;
		await Asc.Library.InsertAsText(result);
	});
}
|
||||
|
||||
// Submenu Text Analysis
// Parent entry plus rewrite/longer/shorter/explain/fix/keywords actions,
// all driven by the TextAnalyze engine on the current selection.
if (true)
{
	let button1 = new Asc.ButtonContextMenu(buttonMain);
	button1.text = "Text analysis";
	button1.icons = getContextMenuButtonIcons("text-analysis-ai");
	button1.editors = ["word"];
	button1.addCheckers("Target", "Selection");

	// Rewrite the selection in different wording; replaces the selection.
	let button2 = new Asc.ButtonContextMenu(button1);
	button2.text = "Rewrite differently";
	button2.editors = ["word"];
	button2.addCheckers("Selection");
	button2.attachOnClick(async function(){
		let requestEngine = AI.Request.create(AI.ActionType.TextAnalyze);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedText();
		let prompt = Asc.Prompts.getTextRewritePrompt(content);
		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		// Collapse double blank lines before pasting over the selection.
		result = result.replace(/\n\n/g, '\n');
		await Asc.Library.PasteText(result);
	});

	// Expand the selection; replaces the selection.
	let button3 = new Asc.ButtonContextMenu(button1);
	button3.text = "Make longer";
	button3.editors = ["word"];
	button3.addCheckers("Selection");
	button3.attachOnClick(async function(data){
		let requestEngine = AI.Request.create(AI.ActionType.TextAnalyze);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedText();
		let prompt = Asc.Prompts.getTextLongerPrompt(content);
		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		result = result.replace(/\n\n/g, '\n');
		await Asc.Library.PasteText(result);
	});

	// Condense the selection; replaces the selection.
	let button4 = new Asc.ButtonContextMenu(button1);
	button4.text = "Make shorter";
	button4.editors = ["word"];
	button4.addCheckers("Selection");
	button4.attachOnClick(async function(data){
		let requestEngine = AI.Request.create(AI.ActionType.TextAnalyze);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedText();
		let prompt = Asc.Prompts.getTextShorterPrompt(content);
		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		result = result.replace(/\n\n/g, '\n');
		await Asc.Library.PasteText(result);
	});

	// Explain selection (or the word under the caret) as a comment.
	let button5 = new Asc.ButtonContextMenu(button1);
	button5.text = "Explain text in comment";
	button5.separator = true;
	button5.editors = ["word"];
	button5.addCheckers("Target", "Selection");
	button5.attachOnClick(async function(){
		let requestEngine = AI.Request.create(AI.ActionType.TextAnalyze);
		if (!requestEngine)
			return;

		// No selection: fall back to the word at the cursor.
		let content = await Asc.Library.GetSelectedText();
		if (!content)
			content = await Asc.Library.GetCurrentWord();

		if (!content)
			return;

		let prompt = Asc.Prompts.getExplainPrompt(content);
		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		result = result.replace(/\n\n/g, '\n');
		await Asc.Library.InsertAsComment(result);
	});

	// Explain selection and attach the explanation as a hyperlink.
	let button6 = new Asc.ButtonContextMenu(button1);
	button6.text = "Explain text in hyperlink";
	button6.separator = true;
	button6.editors = ["word"];
	button6.addCheckers("Selection");
	button6.attachOnClick(async function(){
		let requestEngine = AI.Request.create(AI.ActionType.TextAnalyze);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedText();
		let prompt = Asc.Prompts.getExplainAsLinkPrompt(content);
		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		result = result.replace(/\n\n/g, '\n');
		await Asc.Library.InsertAsHyperlink(result);
	});

	// Proofread the selection; replaces it only when corrections were made.
	let button7 = new Asc.ButtonContextMenu(button1);
	button7.text = "Fix spelling & grammar";
	button7.separator = true;
	button7.editors = ["word"];
	button7.addCheckers("Selection");
	button7.attachOnClick(async function(){
		let requestEngine = AI.Request.create(AI.ActionType.TextAnalyze);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedText();
		let prompt = Asc.Prompts.getFixAndSpellPrompt(content);

		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		// The prompt instructs the model to answer with this exact sentence
		// when nothing needs fixing — presumably kept in sync with
		// getFixAndSpellPrompt; verify against Asc.Prompts.
		if (result !== 'The text is correct, there are no errors in it.')
			await Asc.Library.ReplaceTextSmart([result]);
		else
			console.log('The text is correct, there are no errors in it.');
	});

	// Extract keywords from the selection and insert them as text.
	let button8 = new Asc.ButtonContextMenu(button1);
	button8.text = "Keywords";
	button8.editors = ["word"];
	button8.addCheckers("Selection");
	button8.attachOnClick(async function(){
		let requestEngine = AI.Request.create(AI.ActionType.TextAnalyze);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedText();
		let prompt = Asc.Prompts.getTextKeywordsPrompt(content);
		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		await Asc.Library.InsertAsText(result);
	});
}
|
||||
|
||||
// Submenu Translate
// One parent entry plus a child per target language. Only the English child
// carries the click handler; the other languages are copies of it that differ
// only in text/data (copy() presumably clones the handler — verify).
if (true)
{
	let button1 = new Asc.ButtonContextMenu(buttonMain);
	button1.text = "Translate";
	button1.icons = getContextMenuButtonIcons("translation");
	button1.editors = ["word", "slide", "cell"];
	button1.addCheckers("Selection");

	let button2 = new Asc.ButtonContextMenu(button1);
	button2.text = "English";
	button2.editors = ["word", "slide", "cell"];
	button2.addCheckers("Selection");
	// data carries the target language into the shared click handler.
	button2.data = "English";
	button2.attachOnClick(async function(data){
		let requestEngine = AI.Request.create(AI.ActionType.Translation);
		if (!requestEngine)
			return;

		let lang = data;
		let content = await Asc.Library.GetSelectedText();
		if (!content)
			return;

		let prompt = Asc.Prompts.getTranslatePrompt(content, lang);
		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		// Post-process the raw answer (e.g. keep formatting of the source).
		result = Asc.Library.getTranslateResult(result, content);

		await Asc.Library.PasteText(result);
	});

	let button3 = button2.copy();
	button3.text = "French";
	button3.data = "French";

	let button4 = button2.copy();
	button4.text = "German";
	button4.data = "German";

	let button5 = button2.copy();
	button5.text = "Chinese";
	button5.data = "Chinese";

	let button6 = button2.copy();
	button6.text = "Japanese";
	button6.data = "Japanese";

	let button7 = button2.copy();
	button7.text = "Russian";
	button7.data = "Russian";

	let button8 = button2.copy();
	button8.text = "Korean";
	button8.data = "Korean";

	let button9 = button2.copy();
	button9.text = "Spanish";
	button9.data = "Spanish";

	let button10 = button2.copy();
	button10.text = "Italian";
	button10.data = "Italian";
}
|
||||
|
||||
// "Show hyperlink content": opens a preview window for the link under the
// context menu (or, failing that, the selected text).
if (true)
{
	let button1 = new Asc.ButtonContextMenu(buttonMain);
	button1.text = "Show hyperlink content";
	button1.addCheckers("Hyperlink");

	// Capture the hyperlink value from the context-menu options into the
	// button's data so the click handler can use it.
	button1.onContextMenuShowExtendItem = function(options, item)
	{
		item.data = options.value;
	};

	button1.attachOnClick(function(data){
		let variation = {
			url : "hyperlink.html",
			description : window.Asc.plugin.tr("Hyperlink"),
			isVisual : true,
			buttons : [],
			isModal : false,
			EditorsSupport : ["word", "slide", "cell", "pdf"],
			size : [ 1000, 1000 ]
		};

		var linkWindow = new window.Asc.PluginWindow();
		linkWindow.attachEvent("onGetLink", async function(){
			// Prefer the captured hyperlink; fall back to the selection.
			let link = data;
			if (!link)
				link = await Asc.Library.GetSelectedText();
			// Strip line breaks so the frame receives a single-line URL.
			link = link.replace(/\n/g, '');
			link = link.replace(/\r/g, '');
			linkWindow.command("onSetLink", link);
		});
		linkWindow.show(variation);
	});
}
|
||||
|
||||
// Submenu Image: generation from text, OCR, and image description.
if (true)
{
	let buttonImages = new Asc.ButtonContextMenu(buttonMain);
	buttonImages.text = "Image";
	buttonImages.icons = getContextMenuButtonIcons("image-ai");
	buttonImages.addCheckers("Selection", "Image", "OleObject");

	// Generate an image from the selected text.
	let buttonGen = new Asc.ButtonContextMenu(buttonImages);
	buttonGen.text = "Text to Image";
	buttonGen.addCheckers("Selection");
	buttonGen.attachOnClick(async function(){
		let requestEngine = AI.Request.create(AI.ActionType.ImageGeneration);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedText();
		if (!content)
			return;

		let result = await requestEngine.imageGenerationRequest(content);
		if (!result) return;

		// PDF has no OLE objects: insert a plain image there. Elsewhere the
		// image is wrapped in an OLE object that remembers its source prompt.
		if (Asc.plugin.info.editorSubType === "pdf")
			return await Asc.Library.AddGeneratedImage(result);
		await Asc.Library.AddOleObject(result, content);
	});

	// Recognize text (incl. formulas) in the selected image.
	let buttonOCR = new Asc.ButtonContextMenu(buttonImages);
	buttonOCR.text = "OCR";
	buttonOCR.addCheckers("Image", "OleObject");
	buttonOCR.attachOnClick(async function(){
		let requestEngine = AI.Request.create(AI.ActionType.OCR);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedImage();
		if (!content)
			return;

		let result = await requestEngine.imageOCRRequest(content);
		if (!result) return;

		// Markdown insertion with LaTeX support for recognized formulas.
		await Asc.Library.InsertAsMD(result, [Asc.PluginsMD.latex]);
	});

	// Describe the selected image as text.
	let buttonExplainImage = new Asc.ButtonContextMenu(buttonImages);
	buttonExplainImage.text = "Image to Text";
	buttonExplainImage.addCheckers("Image", "OleObject");
	buttonExplainImage.attachOnClick(async function(){
		let requestEngine = AI.Request.create(AI.ActionType.Vision);
		if (!requestEngine)
			return;

		let content = await Asc.Library.GetSelectedImage();
		if (!content)
			return;

		let result = await requestEngine.imageVisionRequest({
			prompt : Asc.Prompts.getImageDescription(),
			image : content
		});
		if (!result) return;

		await Asc.Library.InsertAsMD(result);
	});
}
|
||||
|
||||
// Context-menu entry that opens the chat window, pre-filled with the
// current selection (if any).
if (true)
{
	let button1 = new Asc.ButtonContextMenu(buttonMain);
	button1.text = "Chatbot";
	button1.separator = true;
	button1.icons = getContextMenuButtonIcons("ask-ai");
	button1.addCheckers("All");
	button1.attachOnClick(async function(){
		let selectedText = await Asc.Library.GetSelectedText();
		chatWindowShow(selectedText);
	});
}
|
||||
|
||||
// Settings entry in the context menu — currently disabled (if (false)).
// The toolbar Settings button below provides the same onOpenSettingsModal
// entry point instead.
if (false)
{
	let button1 = new Asc.ButtonContextMenu(buttonMain);
	button1.text = "Settings";
	button1.separator = true;
	button1.addCheckers("All");
	button1.attachOnClick(function(){
		onOpenSettingsModal();
	});
}
|
||||
|
||||
// register toolbar buttons
// Root "AI" toolbar entry; exposed on window (together with the icon helper)
// so other modules can append their own toolbar buttons.
let buttonMainToolbar = new Asc.ButtonToolbar();
buttonMainToolbar.text = "AI";

window.buttonMainToolbar = buttonMainToolbar;
window.getToolBarButtonIcons = getToolBarButtonIcons;
|
||||
|
||||
// Toolbar: Settings button — opens the AI settings modal.
if (true)
{
	let button1 = new Asc.ButtonToolbar(buttonMainToolbar);
	button1.text = "Settings";
	button1.icons = getToolBarButtonIcons("settings");
	button1.attachOnClick(function(data){
		onOpenSettingsModal();
	});
}
|
||||
|
||||
// Toolbar: Chatbot, Summarization (non-PDF) and Translation buttons.
if (true)
{
	// Open the chat window (no attached text from the toolbar).
	let button1 = new Asc.ButtonToolbar(buttonMainToolbar);
	button1.separator = true;
	button1.text = "Chatbot";
	button1.icons = getToolBarButtonIcons("ask-ai");
	button1.attachOnClick(function(data){
		chatWindowShow();
	});

	// Summarization modal; like the context-menu entry, not offered for PDF.
	if (Asc.Editor.getType() !== "pdf") {
		let button2 = new Asc.ButtonToolbar(buttonMainToolbar);
		button2.text = "Summarization";
		button2.icons = getToolBarButtonIcons("summarization");
		button2.attachOnClick(async function(data){
			// Engine check up front so the modal never opens unconfigured.
			let requestEngine = AI.Request.create(AI.ActionType.Summarization);
			if (!requestEngine)
				return;

			onOpenSummarizationModal();
		});
	}

	/*
	// TODO:
	let button3 = new Asc.ButtonToolbar(buttonMainToolbar);
	button3.text = "Text to image";
	button3.icons = getToolBarButtonIcons("text-to-image");
	button3.attachOnClick(function(data){
		console.log(data);
	});
	*/

	// Split button: main click translates to the saved language; the menu
	// opens the language settings dialog.
	let button4 = new Asc.ButtonToolbar(buttonMainToolbar);
	button4.text = "Translation";
	button4.icons = getToolBarButtonIcons("translation");
	button4.menu = [{
		text:'Settings',
		id:'t10n-settings',
		onclick: () => {
			onTranslateSettingsModal();
		}}];
	button4.split = true;
	button4.attachOnClick(async function(){
		let requestEngine = AI.Request.create(AI.ActionType.Translation);
		if (!requestEngine)
			return;

		// Target language persisted by the translate-settings dialog.
		const ls_lang_key = "onlyoffice_ai_plugin_translate_lang";
		const currLang = window.localStorage.getItem(ls_lang_key);

		let lang = !!currLang ? currLang : "english";
		let content = await Asc.Library.GetSelectedText();
		if (!content)
			return;

		let prompt = Asc.Prompts.getTranslatePrompt(content, lang);
		let result = await requestEngine.chatRequest(prompt);
		if (!result) return;

		result = Asc.Library.getTranslateResult(result, content);
		await Asc.Library.PasteText(result);
	});
}
|
||||
|
||||
// register actions
// Global AI namespace (shared with the other engine modules) plus the fixed
// set of action identifiers every UI entry above maps onto.
window.AI = window.AI || {};
var AI = window.AI;

AI.ActionType = {
	Chat : "Chat",
	Summarization : "Summarization",
	Translation : "Translation",
	TextAnalyze : "TextAnalyze",
	ImageGeneration : "ImageGeneration",
	OCR : "OCR",
	Vision : "Vision"
};

// Per-action UI/model configuration, filled in below and persisted by
// AI.ActionsSave / AI.ActionsLoad.
AI.Actions = {};
|
||||
|
||||
/**
 * Per-action UI descriptor: display name, icon id, bound model id and the
 * capability flags the assigned model must provide.
 * @param {String} [name] - display name; defaults to "".
 * @param {String} [icon] - icon identifier; defaults to "".
 * @param {String} [modelId] - assigned model id; defaults to "".
 * @param {Number} [capabilities] - required capabilities; when omitted,
 *   falls back to AI.CapabilitiesUI.Chat (defined elsewhere in the engine).
 */
function ActionUI(name, icon, modelId, capabilities) {
	this.name = name ? name : "";
	this.icon = icon ? icon : "";
	this.model = modelId ? modelId : "";
	this.capabilities = (capabilities !== undefined) ? capabilities : AI.CapabilitiesUI.Chat;
}
|
||||
|
||||
// Default descriptor per action. Chat-capable actions omit the capabilities
// argument and get AI.CapabilitiesUI.Chat; image/vision actions state theirs
// explicitly (AI.CapabilitiesUI is defined in another engine module —
// presumably loaded before this runs; verify load order).
AI.Actions[AI.ActionType.Chat] = new ActionUI("Chatbot", "ask-ai");
AI.Actions[AI.ActionType.Summarization] = new ActionUI("Summarization", "summarization");
AI.Actions[AI.ActionType.Translation] = new ActionUI("Translation", "translation");
AI.Actions[AI.ActionType.TextAnalyze] = new ActionUI("Text analysis", "text-analysis-ai");
AI.Actions[AI.ActionType.ImageGeneration] = new ActionUI("Image generation", "image-ai", "", AI.CapabilitiesUI.Image);
AI.Actions[AI.ActionType.OCR] = new ActionUI("OCR", "text-analysis-ai", "", AI.CapabilitiesUI.Vision);
AI.Actions[AI.ActionType.Vision] = new ActionUI("Vision", "vision-ai", "", AI.CapabilitiesUI.Vision);
|
||||
|
||||
// Fixed display order of the actions (also the order used by
// AI.ActionsGetSorted below).
AI.ActionsGetKeys = function()
{
	return [
		AI.ActionType.Chat,
		AI.ActionType.Summarization,
		AI.ActionType.Translation,
		AI.ActionType.TextAnalyze,
		AI.ActionType.ImageGeneration,
		AI.ActionType.OCR,
		AI.ActionType.Vision
	];
};
|
||||
|
||||
// Build the ordered, translated action list for the settings UI.
// Returns plain objects (id + translated name + icon/model/capabilities)
// rather than the live ActionUI instances, so the UI cannot mutate state.
AI.ActionsGetSorted = function()
{
	let keys = AI.ActionsGetKeys();
	let count = keys.length;
	let actions = new Array(count);
	for (let i = 0; i < count; i++)
	{
		let src = AI.Actions[keys[i]];
		actions[i] = {
			id : keys[i],
			// Display names are run through the plugin translator.
			name : Asc.plugin.tr(src.name),
			icon : src.icon,
			model : src.model,
			capabilities : src.capabilities
		}
	}
	return actions;
};
|
||||
|
||||
// localStorage key under which the action→model assignments are persisted.
var actions_key = "onlyoffice_ai_actions_key";
// Persist AI.Actions. Best-effort: localStorage may be unavailable or full
// (private mode, quota), in which case false is returned and the error is
// deliberately swallowed.
AI.ActionsSave = function()
{
	try
	{
		window.localStorage.setItem(actions_key, JSON.stringify(AI.Actions));
		return true;
	}
	catch (e)
	{
	}
	return false;
};
|
||||
|
||||
/**
 * Restore persisted action→model assignments into AI.Actions.
 * Falls back to AI.DEFAULT_SERVER_SETTINGS.actions when nothing usable is
 * stored. Only the model id is restored; names/icons/capabilities stay as
 * registered above.
 * @returns {boolean} true when any settings object was applied.
 */
AI.ActionsLoad = function()
{
	let obj = null;
	try
	{
		obj = JSON.parse(window.localStorage.getItem(actions_key));
	}
	catch (e)
	{
		obj = null;
	}

	// BUG FIX: getItem returns null when the key is absent and
	// JSON.parse(null) yields null WITHOUT throwing, so the server-default
	// fallback previously only applied on malformed JSON — never on a
	// plain first run. Apply it for any falsy parse result instead.
	if (!obj)
		obj = (AI.DEFAULT_SERVER_SETTINGS && AI.DEFAULT_SERVER_SETTINGS.actions) ? AI.DEFAULT_SERVER_SETTINGS.actions : null;

	if (obj)
	{
		for (let i in obj)
		{
			// Only known actions with a stored model are updated.
			if (AI.Actions[i] && obj[i].model)
				AI.Actions[i].model = obj[i].model;
		}
		return true;
	}
	return false;
};
|
||||
|
||||
// Assign a model to an action and persist the change immediately.
// Unknown action ids are ignored.
AI.ActionsChange = function(id, model)
{
	if (AI.Actions[id])
	{
		AI.Actions[id].model = model;
		AI.ActionsSave();
	}
};

// Restore persisted assignments at registration time.
AI.ActionsLoad();
|
||||
}
|
||||
71
DocService/sources/ai/engine/storage.js
Normal file
71
DocService/sources/ai/engine/storage.js
Normal file
@ -0,0 +1,71 @@
|
||||
(function(exports, undefined)
|
||||
{
|
||||
	// Shared AI namespace on the export target (window); created here if this
	// module loads first, reused otherwise.
	exports.AI = exports.AI || {};
	var AI = exports.AI;
	AI.UI = AI.UI || {};
	AI.Storage = AI.Storage || {};
	// Bump when the persisted storage layout changes.
	AI.Storage.Version = 3;
|
||||
|
||||
	// True when running inside the desktop editor from local files.
	// Evaluated once at load time: requires the AscDesktopEditor user agent
	// AND either a file: page protocol or this script itself being loaded
	// from file:///.
	AI.isLocalDesktop = (function(){
		if (window.navigator && window.navigator.userAgent.toLowerCase().indexOf("ascdesktopeditor") < 0)
			return false;
		if (window.location && window.location.protocol == "file:")
			return true;
		if (window.document && window.document.currentScript && 0 == window.document.currentScript.src.indexOf("file:///"))
			return true;
		return false;
	})();
|
||||
|
||||
AI.isLocalUrl = function(url) {
|
||||
let filter = ["localhost", "127.0.0.1"];
|
||||
for (let i = 0, len = filter.length; i < len; i++) {
|
||||
let pos = url.indexOf(filter[i]);
|
||||
if (pos >= 0 && pos < 10)
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
AI.getDesktopLocalVersion = function() {
|
||||
let ret = 99 * 1000000 + 99 * 1000 + 99;
|
||||
if (!AI.isLocalDesktop)
|
||||
return ret;
|
||||
let pos = window.navigator.userAgent.indexOf("AscDesktopEditor/");
|
||||
let pos2 = window.navigator.userAgent.indexOf(" ", pos);
|
||||
if (pos === -1 || pos2 === -1)
|
||||
return ret;
|
||||
try {
|
||||
let tokens = window.navigator.userAgent.substring(pos + 17, pos2).split(".");
|
||||
return parseInt(tokens[0]) * 1000000 + parseInt(tokens[1]) * 1000 + parseInt(tokens[2]);
|
||||
} catch (e) {
|
||||
}
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
AI.loadResourceAsText = async function(url) {
|
||||
return new Promise(resolve => (function(){
|
||||
try {
|
||||
var xhr = new XMLHttpRequest();
|
||||
if (xhr) {
|
||||
xhr.open('GET', url, true);
|
||||
xhr.onload = function () {
|
||||
var status = xhr.status;
|
||||
if (status == 200 || location.href.indexOf("file:") == 0) {
|
||||
resolve(xhr.responseText);
|
||||
} else {
|
||||
resolve("");
|
||||
}
|
||||
};
|
||||
xhr.onerror = function() {
|
||||
resolve("");
|
||||
}
|
||||
xhr.send('');
|
||||
}
|
||||
} catch (e) {
|
||||
resolve("");
|
||||
}
|
||||
})());
|
||||
};
|
||||
|
||||
})(window);
|
||||
Reference in New Issue
Block a user