[bug] Prevent I/O overload from repeated clientLog messages

This commit is contained in:
Sergey Konovalov
2025-11-09 12:11:59 +03:00
parent 22ab7500bd
commit 28ebde22e0

View File

@ -1951,18 +1951,9 @@ exports.install = function (server, app, callbackFunction) {
yield canvasService.openDocument(ctx, conn, cmd); yield canvasService.openDocument(ctx, conn, cmd);
break; break;
} }
case 'clientLog': { case 'clientLog':
const level = data.level?.toLowerCase(); yield handleClientLog(ctx, conn, docId, data, tenErrorFiles);
if ('trace' === level || 'debug' === level || 'info' === level || 'warn' === level || 'error' === level || 'fatal' === level) {
ctx.logger[level]('clientLog: %s', data.msg);
}
if ('error' === level && tenErrorFiles && docId) {
const destDir = 'browser/' + docId;
yield storage.copyPath(ctx, docId, destDir, undefined, tenErrorFiles);
yield* saveErrorChanges(ctx, docId, destDir);
}
break; break;
}
case 'extendSession': case 'extendSession':
ctx.logger.debug('extendSession idletime: %d', data.idletime); ctx.logger.debug('extendSession idletime: %d', data.idletime);
conn.sessionIsSendWarning = false; conn.sessionIsSendWarning = false;
@ -2196,6 +2187,31 @@ exports.install = function (server, app, callbackFunction) {
} }
} }
/**
 * Process a 'clientLog' command: forward the client-supplied message to the
 * server logger at the requested severity, and on the first 'error' from this
 * connection snapshot the document's files and changes for later debugging.
 * @param {object} ctx - Operation context
 * @param {object} conn - Socket connection
 * @param {string} docId - Document identifier
 * @param {{level?: string, msg?: string}} data - Client log data
 * @param {object} tenErrorFiles - Error files storage configuration
 * @returns {Promise<void>}
 */
async function handleClientLog(ctx, conn, docId, data, tenErrorFiles) {
  const knownLevels = new Set(['trace', 'debug', 'info', 'warn', 'error', 'fatal']);
  const level = data.level?.toLowerCase();
  if (knownLevels.has(level)) {
    ctx.logger[level]('clientLog: %s', data.msg);
  }
  // Copy error files at most once per connection; conn.clientError is the
  // per-connection latch that prevents repeated I/O on message floods.
  if ('error' !== level || !tenErrorFiles || !docId || conn.clientError) {
    return;
  }
  conn.clientError = true;
  const destDir = 'browser/' + docId;
  // Skip the copy if another connection already produced error files for this doc.
  const existing = await storage.listObjects(ctx, destDir, tenErrorFiles);
  if (existing.length === 0) {
    await storage.copyPath(ctx, docId, destDir, undefined, tenErrorFiles);
    await saveErrorChanges(ctx, docId, destDir);
  }
}
// Getting changes for the document (either from the cache or accessing the database, but only if there were saves) // Getting changes for the document (either from the cache or accessing the database, but only if there were saves)
function* getDocumentChanges(ctx, docId, optStartIndex, optEndIndex) { function* getDocumentChanges(ctx, docId, optStartIndex, optEndIndex) {
// If during that moment, while we were waiting for a response from the database, everyone left, then nothing needs to be sent // If during that moment, while we were waiting for a response from the database, everyone left, then nothing needs to be sent
@ -3252,7 +3268,16 @@ exports.install = function (server, app, callbackFunction) {
return res; return res;
} }
function* saveErrorChanges(ctx, docId, destDir) { /**
* Save document changes to error files storage for debugging purposes.
* Retrieves changes from database and creates JSON chunks stored as separate files.
*
* @param {object} ctx - Operation context with configuration and logger
* @param {string} docId - Document identifier to retrieve changes for
* @param {string} destDir - Destination directory path in storage for error files
* @returns {Promise<void>} Resolves when all changes are saved to storage
*/
async function saveErrorChanges(ctx, docId, destDir) {
const tenEditor = getEditorConfig(ctx); const tenEditor = getEditorConfig(ctx);
const tenMaxRequestChanges = ctx.getCfg('services.CoAuthoring.server.maxRequestChanges', cfgMaxRequestChanges); const tenMaxRequestChanges = ctx.getCfg('services.CoAuthoring.server.maxRequestChanges', cfgMaxRequestChanges);
const tenErrorFiles = ctx.getCfg('FileConverter.converter.errorfiles', cfgErrorFiles); const tenErrorFiles = ctx.getCfg('FileConverter.converter.errorfiles', cfgErrorFiles);
@ -3262,12 +3287,12 @@ exports.install = function (server, app, callbackFunction) {
let changes; let changes;
const changesPrefix = destDir + '/' + constants.CHANGES_NAME + '/' + constants.CHANGES_NAME + '.json.'; const changesPrefix = destDir + '/' + constants.CHANGES_NAME + '/' + constants.CHANGES_NAME + '.json.';
do { do {
changes = yield sqlBase.getChangesPromise(ctx, docId, index, index + tenMaxRequestChanges); changes = await sqlBase.getChangesPromise(ctx, docId, index, index + tenMaxRequestChanges);
if (changes.length > 0) { if (changes.length > 0) {
let buffer; let buffer;
if (tenEditor['binaryChanges']) { if (tenEditor['binaryChanges']) {
const buffers = changes.map(elem => elem.change_data); const buffers = changes.map(elem => elem.change_data);
buffers.unshift(Buffer.from(utils.getChangesFileHeader(), 'utf-8')); buffers.unshift(Buffer.from(utils.getChangesFileHeader(), 'utf8'));
buffer = Buffer.concat(buffers); buffer = Buffer.concat(buffers);
} else { } else {
let changesJSON = indexChunk > 1 ? ',[' : '['; let changesJSON = indexChunk > 1 ? ',[' : '[';
@ -3279,7 +3304,7 @@ exports.install = function (server, app, callbackFunction) {
changesJSON += ']\r\n'; changesJSON += ']\r\n';
buffer = Buffer.from(changesJSON, 'utf8'); buffer = Buffer.from(changesJSON, 'utf8');
} }
yield storage.putObject(ctx, changesPrefix + (indexChunk++).toString().padStart(3, '0'), buffer, buffer.length, tenErrorFiles); await storage.putObject(ctx, changesPrefix + (indexChunk++).toString().padStart(3, '0'), buffer, buffer.length, tenErrorFiles);
} }
index += tenMaxRequestChanges; index += tenMaxRequestChanges;
} while (changes && tenMaxRequestChanges === changes.length); } while (changes && tenMaxRequestChanges === changes.length);