From c0b5b7e0f6fdb35f67bda8d5740ff5af2a1ce09b Mon Sep 17 00:00:00 2001
From: Paul Ostrovckij
Date: Sun, 8 Feb 2026 21:21:49 +0300
Subject: [PATCH] [Fix] Enhance file limits handling in configuration updates;
 Fix bug 79622

---
 .../sources/routes/config/config.service.js  | 34 +++++++++++++++++-
 .../server/sources/routes/config/router.js   | 20 ++++++++++-
 DocService/sources/taskresult.js             | 35 +++++++++++++++++++
 3 files changed, 87 insertions(+), 2 deletions(-)

diff --git a/AdminPanel/server/sources/routes/config/config.service.js b/AdminPanel/server/sources/routes/config/config.service.js
index 34b7601f..7d5d3fee 100644
--- a/AdminPanel/server/sources/routes/config/config.service.js
+++ b/AdminPanel/server/sources/routes/config/config.service.js
@@ -110,6 +110,29 @@ function getDiffFromBase(_ctx, currentConfig, incomingConfig) {
   return removeEmptyObjects(diff);
 }
 
+/**
+ * Returns true if diff object contains any of the file limit config paths (nested keys).
+ * Use after getDiffFromBase to decide if document status reset is needed.
+ * @param {Object} diff - Config diff object (e.g. from getDiffFromBase)
+ * @returns {boolean}
+ */
+function diffContainsFileLimits(diff) {
+  if (!diff || typeof diff !== 'object') return false;
+  const converter = diff.FileConverter && diff.FileConverter.converter;
+  return Boolean(converter && (converter.inputLimits !== undefined || converter.maxDownloadBytes !== undefined));
+}
+
+/**
+ * Returns true if paths array affects file limits (e.g. reset of limits).
+ * @param {string[]} paths - Paths being reset (e.g. from POST /reset body)
+ * @returns {boolean}
+ */
+function pathsAffectFileLimits(paths) {
+  if (!paths || !paths.length) return false;
+  if (paths.includes('*')) return true;
+  return paths.some(p => p.startsWith('FileConverter.converter.inputLimits') || p.startsWith('FileConverter.converter.maxDownloadBytes'));
+}
+
 function isAdminScope(ctx) {
   return tenantManager.isDefaultTenant(ctx);
 }
@@ -236,4 +259,13 @@ function getFullConfigRedacted(ctx) {
   return redactSensitiveParams(cfg, SENSITIVE_PARAM_PATHS);
 }
 
-module.exports = {validateScoped, getScopedBaseConfig, filterAdmin, getDiffFromBase, getFullConfigRedacted, getScopedConfig};
+module.exports = {
+  validateScoped,
+  getScopedBaseConfig,
+  filterAdmin,
+  getDiffFromBase,
+  getFullConfigRedacted,
+  getScopedConfig,
+  diffContainsFileLimits,
+  pathsAffectFileLimits
+};
diff --git a/AdminPanel/server/sources/routes/config/router.js b/AdminPanel/server/sources/routes/config/router.js
index 5d899321..adcc6be3 100644
--- a/AdminPanel/server/sources/routes/config/router.js
+++ b/AdminPanel/server/sources/routes/config/router.js
@@ -6,7 +6,16 @@ const path = require('path');
 const fs = require('fs');
 const tenantManager = require('../../../../../Common/sources/tenantManager');
 const runtimeConfigManager = require('../../../../../Common/sources/runtimeConfigManager');
-const {getScopedConfig, getScopedBaseConfig, validateScoped, getDiffFromBase, getFullConfigRedacted} = require('./config.service');
+const taskResult = require('../../../../../DocService/sources/taskresult');
+const {
+  getScopedConfig,
+  getScopedBaseConfig,
+  validateScoped,
+  getDiffFromBase,
+  getFullConfigRedacted,
+  diffContainsFileLimits,
+  pathsAffectFileLimits
+} = require('./config.service');
 const {validateJWT} = require('../../middleware/auth');
 const cookieParser = require('cookie-parser');
 const utils = require('../../../../../Common/sources/utils');
@@ -84,6 +93,11 @@ router.patch('/', validateJWT, rawFileParser, async (req, res) => {
     } else {
       await runtimeConfigManager.replaceConfig(ctx, diffConfig);
     }
+
+    if (diffContainsFileLimits(diffConfig)) {
+      taskResult.resetDocumentStatusesForFileLimits(ctx);
+    }
+
     const filteredConfig = getScopedConfig(ctx);
 
     res.status(200).json(utils.deepMergeObjects(filteredConfig, validationResult.value));
@@ -149,6 +163,10 @@ router.post('/reset', validateJWT, rawFileParser, async (req, res) => {
       await runtimeConfigManager.replaceConfig(ctx, resetConfig);
     }
 
+    if (pathsAffectFileLimits(paths)) {
+      taskResult.resetDocumentStatusesForFileLimits(ctx);
+    }
+
     delete resetConfig.adminPanel;
     ctx.logger.info('Configuration reset successfully for paths: %j', paths);
     const filteredMergedConfig = getScopedBaseConfig(ctx);
diff --git a/DocService/sources/taskresult.js b/DocService/sources/taskresult.js
index 83159bcd..f0bcfbb5 100644
--- a/DocService/sources/taskresult.js
+++ b/DocService/sources/taskresult.js
@@ -384,6 +384,40 @@ function removeIf(ctx, mask) {
   });
 }
 
+/**
+ * Resets document statuses Ok -> ErrToReload for all tenants when file limits config changed.
+ * status_info is set to CONVERT_LIMITS (-93) so the client shows "file size exceeds" instead of "Error code: 0". On next open cleanupErrToReload runs and conversion re-checks limits.
+ * @param {operationContext} ctx - Operation context (for DB and logger)
+ */
+async function resetDocumentStatusesForFileLimits(ctx) {
+  ctx.logger.info('File limits changed, resetting document statuses to force re-check');
+  try {
+    const values = [];
+    const pStatusTo = addSqlParam(commonDefines.FileStatus.ErrToReload, values);
+    const pStatusInfoTo = addSqlParam(constants.CONVERT_LIMITS, values);
+    const pOk = addSqlParam(commonDefines.FileStatus.Ok, values);
+    const sqlCommand = `UPDATE ${cfgTableResult} SET status=${pStatusTo}, status_info=${pStatusInfoTo} WHERE status=${pOk};`;
+
+    const updateResult = await new Promise((resolve, reject) => {
+      sqlBase.sqlQuery(
+        ctx,
+        sqlCommand,
+        (error, result) => {
+          if (error) reject(error);
+          else resolve(result);
+        },
+        undefined,
+        undefined,
+        values
+      );
+    });
+    const affectedRows = updateResult.affectedRows || 0;
+    ctx.logger.info('Reset document statuses: %d documents affected', affectedRows);
+  } catch (error) {
+    ctx.logger.error('Error resetting document statuses: %s', error.stack);
+  }
+}
+
 exports.TaskResultData = TaskResultData;
 exports.upsert = upsert;
 exports.select = select;
@@ -395,3 +429,4 @@ exports.addRandomKeyTask = addRandomKeyTask;
 exports.remove = remove;
 exports.removeIf = removeIf;
 exports.getExpired = sqlBase.getExpired;
+exports.resetDocumentStatusesForFileLimits = resetDocumentStatusesForFileLimits;
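
Reviewer note (not part of the patch): a minimal usage sketch of the two new config.service.js helpers, based only on the implementations above. The sample diff objects and reset paths are illustrative values, and the helpers are assumed to be imported as in router.js.

// Sketch only; assumes diffContainsFileLimits and pathsAffectFileLimits are in scope,
// e.g. const {diffContainsFileLimits, pathsAffectFileLimits} = require('./config.service');

// A diff touching a file-limit key under FileConverter.converter triggers the status reset.
console.assert(diffContainsFileLimits({FileConverter: {converter: {inputLimits: []}}}) === true);
// A diff touching only unrelated keys does not (sample key is illustrative).
console.assert(diffContainsFileLimits({services: {CoAuthoring: {}}}) === false);

// For POST /reset, a wildcard or any file-limit path triggers the reset as well.
console.assert(pathsAffectFileLimits(['*']) === true);
console.assert(pathsAffectFileLimits(['FileConverter.converter.maxDownloadBytes']) === true);
console.assert(pathsAffectFileLimits(['services.CoAuthoring.secret']) === false);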