Mirror of https://github.com/ONLYOFFICE/server.git
[Fix] Enhance file limits handling in configuration updates; Fix bug 79622
@@ -110,6 +110,29 @@ function getDiffFromBase(_ctx, currentConfig, incomingConfig) {
   return removeEmptyObjects(diff);
 }
 
+/**
+ * Returns true if diff object contains any of the file limit config paths (nested keys).
+ * Use after getDiffFromBase to decide if document status reset is needed.
+ * @param {Object} diff - Config diff object (e.g. from getDiffFromBase)
+ * @returns {boolean}
+ */
+function diffContainsFileLimits(diff) {
+  if (!diff || typeof diff !== 'object') return false;
+  const converter = diff.FileConverter && diff.FileConverter.converter;
+  return Boolean(converter && (converter.inputLimits !== undefined || converter.maxDownloadBytes !== undefined));
+}
+
+/**
+ * Returns true if paths array affects file limits (e.g. reset of limits).
+ * @param {string[]} paths - Paths being reset (e.g. from POST /reset body)
+ * @returns {boolean}
+ */
+function pathsAffectFileLimits(paths) {
+  if (!paths || !paths.length) return false;
+  if (paths.includes('*')) return true;
+  return paths.some(p => p.startsWith('FileConverter.converter.inputLimits') || p.startsWith('FileConverter.converter.maxDownloadBytes'));
+}
+
 function isAdminScope(ctx) {
   return tenantManager.isDefaultTenant(ctx);
 }
@@ -236,4 +259,13 @@ function getFullConfigRedacted(ctx)
   return redactSensitiveParams(cfg, SENSITIVE_PARAM_PATHS);
 }
 
-module.exports = {validateScoped, getScopedBaseConfig, filterAdmin, getDiffFromBase, getFullConfigRedacted, getScopedConfig};
+module.exports = {
+  validateScoped,
+  getScopedBaseConfig,
+  filterAdmin,
+  getDiffFromBase,
+  getFullConfigRedacted,
+  getScopedConfig,
+  diffContainsFileLimits,
+  pathsAffectFileLimits
+};
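
As an aside (not part of the diff): the new helpers only inspect the FileConverter.converter.inputLimits and FileConverter.converter.maxDownloadBytes keys of a diff produced by getDiffFromBase. A minimal usage sketch with hypothetical diff objects, assuming the nested shape shown above:

    // Illustrative inputs only; the sample values are made up, not taken from the commit.
    const limitsDiff = {FileConverter: {converter: {maxDownloadBytes: 104857600}}};
    const unrelatedDiff = {someOtherSection: {timeout: 5000}};

    diffContainsFileLimits(limitsDiff);    // true  -> triggers the document status reset
    diffContainsFileLimits(unrelatedDiff); // false -> config is replaced, statuses untouched
    diffContainsFileLimits(null);          // false -> guarded against a missing diff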
@@ -6,7 +6,16 @@ const path = require('path');
 const fs = require('fs');
 const tenantManager = require('../../../../../Common/sources/tenantManager');
 const runtimeConfigManager = require('../../../../../Common/sources/runtimeConfigManager');
-const {getScopedConfig, getScopedBaseConfig, validateScoped, getDiffFromBase, getFullConfigRedacted} = require('./config.service');
+const taskResult = require('../../../../../DocService/sources/taskresult');
+const {
+  getScopedConfig,
+  getScopedBaseConfig,
+  validateScoped,
+  getDiffFromBase,
+  getFullConfigRedacted,
+  diffContainsFileLimits,
+  pathsAffectFileLimits
+} = require('./config.service');
 const {validateJWT} = require('../../middleware/auth');
 const cookieParser = require('cookie-parser');
 const utils = require('../../../../../Common/sources/utils');
@@ -84,6 +93,11 @@ router.patch('/', validateJWT, rawFileParser, async (req, res) => {
     } else {
       await runtimeConfigManager.replaceConfig(ctx, diffConfig);
     }
+
+    if (diffContainsFileLimits(diffConfig)) {
+      taskResult.resetDocumentStatusesForFileLimits(ctx);
+    }
+
     const filteredConfig = getScopedConfig(ctx);
 
     res.status(200).json(utils.deepMergeObjects(filteredConfig, validationResult.value));
@@ -149,6 +163,10 @@ router.post('/reset', validateJWT, rawFileParser, async (req, res) => {
       await runtimeConfigManager.replaceConfig(ctx, resetConfig);
     }
 
+    if (pathsAffectFileLimits(paths)) {
+      taskResult.resetDocumentStatusesForFileLimits(ctx);
+    }
+
     delete resetConfig.adminPanel;
     ctx.logger.info('Configuration reset successfully for paths: %j', paths);
     const filteredMergedConfig = getScopedBaseConfig(ctx);
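
Again as an aside (not part of the diff): the reset route makes the same decision from the list of paths in the request body. pathsAffectFileLimits treats a wildcard reset as affecting the limits and otherwise prefix-matches against the two limit keys. A short sketch with hypothetical paths arrays:

    // Illustrative inputs only.
    pathsAffectFileLimits(['*']);                                         // true  (full reset)
    pathsAffectFileLimits(['FileConverter.converter.inputLimits']);       // true  (prefix match)
    pathsAffectFileLimits(['FileConverter.converter.maxDownloadBytes']);  // true
    pathsAffectFileLimits(['services.CoAuthoring.sql.type']);             // false (unrelated path)
    pathsAffectFileLimits([]);                                            // false (nothing to reset)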
@@ -384,6 +384,40 @@ function removeIf(ctx, mask) {
   });
 }
 
+/**
+ * Resets document statuses Ok -> ErrToReload for all tenants when file limits config changed.
+ * status_info is set to CONVERT_LIMITS (-93) so the client shows "file size exceeds" instead of "Error code: 0". On next open cleanupErrToReload runs and conversion re-checks limits.
+ * @param {operationContext} ctx - Operation context (for DB and logger)
+ */
+async function resetDocumentStatusesForFileLimits(ctx) {
+  ctx.logger.info('File limits changed, resetting document statuses to force re-check');
+  try {
+    const values = [];
+    const pStatusTo = addSqlParam(commonDefines.FileStatus.ErrToReload, values);
+    const pStatusInfoTo = addSqlParam(constants.CONVERT_LIMITS, values);
+    const pOk = addSqlParam(commonDefines.FileStatus.Ok, values);
+    const sqlCommand = `UPDATE ${cfgTableResult} SET status=${pStatusTo}, status_info=${pStatusInfoTo} WHERE status=${pOk};`;
+
+    const updateResult = await new Promise((resolve, reject) => {
+      sqlBase.sqlQuery(
+        ctx,
+        sqlCommand,
+        (error, result) => {
+          if (error) reject(error);
+          else resolve(result);
+        },
+        undefined,
+        undefined,
+        values
+      );
+    });
+    const affectedRows = updateResult.affectedRows || 0;
+    ctx.logger.info('Reset document statuses: %d documents affected', affectedRows);
+  } catch (error) {
+    ctx.logger.error('Error resetting document statuses: %s', error.stack);
+  }
+}
+
 exports.TaskResultData = TaskResultData;
 exports.upsert = upsert;
 exports.select = select;
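
For reference (an aside, not part of the diff): the UPDATE is assembled from driver-specific placeholders returned by addSqlParam, so its final text depends on the configured SQL backend. A rough sketch of what it evaluates to, assuming cfgTableResult resolves to the default task_result table and a driver whose placeholders are '?' (both are assumptions here):

    // Sketch only, not the literal output of the commit:
    // sqlCommand -> 'UPDATE task_result SET status=?, status_info=? WHERE status=?;'
    // values     -> [commonDefines.FileStatus.ErrToReload, constants.CONVERT_LIMITS, commonDefines.FileStatus.Ok]

Because the function catches and logs its own errors, the route handlers above can call it without await: a failed status reset is logged but does not fail the config update or reset request.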
@@ -395,3 +429,4 @@ exports.addRandomKeyTask = addRandomKeyTask;
 exports.remove = remove;
 exports.removeIf = removeIf;
 exports.getExpired = sqlBase.getExpired;
+exports.resetDocumentStatusesForFileLimits = resetDocumentStatusesForFileLimits;