Merge remote-tracking branch 'remotes/origin/release/v9.1.0' into feature/linter-mysql

# Conflicts:
#	.github/workflows/azureStorageTests.yml
#	.github/workflows/s3storageTests.yml
#	Common/config/default.json
#	Common/config/log4js/development.json
#	Common/config/log4js/production.json
#	Common/sources/constants.js
#	Common/sources/logger.js
#	Common/sources/moduleReloader.js
#	Common/sources/operationContext.js
#	Common/sources/storage/storage-az.js
#	Common/sources/storage/storage-fs.js
#	Common/sources/utils.js
#	DocService/sources/DocsCoServer.js
#	DocService/sources/ai/aiProxyHandler.js
#	DocService/sources/canvasservice.js
#	DocService/sources/converterservice.js
#	DocService/sources/databaseConnectors/oracleConnector.js
#	DocService/sources/fileuploaderservice.js
#	DocService/sources/wopiClient.js
#	DocService/sources/wopiUtils.js
#	FileConverter/sources/converter.js
This commit is contained in:
Sergey Konovalov
2025-08-31 02:22:56 +03:00
31 changed files with 1113 additions and 903 deletions

View File

@ -80,7 +80,7 @@
"enable": false,
"transportType": ["email"],
"template": {
"title": "%s Docs license connection limit warning",
"title": "%s Docs license %s limit warning",
"body": "Attention! You have reached %s%% of the %s limit set by your license."
},
"policies": {
@ -91,7 +91,7 @@
"enable": false,
"transportType": ["email"],
"template": {
"title": "%s Docs license connection limit warning",
"title": "%s Docs license %s limit warning",
"body": "Attention! You have reached %s%% of the live viewer %s limit set by your license."
},
"policies": {
@ -123,6 +123,16 @@
"MaxKeys": 1000
},
"deleteObject": {}
},
"az": {
"uploadData": {},
"uploadStream": {},
"download": {},
"syncCopyFromURL": {},
"listBlobsFlat": {
"maxPageSize": 1000
},
"deleteBlob": {}
}
},
"urlExpires": 604800,
@ -131,7 +141,7 @@
"sslEnabled": false,
"s3ForcePathStyle": true,
"externalHost": "",
"useDirectStorageUrls": true
"useDirectStorageUrls": false
},
"persistentStorage": {},
"rabbitmq": {
@ -406,6 +416,7 @@
"queueTimeout": 60000
},
"oracleExtraOptions": {
"thin": true,
"connectTimeout": 60
},
"msSqlExtraOptions": {

View File

@ -3,8 +3,8 @@
"default": {
"type": "console",
"layout": {
"type": "pattern",
"pattern": "%[[%d] [%p] [%X{TENANT}] [%X{DOCID}] [%X{USERID}] %c -%] %.10000m"
"type": "patternWithTokens",
"pattern": "%[[%d] [%p] [%X{TENANT}] [%X{DOCID}] [%X{USERID}]%x{usid} %c -%] %.10000m"
}
}
},

View File

@ -3,8 +3,8 @@
"default": {
"type": "console",
"layout": {
"type": "pattern",
"pattern": "[%d] [%p] [%X{TENANT}] [%X{DOCID}] [%X{USERID}] %c - %.10000m"
"type": "patternWithTokens",
"pattern": "[%d] [%p] [%X{TENANT}] [%X{DOCID}] [%X{USERID}]%x{usid} %c - %.10000m"
}
}
},

View File

@ -33,13 +33,13 @@
'use strict';
exports.DOC_ID_PATTERN = '0-9-.a-zA-Z_=';
exports.DOC_ID_REGEX = new RegExp('^[' + exports.DOC_ID_PATTERN + ']*$', 'i');
exports.DOC_ID_REPLACE_REGEX = new RegExp('[^' + exports.DOC_ID_PATTERN + ']', 'g');
exports.DOC_ID_SOCKET_PATTERN = new RegExp('^/doc/([' + exports.DOC_ID_PATTERN + ']*)/c.+', 'i');
exports.DOC_ID_REGEX = new RegExp("^[" + exports.DOC_ID_PATTERN + "]*$", 'i');
exports.DOC_ID_REPLACE_REGEX = new RegExp("[^" + exports.DOC_ID_PATTERN + "]", 'g');
exports.DOC_ID_SOCKET_PATTERN = new RegExp("^/doc/([" + exports.DOC_ID_PATTERN + "]*)/c.+", 'i');
exports.DOC_ID_MAX_LENGTH = 240;
exports.USER_ID_MAX_LENGTH = 240; //255-240=15 symbols to make user id unique
exports.USER_ID_MAX_LENGTH = 240;//255-240=15 symbols to make user id unique
exports.USER_NAME_MAX_LENGTH = 255;
exports.PASSWORD_MAX_LENGTH = 255; //set password limit for DoS protection with long password
exports.PASSWORD_MAX_LENGTH = 255;//set password limit for DoS protection with long password
exports.EXTENTION_REGEX = /^[a-zA-Z0-9]*$/;
exports.CHAR_DELIMITER = String.fromCharCode(5);
exports.OUTPUT_NAME = 'output';
@ -52,13 +52,14 @@ exports.DEFAULT_USER_ID = 'userId';
exports.ALLOWED_PROTO = /^https?$/i;
exports.SHARD_KEY_WOPI_NAME = 'WOPISrc';
exports.SHARD_KEY_API_NAME = 'shardkey';
exports.USER_SESSION_ID_NAME = 'usid';
exports.RIGHTS = {
None: 0,
Edit: 1,
Review: 2,
Comment: 3,
View: 4
None : 0,
Edit : 1,
Review : 2,
Comment : 3,
View : 4
};
exports.LICENSE_MODE = {
@ -69,16 +70,16 @@ exports.LICENSE_MODE = {
};
exports.LICENSE_RESULT = {
Error: 1,
Expired: 2,
Success: 3,
UnknownUser: 4,
Connections: 5,
ExpiredTrial: 6,
SuccessLimit: 7,
UsersCount: 8,
ConnectionsOS: 9,
UsersCountOS: 10,
Error : 1,
Expired : 2,
Success : 3,
UnknownUser : 4,
Connections : 5,
ExpiredTrial : 6,
SuccessLimit : 7,
UsersCount : 8,
ConnectionsOS : 9,
UsersCountOS : 10,
ExpiredLimited: 11,
ConnectionsLiveOS: 12,
ConnectionsLive: 13,
@ -91,7 +92,7 @@ exports.LICENSE_CONNECTIONS = 20;
exports.LICENSE_USERS = 3;
exports.LICENSE_EXPIRE_USERS_ONE_DAY = 24 * 60 * 60; // day in seconds
exports.AVS_OFFICESTUDIO_FILE_UNKNOWN = 0x0000;
exports.AVS_OFFICESTUDIO_FILE_UNKNOWN = 0x0000;
exports.AVS_OFFICESTUDIO_FILE_DOCUMENT = 0x0040;
exports.AVS_OFFICESTUDIO_FILE_DOCUMENT_DOCX = exports.AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0001;
exports.AVS_OFFICESTUDIO_FILE_DOCUMENT_DOC = exports.AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0002;
@ -128,7 +129,7 @@ exports.AVS_OFFICESTUDIO_FILE_PRESENTATION_POTM = exports.AVS_OFFICESTUDIO_FILE_
exports.AVS_OFFICESTUDIO_FILE_PRESENTATION_ODP_FLAT = exports.AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0009;
exports.AVS_OFFICESTUDIO_FILE_PRESENTATION_OTP = exports.AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x000a;
exports.AVS_OFFICESTUDIO_FILE_PRESENTATION_PPTX_PACKAGE = exports.AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x000b;
exports.AVS_OFFICESTUDIO_FILE_PRESENTATION_ODG = exports.AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x000c;
exports.AVS_OFFICESTUDIO_FILE_PRESENTATION_ODG = exports.AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x000c;
exports.AVS_OFFICESTUDIO_FILE_SPREADSHEET = 0x0100;
exports.AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLSX = exports.AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0001;
@ -195,7 +196,7 @@ exports.AVS_OFFICESTUDIO_FILE_CANVAS_SPREADSHEET = exports.AVS_OFFICESTUDIO_FILE
exports.AVS_OFFICESTUDIO_FILE_CANVAS_PRESENTATION = exports.AVS_OFFICESTUDIO_FILE_CANVAS + 0x0003;
exports.AVS_OFFICESTUDIO_FILE_CANVAS_PDF = exports.AVS_OFFICESTUDIO_FILE_CANVAS + 0x0004;
exports.AVS_OFFICESTUDIO_FILE_DRAW = 0x4000;
exports.AVS_OFFICESTUDIO_FILE_DRAW = 0x4000;
exports.AVS_OFFICESTUDIO_FILE_DRAW_VSDX = exports.AVS_OFFICESTUDIO_FILE_DRAW + 0x0001;
exports.AVS_OFFICESTUDIO_FILE_DRAW_VSSX = exports.AVS_OFFICESTUDIO_FILE_DRAW + 0x0002;
exports.AVS_OFFICESTUDIO_FILE_DRAW_VSTX = exports.AVS_OFFICESTUDIO_FILE_DRAW + 0x0003;
@ -248,6 +249,7 @@ exports.VKEY_TIME_EXPIRE = -124;
exports.VKEY_TIME_INCORRECT = -125;
exports.EDITOR_CHANGES = -160;
exports.PASSWORD = -180;
exports.FORCED_VIEW_MODE = -200;
//Quorum queues internally only support two priorities: high and normal.
//Messages without a priority set will be mapped to normal as will priorities 0 - 4.
@ -291,16 +293,16 @@ exports.NO_CACHE_CODE = 4009;
exports.NO_CACHE = 'no cache';
exports.RESTORE_CODE = 4010;
exports.RESTORE = 'restore';
exports.QUIET_CODE = 4011; //browser only
exports.QUIET_CODE = 4011;//browser only
exports.QUIET = 'quiet';
exports.RECONNECT_FAILED_CODE = 4012; //browser only
exports.RECONNECT_FAILED_CODE = 4012;//browser only
exports.RECONNECT_FAILED = 'reconnect failed';
//update connection error codes
exports.CONTENT_DISPOSITION_INLINE = 'inline';
exports.CONTENT_DISPOSITION_ATTACHMENT = 'attachment';
exports.CONN_CLOSED = 'closed';
exports.CONN_CLOSED = "closed";
exports.FILE_STATUS_OK = 'ok';
exports.FILE_STATUS_UPDATE_VERSION = 'updateversion';
@ -310,9 +312,9 @@ exports.ACTIVEMQ_TOPIC_PREFIX = 'topic://';
exports.TEMPLATES_DEFAULT_LOCALE = 'en-US';
exports.TEMPLATES_FOLDER_LOCALE_COLLISON_MAP = {
en: 'en-US',
pt: 'pt-BR',
zh: 'zh-CH',
'en': 'en-US',
'pt': 'pt-BR',
'zh': 'zh-CH',
'pt-PT': 'pt-PT',
'zh-TW': 'zh-TW'
};
@ -330,4 +332,13 @@ exports.TABLE_RESULT_SCHEMA = [
'password',
'additional'
];
exports.TABLE_CHANGES_SCHEMA = ['tenant', 'id', 'change_id', 'user_id', 'user_id_original', 'user_name', 'change_data', 'change_date'];
exports.TABLE_CHANGES_SCHEMA = [
'tenant',
'id',
'change_id',
'user_id',
'user_id_original',
'user_name',
'change_data',
'change_date',
];

View File

@ -32,66 +32,86 @@
'use strict';
const config = require('config');
const util = require('util');
var config = require('config');
var util = require('util');
const log4js = require('log4js');
var log4js = require('log4js');
const layouts = require('log4js/lib/layouts');
// https://stackoverflow.com/a/36643588
const dateToJSONWithTZ = function (d) {
const timezoneOffsetInHours = -(d.getTimezoneOffset() / 60); //UTC minus local time
const sign = timezoneOffsetInHours >= 0 ? '+' : '-';
const leadingZero = Math.abs(timezoneOffsetInHours) < 10 ? '0' : '';
var dateToJSONWithTZ = function (d) {
var timezoneOffsetInHours = -(d.getTimezoneOffset() / 60); //UTC minus local time
var sign = timezoneOffsetInHours >= 0 ? '+' : '-';
var leadingZero = (Math.abs(timezoneOffsetInHours) < 10) ? '0' : '';
//It's a bit unfortunate that we need to construct a new Date instance
//It's a bit unfortunate that we need to construct a new Date instance
//(we don't want _d_ Date instance to be modified)
const correctedDate = new Date(d.getFullYear(), d.getMonth(), d.getDate(), d.getHours(), d.getMinutes(), d.getSeconds(), d.getMilliseconds());
var correctedDate = new Date(d.getFullYear(), d.getMonth(),
d.getDate(), d.getHours(), d.getMinutes(), d.getSeconds(),
d.getMilliseconds());
correctedDate.setHours(d.getHours() + timezoneOffsetInHours);
const iso = correctedDate.toISOString().replace('Z', '');
var iso = correctedDate.toISOString().replace('Z', '');
return iso + sign + leadingZero + Math.abs(timezoneOffsetInHours).toString() + ':00';
};
log4js.addLayout('json', _config => {
return function (logEvent) {
logEvent['startTime'] = dateToJSONWithTZ(logEvent['startTime']);
logEvent['message'] = util.format(...logEvent['data']);
delete logEvent['data'];
return JSON.stringify(logEvent);
};
log4js.addLayout('json', function(config) {
return function(logEvent) {
logEvent['startTime'] = dateToJSONWithTZ(logEvent['startTime']);
logEvent['message'] = util.format(...logEvent['data']);
delete logEvent['data'];
return JSON.stringify(logEvent);
}
});
/**
 * Custom pattern layout that supports %x{usid} using USERSESSIONID from context.
 * @param {object} cfg - layout config; may carry `pattern` and extra `tokens`
 * @returns {function} log4js layout function
 */
log4js.addLayout('patternWithTokens', function(cfg) {
  const effectivePattern = cfg && cfg.pattern ? cfg.pattern : '%m';
  const userTokens = cfg && cfg.tokens ? cfg.tokens : {};
  // Built-in usid token wins over any user-supplied token of the same name.
  const mergedTokens = {
    ...userTokens,
    usid: function(ev) {
      const sessionId = ev && ev.context && ev.context.USERSESSIONID;
      return sessionId ? ` [${sessionId}]` : '';
    }
  };
  return layouts.patternLayout(effectivePattern, mergedTokens);
});
log4js.configure(config.get('log.filePath'));
const logger = log4js.getLogger('nodeJS');
var logger = log4js.getLogger('nodeJS');
if (config.get('log.options.replaceConsole')) {
console.log = logger.info.bind(logger);
console.info = logger.info.bind(logger);
console.warn = logger.warn.bind(logger);
console.error = logger.error.bind(logger);
console.debug = logger.debug.bind(logger);
console.log = logger.info.bind(logger);
console.info = logger.info.bind(logger);
console.warn = logger.warn.bind(logger);
console.error = logger.error.bind(logger);
console.debug = logger.debug.bind(logger);
}
exports.getLogger = function () {
return log4js.getLogger.apply(log4js, Array.prototype.slice.call(arguments));
exports.getLogger = function (){
return log4js.getLogger.apply(log4js, Array.prototype.slice.call(arguments));
};
exports.trace = function () {
return logger.trace.apply(logger, Array.prototype.slice.call(arguments));
exports.trace = function (){
return logger.trace.apply(logger, Array.prototype.slice.call(arguments));
};
exports.debug = function () {
return logger.debug.apply(logger, Array.prototype.slice.call(arguments));
exports.debug = function (){
return logger.debug.apply(logger, Array.prototype.slice.call(arguments));
};
exports.info = function () {
return logger.info.apply(logger, Array.prototype.slice.call(arguments));
exports.info = function (){
return logger.info.apply(logger, Array.prototype.slice.call(arguments));
};
exports.warn = function () {
return logger.warn.apply(logger, Array.prototype.slice.call(arguments));
exports.warn = function (){
return logger.warn.apply(logger, Array.prototype.slice.call(arguments));
};
exports.error = function () {
return logger.error.apply(logger, Array.prototype.slice.call(arguments));
exports.error = function (){
return logger.error.apply(logger, Array.prototype.slice.call(arguments));
};
exports.fatal = function () {
return logger.fatal.apply(logger, Array.prototype.slice.call(arguments));
exports.fatal = function (){
return logger.fatal.apply(logger, Array.prototype.slice.call(arguments));
};
exports.shutdown = function (callback) {
return log4js.shutdown(callback);
return log4js.shutdown(callback);
};

View File

@ -51,43 +51,52 @@ function reloadNpmModule(moduleName) {
}
/**
* Requires config module with runtime configuration support
* Requires config module with runtime configuration support.
* Temporarily sets NODE_CONFIG for reload, then restores environment to prevent E2BIG.
* @param {Object} opt_additionalConfig - Additional configuration to merge
* @returns {Object} config module
*/
function requireConfigWithRuntime(opt_additionalConfig) {
let config = require('config');
// Backup original NODE_CONFIG to avoid growing environment
const prevNodeConfig = process.env.NODE_CONFIG;
let nodeConfigOverridden = false;
try {
const configFilePath = config.get('runtimeConfig.filePath');
if (configFilePath) {
// Update NODE_CONFIG with runtime configuration
if (configFilePath) {
const configData = fs.readFileSync(configFilePath, 'utf8');
const configData = fs.readFileSync(configFilePath, 'utf8');
let curNodeConfig;
if (process.env['NODE_CONFIG']) {
curNodeConfig = JSON.parse(process.env['NODE_CONFIG']);
} else {
curNodeConfig = {};
}
// Parse existing NODE_CONFIG or start with empty object
let curNodeConfig = JSON.parse(process.env.NODE_CONFIG ?? '{}');
const fileConfig = JSON.parse(configData);
const fileConfig = JSON.parse(configData);
// Merge configurations: NODE_CONFIG -> runtime -> additional
curNodeConfig = config.util.extendDeep(curNodeConfig, fileConfig);
if (opt_additionalConfig) {
curNodeConfig = config.util.extendDeep(curNodeConfig, opt_additionalConfig);
}
process.env['NODE_CONFIG'] = JSON.stringify(curNodeConfig);
// Merge configurations: NODE_CONFIG -> runtime -> additional
curNodeConfig = config.util.extendDeep(curNodeConfig, fileConfig);
if (opt_additionalConfig) {
curNodeConfig = config.util.extendDeep(curNodeConfig, opt_additionalConfig);
}
// Temporarily set NODE_CONFIG only to reload the config module
process.env.NODE_CONFIG = JSON.stringify(curNodeConfig);
nodeConfigOverridden = true;
config = reloadNpmModule('config');
}
} catch (err) {
if (err.code !== 'ENOENT') {
console.error('Failed to load runtime config: %s', err.stack);
}
} finally {
// Restore original NODE_CONFIG to keep env small and avoid E2BIG on Windows/pkg
if (nodeConfigOverridden) {
if (typeof prevNodeConfig === 'undefined') {
delete process.env.NODE_CONFIG;
} else {
process.env.NODE_CONFIG = prevNodeConfig;
}
}
}
return config;
}

View File

@ -43,12 +43,13 @@ function Context() {
this.logger = logger.getLogger('nodeJS');
this.initDefault();
}
Context.prototype.init = function (tenant, docId, userId, opt_shardKey, opt_WopiSrc) {
Context.prototype.init = function (tenant, docId, userId, opt_shardKey, opt_WopiSrc, opt_userSessionId) {
this.setTenant(tenant);
this.setDocId(docId);
this.setUserId(userId);
this.setShardKey(opt_shardKey);
this.setWopiSrc(opt_WopiSrc);
this.setUserSessionId(opt_userSessionId);
this.config = null;
this.secret = null;
@ -72,21 +73,23 @@ Context.prototype.initFromConnection = function (conn) {
const userId = conn.user?.id;
const shardKey = utils.getShardKeyByConnection(this, conn);
const wopiSrc = utils.getWopiSrcByConnection(this, conn);
this.init(tenant, docId || this.docId, userId || this.userId, shardKey, wopiSrc);
let userSessionId = utils.getSessionIdByConnection(this, conn);
this.init(tenant, docId || this.docId, userId || this.userId, shardKey, wopiSrc, userSessionId);
};
Context.prototype.initFromRequest = function (req) {
const tenant = tenantManager.getTenantByRequest(this, req);
const shardKey = utils.getShardKeyByRequest(this, req);
const wopiSrc = utils.getWopiSrcByRequest(this, req);
this.init(tenant, this.docId, this.userId, shardKey, wopiSrc);
let userSessionId = utils.getSessionIdByRequest(this, req);
this.init(tenant, this.docId, this.userId, shardKey, wopiSrc, userSessionId);
};
Context.prototype.initFromTaskQueueData = function (task) {
const ctx = task.getCtx();
this.init(ctx.tenant, ctx.docId, ctx.userId, ctx.shardKey, ctx.wopiSrc);
this.init(ctx.tenant, ctx.docId, ctx.userId, ctx.shardKey, ctx.wopiSrc, ctx.userSessionId);
};
Context.prototype.initFromPubSub = function (data) {
const ctx = data.ctx;
this.init(ctx.tenant, ctx.docId, ctx.userId, ctx.shardKey, ctx.wopiSrc);
this.init(ctx.tenant, ctx.docId, ctx.userId, ctx.shardKey, ctx.wopiSrc, ctx.userSessionId);
};
Context.prototype.initTenantCache = async function () {
const runtimeConfig = await runtimeConfigManager.getConfig(this);
@ -114,11 +117,18 @@ Context.prototype.setShardKey = function (shardKey) {
Context.prototype.setWopiSrc = function (wopiSrc) {
this.wopiSrc = wopiSrc;
};
/**
 * Stores the user session id on the context and publishes it to the logger
 * context as USERSESSIONID (consumed via the %x{usid} token in the
 * 'patternWithTokens' log layout).
 * @param {string|undefined} userSessionId - session id from the request/connection, if any
 */
Context.prototype.setUserSessionId = function (userSessionId) {
  // NOTE(review): a falsy value is ignored, so re-initializing this context
  // without a session id keeps the previously stored id and logger token —
  // unlike setShardKey/setWopiSrc, which assign unconditionally. Confirm intended.
  if (userSessionId) {
    this.userSessionId = userSessionId;
    this.logger.addContext('USERSESSIONID', userSessionId);
  }
};
Context.prototype.toJSON = function () {
return {
tenant: this.tenant,
docId: this.docId,
userId: this.userId,
userSessionId: this.userSessionId,
shardKey: this.shardKey,
wopiSrc: this.wopiSrc
};

View File

@ -68,12 +68,31 @@ function getFilePath(storageCfg, strPath) {
return `${storageFolderName}/${strPath}`;
}
/**
 * Merges per-command Azure options from storage config over the base options.
 * @param {Object} baseOptions - Base options object
 * @param {Object} storageCfg - Storage configuration
 * @param {string} commandType - uploadData, uploadStream, download, etc.
 * @returns {Object|undefined} Merged options or undefined if empty
 */
function applyCommandOptions(baseOptions, storageCfg, commandType) {
  const azOptions = storageCfg.commandOptions.az;
  const commandOverrides = azOptions ? azOptions[commandType] : undefined;
  if (commandOverrides && Object.keys(commandOverrides).length > 0) {
    // Config-level overrides win over base options on key collisions.
    return {...baseOptions, ...commandOverrides};
  }
  // No (non-empty) overrides: hand back the base options, or undefined when empty
  // so SDK calls receive no options argument at all.
  return Object.keys(baseOptions).length > 0 ? baseOptions : undefined;
}
async function listObjectsExec(storageCfg, prefix, output = []) {
const containerClient = getContainerClient(storageCfg);
const storageFolderName = storageCfg.storageFolderName;
const prefixWithFolder = storageFolderName ? `${storageFolderName}/${prefix}` : prefix;
for await (const blob of containerClient.listBlobsFlat({prefix: prefixWithFolder})) {
const baseOptions = {prefix: prefixWithFolder};
const listOptions = applyCommandOptions(baseOptions, storageCfg, 'listBlobsFlat');
for await (const blob of containerClient.listBlobsFlat(listOptions)) {
const relativePath = storageFolderName ? blob.name.substring(storageFolderName.length + 1) : blob.name;
output.push(relativePath);
}
@ -82,7 +101,12 @@ async function listObjectsExec(storageCfg, prefix, output = []) {
async function deleteObjectsHelp(storageCfg, aKeys) {
const containerClient = getContainerClient(storageCfg);
await Promise.all(aKeys.map(key => containerClient.deleteBlob(key.Key)));
const deleteOptions = applyCommandOptions({}, storageCfg, 'deleteBlob');
await Promise.all(
aKeys.map(key => {
return containerClient.deleteBlob(key.Key, deleteOptions);
})
);
}
async function headObject(storageCfg, strPath) {
@ -93,13 +117,15 @@ async function headObject(storageCfg, strPath) {
async function getObject(storageCfg, strPath) {
const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
const response = await blobClient.download();
const options = applyCommandOptions({}, storageCfg, 'download');
const response = await blobClient.download(options);
return await utils.stream2Buffer(response.readableStreamBody);
}
async function createReadStream(storageCfg, strPath) {
const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
const response = await blobClient.download();
const options = applyCommandOptions({}, storageCfg, 'download');
const response = await blobClient.download(options);
return {
contentLength: response.contentLength,
readStream: response.readableStreamBody
@ -109,17 +135,17 @@ async function createReadStream(storageCfg, strPath) {
async function putObject(storageCfg, strPath, buffer, _contentLength) {
const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
const uploadOptions = {
const baseOptions = {
blobHTTPHeaders: {
contentType: mime.getType(strPath),
contentDisposition: utils.getContentDisposition(path.basename(strPath))
}
};
const uploadOptions = applyCommandOptions(baseOptions, storageCfg, 'uploadData');
if (buffer instanceof Buffer) {
// Handle Buffer upload
await blobClient.uploadData(buffer, uploadOptions);
} else if (typeof buffer.pipe === 'function') {
// Handle Stream upload
await blobClient.uploadStream(buffer, undefined, undefined, uploadOptions);
} else {
throw new TypeError('Input must be Buffer or Readable stream');
@ -130,12 +156,15 @@ async function uploadObject(storageCfg, strPath, filePath) {
const blockBlobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
const uploadStream = fs.createReadStream(filePath);
await blockBlobClient.uploadStream(uploadStream, undefined, undefined, {
const uploadOptions = {
blobHTTPHeaders: {
contentType: mime.getType(strPath),
contentDisposition: utils.getContentDisposition(path.basename(strPath))
}
});
};
const finalOptions = applyCommandOptions(uploadOptions, storageCfg, 'uploadStream');
await blockBlobClient.uploadStream(uploadStream, undefined, undefined, finalOptions);
}
async function copyObject(storageCfgSrc, storageCfgDst, sourceKey, destinationKey) {
@ -152,7 +181,8 @@ async function copyObject(storageCfgSrc, storageCfgDst, sourceKey, destinationKe
new StorageSharedKeyCredential(storageCfgSrc.accessKeyId, storageCfgSrc.secretAccessKey)
).toString();
await destBlobClient.syncCopyFromURL(`${sourceBlobClient.url}?${sasToken}`);
const copyOptions = applyCommandOptions({}, storageCfgDst, 'syncCopyFromURL');
await destBlobClient.syncCopyFromURL(`${sourceBlobClient.url}?${sasToken}`, copyOptions);
}
async function listObjects(storageCfg, strPath) {
@ -161,7 +191,8 @@ async function listObjects(storageCfg, strPath) {
async function deleteObject(storageCfg, strPath) {
const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
await blobClient.delete();
const options = applyCommandOptions({}, storageCfg, 'deleteBlob');
await blobClient.delete(options);
}
async function deleteObjects(storageCfg, strPaths) {

View File

@ -36,6 +36,7 @@ const {cp, rm, mkdir} = require('fs/promises');
const {stat, readFile, writeFile} = require('fs/promises');
const path = require('path');
const utils = require('../utils');
const {pipeline} = require('node:stream/promises');
function getFilePath(storageCfg, strPath) {
const storageFolderPath = storageCfg.fs.folderPath;
@ -75,7 +76,7 @@ async function putObject(storageCfg, strPath, buffer, _contentLength) {
await writeFile(fsPath, buffer);
} else {
const writable = await utils.promiseCreateWriteStream(fsPath);
await utils.pipeStreams(buffer, writable, true);
await pipeline(buffer, writable);
}
}

View File

@ -906,16 +906,24 @@ function getShardKeyByConnection(ctx, conn) {
function getWopiSrcByConnection(ctx, conn) {
return conn?.handshake?.query?.[constants.SHARD_KEY_WOPI_NAME];
}
/**
 * Reads the user session id ("usid") from a connection's handshake query.
 * @param {object} ctx - operation context (unused; kept for signature parity with sibling getters)
 * @param {object} conn - connection carrying handshake.query
 * @returns {string|undefined} session id, or undefined when absent
 */
function getSessionIdByConnection(ctx, conn) {
  const handshakeQuery = conn?.handshake?.query;
  return handshakeQuery?.[constants.USER_SESSION_ID_NAME];
}
function getShardKeyByRequest(ctx, req) {
return req.query?.[constants.SHARD_KEY_API_NAME];
}
function getWopiSrcByRequest(ctx, req) {
return req.query?.[constants.SHARD_KEY_WOPI_NAME];
}
/**
 * Reads the user session id ("usid") from an HTTP request's query string.
 * @param {object} ctx - operation context (unused; kept for signature parity with sibling getters)
 * @param {object} req - request object carrying a parsed query
 * @returns {string|undefined} session id, or undefined when absent
 */
function getSessionIdByRequest(ctx, req) {
  const requestQuery = req.query;
  return requestQuery?.[constants.USER_SESSION_ID_NAME];
}
exports.getShardKeyByConnection = getShardKeyByConnection;
exports.getWopiSrcByConnection = getWopiSrcByConnection;
exports.getSessionIdByConnection = getSessionIdByConnection;
exports.getShardKeyByRequest = getShardKeyByRequest;
exports.getWopiSrcByRequest = getWopiSrcByRequest;
exports.getSessionIdByRequest = getSessionIdByRequest;
function stream2Buffer(stream) {
return new Promise((resolve, reject) => {
if (!stream.readable) {
@ -947,24 +955,22 @@ function changeOnlyOfficeUrl(inputUrl, strPath, optFilename) {
return inputUrl + constants.ONLY_OFFICE_URL_PARAM + '=' + constants.OUTPUT_NAME + path.extname(optFilename || strPath);
}
exports.changeOnlyOfficeUrl = changeOnlyOfficeUrl;
function pipeStreams(from, to, isEnd) {
return new Promise((resolve, reject) => {
from.pipe(to, {end: isEnd});
from.on('end', () => {
resolve();
});
from.on('error', e => {
reject(e);
});
/**
 * Pipe streams for HTTP responses, swallowing client abort errors.
 * @param {NodeJS.ReadableStream} from - source stream
 * @param {NodeJS.WritableStream} to - HTTP response stream
 * @returns {Promise<void>}
 */
async function pipeHttpStreams(from, to) {
  try {
    await pipeline(from, to);
  } catch (err) {
    // Client abort / connection reset is expected on HTTP responses;
    // treat it as non-fatal to keep "End" logs parity. Anything else propagates.
    const isClientAbort = err && (err.code === 'ERR_STREAM_PREMATURE_CLOSE' ||
      err.code === 'ECONNRESET' || err.code === 'EPIPE');
    if (!isClientAbort) {
      throw err;
    }
  }
}
exports.pipeStreams = pipeStreams;
function* pipeFiles(from, to) {
const fromStream = yield promiseCreateReadStream(from);
const toStream = yield promiseCreateWriteStream(to);
yield pipeStreams(fromStream, toStream, true);
}
exports.pipeFiles = co.wrap(pipeFiles);
exports.pipeHttpStreams = pipeHttpStreams;
function checkIpFilter(ctx, ipString, opt_hostname) {
const tenIpFilterRules = ctx.getCfg('services.CoAuthoring.ipfilter.rules', cfgIpFilterRules);