Merge pull request 'feature/azure-storage' (#21) from feature/azure-storage into release/v9.0.0

Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/server/pulls/21
This commit is contained in:
Sergey Konovalov
2025-04-04 15:09:58 +00:00
21 changed files with 5659 additions and 5121 deletions

View File

@ -4,6 +4,7 @@
- @aws-sdk/client-s3 3.637.0 ([Apache-2.0](https://raw.githubusercontent.com/aws/aws-sdk-js-v3/main/LICENSE))
- @aws-sdk/node-http-handler 3.374.0 ([Apache-2.0](https://raw.githubusercontent.com/aws/aws-sdk-js-v3/main/LICENSE))
- @aws-sdk/s3-request-presigner 3.370.0 ([Apache-2.0](https://raw.githubusercontent.com/aws/aws-sdk-js-v3/main/LICENSE))
- @azure/storage-blob 12.27.0 ([MIT](https://raw.githubusercontent.com/Azure/azure-sdk-for-js/refs/heads/main/sdk/storage/storage-blob/LICENSE))
- amqplib 0.8.0 ([MIT](https://raw.githubusercontent.com/amqp-node/amqplib/main/LICENSE))
- co 4.6.0 ([MIT](https://raw.githubusercontent.com/tj/co/master/LICENSE))
- config 2.0.1 ([MIT](https://raw.githubusercontent.com/node-config/node-config/master/LICENSE))

View File

@ -3076,6 +3076,219 @@
}
}
},
"@azure/abort-controller": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz",
"integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==",
"requires": {
"tslib": "^2.6.2"
},
"dependencies": {
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/core-auth": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.9.0.tgz",
"integrity": "sha512-FPwHpZywuyasDSLMqJ6fhbOK3TqUdviZNF8OqRGA4W5Ewib2lEEZ+pBsYcBa88B2NGO/SEnYPGhyBqNlE8ilSw==",
"requires": {
"@azure/abort-controller": "^2.0.0",
"@azure/core-util": "^1.11.0",
"tslib": "^2.6.2"
},
"dependencies": {
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/core-client": {
"version": "1.9.3",
"resolved": "https://registry.npmjs.org/@azure/core-client/-/core-client-1.9.3.tgz",
"integrity": "sha512-/wGw8fJ4mdpJ1Cum7s1S+VQyXt1ihwKLzfabS1O/RDADnmzVc01dHn44qD0BvGH6KlZNzOMW95tEpKqhkCChPA==",
"requires": {
"@azure/abort-controller": "^2.0.0",
"@azure/core-auth": "^1.4.0",
"@azure/core-rest-pipeline": "^1.9.1",
"@azure/core-tracing": "^1.0.0",
"@azure/core-util": "^1.6.1",
"@azure/logger": "^1.0.0",
"tslib": "^2.6.2"
},
"dependencies": {
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/core-http-compat": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@azure/core-http-compat/-/core-http-compat-2.2.0.tgz",
"integrity": "sha512-1kW8ZhN0CfbNOG6C688z5uh2yrzALE7dDXHiR9dY4vt+EbhGZQSbjDa5bQd2rf3X2pdWMsXbqbArxUyeNdvtmg==",
"requires": {
"@azure/abort-controller": "^2.0.0",
"@azure/core-client": "^1.3.0",
"@azure/core-rest-pipeline": "^1.19.0"
}
},
"@azure/core-lro": {
"version": "2.7.2",
"resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz",
"integrity": "sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==",
"requires": {
"@azure/abort-controller": "^2.0.0",
"@azure/core-util": "^1.2.0",
"@azure/logger": "^1.0.0",
"tslib": "^2.6.2"
},
"dependencies": {
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/core-paging": {
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz",
"integrity": "sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==",
"requires": {
"tslib": "^2.6.2"
},
"dependencies": {
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/core-rest-pipeline": {
"version": "1.19.1",
"resolved": "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.19.1.tgz",
"integrity": "sha512-zHeoI3NCs53lLBbWNzQycjnYKsA1CVKlnzSNuSFcUDwBp8HHVObePxrM7HaX+Ha5Ks639H7chNC9HOaIhNS03w==",
"requires": {
"@azure/abort-controller": "^2.0.0",
"@azure/core-auth": "^1.8.0",
"@azure/core-tracing": "^1.0.1",
"@azure/core-util": "^1.11.0",
"@azure/logger": "^1.0.0",
"http-proxy-agent": "^7.0.0",
"https-proxy-agent": "^7.0.0",
"tslib": "^2.6.2"
},
"dependencies": {
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/core-tracing": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.2.0.tgz",
"integrity": "sha512-UKTiEJPkWcESPYJz3X5uKRYyOcJD+4nYph+KpfdPRnQJVrZfk0KJgdnaAWKfhsBBtAf/D58Az4AvCJEmWgIBAg==",
"requires": {
"tslib": "^2.6.2"
},
"dependencies": {
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/core-util": {
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.11.0.tgz",
"integrity": "sha512-DxOSLua+NdpWoSqULhjDyAZTXFdP/LKkqtYuxxz1SCN289zk3OG8UOpnCQAz/tygyACBtWp/BoO72ptK7msY8g==",
"requires": {
"@azure/abort-controller": "^2.0.0",
"tslib": "^2.6.2"
},
"dependencies": {
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/core-xml": {
"version": "1.4.5",
"resolved": "https://registry.npmjs.org/@azure/core-xml/-/core-xml-1.4.5.tgz",
"integrity": "sha512-gT4H8mTaSXRz7eGTuQyq1aIJnJqeXzpOe9Ay7Z3FrCouer14CbV3VzjnJrNrQfbBpGBLO9oy8BmrY75A0p53cA==",
"requires": {
"fast-xml-parser": "^5.0.7",
"tslib": "^2.8.1"
},
"dependencies": {
"fast-xml-parser": {
"version": "5.0.9",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.0.9.tgz",
"integrity": "sha512-2mBwCiuW3ycKQQ6SOesSB8WeF+fIGb6I/GG5vU5/XEptwFFhp9PE8b9O7fbs2dpq9fXn4ULR3UsfydNUCntf5A==",
"requires": {
"strnum": "^2.0.5"
}
},
"strnum": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/strnum/-/strnum-2.0.5.tgz",
"integrity": "sha512-YAT3K/sgpCUxhxNMrrdhtod3jckkpYwH6JAuwmUdXZsmzH1wUyzTMrrK2wYCEEqlKwrWDd35NeuUkbBy/1iK+Q=="
},
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/logger": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.4.tgz",
"integrity": "sha512-4IXXzcCdLdlXuCG+8UKEwLA1T1NHqUfanhXYHiQTn+6sfWCZXduqbtXDGceg3Ce5QxTGo7EqmbV6Bi+aqKuClQ==",
"requires": {
"tslib": "^2.6.2"
},
"dependencies": {
"tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
}
}
},
"@azure/storage-blob": {
"version": "12.27.0",
"resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.27.0.tgz",
"integrity": "sha512-IQjj9RIzAKatmNca3D6bT0qJ+Pkox1WZGOg2esJF2YLHb45pQKOwGPIAV+w3rfgkj7zV3RMxpn/c6iftzSOZJQ==",
"requires": {
"@azure/abort-controller": "^2.1.2",
"@azure/core-auth": "^1.4.0",
"@azure/core-client": "^1.6.2",
"@azure/core-http-compat": "^2.0.0",
"@azure/core-lro": "^2.2.0",
"@azure/core-paging": "^1.1.1",
"@azure/core-rest-pipeline": "^1.10.1",
"@azure/core-tracing": "^1.1.2",
"@azure/core-util": "^1.6.1",
"@azure/core-xml": "^1.4.3",
"@azure/logger": "^1.0.0",
"events": "^3.0.0",
"tslib": "^2.2.0"
}
},
"@smithy/abort-controller": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-1.0.2.tgz",
@ -4959,6 +5172,11 @@
}
}
},
"agent-base": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
"integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw=="
},
"ajv": {
"version": "5.5.2",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz",
@ -4993,7 +5211,7 @@
"asap": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz",
"integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY="
"integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA=="
},
"asn1": {
"version": "0.2.4",
@ -5084,7 +5302,7 @@
"clone": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz",
"integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18="
"integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w=="
},
"co": {
"version": "4.6.0",
@ -5177,6 +5395,11 @@
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
},
"events": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="
},
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
@ -5252,6 +5475,30 @@
"har-schema": "^2.0.0"
}
},
"http-proxy-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz",
"integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==",
"requires": {
"agent-base": "^7.1.0",
"debug": "^4.3.4"
},
"dependencies": {
"debug": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"requires": {
"ms": "^2.1.3"
}
},
"ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
}
}
},
"http-signature": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
@ -5262,6 +5509,30 @@
"sshpk": "^1.7.0"
}
},
"https-proxy-agent": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
"requires": {
"agent-base": "^7.1.2",
"debug": "4"
},
"dependencies": {
"debug": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"requires": {
"ms": "^2.1.3"
}
},
"ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
}
}
},
"inherits": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
@ -5489,7 +5760,7 @@
"pify": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",
"integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY="
"integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg=="
},
"psl": {
"version": "1.1.29",
@ -5569,7 +5840,7 @@
"requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"rfdc": {
"version": "1.3.0",
@ -5693,7 +5964,7 @@
"through": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
"integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="
},
"tough-cookie": {
"version": "2.4.3",

View File

@ -7,6 +7,7 @@
"@aws-sdk/client-s3": "3.637.0",
"@aws-sdk/node-http-handler": "3.374.0",
"@aws-sdk/s3-request-presigner": "3.370.0",
"@azure/storage-blob": "12.27.0",
"amqplib": "0.8.0",
"co": "4.6.0",
"config": "2.0.1",

View File

@ -0,0 +1,230 @@
'use strict';
const fs = require('fs');
const path = require('path');
const { BlobServiceClient, StorageSharedKeyCredential, generateBlobSASQueryParameters, BlobSASPermissions } = require('@azure/storage-blob');
const mime = require('mime');
const config = require('config');
const { Readable } = require('stream');
const utils = require('../utils');
const ms = require('ms');
const commonDefines = require('../commondefines');
const cfgExpSessionAbsolute = ms(config.get('services.CoAuthoring.expire.sessionabsolute'));
const MAX_DELETE_OBJECTS = 1000;
const blobServiceClients = {};
/**
 * Returns a cached BlobServiceClient for the given storage configuration,
 * creating and memoizing one on first use.
 *
 * @param {Object} storageCfg - configuration object from default.json
 * @returns {BlobServiceClient} The Azure Blob Service client
 */
function getBlobServiceClient(storageCfg) {
  const cacheKey = `${storageCfg.accessKeyId}_${storageCfg.bucketName}`;
  let client = blobServiceClients[cacheKey];
  if (!client) {
    const credential = new StorageSharedKeyCredential(storageCfg.accessKeyId, storageCfg.secretAccessKey);
    if (storageCfg.endpoint.includes(storageCfg.accessKeyId)) {
      // Endpoint already contains the account name - use it verbatim.
      client = new BlobServiceClient(storageCfg.endpoint, credential);
    } else {
      // Build the account-qualified host from the configured endpoint.
      const endpointUrl = new URL(storageCfg.endpoint.replace(/\/+$/, ''));
      client = new BlobServiceClient(`${endpointUrl.protocol}//${storageCfg.accessKeyId}.${endpointUrl.host}`, credential);
    }
    blobServiceClients[cacheKey] = client;
  }
  return client;
}
/**
 * Gets a ContainerClient for the specified storage configuration.
 *
 * @param {Object} storageCfg - configuration object from default.json
 * @returns {ContainerClient} The Azure Container client
 */
function getContainerClient(storageCfg) {
  return getBlobServiceClient(storageCfg).getContainerClient(storageCfg.bucketName);
}
/**
 * Gets a BlockBlobClient for the specified storage configuration and blob name.
 *
 * @param {Object} storageCfg - configuration object from default.json
 * @param {string} blobName - The name of the blob
 * @returns {BlockBlobClient} The Azure Block Blob client
 */
function getBlobClient(storageCfg, blobName) {
  return getContainerClient(storageCfg).getBlockBlobClient(blobName);
}
/**
 * Builds the full blob key by prefixing the configured storage folder.
 *
 * @param {Object} storageCfg - configuration object from default.json
 * @param {string} strPath - The relative path of the file
 * @returns {string} The full file path
 */
function getFilePath(storageCfg, strPath) {
  return storageCfg.storageFolderName + '/' + strPath;
}
/**
 * Lists every blob name under the given prefix, stripping the configured
 * storage folder prefix from each returned name.
 *
 * @param {Object} storageCfg - configuration object from default.json
 * @param {string} prefix - key prefix relative to the storage folder
 * @param {Array<string>} [output] - accumulator the names are appended to
 * @returns {Promise<Array<string>>} the accumulated relative names
 */
async function listObjectsExec(storageCfg, prefix, output = []) {
  const folder = storageCfg.storageFolderName;
  const fullPrefix = folder ? `${folder}/${prefix}` : prefix;
  const containerClient = getContainerClient(storageCfg);
  for await (const blob of containerClient.listBlobsFlat({ prefix: fullPrefix })) {
    // +1 also drops the '/' separator after the folder name.
    output.push(folder ? blob.name.substring(folder.length + 1) : blob.name);
  }
  return output;
}
/** Deletes one batch of keys ({Key: string} objects) in parallel. */
async function deleteObjectsHelp(storageCfg, aKeys) {
  const containerClient = getContainerClient(storageCfg);
  const deletions = aKeys.map((item) => containerClient.deleteBlob(item.Key));
  await Promise.all(deletions);
}
/** Returns S3-style metadata ({ContentLength}) for the blob at strPath. */
async function headObject(storageCfg, strPath) {
  const properties = await getBlobClient(storageCfg, getFilePath(storageCfg, strPath)).getProperties();
  return { ContentLength: properties.contentLength };
}
/** Downloads the blob at strPath and returns its contents as a Buffer. */
async function getObject(storageCfg, strPath) {
  const response = await getBlobClient(storageCfg, getFilePath(storageCfg, strPath)).download();
  const body = await utils.stream2Buffer(response.readableStreamBody);
  return body;
}
/** Opens a download stream for the blob at strPath. */
async function createReadStream(storageCfg, strPath) {
  const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  const { contentLength, readableStreamBody: readStream } = await blobClient.download();
  return { contentLength, readStream };
}
/**
 * Uploads a Buffer or Readable stream to the blob at strPath with
 * Content-Type/Content-Disposition derived from the file name.
 *
 * @param {Object} storageCfg - configuration object from default.json
 * @param {string} strPath - relative path of the file
 * @param {Buffer|stream.Readable} buffer - data to upload
 * @param {number} contentLength - kept for interface parity; the Azure SDK determines sizes itself
 * @throws {TypeError} when buffer is neither a Buffer nor a Readable stream
 */
async function putObject(storageCfg, strPath, buffer, contentLength) {
  const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  const uploadOptions = {
    blobHTTPHeaders: {
      contentType: mime.getType(strPath),
      contentDisposition: utils.getContentDisposition(path.basename(strPath))
    }
  };
  // Buffer.isBuffer also recognizes Buffers from other realms, unlike instanceof.
  if (Buffer.isBuffer(buffer)) {
    await blobClient.uploadData(buffer, uploadOptions);
  } else if (buffer && typeof buffer.pipe === 'function') {
    // Duck-typed Readable stream; the null-check makes null/undefined reach the
    // explicit TypeError below instead of crashing on the property access.
    await blobClient.uploadStream(buffer, undefined, undefined, uploadOptions);
  } else {
    throw new TypeError('Input must be Buffer or Readable stream');
  }
}
/** Uploads a local file (filePath) to the blob at strPath via a read stream. */
async function uploadObject(storageCfg, strPath, filePath) {
  const blockBlobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  const options = {
    blobHTTPHeaders: {
      contentType: mime.getType(strPath),
      contentDisposition: utils.getContentDisposition(path.basename(strPath))
    }
  };
  const uploadStream = fs.createReadStream(filePath);
  await blockBlobClient.uploadStream(uploadStream, undefined, undefined, options);
}
/**
 * Copies a blob server-side between (possibly different) storage configs.
 * A short-lived read SAS is generated for the source blob so the destination
 * service can fetch it via syncCopyFromURL.
 */
async function copyObject(storageCfgSrc, storageCfgDst, sourceKey, destinationKey) {
  const sourceBlobClient = getBlobClient(storageCfgSrc, getFilePath(storageCfgSrc, sourceKey));
  const destBlobClient = getBlobClient(storageCfgDst, getFilePath(storageCfgDst, destinationKey));
  // Read-only SAS on the source blob, valid for one hour from now.
  const sasToken = generateBlobSASQueryParameters({
    containerName: storageCfgSrc.bucketName,
    blobName: getFilePath(storageCfgSrc, sourceKey),
    permissions: BlobSASPermissions.parse("r"),
    startsOn: new Date(),
    expiresOn: new Date(Date.now() + 3600 * 1000)
  }, new StorageSharedKeyCredential(storageCfgSrc.accessKeyId, storageCfgSrc.secretAccessKey)).toString();
  await destBlobClient.syncCopyFromURL(`${sourceBlobClient.url}?${sasToken}`);
}
/** Lists object keys under strPath (relative to the storage folder). */
async function listObjects(storageCfg, strPath) {
  const keys = await listObjectsExec(storageCfg, strPath);
  return keys;
}
/** Deletes the single blob at strPath. */
async function deleteObject(storageCfg, strPath) {
  await getBlobClient(storageCfg, getFilePath(storageCfg, strPath)).delete();
}
/** Deletes many blobs, MAX_DELETE_OBJECTS per batch. */
async function deleteObjects(storageCfg, strPaths) {
  const aKeys = strPaths.map((strPath) => ({ Key: getFilePath(storageCfg, strPath) }));
  for (let start = 0; start < aKeys.length; start += MAX_DELETE_OBJECTS) {
    const batch = aKeys.slice(start, start + MAX_DELETE_OBJECTS);
    await deleteObjectsHelp(storageCfg, batch);
  }
}
/** Deletes every blob whose key starts with strPath. */
async function deletePath(storageCfg, strPath) {
  const keys = await listObjects(storageCfg, strPath);
  await deleteObjects(storageCfg, keys);
}
/**
 * Generates a time-limited read SAS URL for the blob at strPath.
 *
 * @param {Object} ctx - request context (unused here; kept for interface parity)
 * @param {Object} storageCfg - configuration object from default.json
 * @param {string} baseUrl - unused for Azure; SAS URLs are absolute
 * @param {string} strPath - relative path of the file
 * @param {number} urlType - one of commonDefines.c_oAscUrlTypes
 * @param {string} [optFilename] - user-facing filename for Content-Disposition
 * @param {Date} [opt_creationDate] - unused for Azure
 * @returns {Promise<string>} the signed URL
 */
async function getSignedUrlWrapper(ctx, storageCfg, baseUrl, strPath, urlType, optFilename, opt_creationDate) {
  // NOTE(review): reads the expiry from storageCfg.fs.urlExpires - confirm the
  // Azure storage config really nests urlExpires under "fs".
  const storageUrlExpires = storageCfg.fs.urlExpires;
  // Session URLs live as long as the absolute session; otherwise use the
  // configured expiry, falling back to one year (31536000 s).
  let expires = (commonDefines.c_oAscUrlTypes.Session === urlType ? cfgExpSessionAbsolute / 1000 : storageUrlExpires) || 31536000;
  // Capped at 7 days (604800 s) - presumably a SAS/policy limit; TODO confirm.
  expires = Math.min(expires, 604800);
  const userFriendlyName = optFilename ? optFilename.replace(/\//g, "%2f") : path.basename(strPath);
  const contentDisposition = utils.getContentDisposition(userFriendlyName, null, null);
  const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  const sasOptions = {
    permissions: BlobSASPermissions.parse("r"),
    expiresOn: new Date(Date.now() + expires * 1000),
    contentDisposition,
    contentType: mime.getType(strPath)
  };
  return await blobClient.generateSasUrl(sasOptions);
}
/**
 * Determines if static routes are needed for cacheFolder.
 *
 * @returns {boolean} Always returns false for Azure Blob Storage
 */
function needServeStatic() {
  return false;
}
// Public storage-provider interface; storage-base dispatches to this module
// through these names, so they must match the other storage backends.
module.exports = {
  headObject,
  getObject,
  createReadStream,
  putObject,
  uploadObject,
  copyObject,
  listObjects,
  deleteObject,
  deletePath,
  getSignedUrl: getSignedUrlWrapper,
  needServeStatic
};

View File

@ -1,215 +1,215 @@
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/
'use strict';
const os = require('os');
const cluster = require('cluster');
var config = require('config');
var utils = require('./utils');
const cfgCacheStorage = config.get('storage');
const cfgPersistentStorage = utils.deepMergeObjects({}, cfgCacheStorage, config.get('persistentStorage'));
const cacheStorage = require('./' + cfgCacheStorage.name);
const persistentStorage = require('./' + cfgPersistentStorage.name);
const tenantManager = require('./tenantManager');
const HEALTH_CHECK_KEY_MAX = 10000;
// Maps a tenant-relative path into the bucket key space:
// <specialDir>/<tenantPrefix><path>, with backslashes normalized to '/'.
function getStoragePath(ctx, strPath, opt_specialDir) {
  const specialDir = opt_specialDir || cfgCacheStorage.cacheFolderName;
  return specialDir + '/' + tenantManager.getTenantPathPrefix(ctx) + strPath.replace(/\\/g, '/');
}
// Special dirs are stored in the persistent storage; everything else goes to cache.
function getStorage(opt_specialDir) {
  if (opt_specialDir) {
    return persistentStorage;
  }
  return cacheStorage;
}
// Config counterpart of getStorage(): persistent config for special dirs, cache otherwise.
function getStorageCfg(ctx, opt_specialDir) {
  if (opt_specialDir) {
    return cfgPersistentStorage;
  }
  return cfgCacheStorage;
}
// A provider-native copy is only possible when both configs use the same
// backend pointed at the same endpoint.
function canCopyBetweenStorage(storageCfgSrc, storageCfgDst) {
  const sameBackend = storageCfgSrc.name === storageCfgDst.name;
  const sameEndpoint = storageCfgSrc.endpoint === storageCfgDst.endpoint;
  return sameBackend && sameEndpoint;
}
// True when cache and persistent storage cannot share a provider-native copy.
function isDifferentPersistentStorage() {
  const sameStorage = canCopyBetweenStorage(cfgCacheStorage, cfgPersistentStorage);
  return !sameStorage;
}
// Delegates headObject to the storage selected by opt_specialDir.
async function headObject(ctx, strPath, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  const storagePath = getStoragePath(ctx, strPath, opt_specialDir);
  return await storage.headObject(storageCfg, storagePath);
}
// Delegates getObject to the storage selected by opt_specialDir.
async function getObject(ctx, strPath, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  const storagePath = getStoragePath(ctx, strPath, opt_specialDir);
  return await storage.getObject(storageCfg, storagePath);
}
// Delegates createReadStream to the storage selected by opt_specialDir.
async function createReadStream(ctx, strPath, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  const storagePath = getStoragePath(ctx, strPath, opt_specialDir);
  return await storage.createReadStream(storageCfg, storagePath);
}
// Delegates putObject to the storage selected by opt_specialDir.
async function putObject(ctx, strPath, buffer, contentLength, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  const storagePath = getStoragePath(ctx, strPath, opt_specialDir);
  return await storage.putObject(storageCfg, storagePath, buffer, contentLength);
}
// Delegates uploadObject to the storage selected by opt_specialDir.
async function uploadObject(ctx, strPath, filePath, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  const storagePath = getStoragePath(ctx, strPath, opt_specialDir);
  return await storage.uploadObject(storageCfg, storagePath, filePath);
}
// Copies an object between storages. Uses the provider-native copy when both
// sides share a backend/endpoint; otherwise falls back to download + upload.
async function copyObject(ctx, sourceKey, destinationKey, opt_specialDirSrc, opt_specialDirDst) {
  const storageSrc = getStorage(opt_specialDirSrc);
  const storagePathSrc = getStoragePath(ctx, sourceKey, opt_specialDirSrc);
  const storagePathDst = getStoragePath(ctx, destinationKey, opt_specialDirDst);
  const storageCfgSrc = getStorageCfg(ctx, opt_specialDirSrc);
  const storageCfgDst = getStorageCfg(ctx, opt_specialDirDst);
  if (!canCopyBetweenStorage(storageCfgSrc, storageCfgDst)) {
    const storageDst = getStorage(opt_specialDirDst);
    //todo stream
    const buffer = await storageSrc.getObject(storageCfgSrc, storagePathSrc);
    return await storageDst.putObject(storageCfgDst, storagePathDst, buffer, buffer.length);
  }
  return await storageSrc.copyObject(storageCfgSrc, storageCfgDst, storagePathSrc, storagePathDst);
}
// Copies every object under sourcePath into destinationPath, in parallel.
async function copyPath(ctx, sourcePath, destinationPath, opt_specialDirSrc, opt_specialDirDst) {
  const list = await listObjects(ctx, sourcePath, opt_specialDirSrc);
  const copies = list.map((curValue) => {
    const destKey = destinationPath + '/' + getRelativePath(sourcePath, curValue);
    return copyObject(ctx, curValue, destKey, opt_specialDirSrc, opt_specialDirDst);
  });
  await Promise.all(copies);
}
// Lists tenant-relative keys under strPath; returns [] and logs on backend errors.
async function listObjects(ctx, strPath, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  const prefix = getStoragePath(ctx, "", opt_specialDir);
  try {
    const list = await storage.listObjects(storageCfg, getStoragePath(ctx, strPath, opt_specialDir));
    // Strip the tenant/dir prefix so callers see paths relative to their root.
    return list.map((currentValue) => currentValue.substring(prefix.length));
  } catch (e) {
    ctx.logger.error('storage.listObjects: %s', e.stack);
    return [];
  }
}
// Delegates deleteObject to the storage selected by opt_specialDir.
async function deleteObject(ctx, strPath, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  const storagePath = getStoragePath(ctx, strPath, opt_specialDir);
  return await storage.deleteObject(storageCfg, storagePath);
}
// Delegates deletePath to the storage selected by opt_specialDir.
async function deletePath(ctx, strPath, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  const storagePath = getStoragePath(ctx, strPath, opt_specialDir);
  return await storage.deletePath(storageCfg, storagePath);
}
// Delegates getSignedUrl to the storage selected by opt_specialDir.
async function getSignedUrl(ctx, baseUrl, strPath, urlType, optFilename, opt_creationDate, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  const storagePath = getStoragePath(ctx, strPath, opt_specialDir);
  return await storage.getSignedUrl(ctx, storageCfg, baseUrl, storagePath, urlType, optFilename, opt_creationDate);
}
/**
 * Returns a map of relative key -> signed URL for every object under strPath.
 *
 * @param {Object} ctx - request context
 * @param {string} baseUrl - base URL forwarded to the provider's getSignedUrl
 * @param {string} strPath - tenant-relative directory path
 * @param {number} urlType - one of commonDefines.c_oAscUrlTypes
 * @param {Date} [opt_creationDate] - forwarded to the provider
 * @param {string} [opt_specialDir] - selects persistent vs cache storage
 * @returns {Promise<Object>} map of relative path to signed URL
 */
async function getSignedUrls(ctx, baseUrl, strPath, urlType, opt_creationDate, opt_specialDir) {
  let storagePathSrc = getStoragePath(ctx, strPath, opt_specialDir);
  let storage = getStorage(opt_specialDir);
  let storageCfg = getStorageCfg(ctx, opt_specialDir);
  // Fixed: a stray third argument (storageCfg) was previously passed here;
  // provider listObjects implementations take only (storageCfg, strPath).
  let list = await storage.listObjects(storageCfg, storagePathSrc);
  let urls = await Promise.all(list.map(function(curValue) {
    return storage.getSignedUrl(ctx, storageCfg, baseUrl, curValue, urlType, undefined, opt_creationDate);
  }));
  let outputMap = {};
  for (let i = 0; i < list.length && i < urls.length; ++i) {
    outputMap[getRelativePath(storagePathSrc, list[i])] = urls[i];
  }
  return outputMap;
}
/**
 * Signs each key in list in parallel and returns the URLs in the same order.
 * The storage and its config are loop-invariant, so they are resolved once
 * instead of on every map iteration.
 *
 * @param {Object} ctx - request context
 * @param {string} baseUrl - base URL forwarded to the provider's getSignedUrl
 * @param {Array<string>} list - tenant-relative keys to sign
 * @param {number} urlType - one of commonDefines.c_oAscUrlTypes
 * @param {string} [opt_specialDir] - selects persistent vs cache storage
 * @returns {Promise<Array<string>>} signed URLs, index-aligned with list
 */
async function getSignedUrlsArrayByArray(ctx, baseUrl, list, urlType, opt_specialDir) {
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  return await Promise.all(list.map(function (curValue) {
    const storagePathSrc = getStoragePath(ctx, curValue, opt_specialDir);
    return storage.getSignedUrl(ctx, storageCfg, baseUrl, storagePathSrc, urlType, undefined);
  }));
}
/**
 * Builds a map of key -> signed URL for the given list; keys are made
 * relative to optPath when it is provided.
 *
 * @param {Object} ctx - request context
 * @param {string} baseUrl - base URL forwarded to the provider's getSignedUrl
 * @param {Array<string>} list - tenant-relative keys to sign
 * @param {string} [optPath] - base path the output keys are made relative to
 * @param {number} urlType - one of commonDefines.c_oAscUrlTypes
 * @param {string} [opt_specialDir] - selects persistent vs cache storage
 * @returns {Promise<Object>} map of (relative) key to signed URL
 */
async function getSignedUrlsByArray(ctx, baseUrl, list, optPath, urlType, opt_specialDir) {
  const urls = await getSignedUrlsArrayByArray(ctx, baseUrl, list, urlType, opt_specialDir);
  // Loop-invariant: resolve the base path once instead of per iteration.
  const basePath = optPath ? getStoragePath(ctx, optPath, opt_specialDir) : null;
  const outputMap = {};
  for (let i = 0; i < list.length && i < urls.length; ++i) {
    const key = basePath ? getRelativePath(basePath, list[i]) : list[i];
    outputMap[key] = urls[i];
  }
  return outputMap;
}
// Strips the base prefix plus its trailing separator from strPath.
function getRelativePath(strBase, strPath) {
  const start = strBase.length + 1;
  return strPath.substring(start);
}
// Round-trips a tiny temp object through the storage to verify it is reachable;
// failures are logged as warnings, never thrown.
async function healthCheck(ctx, opt_specialDir) {
  const clusterId = cluster.isWorker ? cluster.worker.id : '';
  const suffix = Math.round(Math.random() * HEALTH_CHECK_KEY_MAX);
  const tempName = `hc_${os.hostname()}_${clusterId}_${suffix}`;
  const tempBuffer = Buffer.from([1, 2, 3, 4, 5]);
  try {
    //It's proper to putObject one tempName
    await putObject(ctx, tempName, tempBuffer, tempBuffer.length, opt_specialDir);
    //try to prevent case, when another process can remove same tempName
    await deleteObject(ctx, tempName, opt_specialDir);
  } catch (err) {
    ctx.logger.warn('healthCheck storage(%s) error %s', opt_specialDir, err.stack);
  }
}
// Asks the selected storage backend whether static file serving is required.
function needServeStatic(opt_specialDir) {
  return getStorage(opt_specialDir).needServeStatic();
}
// Public facade over the configured cache/persistent storage providers.
module.exports = {
  headObject,
  getObject,
  createReadStream,
  putObject,
  uploadObject,
  copyObject,
  copyPath,
  listObjects,
  deleteObject,
  deletePath,
  getSignedUrl,
  getSignedUrls,
  getSignedUrlsArrayByArray,
  getSignedUrlsByArray,
  getRelativePath,
  isDifferentPersistentStorage,
  healthCheck,
  needServeStatic
};
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/
'use strict';
const os = require('os');
const cluster = require('cluster');
var config = require('config');
var utils = require('../utils');
const cfgCacheStorage = config.get('storage');
const cfgPersistentStorage = utils.deepMergeObjects({}, cfgCacheStorage, config.get('persistentStorage'));
const cacheStorage = require('./' + cfgCacheStorage.name);
const persistentStorage = require('./' + cfgPersistentStorage.name);
const tenantManager = require('../tenantManager');
const HEALTH_CHECK_KEY_MAX = 10000;
/**
 * Build the backend key for a tenant-relative path.
 * Defaults to the cache storage's cacheFolderName when no special dir is given.
 */
function getStoragePath(ctx, strPath, opt_specialDir) {
  const specialDir = opt_specialDir || cfgCacheStorage.cacheFolderName;
  const normalized = strPath.replace(/\\/g, '/');
  return specialDir + '/' + tenantManager.getTenantPathPrefix(ctx) + normalized;
}
// Select the backend module: persistent storage for special dirs, cache storage otherwise.
function getStorage(opt_specialDir) {
  if (opt_specialDir) {
    return persistentStorage;
  }
  return cacheStorage;
}
// Select the storage config matching getStorage(); ctx is accepted for interface symmetry.
function getStorageCfg(ctx, opt_specialDir) {
  if (opt_specialDir) {
    return cfgPersistentStorage;
  }
  return cfgCacheStorage;
}
// A native backend-side copy is possible only when both configs target the
// same backend implementation and the same endpoint.
function canCopyBetweenStorage(storageCfgSrc, storageCfgDst) {
  const sameBackend = storageCfgSrc.name === storageCfgDst.name;
  const sameEndpoint = storageCfgSrc.endpoint === storageCfgDst.endpoint;
  return sameBackend && sameEndpoint;
}
// True when cache and persistent storage differ (by backend name or endpoint),
// i.e. data must be moved via get/put instead of a native backend copy.
function isDifferentPersistentStorage() {
  return !canCopyBetweenStorage(cfgCacheStorage, cfgPersistentStorage);
}
// Fetch object metadata (size) from the storage backend selected by opt_specialDir.
async function headObject(ctx, strPath, opt_specialDir) {
  const backend = getStorage(opt_specialDir);
  const cfg = getStorageCfg(ctx, opt_specialDir);
  const key = getStoragePath(ctx, strPath, opt_specialDir);
  return backend.headObject(cfg, key);
}
// Read the whole object into a Buffer via the selected backend.
async function getObject(ctx, strPath, opt_specialDir) {
  const backend = getStorage(opt_specialDir);
  const cfg = getStorageCfg(ctx, opt_specialDir);
  const key = getStoragePath(ctx, strPath, opt_specialDir);
  return backend.getObject(cfg, key);
}
// Open a read stream for the object via the selected backend.
async function createReadStream(ctx, strPath, opt_specialDir) {
  const backend = getStorage(opt_specialDir);
  const cfg = getStorageCfg(ctx, opt_specialDir);
  const key = getStoragePath(ctx, strPath, opt_specialDir);
  return backend.createReadStream(cfg, key);
}
// Store a Buffer (or stream) under the tenant-relative path.
async function putObject(ctx, strPath, buffer, contentLength, opt_specialDir) {
  const backend = getStorage(opt_specialDir);
  const cfg = getStorageCfg(ctx, opt_specialDir);
  const key = getStoragePath(ctx, strPath, opt_specialDir);
  return backend.putObject(cfg, key, buffer, contentLength);
}
// Upload a local file into the storage under the tenant-relative path.
async function uploadObject(ctx, strPath, filePath, opt_specialDir) {
  const backend = getStorage(opt_specialDir);
  const cfg = getStorageCfg(ctx, opt_specialDir);
  const key = getStoragePath(ctx, strPath, opt_specialDir);
  return backend.uploadObject(cfg, key, filePath);
}
/**
 * Copy one object between (possibly different) storages.
 * Uses a native backend-side copy when both configs share backend and
 * endpoint; otherwise falls back to a get + put through this process.
 */
async function copyObject(ctx, sourceKey, destinationKey, opt_specialDirSrc, opt_specialDirDst) {
  const srcStorage = getStorage(opt_specialDirSrc);
  const srcCfg = getStorageCfg(ctx, opt_specialDirSrc);
  const dstCfg = getStorageCfg(ctx, opt_specialDirDst);
  const srcKey = getStoragePath(ctx, sourceKey, opt_specialDirSrc);
  const dstKey = getStoragePath(ctx, destinationKey, opt_specialDirDst);
  if (canCopyBetweenStorage(srcCfg, dstCfg)) {
    return srcStorage.copyObject(srcCfg, dstCfg, srcKey, dstKey);
  }
  //todo stream
  const dstStorage = getStorage(opt_specialDirDst);
  const data = await srcStorage.getObject(srcCfg, srcKey);
  return dstStorage.putObject(dstCfg, dstKey, data, data.length);
}
// Copy every object under sourcePath to the same relative location under destinationPath.
async function copyPath(ctx, sourcePath, destinationPath, opt_specialDirSrc, opt_specialDirDst) {
  const keys = await listObjects(ctx, sourcePath, opt_specialDirSrc);
  const tasks = keys.map((key) => {
    const target = destinationPath + '/' + getRelativePath(sourcePath, key);
    return copyObject(ctx, key, target, opt_specialDirSrc, opt_specialDirDst);
  });
  await Promise.all(tasks);
}
/**
 * List keys under strPath, returned relative to the tenant prefix.
 * Backend errors are logged and swallowed, yielding an empty list.
 */
async function listObjects(ctx, strPath, opt_specialDir) {
  const backend = getStorage(opt_specialDir);
  const cfg = getStorageCfg(ctx, opt_specialDir);
  const tenantPrefix = getStoragePath(ctx, "", opt_specialDir);
  try {
    const keys = await backend.listObjects(cfg, getStoragePath(ctx, strPath, opt_specialDir));
    return keys.map((key) => key.substring(tenantPrefix.length));
  } catch (e) {
    ctx.logger.error('storage.listObjects: %s', e.stack);
    return [];
  }
}
// Delete a single object via the selected backend.
async function deleteObject(ctx, strPath, opt_specialDir) {
  const backend = getStorage(opt_specialDir);
  const cfg = getStorageCfg(ctx, opt_specialDir);
  const key = getStoragePath(ctx, strPath, opt_specialDir);
  return backend.deleteObject(cfg, key);
}
// Delete a whole subtree via the selected backend.
async function deletePath(ctx, strPath, opt_specialDir) {
  const backend = getStorage(opt_specialDir);
  const cfg = getStorageCfg(ctx, opt_specialDir);
  const key = getStoragePath(ctx, strPath, opt_specialDir);
  return backend.deletePath(cfg, key);
}
// Build a signed download URL for one object via the selected backend.
async function getSignedUrl(ctx, baseUrl, strPath, urlType, optFilename, opt_creationDate, opt_specialDir) {
  const backend = getStorage(opt_specialDir);
  const cfg = getStorageCfg(ctx, opt_specialDir);
  const key = getStoragePath(ctx, strPath, opt_specialDir);
  return backend.getSignedUrl(ctx, cfg, baseUrl, key, urlType, optFilename, opt_creationDate);
}
/**
 * Build signed URLs for every object under strPath.
 * @returns {Promise<Object<string,string>>} map of path (relative to strPath) -> signed URL
 */
async function getSignedUrls(ctx, baseUrl, strPath, urlType, opt_creationDate, opt_specialDir) {
  let storagePathSrc = getStoragePath(ctx, strPath, opt_specialDir);
  let storage = getStorage(opt_specialDir);
  let storageCfg = getStorageCfg(ctx, opt_specialDir);
  //fix: removed a stray extra argument previously passed to listObjects (its signature is (storageCfg, strPath))
  let list = await storage.listObjects(storageCfg, storagePathSrc);
  let urls = await Promise.all(list.map(function(curValue) {
    return storage.getSignedUrl(ctx, storageCfg, baseUrl, curValue, urlType, undefined, opt_creationDate);
  }));
  let outputMap = {};
  for (let i = 0; i < list.length && i < urls.length; ++i) {
    outputMap[getRelativePath(storagePathSrc, list[i])] = urls[i];
  }
  return outputMap;
}
/**
 * Build signed URLs for an explicit list of tenant-relative paths.
 * @returns {Promise<Array<string>>} URLs in the same order as `list`
 */
async function getSignedUrlsArrayByArray(ctx, baseUrl, list, urlType, opt_specialDir) {
  //hoisted: backend and config do not depend on the list element
  const storage = getStorage(opt_specialDir);
  const storageCfg = getStorageCfg(ctx, opt_specialDir);
  return await Promise.all(list.map(function (curValue) {
    const storagePathSrc = getStoragePath(ctx, curValue, opt_specialDir);
    return storage.getSignedUrl(ctx, storageCfg, baseUrl, storagePathSrc, urlType, undefined);
  }));
}
/**
 * Build a map of path -> signed URL for an explicit list of paths.
 * When optPath is given, keys are made relative to it; otherwise the raw paths are used.
 */
async function getSignedUrlsByArray(ctx, baseUrl, list, optPath, urlType, opt_specialDir) {
  const urls = await getSignedUrlsArrayByArray(ctx, baseUrl, list, urlType, opt_specialDir);
  //hoisted: the base storage path is loop-invariant
  const storagePathSrc = optPath ? getStoragePath(ctx, optPath, opt_specialDir) : undefined;
  const outputMap = {};
  for (let i = 0; i < list.length && i < urls.length; ++i) {
    if (optPath) {
      outputMap[getRelativePath(storagePathSrc, list[i])] = urls[i];
    } else {
      outputMap[list[i]] = urls[i];
    }
  }
  return outputMap;
}
// Strip the base prefix plus its trailing separator from strPath.
function getRelativePath(strBase, strPath) {
  const prefixLength = strBase.length + 1;
  return strPath.slice(prefixLength);
}
/**
 * Verify the storage works by writing and deleting a small probe object.
 * Failures are logged as warnings and never thrown.
 */
async function healthCheck(ctx, opt_specialDir) {
  const workerId = cluster.isWorker ? cluster.worker.id : '';
  //randomized suffix lowers the chance of two processes using the same key
  const probeKey = `hc_${os.hostname()}_${workerId}_${Math.round(Math.random() * HEALTH_CHECK_KEY_MAX)}`;
  const probeData = Buffer.from([1, 2, 3, 4, 5]);
  try {
    //It's proper to putObject one tempName
    await putObject(ctx, probeKey, probeData, probeData.length, opt_specialDir);
    //try to prevent case, when another process can remove same tempName
    await deleteObject(ctx, probeKey, opt_specialDir);
  } catch (err) {
    ctx.logger.warn('healthCheck storage(%s) error %s', opt_specialDir, err.stack);
  }
}
// Whether the selected backend requires the docservice to serve files itself.
function needServeStatic(opt_specialDir) {
  return getStorage(opt_specialDir).needServeStatic();
}
//Public, tenant-aware storage API; all paths are relative to the tenant prefix
//and opt_specialDir selects the persistent storage instead of the cache one.
module.exports = {
  headObject,
  getObject,
  createReadStream,
  putObject,
  uploadObject,
  copyObject,
  copyPath,
  listObjects,
  deleteObject,
  deletePath,
  getSignedUrl,
  getSignedUrls,
  getSignedUrlsArrayByArray,
  getSignedUrlsByArray,
  getRelativePath,
  isDifferentPersistentStorage,
  healthCheck,
  needServeStatic
};

View File

@ -1,183 +1,183 @@
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/
'use strict';
const { cp, rm, mkdir } = require('fs/promises');
const { stat, readFile, writeFile } = require('fs/promises');
var path = require('path');
var utils = require("./utils");
var crypto = require('crypto');
const ms = require('ms');
const config = require('config');
const commonDefines = require('./../../Common/sources/commondefines');
const constants = require('./../../Common/sources/constants');
const cfgExpSessionAbsolute = ms(config.get('services.CoAuthoring.expire.sessionabsolute'));
//Stubs are needed until integrators pass these parameters to all requests
let shardKeyCached;
let wopiSrcCached;
// Resolve a storage key to an absolute path under the configured fs folder.
function getFilePath(storageCfg, strPath) {
  return path.join(storageCfg.fs.folderPath, strPath);
}
// Normalize Windows backslashes to forward slashes for storage keys.
function getOutputPath(strPath) {
  return strPath.split('\\').join('/');
}
// Return the object's size, mirroring the S3 HeadObject response shape.
async function headObject(storageCfg, strPath) {
  const stats = await stat(getFilePath(storageCfg, strPath));
  return {ContentLength: stats.size};
}
// Read the whole object into a Buffer.
async function getObject(storageCfg, strPath) {
  return readFile(getFilePath(storageCfg, strPath));
}
/**
 * Open a read stream for the object.
 * @returns {Promise<{contentLength: number, readStream: object}>}
 */
async function createReadStream(storageCfg, strPath) {
  const fsPath = getFilePath(storageCfg, strPath);
  const stats = await stat(fsPath);
  const readStream = await utils.promiseCreateReadStream(fsPath);
  return {
    contentLength: stats.size,
    readStream
  };
}
/**
 * Write the object, creating parent directories as needed.
 * Accepts either a Buffer or a readable stream; contentLength is not needed
 * by the fs backend and is ignored here.
 */
async function putObject(storageCfg, strPath, buffer, contentLength) {
  const fsPath = getFilePath(storageCfg, strPath);
  await mkdir(path.dirname(fsPath), {recursive: true});
  if (Buffer.isBuffer(buffer)) {
    await writeFile(fsPath, buffer);
    return;
  }
  const writable = await utils.promiseCreateWriteStream(fsPath);
  await utils.pipeStreams(buffer, writable, true);
}
// Copy a local file (or directory) into the storage folder.
async function uploadObject(storageCfg, strPath, filePath) {
  await cp(filePath, getFilePath(storageCfg, strPath), {force: true, recursive: true});
}
// Copy between two fs-backed storages via a plain filesystem copy.
async function copyObject(storageCfgSrc, storageCfgDst, sourceKey, destinationKey) {
  const src = getFilePath(storageCfgSrc, sourceKey);
  const dst = getFilePath(storageCfgDst, destinationKey);
  await cp(src, dst, {force: true, recursive: true});
}
// Recursively list files under strPath; keys are returned relative to the
// storage folder with forward slashes.
async function listObjects(storageCfg, strPath) {
  const folder = storageCfg.fs.folderPath;
  const entries = await utils.listObjects(getFilePath(storageCfg, strPath));
  return entries.map((entry) => getOutputPath(entry.substring(folder.length + 1)));
}
// Remove a single object; force:true means no error if it does not exist.
async function deleteObject(storageCfg, strPath) {
  return rm(getFilePath(storageCfg, strPath), {force: true, recursive: true});
}
// Remove a whole subtree; retries help with transient fs errors.
async function deletePath(storageCfg, strPath) {
  return rm(getFilePath(storageCfg, strPath), {force: true, recursive: true, maxRetries: 3});
}
/**
 * Build a time-limited, md5-signed download URL for an fs-stored object.
 * The signature covers expires + decoded URI + fs.secretString; presumably it
 * is validated by the static file handler / nginx in front of it — confirm.
 * @param {object} ctx - request context; shardKey/wopiSrc are appended when present
 * @param {object} storageCfg - storage config (fs.secretString, fs.urlExpires, bucketName, storageFolderName)
 * @param {string} baseUrl - base URL candidate, filtered through utils.checkBaseUrl
 * @param {string} strPath - storage key of the object
 * @param {number} urlType - commonDefines.c_oAscUrlTypes value; Session uses the absolute session expiry
 * @param {string} [optFilename] - user-facing filename for the trailing path segment
 * @param {number} [opt_creationDate] - ms timestamp used to bucket the expiry so repeated calls produce stable URLs
 * @returns {Promise<string>} the signed URL
 */
async function getSignedUrl(ctx, storageCfg, baseUrl, strPath, urlType, optFilename, opt_creationDate) {
  const storageSecretString = storageCfg.fs.secretString;
  const storageUrlExpires = storageCfg.fs.urlExpires;
  const bucketName = storageCfg.bucketName;
  const storageFolderName = storageCfg.storageFolderName;
  //replace '/' with %2f before encodeURIComponent becase nginx determine %2f as '/' and get wrong system path
  const userFriendlyName = optFilename ? encodeURIComponent(optFilename.replace(/\//g, "%2f")) : path.basename(strPath);
  var uri = '/' + bucketName + '/' + storageFolderName + '/' + strPath + '/' + userFriendlyName;
  //RFC 1123 does not allow underscores https://stackoverflow.com/questions/2180465/can-domain-name-subdomains-have-an-underscore-in-it
  var url = utils.checkBaseUrl(ctx, baseUrl, storageCfg).replace(/_/g, "%5f");
  url += uri;
  var date = Date.now();
  let creationDate = opt_creationDate || date;
  //fall back to one year (31536000 s) when neither session expiry nor fs.urlExpires applies
  let expiredAfter = (commonDefines.c_oAscUrlTypes.Session === urlType ? (cfgExpSessionAbsolute / 1000) : storageUrlExpires) || 31536000;
  //todo creationDate can be greater because mysql CURRENT_TIMESTAMP uses local time, not UTC
  //round up to a multiple of expiredAfter past creationDate so repeated calls yield identical URLs
  var expires = creationDate + Math.ceil(Math.abs(date - creationDate) / expiredAfter) * expiredAfter;
  expires = Math.ceil(expires / 1000);
  expires += expiredAfter;
  var md5 = crypto.createHash('md5').update(expires + decodeURIComponent(uri) + storageSecretString).digest("base64");
  //escape base64 so the digest survives inside a query string
  md5 = md5.replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, "");
  url += '?md5=' + encodeURIComponent(md5);
  url += '&expires=' + encodeURIComponent(expires);
  //attach a routing key: explicit shardKey/wopiSrc win, then env default, then module-level cached stubs
  if (ctx.shardKey) {
    shardKeyCached = ctx.shardKey;
    url += `&${constants.SHARD_KEY_API_NAME}=${encodeURIComponent(ctx.shardKey)}`;
  } else if (ctx.wopiSrc) {
    wopiSrcCached = ctx.wopiSrc;
    url += `&${constants.SHARD_KEY_WOPI_NAME}=${encodeURIComponent(ctx.wopiSrc)}`;
  } else if (process.env.DEFAULT_SHARD_KEY) {
    //Set DEFAULT_SHARD_KEY from environment as shardkey in case of integrator did not pass this param
    url += `&${constants.SHARD_KEY_API_NAME}=${encodeURIComponent(process.env.DEFAULT_SHARD_KEY)}`;
  } else if (shardKeyCached) {
    //Add stubs for shardkey params until integrators pass these parameters to all requests
    url += `&${constants.SHARD_KEY_API_NAME}=${encodeURIComponent(shardKeyCached)}`;
  } else if (wopiSrcCached) {
    url += `&${constants.SHARD_KEY_WOPI_NAME}=${encodeURIComponent(wopiSrcCached)}`;
  }
  url += '&filename=' + userFriendlyName;
  return url;
}
// The fs backend has no HTTP endpoint of its own, so the docservice must
// serve stored files itself.
function needServeStatic() {
  return true;
}
//Storage backend contract implemented by this fs module (same shape as the s3 backend).
module.exports = {
  headObject,
  getObject,
  createReadStream,
  putObject,
  uploadObject,
  copyObject,
  listObjects,
  deleteObject,
  deletePath,
  getSignedUrl,
  needServeStatic
};
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/
'use strict';
const { cp, rm, mkdir } = require('fs/promises');
const { stat, readFile, writeFile } = require('fs/promises');
var path = require('path');
var utils = require("../utils");
var crypto = require('crypto');
const ms = require('ms');
const config = require('config');
const commonDefines = require('../commondefines');
const constants = require('../constants');
const cfgExpSessionAbsolute = ms(config.get('services.CoAuthoring.expire.sessionabsolute'));
//Stubs are needed until integrators pass these parameters to all requests
let shardKeyCached;
let wopiSrcCached;
// Resolve a storage key to an absolute path under the configured fs folder.
function getFilePath(storageCfg, strPath) {
  return path.join(storageCfg.fs.folderPath, strPath);
}
// Normalize Windows backslashes to forward slashes for storage keys.
function getOutputPath(strPath) {
  return strPath.split('\\').join('/');
}
// Return the object's size, mirroring the S3 HeadObject response shape.
async function headObject(storageCfg, strPath) {
  const stats = await stat(getFilePath(storageCfg, strPath));
  return {ContentLength: stats.size};
}
// Read the whole object into a Buffer.
async function getObject(storageCfg, strPath) {
  return readFile(getFilePath(storageCfg, strPath));
}
/**
 * Open a read stream for the object.
 * @returns {Promise<{contentLength: number, readStream: object}>}
 */
async function createReadStream(storageCfg, strPath) {
  const fsPath = getFilePath(storageCfg, strPath);
  const stats = await stat(fsPath);
  const readStream = await utils.promiseCreateReadStream(fsPath);
  return {
    contentLength: stats.size,
    readStream
  };
}
/**
 * Write the object, creating parent directories as needed.
 * Accepts either a Buffer or a readable stream; contentLength is not needed
 * by the fs backend and is ignored here.
 */
async function putObject(storageCfg, strPath, buffer, contentLength) {
  const fsPath = getFilePath(storageCfg, strPath);
  await mkdir(path.dirname(fsPath), {recursive: true});
  if (Buffer.isBuffer(buffer)) {
    await writeFile(fsPath, buffer);
    return;
  }
  const writable = await utils.promiseCreateWriteStream(fsPath);
  await utils.pipeStreams(buffer, writable, true);
}
// Copy a local file (or directory) into the storage folder.
async function uploadObject(storageCfg, strPath, filePath) {
  await cp(filePath, getFilePath(storageCfg, strPath), {force: true, recursive: true});
}
// Copy between two fs-backed storages via a plain filesystem copy.
async function copyObject(storageCfgSrc, storageCfgDst, sourceKey, destinationKey) {
  const src = getFilePath(storageCfgSrc, sourceKey);
  const dst = getFilePath(storageCfgDst, destinationKey);
  await cp(src, dst, {force: true, recursive: true});
}
// Recursively list files under strPath; keys are returned relative to the
// storage folder with forward slashes.
async function listObjects(storageCfg, strPath) {
  const folder = storageCfg.fs.folderPath;
  const entries = await utils.listObjects(getFilePath(storageCfg, strPath));
  return entries.map((entry) => getOutputPath(entry.substring(folder.length + 1)));
}
// Remove a single object; force:true means no error if it does not exist.
async function deleteObject(storageCfg, strPath) {
  return rm(getFilePath(storageCfg, strPath), {force: true, recursive: true});
}
// Remove a whole subtree; retries help with transient fs errors.
async function deletePath(storageCfg, strPath) {
  return rm(getFilePath(storageCfg, strPath), {force: true, recursive: true, maxRetries: 3});
}
/**
 * Build a time-limited, md5-signed download URL for an fs-stored object.
 * The signature covers expires + decoded URI + fs.secretString; presumably it
 * is validated by the static file handler / nginx in front of it — confirm.
 * @param {object} ctx - request context; shardKey/wopiSrc are appended when present
 * @param {object} storageCfg - storage config (fs.secretString, fs.urlExpires, bucketName, storageFolderName)
 * @param {string} baseUrl - base URL candidate, filtered through utils.checkBaseUrl
 * @param {string} strPath - storage key of the object
 * @param {number} urlType - commonDefines.c_oAscUrlTypes value; Session uses the absolute session expiry
 * @param {string} [optFilename] - user-facing filename for the trailing path segment
 * @param {number} [opt_creationDate] - ms timestamp used to bucket the expiry so repeated calls produce stable URLs
 * @returns {Promise<string>} the signed URL
 */
async function getSignedUrl(ctx, storageCfg, baseUrl, strPath, urlType, optFilename, opt_creationDate) {
  const storageSecretString = storageCfg.fs.secretString;
  const storageUrlExpires = storageCfg.fs.urlExpires;
  const bucketName = storageCfg.bucketName;
  const storageFolderName = storageCfg.storageFolderName;
  //replace '/' with %2f before encodeURIComponent becase nginx determine %2f as '/' and get wrong system path
  const userFriendlyName = optFilename ? encodeURIComponent(optFilename.replace(/\//g, "%2f")) : path.basename(strPath);
  var uri = '/' + bucketName + '/' + storageFolderName + '/' + strPath + '/' + userFriendlyName;
  //RFC 1123 does not allow underscores https://stackoverflow.com/questions/2180465/can-domain-name-subdomains-have-an-underscore-in-it
  var url = utils.checkBaseUrl(ctx, baseUrl, storageCfg).replace(/_/g, "%5f");
  url += uri;
  var date = Date.now();
  let creationDate = opt_creationDate || date;
  //fall back to one year (31536000 s) when neither session expiry nor fs.urlExpires applies
  let expiredAfter = (commonDefines.c_oAscUrlTypes.Session === urlType ? (cfgExpSessionAbsolute / 1000) : storageUrlExpires) || 31536000;
  //todo creationDate can be greater because mysql CURRENT_TIMESTAMP uses local time, not UTC
  //round up to a multiple of expiredAfter past creationDate so repeated calls yield identical URLs
  var expires = creationDate + Math.ceil(Math.abs(date - creationDate) / expiredAfter) * expiredAfter;
  expires = Math.ceil(expires / 1000);
  expires += expiredAfter;
  var md5 = crypto.createHash('md5').update(expires + decodeURIComponent(uri) + storageSecretString).digest("base64");
  //escape base64 so the digest survives inside a query string
  md5 = md5.replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, "");
  url += '?md5=' + encodeURIComponent(md5);
  url += '&expires=' + encodeURIComponent(expires);
  //attach a routing key: explicit shardKey/wopiSrc win, then env default, then module-level cached stubs
  if (ctx.shardKey) {
    shardKeyCached = ctx.shardKey;
    url += `&${constants.SHARD_KEY_API_NAME}=${encodeURIComponent(ctx.shardKey)}`;
  } else if (ctx.wopiSrc) {
    wopiSrcCached = ctx.wopiSrc;
    url += `&${constants.SHARD_KEY_WOPI_NAME}=${encodeURIComponent(ctx.wopiSrc)}`;
  } else if (process.env.DEFAULT_SHARD_KEY) {
    //Set DEFAULT_SHARD_KEY from environment as shardkey in case of integrator did not pass this param
    url += `&${constants.SHARD_KEY_API_NAME}=${encodeURIComponent(process.env.DEFAULT_SHARD_KEY)}`;
  } else if (shardKeyCached) {
    //Add stubs for shardkey params until integrators pass these parameters to all requests
    url += `&${constants.SHARD_KEY_API_NAME}=${encodeURIComponent(shardKeyCached)}`;
  } else if (wopiSrcCached) {
    url += `&${constants.SHARD_KEY_WOPI_NAME}=${encodeURIComponent(wopiSrcCached)}`;
  }
  url += '&filename=' + userFriendlyName;
  return url;
}
// The fs backend has no HTTP endpoint of its own, so the docservice must
// serve stored files itself.
function needServeStatic() {
  return true;
}
//Storage backend contract implemented by this fs module (same shape as the s3 backend).
module.exports = {
  headObject,
  getObject,
  createReadStream,
  putObject,
  uploadObject,
  copyObject,
  listObjects,
  deleteObject,
  deletePath,
  getSignedUrl,
  needServeStatic
};

View File

@ -1,266 +1,266 @@
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/
'use strict';
const fs = require('fs');
const url = require('url');
const { Agent } = require('https');
const path = require('path');
const { S3Client, ListObjectsCommand, HeadObjectCommand} = require("@aws-sdk/client-s3");
const { GetObjectCommand, PutObjectCommand, CopyObjectCommand} = require("@aws-sdk/client-s3");
const { DeleteObjectsCommand, DeleteObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { NodeHttpHandler } = require("@aws-sdk/node-http-handler");
const mime = require('mime');
const config = require('config');
const utils = require('./utils');
const ms = require('ms');
const commonDefines = require('./../../Common/sources/commondefines');
const cfgExpSessionAbsolute = ms(config.get('services.CoAuthoring.expire.sessionabsolute'));
const cfgRequestDefaults = config.get('services.CoAuthoring.requestDefaults');
//This operation enables you to delete multiple objects from a bucket using a single HTTP request. You may specify up to 1000 keys.
const MAX_DELETE_OBJECTS = 1000;
let clients = {};
/**
 * Return a memoized S3Client for the given storage config.
 * Clients are cached by the JSON of their settings so each distinct
 * region/endpoint/credential combination gets one client.
 */
function getS3Client(storageCfg) {
  /**
   * Don't hard-code your credentials!
   * Export the following environment variables instead:
   *
   * export AWS_ACCESS_KEY_ID='AKID'
   * export AWS_SECRET_ACCESS_KEY='SECRET'
   */
  let configS3 = {
    region: storageCfg.region,
    endpoint: storageCfg.endpoint
  };
  //explicit credentials only when both halves are configured; otherwise the SDK's default chain applies
  if (storageCfg.accessKeyId && storageCfg.secretAccessKey) {
    configS3.credentials = {
      accessKeyId: storageCfg.accessKeyId,
      secretAccessKey: storageCfg.secretAccessKey
    }
  }
  //custom endpoints (e.g. S3-compatible servers) may need path-style addressing and an explicit TLS flag
  if (configS3.endpoint) {
    configS3.tls = storageCfg.sslEnabled;
    configS3.forcePathStyle = storageCfg.s3ForcePathStyle;
  }
  //todo dedicated options?
  //one keep-alive agent shared by http and https requests
  const agent = new Agent(cfgRequestDefaults);
  configS3.requestHandler = new NodeHttpHandler({
    httpAgent: agent,
    httpsAgent: agent
  });
  //NOTE(review): the cache key is serialized after requestHandler is attached;
  //verify its JSON form is stable across calls, otherwise the cache may never hit
  let configJson = JSON.stringify(configS3);
  let client = clients[configJson];
  if (!client) {
    client = new S3Client(configS3);
    clients[configJson] = client;
  }
  return client;
}
// Prefix the key with the configured storage folder to form the S3 object key.
function getFilePath(storageCfg, strPath) {
  return `${storageCfg.storageFolderName}/${strPath}`;
}
// Append the Keys of one S3 listing page to outputArray, stripping the
// storage-folder prefix. A missing page (undefined Contents) is a no-op.
function joinListObjects(storageCfg, inputArray, outputArray) {
  if (!inputArray) {
    return;
  }
  const prefixLength = storageCfg.storageFolderName.length + 1;
  for (const entry of inputArray) {
    outputArray.push(entry.Key.substring(prefixLength));
  }
}
/**
 * Fetch one ListObjects page into `output` and recurse while the listing is
 * truncated, advancing params.Marker to the next page.
 * @returns {Promise<Array<string>>} output, with prefix-stripped keys appended
 */
async function listObjectsExec(storageCfg, output, params) {
  const data = await getS3Client(storageCfg).send(new ListObjectsCommand(params));
  joinListObjects(storageCfg, data.Contents, output);
  //per the S3 ListObjects API, NextMarker is only returned when a Delimiter is
  //set; otherwise fall back to the last Key of this page as the next Marker
  if (data.IsTruncated && (data.NextMarker || (data.Contents && data.Contents.length > 0))) {
    params.Marker = data.NextMarker || data.Contents[data.Contents.length - 1].Key;
    return await listObjectsExec(storageCfg, output, params);
  } else {
    return output;
  }
}
//By default, the operation uses verbose mode in which the response includes the result of deletion of each key in your request.
//In quiet mode the response includes only keys where the delete operation encountered an error.
async function deleteObjectsHelp(storageCfg, aKeys) {
  const command = new DeleteObjectsCommand({
    Bucket: storageCfg.bucketName,
    Delete: {
      Objects: aKeys,
      Quiet: true
    }
  });
  await getS3Client(storageCfg).send(command);
}
// HEAD the object and return its size.
async function headObject(storageCfg, strPath) {
  const command = new HeadObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath)
  });
  const output = await getS3Client(storageCfg).send(command);
  return {ContentLength: output.ContentLength};
}
// Download the whole object body into a Buffer.
async function getObject(storageCfg, strPath) {
  const command = new GetObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath)
  });
  const output = await getS3Client(storageCfg).send(command);
  return utils.stream2Buffer(output.Body);
}
// Open the object for streaming; the SDK response body is returned as-is.
async function createReadStream(storageCfg, strPath) {
  const command = new GetObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath)
  });
  const output = await getS3Client(storageCfg).send(command);
  return {
    contentLength: output.ContentLength,
    readStream: output.Body
  };
}
// Upload a buffer with an explicit length and a mime type derived from the key.
//todo consider Expires
async function putObject(storageCfg, strPath, buffer, contentLength) {
  const command = new PutObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath),
    Body: buffer,
    ContentLength: contentLength,
    ContentType: mime.getType(strPath)
  });
  await getS3Client(storageCfg).send(command);
}
// Stream a local file up to the bucket.
async function uploadObject(storageCfg, strPath, filePath) {
  const body = fs.createReadStream(filePath);
  //todo consider Expires
  const command = new PutObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath),
    Body: body,
    ContentType: mime.getType(strPath)
  });
  await getS3Client(storageCfg).send(command);
}
// Server-side copy between buckets, executed with the destination's client.
async function copyObject(storageCfgSrc, storageCfgDst, sourceKey, destinationKey) {
  //todo source bucket
  const command = new CopyObjectCommand({
    Bucket: storageCfgDst.bucketName,
    Key: getFilePath(storageCfgDst, destinationKey),
    CopySource: `/${storageCfgSrc.bucketName}/${getFilePath(storageCfgSrc, sourceKey)}`
  });
  await getS3Client(storageCfgDst).send(command);
}
// List every key under strPath; pagination is handled by listObjectsExec.
async function listObjects(storageCfg, strPath) {
  const output = [];
  await listObjectsExec(storageCfg, output, {
    Bucket: storageCfg.bucketName,
    Prefix: getFilePath(storageCfg, strPath)
  });
  return output;
}
// Delete a single object by key.
async function deleteObject(storageCfg, strPath) {
  const command = new DeleteObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath)
  });
  await getS3Client(storageCfg).send(command);
}
// Delete keys in batches of MAX_DELETE_OBJECTS (the per-request S3 limit).
async function deleteObjects(storageCfg, strPaths) {
  const keys = strPaths.map((currentValue) => ({Key: getFilePath(storageCfg, currentValue)}));
  for (let i = 0; i < keys.length; i += MAX_DELETE_OBJECTS) {
    await deleteObjectsHelp(storageCfg, keys.slice(i, i + MAX_DELETE_OBJECTS));
  }
}
// Delete every object found under strPath.
async function deletePath(storageCfg, strPath) {
  const keys = await listObjects(storageCfg, strPath);
  await deleteObjects(storageCfg, keys);
}
/**
 * Create a presigned S3 GET URL with a Content-Disposition override.
 * baseUrl and opt_creationDate are not used by this backend.
 * @returns {Promise<string>} the presigned URL
 */
async function getSignedUrlWrapper(ctx, storageCfg, baseUrl, strPath, urlType, optFilename, opt_creationDate) {
  //NOTE(review): expiry is read from the fs sub-config even in the S3 backend
  //(configs are deep-merged in storage/index) — confirm this is intentional
  const storageUrlExpires = storageCfg.fs.urlExpires;
  let expires = (commonDefines.c_oAscUrlTypes.Session === urlType ? cfgExpSessionAbsolute / 1000 : storageUrlExpires) || 31536000;
  // Signature version 4 presigned URLs must have an expiration date less than one week in the future
  expires = Math.min(expires, 604800);
  //escape '/' so the filename cannot introduce extra path segments
  let userFriendlyName = optFilename ? optFilename.replace(/\//g, "%2f") : path.basename(strPath);
  let contentDisposition = utils.getContentDisposition(userFriendlyName, null, null);
  const input = {
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath),
    ResponseContentDisposition: contentDisposition
  };
  const command = new GetObjectCommand(input);
  //default Expires 900 seconds
  let options = {
    expiresIn: expires
  };
  return await getSignedUrl(getS3Client(storageCfg), command, options);
  //extra query params cause SignatureDoesNotMatch
  //https://stackoverflow.com/questions/55503009/amazon-s3-signature-does-not-match-when-extra-query-params-ga-added-in-url
  // return utils.changeOnlyOfficeUrl(url, strPath, optFilename);
}
// S3 serves objects through presigned URLs, so the docservice does not
// need to serve files itself.
function needServeStatic() {
  return false;
}
//Storage backend contract; getSignedUrl is backed by the AWS presigner wrapper.
module.exports = {
  headObject,
  getObject,
  createReadStream,
  putObject,
  uploadObject,
  copyObject,
  listObjects,
  deleteObject,
  deletePath,
  getSignedUrl: getSignedUrlWrapper,
  needServeStatic
};
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/
'use strict';
const fs = require('fs');
const url = require('url');
const { Agent } = require('https');
const path = require('path');
const { S3Client, ListObjectsCommand, HeadObjectCommand} = require("@aws-sdk/client-s3");
const { GetObjectCommand, PutObjectCommand, CopyObjectCommand} = require("@aws-sdk/client-s3");
const { DeleteObjectsCommand, DeleteObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { NodeHttpHandler } = require("@aws-sdk/node-http-handler");
const mime = require('mime');
const config = require('config');
const utils = require('../utils');
const ms = require('ms');
const commonDefines = require('../commondefines');
//Absolute session lifetime in milliseconds; used below to size the TTL of session-type presigned URLs.
const cfgExpSessionAbsolute = ms(config.get('services.CoAuthoring.expire.sessionabsolute'));
//Options passed to the https Agent used by the S3 request handler.
const cfgRequestDefaults = config.get('services.CoAuthoring.requestDefaults');
//This operation enables you to delete multiple objects from a bucket using a single HTTP request. You may specify up to 1000 keys.
const MAX_DELETE_OBJECTS = 1000;
//Cache of S3Client instances keyed by their serialized configuration (see getS3Client).
let clients = {};
/**
 * Returns a cached S3Client for the given tenant storage config,
 * creating (and caching) one on first use.
 *
 * Fix: previously a new https Agent and NodeHttpHandler were constructed
 * on EVERY call — even cache hits — leaking an agent (and its socket pool)
 * per call, and the cache key was serialized AFTER the non-serializable
 * handler was attached. The key is now derived from the plain config only,
 * and the agent/handler are created solely on a cache miss.
 *
 * @param {Object} storageCfg - storage config (region, endpoint, credentials, TLS/path-style flags)
 * @returns {S3Client} cached or newly created client
 */
function getS3Client(storageCfg) {
  /**
   * Don't hard-code your credentials!
   * Export the following environment variables instead:
   *
   * export AWS_ACCESS_KEY_ID='AKID'
   * export AWS_SECRET_ACCESS_KEY='SECRET'
   */
  let configS3 = {
    region: storageCfg.region,
    endpoint: storageCfg.endpoint
  };
  if (storageCfg.accessKeyId && storageCfg.secretAccessKey) {
    configS3.credentials = {
      accessKeyId: storageCfg.accessKeyId,
      secretAccessKey: storageCfg.secretAccessKey
    };
  }
  if (configS3.endpoint) {
    configS3.tls = storageCfg.sslEnabled;
    configS3.forcePathStyle = storageCfg.s3ForcePathStyle;
  }
  //Key must be computed before attaching the request handler: the handler
  //and its Agent are not meaningfully JSON-serializable.
  const configJson = JSON.stringify(configS3);
  let client = clients[configJson];
  if (!client) {
    //todo dedicated options?
    const agent = new Agent(cfgRequestDefaults);
    configS3.requestHandler = new NodeHttpHandler({
      httpAgent: agent,
      httpsAgent: agent
    });
    client = new S3Client(configS3);
    clients[configJson] = client;
  }
  return client;
}
/**
 * Builds the full object key by prefixing the configured storage folder.
 * @param {Object} storageCfg - storage config (uses storageFolderName)
 * @param {string} strPath - path relative to the storage folder
 * @returns {string} key of the form "<storageFolderName>/<strPath>"
 */
function getFilePath(storageCfg, strPath) {
  return `${storageCfg.storageFolderName}/${strPath}`;
}
/**
 * Appends listing results to outputArray with the storage-folder prefix
 * removed from every key. A missing inputArray (empty listing page) is a no-op.
 * @param {Object} storageCfg - storage config (uses storageFolderName)
 * @param {Array<Object>|undefined} inputArray - S3 Contents entries ({Key})
 * @param {Array<string>} outputArray - accumulator, mutated in place
 */
function joinListObjects(storageCfg, inputArray, outputArray) {
  if (!inputArray) {
    return;
  }
  //Strip "<storageFolderName>/" from the front of each returned key.
  const prefixLength = storageCfg.storageFolderName.length + 1;
  for (const entry of inputArray) {
    outputArray.push(entry.Key.substring(prefixLength));
  }
}
/**
 * Pages through a ListObjects (v1) listing, accumulating stripped keys
 * into output. ListObjects signals continuation via IsTruncated/NextMarker.
 * @param {Object} storageCfg - storage config
 * @param {Array<string>} output - accumulator, mutated in place
 * @param {Object} params - ListObjects input (Bucket/Prefix); Marker is advanced here
 * @returns {Promise<Array<string>>} the same output array
 */
async function listObjectsExec(storageCfg, output, params) {
  for (;;) {
    const data = await getS3Client(storageCfg).send(new ListObjectsCommand(params));
    joinListObjects(storageCfg, data.Contents, output);
    const hasMorePages = data.IsTruncated && (data.NextMarker || (data.Contents && data.Contents.length > 0));
    if (!hasMorePages) {
      return output;
    }
    //Some S3-compatible servers omit NextMarker; fall back to the last key seen.
    params.Marker = data.NextMarker || data.Contents[data.Contents.length - 1].Key;
  }
}
/**
 * Issues a single batched DeleteObjects request for the given keys.
 * Quiet mode: the response reports only keys whose deletion failed,
 * instead of echoing the outcome for every requested key.
 * @param {Object} storageCfg - storage config (uses bucketName)
 * @param {Array<{Key: string}>} aKeys - up to MAX_DELETE_OBJECTS key objects
 */
async function deleteObjectsHelp(storageCfg, aKeys) {
  const command = new DeleteObjectsCommand({
    Bucket: storageCfg.bucketName,
    Delete: {
      Objects: aKeys,
      Quiet: true
    }
  });
  await getS3Client(storageCfg).send(command);
}
/**
 * Fetches object metadata (HEAD request) without downloading the body.
 * @param {Object} storageCfg - storage config
 * @param {string} strPath - path relative to the storage folder
 * @returns {Promise<{ContentLength: number}>} object size in bytes
 */
async function headObject(storageCfg, strPath) {
  const client = getS3Client(storageCfg);
  const response = await client.send(new HeadObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath)
  }));
  return {ContentLength: response.ContentLength};
}
/**
 * Downloads an object and buffers its whole body in memory.
 * @param {Object} storageCfg - storage config
 * @param {string} strPath - path relative to the storage folder
 * @returns {Promise<Buffer>} the object content
 */
async function getObject(storageCfg, strPath) {
  const command = new GetObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath)
  });
  const response = await getS3Client(storageCfg).send(command);
  return await utils.stream2Buffer(response.Body);
}
/**
 * Opens a read stream over an object instead of buffering it; the caller
 * is responsible for consuming (or destroying) the returned stream.
 * @param {Object} storageCfg - storage config
 * @param {string} strPath - path relative to the storage folder
 * @returns {Promise<{contentLength: number, readStream: Object}>}
 */
async function createReadStream(storageCfg, strPath) {
  const response = await getS3Client(storageCfg).send(new GetObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath)
  }));
  return {
    contentLength: response.ContentLength,
    readStream: response.Body
  };
}
/**
 * Uploads an in-memory buffer as an object; content type is derived from
 * the path's file extension.
 * @param {Object} storageCfg - storage config
 * @param {string} strPath - destination path relative to the storage folder
 * @param {Buffer|string} buffer - object body
 * @param {number} contentLength - body length in bytes
 */
async function putObject(storageCfg, strPath, buffer, contentLength) {
  //todo consider Expires
  const putCommand = new PutObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath),
    Body: buffer,
    ContentLength: contentLength,
    ContentType: mime.getType(strPath)
  });
  await getS3Client(storageCfg).send(putCommand);
}
/**
 * Streams a local file into an object; content type is derived from the
 * destination path's file extension.
 * @param {Object} storageCfg - storage config
 * @param {string} strPath - destination path relative to the storage folder
 * @param {string} filePath - local filesystem path to read from
 */
async function uploadObject(storageCfg, strPath, filePath) {
  //todo consider Expires
  const fileStream = fs.createReadStream(filePath);
  const command = new PutObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath),
    Body: fileStream,
    ContentType: mime.getType(strPath)
  });
  await getS3Client(storageCfg).send(command);
}
/**
 * Server-side copy of an object, possibly across buckets/tenants.
 * CopySource has the form "/<srcBucket>/<srcKey>".
 * @param {Object} storageCfgSrc - source storage config
 * @param {Object} storageCfgDst - destination storage config
 * @param {string} sourceKey - source path relative to its storage folder
 * @param {string} destinationKey - destination path relative to its storage folder
 */
async function copyObject(storageCfgSrc, storageCfgDst, sourceKey, destinationKey) {
  //todo source bucket
  const copySource = `/${storageCfgSrc.bucketName}/${getFilePath(storageCfgSrc, sourceKey)}`;
  const command = new CopyObjectCommand({
    Bucket: storageCfgDst.bucketName,
    Key: getFilePath(storageCfgDst, destinationKey),
    CopySource: copySource
  });
  await getS3Client(storageCfgDst).send(command);
}
/**
 * Lists every key under the given prefix; returned paths are relative to
 * the storage folder (prefix stripped).
 * @param {Object} storageCfg - storage config
 * @param {string} strPath - prefix relative to the storage folder
 * @returns {Promise<Array<string>>} matching keys
 */
async function listObjects(storageCfg, strPath) {
  const output = [];
  await listObjectsExec(storageCfg, output, {
    Bucket: storageCfg.bucketName,
    Prefix: getFilePath(storageCfg, strPath)
  });
  return output;
}
/**
 * Deletes a single object.
 * @param {Object} storageCfg - storage config
 * @param {string} strPath - path relative to the storage folder
 */
async function deleteObject(storageCfg, strPath) {
  const deleteCommand = new DeleteObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath)
  });
  await getS3Client(storageCfg).send(deleteCommand);
}
/**
 * Deletes many objects, batching requests because DeleteObjects accepts
 * at most MAX_DELETE_OBJECTS keys per call.
 * @param {Object} storageCfg - storage config
 * @param {Array<string>} strPaths - paths relative to the storage folder
 */
async function deleteObjects(storageCfg, strPaths) {
  const keys = strPaths.map((p) => ({Key: getFilePath(storageCfg, p)}));
  for (let offset = 0; offset < keys.length; offset += MAX_DELETE_OBJECTS) {
    await deleteObjectsHelp(storageCfg, keys.slice(offset, offset + MAX_DELETE_OBJECTS));
  }
}
/**
 * Recursively deletes everything under a prefix (S3 has no real
 * directories, so "a path" is just every key that starts with it).
 * @param {Object} storageCfg - storage config
 * @param {string} strPath - prefix relative to the storage folder
 */
async function deletePath(storageCfg, strPath) {
  const keysUnderPath = await listObjects(storageCfg, strPath);
  await deleteObjects(storageCfg, keysUnderPath);
}
/**
 * Builds a presigned GET URL for an object with a friendly download name.
 * @param {Object} ctx - operation context (unused here)
 * @param {Object} storageCfg - storage config (bucketName, fs.urlExpires)
 * @param {string} baseUrl - unused for S3; URLs point at the bucket endpoint
 * @param {string} strPath - path relative to the storage folder
 * @param {number} urlType - commonDefines.c_oAscUrlTypes value
 * @param {string} [optFilename] - download filename shown to the user
 * @param {Date} [opt_creationDate] - unused here
 * @returns {Promise<string>} the presigned URL
 */
async function getSignedUrlWrapper(ctx, storageCfg, baseUrl, strPath, urlType, optFilename, opt_creationDate) {
  //Session URLs live as long as the absolute session; others use the
  //configured storage expiration, defaulting to one year.
  const isSessionUrl = commonDefines.c_oAscUrlTypes.Session === urlType;
  let expires = (isSessionUrl ? cfgExpSessionAbsolute / 1000 : storageCfg.fs.urlExpires) || 31536000;
  // Signature version 4 presigned URLs must have an expiration date less than one week in the future
  expires = Math.min(expires, 604800);
  //Escape slashes so the friendly name cannot introduce extra path segments.
  const userFriendlyName = optFilename ? optFilename.replace(/\//g, "%2f") : path.basename(strPath);
  const contentDisposition = utils.getContentDisposition(userFriendlyName, null, null);
  const command = new GetObjectCommand({
    Bucket: storageCfg.bucketName,
    Key: getFilePath(storageCfg, strPath),
    ResponseContentDisposition: contentDisposition
  });
  //default Expires 900 seconds
  //Do not append extra query params afterwards: they cause SignatureDoesNotMatch
  //https://stackoverflow.com/questions/55503009/amazon-s3-signature-does-not-match-when-extra-query-params-ga-added-in-url
  return await getSignedUrl(getS3Client(storageCfg), command, {expiresIn: expires});
}
/**
 * Whether the document server must serve this backend's files itself.
 * @returns {boolean} always false — objects are reached via presigned URLs
 */
function needServeStatic() {
  const mustServeLocally = false;
  return mustServeLocally;
}
//Public interface of this S3 storage backend; note that getSignedUrlWrapper
//is exported under the generic name getSignedUrl.
module.exports = {
  headObject,
  getObject,
  createReadStream,
  putObject,
  uploadObject,
  copyObject,
  listObjects,
  deleteObject,
  deletePath,
  getSignedUrl: getSignedUrlWrapper,
  needServeStatic
};

View File

@ -51,8 +51,9 @@
"./sources/editorDataMemory.js",
"./sources/editorDataRedis.js",
"./sources/pubsubRabbitMQ.js",
"../Common/sources/storage-fs.js",
"../Common/sources/storage-s3.js"
"../Common/sources/storage/storage-fs.js",
"../Common/sources/storage/storage-s3.js",
"../Common/sources/storage/storage-az.js"
]
}
}

File diff suppressed because it is too large Load Diff

View File

@ -47,7 +47,7 @@ var logger = require('./../../Common/sources/logger');
var utils = require('./../../Common/sources/utils');
var constants = require('./../../Common/sources/constants');
var commonDefines = require('./../../Common/sources/commondefines');
var storage = require('./../../Common/sources/storage-base');
var storage = require('./../../Common/sources/storage/storage-base');
var formatChecker = require('./../../Common/sources/formatchecker');
var statsDClient = require('./../../Common/sources/statsdclient');
var operationContext = require('./../../Common/sources/operationContext');

View File

@ -38,7 +38,7 @@ var pubsubService = require('./pubsubRabbitMQ');
var commonDefines = require('./../../Common/sources/commondefines');
var constants = require('./../../Common/sources/constants');
var utils = require('./../../Common/sources/utils');
const storage = require('./../../Common/sources/storage-base');
const storage = require('./../../Common/sources/storage/storage-base');
const queueService = require('./../../Common/sources/taskqueueRabbitMQ');
const operationContext = require('./../../Common/sources/operationContext');
const sqlBase = require('./databaseConnectors/baseConnector');

View File

@ -43,10 +43,10 @@ var commonDefines = require('./../../Common/sources/commondefines');
var docsCoServer = require('./DocsCoServer');
var canvasService = require('./canvasservice');
var wopiClient = require('./wopiClient');
var storage = require('./../../Common/sources/storage-base');
var storage = require('./../../Common/sources/storage/storage-base');
var formatChecker = require('./../../Common/sources/formatchecker');
var statsDClient = require('./../../Common/sources/statsdclient');
var storageBase = require('./../../Common/sources/storage-base');
var storageBase = require('./../../Common/sources/storage/storage-base');
var operationContext = require('./../../Common/sources/operationContext');
const sqlBase = require('./databaseConnectors/baseConnector');
const utilsDocService = require("./utilsDocService");

View File

@ -38,7 +38,7 @@ const utilsDocService = require('./utilsDocService');
var docsCoServer = require('./DocsCoServer');
var utils = require('./../../Common/sources/utils');
var constants = require('./../../Common/sources/constants');
var storageBase = require('./../../Common/sources/storage-base');
var storageBase = require('./../../Common/sources/storage/storage-base');
var formatChecker = require('./../../Common/sources/formatchecker');
const commonDefines = require('./../../Common/sources/commondefines');
const operationContext = require('./../../Common/sources/operationContext');

View File

@ -39,7 +39,7 @@ var ms = require('ms');
var taskResult = require('./taskresult');
var docsCoServer = require('./DocsCoServer');
var canvasService = require('./canvasservice');
var storage = require('./../../Common/sources/storage-base');
var storage = require('./../../Common/sources/storage/storage-base');
var utils = require('./../../Common/sources/utils');
var logger = require('./../../Common/sources/logger');
var constants = require('./../../Common/sources/constants');

View File

@ -35,7 +35,7 @@ const express = require('express');
const config = require("config");
const operationContext = require('./../../../Common/sources/operationContext');
const utils = require('./../../../Common/sources/utils');
const storage = require('./../../../Common/sources/storage-base');
const storage = require('./../../../Common/sources/storage/storage-base');
const urlModule = require("url");
const path = require("path");
const mime = require("mime");

View File

@ -13,8 +13,9 @@
},
"pkg": {
"scripts": [
"../Common/sources/storage-fs.js",
"../Common/sources/storage-s3.js",
"../Common/sources/storage/storage-fs.js",
"../Common/sources/storage/storage-s3.js",
"../Common/sources/storage/storage-az.js",
"../DocService/sources/editorDataMemory.js",
"../DocService/sources/editorDataRedis.js"
]

View File

@ -43,7 +43,7 @@ const lcid = require('lcid');
const ms = require('ms');
var commonDefines = require('./../../Common/sources/commondefines');
var storage = require('./../../Common/sources/storage-base');
var storage = require('./../../Common/sources/storage/storage-base');
var utils = require('./../../Common/sources/utils');
var constants = require('./../../Common/sources/constants');
var baseConnector = require('../../DocService/sources/databaseConnectors/baseConnector');

View File

@ -78,6 +78,7 @@
"integration tests with server instance": "cd ./DocService && jest integration/withServerInstance --inject-globals=false --config=../tests/jest.config.js",
"integration database tests": "cd ./DocService && jest integration/databaseTests --inject-globals=false --config=../tests/jest.config.js",
"tests": "cd ./DocService && jest --inject-globals=false --config=../tests/jest.config.js",
"tests:dev": "cd ./DocService && jest --inject-globals=false --config=../tests/jest.config.js --watch",
"install:Common": "npm ci --prefix ./Common",
"install:DocService": "npm ci --prefix ./DocService",
"install:FileConverter": "npm ci --prefix ./FileConverter",

View File

@ -34,7 +34,7 @@ const { describe, test, expect, afterAll, beforeAll } = require('@jest/globals')
const http = require('http');
const { signToken } = require('../../../DocService/sources/DocsCoServer');
const storage = require('../../../Common/sources/storage-base');
const storage = require('../../../Common/sources/storage/storage-base');
const constants = require('../../../Common/sources/commondefines');
const operationContext = require('../../../Common/sources/operationContext');
const utils = require("../../../Common/sources/utils");
@ -50,6 +50,7 @@ const cfgTokenEnableRequestOutbox = config.get('services.CoAuthoring.token.enabl
const cfgStorageName = config.get('storage.name');
const cfgEndpoint = config.get('storage.endpoint');
const cfgBucketName = config.get('storage.bucketName');
const cfgAccessKeyId = config.get('storage.accessKeyId');
const ctx = new operationContext.Context();
const testFilesNames = {
@ -184,12 +185,18 @@ describe('Command service', function () {
let urlPattern;
if ("storage-fs" === cfgStorageName) {
urlPattern = 'http://localhost:8000/cache/files/forgotten/--key--/output.docx/output.docx';
} else {
} else if ("storage-s3" === cfgStorageName) {
let host = cfgEndpoint.slice(0, "https://".length) + cfgBucketName + "." + cfgEndpoint.slice("https://".length);
if (host[host.length - 1] === '/') {
host = host.slice(0, -1);
}
urlPattern = host + '/files/forgotten/--key--/output.docx';
} else {
let host = cfgEndpoint.slice(0, "https://".length) + cfgAccessKeyId + "." + cfgEndpoint.slice("https://".length) + '/' + cfgBucketName;
if (host[host.length - 1] === '/') {
host = host.slice(0, -1);
}
urlPattern = host + '/files/forgotten/--key--/output.docx';
}
const expected = { key, error };

View File

@ -49,7 +49,7 @@ const { cp } = require('fs/promises');
const operationContext = require('../../../Common/sources/operationContext');
const tenantManager = require('../../../Common/sources/tenantManager');
const storage = require('../../../Common/sources/storage-base');
const storage = require('../../../Common/sources/storage/storage-base');
const utils = require('../../../Common/sources/utils');
const commonDefines = require("../../../Common/sources/commondefines");
const config = require('../../../Common/node_modules/config');
@ -119,7 +119,7 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
});
} else {
test("uploadObject", async () => {
const spy = jest.spyOn(fs, 'createReadStream').mockReturnValue(testFileData3);
const spy = jest.spyOn(fs, 'createReadStream').mockReturnValue(Readable.from(testFileData3));
let res = await storage.uploadObject(ctx, testFile3, "createReadStream.txt", specialDir);
expect(res).toEqual(undefined);
let list = await storage.listObjects(ctx, testDir, specialDir);
@ -127,6 +127,31 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
expect(list.sort()).toEqual([testFile1, testFile2, testFile3].sort());
spy.mockRestore();
});
test("uploadObject - stream error handling", async () => {
const streamErrorMessage = "Test stream error";
const mockStream = new Readable({
read() {
this.emit('error', new Error(streamErrorMessage));
}
});
const spy = jest.spyOn(fs, 'createReadStream').mockReturnValue(mockStream);
// Verify that the uploadObject function rejects when the stream emits an error
await expect(storage.uploadObject(ctx, "test-error-file.txt", "nonexistent.txt", specialDir))
.rejects.toThrow(streamErrorMessage);
spy.mockRestore();
});
test("uploadObject - non-existent file handling", async () => {
const nonExistentFile = 'definitely-does-not-exist-' + Date.now() + '.txt';
// Verify the file actually doesn't exist
expect(fs.existsSync(nonExistentFile)).toBe(false);
// Verify that uploadObject properly handles and propagates the error
await expect(storage.uploadObject(ctx, "test-error-file.txt", nonExistentFile, specialDir))
.rejects.toThrow(/ENOENT/);
});
}
test("copyObject", async () => {
let res = await storage.copyObject(ctx, testFile3, testFile4, specialDir, specialDir);

View File

@ -40,8 +40,8 @@ const {
const co = require('co');
const taskResult = require('./../../DocService/sources/taskresult');
const storage = require('./../../Common/sources/storage-base');
const storageFs = require('./../../Common/sources/storage-fs');
const storage = require('./../../Common/sources/storage/storage-base');
const storageFs = require('./../../Common/sources/storage/storage-fs');
const operationContext = require('./../../Common/sources/operationContext');
const utils = require('./../../Common/sources/utils');
const docsCoServer = require("./../../DocService/sources/DocsCoServer");