[refactor] Remove unused import; Change tab size

This commit is contained in:
Sergey Konovalov
2025-05-16 16:45:12 +03:00
parent d89b9fe055
commit 74265870cf
4 changed files with 121 additions and 134 deletions

View File

@ -7,7 +7,6 @@ const config = require('config');
const utils = require('../utils');
const ms = require('ms');
const commonDefines = require('../commondefines');
const crypto = require('crypto');
const cfgExpSessionAbsolute = ms(config.get('services.CoAuthoring.expire.sessionabsolute'));
const cfgCacheStorage = config.get('storage');
@ -21,22 +20,22 @@ const blobServiceClients = {};
* @returns {BlobServiceClient} The Azure Blob Service client
*/
function getBlobServiceClient(storageCfg) {
  // Memoize one client per credential/bucket pair so repeated calls reuse connections.
  const cacheKey = `${storageCfg.accessKeyId}_${storageCfg.bucketName}`;
  let client = blobServiceClients[cacheKey];
  if (!client) {
    const credential = new StorageSharedKeyCredential(
      storageCfg.accessKeyId,
      storageCfg.secretAccessKey
    );
    if (storageCfg.endpoint.includes(storageCfg.accessKeyId)) {
      // Endpoint already carries the account name — use it verbatim.
      client = new BlobServiceClient(storageCfg.endpoint, credential);
    } else {
      // Otherwise build the account-scoped host: <account>.<endpoint-host>.
      const endpointUrl = new URL(storageCfg.endpoint.replace(/\/+$/, ''));
      client = new BlobServiceClient(
        `${endpointUrl.protocol}//${storageCfg.accessKeyId}.${endpointUrl.host}`,
        credential);
    }
    blobServiceClients[cacheKey] = client;
  }
  return client;
}
/**
@ -46,8 +45,8 @@ function getBlobServiceClient(storageCfg) {
* @returns {ContainerClient} The Azure Container client
*/
function getContainerClient(storageCfg) {
  // The configured bucket name doubles as the Azure container name.
  return getBlobServiceClient(storageCfg).getContainerClient(storageCfg.bucketName);
}
/**
@ -70,157 +69,157 @@ function getBlobClient(storageCfg, blobName) {
* @returns {string} The full file path
*/
function getFilePath(storageCfg, strPath) {
  // Prefix every key with the configured storage folder: "<folder>/<path>".
  const {storageFolderName} = storageCfg;
  return `${storageFolderName}/${strPath}`;
}
/**
 * Lists blob keys under a prefix, relative to the storage folder.
 * @param {Object} storageCfg - Storage configuration
 * @param {string} prefix - Key prefix to list (relative to the storage folder)
 * @param {string[]} [output] - Accumulator the relative keys are pushed into
 * @returns {Promise<string[]>} The accumulator with all matching relative keys
 */
async function listObjectsExec(storageCfg, prefix, output = []) {
  const containerClient = getContainerClient(storageCfg);
  const folder = storageCfg.storageFolderName;
  const folderPrefix = folder ? `${folder}/${prefix}` : prefix;
  for await (const blob of containerClient.listBlobsFlat({prefix: folderPrefix})) {
    // Strip "<folder>/" so callers always see keys relative to the storage folder.
    const relativeKey = folder ? blob.name.substring(folder.length + 1) : blob.name;
    output.push(relativeKey);
  }
  return output;
}
/**
 * Deletes a batch of blobs in parallel.
 * @param {Object} storageCfg - Storage configuration
 * @param {Array<{Key: string}>} aKeys - S3-style key descriptors (full blob names)
 * @returns {Promise<void>}
 */
async function deleteObjectsHelp(storageCfg, aKeys) {
  const containerClient = getContainerClient(storageCfg);
  const deletions = aKeys.map((descriptor) => containerClient.deleteBlob(descriptor.Key));
  await Promise.all(deletions);
}
/**
 * Fetches blob metadata without downloading the body.
 * @param {Object} storageCfg - Storage configuration
 * @param {string} strPath - Key relative to the storage folder
 * @returns {Promise<{ContentLength: number}>} Size descriptor
 *   (key casing apparently mirrors the S3-style response shape callers expect — verify against callers)
 */
async function headObject(storageCfg, strPath) {
  const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  const {contentLength} = await blobClient.getProperties();
  return {ContentLength: contentLength};
}
/**
 * Downloads a blob fully into memory.
 * @param {Object} storageCfg - Storage configuration
 * @param {string} strPath - Key relative to the storage folder
 * @returns {Promise<Buffer>} The blob contents
 */
async function getObject(storageCfg, strPath) {
  const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  const {readableStreamBody} = await blobClient.download();
  return await utils.stream2Buffer(readableStreamBody);
}
/**
 * Opens a streaming download of a blob.
 * @param {Object} storageCfg - Storage configuration
 * @param {string} strPath - Key relative to the storage folder
 * @returns {Promise<{contentLength: number, readStream: NodeJS.ReadableStream}>}
 */
async function createReadStream(storageCfg, strPath) {
  const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  const {contentLength, readableStreamBody} = await blobClient.download();
  return {contentLength, readStream: readableStreamBody};
}
/**
 * Uploads data to a blob from either a Buffer or a Readable stream.
 * @param {Object} storageCfg - Storage configuration
 * @param {string} strPath - Key relative to the storage folder
 * @param {Buffer|NodeJS.ReadableStream} buffer - Payload to upload
 * @param {number} contentLength - Payload size (unused here; kept for interface parity)
 * @returns {Promise<void>}
 * @throws {TypeError} If the payload is neither a Buffer nor a stream
 */
async function putObject(storageCfg, strPath, buffer, contentLength) {
  const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  const uploadOptions = {
    blobHTTPHeaders: {
      contentType: mime.getType(strPath),
      contentDisposition: utils.getContentDisposition(path.basename(strPath))
    }
  };
  if (buffer instanceof Buffer) {
    await blobClient.uploadData(buffer, uploadOptions);
    return;
  }
  if (typeof buffer.pipe === 'function') {
    // Anything pipe-able is treated as a Readable stream.
    await blobClient.uploadStream(buffer, undefined, undefined, uploadOptions);
    return;
  }
  throw new TypeError('Input must be Buffer or Readable stream');
}
/**
 * Uploads a local file to a blob via a read stream.
 * @param {Object} storageCfg - Storage configuration
 * @param {string} strPath - Destination key relative to the storage folder
 * @param {string} filePath - Local filesystem path to read from
 * @returns {Promise<void>}
 */
async function uploadObject(storageCfg, strPath, filePath) {
  const blockBlobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  const readStream = fs.createReadStream(filePath);
  const uploadOptions = {
    blobHTTPHeaders: {
      contentType: mime.getType(strPath),
      contentDisposition: utils.getContentDisposition(path.basename(strPath))
    }
  };
  await blockBlobClient.uploadStream(readStream, undefined, undefined, uploadOptions);
}
/**
 * Server-side copy of a blob, possibly across storage accounts.
 * @param {Object} storageCfgSrc - Source storage configuration
 * @param {Object} storageCfgDst - Destination storage configuration
 * @param {string} sourceKey - Source key relative to the source storage folder
 * @param {string} destinationKey - Destination key relative to the destination storage folder
 * @returns {Promise<void>}
 */
async function copyObject(storageCfgSrc, storageCfgDst, sourceKey, destinationKey) {
  const srcBlobName = getFilePath(storageCfgSrc, sourceKey);
  const srcBlobClient = getBlobClient(storageCfgSrc, srcBlobName);
  const dstBlobClient = getBlobClient(storageCfgDst, getFilePath(storageCfgDst, destinationKey));
  // A short-lived (1h) read-only SAS lets the destination pull the source blob.
  const srcCredential = new StorageSharedKeyCredential(storageCfgSrc.accessKeyId, storageCfgSrc.secretAccessKey);
  const sasToken = generateBlobSASQueryParameters({
    containerName: storageCfgSrc.bucketName,
    blobName: srcBlobName,
    permissions: BlobSASPermissions.parse("r"),
    startsOn: new Date(),
    expiresOn: new Date(Date.now() + 3600 * 1000)
  }, srcCredential).toString();
  await dstBlobClient.syncCopyFromURL(`${srcBlobClient.url}?${sasToken}`);
}
/**
 * Lists all keys under a prefix, relative to the storage folder.
 * @param {Object} storageCfg - Storage configuration
 * @param {string} strPath - Key prefix
 * @returns {Promise<string[]>} Matching relative keys
 */
async function listObjects(storageCfg, strPath) {
  const keys = await listObjectsExec(storageCfg, strPath);
  return keys;
}
/**
 * Deletes a single blob.
 * @param {Object} storageCfg - Storage configuration
 * @param {string} strPath - Key relative to the storage folder
 * @returns {Promise<void>}
 */
async function deleteObject(storageCfg, strPath) {
  const fullBlobName = getFilePath(storageCfg, strPath);
  await getBlobClient(storageCfg, fullBlobName).delete();
}
/**
 * Deletes many blobs, batched to at most MAX_DELETE_OBJECTS per request.
 * @param {Object} storageCfg - Storage configuration
 * @param {string[]} strPaths - Keys relative to the storage folder
 * @returns {Promise<void>}
 */
async function deleteObjects(storageCfg, strPaths) {
  const descriptors = strPaths.map((relPath) => ({Key: getFilePath(storageCfg, relPath)}));
  for (let offset = 0; offset < descriptors.length; offset += MAX_DELETE_OBJECTS) {
    const batch = descriptors.slice(offset, offset + MAX_DELETE_OBJECTS);
    await deleteObjectsHelp(storageCfg, batch);
  }
}
/**
 * Recursively deletes everything under a prefix.
 * @param {Object} storageCfg - Storage configuration
 * @param {string} strPath - Key prefix to wipe
 * @returns {Promise<void>}
 */
async function deletePath(storageCfg, strPath) {
  const keys = await listObjects(storageCfg, strPath);
  await deleteObjects(storageCfg, keys);
}
/**
 * Builds a pre-signed (SAS) download URL for a blob.
 * @param {Object} ctx - Request context (unused; kept for interface parity with other storage backends)
 * @param {Object} storageCfg - Storage configuration
 * @param {string} baseUrl - Base URL (unused by the direct-URL path)
 * @param {string} strPath - Key relative to the storage folder
 * @param {number} urlType - One of commonDefines.c_oAscUrlTypes
 * @param {string} [optFilename] - User-facing download filename override
 * @param {Date} [opt_creationDate] - Unused; kept for interface parity
 * @returns {Promise<string>} SAS-signed direct URL
 */
async function getDirectSignedUrl(ctx, storageCfg, baseUrl, strPath, urlType, optFilename, opt_creationDate) {
  // Session URLs inherit the absolute session TTL; otherwise use the configured TTL.
  const configuredTtl = storageCfg.fs.urlExpires;
  const sessionTtl = cfgExpSessionAbsolute / 1000;
  let ttlSec = (commonDefines.c_oAscUrlTypes.Session === urlType ? sessionTtl : configuredTtl) || 31536000;
  // Cap the lifetime at 7 days (604800 s).
  ttlSec = Math.min(ttlSec, 604800);
  const blobClient = getBlobClient(storageCfg, getFilePath(storageCfg, strPath));
  // Escape slashes so the filename survives inside the Content-Disposition header.
  const downloadName = optFilename ? optFilename.replace(/\//g, "%2f") : path.basename(strPath);
  const contentDisposition = utils.getContentDisposition(downloadName, null, null);
  const sasOptions = {
    permissions: BlobSASPermissions.parse("r"),
    expiresOn: new Date(Date.now() + ttlSec * 1000),
    contentDisposition,
    contentType: mime.getType(strPath)
  };
  return await blobClient.generateSasUrl(sasOptions);
}
/**
 * Whether the app server must proxy file downloads itself
 * (i.e. direct storage URLs are disabled in config).
 * @returns {boolean}
 */
function needServeStatic() {
  const useDirectUrls = cfgCacheStorage.useDirectStorageUrls;
  return !useDirectUrls;
}
module.exports = {
headObject,
getObject,
createReadStream,
putObject,
uploadObject,
copyObject,
listObjects,
deleteObject,
deletePath,
getDirectSignedUrl,
needServeStatic
headObject,
getObject,
createReadStream,
putObject,
uploadObject,
copyObject,
listObjects,
deleteObject,
deletePath,
getDirectSignedUrl,
needServeStatic
};

View File

@ -159,7 +159,7 @@ async function getSignedUrl(ctx, baseUrl, strPath, urlType, optFilename, opt_cre
//RFC 1123 does not allow underscores https://stackoverflow.com/questions/2180465/can-domain-name-subdomains-have-an-underscore-in-it
var url = utils.checkBaseUrl(ctx, baseUrl, storageCfg).replace(/_/g, "%5f");
url += uri;
var date = Date.now();
let creationDate = opt_creationDate || date;
let expiredAfter = (commonDefines.c_oAscUrlTypes.Session === urlType ? (cfgExpSessionAbsolute / 1000) : storageUrlExpires) || 31536000;
@ -169,7 +169,7 @@ async function getSignedUrl(ctx, baseUrl, strPath, urlType, optFilename, opt_cre
expires += expiredAfter;
var md5 = crypto.createHash('md5').update(expires + decodeURIComponent(uri) + storageSecretString).digest("base64");
md5 = md5.replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, "");
url += '?md5=' + encodeURIComponent(md5);
url += '&expires=' + encodeURIComponent(expires);
if (storageCfg.name === 'storage-fs') {

View File

@ -36,17 +36,7 @@ const { cp, rm, mkdir } = require('fs/promises');
const { stat, readFile, writeFile } = require('fs/promises');
var path = require('path');
var utils = require("../utils");
var crypto = require('crypto');
const ms = require('ms');
const config = require('config');
const commonDefines = require('../commondefines');
const constants = require('../constants');
const cfgExpSessionAbsolute = ms(config.get('services.CoAuthoring.expire.sessionabsolute'));
//Stubs are needed until integrators pass these parameters to all requests
let shardKeyCached;
let wopiSrcCached;
function getFilePath(storageCfg, strPath) {
const storageFolderPath = storageCfg.fs.folderPath;

View File

@ -32,11 +32,9 @@
'use strict';
const fs = require('fs');
const url = require('url');
const { Agent: HttpsAgent } = require('https');
const { Agent: HttpAgent } = require('http');
const path = require('path');
const crypto = require('crypto');
const { S3Client, ListObjectsCommand, HeadObjectCommand} = require("@aws-sdk/client-s3");
const { GetObjectCommand, PutObjectCommand, CopyObjectCommand} = require("@aws-sdk/client-s3");
const { DeleteObjectsCommand, DeleteObjectCommand } = require("@aws-sdk/client-s3");