mirror of
https://github.com/ONLYOFFICE/server.git
synced 2026-02-10 18:05:07 +08:00
[feature] Allow custom s3 command options via "commandOptions"; Fix bug 75163
This commit is contained in:
11
.github/workflows/s3storageTests.yml
vendored
11
.github/workflows/s3storageTests.yml
vendored
@ -49,9 +49,18 @@ jobs:
|
||||
"name": "storage-s3",
|
||||
"region": "us-east-1",
|
||||
"endpoint": "http://localhost:9000",
|
||||
"bucketName": "cache",
|
||||
"storageFolderName": "files",
|
||||
"commandOptions": {
|
||||
"putObject": {
|
||||
"ServerSideEncryption": "AES256"
|
||||
},
|
||||
"copyObject": {
|
||||
"ServerSideEncryption": "AES256"
|
||||
}
|
||||
},
|
||||
"accessKeyId": "minioadmin",
|
||||
"secretAccessKey": "minioadmin",
|
||||
"bucket": "cache",
|
||||
"forcePathStyle": true
|
||||
},
|
||||
"persistentStorage": {
|
||||
|
||||
@ -124,6 +124,19 @@
|
||||
"bucketName": "cache",
|
||||
"storageFolderName": "files",
|
||||
"cacheFolderName": "data",
|
||||
"commandOptions": {
|
||||
"s3": {
|
||||
"putObject": {},
|
||||
"getObject": {},
|
||||
"copyObject": {
|
||||
"MetadataDirective": "COPY"
|
||||
},
|
||||
"listObjects": {
|
||||
"MaxKeys": 1000
|
||||
},
|
||||
"deleteObject": {}
|
||||
}
|
||||
},
|
||||
"urlExpires": 604800,
|
||||
"accessKeyId": "",
|
||||
"secretAccessKey": "",
|
||||
|
||||
@ -54,6 +54,19 @@ const cfgCacheStorage = config.get('storage');
|
||||
const MAX_DELETE_OBJECTS = 1000;
|
||||
let clients = {};
|
||||
|
||||
/**
 * Merge user-configured S3 command options into a command input object.
 *
 * Looks up `storageCfg.commandOptions.s3[commandType]` and, when present,
 * copies its keys onto `input` (overwriting any keys already set), so
 * deployments can inject extra S3 parameters such as ServerSideEncryption.
 *
 * @param {Object} input - S3 command input; mutated in place
 * @param {Object} storageCfg - Storage configuration
 * @param {string} commandType - Command name: putObject, copyObject, etc.
 * @returns {void}
 */
function applyCommandOptions(input, storageCfg, commandType) {
  const extraOptions = storageCfg.commandOptions?.s3?.[commandType];
  if (!extraOptions) {
    return;
  }
  Object.assign(input, extraOptions);
}
|
||||
|
||||
function getS3Client(storageCfg) {
|
||||
/**
|
||||
* Don't hard-code your credentials!
|
||||
@ -108,6 +121,8 @@ function joinListObjects(storageCfg, inputArray, outputArray) {
|
||||
}
|
||||
}
|
||||
async function listObjectsExec(storageCfg, output, params) {
|
||||
applyCommandOptions(params, storageCfg, 'listObjects');
|
||||
|
||||
const data = await getS3Client(storageCfg).send(new ListObjectsCommand(params));
|
||||
joinListObjects(storageCfg, data.Contents, output);
|
||||
if (data.IsTruncated && (data.NextMarker || (data.Contents && data.Contents.length > 0))) {
|
||||
@ -127,6 +142,8 @@ async function deleteObjectsHelp(storageCfg, aKeys) {
|
||||
Quiet: true
|
||||
}
|
||||
};
|
||||
applyCommandOptions(input, storageCfg, 'deleteObject');
|
||||
|
||||
const command = new DeleteObjectsCommand(input);
|
||||
await getS3Client(storageCfg).send(command);
|
||||
}
|
||||
@ -145,6 +162,8 @@ async function getObject(storageCfg, strPath) {
|
||||
Bucket: storageCfg.bucketName,
|
||||
Key: getFilePath(storageCfg, strPath)
|
||||
};
|
||||
applyCommandOptions(input, storageCfg, 'getObject');
|
||||
|
||||
const command = new GetObjectCommand(input);
|
||||
const output = await getS3Client(storageCfg).send(command);
|
||||
|
||||
@ -154,7 +173,9 @@ async function createReadStream(storageCfg, strPath) {
|
||||
const input = {
|
||||
Bucket: storageCfg.bucketName,
|
||||
Key: getFilePath(storageCfg, strPath)
|
||||
};
|
||||
};
|
||||
applyCommandOptions(input, storageCfg, 'getObject');
|
||||
|
||||
const command = new GetObjectCommand(input);
|
||||
const output = await getS3Client(storageCfg).send(command);
|
||||
return {
|
||||
@ -171,6 +192,8 @@ async function putObject(storageCfg, strPath, buffer, contentLength) {
|
||||
ContentLength: contentLength,
|
||||
ContentType: mime.getType(strPath)
|
||||
};
|
||||
applyCommandOptions(input, storageCfg, 'putObject');
|
||||
|
||||
const command = new PutObjectCommand(input);
|
||||
await getS3Client(storageCfg).send(command);
|
||||
}
|
||||
@ -183,6 +206,8 @@ async function uploadObject(storageCfg, strPath, filePath) {
|
||||
Body: file,
|
||||
ContentType: mime.getType(strPath)
|
||||
};
|
||||
applyCommandOptions(input, storageCfg, 'putObject');
|
||||
|
||||
const command = new PutObjectCommand(input);
|
||||
await getS3Client(storageCfg).send(command);
|
||||
}
|
||||
@ -193,6 +218,8 @@ async function copyObject(storageCfgSrc, storageCfgDst, sourceKey, destinationKe
|
||||
Key: getFilePath(storageCfgDst, destinationKey),
|
||||
CopySource: `/${storageCfgSrc.bucketName}/${getFilePath(storageCfgSrc, sourceKey)}`
|
||||
};
|
||||
applyCommandOptions(input, storageCfgDst, 'copyObject');
|
||||
|
||||
const command = new CopyObjectCommand(input);
|
||||
await getS3Client(storageCfgDst).send(command);
|
||||
}
|
||||
@ -210,6 +237,8 @@ async function deleteObject(storageCfg, strPath) {
|
||||
Bucket: storageCfg.bucketName,
|
||||
Key: getFilePath(storageCfg, strPath)
|
||||
};
|
||||
applyCommandOptions(input, storageCfg, 'deleteObject');
|
||||
|
||||
const command = new DeleteObjectCommand(input);
|
||||
await getS3Client(storageCfg).send(command);
|
||||
};
|
||||
@ -240,6 +269,8 @@ async function getDirectSignedUrl(ctx, storageCfg, baseUrl, strPath, urlType, op
|
||||
Key: getFilePath(storageCfg, strPath),
|
||||
ResponseContentDisposition: contentDisposition
|
||||
};
|
||||
applyCommandOptions(input, storageCfg, 'getObject');
|
||||
|
||||
const command = new GetObjectCommand(input);
|
||||
//default Expires 900 seconds
|
||||
let options = {
|
||||
|
||||
@ -48,6 +48,11 @@ module.exports = {
|
||||
// Automatically clear mock calls, instances, contexts and results before every test
|
||||
clearMocks: true,
|
||||
|
||||
// AWS SDK v3 uses AWS Common Runtime which creates persistent native handles
|
||||
// that Jest cannot clean up (aws_logger, FSEVENTWRAP, etc.)
|
||||
// This is a known limitation: https://github.com/awslabs/aws-crt-nodejs/issues/291
|
||||
forceExit: true,
|
||||
|
||||
// Indicates whether the coverage information should be collected while executing the test
|
||||
// collectCoverage: false,
|
||||
|
||||
|
||||
Reference in New Issue
Block a user