Mirror of https://github.com/ONLYOFFICE/server.git (synced 2026-02-10 18:05:07 +08:00)
Forgotten files commands (#415)
* [ds] Add forgotten files commands
* Feature fixes 1
* Feature fixes 2
* [feature] Refactor validateCommand and deleteForgotten
* Feature fixes 3
* Feature fixes 4
* [test] Move jest module
* Feature fixes 5
* [test] Move jest.config.js

Co-authored-by: Sergey Konovalov <Sergey.Konovalov@onlyoffice.com>
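For orientation, the three new commands go through the existing command service endpoint (/coauthoring/CommandService.ashx, the same one the integration tests added below call). A minimal sketch of the request and response shapes, assuming outgoing-request token signing is disabled and using a placeholder document key; the field names follow the tests in this commit:

// Hypothetical request bodies for the new forgotten-files commands (shapes taken from the tests below).
const getForgotten = {c: 'getForgotten', key: 'someDocId'};       // response: {key, error, url}
const deleteForgotten = {c: 'deleteForgotten', key: 'someDocId'}; // response: {key, error}
const getForgottenList = {c: 'getForgottenList'};                 // response: {error, keys: [...]}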
@@ -143,6 +143,7 @@ const cfgForceSaveStep = ms(config.get('autoAssembly.step'));
const cfgQueueType = configCommon.get('queue.type');
const cfgQueueRetentionPeriod = configCommon.get('queue.retentionPeriod');
const cfgForgottenFiles = config.get('server.forgottenfiles');
const cfgForgottenFilesName = config.get('server.forgottenfilesname');
const cfgMaxRequestChanges = config.get('server.maxRequestChanges');
const cfgWarningLimitPercents = configCommon.get('license.warning_limit_percents') / 100;
const cfgErrorFiles = configCommon.get('FileConverter.converter.errorfiles');
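For reference, these two settings are read under the services.CoAuthoring prefix here, while the integration tests below read them with their full configuration paths. A minimal node-config sketch; the example values in the comments are assumptions consistent with the URL pattern asserted in those tests:

// Sketch: reading the forgotten-files settings directly with node-config.
const config = require('config');
const forgottenDir = config.get('services.CoAuthoring.server.forgottenfiles');      // e.g. 'forgotten' (assumed default)
const forgottenName = config.get('services.CoAuthoring.server.forgottenfilesname'); // e.g. 'output' (assumed default)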
@@ -3807,103 +3808,190 @@ exports.licenseInfo = function(req, res) {
}
});
};
let commandLicense = co.wrap(function*(ctx) {

function validateInputParams(ctx, authRes, command) {
const commandsWithoutKey = ['version', 'license', 'getForgottenList'];
const isValidWithoutKey = commandsWithoutKey.includes(command.c);
const isDocIdString = typeof command.key === 'string';

ctx.setDocId(command.key);

if(authRes.code === constants.VKEY_KEY_EXPIRE){
return commonDefines.c_oAscServerCommandErrors.TokenExpire;
} else if(authRes.code !== constants.NO_ERROR){
return commonDefines.c_oAscServerCommandErrors.Token;
}

if (isValidWithoutKey || isDocIdString) {
return commonDefines.c_oAscServerCommandErrors.NoError;
} else {
return commonDefines.c_oAscServerCommandErrors.DocumentIdError;
}
}

function* getFilesKeys(ctx, opt_specialDir) {
const directoryList = yield storage.listObjects(ctx, '', opt_specialDir);
const keys = directoryList.map(directory => directory.split('/')[0]);

const filteredKeys = [];
let previousKey = null;
// Key is a folder name. This folder could consist of several files, which leads to N same strings in "keys" array in a row.
for (const key of keys) {
if (previousKey !== key) {
previousKey = key;
filteredKeys.push(key);
}
}

return filteredKeys;
}

function* findForgottenFile(ctx, docId) {
const forgottenList = yield storage.listObjects(ctx, docId, cfgForgottenFiles);
return forgottenList.find(forgotten => cfgForgottenFilesName === pathModule.basename(forgotten, pathModule.extname(forgotten)));
}

function* commandLicense(ctx) {
const nowUTC = getLicenseNowUtc();
let users = yield editorData.getPresenceUniqueUser(ctx, nowUTC);
let users_view = yield editorData.getPresenceUniqueViewUser(ctx, nowUTC);
let licenseInfo = yield tenantManager.getTenantLicense(ctx);
const users = yield editorData.getPresenceUniqueUser(ctx, nowUTC);
const users_view = yield editorData.getPresenceUniqueViewUser(ctx, nowUTC);
const licenseInfo = yield tenantManager.getTenantLicense(ctx);

return {
license: utils.convertLicenseInfoToFileParams(licenseInfo),
server: utils.convertLicenseInfoToServerParams(licenseInfo),
quota: {users: users, users_view: users_view}
quota: { users, users_view }
};
});
}

/**
* Server commands handler.
* @param ctx Local context.
* @param params Request parameters.
* @param req Request object.
* @param output{{ key: string, error: number, version: undefined | string }} Mutable. Response body.
* @returns undefined.
*/
function* commandHandle(ctx, params, req, output) {
const docId = params.key;
const forgottenData = {};

switch (params.c) {
case 'info': {
//If no files in the database means they have not been edited.
const selectRes = yield taskResult.select(ctx, docId);
if (selectRes.length > 0) {
output.error = yield* bindEvents(ctx, docId, params.callback, utils.getBaseUrlByRequest(req), undefined, params.userdata);
} else {
output.error = commonDefines.c_oAscServerCommandErrors.DocumentIdError;
}
break;
}
case 'drop': {
if (params.userid) {
yield* publish(ctx, {type: commonDefines.c_oPublishType.drop, ctx: ctx, docId: docId, users: [params.userid], description: params.description});
} else if (params.users) {
const users = (typeof params.users === 'string') ? JSON.parse(params.users) : params.users;
yield* dropUsersFromDocument(ctx, docId, users);
} else {
output.error = commonDefines.c_oAscServerCommandErrors.UnknownCommand;
}
break;
}
case 'saved': {
// Result from the document manager about the status of processing the file save after assembly
if ('1' !== params.status) {
//the 'saved' request is executed synchronously, so we fill the variable in order to check it after sendServerRequest
yield editorData.setSaved(ctx, docId, params.status);
ctx.logger.warn('saved corrupted id = %s status = %s conv = %s', docId, params.status, params.conv);
} else {
ctx.logger.info('saved id = %s status = %s conv = %s', docId, params.status, params.conv);
}
break;
}
case 'forcesave': {
let forceSaveRes = yield startForceSave(ctx, docId, commonDefines.c_oAscForceSaveTypes.Command, params.userdata, undefined, undefined, undefined, undefined, utils.getBaseUrlByRequest(req));
output.error = forceSaveRes.code;
break;
}
case 'meta': {
if (params.meta) {
yield* publish(ctx, {type: commonDefines.c_oPublishType.meta, ctx: ctx, docId: docId, meta: params.meta});
} else {
output.error = commonDefines.c_oAscServerCommandErrors.UnknownCommand;
}
break;
}
case 'getForgotten': {
// Checking for files existence.
const forgottenFileFullPath = yield* findForgottenFile(ctx, docId);
if (!forgottenFileFullPath) {
output.error = commonDefines.c_oAscServerCommandErrors.DocumentIdError;
break;
}

const forgottenFile = pathModule.basename(forgottenFileFullPath);

// Creating URLs from files.
const baseUrl = utils.getBaseUrlByRequest(req);
forgottenData.url = yield storage.getSignedUrl(
ctx, baseUrl, forgottenFileFullPath, commonDefines.c_oAscUrlTypes.Temporary, forgottenFile, undefined, cfgForgottenFiles
);
break;
}
case 'deleteForgotten': {
const forgottenFile = yield* findForgottenFile(ctx, docId);
if (!forgottenFile) {
output.error = commonDefines.c_oAscServerCommandErrors.DocumentIdError;
break;
}

yield storage.deleteObject(ctx, forgottenFile, cfgForgottenFiles);
break;
}
case 'getForgottenList': {
forgottenData.keys = yield* getFilesKeys(ctx, cfgForgottenFiles);
break;
}
case 'version': {
output.version = `${commonDefines.buildVersion}.${commonDefines.buildNumber}`;
break;
}
case 'license': {
const outputLicense = yield* commandLicense(ctx);
Object.assign(output, outputLicense);
break;
}
default: {
output.error = commonDefines.c_oAscServerCommandErrors.UnknownCommand;
break;
}
}

Object.assign(output, forgottenData);
}

// Command from the server (in particular, teamlab)
exports.commandFromServer = function (req, res) {
return co(function* () {
let result = commonDefines.c_oAscServerCommandErrors.NoError;
let docId = 'commandFromServer';
let version = undefined;
let outputLicense = undefined;
let ctx = new operationContext.Context();
const output = { key: 'commandFromServer', error: commonDefines.c_oAscServerCommandErrors.NoError, version: undefined };
const ctx = new operationContext.Context();
try {
ctx.initFromRequest(req);
ctx.logger.info('commandFromServer start');
let authRes = yield getRequestParams(ctx, req);
let params = authRes.params;
if(authRes.code === constants.VKEY_KEY_EXPIRE){
result = commonDefines.c_oAscServerCommandErrors.TokenExpire;
} else if(authRes.code !== constants.NO_ERROR){
result = commonDefines.c_oAscServerCommandErrors.Token;
}
const authRes = yield getRequestParams(ctx, req);
const params = authRes.params;
// Document id key
docId = params.key;
ctx.setDocId(docId);
if (commonDefines.c_oAscServerCommandErrors.NoError === result && null == docId && 'version' !== params.c && 'license' !== params.c) {
result = commonDefines.c_oAscServerCommandErrors.DocumentIdError;
} else if(commonDefines.c_oAscServerCommandErrors.NoError === result) {
output.key = params.key;
output.error = validateInputParams(ctx, authRes, params);
if (output.error === commonDefines.c_oAscServerCommandErrors.NoError) {
ctx.logger.debug('commandFromServer: c = %s', params.c);
switch (params.c) {
case 'info':
//If no files in the database means they have not been edited.
const selectRes = yield taskResult.select(ctx, docId);
if (selectRes.length > 0) {
result = yield* bindEvents(ctx, docId, params.callback, utils.getBaseUrlByRequest(req), undefined, params.userdata);
} else {
result = commonDefines.c_oAscServerCommandErrors.DocumentIdError;
}
break;
case 'drop':
if (params.userid) {
yield* publish(ctx, {type: commonDefines.c_oPublishType.drop, ctx: ctx, docId: docId, users: [params.userid], description: params.description});
} else if (params.users) {
const users = (typeof params.users === 'string') ? JSON.parse(params.users) : params.users;
yield* dropUsersFromDocument(ctx, docId, users);
} else {
result = commonDefines.c_oAscServerCommandErrors.UnknownCommand;
}
break;
case 'saved':
// Result from the document manager about the status of processing the file save after assembly
if ('1' !== params.status) {
//the 'saved' request is executed synchronously, so we fill the variable in order to check it after sendServerRequest
yield editorData.setSaved(ctx, docId, params.status);
ctx.logger.warn('saved corrupted id = %s status = %s conv = %s', docId, params.status, params.conv);
} else {
ctx.logger.info('saved id = %s status = %s conv = %s', docId, params.status, params.conv);
}
break;
case 'forcesave':
let forceSaveRes = yield startForceSave(ctx, docId, commonDefines.c_oAscForceSaveTypes.Command, params.userdata, undefined, undefined, undefined, undefined, utils.getBaseUrlByRequest(req));
result = forceSaveRes.code;
break;
case 'meta':
if (params.meta) {
yield* publish(ctx, {type: commonDefines.c_oPublishType.meta, ctx: ctx, docId: docId, meta: params.meta});
} else {
result = commonDefines.c_oAscServerCommandErrors.UnknownCommand;
}
break;
case 'version':
version = commonDefines.buildVersion + '.' + commonDefines.buildNumber;
break;
case 'license':
outputLicense = yield commandLicense(ctx);
break;
default:
result = commonDefines.c_oAscServerCommandErrors.UnknownCommand;
break;
}
yield *commandHandle(ctx, params, req, output);
}
} catch (err) {
result = commonDefines.c_oAscServerCommandErrors.UnknownError;
output.error = commonDefines.c_oAscServerCommandErrors.UnknownError;
ctx.logger.error('Error commandFromServer: %s', err.stack);
} finally {
//undefined value are excluded in JSON.stringify
let output = {'key': docId, 'error': result, 'version': version};
if (outputLicense) {
Object.assign(output, outputLicense);
}
const outputBuffer = Buffer.from(JSON.stringify(output), 'utf8');
res.setHeader('Content-Type', 'application/json');
res.setHeader('Content-Length', outputBuffer.length);
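To illustrate the storage layout the new helpers rely on: a forgotten file is stored under a <docId>/<name>.<ext> object key in the forgotten-files storage, and findForgottenFile matches on the basename without its extension. A small self-contained sketch with assumed values (the 'output' name matches the URL pattern asserted in the integration tests below):

// Hypothetical illustration of the matching rule used by findForgottenFile.
const pathModule = require('path');
const cfgForgottenFilesName = 'output';          // assumed value of server.forgottenfilesname
const forgottenList = ['someDocId/output.docx']; // objects listed under the docId prefix
const match = forgottenList.find(forgotten =>
  cfgForgottenFilesName === pathModule.basename(forgotten, pathModule.extname(forgotten)));
console.log(match); // 'someDocId/output.docx'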
npm-shrinkwrap.json (generated, 3297 lines): diff suppressed because it is too large.
@@ -61,5 +61,14 @@
"grunt-contrib-copy": "^1.0.0",
"grunt-mkdir": "^1.1.0",
"grunt-stripcomments": "^0.7.2"
},
"devDependencies": {
"@jest/globals": "^29.5.0",
"jest": "^29.5.0"
},
"scripts": {
"unit tests": "cd ./DocService && jest unit --config=../tests/jest.config.js",
"integration tests": "cd ./DocService && jest integration --config=../tests/jest.config.js",
"tests": "cd ./DocService && jest --config=../tests/jest.config.js"
}
}
tests/env-setup.js (new file, 15 lines)
@@ -0,0 +1,15 @@
const platforms = {
'win32': 'windows',
'darwin': 'mac',
'linux': 'linux'
};
const platform = platforms[process.platform];

process.env.NODE_ENV = `development-${platform}`;
process.env.NODE_CONFIG_DIR = '../Common/config';

if (platform === 'mac') {
process.env.DYLD_LIBRARY_PATH = '../FileConverter/bin/';
} else if (platform === 'linux') {
process.env.LD_LIBRARY_PATH = '../FileConverter/bin/';
}
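env-setup.js runs before the test suites (it is wired in through setupFiles in tests/jest.config.js below) and steers node-config: NODE_ENV becomes development-<platform> and NODE_CONFIG_DIR points at ../Common/config, so the config package overlays a matching development-<platform>.json, if present, on top of default.json. A quick sketch of what it produces, with the platform hard-coded to 'linux' for illustration:

// Hypothetical check of the NODE_ENV value produced on a Linux host.
const platforms = {'win32': 'windows', 'darwin': 'mac', 'linux': 'linux'};
const platform = platforms['linux'];    // hard-coded instead of process.platform
console.log(`development-${platform}`); // "development-linux"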
tests/integration/forgottenFilesCommnads.tests.js (new file, 246 lines)
@@ -0,0 +1,246 @@
const { describe, test, expect, afterAll, beforeAll } = require('@jest/globals');
const http = require('http');

const { signToken } = require('../../DocService/sources/DocsCoServer');
const storage = require('../../Common/sources/storage-base');
const constants = require('../../Common/sources/commondefines');
const operationContext = require('../../Common/sources/operationContext');
const utils = require("../../Common/sources/utils");
const config = require('../../Common/node_modules/config');

const cfgForgottenFiles = config.get('services.CoAuthoring.server.forgottenfiles');
const cfgForgottenFilesName = config.get('services.CoAuthoring.server.forgottenfilesname');
const cfgTokenAlgorithm = config.get('services.CoAuthoring.token.session.algorithm');
const cfgSecretOutbox = config.get('services.CoAuthoring.secret.outbox');
const cfgTokenOutboxExpires = config.get('services.CoAuthoring.token.outbox.expires');
const cfgTokenEnableRequestOutbox = config.get('services.CoAuthoring.token.enable.request.outbox');
const ctx = new operationContext.Context();
const testFilesNames = {
get: 'DocService-DocsCoServer-forgottenFilesCommands-getForgotten-integration-test',
delete1: 'DocService-DocsCoServer-forgottenFilesCommands-deleteForgotten-integration-test',
// delete2: 'DocService-DocsCoServer-forgottenFilesCommands-deleteForgotten-2-integration-test',
// delete3: 'DocService-DocsCoServer-forgottenFilesCommands-deleteForgotten-3-integration-test',
getList: 'DocService-DocsCoServer-forgottenFilesCommands-getForgottenList-integration-test'
};

function makeRequest(requestBody, timeout = 5000) {
return new Promise(async (resolve, reject) => {
const timer = setTimeout(() => reject('Request timeout'), timeout);

let body = '';
if (cfgTokenEnableRequestOutbox) {
const secret = utils.getSecretByElem(cfgSecretOutbox);
const token = await signToken(ctx, requestBody, cfgTokenAlgorithm, cfgTokenOutboxExpires, constants.c_oAscSecretType.Inbox, secret);
body = JSON.stringify({ token });
} else {
body = JSON.stringify(requestBody);
}

const options = {
port: '8000',
path: '/coauthoring/CommandService.ashx',
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Content-Length': Buffer.byteLength(body)
}
};
const request = http.request(options, (response) => {
response.setEncoding('utf8');

let data = '';
response.on('data', (chunk) => {
data += chunk
});
response.on('end', () => {
resolve(data);
clearTimeout(timer);
});
});

request.on('error', (error) => {
reject(error);
clearTimeout(timer);
});

request.write(body);
request.end();
});
}

function getKeysDirectories(keys) {
return keys.map(value => value.split('/')[0]);
}

beforeAll(async function () {
const buffer = Buffer.from('Forgotten commands test file');
for (const index in testFilesNames) {
await storage.putObject(ctx, `${testFilesNames[index]}/${cfgForgottenFilesName}.docx`, buffer, buffer.length, cfgForgottenFiles);
}
});

afterAll(async function () {
const keys = await storage.listObjects(ctx, '', cfgForgottenFiles);
const deletePromises = keys.filter(key => key.includes('DocService-DocsCoServer-forgottenFilesCommands'))
.map(filteredKey => storage.deleteObject(ctx, filteredKey, cfgForgottenFiles));

return Promise.allSettled(deletePromises);
});

// Assumed, that server is already up.
describe('Command service', function () {
describe('Forgotten files commands parameters validation', function () {
describe('Invalid key format', function () {
const tests = ['getForgotten', 'deleteForgotten'];
const addSpecialCases = (invalidRequests, expected, testSubject) => {
invalidRequests.push({
c: testSubject
});
expected.push({ error: 1});

invalidRequests.push({
c: testSubject,
key: null
});
expected.push({
key: null,
error: 1
});
};

for (const testSubject of tests) {
test(testSubject, async function () {
const invalidKeys = [true, [], {}, 1, 1.1];
const invalidRequests = invalidKeys.map(key => {
return {
c: testSubject,
key
}
});

const expected = invalidKeys.map(key => {
return {
key,
error: 1,
};
});

addSpecialCases(invalidRequests, expected, testSubject);

for (const index in invalidRequests) {
const actualResponse = await makeRequest(invalidRequests[index]);
const actual = JSON.parse(actualResponse);

expect(actual).toEqual(expected[index]);
}
});
}
});
});

describe('Forgotten files commands verification', function () {
describe('getForgotten', function () {
const createExpected = ({ key, error }) => {
const validKey = typeof key === 'string' && error === 0
const urlPattern = 'http://localhost:8000/cache/files/forgotten/--key--/output.docx/output.docx';

const expected = { key, error };

if (validKey) {
expected.url = urlPattern.replace('--key--', key);
}

return expected;
};

const testCases = {
'Single key': { key: testFilesNames.get, error: 0 },
'Not existed key': { key: '--not-existed--', error: 1 },
};

for (const testCase in testCases) {
test(testCase, async () => {
const requestBody = {
c: 'getForgotten',
key: testCases[testCase].key
};

const actualResponse = await makeRequest(requestBody);

const expected = createExpected(testCases[testCase]);
const actual = JSON.parse(actualResponse);

if (actual.url) {
actual.url = actual.url.split('?')[0];
}

expect(actual).toEqual(expected);
});
}
});

describe('deleteForgotten', function () {
const createExpected = ({ key, error }) => {
return {
key,
error
};
};

const testCases = {
'Single key': { key: testFilesNames.delete1, error: 0 },
'Not existed key': { key: '--not-existed--', error: 1 },
};

for (const testCase in testCases) {
test(testCase, async () => {
const requestBody = {
c: 'deleteForgotten',
key: testCases[testCase].key
};

const alreadyExistedDirectories = getKeysDirectories(await storage.listObjects(ctx, '', cfgForgottenFiles));
const directoryToBeDeleted = testCases[testCase].error !== 0 ? '--not-existed--' : testCases[testCase].key;
const shouldExist = alreadyExistedDirectories.filter(directory => directoryToBeDeleted !== directory);

const actualResponse = await makeRequest(requestBody);

const expected = createExpected(testCases[testCase]);
const actual = JSON.parse(actualResponse);

const directoriesExistedAfterDeletion = getKeysDirectories(await storage.listObjects(ctx, '', cfgForgottenFiles));
expect(actual).toEqual(expected);
// Checking that files not existing on disk/cloud.
expect(shouldExist).toEqual(directoriesExistedAfterDeletion);
});
}
});

describe('getForgottenList', function () {
test('Main case', async () => {
const requestBody = {
c: 'getForgottenList'
};

const stateBeforeChanging = await makeRequest(requestBody);
const alreadyExistedDirectories = JSON.parse(stateBeforeChanging);

const docId = 'DocService-DocsCoServer-forgottenFilesCommands-getForgottenList-2-integration-test';
const buffer = Buffer.from('getForgottenList test file');
await storage.putObject(ctx, `${docId}/${cfgForgottenFilesName}.docx`, buffer, buffer.length, cfgForgottenFiles);
alreadyExistedDirectories.keys.push(docId);

const actualResponse = await makeRequest(requestBody);
const actual = JSON.parse(actualResponse);
const expected = {
error: 0,
keys: alreadyExistedDirectories.keys
}

actual.keys?.sort();
expected.keys.sort();
expect(actual).toEqual(expected);
});
});
});
});
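As a usage sketch, the makeRequest helper above can also be driven ad hoc against a locally running document server (port 8000, as hard-coded in the helper); the call below lists the keys of all currently forgotten files:

// Hypothetical one-off call using the test helper; assumes the server is up on localhost:8000.
makeRequest({c: 'getForgottenList'})
  .then(response => console.log(JSON.parse(response).keys))
  .catch(error => console.error(error));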
tests/jest.config.js (new file, 192 lines)
@@ -0,0 +1,192 @@
/*
* For a detailed explanation regarding each configuration property, visit:
* https://jestjs.io/docs/configuration
*/

module.exports = {
// All imported modules in your tests should be mocked automatically
// automock: false,

// Stop running tests after `n` failures
// bail: 0,

// The directory where Jest should store its cached dependency information
// cacheDirectory: "",

// Automatically clear mock calls, instances, contexts and results before every test
clearMocks: true,

// Indicates whether the coverage information should be collected while executing the test
// collectCoverage: false,

// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: undefined,

// The directory where Jest should output its coverage files
// coverageDirectory: undefined,

// An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [
// "\\\\node_modules\\\\"
// ],

// Indicates which provider should be used to instrument code for coverage
coverageProvider: "v8",

// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],

// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: undefined,

// A path to a custom dependency extractor
// dependencyExtractor: undefined,

// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,

// The default configuration for fake timers
// fakeTimers: {
// "enableGlobally": false
// },

// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],

// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: undefined,

// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: undefined,

// A set of global variables that need to be available in all test environments
// globals: {},

// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
// maxWorkers: "50%",

// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],

// An array of file extensions your modules use
// moduleFileExtensions: [
// "js",
// "mjs",
// "cjs",
// "jsx",
// "ts",
// "tsx",
// "json",
// "node"
// ],

// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
// moduleNameMapper: {},

// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],

// Activates notifications for test results
// notify: false,

// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "failure-change",

// A preset that is used as a base for Jest's configuration
// preset: undefined,

// Run tests from one or more projects
// projects: undefined,

// Use this configuration option to add custom reporters to Jest
// reporters: undefined,

// Automatically reset mock state before every test
// resetMocks: false,

// Reset the module registry before running each individual test
// resetModules: false,

// A path to a custom resolver
// resolver: undefined,

// Automatically restore mock state and implementation before every test
// restoreMocks: false,

// The root directory that Jest should scan for tests and modules within
// rootDir: undefined,

// A list of paths to directories that Jest should use to search for files in
// roots: ["<rootDir>"],

// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",

// The paths to modules that run some code to configure or set up the testing environment before each test
setupFiles: ['./env-setup.js'],

// A list of paths to modules that run some code to configure or set up the testing framework before each test
// setupFilesAfterEnv: [],

// The number of seconds after which a test is considered as slow and reported as such in the results.
// slowTestThreshold: 5,

// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],

// The test environment that will be used for testing
// testEnvironment: "jest-environment-node",

// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},

// Adds a location field to test results
// testLocationInResults: false,

// The glob patterns Jest uses to detect test files
testMatch: [
"**/?(*.)+(spec|tests).[tj]s?(x)"
],

// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testPathIgnorePatterns: [
// "\\\\node_modules\\\\"
// ],

// The regexp pattern or array of patterns that Jest uses to detect test files
// testRegex: [],

// This option allows the use of a custom results processor
// testResultsProcessor: undefined,

// This option allows use of a custom test runner
// testRunner: "jest-circus/runner",

// A map from regular expressions to paths to transformers
// transform: undefined,

// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "\\\\node_modules\\\\",
// "\\.pnp\\.[^\\\\]+$"
// ],

// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,

// Indicates whether each individual test should be reported during the run
// verbose: undefined,

// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],

// Whether to use watchman for file crawling
// watchman: true,
};
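The non-default settings above are the ones that matter for this change: setupFiles pulls in tests/env-setup.js, and testMatch accepts files ending in .spec or .tests with a .js/.ts extension, which is how the forgottenFilesCommnads.tests.js suite above is discovered. A rough equivalent check using a regular expression that only approximates the glob (it is not Jest's actual matcher):

// Approximation of the testMatch glob "**/?(*.)+(spec|tests).[tj]s?(x)".
const looksLikeTest = /(^|\/)([^/]*\.)?(spec|tests)\.[tj]sx?$/;
console.log(looksLikeTest.test('tests/integration/forgottenFilesCommnads.tests.js')); // true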