[prettier] auto-fix

Paul Ostrovckij
2025-08-27 10:50:22 +03:00
parent 79de4b3eaa
commit fb20086ef1
82 changed files with 11568 additions and 10222 deletions
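The changes in this commit are consistent with Prettier enforcing single quotes, no spaces inside object braces, no trailing commas, arrow functions without parentheses around a single argument, mandatory semicolons, and a wide print width. Below is a minimal sketch of a configuration that would produce formatting like this; the repository's actual .prettierrc is not part of this diff, so every option value is inferred from the output, and printWidth in particular is an assumption.

// .prettierrc.js (hypothetical, inferred from the diff; not the repository's actual config)
module.exports = {
  singleQuote: true,     // "..." becomes '...'
  bracketSpacing: false, // { describe, test } becomes {describe, test}
  arrowParens: 'avoid',  // (chunk) => ... becomes chunk => ...
  trailingComma: 'none', // trailing commas dropped from arrays and objects
  printWidth: 150,       // long expressions joined onto a single line (assumed value)
  semi: true             // missing semicolons added
};

An auto-fix of this kind is typically produced by running something like npx prettier --write . over the source tree and committing the result.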

View File

@ -31,9 +31,9 @@
*/
const platforms = {
'win32': 'windows',
'darwin': 'mac',
'linux': 'linux'
win32: 'windows',
darwin: 'mac',
linux: 'linux'
};
const platform = platforms[process.platform];

View File

@ -30,7 +30,7 @@
*
*/
const { describe, test, expect, afterAll } = require('@jest/globals');
const {describe, test, expect, afterAll} = require('@jest/globals');
const config = require('../../../Common/node_modules/config');
const baseConnector = require('../../../DocService/sources/databaseConnectors/baseConnector');
@ -38,7 +38,7 @@ const operationContext = require('../../../Common/sources/operationContext');
const taskResult = require('../../../DocService/sources/taskresult');
const commonDefines = require('../../../Common/sources/commondefines');
const constants = require('../../../Common/sources/constants');
const utils = require("../../../Common/sources/utils");
const utils = require('../../../Common/sources/utils');
const configSql = config.get('services.CoAuthoring.sql');
const ctx = new operationContext.Context();
@ -72,7 +72,7 @@ const dbTypes = {
string: function () {
return this[cfgDbType].string;
}
}
};
const insertCases = {
5: 'baseConnector-insert()-tester-5-rows',
@ -91,20 +91,11 @@ const emptyCallbacksCase = [
'baseConnector-getEmptyCallbacks()-tester-1',
'baseConnector-getEmptyCallbacks()-tester-2',
'baseConnector-getEmptyCallbacks()-tester-3',
'baseConnector-getEmptyCallbacks()-tester-4',
];
const documentsWithChangesCase = [
'baseConnector-getDocumentsWithChanges()-tester-0',
'baseConnector-getDocumentsWithChanges()-tester-1'
];
const getExpiredCase = [
'baseConnector-getExpired()-tester-0',
'baseConnector-getExpired()-tester-1',
'baseConnector-getExpired()-tester-2',
];
const getCountWithStatusCase = [
'baseConnector-getCountWithStatusCase()-tester-0'
'baseConnector-getEmptyCallbacks()-tester-4'
];
const documentsWithChangesCase = ['baseConnector-getDocumentsWithChanges()-tester-0', 'baseConnector-getDocumentsWithChanges()-tester-1'];
const getExpiredCase = ['baseConnector-getExpired()-tester-0', 'baseConnector-getExpired()-tester-1', 'baseConnector-getExpired()-tester-2'];
const getCountWithStatusCase = ['baseConnector-getCountWithStatusCase()-tester-0'];
const upsertCases = {
insert: 'baseConnector-upsert()-tester-row-inserted',
update: 'baseConnector-upsert()-tester-row-updated'
@ -123,15 +114,13 @@ function createChanges(changesLength, date) {
const length = changesLength - 1;
for (let i = 1; i <= length; i++) {
objChanges.push(
{
docid: '__ffff_127.0.0.1new.docx41692082262909',
change: '"39;CgAAADcAXwA2ADQAMAACABwAAQAAAAAAAAABAAAALgAAAAAAAAAA"',
time: date,
user: 'uid-18',
useridoriginal: 'uid-1'
}
);
objChanges.push({
docid: '__ffff_127.0.0.1new.docx41692082262909',
change: '"39;CgAAADcAXwA2ADQAMAACABwAAQAAAAAAAAABAAAALgAAAAAAAAAA"',
time: date,
user: 'uid-18',
useridoriginal: 'uid-1'
});
}
return objChanges;
@ -155,13 +144,20 @@ function deleteRowsByIds(table, ids) {
function executeSql(sql, values = []) {
return new Promise((resolve, reject) => {
baseConnector.sqlQuery(ctx, sql, function (error, result) {
if (error) {
reject(error)
} else {
resolve(result)
}
}, false, false, values);
baseConnector.sqlQuery(
ctx,
sql,
function (error, result) {
if (error) {
reject(error);
} else {
resolve(result);
}
},
false,
false,
values
);
});
}
@ -229,7 +225,7 @@ describe('Base database connector', function () {
expect(result.length).toEqual(1);
});
test('Correct return format of requested rows', async function() {
test('Correct return format of requested rows', async function () {
const result = await baseConnector.healthCheck(ctx);
// The [[constructor]] field is referring to a parent class instance, so for Object-like values it is equal to itself.
@ -243,19 +239,23 @@ describe('Base database connector', function () {
});
test('Correct return format of changing in DB', async function () {
const createTableSql = `CREATE TABLE test_table(num ${dbTypes.number()});`
const createTableSql = `CREATE TABLE test_table(num ${dbTypes.number()});`;
const alterTableSql = `INSERT INTO test_table VALUES(1);`;
await executeSql(createTableSql);
const result = await executeSql(alterTableSql);
expect(result).toEqual({ affectedRows: 1 });
expect(result).toEqual({affectedRows: 1});
});
describe('DB tables existence', function () {
const tables = {
[cfgTableResult]: constants.TABLE_RESULT_SCHEMA.map(column => { return { column_name: column } }),
[cfgTableChanges]: constants.TABLE_CHANGES_SCHEMA.map(column => { return { column_name: column } })
[cfgTableResult]: constants.TABLE_RESULT_SCHEMA.map(column => {
return {column_name: column};
}),
[cfgTableChanges]: constants.TABLE_CHANGES_SCHEMA.map(column => {
return {column_name: column};
})
};
for (const table in tables) {
@ -266,8 +266,8 @@ describe('Base database connector', function () {
}
});
}
const table = "unused_table";
const table = 'unused_table';
test(`${table} table absence`, async function () {
const result = await baseConnector.getTableColumns(ctx, table);
expect(result).toEqual([]);
@ -335,7 +335,7 @@ describe('Base database connector', function () {
const result = await baseConnector.getChangesIndexPromise(ctx, docId);
// We created 10 changes rows, change_id: 0..9, changes index is MAX(change_id).
const expected = [{ change_id: 9 }];
const expected = [{change_id: 9}];
expect(result).toEqual(expected);
});
@ -354,7 +354,7 @@ describe('Base database connector', function () {
});
});
test('Get empty callbacks' , async function () {
test('Get empty callbacks', async function () {
const idCount = 5;
const notNullCallbacks = idCount - 2;
@ -390,10 +390,7 @@ describe('Base database connector', function () {
for (const id of documentsWithChangesCase) {
const task = createTask(id);
await Promise.all([
baseConnector.insertChangesPromise(ctx, objChanges, id, index, user),
insertIntoResultTable(date, task)
]);
await Promise.all([baseConnector.insertChangesPromise(ctx, objChanges, id, index, user), insertIntoResultTable(date, task)]);
}
const resultAfterNewRows = await baseConnector.getDocumentsWithChanges(ctx);
@ -420,7 +417,7 @@ describe('Base database connector', function () {
test('Get Count With Status', async function () {
let countWithStatus;
let unknownStatus = 99;//to avoid collision with running server
let unknownStatus = 99; //to avoid collision with running server
let EXEC_TIMEOUT = 30000 + utils.getConvertionTimeout(undefined);
countWithStatus = await baseConnector.getCountWithStatus(ctx, unknownStatus, EXEC_TIMEOUT);
expect(countWithStatus).toEqual(0);
@ -443,7 +440,7 @@ describe('Base database connector', function () {
const result = await baseConnector.upsert(ctx, task);
// isInsert should be true because of insert operation, insertId should be 1 by default.
const expected = { isInsert: true, insertId: 1 };
const expected = {isInsert: true, insertId: 1};
expect(result).toEqual(expected);
const insertedResult = await getRowsCountById(cfgTableResult, task.key);
@ -463,12 +460,12 @@ describe('Base database connector', function () {
const result = await baseConnector.upsert(ctx, task);
// isInsert should be false because of update operation, insertId should be 2 by updating clause.
const expected = { isInsert: false, insertId: 2 };
const expected = {isInsert: false, insertId: 2};
expect(result).toEqual(expected);
const updatedRow = await executeSql(`SELECT id, baseurl FROM ${cfgTableResult} WHERE id = '${task.key}';`);
const expectedUrlChanges = [{ id: task.key, baseurl: 'some-updated-url' }];
const expectedUrlChanges = [{id: task.key, baseurl: 'some-updated-url'}];
expect(updatedRow).toEqual(expectedUrlChanges);
});
});

View File

@ -30,14 +30,14 @@
*
*/
const { describe, test, expect, afterAll, beforeAll } = require('@jest/globals');
const {describe, test, expect, afterAll, beforeAll} = require('@jest/globals');
const http = require('http');
const { signToken } = require('../../../DocService/sources/DocsCoServer');
const {signToken} = require('../../../DocService/sources/DocsCoServer');
const storage = require('../../../Common/sources/storage/storage-base');
const constants = require('../../../Common/sources/commondefines');
const operationContext = require('../../../Common/sources/operationContext');
const utils = require("../../../Common/sources/utils");
const utils = require('../../../Common/sources/utils');
const config = require('../../../Common/node_modules/config');
@ -70,7 +70,7 @@ function makeRequest(requestBody, timeout = 5000) {
if (cfgTokenEnableRequestOutbox) {
const secret = utils.getSecretByElem(cfgSecretOutbox);
const token = await signToken(ctx, requestBody, cfgTokenAlgorithm, cfgTokenOutboxExpires, constants.c_oAscSecretType.Inbox, secret);
body = JSON.stringify({ token });
body = JSON.stringify({token});
} else {
body = JSON.stringify(requestBody);
}
@ -84,12 +84,12 @@ function makeRequest(requestBody, timeout = 5000) {
'Content-Length': Buffer.byteLength(body)
}
};
const request = http.request(options, (response) => {
const request = http.request(options, response => {
response.setEncoding('utf8');
let data = '';
response.on('data', (chunk) => {
data += chunk
response.on('data', chunk => {
data += chunk;
});
response.on('end', () => {
resolve(data);
@ -97,7 +97,7 @@ function makeRequest(requestBody, timeout = 5000) {
});
});
request.on('error', (error) => {
request.on('error', error => {
reject(error);
clearTimeout(timer);
});
@ -121,10 +121,11 @@ beforeAll(async function () {
afterAll(async function () {
const keys = await storage.listObjects(ctx, '', cfgForgottenFiles);
const keysDirectories = getKeysDirectories(keys);
const deletePromises = keysDirectories.filter(key => key.includes('DocService-DocsCoServer-forgottenFilesCommands'))
const deletePromises = keysDirectories
.filter(key => key.includes('DocService-DocsCoServer-forgottenFilesCommands'))
.map(filteredKey => storage.deletePath(ctx, filteredKey, cfgForgottenFiles));
console.log(`keys:`+JSON.stringify(keys));
console.log(`keysDirectories:`+JSON.stringify(keysDirectories));
console.log(`keys:` + JSON.stringify(keys));
console.log(`keysDirectories:` + JSON.stringify(keysDirectories));
return Promise.allSettled(deletePromises);
});
@ -137,7 +138,7 @@ describe('Command service', function () {
invalidRequests.push({
c: testSubject
});
expected.push({ error: 1});
expected.push({error: 1});
invalidRequests.push({
c: testSubject,
@ -156,13 +157,13 @@ describe('Command service', function () {
return {
c: testSubject,
key
}
};
});
const expected = invalidKeys.map(key => {
return {
key,
error: 1,
error: 1
};
});
@ -178,20 +179,20 @@ describe('Command service', function () {
}
});
});
describe('Forgotten files commands verification', function () {
describe('getForgotten', function () {
const createExpected = ({ key, error }) => {
const validKey = typeof key === 'string' && error === 0
const createExpected = ({key, error}) => {
const validKey = typeof key === 'string' && error === 0;
let urlPattern;
if ("storage-fs" === cfgStorageName || !cfgUseDirectStorageUrls) {
if ("storage-fs" === cfgStorageName) {
if ('storage-fs' === cfgStorageName || !cfgUseDirectStorageUrls) {
if ('storage-fs' === cfgStorageName) {
urlPattern = 'http://localhost:8000/cache/files/forgotten/--key--/output.docx/output.docx';
} else {
urlPattern = 'http://localhost:8000/storage-cache/files/forgotten/--key--/output.docx/output.docx';
}
} else if ("storage-s3" === cfgStorageName) {
let host = cfgEndpoint.slice(0, "https://".length) + cfgBucketName + "." + cfgEndpoint.slice("https://".length);
} else if ('storage-s3' === cfgStorageName) {
let host = cfgEndpoint.slice(0, 'https://'.length) + cfgBucketName + '.' + cfgEndpoint.slice('https://'.length);
if (host[host.length - 1] === '/') {
host = host.slice(0, -1);
}
@ -199,9 +200,9 @@ describe('Command service', function () {
} else {
let host;
if (cfgEndpoint.includes(cfgAccessKeyId)) {
host = cfgEndpoint.slice(0, "https://".length) + cfgEndpoint.slice("https://".length) + '/' + cfgBucketName;
host = cfgEndpoint.slice(0, 'https://'.length) + cfgEndpoint.slice('https://'.length) + '/' + cfgBucketName;
} else {
host = cfgEndpoint.slice(0, "https://".length) + cfgAccessKeyId + "." + cfgEndpoint.slice("https://".length) + '/' + cfgBucketName;
host = cfgEndpoint.slice(0, 'https://'.length) + cfgAccessKeyId + '.' + cfgEndpoint.slice('https://'.length) + '/' + cfgBucketName;
}
if (host[host.length - 1] === '/') {
host = host.slice(0, -1);
@ -209,7 +210,7 @@ describe('Command service', function () {
urlPattern = host + '/files/forgotten/--key--/output.docx';
}
const expected = { key, error };
const expected = {key, error};
if (validKey) {
expected.url = urlPattern.replace('--key--', key);
@ -219,8 +220,8 @@ describe('Command service', function () {
};
const testCases = {
'Single key': { key: testFilesNames.get, error: 0 },
'Not existed key': { key: '--not-existed--', error: 1 },
'Single key': {key: testFilesNames.get, error: 0},
'Not existed key': {key: '--not-existed--', error: 1}
};
for (const testCase in testCases) {
@ -245,7 +246,7 @@ describe('Command service', function () {
});
describe('deleteForgotten', function () {
const createExpected = ({ key, error }) => {
const createExpected = ({key, error}) => {
return {
key,
error
@ -253,8 +254,8 @@ describe('Command service', function () {
};
const testCases = {
'Single key': { key: testFilesNames.delete1, error: 0 },
'Not existed key': { key: '--not-existed--', error: 1 },
'Single key': {key: testFilesNames.delete1, error: 0},
'Not existed key': {key: '--not-existed--', error: 1}
};
for (const testCase in testCases) {
@ -280,7 +281,7 @@ describe('Command service', function () {
});
}
});
describe('getForgottenList', function () {
test('Main case', async () => {
const requestBody = {
@ -300,7 +301,7 @@ describe('Command service', function () {
const expected = {
error: 0,
keys: alreadyExistedDirectories.keys
}
};
actual.keys?.sort();
expected.keys.sort();
@ -308,4 +309,4 @@ describe('Command service', function () {
});
});
});
});
});

View File

@ -31,7 +31,7 @@
*/
const {jest, describe, test, expect, beforeAll, afterAll} = require('@jest/globals');
jest.mock("fs/promises", () => ({
jest.mock('fs/promises', () => ({
...jest.requireActual('fs/promises'),
cp: jest.fn().mockImplementation((from, to) => fs.writeFileSync(to, testFileData3))
}));
@ -43,15 +43,15 @@ jest.mock('../../../Common/sources/storage/storage-base', () => {
needServeStatic: mockNeedServeStatic
};
});
const { cp } = require('fs/promises');
const {cp} = require('fs/promises');
const http = require('http');
const https = require('https');
const fs = require('fs');
const { Readable } = require('stream');
const {Readable} = require('stream');
let testFileData1 = "test1";
let testFileData2 = "test22";
let testFileData3 = "test333";
let testFileData1 = 'test1';
let testFileData2 = 'test22';
let testFileData3 = 'test333';
let testFileData4 = testFileData3;
const express = require('express');
@ -59,7 +59,7 @@ const operationContext = require('../../../Common/sources/operationContext');
const tenantManager = require('../../../Common/sources/tenantManager');
const storage = require('../../../Common/sources/storage/storage-base');
const utils = require('../../../Common/sources/utils');
const commonDefines = require("../../../Common/sources/commondefines");
const commonDefines = require('../../../Common/sources/commondefines');
const config = require('../../../Common/node_modules/config');
const staticRouter = require('../../../DocService/sources/routes/static');
@ -69,17 +69,17 @@ const cfgPersistentStorage = utils.deepMergeObjects({}, cfgCacheStorage, config.
const ctx = operationContext.global;
const PORT = 3457;
const rand = Math.floor(Math.random() * 1000000);
const testDir = "DocService-DocsCoServer-storage-" + rand;
const testDir = 'DocService-DocsCoServer-storage-' + rand;
const baseUrl = `http://localhost:${PORT}`;
const urlType = commonDefines.c_oAscUrlTypes.Session;
let testFile1 = testDir + "/test1.txt";
let testFile2 = testDir + "/test2.txt";
let testFile3 = testDir + "/test3.txt";
let testFile4 = testDir + "/test4.txt";
let specialDirCache = "";
let specialDirForgotten = "forgotten";
let testFile1 = testDir + '/test1.txt';
let testFile2 = testDir + '/test2.txt';
let testFile3 = testDir + '/test3.txt';
let testFile4 = testDir + '/test4.txt';
let specialDirCache = '';
let specialDirForgotten = 'forgotten';
console.debug(`testDir: ${testDir}`)
console.debug(`testDir: ${testDir}`);
let server;
@ -94,7 +94,7 @@ beforeAll(async () => {
afterAll(async () => {
if (server) {
await new Promise((resolve) => server.close(resolve));
await new Promise(resolve => server.close(resolve));
}
});
@ -111,27 +111,27 @@ function request(url) {
response.on('error', error => reject(error));
response.on('end', () => resolve(data));
});
req.on('error', error => reject(error));
});
}
function runTestForDir(ctx, isMultitenantMode, specialDir) {
let oldMultitenantMode = tenantManager.isMultitenantMode();
test("start listObjects", async () => {
test('start listObjects', async () => {
//todo set in all tests do not rely on test order
tenantManager.setMultitenantMode(isMultitenantMode);
let list = await storage.listObjects(ctx, testDir, specialDir);
expect(list).toEqual([]);
});
test("putObject", async () => {
test('putObject', async () => {
let buffer = Buffer.from(testFileData1);
let res = await storage.putObject(ctx, testFile1, buffer, buffer.length, specialDir);
expect(res).toEqual(undefined);
let list = await storage.listObjects(ctx, testDir, specialDir);
expect(list.sort()).toEqual([testFile1].sort());
});
test("putObject-stream", async () => {
test('putObject-stream', async () => {
let buffer = Buffer.from(testFileData2);
const stream = Readable.from(buffer);
let res = await storage.putObject(ctx, testFile2, stream, buffer.length, specialDir);
@ -139,20 +139,20 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
let list = await storage.listObjects(ctx, testDir, specialDir);
expect(list.sort()).toEqual([testFile1, testFile2].sort());
});
if ("storage-fs" === getStorageCfg(specialDir).name) {
test("UploadObject", async () => {
let res = await storage.uploadObject(ctx, testFile3, "createReadStream.txt", specialDir);
if ('storage-fs' === getStorageCfg(specialDir).name) {
test('UploadObject', async () => {
let res = await storage.uploadObject(ctx, testFile3, 'createReadStream.txt', specialDir);
expect(res).toEqual(undefined);
expect(cp).toHaveBeenCalled();
let list = await storage.listObjects(ctx, testDir, specialDir);
expect(list.sort()).toEqual([testFile1, testFile2, testFile3].sort());
});
} else {
test("uploadObject", async () => {
test('uploadObject', async () => {
const readStream = Readable.from(testFileData3);
readStream.size = testFileData3.length;
const spy = jest.spyOn(fs, 'createReadStream').mockReturnValue(readStream);
let res = await storage.uploadObject(ctx, testFile3, "createReadStream.txt", specialDir);
let res = await storage.uploadObject(ctx, testFile3, 'createReadStream.txt', specialDir);
expect(res).toEqual(undefined);
let list = await storage.listObjects(ctx, testDir, specialDir);
expect(spy).toHaveBeenCalled();
@ -161,33 +161,33 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
});
//todo fails with storage-s3
test.skip("uploadObject - stream error handling", async () => {
const streamErrorMessage = new Error("Test stream error");
const mockStream = Readable.from(async function* () {
yield "first chunk\n";
await new Promise(r => setTimeout(r, 5));
throw streamErrorMessage;
}());
test.skip('uploadObject - stream error handling', async () => {
const streamErrorMessage = new Error('Test stream error');
const mockStream = Readable.from(
(async function* () {
yield 'first chunk\n';
await new Promise(r => setTimeout(r, 5));
throw streamErrorMessage;
})()
);
mockStream.size = 1024;
const spy = jest.spyOn(fs, 'createReadStream').mockReturnValue(mockStream);
// Verify that the uploadObject function rejects when the stream emits an error
await expect(storage.uploadObject(ctx, "test-error-file.txt", "nonexistent.txt", specialDir))
.rejects.toThrow(streamErrorMessage);
await expect(storage.uploadObject(ctx, 'test-error-file.txt', 'nonexistent.txt', specialDir)).rejects.toThrow(streamErrorMessage);
spy.mockRestore();
});
test.skip("uploadObject - non-existent file handling", async () => {
test.skip('uploadObject - non-existent file handling', async () => {
const nonExistentFile = 'definitely-does-not-exist-' + Date.now() + '.txt';
// Verify the file actually doesn't exist
expect(fs.existsSync(nonExistentFile)).toBe(false);
// Verify that uploadObject properly handles and propagates the error
await expect(storage.uploadObject(ctx, "test-error-file.txt", nonExistentFile, specialDir))
.rejects.toThrow(/ENOENT/);
await expect(storage.uploadObject(ctx, 'test-error-file.txt', nonExistentFile, specialDir)).rejects.toThrow(/ENOENT/);
});
}
test("copyObject", async () => {
test('copyObject', async () => {
let res = await storage.copyObject(ctx, testFile3, testFile4, specialDir, specialDir);
expect(res).toEqual(undefined);
// let buffer = Buffer.from(testFileData3);
@ -195,53 +195,53 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
let list = await storage.listObjects(ctx, testDir, specialDir);
expect(list.sort()).toEqual([testFile1, testFile2, testFile3, testFile4].sort());
});
test("headObject", async () => {
test('headObject', async () => {
let output;
output = await storage.headObject(ctx, testFile1, specialDir);
expect(output).toMatchObject({ContentLength: testFileData1.length});
output = await storage.headObject(ctx, testFile2, specialDir);
output = await storage.headObject(ctx, testFile2, specialDir);
expect(output).toMatchObject({ContentLength: testFileData2.length});
output = await storage.headObject(ctx, testFile3, specialDir);
output = await storage.headObject(ctx, testFile3, specialDir);
expect(output).toMatchObject({ContentLength: testFileData3.length});
output = await storage.headObject(ctx, testFile4, specialDir);
output = await storage.headObject(ctx, testFile4, specialDir);
expect(output).toMatchObject({ContentLength: testFileData4.length});
});
test("getObject", async () => {
test('getObject', async () => {
let output;
output = await storage.getObject(ctx, testFile1, specialDir);
expect(output.toString("utf8")).toEqual(testFileData1);
expect(output.toString('utf8')).toEqual(testFileData1);
output = await storage.getObject(ctx, testFile2, specialDir);
expect(output.toString("utf8")).toEqual(testFileData2);
output = await storage.getObject(ctx, testFile2, specialDir);
expect(output.toString('utf8')).toEqual(testFileData2);
output = await storage.getObject(ctx, testFile3, specialDir);
expect(output.toString("utf8")).toEqual(testFileData3);
output = await storage.getObject(ctx, testFile3, specialDir);
expect(output.toString('utf8')).toEqual(testFileData3);
output = await storage.getObject(ctx, testFile4, specialDir);
expect(output.toString("utf8")).toEqual(testFileData4);
output = await storage.getObject(ctx, testFile4, specialDir);
expect(output.toString('utf8')).toEqual(testFileData4);
});
test("createReadStream", async () => {
test('createReadStream', async () => {
let output, outputText;
output = await storage.createReadStream(ctx, testFile1, specialDir);
expect(output.contentLength).toEqual(testFileData1.length);
outputText = await utils.stream2Buffer(output.readStream);
expect(outputText.toString("utf8")).toEqual(testFileData1);
expect(outputText.toString('utf8')).toEqual(testFileData1);
output = await storage.createReadStream(ctx, testFile2, specialDir);
expect(output.contentLength).toEqual(testFileData2.length);
outputText = await utils.stream2Buffer(output.readStream);
expect(outputText.toString("utf8")).toEqual(testFileData2);
expect(outputText.toString('utf8')).toEqual(testFileData2);
output = await storage.createReadStream(ctx, testFile3, specialDir);
expect(output.contentLength).toEqual(testFileData3.length);
outputText = await utils.stream2Buffer(output.readStream);
expect(outputText.toString("utf8")).toEqual(testFileData3);
expect(outputText.toString('utf8')).toEqual(testFileData3);
});
test("getSignedUrl", async () => {
test('getSignedUrl', async () => {
let url, urls, data;
url = await storage.getSignedUrl(ctx, baseUrl, testFile1, urlType, undefined, undefined, specialDir);
data = await request(url);
@ -259,34 +259,34 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
data = await request(url);
expect(data).toEqual(testFileData4);
});
test("getSignedUrls", async () => {
test('getSignedUrls', async () => {
let urls, data;
urls = await storage.getSignedUrls(ctx, baseUrl, testDir, urlType, undefined, specialDir);
data = [];
for(let i in urls) {
for (let i in urls) {
data.push(await request(urls[i]));
}
expect(data.sort()).toEqual([testFileData1, testFileData2, testFileData3, testFileData4].sort());
});
test("getSignedUrlsArrayByArray", async () => {
test('getSignedUrlsArrayByArray', async () => {
let urls, data;
urls = await storage.getSignedUrlsArrayByArray(ctx, baseUrl, [testFile1, testFile2], urlType, specialDir);
data = [];
for(let i = 0; i < urls.length; ++i) {
for (let i = 0; i < urls.length; ++i) {
data.push(await request(urls[i]));
}
expect(data.sort()).toEqual([testFileData1, testFileData2].sort());
});
test("getSignedUrlsByArray", async () => {
test('getSignedUrlsByArray', async () => {
let urls, data;
urls = await storage.getSignedUrlsByArray(ctx, baseUrl, [testFile3, testFile4], undefined, urlType, specialDir);
data = [];
for(let i in urls) {
for (let i in urls) {
data.push(await request(urls[i]));
}
expect(data.sort()).toEqual([testFileData3, testFileData4].sort());
});
test("getSignedUrl with direct URLs enabled", async () => {
test('getSignedUrl with direct URLs enabled', async () => {
let buffer = Buffer.from(testFileData1);
let res = await storage.putObject(ctx, testFile1, buffer, buffer.length, specialDirCache);
expect(res).toEqual(undefined);
@ -294,26 +294,26 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
let url = await storage.getSignedUrl(ctx, baseUrl, testFile1, urlType, undefined, undefined, specialDirCache, true);
let data = await request(url);
expect(data).toEqual(testFileData1);
if (cfgCacheStorage.name !== 'storage-fs') {
expect(url).toContain(cfgCacheStorage.endpoint);
expect(url).toContain(cfgCacheStorage.bucketName);
}
});
test("getSignedUrl with direct URLs disabled", async () => {
test('getSignedUrl with direct URLs disabled', async () => {
let buffer = Buffer.from(testFileData1);
let res = await storage.putObject(ctx, testFile1, buffer, buffer.length, specialDirCache);
expect(res).toEqual(undefined);
let url = await storage.getSignedUrl(ctx, baseUrl, testFile1, urlType, undefined, undefined, specialDirCache, false);
let data = await request(url);
expect(data).toEqual(testFileData1);
expect(url).toContain('md5');
expect(url).toContain('expires');
expect(url).toContain(cfgCacheStorage.storageFolderName);
});
test("deleteObject", async () => {
test('deleteObject', async () => {
let list;
list = await storage.listObjects(ctx, testDir, specialDir);
expect(list.sort()).toEqual([testFile1, testFile2, testFile3, testFile4].sort());
@ -324,7 +324,7 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
list = await storage.listObjects(ctx, testDir, specialDir);
expect(list.sort()).toEqual([testFile2, testFile3, testFile4].sort());
});
test("deletePath", async () => {
test('deletePath', async () => {
let list;
list = await storage.listObjects(ctx, testDir, specialDir);
expect(list.sort()).toEqual([testFile2, testFile3, testFile4].sort());
@ -357,7 +357,7 @@ describe('storage forgotten dir with tenants', function () {
});
describe('storage mix common and forgotten dir', function () {
test("putObject", async () => {
test('putObject', async () => {
tenantManager.setMultitenantMode(false);
let buffer = Buffer.from(testFileData1);
@ -373,7 +373,7 @@ describe('storage mix common and forgotten dir', function () {
expect(list.sort()).toEqual([testFile2].sort());
});
test("copyPath", async () => {
test('copyPath', async () => {
let list, res;
res = await storage.copyPath(ctx, testDir, testDir, specialDirCache, specialDirForgotten);
expect(res).toEqual(undefined);
@ -381,7 +381,7 @@ describe('storage mix common and forgotten dir', function () {
list = await storage.listObjects(ctx, testDir, specialDirForgotten);
expect(list.sort()).toEqual([testFile1, testFile2].sort());
});
test("copyObject", async () => {
test('copyObject', async () => {
let list, res;
res = await storage.copyObject(ctx, testFile2, testFile2, specialDirForgotten, specialDirCache);
expect(res).toEqual(undefined);
@ -390,7 +390,7 @@ describe('storage mix common and forgotten dir', function () {
expect(list.sort()).toEqual([testFile1, testFile2].sort());
});
test("deletePath", async () => {
test('deletePath', async () => {
let list, res;
res = await storage.deletePath(ctx, testDir, specialDirCache);
expect(res).toEqual(undefined);

View File

@ -68,7 +68,7 @@ module.exports = {
// ],
// Indicates which provider should be used to instrument code for coverage
coverageProvider: "v8",
coverageProvider: 'v8',
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
@ -126,7 +126,7 @@ module.exports = {
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
moduleNameMapper: {
'^axios$': '../../Common/node_modules/axios/dist/node/axios.cjs',
'^axios$': '../../Common/node_modules/axios/dist/node/axios.cjs'
},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@ -190,9 +190,7 @@ module.exports = {
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
testMatch: [
"**/?(*.)+(spec|tests).[tj]s?(x)"
],
testMatch: ['**/?(*.)+(spec|tests).[tj]s?(x)']
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testPathIgnorePatterns: [

View File

@ -32,11 +32,7 @@
'use strict';
const {
createHistogram,
performance,
PerformanceObserver,
} = require('node:perf_hooks');
const {createHistogram, performance, PerformanceObserver} = require('node:perf_hooks');
const co = require('co');
const taskResult = require('./../../DocService/sources/taskresult');
@ -44,8 +40,8 @@ const storage = require('./../../Common/sources/storage/storage-base');
const storageFs = require('./../../Common/sources/storage/storage-fs');
const operationContext = require('./../../Common/sources/operationContext');
const utils = require('./../../Common/sources/utils');
const docsCoServer = require("./../../DocService/sources/DocsCoServer");
const gc = require("./../../DocService/sources/gc");
const docsCoServer = require('./../../DocService/sources/DocsCoServer');
const gc = require('./../../DocService/sources/gc');
let ctx = operationContext.global;
@ -61,25 +57,25 @@ async function beforeStart() {
let histogram = createHistogram();
histograms[func.name] = histogram;
return performance.timerify(func, {histogram: histogram});
}
};
addRandomKeyTask = timerify(co.wrap(taskResult.addRandomKeyTask), "addRandomKeyTask");
taskResult.getExpired = timerify(taskResult.getExpired, "getExpired");
taskResult.remove = timerify(taskResult.remove, "remove");
storage.putObject = timerify(storage.putObject, "putObject");
storage.listObjects = timerify(storage.listObjects, "listObjects");
storageFs.deletePath = timerify(storageFs.deletePath, "deletePath");
storageFs.deleteObject = timerify(storageFs.deleteObject, "deleteObject");
docsCoServer.getEditorsCountPromise = timerify(docsCoServer.getEditorsCountPromise, "getEditorsCountPromise");
addRandomKeyTask = timerify(co.wrap(taskResult.addRandomKeyTask), 'addRandomKeyTask');
taskResult.getExpired = timerify(taskResult.getExpired, 'getExpired');
taskResult.remove = timerify(taskResult.remove, 'remove');
storage.putObject = timerify(storage.putObject, 'putObject');
storage.listObjects = timerify(storage.listObjects, 'listObjects');
storageFs.deletePath = timerify(storageFs.deletePath, 'deletePath');
storageFs.deleteObject = timerify(storageFs.deleteObject, 'deleteObject');
docsCoServer.getEditorsCountPromise = timerify(docsCoServer.getEditorsCountPromise, 'getEditorsCountPromise');
const obs = new PerformanceObserver((list) => {
const obs = new PerformanceObserver(list => {
const entries = list.getEntries();
entries.forEach((entry) => {
entries.forEach(entry => {
let duration = Math.round(entry.duration * 1000) / 1000;
console.log(`${entry.name}:${duration}ms`);
});
});
obs.observe({ entryTypes: ['function']});
obs.observe({entryTypes: ['function']});
await docsCoServer.editorData.connect();
}
@ -91,7 +87,7 @@ async function beforeEnd() {
let max = Math.round(histogram.max / 1000) / 1000;
let count = histogram.count;
ctx.logger.info(`histogram ${name}: count=${count}, mean=${mean}ms, min=${min}ms, max=${max}ms`);
}
};
await utils.sleep(1000);
for (let name in histograms) {
logHistogram(histograms[name], name);
@ -116,7 +112,7 @@ async function startTest() {
ctx.logger.error('missing arguments.USAGE: checkFileExpire.js [add-files-count] [file-size-bytes] [key-prefix] [seconds-to-expire]');
return;
}
ctx.logger.info("test started");
ctx.logger.info('test started');
await beforeStart();
await addFileExpire(parseInt(args[0]), parseInt(args[1]), args[2], parseInt(args[4] || 1));
@ -125,14 +121,17 @@ async function startTest() {
await gc.checkFileExpire(args[3]);
await beforeEnd();
ctx.logger.info("test finished");
ctx.logger.info('test finished');
}
startTest().then(()=>{
//delay to log observer events
return utils.sleep(1000);
}).catch((err) => {
ctx.logger.error(err.stack);
}).finally(() => {
process.exit(0);
});
startTest()
.then(() => {
//delay to log observer events
return utils.sleep(1000);
})
.catch(err => {
ctx.logger.error(err.stack);
})
.finally(() => {
process.exit(0);
});

View File

@ -32,18 +32,14 @@
'use strict';
const {
createHistogram,
performance,
PerformanceObserver,
} = require('node:perf_hooks');
const {createHistogram, performance, PerformanceObserver} = require('node:perf_hooks');
const { readdir, mkdir, readFile, writeFile } = require("node:fs/promises");
const path = require("path");
const {readdir, mkdir, readFile, writeFile} = require('node:fs/promises');
const path = require('path');
// const Jimp = require('Jimp');
const utils = require('./../../Common/sources/utils');
const operationContext = require('./../../Common/sources/operationContext');
const utilsDocService = require("./../../DocService/sources/utilsDocService");
const utilsDocService = require('./../../DocService/sources/utilsDocService');
let ctx = operationContext.global;
@ -54,18 +50,18 @@ async function beforeStart() {
let histogram = createHistogram();
histograms[func.name] = histogram;
return performance.timerify(func, {histogram: histogram});
}
};
utilsDocService.convertImageToPng = timerify(utilsDocService.convertImageToPng);
// Jimp.read = timerify(Jimp.read);
const obs = new PerformanceObserver((list) => {
const obs = new PerformanceObserver(list => {
const entries = list.getEntries();
entries.forEach((entry) => {
entries.forEach(entry => {
let duration = Math.round(entry.duration * 1000) / 1000;
console.log(`${entry.name}:${duration}ms`);
});
});
obs.observe({ entryTypes: ['function']});
obs.observe({entryTypes: ['function']});
}
async function beforeEnd() {
@ -75,7 +71,7 @@ async function beforeEnd() {
let max = Math.round(histogram.max / 1000) / 1000;
let count = histogram.count;
ctx.logger.info(`histogram ${name}: count=${count}, mean=${mean}ms, min=${min}ms, max=${max}ms`);
}
};
await utils.sleep(1000);
for (let name in histograms) {
logHistogram(histograms[name], name);
@ -83,13 +79,13 @@ async function beforeEnd() {
}
async function fixInDir(dirIn, dirOut) {
ctx.logger.info("dirIn:%s", dirIn);
ctx.logger.info("dirOut:%s", dirOut);
let dirents = await readdir(dirIn, {withFileTypes : true, recursive: true});
ctx.logger.info('dirIn:%s', dirIn);
ctx.logger.info('dirOut:%s', dirOut);
let dirents = await readdir(dirIn, {withFileTypes: true, recursive: true});
for (let dirent of dirents) {
if (dirent.isFile()) {
let file = dirent.name;
ctx.logger.info("fixInDir:%s", file);
ctx.logger.info('fixInDir:%s', file);
let buffer = await readFile(path.join(dirent.path, file));
let bufferNew = await utilsDocService.convertImageToPng(ctx, buffer);
if (buffer !== bufferNew) {
@ -107,21 +103,23 @@ async function startTest() {
ctx.logger.error('missing arguments.USAGE: convertImageToPng.js "dirIn" "dirOut"');
return;
}
ctx.logger.info("test started");
ctx.logger.info('test started');
await beforeStart();
await fixInDir(args[0], args[1]);
await beforeEnd();
ctx.logger.info("test finished");
ctx.logger.info('test finished');
}
startTest().then(()=>{
//delay to log observer events
return utils.sleep(1000);
}).catch((err) => {
ctx.logger.error(err.stack);
}).finally(() => {
process.exit(0);
});
startTest()
.then(() => {
//delay to log observer events
return utils.sleep(1000);
})
.catch(err => {
ctx.logger.error(err.stack);
})
.finally(() => {
process.exit(0);
});

View File

@ -32,18 +32,14 @@
'use strict';
const {
createHistogram,
performance,
PerformanceObserver,
} = require('node:perf_hooks');
const {createHistogram, performance, PerformanceObserver} = require('node:perf_hooks');
const { readdir, mkdir, readFile, writeFile } = require("node:fs/promises");
const path = require("path");
const {readdir, mkdir, readFile, writeFile} = require('node:fs/promises');
const path = require('path');
// const Jimp = require('Jimp');
const utils = require('./../../Common/sources/utils');
const operationContext = require('./../../Common/sources/operationContext');
const utilsDocService = require("./../../DocService/sources/utilsDocService");
const utilsDocService = require('./../../DocService/sources/utilsDocService');
let ctx = operationContext.global;
@ -54,18 +50,18 @@ async function beforeStart() {
let histogram = createHistogram();
histograms[func.name] = histogram;
return performance.timerify(func, {histogram: histogram});
}
};
utilsDocService.fixImageExifRotation = timerify(utilsDocService.fixImageExifRotation);
// Jimp.read = timerify(Jimp.read);
const obs = new PerformanceObserver((list) => {
const obs = new PerformanceObserver(list => {
const entries = list.getEntries();
entries.forEach((entry) => {
entries.forEach(entry => {
let duration = Math.round(entry.duration * 1000) / 1000;
console.log(`${entry.name}:${duration}ms`);
});
});
obs.observe({ entryTypes: ['function']});
obs.observe({entryTypes: ['function']});
}
async function beforeEnd() {
@ -75,7 +71,7 @@ async function beforeEnd() {
let max = Math.round(histogram.max / 1000) / 1000;
let count = histogram.count;
ctx.logger.info(`histogram ${name}: count=${count}, mean=${mean}ms, min=${min}ms, max=${max}ms`);
}
};
await utils.sleep(1000);
for (let name in histograms) {
logHistogram(histograms[name], name);
@ -83,13 +79,13 @@ async function beforeEnd() {
}
async function fixInDir(dirIn, dirOut) {
ctx.logger.info("dirIn:%s", dirIn);
ctx.logger.info("dirOut:%s", dirOut);
let dirents = await readdir(dirIn, {withFileTypes : true, recursive: true});
ctx.logger.info('dirIn:%s', dirIn);
ctx.logger.info('dirOut:%s', dirOut);
let dirents = await readdir(dirIn, {withFileTypes: true, recursive: true});
for (let dirent of dirents) {
if (dirent.isFile()) {
let file = dirent.name;
ctx.logger.info("fixInDir:%s", file);
ctx.logger.info('fixInDir:%s', file);
let buffer = await readFile(path.join(dirent.path, file));
let bufferNew = await utilsDocService.fixImageExifRotation(ctx, buffer);
if (buffer !== bufferNew) {
@ -107,21 +103,23 @@ async function startTest() {
ctx.logger.error('missing arguments.USAGE: fixImageExifRotation.js "dirIn" "dirOut"');
return;
}
ctx.logger.info("test started");
ctx.logger.info('test started');
await beforeStart();
await fixInDir(args[0], args[1]);
await beforeEnd();
ctx.logger.info("test finished");
ctx.logger.info('test finished');
}
startTest().then(()=>{
//delay to log observer events
return utils.sleep(1000);
}).catch((err) => {
ctx.logger.error(err.stack);
}).finally(() => {
process.exit(0);
});
startTest()
.then(() => {
//delay to log observer events
return utils.sleep(1000);
})
.catch(err => {
ctx.logger.error(err.stack);
})
.finally(() => {
process.exit(0);
});

View File

@ -1,4 +1,4 @@
const { describe, test, expect, afterAll } = require('@jest/globals');
const {describe, test, expect, afterAll} = require('@jest/globals');
const nodemailer = require('../../Common/node_modules/nodemailer');
const operationContext = require('../../Common/sources/operationContext');
@ -13,40 +13,46 @@ const testTimeout = 1000 * 10;
afterAll(function () {
mailService.transportersRelease();
})
});
describe('Mail service', function () {
describe('SMTP', function () {
const { host, port } = defaultTestSMTPServer;
const {host, port} = defaultTestSMTPServer;
test('Transporters life cycle', async function () {
// Accounts created at https://ethereal.email/, all messages in tests goes here: https://ethereal.email/messages
// Ethereial is a special SMTP sever for mailing tests in collaboration with Nodemailer.
const accounts = await Promise.all([nodemailer.createTestAccount(), nodemailer.createTestAccount(), nodemailer.createTestAccount()]);
const auth = accounts.map(account => { return { user: account.user, pass: account.pass }});
auth.forEach(credential => mailService.createTransporter(ctx, host, port, credential, { from: 'some.mail@ethereal.com' }));
test(
'Transporters life cycle',
async function () {
// Accounts created at https://ethereal.email/, all messages in tests goes here: https://ethereal.email/messages
// Ethereial is a special SMTP sever for mailing tests in collaboration with Nodemailer.
const accounts = await Promise.all([nodemailer.createTestAccount(), nodemailer.createTestAccount(), nodemailer.createTestAccount()]);
const auth = accounts.map(account => {
return {user: account.user, pass: account.pass};
});
auth.forEach(credential => mailService.createTransporter(ctx, host, port, credential, {from: 'some.mail@ethereal.com'}));
for (let i = 0; i < auth.length; i++) {
const credentials = auth[i];
const mail = await mailService.send(
host,
credentials.user,
{ to: `some.recipient@server${i + 1}.com`, text: 'simple test text', subject: 'Mail service test' }
);
for (let i = 0; i < auth.length; i++) {
const credentials = auth[i];
const mail = await mailService.send(host, credentials.user, {
to: `some.recipient@server${i + 1}.com`,
text: 'simple test text',
subject: 'Mail service test'
});
expect(mail.envelope).toEqual({ from: 'some.mail@ethereal.com', to: [`some.recipient@server${i + 1}.com`] });
}
expect(mail.envelope).toEqual({from: 'some.mail@ethereal.com', to: [`some.recipient@server${i + 1}.com`]});
}
const accountToBeDeleted = auth[1];
mailService.deleteTransporter(ctx, host, accountToBeDeleted.user);
const accountToBeDeleted = auth[1];
mailService.deleteTransporter(ctx, host, accountToBeDeleted.user);
const errorPromise = mailService.send(
host,
accountToBeDeleted.user,
{ to: 'no.recipient@server.com', text: 'simple test text', subject: 'Mail service test' }
);
const errorPromise = mailService.send(host, accountToBeDeleted.user, {
to: 'no.recipient@server.com',
text: 'simple test text',
subject: 'Mail service test'
});
await expect(errorPromise).rejects.toThrow();
}, testTimeout);
await expect(errorPromise).rejects.toThrow();
},
testTimeout
);
});
});

File diff suppressed because it is too large

View File

@ -6,27 +6,24 @@ const GOOD_PORT_REDIRECT = 4667;
const BAD_PORT = 4669;
process.env['NODE_CONFIG'] = JSON.stringify({
"services": {
"CoAuthoring": {
"request-filtering-agent": {
"allowPrivateIPAddress": false,
"allowMetaIPAddress": false,
"allowIPAddressList": [
GOOD_HOST
]
services: {
CoAuthoring: {
'request-filtering-agent': {
allowPrivateIPAddress: false,
allowMetaIPAddress: false,
allowIPAddressList: [GOOD_HOST]
}
}
}
});
// Required modules
const { describe, test, expect, beforeAll, afterAll, it, jest } = require('@jest/globals');
const {describe, test, expect, beforeAll, afterAll, it, jest} = require('@jest/globals');
const http = require('http');
const operationContext = require('../../Common/sources/operationContext');
const utils = require('../../Common/sources/utils');
// Common test parameters
const commonTestParams = {
uri: `http://${GOOD_HOST}:${GOOD_PORT}`,
@ -34,83 +31,76 @@ const commonTestParams = {
limit: 1024 * 1024, // 1MB
authorization: 'Bearer token123',
filterPrivate: true,
headers: { 'Accept': 'application/json' }
headers: {Accept: 'application/json'}
};
const ctx = operationContext.global;
describe('Server-Side Request Forgery (SSRF)', () => {
let goodServer, goodServerRedirect, badServer;
let goodServer, goodServerRedirect, badServer;
beforeAll(() => {
goodServer = http.createServer(function (req, res) {
res.write('good');
res.end();
}).listen(GOOD_PORT);
beforeAll(() => {
goodServer = http
.createServer(function (req, res) {
res.write('good');
res.end();
})
.listen(GOOD_PORT);
goodServerRedirect = http.createServer(function (req, res) {
console.log(`Received request for: ${req.url}`);
goodServerRedirect = http
.createServer(function (req, res) {
console.log(`Received request for: ${req.url}`);
// Set redirect status code (301 for permanent redirect, 302 for temporary)
res.statusCode = 302;
// Set the Location header to the redirect destination
res.setHeader('Location', `http://${BAD_HOST}:${BAD_PORT}`);
// You can add other headers if needed
res.setHeader('Content-Type', 'text/plain');
// Send a brief message in the body (optional)
res.end(`Redirecting to http://${BAD_HOST}:${BAD_PORT}`);
}).listen(GOOD_PORT_REDIRECT);
// Set redirect status code (301 for permanent redirect, 302 for temporary)
res.statusCode = 302;
badServer = http.createServer(function (req, res) {
res.write('bad');
res.end();
}).listen(BAD_PORT);
})
// Set the Location header to the redirect destination
res.setHeader('Location', `http://${BAD_HOST}:${BAD_PORT}`);
afterAll(() => {
goodServer.close();
goodServerRedirect.close();
badServer.close();
});
// You can add other headers if needed
res.setHeader('Content-Type', 'text/plain');
it('should fetch', async () => {
const result = await utils.downloadUrlPromise(
ctx,
`http://${GOOD_HOST}:${GOOD_PORT}`,
commonTestParams.timeout,
commonTestParams.limit,
null,
false,
null
);
// Send a brief message in the body (optional)
res.end(`Redirecting to http://${BAD_HOST}:${BAD_PORT}`);
})
.listen(GOOD_PORT_REDIRECT);
expect(result.body.toString()).toBe('good');
});
badServer = http
.createServer(function (req, res) {
res.write('bad');
res.end();
})
.listen(BAD_PORT);
});
it('should not fetch: denied ip', async () => {
await expect(utils.downloadUrlPromise(
ctx,
`http://${BAD_HOST}:${BAD_PORT}`,
commonTestParams.timeout,
commonTestParams.limit,
null,
false,
null
)).rejects.toThrow();
});
afterAll(() => {
goodServer.close();
goodServerRedirect.close();
badServer.close();
});
it('should not fetch: redirect to denied ip', async () => {
await expect(utils.downloadUrlPromise(
ctx,
`http://${GOOD_HOST}:${GOOD_PORT_REDIRECT}`,
commonTestParams.timeout,
commonTestParams.limit,
null,
false,
null
)).rejects.toThrow();
});
});
it('should fetch', async () => {
const result = await utils.downloadUrlPromise(
ctx,
`http://${GOOD_HOST}:${GOOD_PORT}`,
commonTestParams.timeout,
commonTestParams.limit,
null,
false,
null
);
expect(result.body.toString()).toBe('good');
});
it('should not fetch: denied ip', async () => {
await expect(
utils.downloadUrlPromise(ctx, `http://${BAD_HOST}:${BAD_PORT}`, commonTestParams.timeout, commonTestParams.limit, null, false, null)
).rejects.toThrow();
});
it('should not fetch: redirect to denied ip', async () => {
await expect(
utils.downloadUrlPromise(ctx, `http://${GOOD_HOST}:${GOOD_PORT_REDIRECT}`, commonTestParams.timeout, commonTestParams.limit, null, false, null)
).rejects.toThrow();
});
});

View File

@ -30,7 +30,7 @@
*
*/
const { describe, test, expect } = require('@jest/globals');
const {describe, test, expect} = require('@jest/globals');
describe('Successful and failure tests', function () {
test('Successful test', function () {
@ -40,4 +40,4 @@ describe('Successful and failure tests', function () {
test.skip('Failure test', function () {
expect(true).toBeFalsy();
});
});
});

View File

@ -30,7 +30,7 @@
*
*/
const { describe, test, expect } = require('@jest/globals');
const {describe, test, expect} = require('@jest/globals');
const config = require('../../Common/node_modules/config');
const operationContext = require('../../Common/sources/operationContext');
@ -39,12 +39,11 @@ const utils = require('../../Common/sources/utils');
const ctx = new operationContext.Context();
const minimumIterationsByteLength = 4;
describe('AES encryption & decryption', function () {
test('Iterations range', async function () {
const configuration = config.util.cloneDeep(config.get('aesEncrypt.config'));
const encrypted = await utils.encryptPassword(ctx, 'secretstring');
const { iterationsByteLength = 5 } = configuration;
const {iterationsByteLength = 5} = configuration;
const [iterationsHex] = encrypted.split(':');
const iterations = parseInt(iterationsHex, 16);
@ -55,8 +54,8 @@ describe('AES encryption & decryption', function () {
});
test('Correct workflow', async function () {
const encrypted = await utils.encryptPassword(ctx, 'secretstring');
const decrypted = await utils.decryptPassword(ctx, encrypted);
expect(decrypted).toEqual('secretstring');
const encrypted = await utils.encryptPassword(ctx, 'secretstring');
const decrypted = await utils.decryptPassword(ctx, encrypted);
expect(decrypted).toEqual('secretstring');
});
});