Mirror of https://github.com/ONLYOFFICE/server.git (synced 2026-02-10 18:05:07 +08:00)
[linter] Include tests; auto-fix
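The hunks below are pure linter auto-fixes: callback function expressions become arrow functions, let declarations that are never reassigned become const, object properties and methods switch to shorthand, and a redundant bare return is dropped. The project's actual linter configuration is not part of this diff; as a rough sketch under that caveat, an ESLint setup along these lines would produce the same fixes (file name, rule levels, and the --fix invocation are assumptions, not taken from the repository):

// eslint.config.js -- hypothetical sketch, not the repository's real config
module.exports = [
  {
    files: ['tests/**/*.js'], // "Include tests", per the commit message
    rules: {
      'prefer-const': 'error',          // let -> const when a binding is never reassigned
      'prefer-arrow-callback': 'error', // function (err, res) {...} callbacks -> (err, res) => {...}
      'object-shorthand': 'error',      // {histogram: histogram} -> {histogram}, foo: function () {} -> foo() {}
      'no-useless-return': 'error'      // removes a bare return; at the end of a handler
    }
  }
];
// Applied with something like: npx eslint --fix tests/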
@@ -66,10 +66,10 @@ const dbTypes = {
 number: 'INT',
 string: 'VARCHAR(50)'
 },
-number: function () {
+number () {
 return this[cfgDbType].number;
 },
-string: function () {
+string () {
 return this[cfgDbType].string;
 }
 };
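The dbTypes helper edited above is a small config-driven type lookup. To show the shape the hunk only partially reveals, here is a hypothetical minimal reconstruction (the mysql key and the cfgDbType value are assumptions based on the visible fragment, not the repository's exact code):

// Hypothetical reconstruction of the dbTypes helper, for orientation only.
const cfgDbType = 'mysql'; // assumed: derived from the SQL type configured for the tests
const dbTypes = {
  mysql: {
    number: 'INT',
    string: 'VARCHAR(50)'
  },
  number () {
    return this[cfgDbType].number;
  },
  string () {
    return this[cfgDbType].string;
  }
};
// Used later in the tests, e.g. CREATE TABLE test_table(num ${dbTypes.number()});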
@@ -147,7 +147,7 @@ function executeSql(sql, values = []) {
 baseConnector.sqlQuery(
 ctx,
 sql,
-function (error, result) {
+(error, result) => {
 if (error) {
 reject(error);
 } else {
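The hunk above edits a small promise wrapper around baseConnector.sqlQuery used by these tests; the hunk cuts off inside the error branch, so the sketch below of the full helper is a reconstruction, with the resolve branch and the handling of the values argument assumed:

// Hypothetical shape of the executeSql() test helper after the auto-fix.
function executeSql(sql, values = []) {
  return new Promise((resolve, reject) => {
    baseConnector.sqlQuery(
      ctx,
      sql,
      (error, result) => {
        if (error) {
          reject(error);
        } else {
          resolve(result); // assumed: resolve with the query result
        }
      }
      // assumed: values are forwarded via additional sqlQuery arguments not visible in the hunk
    );
  });
}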
@@ -199,7 +199,7 @@ function insertIntoResultTable(dateNow, task) {
 return executeSql(`INSERT INTO ${cfgTableResult}(${columns.join(', ')}) VALUES(${placeholder.join(', ')});`, values);
 }

-afterAll(async function () {
+afterAll(async () => {
 const insertIds = Object.values(insertCases);
 const changesIds = Object.values(changesCases);
 const upsertIds = Object.values(upsertCases);
@@ -218,14 +218,14 @@ afterAll(async function () {
 });

 // Assumed that at least the default DB was installed and configured.
-describe('Base database connector', function () {
-test('Availability of configured DB', async function () {
+describe('Base database connector', () => {
+test('Availability of configured DB', async () => {
 const result = await baseConnector.healthCheck(ctx);

 expect(result.length).toEqual(1);
 });

-test('Correct return format of requested rows', async function () {
+test('Correct return format of requested rows', async () => {
 const result = await baseConnector.healthCheck(ctx);

 // The [[constructor]] field is referring to a parent class instance, so for Object-like values it is equal to itself.
@@ -238,7 +238,7 @@ describe('Base database connector', function () {
 expect(Object.values(result[0])[0]).toEqual(1);
 });

-test('Correct return format of changing in DB', async function () {
+test('Correct return format of changing in DB', async () => {
 const createTableSql = `CREATE TABLE test_table(num ${dbTypes.number()});`;
 const alterTableSql = `INSERT INTO test_table VALUES(1);`;

@@ -248,7 +248,7 @@ describe('Base database connector', function () {
 expect(result).toEqual({affectedRows: 1});
 });

-describe('DB tables existence', function () {
+describe('DB tables existence', () => {
 const tables = {
 [cfgTableResult]: constants.TABLE_RESULT_SCHEMA.map(column => {
 return {column_name: column};
@@ -259,7 +259,7 @@ describe('Base database connector', function () {
 };

 for (const table in tables) {
-test(`${table} table existence`, async function () {
+test(`${table} table existence`, async () => {
 const result = await baseConnector.getTableColumns(ctx, table);
 for (const row of tables[table]) {
 expect(result).toContainEqual(row);
@@ -268,13 +268,13 @@ describe('Base database connector', function () {
 }

 const table = 'unused_table';
-test(`${table} table absence`, async function () {
+test(`${table} table absence`, async () => {
 const result = await baseConnector.getTableColumns(ctx, table);
 expect(result).toEqual([]);
 });
 });

-describe('Changes manipulations', function () {
+describe('Changes manipulations', () => {
 const date = new Date();
 const index = 0;
 const user = {
@@ -285,9 +285,9 @@ describe('Base database connector', function () {
 view: false
 };

-describe('Add changes', function () {
+describe('Add changes', () => {
 for (const testCase in insertCases) {
-test(`${testCase} rows inserted`, async function () {
+test(`${testCase} rows inserted`, async () => {
 const docId = insertCases[testCase];
 const objChanges = createChanges(+testCase, date);

@@ -301,11 +301,11 @@ describe('Base database connector', function () {
 }
 });

-describe('Get and delete changes', function () {
+describe('Get and delete changes', () => {
 const changesCount = 10;
 const objChanges = createChanges(changesCount, date);

-test('Get changes in range', async function () {
+test('Get changes in range', async () => {
 const docId = changesCases.range;
 const additionalChangesCount = 5;
 const dayBefore = new Date();
@@ -325,7 +325,7 @@ describe('Base database connector', function () {
 expect(resultByDate.length).toEqual(additionalChangesCount);
 });

-test('Get changes index', async function () {
+test('Get changes index', async () => {
 const docId = changesCases.index;

 await noRowsExistenceCheck(cfgTableChanges, docId);
@@ -339,7 +339,7 @@ describe('Base database connector', function () {
 expect(result).toEqual(expected);
 });

-test('Delete changes', async function () {
+test('Delete changes', async () => {
 const docId = changesCases.delete;

 await baseConnector.insertChangesPromise(ctx, objChanges, docId, index, user);
@@ -354,7 +354,7 @@ describe('Base database connector', function () {
 });
 });

-test('Get empty callbacks', async function () {
+test('Get empty callbacks', async () => {
 const idCount = 5;
 const notNullCallbacks = idCount - 2;

@@ -383,7 +383,7 @@ describe('Base database connector', function () {
 expect(resultAfter.length).toEqual(resultBefore.length + idCount - notNullCallbacks);
 });

-test('Get documents with changes', async function () {
+test('Get documents with changes', async () => {
 const objChanges = createChanges(1, date);

 const resultBeforeNewRows = await baseConnector.getDocumentsWithChanges(ctx);
@@ -397,7 +397,7 @@ describe('Base database connector', function () {
 expect(resultAfterNewRows.length).toEqual(resultBeforeNewRows.length + documentsWithChangesCase.length);
 });

-test('Get expired', async function () {
+test('Get expired', async () => {
 const maxCount = 100;
 const dayBefore = new Date();
 dayBefore.setDate(dayBefore.getDate() - 1);
@@ -415,10 +415,10 @@ describe('Base database connector', function () {
 expect(resultAfterNewRows.length).toEqual(resultBeforeNewRows.length + getExpiredCase.length);
 });

-test('Get Count With Status', async function () {
+test('Get Count With Status', async () => {
 let countWithStatus;
-let unknownStatus = 99; //to avoid collision with running server
-let EXEC_TIMEOUT = 30000 + utils.getConvertionTimeout(undefined);
+const unknownStatus = 99; //to avoid collision with running server
+const EXEC_TIMEOUT = 30000 + utils.getConvertionTimeout(undefined);
 countWithStatus = await baseConnector.getCountWithStatus(ctx, unknownStatus, EXEC_TIMEOUT);
 expect(countWithStatus).toEqual(0);
 for (const id of getCountWithStatusCase) {
@@ -431,8 +431,8 @@ describe('Base database connector', function () {
 });
 });

-describe('upsert() method', function () {
-test('New row inserted', async function () {
+describe('upsert() method', () => {
+test('New row inserted', async () => {
 const task = createTask(upsertCases.insert);

 await noRowsExistenceCheck(cfgTableResult, task.key);
@@ -448,7 +448,7 @@ describe('Base database connector', function () {
 expect(insertedResult).toEqual(1);
 });

-test('Row updated', async function () {
+test('Row updated', async () => {
 const task = createTask(upsertCases.update, '', 'some-url');

 await noRowsExistenceCheck(cfgTableResult, task.key);
@@ -111,14 +111,14 @@ function getKeysDirectories(keys) {
 return keys.map(value => value.split('/')[0]);
 }

-beforeAll(async function () {
+beforeAll(async () => {
 const buffer = Buffer.from('Forgotten commands test file');
 for (const index in testFilesNames) {
 await storage.putObject(ctx, `${testFilesNames[index]}/${cfgForgottenFilesName}.docx`, buffer, buffer.length, cfgForgottenFiles);
 }
 });

-afterAll(async function () {
+afterAll(async () => {
 const keys = await storage.listObjects(ctx, '', cfgForgottenFiles);
 const keysDirectories = getKeysDirectories(keys);
 const deletePromises = keysDirectories
@@ -130,9 +130,9 @@ afterAll(async function () {
 });

 // Assumed that the server is already up.
-describe('Command service', function () {
-describe('Forgotten files commands parameters validation', function () {
-describe('Invalid key format', function () {
+describe('Command service', () => {
+describe('Forgotten files commands parameters validation', () => {
+describe('Invalid key format', () => {
 const tests = ['getForgotten', 'deleteForgotten'];
 const addSpecialCases = (invalidRequests, expected, testSubject) => {
 invalidRequests.push({
@@ -151,7 +151,7 @@ describe('Command service', function () {
 };

 for (const testSubject of tests) {
-test(testSubject, async function () {
+test(testSubject, async () => {
 const invalidKeys = [true, [], {}, 1, 1.1];
 const invalidRequests = invalidKeys.map(key => {
 return {
@@ -180,8 +180,8 @@ describe('Command service', function () {
 });
 });

-describe('Forgotten files commands verification', function () {
-describe('getForgotten', function () {
+describe('Forgotten files commands verification', () => {
+describe('getForgotten', () => {
 const createExpected = ({key, error}) => {
 const validKey = typeof key === 'string' && error === 0;
 let urlPattern;
@@ -245,7 +245,7 @@ describe('Command service', function () {
 }
 });

-describe('deleteForgotten', function () {
+describe('deleteForgotten', () => {
 const createExpected = ({key, error}) => {
 return {
 key,
@@ -282,7 +282,7 @@ describe('Command service', function () {
 }
 });

-describe('getForgottenList', function () {
+describe('getForgottenList', () => {
 test('Main case', async () => {
 const requestBody = {
 c: 'getForgottenList'
@@ -49,10 +49,10 @@ const https = require('https');
 const fs = require('fs');
 const {Readable} = require('stream');

-let testFileData1 = 'test1';
-let testFileData2 = 'test22';
-let testFileData3 = 'test333';
-let testFileData4 = testFileData3;
+const testFileData1 = 'test1';
+const testFileData2 = 'test22';
+const testFileData3 = 'test333';
+const testFileData4 = testFileData3;

 const express = require('express');
 const operationContext = require('../../../Common/sources/operationContext');
@@ -72,12 +72,12 @@ const rand = Math.floor(Math.random() * 1000000);
 const testDir = 'DocService-DocsCoServer-storage-' + rand;
 const baseUrl = `http://localhost:${PORT}`;
 const urlType = commonDefines.c_oAscUrlTypes.Session;
-let testFile1 = testDir + '/test1.txt';
-let testFile2 = testDir + '/test2.txt';
-let testFile3 = testDir + '/test3.txt';
-let testFile4 = testDir + '/test4.txt';
-let specialDirCache = '';
-let specialDirForgotten = 'forgotten';
+const testFile1 = testDir + '/test1.txt';
+const testFile2 = testDir + '/test2.txt';
+const testFile3 = testDir + '/test3.txt';
+const testFile4 = testDir + '/test4.txt';
+const specialDirCache = '';
+const specialDirForgotten = 'forgotten';

 console.debug(`testDir: ${testDir}`);

@@ -104,7 +104,7 @@ function getStorageCfg(specialDir) {

 function request(url) {
 return new Promise((resolve, reject) => {
-let module = url.startsWith('https') ? https : http;
+const module = url.startsWith('https') ? https : http;
 const req = module.get(url, response => {
 let data = '';
 response.on('data', _data => (data += _data));
@@ -116,35 +116,35 @@ function request(url) {
 });
 }
 function runTestForDir(ctx, isMultitenantMode, specialDir) {
-let oldMultitenantMode = tenantManager.isMultitenantMode();
+const oldMultitenantMode = tenantManager.isMultitenantMode();
 test('start listObjects', async () => {
 //todo set in all tests do not rely on test order
 tenantManager.setMultitenantMode(isMultitenantMode);

-let list = await storage.listObjects(ctx, testDir, specialDir);
+const list = await storage.listObjects(ctx, testDir, specialDir);
 expect(list).toEqual([]);
 });
 test('putObject', async () => {
-let buffer = Buffer.from(testFileData1);
-let res = await storage.putObject(ctx, testFile1, buffer, buffer.length, specialDir);
+const buffer = Buffer.from(testFileData1);
+const res = await storage.putObject(ctx, testFile1, buffer, buffer.length, specialDir);
 expect(res).toEqual(undefined);
-let list = await storage.listObjects(ctx, testDir, specialDir);
+const list = await storage.listObjects(ctx, testDir, specialDir);
 expect(list.sort()).toEqual([testFile1].sort());
 });
 test('putObject-stream', async () => {
-let buffer = Buffer.from(testFileData2);
+const buffer = Buffer.from(testFileData2);
 const stream = Readable.from(buffer);
-let res = await storage.putObject(ctx, testFile2, stream, buffer.length, specialDir);
+const res = await storage.putObject(ctx, testFile2, stream, buffer.length, specialDir);
 expect(res).toEqual(undefined);
-let list = await storage.listObjects(ctx, testDir, specialDir);
+const list = await storage.listObjects(ctx, testDir, specialDir);
 expect(list.sort()).toEqual([testFile1, testFile2].sort());
 });
 if ('storage-fs' === getStorageCfg(specialDir).name) {
 test('UploadObject', async () => {
-let res = await storage.uploadObject(ctx, testFile3, 'createReadStream.txt', specialDir);
+const res = await storage.uploadObject(ctx, testFile3, 'createReadStream.txt', specialDir);
 expect(res).toEqual(undefined);
 expect(cp).toHaveBeenCalled();
-let list = await storage.listObjects(ctx, testDir, specialDir);
+const list = await storage.listObjects(ctx, testDir, specialDir);
 expect(list.sort()).toEqual([testFile1, testFile2, testFile3].sort());
 });
 } else {
@@ -152,9 +152,9 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
 const readStream = Readable.from(testFileData3);
 readStream.size = testFileData3.length;
 const spy = jest.spyOn(fs, 'createReadStream').mockReturnValue(readStream);
-let res = await storage.uploadObject(ctx, testFile3, 'createReadStream.txt', specialDir);
+const res = await storage.uploadObject(ctx, testFile3, 'createReadStream.txt', specialDir);
 expect(res).toEqual(undefined);
-let list = await storage.listObjects(ctx, testDir, specialDir);
+const list = await storage.listObjects(ctx, testDir, specialDir);
 expect(spy).toHaveBeenCalled();
 expect(list.sort()).toEqual([testFile1, testFile2, testFile3].sort());
 spy.mockRestore();
@@ -188,11 +188,11 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
 });
 }
 test('copyObject', async () => {
-let res = await storage.copyObject(ctx, testFile3, testFile4, specialDir, specialDir);
+const res = await storage.copyObject(ctx, testFile3, testFile4, specialDir, specialDir);
 expect(res).toEqual(undefined);
 // let buffer = Buffer.from(testFileData3);
 // await storage.putObject(ctx, testFile3, buffer, buffer.length, specialDir);
-let list = await storage.listObjects(ctx, testDir, specialDir);
+const list = await storage.listObjects(ctx, testDir, specialDir);
 expect(list.sort()).toEqual([testFile1, testFile2, testFile3, testFile4].sort());
 });
 test('headObject', async () => {
@@ -263,7 +263,7 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
 let urls, data;
 urls = await storage.getSignedUrls(ctx, baseUrl, testDir, urlType, undefined, specialDir);
 data = [];
-for (let i in urls) {
+for (const i in urls) {
 data.push(await request(urls[i]));
 }
 expect(data.sort()).toEqual([testFileData1, testFileData2, testFileData3, testFileData4].sort());
@@ -281,18 +281,18 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
 let urls, data;
 urls = await storage.getSignedUrlsByArray(ctx, baseUrl, [testFile3, testFile4], undefined, urlType, specialDir);
 data = [];
-for (let i in urls) {
+for (const i in urls) {
 data.push(await request(urls[i]));
 }
 expect(data.sort()).toEqual([testFileData3, testFileData4].sort());
 });
 test('getSignedUrl with direct URLs enabled', async () => {
-let buffer = Buffer.from(testFileData1);
-let res = await storage.putObject(ctx, testFile1, buffer, buffer.length, specialDirCache);
+const buffer = Buffer.from(testFileData1);
+const res = await storage.putObject(ctx, testFile1, buffer, buffer.length, specialDirCache);
 expect(res).toEqual(undefined);

-let url = await storage.getSignedUrl(ctx, baseUrl, testFile1, urlType, undefined, undefined, specialDirCache, true);
-let data = await request(url);
+const url = await storage.getSignedUrl(ctx, baseUrl, testFile1, urlType, undefined, undefined, specialDirCache, true);
+const data = await request(url);
 expect(data).toEqual(testFileData1);

 if (cfgCacheStorage.name !== 'storage-fs') {
@@ -301,12 +301,12 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
 }
 });
 test('getSignedUrl with direct URLs disabled', async () => {
-let buffer = Buffer.from(testFileData1);
-let res = await storage.putObject(ctx, testFile1, buffer, buffer.length, specialDirCache);
+const buffer = Buffer.from(testFileData1);
+const res = await storage.putObject(ctx, testFile1, buffer, buffer.length, specialDirCache);
 expect(res).toEqual(undefined);

-let url = await storage.getSignedUrl(ctx, baseUrl, testFile1, urlType, undefined, undefined, specialDirCache, false);
-let data = await request(url);
+const url = await storage.getSignedUrl(ctx, baseUrl, testFile1, urlType, undefined, undefined, specialDirCache, false);
+const data = await request(url);
 expect(data).toEqual(testFileData1);

 expect(url).toContain('md5');
@@ -318,7 +318,7 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
 list = await storage.listObjects(ctx, testDir, specialDir);
 expect(list.sort()).toEqual([testFile1, testFile2, testFile3, testFile4].sort());

-let res = await storage.deleteObject(ctx, testFile1, specialDir);
+const res = await storage.deleteObject(ctx, testFile1, specialDir);
 expect(res).toEqual(undefined);

 list = await storage.listObjects(ctx, testDir, specialDir);
@@ -329,7 +329,7 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
 list = await storage.listObjects(ctx, testDir, specialDir);
 expect(list.sort()).toEqual([testFile2, testFile3, testFile4].sort());

-let res = await storage.deletePath(ctx, testDir, specialDir);
+const res = await storage.deletePath(ctx, testDir, specialDir);
 expect(res).toEqual(undefined);

 list = await storage.listObjects(ctx, testDir, specialDir);
@@ -340,23 +340,23 @@ function runTestForDir(ctx, isMultitenantMode, specialDir) {
 }

 // Assumed that the server is already up.
-describe('storage common dir', function () {
+describe('storage common dir', () => {
 runTestForDir(ctx, false, specialDirCache);
 });

-describe('storage forgotten dir', function () {
+describe('storage forgotten dir', () => {
 runTestForDir(ctx, false, specialDirForgotten);
 });

-describe('storage common dir with tenants', function () {
+describe('storage common dir with tenants', () => {
 runTestForDir(ctx, true, specialDirCache);
 });

-describe('storage forgotten dir with tenants', function () {
+describe('storage forgotten dir with tenants', () => {
 runTestForDir(ctx, true, specialDirForgotten);
 });

-describe('storage mix common and forgotten dir', function () {
+describe('storage mix common and forgotten dir', () => {
 test('putObject', async () => {
 tenantManager.setMultitenantMode(false);

@@ -43,20 +43,20 @@ const utils = require('./../../Common/sources/utils');
 const docsCoServer = require('./../../DocService/sources/DocsCoServer');
 const gc = require('./../../DocService/sources/gc');

-let ctx = operationContext.global;
+const ctx = operationContext.global;

 let addRandomKeyTask;
-let histograms = {};
+const histograms = {};

 async function beforeStart() {
-let timerify = function (func, name) {
+const timerify = function (func, name) {
 //todo remove anonymous functions. use func.name
 Object.defineProperty(func, 'name', {
 value: name
 });
-let histogram = createHistogram();
+const histogram = createHistogram();
 histograms[func.name] = histogram;
-return performance.timerify(func, {histogram: histogram});
+return performance.timerify(func, {histogram});
 };

 addRandomKeyTask = timerify(co.wrap(taskResult.addRandomKeyTask), 'addRandomKeyTask');
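The perf scripts in the hunks around here all use the same Node perf_hooks pattern: wrap a function with performance.timerify(), record call durations into a histogram, and log per-call entries through a PerformanceObserver. A self-contained sketch of that pattern, independent of the project code (names here are illustrative, not from the repo):

// Standalone perf_hooks example; histogram values are nanoseconds,
// hence the /1000/1000 conversion to milliseconds also used by these scripts.
const {performance, PerformanceObserver, createHistogram} = require('perf_hooks');

async function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

const histogram = createHistogram();
const timedSleep = performance.timerify(sleep, {histogram});

// Fires once per timerified call with the measured duration (milliseconds).
const obs = new PerformanceObserver(list => {
  for (const entry of list.getEntries()) {
    console.log(`${entry.name}: ${Math.round(entry.duration * 1000) / 1000}ms`);
  }
});
obs.observe({entryTypes: ['function']});

timedSleep(50).then(() => {
  console.log(`count=${histogram.count}, mean=${Math.round(histogram.mean / 1000) / 1000}ms`);
  obs.disconnect();
});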
@@ -71,7 +71,7 @@ async function beforeStart() {
 const obs = new PerformanceObserver(list => {
 const entries = list.getEntries();
 entries.forEach(entry => {
-let duration = Math.round(entry.duration * 1000) / 1000;
+const duration = Math.round(entry.duration * 1000) / 1000;
 console.log(`${entry.name}:${duration}ms`);
 });
 });
@@ -81,24 +81,24 @@ async function beforeStart() {
 }

 async function beforeEnd() {
-let logHistogram = function (histogram, name) {
-let mean = Math.round(histogram.mean / 1000) / 1000;
-let min = Math.round(histogram.min / 1000) / 1000;
-let max = Math.round(histogram.max / 1000) / 1000;
-let count = histogram.count;
+const logHistogram = function (histogram, name) {
+const mean = Math.round(histogram.mean / 1000) / 1000;
+const min = Math.round(histogram.min / 1000) / 1000;
+const max = Math.round(histogram.max / 1000) / 1000;
+const count = histogram.count;
 ctx.logger.info(`histogram ${name}: count=${count}, mean=${mean}ms, min=${min}ms, max=${max}ms`);
 };
 await utils.sleep(1000);
-for (let name in histograms) {
+for (const name in histograms) {
 logHistogram(histograms[name], name);
 }
 }

 async function addFileExpire(count, size, prefix, filesInFolder) {
 while (count > 0) {
-let task = await addRandomKeyTask(ctx, undefined, prefix, 8);
-let data = Buffer.alloc(size, 0);
-let rand = Math.floor(Math.random() * filesInFolder) + 1;
+const task = await addRandomKeyTask(ctx, undefined, prefix, 8);
+const data = Buffer.alloc(size, 0);
+const rand = Math.floor(Math.random() * filesInFolder) + 1;
 for (let i = 0; i < rand && count > 0; i++) {
 await storage.putObject(ctx, `${task.key}/data${i}`, data, data.length);
 count--;
@@ -107,7 +107,7 @@ async function addFileExpire(count, size, prefix, filesInFolder) {
 }

 async function startTest() {
-let args = process.argv.slice(2);
+const args = process.argv.slice(2);
 if (args.length < 4) {
 ctx.logger.error('missing arguments.USAGE: checkFileExpire.js [add-files-count] [file-size-bytes] [key-prefix] [seconds-to-expire]');
 return;
@@ -41,15 +41,15 @@ const utils = require('./../../Common/sources/utils');
 const operationContext = require('./../../Common/sources/operationContext');
 const utilsDocService = require('./../../DocService/sources/utilsDocService');

-let ctx = operationContext.global;
+const ctx = operationContext.global;

-let histograms = {};
+const histograms = {};

 async function beforeStart() {
-let timerify = function (func) {
-let histogram = createHistogram();
+const timerify = function (func) {
+const histogram = createHistogram();
 histograms[func.name] = histogram;
-return performance.timerify(func, {histogram: histogram});
+return performance.timerify(func, {histogram});
 };
 utilsDocService.convertImageToPng = timerify(utilsDocService.convertImageToPng);
 // Jimp.read = timerify(Jimp.read);
@@ -57,7 +57,7 @@ async function beforeStart() {
 const obs = new PerformanceObserver(list => {
 const entries = list.getEntries();
 entries.forEach(entry => {
-let duration = Math.round(entry.duration * 1000) / 1000;
+const duration = Math.round(entry.duration * 1000) / 1000;
 console.log(`${entry.name}:${duration}ms`);
 });
 });
@@ -65,15 +65,15 @@ async function beforeStart() {
 }

 async function beforeEnd() {
-let logHistogram = function (histogram, name) {
-let mean = Math.round(histogram.mean / 1000) / 1000;
-let min = Math.round(histogram.min / 1000) / 1000;
-let max = Math.round(histogram.max / 1000) / 1000;
-let count = histogram.count;
+const logHistogram = function (histogram, name) {
+const mean = Math.round(histogram.mean / 1000) / 1000;
+const min = Math.round(histogram.min / 1000) / 1000;
+const max = Math.round(histogram.max / 1000) / 1000;
+const count = histogram.count;
 ctx.logger.info(`histogram ${name}: count=${count}, mean=${mean}ms, min=${min}ms, max=${max}ms`);
 };
 await utils.sleep(1000);
-for (let name in histograms) {
+for (const name in histograms) {
 logHistogram(histograms[name], name);
 }
 }
@@ -81,15 +81,15 @@ async function beforeEnd() {
 async function fixInDir(dirIn, dirOut) {
 ctx.logger.info('dirIn:%s', dirIn);
 ctx.logger.info('dirOut:%s', dirOut);
-let dirents = await readdir(dirIn, {withFileTypes: true, recursive: true});
-for (let dirent of dirents) {
+const dirents = await readdir(dirIn, {withFileTypes: true, recursive: true});
+for (const dirent of dirents) {
 if (dirent.isFile()) {
-let file = dirent.name;
+const file = dirent.name;
 ctx.logger.info('fixInDir:%s', file);
-let buffer = await readFile(path.join(dirent.path, file));
-let bufferNew = await utilsDocService.convertImageToPng(ctx, buffer);
+const buffer = await readFile(path.join(dirent.path, file));
+const bufferNew = await utilsDocService.convertImageToPng(ctx, buffer);
 if (buffer !== bufferNew) {
-let outputPath = path.join(dirOut, dirent.path.substring(dirIn.length), path.basename(file, path.extname(file)) + '.png');
+const outputPath = path.join(dirOut, dirent.path.substring(dirIn.length), path.basename(file, path.extname(file)) + '.png');
 await mkdir(path.dirname(outputPath), {recursive: true});
 await writeFile(outputPath, bufferNew);
 }
@@ -98,7 +98,7 @@ async function fixInDir(dirIn, dirOut) {
 }

 async function startTest() {
-let args = process.argv.slice(2);
+const args = process.argv.slice(2);
 if (args.length < 2) {
 ctx.logger.error('missing arguments.USAGE: convertImageToPng.js "dirIn" "dirOut"');
 return;
@@ -41,15 +41,15 @@ const utils = require('./../../Common/sources/utils');
 const operationContext = require('./../../Common/sources/operationContext');
 const utilsDocService = require('./../../DocService/sources/utilsDocService');

-let ctx = operationContext.global;
+const ctx = operationContext.global;

-let histograms = {};
+const histograms = {};

 async function beforeStart() {
-let timerify = function (func) {
-let histogram = createHistogram();
+const timerify = function (func) {
+const histogram = createHistogram();
 histograms[func.name] = histogram;
-return performance.timerify(func, {histogram: histogram});
+return performance.timerify(func, {histogram});
 };
 utilsDocService.fixImageExifRotation = timerify(utilsDocService.fixImageExifRotation);
 // Jimp.read = timerify(Jimp.read);
@@ -57,7 +57,7 @@ async function beforeStart() {
 const obs = new PerformanceObserver(list => {
 const entries = list.getEntries();
 entries.forEach(entry => {
-let duration = Math.round(entry.duration * 1000) / 1000;
+const duration = Math.round(entry.duration * 1000) / 1000;
 console.log(`${entry.name}:${duration}ms`);
 });
 });
@@ -65,15 +65,15 @@ async function beforeStart() {
 }

 async function beforeEnd() {
-let logHistogram = function (histogram, name) {
-let mean = Math.round(histogram.mean / 1000) / 1000;
-let min = Math.round(histogram.min / 1000) / 1000;
-let max = Math.round(histogram.max / 1000) / 1000;
-let count = histogram.count;
+const logHistogram = function (histogram, name) {
+const mean = Math.round(histogram.mean / 1000) / 1000;
+const min = Math.round(histogram.min / 1000) / 1000;
+const max = Math.round(histogram.max / 1000) / 1000;
+const count = histogram.count;
 ctx.logger.info(`histogram ${name}: count=${count}, mean=${mean}ms, min=${min}ms, max=${max}ms`);
 };
 await utils.sleep(1000);
-for (let name in histograms) {
+for (const name in histograms) {
 logHistogram(histograms[name], name);
 }
 }
@@ -81,15 +81,15 @@ async function beforeEnd() {
 async function fixInDir(dirIn, dirOut) {
 ctx.logger.info('dirIn:%s', dirIn);
 ctx.logger.info('dirOut:%s', dirOut);
-let dirents = await readdir(dirIn, {withFileTypes: true, recursive: true});
-for (let dirent of dirents) {
+const dirents = await readdir(dirIn, {withFileTypes: true, recursive: true});
+for (const dirent of dirents) {
 if (dirent.isFile()) {
-let file = dirent.name;
+const file = dirent.name;
 ctx.logger.info('fixInDir:%s', file);
-let buffer = await readFile(path.join(dirent.path, file));
-let bufferNew = await utilsDocService.fixImageExifRotation(ctx, buffer);
+const buffer = await readFile(path.join(dirent.path, file));
+const bufferNew = await utilsDocService.fixImageExifRotation(ctx, buffer);
 if (buffer !== bufferNew) {
-let outputPath = path.join(dirOut, dirent.path.substring(dirIn.length), file);
+const outputPath = path.join(dirOut, dirent.path.substring(dirIn.length), file);
 await mkdir(path.dirname(outputPath), {recursive: true});
 await writeFile(outputPath, bufferNew);
 }
@@ -98,7 +98,7 @@ async function fixInDir(dirIn, dirOut) {
 }

 async function startTest() {
-let args = process.argv.slice(2);
+const args = process.argv.slice(2);
 if (args.length < 2) {
 ctx.logger.error('missing arguments.USAGE: fixImageExifRotation.js "dirIn" "dirOut"');
 return;
@@ -11,17 +11,17 @@ const defaultTestSMTPServer = {
 };
 const testTimeout = 1000 * 10;

-afterAll(function () {
+afterAll(() => {
 mailService.transportersRelease();
 });

-describe('Mail service', function () {
-describe('SMTP', function () {
+describe('Mail service', () => {
+describe('SMTP', () => {
 const {host, port} = defaultTestSMTPServer;

 test(
 'Transporters life cycle',
-async function () {
+async () => {
 // Accounts created at https://ethereal.email/, all messages in tests go here: https://ethereal.email/messages
 // Ethereal is a special SMTP server for mailing tests in collaboration with Nodemailer.
 const accounts = await Promise.all([nodemailer.createTestAccount(), nodemailer.createTestAccount(), nodemailer.createTestAccount()]);
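The comment in the hunk above points at Ethereal, Nodemailer's throwaway SMTP service for tests. For reference, the underlying library flow looks roughly like this; this is plain Nodemailer usage, not the project's mailService wrapper:

// Standalone Nodemailer/Ethereal example (not the project's mailService API).
const nodemailer = require('nodemailer');

async function sendViaEthereal() {
  const account = await nodemailer.createTestAccount(); // throwaway account on ethereal.email
  const transporter = nodemailer.createTransport({
    host: account.smtp.host,
    port: account.smtp.port,
    secure: account.smtp.secure,
    auth: {user: account.user, pass: account.pass}
  });
  const info = await transporter.sendMail({
    from: account.user,
    to: account.user,
    subject: 'test',
    text: 'hello'
  });
  console.log('preview:', nodemailer.getTestMessageUrl(info)); // message is viewable on ethereal.email
  transporter.close();
}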
@@ -27,7 +27,7 @@ const getStatusCode = response => response.statusCode || response.status;

 function createMockContext(overrides = {}) {
 const defaultCtx = {
-getCfg: function (key, _) {
+getCfg (key, _) {
 switch (key) {
 case 'services.CoAuthoring.requestDefaults':
 return {
@@ -69,14 +69,14 @@ function createMockContext(overrides = {}) {
 }
 },
 logger: {
-debug: function () {}
+debug () {}
 }
 };

 // Return a mock context with overridden values if any
 return {
 ...defaultCtx,
-getCfg: function (key, _) {
+getCfg (key, _) {
 // Return the override if it exists
 if (overrides[key]) {
 return overrides[key];
@@ -100,7 +100,7 @@ describe('HTTP Request Unit Tests', () => {
 // Endpoint that simulates timeout
 app.get('/api/timeout', (req, res) => {
 // Never send response to trigger timeout
-return;
+
 });

 app.use('/api/status/:code', (req, res) => {
|
||||
// POST endpoint that times out
|
||||
app.post('/api/timeout', express.json(), (req, res) => {
|
||||
// Never send response to trigger timeout
|
||||
return;
|
||||
|
||||
});
|
||||
|
||||
app.get('/api/binary', (req, res) => {
|
||||
@@ -724,8 +724,8 @@ describe('HTTP Request Unit Tests', () => {
 headers: defaultHeaders
 }
 });
-let customHeaders = {'custom-header': 'test-value', 'set-cookie': ['cookie']};
-let customQueryParams = {'custom-query-param': 'value'};
+const customHeaders = {'custom-header': 'test-value', 'set-cookie': ['cookie']};
+const customQueryParams = {'custom-query-param': 'value'};
 const result = await utils.downloadUrlPromise(
 mockCtx,
 `${BASE_URL}/api/mirror?${new URLSearchParams(customQueryParams).toString()}`,
@@ -40,14 +40,14 @@ describe('Server-Side Request Forgery (SSRF)', () => {

 beforeAll(() => {
 goodServer = http
-.createServer(function (req, res) {
+.createServer((req, res) => {
 res.write('good');
 res.end();
 })
 .listen(GOOD_PORT);

 goodServerRedirect = http
-.createServer(function (req, res) {
+.createServer((req, res) => {
 console.log(`Received request for: ${req.url}`);

 // Set redirect status code (301 for permanent redirect, 302 for temporary)
@@ -65,7 +65,7 @@ describe('Server-Side Request Forgery (SSRF)', () => {
 .listen(GOOD_PORT_REDIRECT);

 badServer = http
-.createServer(function (req, res) {
+.createServer((req, res) => {
 res.write('bad');
 res.end();
 })
@@ -32,12 +32,12 @@

 const {describe, test, expect} = require('@jest/globals');

-describe('Successful and failure tests', function () {
-test('Successful test', function () {
+describe('Successful and failure tests', () => {
+test('Successful test', () => {
 expect(true).toBeTruthy();
 });

-test.skip('Failure test', function () {
+test.skip('Failure test', () => {
 expect(true).toBeFalsy();
 });
 });
@@ -39,8 +39,8 @@ const utils = require('../../Common/sources/utils');
 const ctx = new operationContext.Context();
 const minimumIterationsByteLength = 4;

-describe('AES encryption & decryption', function () {
-test('Iterations range', async function () {
+describe('AES encryption & decryption', () => {
+test('Iterations range', async () => {
 const configuration = config.util.cloneDeep(config.get('aesEncrypt.config'));
 const encrypted = await utils.encryptPassword(ctx, 'secretstring');
 const {iterationsByteLength = 5} = configuration;
@@ -53,7 +53,7 @@ describe('AES encryption & decryption', function () {
 expect(iterations).toBeLessThanOrEqual(Math.pow(10, iterationsLength) - 1);
 });

-test('Correct workflow', async function () {
+test('Correct workflow', async () => {
 const encrypted = await utils.encryptPassword(ctx, 'secretstring');
 const decrypted = await utils.decryptPassword(ctx, encrypted);
 expect(decrypted).toEqual('secretstring');