Skip to content
This repository was archived by the owner on Jan 14, 2025. It is now read-only.

Commit 429bd4b

Browse files
authored
[ORCT-118] MonAlisa service tests (#215)
1 parent cce17bb commit 429bd4b

10 files changed

Lines changed: 305 additions & 58 deletions

File tree

app/lib/alimonitor-services/BookkeepingService.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,15 +64,15 @@ class BookkeepingService extends AbstractServiceSynchronizer {
6464
const results = [];
6565
let state = {
6666
page: 0,
67-
limit: 100,
67+
limit: process.env.BKP_RUNS_FETCH_LIMIT || 100,
6868
};
6969
while (!this.syncTraversStop(state)) {
7070
const partialResult = await this.syncPerEndpoint(
7171
ServicesEndpointsFormatter.bookkeeping(state['page'], state['limit']),
7272
this.metaDataHandler.bind(this),
7373
);
7474
results.push(partialResult);
75-
this.logger.info(`progress of ${state['page']} to ${this.metaStore['pageCount']}`);
75+
this.logger.info(`processed ${state['page']} pages of ${this.metaStore['pageCount']}`);
7676
state = this.nextState(state);
7777
}
7878

app/lib/alimonitor-services/helpers/Cacher.js

Lines changed: 59 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,21 @@
1616
const path = require('path');
1717
const fs = require('fs');
1818
const config = require('../../config/configProvider');
19+
const { createHash } = require('crypto');
1920

21+
const maxSystemFileNameLength = process.env.MAX_FILE_NAME_LENGTH || 255;
22+
23+
/**
24+
* Class providing utilities for caching data fetched from external services
25+
*/
2026
class Cacher {
27+
/**
28+
* Store data
29+
* @param {String} synchronizerName name of service used to fetch data
30+
* @param {URL} endpoint data were fetched from
31+
* @param {Object} data to be stringified
32+
* @return {void}
33+
*/
2134
static cache(synchronizerName, endpoint, data) {
2235
const cacheDir = Cacher.serviceCacheDir(synchronizerName);
2336
if (!fs.existsSync(cacheDir)) {
@@ -29,34 +42,75 @@ class Cacher {
2942
);
3043
}
3144

45+
/**
46+
* Check if data from given endpoint are cached
47+
* @param {String} synchronizerName name of synchronizer
48+
* @param {URL} endpoint data were fetched from
49+
* @return {Boolean} true if cached false otherwise
50+
*/
3251
static isCached(synchronizerName, endpoint) {
3352
return fs.existsSync(Cacher.cachedFilePath(synchronizerName, endpoint));
3453
}
3554

55+
/**
56+
* Get cached data
57+
* @param {String} synchronizerName name of synchronizer
58+
* @param {URL} endpoint data were fetched from
59+
* @return {JSON} data
60+
*/
3661
static getJsonSync(synchronizerName, endpoint) {
3762
return JSON.parse(fs.readFileSync(Cacher.cachedFilePath(synchronizerName, endpoint)));
3863
}
3964

65+
/**
66+
* Get cached data
67+
* @param {String} synchronizerName name of synchronizer
68+
* @param {URL} endpoint data were fetched from
69+
* @return {Promise<JSON>} data
70+
*/
4071
static async getJson(synchronizerName, endpoint) {
4172
return await fs.readFile(Cacher.cachedFilePath(synchronizerName, endpoint))
4273
.then((r) => JSON.parse(r));
4374
}
4475

76+
/**
77+
* Return path to data given via endpoint they were fetched from and synchronizer name
78+
* @param {String} synchronizerName name of synchronizer
79+
* @param {URL} endpoint data were fetched from
80+
* @return {String} path
81+
*/
4582
static cachedFilePath(synchronizerName, endpoint) {
46-
const maxSystemFilenameLength = 255;
47-
if (endpoint.length > maxSystemFilenameLength) {
48-
endpoint = endpoint.slice(0, maxSystemFilenameLength); // TODO better solution
49-
}
5083
return path.join(
5184
Cacher.serviceCacheDir(synchronizerName),
5285
Cacher.cachedFileName(endpoint),
5386
);
5487
}
5588

89+
/**
90+
* Return name of file to store json data based on searchParams of endpoint they were fetched from
91+
* In case file name is too long, it is cut to appropriate length and
92+
* suffixed with '#' character and hash of the original, NON-shortened file name
93+
* @param {URL} endpoint endpoint data was fetched from
94+
* @returns {String} file name
95+
*/
5696
static cachedFileName(endpoint) {
57-
return `${endpoint.searchParams.toString()}.json`;
97+
const fileExtension = '.json';
98+
const maxFilenameLength = maxSystemFileNameLength - fileExtension.length;
99+
let fileName = endpoint.searchParams.toString();
100+
if (fileName.length > maxFilenameLength) {
101+
const hash = createHash('md5').update(fileName).digest('hex');
102+
fileName = fileName.slice(0, maxFilenameLength - (hash.length + 1));
103+
fileName += `#${hash}`;
104+
}
105+
return `${fileName}${fileExtension}`;
58106
}
59107

108+
/**
109+
* Return path to directory where data from given synchronizer will be stored
110+
* @param {String} synchronizerName name of synchronizer
111+
* @param {URL} endpoint data were fetched from
112+
* @return {String} path
113+
*/
60114
static serviceCacheDir(synchronizerName) {
61115
return path.join(
62116
config.services.rawJsonCachePath,

docker/docker-compose-network.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
networks:
22
network:
3+
internal: "${DOCKER_NETWORK_INTERNAL:-false}"
34
driver: bridge
45
ipam:
56
driver: default

docker/test.env

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,3 +85,6 @@ RAW_JSON_CACHE_PATH=${RAW_JSON_CACHE_PATH:-/opt/RunConditionTable/4c3a64a02110a9
8585

8686
### other
8787
RCT_ERR_DEPTH=full
88+
MOCHA_OPTIONS=${MOCHA_OPTIONS:-}
89+
BKP_RUNS_FETCH_LIMIT=100
90+
DOCKER_NETWORK_INTERNAL=${DOCKER_NETWORK_INTERNAL:-true}

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
"eslint:show:linux": "npm run eslint; firefox ./reports/static/static-analysis.html",
2525
"eslint:show:macos": "npm run eslint; open -a firefox ./reports/static/static-analysis.html",
2626
"static": "npm run eslint",
27-
"coverage:test": "mocha --exit test/* ",
27+
"coverage:test": "mocha --exit test/* $MOCHA_OPTIONS",
2828
"coverage:report": "nyc report --report=html --report=json",
2929
"coverage": "nyc npm run coverage:test && npm run coverage:report",
3030
"start:test": "npm run coverage",

rctmake

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,9 @@ Usage:
9999
-p|--rct-http-port PORT - flag for setting env var RCT_HTTP_PORT
100100
-m|--target-modifier START_TARGET_MODIFIER - add modifying suffix to npm task like start:dev:START_TARGET_MODIFIER or start:test:START_TARGET_MODIFIER, depends on chosen TARGET
101101
In case of dev mode, the modifier ND causes node to run instead of nodemon.
102-
102+
-M|--mocha <MOCHA OPTIONS> - pass flags/options to mocha when running tests (see mocha documentation)
103+
e.g.: ./$SCRIPT_NAME run --target test --mocha '--grep SomeTestsDescriptor'
104+
103105
104106
$ERROR_MESSAGE_PRINT
105107
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@@ -187,6 +189,10 @@ else
187189
BUILD='--build';
188190
shift 1;
189191
;;
192+
-M|--mocha)
193+
export MOCHA_OPTIONS="$2";
194+
shift 2;
195+
;;
190196
*)
191197
usage "Incorrect flag: $1"
192198
;;

test/lib/alimonitor-services/syncManager.test.js

Lines changed: 67 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -13,58 +13,96 @@
1313
*/
1414

1515
const { rctData: { detectors } } = require('../../../app/lib/config/configProvider.js');
16-
const { syncManager } = require('../../../app/lib/alimonitor-services/SyncManager.js');
17-
const { databaseManager: { repositories: {
18-
RunRepository,
19-
RunDetectorsRepository,
20-
DetectorSubsystemRepository,
21-
},
16+
const { syncManager: {
17+
services: {
18+
bookkeepingService,
19+
monalisaService,
20+
},
21+
} } = require('../../../app/lib/alimonitor-services/SyncManager.js');
22+
const { databaseManager: {
23+
repositories: {
24+
RunRepository,
25+
RunDetectorsRepository,
26+
DetectorSubsystemRepository,
27+
DataPassRepository,
28+
},
29+
models: {
30+
Run,
31+
Period,
32+
},
2233
} } = require('../../../app/lib/database/DatabaseManager.js');
23-
const { generateRandomBookkeepingCachedRawJsons, cleanCachedBkpData } = require('./testutil/cache-for-test.js');
34+
const { generateRandomBookkeepingCachedRawJsons, cleanCachedBkpData } = require('./testutil/bookkeeping-cache-test-data.js');
35+
const { generateRandomMonalisaCachedRawJsons, cleanCachedMonalisaData } = require('./testutil/monalisa-cache-test-data.js');
2436
const assert = require('assert');
37+
const { expect } = require('chai');
38+
39+
const artficialDataSizes = {
40+
bookkeepingService: {
41+
runsInOneFile: Number(process.env.BKP_RUNS_FETCH_LIMIT || 100),
42+
filesNo: 2,
43+
},
44+
monalisaService: {
45+
dataPassesNo: 10,
46+
minDetailsPerOneDataPass: 1,
47+
maxDetailsPerOneDataPass: 10,
48+
},
49+
};
2550

2651
module.exports = () => describe('SyncManager suite', () => {
27-
before('should fetch detectors data from DB the same as in config', async () => await DetectorSubsystemRepository
52+
before(() => {
53+
generateRandomBookkeepingCachedRawJsons(
54+
artficialDataSizes.bookkeepingService.runsInOneFile,
55+
artficialDataSizes.bookkeepingService.filesNo,
56+
);
57+
generateRandomMonalisaCachedRawJsons(
58+
artficialDataSizes.monalisaService.dataPassesNo,
59+
artficialDataSizes.monalisaService.minDetailsPerOneDataPass,
60+
artficialDataSizes.monalisaService.maxDetailsPerOneDataPass,
61+
);
62+
});
63+
64+
after(() => {
65+
cleanCachedBkpData();
66+
cleanCachedMonalisaData();
67+
});
68+
69+
it('should fetch detectors data from DB the same as in config', async () => await DetectorSubsystemRepository
2870
.findAll({ raw: true })
29-
.then((detectoSubsystemData) => detectoSubsystemData.map(({ name }) => name))
30-
.then((detectoSubsystemNames) => assert.deepStrictEqual(detectoSubsystemNames.sort(), detectors.sort())));
71+
.then((detectorSubsystemData) => detectorSubsystemData.map(({ name }) => name))
72+
.then((detectorSubsystemNames) => expect(detectorSubsystemNames).to.have.same.members(detectors)));
3173

3274
describe('BookkeepingService suite', () => {
3375
describe('with artificial cache data', () => {
34-
before(() => {
35-
generateRandomBookkeepingCachedRawJsons();
36-
});
37-
38-
after(() => {
39-
cleanCachedBkpData();
40-
});
41-
4276
it('should performe sync with random data withour major errors', async () => {
43-
assert.strictEqual(await syncManager.services.bookkeepingService.setSyncTask(), true);
77+
bookkeepingService.useCacheJsonInsteadIfPresent = true;
78+
expect(await bookkeepingService.setSyncTask()).to.be.equal(true);
4479
});
4580

4681
it('should fetch some run data directly from DB', async () =>
4782
await RunRepository
4883
.findAll({ raw: true })
49-
.then((data) => assert(data.length > 0)));
84+
.then((data) => expect(data).to.length.greaterThan(0))); //TODO
5085

5186
it('should fetch some run_detector data directly from DB', async () =>
5287
await RunDetectorsRepository
5388
.findAll({ raw: true })
54-
.then((data) => assert(data.length > 0)));
89+
.then((data) => expect(data).to.length.greaterThan(0))); //TODO
5590
});
91+
});
5692

57-
describe('without artificial cache data', () => {
58-
before(() => {
59-
syncManager.services.bookkeepingService.forceToUseOnlyCache = true;
93+
describe('MonalisaService suite', () => {
94+
describe('with artificial cache data', () => {
95+
it('should performe sync with random data without major errors', async () => {
96+
monalisaService.useCacheJsonInsteadIfPresent = true;
97+
assert.strictEqual(await monalisaService.setSyncTask(), true);
6098
});
6199

62-
after(() => {
63-
syncManager.services.bookkeepingService.forceToUseOnlyCache = false;
64-
});
100+
it('should fetch some data passes with associated Period and Runs directly from DB', async () => {
101+
const data = await DataPassRepository
102+
.findAll({ include: [Run, Period] });
65103

66-
it('should performe sync with major error', async () => {
67-
assert.strictEqual(await syncManager.services.bookkeepingService.setSyncTask(), false);
104+
expect(data).to.length.greaterThan(0); //TODO
105+
expect(data.map(({ Period }) => Period).filter((_) => _)).to.be.lengthOf(data.length);
68106
});
69107
});
70108
});

test/lib/alimonitor-services/testutil/cache-for-test.js renamed to test/lib/alimonitor-services/testutil/bookkeeping-cache-test-data.js

Lines changed: 12 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -3,12 +3,11 @@ const path = require('path');
33
const { Cacher } = require('../../../../app/lib/alimonitor-services/helpers');
44
const { rctData: { detectors } } = require('../../../../app/lib/config/configProvider.js');
55

6-
const randint = (min = 0, max = 0) => Math.round(Math.random() * (max - min) + min);
7-
const choice = (arr) => arr[Math.floor(Math.random() * arr.length)];
6+
const { randint, choice, universalNoncontextualArrayDataGenerator, randomPeriodName } = require('./common.js');
87

9-
const ketpFields = {
8+
const dataUnitDefinition = {
109
runNumber: () => randint(1000000, 9000000),
11-
lhcPeriod: () => `LHC${choice([22, 18])}${choice('abceadbfarebivaavgauvgzxvcm')}`,
10+
lhcPeriod: () => randomPeriodName(),
1211
timeO2Start: () => randint(100000000, 200000000),
1312
timeO2End: () => randint(100000000, 200000000),
1413
timeTrgStart: () => randint(200000000, 300000000),
@@ -24,20 +23,16 @@ const ketpFields = {
2423
pdpBeamType: () => choice(['pp', 'PbPb', 'pPb']),
2524
};
2625

27-
const genSingleRunData = () => Object.fromEntries(
28-
Object.entries(ketpFields)
29-
.map(([runField, fieldDataGenerator]) => [runField, fieldDataGenerator()]),
30-
);
26+
const getBkpSourceFiles = (offset, limit) =>
27+
`filter%5Bdefinitions%5D=PHYSICS&page%5Boffset%5D=${offset}&page%5Blimit%5D=${limit}.json`;
3128

32-
const genRunsBatch = (size, files) => {
33-
const filesN = files.length;
29+
const genRunsBatch = (size, filesN) => {
3430
const totalCount = size * filesN;
3531
const pData = [...new Array(filesN)]
3632
.map((_, pageIndex) => [
37-
files[pageIndex],
33+
getBkpSourceFiles(pageIndex * size, size),
3834
{
39-
data: [...new Array(size)]
40-
.map(() => genSingleRunData()),
35+
data: universalNoncontextualArrayDataGenerator(size, dataUnitDefinition),
4136
meta: {
4237
page: { pageCount: filesN, totalCount },
4338
},
@@ -46,14 +41,11 @@ const genRunsBatch = (size, files) => {
4641
return pData;
4742
};
4843

49-
const bkpSourceFiles = [
50-
'filter%5Bdefinitions%5D=PHYSICS&page%5Boffset%5D=0&page%5Blimit%5D=100.json',
51-
'filter%5Bdefinitions%5D=PHYSICS&page%5Boffset%5D=100&page%5Blimit%5D=100.json',
52-
];
44+
const bookkeepingServiceName = 'BookkeepingService';
5345

54-
const generateRandomBookkeepingCachedRawJsons = () => genRunsBatch(100, bkpSourceFiles)
46+
const generateRandomBookkeepingCachedRawJsons = (size, filesNumber) => genRunsBatch(size, filesNumber)
5547
.map(([fN, data]) => {
56-
const cacheDir = Cacher.serviceCacheDir('BookkeepingService');
48+
const cacheDir = Cacher.serviceCacheDir(bookkeepingServiceName);
5749
if (!fs.existsSync(cacheDir)) {
5850
fs.mkdirSync(cacheDir, { recursive: true });
5951
}
@@ -66,7 +58,7 @@ const generateRandomBookkeepingCachedRawJsons = () => genRunsBatch(100, bkpSourc
6658
});
6759

6860
const cleanCachedBkpData = () => {
69-
fs.rmSync(Cacher.serviceCacheDir('BookkeepingService'), { recursive: true, force: true });
61+
fs.rmSync(Cacher.serviceCacheDir(bookkeepingServiceName), { recursive: true, force: true });
7062
};
7163

7264
module.exports = {

0 commit comments

Comments
 (0)