4 changes: 2 additions & 2 deletions app/lib/alimonitor-services/BookkeepingService.js
@@ -64,15 +64,15 @@ class BookkeepingService extends AbstractServiceSynchronizer {
const results = [];
let state = {
page: 0,
limit: 100,
limit: process.env.BKP_RUNS_FETCH_LIMIT || 100,
};
while (!this.syncTraversStop(state)) {
const partialResult = await this.syncPerEndpoint(
ServicesEndpointsFormatter.bookkeeping(state['page'], state['limit']),
this.metaDataHandler.bind(this),
);
results.push(partialResult);
this.logger.info(`progress of ${state['page']} to ${this.metaStore['pageCount']}`);
this.logger.info(`processed ${state['page']} pages of ${this.metaStore['pageCount']}`);
state = this.nextState(state);
}

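Note: process.env values are always strings, so process.env.BKP_RUNS_FETCH_LIMIT || 100 evaluates to a string whenever the variable is set; the test suite further down coerces it with Number(...). A minimal sketch of an explicit coercion, using a hypothetical parseLimit helper that is not part of this PR:

    // Hypothetical helper, for illustration only. process.env values are
    // strings, so coerce to a number and fall back when missing or invalid.
    const parseLimit = (raw, fallback = 100) => {
        const parsed = Number(raw);
        return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
    };

    parseLimit(process.env.BKP_RUNS_FETCH_LIMIT); // 100 when the variable is unset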
64 changes: 59 additions & 5 deletions app/lib/alimonitor-services/helpers/Cacher.js
@@ -16,8 +16,21 @@
const path = require('path');
const fs = require('fs');
const config = require('../../config/configProvider');
const { createHash } = require('crypto');

const maxSystemFileNameLength = process.env.MAX_FILE_NAME_LENGTH || 255;

/**
* Class providing utilities for caching data fetched from external services
*/
class Cacher {
/**
* Store data
* @param {String} synchronizerName name of service used to fetch data
* @param {URL} endpoint data were fetched from
* @param {Object} data to be stringified
* @return {void}
*/
static cache(synchronizerName, endpoint, data) {
const cacheDir = Cacher.serviceCacheDir(synchronizerName);
if (!fs.existsSync(cacheDir)) {
Expand All @@ -29,34 +42,75 @@ class Cacher {
);
}

/**
* Check if data from given endpoint are cached
* @param {String} synchronizerName name of synchronizer
* @param {URL} endpoint data were fetched from
* @return {Boolean} true if cached, false otherwise
*/
static isCached(synchronizerName, endpoint) {
return fs.existsSync(Cacher.cachedFilePath(synchronizerName, endpoint));
}

/**
* Get cached data
* @param {String} synchronizerName name of synchronizer
* @param {URL} endpoint data were fetched from
* @return {JSON} data
*/
static getJsonSync(synchronizerName, endpoint) {
return JSON.parse(fs.readFileSync(Cacher.cachedFilePath(synchronizerName, endpoint)));
}

/**
* Get cached data
* @param {String} synchronizerName name of synchronizer
* @param {URL} endpoint data were fetched from
* @return {Promise<JSON>} data
*/
static async getJson(synchronizerName, endpoint) {
return await fs.promises.readFile(Cacher.cachedFilePath(synchronizerName, endpoint))
.then((r) => JSON.parse(r));
}

/**
* Return the path to cached data, given the endpoint they were fetched from and the synchronizer name
* @param {String} synchronizerName name of synchronizer
* @param {URL} endpoint data were fetched from
* @return {String} path
*/
static cachedFilePath(synchronizerName, endpoint) {
const maxSystemFilenameLength = 255;
if (endpoint.length > maxSystemFilenameLength) {
endpoint = endpoint.slice(0, maxSystemFilenameLength); // TODO better solution
}
return path.join(
Cacher.serviceCacheDir(synchronizerName),
Cacher.cachedFileName(endpoint),
);
}

/**
* Return the name of the file used to store JSON data, based on the searchParams of the endpoint the data were fetched from
* In case the file name is too long, it is cut to an appropriate length and
* suffixed with a '#' character and the hash of the original, NON-shortened file name
* @param {URL} endpoint endpoint data was fetched from
* @return {String} file name
*/
static cachedFileName(endpoint) {
return `${endpoint.searchParams.toString()}.json`;
const fileExtension = '.json';
const maxFilenameLength = maxSystemFileNameLength - fileExtension.length;
let fileName = endpoint.searchParams.toString();
if (fileName.length > maxFilenameLength) {
const hash = createHash('md5').update(fileName).digest('hex');
fileName = fileName.slice(0, maxFilenameLength - (hash.length + 1));
fileName += `#${hash}`;
}
return `${fileName}${fileExtension}`;
}

/**
* Return path to directory where data from given synchronizer will be stored
* @param {String} synchronizerName name of synchronizer
* @return {String} path
*/
static serviceCacheDir(synchronizerName) {
return path.join(
config.services.rawJsonCachePath,
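Note: the new cachedFileName keeps generated names within the filesystem limit by truncating and appending the md5 of the full name, so two long query strings that share a prefix still map to distinct cache files. A self-contained sketch of that behaviour (illustration only; the sample URL is made up):

    const { createHash } = require('crypto');

    // Mirror of the shortening logic above: names at or under the limit pass
    // through; longer ones are truncated and suffixed with '#' + md5 hash.
    const shorten = (fileName, maxLength = 255 - '.json'.length) => {
        if (fileName.length <= maxLength) {
            return fileName;
        }
        const hash = createHash('md5').update(fileName).digest('hex');
        return `${fileName.slice(0, maxLength - (hash.length + 1))}#${hash}`;
    };

    const endpoint = new URL(`https://example.org/api?filter=${'x'.repeat(300)}`);
    console.log(`${shorten(endpoint.searchParams.toString())}.json`); // 255 chars, ending in '#<32-hex>.json'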
1 change: 1 addition & 0 deletions docker/docker-compose-network.yml
@@ -1,5 +1,6 @@
networks:
network:
internal: "${DOCKER_NETWORK_INTERNAL:-false}"
driver: bridge
ipam:
driver: default
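Note: an internal bridge network cuts the compose services off from outside traffic, which is the safer default for CI; the env var keeps it overridable for local debugging. One way to inspect the resolved value, assuming a compose v2 CLI is available:

    DOCKER_NETWORK_INTERNAL=false docker compose -f docker/docker-compose-network.yml config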
3 changes: 3 additions & 0 deletions docker/test.env
@@ -85,3 +85,6 @@ RAW_JSON_CACHE_PATH=${RAW_JSON_CACHE_PATH:-/opt/RunConditionTable/4c3a64a02110a9

### other
RCT_ERR_DEPTH=full
MOCHA_OPTIONS=${MOCHA_OPTIONS:-}
BKP_RUNS_FETCH_LIMIT=100
DOCKER_NETWORK_INTERNAL=${DOCKER_NETWORK_INTERNAL:-true}
2 changes: 1 addition & 1 deletion package.json
@@ -24,7 +24,7 @@
"eslint:show:linux": "npm run eslint; firefox ./reports/static/static-analysis.html",
"eslint:show:macos": "npm run eslint; open -a firefox ./reports/static/static-analysis.html",
"static": "npm run eslint",
"coverage:test": "mocha --exit test/* ",
"coverage:test": "mocha --exit test/* $MOCHA_OPTIONS",
"coverage:report": "nyc report --report=html --report=json",
"coverage": "nyc npm run coverage:test && npm run coverage:report",
"start:test": "npm run coverage",
8 changes: 7 additions & 1 deletion rctmake
@@ -99,7 +99,9 @@ Usage:
-p|--rct-http-port PORT - flag for setting env var RCT_HTTP_PORT
-m|--target-modifier START_TARGET_MODIFIER - add a modifying suffix to the npm task, like start:dev:START_TARGET_MODIFIER or start:test:START_TARGET_MODIFIER, depending on the chosen TARGET
In dev mode, the modifier ND causes node to be run instead of nodemon.

-M|--mocha <MOCHA OPTIONS> - pass flags/options to mocha when running tests (see mocha documentation)
e.g.: ./$SCRIPT_NAME run --target test --mocha '--grep SomeTestsDescriptor'


$ERROR_MESSAGE_PRINT
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@@ -187,6 +189,10 @@ else
BUILD='--build';
shift 1;
;;
-M|--mocha)
export MOCHA_OPTIONS="$2";
shift 2;
;;
*)
usage "Incorrect flag: $1"
;;
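Note: taken together, the changes above thread mocha options end to end: rctmake -M exports MOCHA_OPTIONS, docker/test.env carries it into the test environment, and the coverage:test npm script appends the unquoted $MOCHA_OPTIONS (so this relies on a POSIX shell, and option values containing spaces will be word-split). For example, to run only tests whose description matches SyncManager:

    ./rctmake run --target test --mocha '--grep SyncManager'
    # inside the test environment this ends up running:
    #   mocha --exit test/* --grep SyncManager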
96 changes: 67 additions & 29 deletions test/lib/alimonitor-services/syncManager.test.js
@@ -13,58 +13,96 @@
*/

const { rctData: { detectors } } = require('../../../app/lib/config/configProvider.js');
const { syncManager } = require('../../../app/lib/alimonitor-services/SyncManager.js');
const { databaseManager: { repositories: {
RunRepository,
RunDetectorsRepository,
DetectorSubsystemRepository,
},
const { syncManager: {
services: {
bookkeepingService,
monalisaService,
},
} } = require('../../../app/lib/alimonitor-services/SyncManager.js');
const { databaseManager: {
repositories: {
RunRepository,
RunDetectorsRepository,
DetectorSubsystemRepository,
DataPassRepository,
},
models: {
Run,
Period,
},
} } = require('../../../app/lib/database/DatabaseManager.js');
const { generateRandomBookkeepingCachedRawJsons, cleanCachedBkpData } = require('./testutil/cache-for-test.js');
const { generateRandomBookkeepingCachedRawJsons, cleanCachedBkpData } = require('./testutil/bookkeeping-cache-test-data.js');
const { generateRandomMonalisaCachedRawJsons, cleanCachedMonalisaData } = require('./testutil/monalisa-cache-test-data.js');
const assert = require('assert');
const { expect } = require('chai');

const artificialDataSizes = {
bookkeepingService: {
runsInOneFile: Number(process.env.BKP_RUNS_FETCH_LIMIT || 100),
filesNo: 2,
},
monalisaService: {
dataPassesNo: 10,
minDetailsPerOneDataPass: 1,
maxDetailsPerOneDataPass: 10,
},
};

module.exports = () => describe('SyncManager suite', () => {
before('should fetch detectors data from DB the same as in config', async () => await DetectorSubsystemRepository
before(() => {
generateRandomBookkeepingCachedRawJsons(
artificialDataSizes.bookkeepingService.runsInOneFile,
artificialDataSizes.bookkeepingService.filesNo,
);
generateRandomMonalisaCachedRawJsons(
artificialDataSizes.monalisaService.dataPassesNo,
artificialDataSizes.monalisaService.minDetailsPerOneDataPass,
artificialDataSizes.monalisaService.maxDetailsPerOneDataPass,
);
});

after(() => {
cleanCachedBkpData();
cleanCachedMonalisaData();
});

it('should fetch detectors data from DB the same as in config', async () => await DetectorSubsystemRepository
.findAll({ raw: true })
.then((detectoSubsystemData) => detectoSubsystemData.map(({ name }) => name))
.then((detectoSubsystemNames) => assert.deepStrictEqual(detectoSubsystemNames.sort(), detectors.sort())));
.then((detectorSubsystemData) => detectorSubsystemData.map(({ name }) => name))
.then((detectorSubsystemNames) => expect(detectorSubsystemNames).to.have.same.members(detectors)));

describe('BookkeepingService suite', () => {
describe('with artificial cache data', () => {
before(() => {
generateRandomBookkeepingCachedRawJsons();
});

after(() => {
cleanCachedBkpData();
});

it('should perform sync with random data without major errors', async () => {
assert.strictEqual(await syncManager.services.bookkeepingService.setSyncTask(), true);
bookkeepingService.useCacheJsonInsteadIfPresent = true;
expect(await bookkeepingService.setSyncTask()).to.be.equal(true);
});

it('should fetch some run data directly from DB', async () =>
await RunRepository
.findAll({ raw: true })
.then((data) => assert(data.length > 0)));
.then((data) => expect(data).to.have.length.greaterThan(0))); //TODO

it('should fetch some run_detector data directly from DB', async () =>
await RunDetectorsRepository
.findAll({ raw: true })
.then((data) => assert(data.length > 0)));
.then((data) => expect(data).to.have.length.greaterThan(0))); //TODO
});
});

describe('without artificial cache data', () => {
before(() => {
syncManager.services.bookkeepingService.forceToUseOnlyCache = true;
describe('MonalisaService suite', () => {
describe('with artificial cache data', () => {
it('should perform sync with random data without major errors', async () => {
monalisaService.useCacheJsonInsteadIfPresent = true;
assert.strictEqual(await monalisaService.setSyncTask(), true);
});

after(() => {
syncManager.services.bookkeepingService.forceToUseOnlyCache = false;
});
it('should fetch some data passes with associated Period and Runs directly from DB', async () => {
const data = await DataPassRepository
.findAll({ include: [Run, Period] });

it('should perform sync with major error', async () => {
assert.strictEqual(await syncManager.services.bookkeepingService.setSyncTask(), false);
expect(data).to.have.length.greaterThan(0); //TODO
expect(data.map(({ Period }) => Period).filter((_) => _)).to.have.lengthOf(data.length);
});
});
});
test/lib/alimonitor-services/testutil/{cache-for-test.js → bookkeeping-cache-test-data.js}
@@ -3,12 +3,11 @@ const path = require('path');
const { Cacher } = require('../../../../app/lib/alimonitor-services/helpers');
const { rctData: { detectors } } = require('../../../../app/lib/config/configProvider.js');

const randint = (min = 0, max = 0) => Math.round(Math.random() * (max - min) + min);
const choice = (arr) => arr[Math.floor(Math.random() * arr.length)];
const { randint, choice, universalNoncontextualArrayDataGenerator, randomPeriodName } = require('./common.js');

const ketpFields = {
const dataUnitDefinition = {
runNumber: () => randint(1000000, 9000000),
lhcPeriod: () => `LHC${choice([22, 18])}${choice('abceadbfarebivaavgauvgzxvcm')}`,
lhcPeriod: () => randomPeriodName(),
timeO2Start: () => randint(100000000, 200000000),
timeO2End: () => randint(100000000, 200000000),
timeTrgStart: () => randint(200000000, 300000000),
@@ -24,20 +23,16 @@ const ketpFields = {
pdpBeamType: () => choice(['pp', 'PbPb', 'pPb']),
};

const genSingleRunData = () => Object.fromEntries(
Object.entries(ketpFields)
.map(([runField, fieldDataGenerator]) => [runField, fieldDataGenerator()]),
);
const getBkpSourceFiles = (offset, limit) =>
`filter%5Bdefinitions%5D=PHYSICS&page%5Boffset%5D=${offset}&page%5Blimit%5D=${limit}.json`;

const genRunsBatch = (size, files) => {
const filesN = files.length;
const genRunsBatch = (size, filesN) => {
const totalCount = size * filesN;
const pData = [...new Array(filesN)]
.map((_, pageIndex) => [
files[pageIndex],
getBkpSourceFiles(pageIndex * size, size),
{
data: [...new Array(size)]
.map(() => genSingleRunData()),
data: universalNoncontextualArrayDataGenerator(size, dataUnitDefinition),
meta: {
page: { pageCount: filesN, totalCount },
},
@@ -46,14 +41,11 @@
return pData;
};

const bkpSourceFiles = [
'filter%5Bdefinitions%5D=PHYSICS&page%5Boffset%5D=0&page%5Blimit%5D=100.json',
'filter%5Bdefinitions%5D=PHYSICS&page%5Boffset%5D=100&page%5Blimit%5D=100.json',
];
const bookkeepingServiceName = 'BookkeepingService';

const generateRandomBookkeepingCachedRawJsons = () => genRunsBatch(100, bkpSourceFiles)
const generateRandomBookkeepingCachedRawJsons = (size, filesNumber) => genRunsBatch(size, filesNumber)
.map(([fN, data]) => {
const cacheDir = Cacher.serviceCacheDir('BookkeepingService');
const cacheDir = Cacher.serviceCacheDir(bookkeepingServiceName);
if (!fs.existsSync(cacheDir)) {
fs.mkdirSync(cacheDir, { recursive: true });
}
@@ -66,7 +58,7 @@ const generateRandomBookkeepingCachedRawJsons = () => genRunsBatch(100, bkpSourc
});

const cleanCachedBkpData = () => {
fs.rmSync(Cacher.serviceCacheDir('BookkeepingService'), { recursive: true, force: true });
fs.rmSync(Cacher.serviceCacheDir(bookkeepingServiceName), { recursive: true, force: true });
};

module.exports = {
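Note: testutil/common.js, which now hosts the shared generators, is not part of this diff. A minimal sketch of what universalNoncontextualArrayDataGenerator plausibly does, inferred from the removed genSingleRunData helper and the call site above (an assumption, not the PR's actual code):

    // Assumed implementation: build `size` data units, each field produced by
    // invoking its generator from the definition object (cf. genSingleRunData).
    const universalNoncontextualArrayDataGenerator = (size, dataUnitDefinition) =>
        [...new Array(size)].map(() => Object.fromEntries(
            Object.entries(dataUnitDefinition)
                .map(([field, generate]) => [field, generate()]),
        ));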