Skip to content
This repository was archived by the owner on Jan 14, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 29 additions & 16 deletions app/lib/alimonitor-services/AbstractServiceSynchronizer.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ const { Log } = require('@aliceo2/web-ui');
const config = require('../config/configProvider.js');
const ResProvider = require('../ResProvider.js');
const Utils = require('../Utils.js');
const JsonsFetcher = require('./JsonsFetcher.js');
const Cacher = require('./Cacher.js');

const defaultServiceSynchronizerOptions = {
Expand All @@ -42,6 +43,7 @@ class PassCorrectnessMonitor {
this.errorsLoggingDepth = errorsLoggingDepth;
this.correct = 0;
this.incorrect = 0;
this.omitted = 0;
this.errors = [];
}

Expand All @@ -55,17 +57,21 @@ class PassCorrectnessMonitor {
this.errors.push(e);
}

// Count a data unit that was dropped by the filter stage, so that
// logResults() can report omitted units alongside correct/incorrect ones.
handleOmitted() {
this.omitted++;
}

logResults() {
const { correct, incorrect, errors, errorsLoggingDepth, logger } = this;
const dataSize = incorrect + correct;
const { correct, incorrect, omitted, errors, errorsLoggingDepth, logger } = this;
const dataSize = incorrect + correct + omitted;

if (incorrect > 0) {
const logFunc = Utils.switchCase(errorsLoggingDepth, config.errorsLoggingDepths);
errors.forEach((e) => logFunc(logger, e));
logger.warn(`sync unseccessful for ${incorrect}/${dataSize}`);
}
if (correct > 0) {
logger.info(`sync successful for ${correct}/${dataSize}`);
if (omitted > 0) {
logger.info(`omitted data units ${omitted}/${dataSize}`);
}
}
}
Expand Down Expand Up @@ -182,12 +188,18 @@ class AbstractServiceSynchronizer {
}
const data = responsePreprocess(rawResponse)
.map((r) => dataAdjuster(r))
.filter((r) => r && filterer(r));
.filter((r) => {
const f = r && filterer(r);
if (!f) {
this.monitor.handleOmitted();
}
return f;
});

if (this.batchedRequestes) {
this.makeBatchedRequest(data);
await this.makeBatchedRequest(data);
} else {
this.makeSequentialRequest(data);
await this.makeSequentialRequest(data);
}
this.monitor.logResults();
} catch (fatalError) {
Expand All @@ -202,7 +214,7 @@ class AbstractServiceSynchronizer {
async makeBatchedRequest(data) {
const rowsChunks = Utils.arrayToChunks(data, this.batchSize);
for (const chunk of rowsChunks) {
const promises = chunk.map((dataUnit) => this.dbAction(this.dbclient, dataUnit)
const promises = chunk.map((dataUnit) => this.dbAction(this.dbClient, dataUnit)
.then(() => this.monitor.handleCorrect())
.catch((e) => this.monitor.handleIncorrect(e, { dataUnit: dataUnit })));

Expand All @@ -212,35 +224,36 @@ class AbstractServiceSynchronizer {

async makeSequentialRequest(data) {
for (const dataUnit of data) {
await this.dbAction(this.dbclient, dataUnit)
await this.dbAction(this.dbClient, dataUnit)
.then(this.monitor.handleCorrect)
.catch((e) => this.monitor.handleIncorrect(e, { dataUnit: dataUnit }));
}
}

async getRawResponse(endpoint) {
if (this.useCacheJsonInsteadIfPresent && Cacher.isCached(this.name, endpoint)) {
this.logger.info(`using cached json :: ${Cacher.cachedFilePath(this.name, endpoint)}`);
// eslint-disable-next-line capitalized-comments
// this.logger.info(`using cached json :: ${Cacher.cachedFilePath(this.name, endpoint)}`);
return Cacher.getJsonSync(this.name, endpoint);
}
const onSucces = (endpoint, data) => {
if (this.rawCacheUse) {
Cacher.cache(this.name, endpoint, data);
}
};
return await Utils.makeHttpRequestForJSON(endpoint, this.opts, this.logger, onSucces);
return await JsonsFetcher.makeHttpRequestForJSON(endpoint, this.opts, this.logger, onSucces);
}

async dbConnect() {
this.dbclient = new Client(config.database);
this.dbclient.on('error', (e) => this.logger.error(e));
this.dbClient = new Client(config.database);
this.dbClient.on('error', (e) => this.logger.error(e));

return await this.dbclient.connect()
return await this.dbClient.connect()
.then(() => this.logger.info('database connection established'));
}

async dbDisconnect() {
return await this.dbclient.end()
return await this.dbClient.end()
.then(() => this.logger.info('database connection ended'));
}

Expand All @@ -262,7 +275,7 @@ class AbstractServiceSynchronizer {
}

isConnected() {
return this.dbclient?._connected;
return this.dbClient?._connected;
}

async restart() {
Expand Down
1 change: 1 addition & 0 deletions app/lib/alimonitor-services/JsonsFetcher.js
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,7 @@ class JsonsFetcher {
__dirname,
'..',
'..',
'..',
'database',
'cache',
'rawJson',
Expand Down
22 changes: 17 additions & 5 deletions app/lib/alimonitor-services/MonalisaService.js
Original file line number Diff line number Diff line change
Expand Up @@ -33,17 +33,24 @@ class MonalisaService extends AbstractServiceSynchronizer {
description: 'description',
output_size: 'size',
interaction_type: 'beam_type',
last_run: 'last_run',
};

this.monalisaServiceDetails = new MonalisaServiceDetails();
}

sync() {
return this.syncPerEndpoint(
async sync() {
const last_runs_res = await this.dbClient.query('SELECT name, last_run from data_passes;');
if (! last_runs_res || ! last_runs_res.rows) {
this.logger.error(`Could last_runs of data_passes are ${last_runs_res}`);
}
this.last_runs = Object.fromEntries(last_runs_res.rows.map((r) => Object.values(r)));

return await this.syncPerEndpoint(
EndpointsFormatter.dataPassesRaw(),
this.responsePreprocess.bind(this),
this.dataAdjuster.bind(this),
(r) => r.period.year >= config.dataFromYearIncluding,
(r) => r.period.year >= config.dataFromYearIncluding && r.last_run != this.last_runs[r.name],
this.dbAction.bind(this),
);
}
Expand Down Expand Up @@ -79,9 +86,14 @@ class MonalisaService extends AbstractServiceSynchronizer {
${null},
${d.number_of_events},
${null},
${d.size}
${d.size},
${d.last_run}
);`;
return await Promise.all([dbClient.query(pgCommand), this.monalisaServiceDetails.sync(d)]);
const q1 = await dbClient.query(pgCommand);
const q2 = await this.monalisaServiceDetails.sync(d);
return Promise.all([q1, q2]);
// eslint-disable-next-line capitalized-comments
// return await Promise.all([dbClient.query(pgCommand), this.monalisaServiceDetails.sync(d)]);
}

extractPeriod(rowData) {
Expand Down
1 change: 1 addition & 0 deletions app/lib/alimonitor-services/MonalisaServiceDetails.js
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ class MonalisaServiceDetails extends AbstractServiceSynchronizer {
(v) => Utils.adjusetObjValuesToSql(Utils.filterObject(v, this.ketpFields)),
() => true,
async (dbClient, v) => {
v.parentDataUnit = d;
const detailsSql = v ?
`call insert_prod_details(${d.name}, ${v.run_number}, ${v.period});`
: '';
Expand Down
13 changes: 8 additions & 5 deletions app/lib/alimonitor-services/MonalisaServiceMC.js
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ class MonalisaServiceMC extends AbstractServiceSynchronizer {
sp = Utils.filterObject(sp, this.ketpFields);
sp.size = Number(sp.size);

const adustAnchoredOnesToList = (rawString) => Utils
const parseListLikeString = (rawString) => Utils
.replaceAll(rawString, /,|'|;"/, ' ')
.split(/ +/)
.map((v) => v.trim())
Expand All @@ -81,8 +81,9 @@ class MonalisaServiceMC extends AbstractServiceSynchronizer {
*/

sp.period = this.extractPeriod(sp);
sp.anchor_passes = adustAnchoredOnesToList(sp.anchor_passes);
sp.anchor_productions = adustAnchoredOnesToList(sp.anchor_productions);
sp.anchor_passes = parseListLikeString(sp.anchor_passes);
sp.anchor_productions = parseListLikeString(sp.anchor_productions);
sp.runs = parseListLikeString(sp.runs).map((s) => Number(s));

return sp;
}
Expand Down Expand Up @@ -111,8 +112,10 @@ class MonalisaServiceMC extends AbstractServiceSynchronizer {
${null},
${d.number_of_events},
${d.size}
);`;
return await Promise.all([dbClient.query(pgCommand), this.monalisaServiceMCDetails.sync(d)]);
); call insert_mc_details(${d.name}, ${d.runs}::integer[], ${period.name});`;
return await dbClient.query(pgCommand);
// eslint-disable-next-line capitalized-comments
// return await Promise.all([dbClient.query(pgCommand), this.monalisaServiceMCDetails.sync(d)]);
}

extractPeriod(rowData) {
Expand Down
3 changes: 0 additions & 3 deletions app/lib/database/views/anchored_per_mc_view.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,12 @@ const anchored_per_mc_view = (query) => `
--dp.id
dp.name,
dp.description,
pt.pass_type,
dp.jira,
dp.ml,
dp.number_of_events,
dp.software_version,
dp.size
FROM data_passes AS dp
LEFT JOIN pass_types AS pt
ON pt.id = dp.pass_type
INNER JOIN anchored_passes as aps
ON aps.data_pass_id = dp.id
INNER JOIN simulation_passes as sp
Expand Down
9 changes: 6 additions & 3 deletions database/design.dbm
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
CAUTION: Do not modify this file unless you know what you are doing.
Unexpected results may occur if the code is changed deliberately.
-->
<dbmodel pgmodeler-ver="1.0.0-beta1" use-changelog="false" last-position="0,0" last-zoom="1" max-obj-count="31"
<dbmodel pgmodeler-ver="1.0.0-beta1" use-changelog="false" last-position="0,1100" last-zoom="1" max-obj-count="31"
default-owner="postgres"
layers="Default layer"
active-layers="0"
Expand Down Expand Up @@ -86,7 +86,7 @@ CAUTION: Do not modify this file unless you know what you are doing.
<role name="postgres"/>
</sequence>

<table name="data_passes" layers="0" collapse-mode="2" max-obj-count="10" z-value="0">
<table name="data_passes" layers="0" collapse-mode="2" max-obj-count="12" z-value="0">
<schema name="public"/>
<role name="postgres"/>
<tag name="primary_data_layer"/>
Expand Down Expand Up @@ -118,6 +118,9 @@ CAUTION: Do not modify this file unless you know what you are doing.
<column name="size">
<type name="real" length="0"/>
</column>
<column name="last_run" not-null="true">
<type name="integer" length="0"/>
</column>
<constraint name="data_passes_pkey" type="pk-constr" table="public.data_passes">
<columns names="id" ref-type="src-columns"/>
</constraint>
Expand Down Expand Up @@ -396,7 +399,7 @@ CAUTION: Do not modify this file unless you know what you are doing.
<schema name="public"/>
<role name="postgres"/>
<tag name="primary_data_layer"/>
<position x="1280" y="160"/>
<position x="1280" y="140"/>
<column name="id" not-null="true" sequence="public.simulation_passes_id_seq">
<type name="integer" length="0"/>
</column>
Expand Down
1 change: 1 addition & 0 deletions database/exported/create-tables.sql
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@ CREATE TABLE public.data_passes (
number_of_events integer,
software_version text,
size real,
last_run integer NOT NULL,
CONSTRAINT data_passes_pkey PRIMARY KEY (id),
CONSTRAINT dp_name_unique UNIQUE (name)
);
Expand Down
Binary file modified database/exported/design.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
8 changes: 6 additions & 2 deletions database/stored-sql-functionalities/procedures/insert_mc.sql
Original file line number Diff line number Diff line change
Expand Up @@ -52,13 +52,17 @@ BEGIN
IF dp_ids IS NOT NULL THEN
any_related_pass_found = TRUE;
foreach dp_id in array dp_ids loop
INSERT INTO anchored_passes(data_pass_id, sim_pass_id) values(dp_id, sp_id);
IF NOT EXISTS (SELECT * from anchored_passes where data_pass_id = dp_id AND sim_pass_id = sp_id) THEN
INSERT INTO anchored_passes(data_pass_id, sim_pass_id) values(dp_id, sp_id);
END IF;
END loop;
END IF;
END LOOP;
IF any_related_pass_found THEN
SELECT id INTO p_id FROM periods WHERE name = an_period;
INSERT INTO anchored_periods(period_id, sim_pass_id) VALUES(p_id, sp_id);
if not exists (select * from anchored_periods where period_id = p_id and sim_pass_id = sp_id) then
INSERT INTO anchored_periods(period_id, sim_pass_id) VALUES(p_id, sp_id);
end if;
END IF;
END LOOP;
ELSE
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,33 @@ BEGIN
if NOT EXISTS (SELECT * FROM runs WHERE run_number = _run_number) OR prod_id IS NULL THEN
RAISE EXCEPTION 'nulls %', now();
END IF;
INSERT INTO simulation_passes_runs(run_number, simulation_pass_id, qc) VALUES(_run_number, prod_id, null);
IF NOT EXISTS (SELECT * from simulation_passes_runs where run_number = _run_number and simulation_pass_id = prod_id ) then
INSERT INTO simulation_passes_runs(run_number, simulation_pass_id, qc) VALUES(_run_number, prod_id, null);
end if;
END;
$$;


-- Links every run of an MC (simulation) production to its simulation pass:
-- for each run number, ensure a bare run row exists, resolve the production
-- id by name, and insert the (run, simulation pass) association if absent.
-- Raises if the run row is still missing after insert_run or the production
-- name is unknown.
-- NOTE(review): keyword casing is inconsistent ("DEClARE"); PL/pgSQL keywords
-- are case-insensitive so this parses, but confirm the intended style.
create or replace procedure insert_mc_details(
_prod_name varchar,
_run_numbers integer[],
_period varchar
)

LANGUAGE plpgsql
AS $$
DEClARE prod_id int;
DECLARE _run_number integer;
BEGIN
foreach _run_number in array _run_numbers loop
-- Upsert a minimal run record; detailed attributes are unknown here.
call insert_run(_run_number, _period, null, null, null, null, null, null, null, ARRAY[]::varchar[], null, null);
SELECT id FROM simulation_passes INTO prod_id WHERE name = _prod_name;
-- Guard: run must exist and production name must resolve to an id.
if NOT EXISTS (SELECT * FROM runs WHERE run_number = _run_number) OR prod_id IS NULL THEN
RAISE EXCEPTION 'nulls %', now();
END IF;
-- Idempotent association insert: skip if the link already exists.
IF NOT EXISTS (SELECT * from simulation_passes_runs where run_number = _run_number and simulation_pass_id = prod_id ) then
INSERT INTO simulation_passes_runs(run_number, simulation_pass_id, qc) VALUES(_run_number, prod_id, null);
end if;
END LOOP;
END;
$$
16 changes: 13 additions & 3 deletions database/stored-sql-functionalities/procedures/insert_prod.sql
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ create or replace procedure insert_prod(
_ml text,
_number_of_events integer,
_softwar_version text,
_size real)
_size real,
_last_run integer)
LANGUAGE plpgsql
AS $$

Expand All @@ -33,7 +34,8 @@ BEGIN
ml,
number_of_events,
software_version,
size) values (
size,
last_run) values (
DEFAULT,
trg_period_id,
_name,
Expand All @@ -42,7 +44,15 @@ BEGIN
_ml,
_number_of_events,
_softwar_version,
_size);
_size,
_last_run);
ELSE
IF _last_run IS NOT NULL THEN
UPDATE data_passes SET last_run = _last_run WHERE id = dp_id;
END IF;
IF _number_of_events IS NOT NULL THEN
UPDATE data_passes SET number_of_events = _number_of_events WHERE id = dp_id;
END IF;
END IF;
end;
$$;
Loading