Merge pull request '5144-improveArc' (#5) from 5144-improveArc into master
CI status (gitea/vn-rfid/pipeline/head): the build for this commit failed — see pipeline details.

Reviewed-on: #5
This commit is contained in:
Alex Moreno 2024-06-05 12:19:33 +00:00
commit 11cf68b3ad
14 changed files with 5083 additions and 27 deletions

2
.gitignore vendored
View File

@ -1,2 +1,2 @@
node_modules node_modules
config.local.yml config.*.yml

View File

@ -3,6 +3,6 @@
// Carácter predeterminado de final de línea. // Carácter predeterminado de final de línea.
"files.eol": "\n", "files.eol": "\n",
"editor.codeActionsOnSave": { "editor.codeActionsOnSave": {
"source.fixAll.eslint": true "source.fixAll.eslint": "explicit"
} }
} }

View File

@ -20,4 +20,3 @@ $ npm run start | pino-pretty
For test. For test.
``` ```
DELETE expedition in vn.expeditionScan DELETE expedition in vn.expeditionScan
```

4
db/connectMaster.js Normal file
View File

@ -0,0 +1,4 @@
// Connection pool for the master database, used by the debug tooling to read
// real expedition/pallet data (see util/debugMissing.js).
// NOTE(review): always loads the 'production' config regardless of the
// process environment — confirm this is intentional for debug runs.
import mysql from 'mysql2/promise';
import getConfig from '../util/getConfig.js';
export default mysql.createPool(getConfig('production').db);

View File

@ -6,7 +6,7 @@
"main": "server.js", "main": "server.js",
"type": "module", "type": "module",
"scripts": { "scripts": {
"start": "nodemon ./server.js" "start": "nodemon ./server.js | pino-pretty "
}, },
"engines": { "engines": {
"node": ">=18", "node": ">=18",

4906
pnpm-lock.yaml Normal file

File diff suppressed because it is too large Load Diff

57
populate.sql Normal file
View File

@ -0,0 +1,57 @@
-- Seed/debug schema for the RFID arc reader (development environment).

-- Register one arc reader with no printer attached; `minimum` appears to be
-- the read-count threshold used by the stream consumer — TODO confirm.
INSERT INTO vn.arcRead (id, printerFk, ip, counter, error, minimum) VALUES(1, NULL, '10.1.16.1', 17, NULL, 5);

-- Raw per-read log: one row per tag read (antenna, attempt, RF parameters).
CREATE TABLE `rfidTest` (
`palletFk` int(11) DEFAULT NULL,
`expeditionFk` int(11) DEFAULT NULL,
`created` timestamp NULL DEFAULT current_timestamp(),
`peakRssi` int(11) DEFAULT NULL,
`antenna` int(11) DEFAULT NULL,
`attempt` int(11) DEFAULT NULL,
`power` int(11) DEFAULT NULL,
`sensitivity` int(11) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;

-- Aggregated snapshot of one read attempt (expedition list stored as text).
CREATE TABLE `algorithm` (
`palletFk` int(11) DEFAULT NULL,
`expeditionArray` text DEFAULT NULL,
`created` timestamp NULL DEFAULT current_timestamp(),
`expeditionCount` int(11) DEFAULT NULL,
`model` text DEFAULT NULL,
`attempt` int(11) DEFAULT NULL,
`power` int(11) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;

-- vn.trys definition: best-attempt diagnostics per pallet and power profile,
-- written by util/debugMissing.js (one row per palletFk+powerType).
CREATE TABLE `vn`.`trys` (
`palletFk` int(11) NOT NULL,
`missing` text DEFAULT NULL,
`powerType` varchar(255) NOT NULL DEFAULT 'V17',
`extra` varchar(100) DEFAULT NULL,
`antennaReads` text DEFAULT NULL,
`observationExtra` text DEFAULT NULL,
`observationMissing` text DEFAULT NULL,
`timestamp` timestamp NULL DEFAULT current_timestamp(),
`antenna1` int(11) DEFAULT NULL,
`antenna2` int(11) DEFAULT NULL,
`antenna3` int(11) DEFAULT NULL,
`antenna4` int(11) DEFAULT NULL,
`total` varchar(100) DEFAULT NULL,
PRIMARY KEY (`palletFk`,`powerType`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;

-- Guard: reject updates that would record MORE missing expeditions than the
-- stored attempt (debugMissing.js only keeps the best attempt).
-- NOTE(review): `missing` is a text column, so `>` compares lexicographically
-- ('9' > '10') — consider CAST(... AS SIGNED). Also, running this block
-- through a client that splits on ';' needs a DELIMITER change around the
-- trigger body — confirm how this file is executed.
CREATE DEFINER=`root`@`localhost` TRIGGER `vn`.`trys_beforeUpdate`
BEFORE UPDATE ON `trys`
FOR EACH ROW
BEGIN
IF (NEW.missing > OLD.missing) THEN
CALL util.throw('New missing greater than old');
END IF;
END
-- powerTypes [PW13, PW15, V17, V19]

View File

@ -7,10 +7,10 @@ console.logger = logger();
async function main(arcId) { async function main(arcId) {
const conf = getConfig(); const conf = getConfig();
for (let arc of await getArcs()) { for (let arc of await getArcs()) {
const config = Object.assign({}, conf, arc); const config = Object.assign({}, conf, arc);
if (arcId && arcId != arc.arcId) continue; if (arcId && arcId != arc.arcId) continue;
console.logger.info(`ARC_ID:${config.arcId} is running...`);
stream(config, e => { stream(config, e => {
console.logger.error(e); console.logger.error(e);
setTimeout(main, config.reconnectInterval, config.arcId); setTimeout(main, config.reconnectInterval, config.arcId);

View File

@ -4,7 +4,7 @@ import t from '../util/translator.js';
export default async(rfids, arcId) => { export default async(rfids, arcId) => {
try { try {
console.log(Array.from(rfids), arcId, null); console.log(Array.from(rfids));
await con.query(`CALL vn.expeditionPallet_build(JSON_ARRAY(?), ?, ?, @palletId);`, [Array.from(rfids), arcId, null]); await con.query(`CALL vn.expeditionPallet_build(JSON_ARRAY(?), ?, ?, @palletId);`, [Array.from(rfids), arcId, null]);
await counter(null, arcId); await counter(null, arcId);
} catch (error) { } catch (error) {

View File

@ -4,26 +4,27 @@ export default async data => {
const rfidsParsed = new Set(); const rfidsParsed = new Set();
const rfidsParsedExtended = []; const rfidsParsedExtended = [];
const RFID_PREFIX = 'AABB';
for (let crudeRfid of crudeRfids) { for (let crudeRfid of crudeRfids) {
if (crudeRfid && /{.*:{.*:.*}}/.test(crudeRfid)) { if (crudeRfid && /{.*:{.*:.*}}/.test(crudeRfid)) {
const jsonResult = JSON.parse(crudeRfid); const jsonResult = JSON.parse(crudeRfid);
let epcHex = jsonResult?.tagInventoryEvent?.epcHex; let epcHex = jsonResult?.tagInventoryEvent?.epcHex;
if (!epcHex) continue; if (!epcHex) continue;
if (epcHex.search('AABB') == -1) continue; if (epcHex.search(RFID_PREFIX) == -1) continue;
epcHex = epcHex.replace('AABB', ''); epcHex = epcHex.replace(RFID_PREFIX, '');
epcHex = epcHex.substring(0, 1) == 0 ? epcHex.substring(1) : epcHex; epcHex = epcHex.substring(0, 1) == 0 ? epcHex.substring(1) : epcHex;
const rfidParsed = { const rfidParsed = {
code: parseInt(epcHex), code: parseInt(epcHex),
created: jsonResult.timestamp, created: jsonResult.timestamp,
peakRssi: jsonResult.tagInventoryEvent.peakRssiCdbm, peakRssi: jsonResult.tagInventoryEvent.peakRssiCdbm,
count: 1, count: 1,
antenna: jsonResult.tagInventoryEvent.antennaPort antenna: jsonResult.tagInventoryEvent.antennaPort,
transmitPowerCdbm: jsonResult.tagInventoryEvent.transmitPowerCdbm
}; };
const rfidsParsedExtended = [];
rfidsParsedExtended.push(rfidParsed); rfidsParsedExtended.push(rfidParsed);
rfidsParsed.add(rfidParsed.code); rfidsParsed.add(rfidParsed.code);
} }
@ -31,3 +32,4 @@ export default async data => {
return {codes: rfidsParsed, extended: rfidsParsedExtended}; return {codes: rfidsParsed, extended: rfidsParsedExtended};
}; };

View File

@ -21,7 +21,7 @@ export default async(conf, cb) => {
rfidbuffer = new Set([...rfidbuffer, ...parsed.codes]); rfidbuffer = new Set([...rfidbuffer, ...parsed.codes]);
rfidbufferExtend = rfidbufferExtend.concat(parsed.extended); rfidbufferExtend = rfidbufferExtend.concat(parsed.extended);
debug({codes: rfidbuffer, extended: rfidbufferExtend}, conf); // debug({codes: rfidbuffer, extended: rfidbufferExtend}, conf);
if (rfidbuffer.size) { if (rfidbuffer.size) {
clearTimeout(interval); clearTimeout(interval);
@ -41,7 +41,6 @@ export default async(conf, cb) => {
if (!conf.minimum || rfidbuffer.size > conf.minimum) if (!conf.minimum || rfidbuffer.size > conf.minimum)
newPallet(rfidbuffer, conf.arcId); newPallet(rfidbuffer, conf.arcId);
rfidbuffer = new Set(); rfidbuffer = new Set();
rfidbufferExtend = []; rfidbufferExtend = [];
} }

93
util/debugMissing.js Normal file
View File

@ -0,0 +1,93 @@
import con from '../db/connect.js';
import conMaster from '../db/connectMaster.js';
import fs from 'node:fs';
/**
 * Persists a diagnostic snapshot of an arc read into `vn`.`trys` and dumps
 * today's rows to a dated text file.
 *
 * @param {Array<Set<number>>} uniqueRead - Unique expedition codes read, one
 *   Set per antenna; indexes 0-3 correspond to antennas 1-4.
 */
export default async uniqueRead => {
    // Union of every antenna's unique reads.
    const reads = new Set([
        ...uniqueRead[0],
        ...uniqueRead[1],
        ...uniqueRead[2],
        ...uniqueRead[3],
    ]);
    console.log('reads: ', [...reads][0]);

    // FIX: pass the codes as a real array so the driver expands `IN (?)` into
    // a value list. The previous `[...reads].join(',')` bound ONE string
    // ('1,2,3'), which MySQL coerces to the leading number, so the pallet
    // lookup silently ignored every expedition but the first.
    const [palletRows] = await conMaster.query(
        `SELECT palletFk FROM expeditionScan WHERE expeditionFk IN (?)`,
        [[...reads]]
    );
    const palletFk = palletRows[0]?.palletFk;
    console.log('palletFk: ', palletFk);
    if (!palletFk) return console.log('LA EXPEDICION NO esta en el pallet');

    // Expeditions actually scanned onto the pallet, restricted to packing
    // sites that have an RFID printer configured.
    let [realExpeditions] = await conMaster.query(
        `SELECT ep.id, e.id, ps.printerRfidFk, ps.code, es.palletFk
            FROM expeditionPallet ep
                JOIN expeditionScan es ON es.palletFk = ep.id
                JOIN expedition e ON e.id = es.expeditionFk
                JOIN host h ON h.code = e.hostFk COLLATE utf8mb3_unicode_ci
                JOIN packingSite ps ON ps.hostFk = h.id
            WHERE ep.id = ?
                AND ps.printerRfidFk`,
        [palletFk]
    );
    realExpeditions = realExpeditions.map(r => r.id);
    console.log('realExpeditions: ', realExpeditions.length);

    // `console.logger` is attached globally by the server entry point; fall
    // back to the plain console so this module also works standalone.
    if (realExpeditions.length != reads.size)
        (console.logger ?? console).warn('MISSING EXPEDITIONS');

    // Set lookups keep the diff O(n) instead of O(n²) Array.includes scans.
    const realSet = new Set(realExpeditions);
    const missing = realExpeditions.filter(x => !reads.has(x));
    const extra = [...reads].filter(x => !realSet.has(x));
    if (missing.length) console.warn('MISSING:', missing.length, missing);
    if (extra.length) console.warn('EXTRA:', extra.length, extra);

    try {
        const [[currentMissings]] = await con.query('SELECT missing FROM trys WHERE palletFk = ?', palletFk);
        // `missing` is stored as text; JS `<` coerces it numerically. Only
        // keep the attempt with the fewest missing expeditions — the trys
        // BEFORE UPDATE trigger enforces the same invariant server-side.
        if (currentMissings?.missing < missing.length) return console.log('PREVENT REPLACE', currentMissings.missing, missing.length);
        await con.query(
            `
            REPLACE trys
                SET palletFk = ?,
                    missing = ?,
                    extra = ?,
                    total = ?,
                    powerType = ?,
                    observationExtra = ?,
                    observationMissing = ?,
                    antenna1 = ?,
                    antenna2 = ?,
                    antenna3 = ?,
                    antenna4 = ?
            `,
            [
                palletFk,
                missing.length,
                extra.length,
                realExpeditions.length,
                'VDT3', // power-profile label for this run — TODO confirm
                extra.join(','),
                missing.join(','),
                uniqueRead[0].size,
                uniqueRead[1].size,
                uniqueRead[2].size,
                uniqueRead[3].size,
            ]
        );
        await saveTable();
    } catch (e) {
        console.log('error debugging', palletFk, e);
    }
};
/**
 * Dumps today's `trys` rows to a `<YYYY-MM-DD>.txt` file in the working
 * directory, one JSON object per line. Does nothing when no rows exist for
 * the current date.
 */
async function saveTable() {
    const [rows] = await con.query(`SELECT * FROM trys WHERE DATE(timestamp) = CURDATE()`);
    if (!rows.length) return;
    const today = new Date().toISOString().split('T')[0];
    const out = fs.createWriteStream(`${today}.txt`);
    out.on('error', err => console.error(err));
    for (const row of rows) out.write(`${JSON.stringify(row)}\n`);
    out.end();
}

View File

@ -1,25 +1,21 @@
export default (parsed, conf) => {
import debugMissing from './debugMissing.js';
export default async(parsed, conf) => {
if (conf.env != 'dev') return; if (conf.env != 'dev') return;
// TOTAL
console.log('TOTAL BUFFER: ', parsed.codes.size);
// TOTAL READS BY ANTENNA // TOTAL READS BY ANTENNA
const totalRead = [0, 0, 0, 0]; const totalRead = [0, 0, 0, 0];
for (let read of parsed.extended) for (let read of parsed.extended)
totalRead[read.antenna - 1]++; totalRead[read.antenna - 1]++;
console.log('TOTAL READ ANTENNA:', totalRead); console.log('TOTAL READ ANTENNA:', totalRead);
// UNIQUE READS BY ANTENNA // UNIQUE READS BY ANTENNA
const uniqueRead = [new Set(), new Set(), new Set(), new Set()]; const uniqueRead = [new Set(), new Set(), new Set(), new Set()];
for (let read of parsed.extended) for (let read of parsed.extended)
uniqueRead[read.antenna - 1].add(read.code); uniqueRead[read.antenna - 1].add(read.code);
console.log('UNIQUE READ ANTENNA:', uniqueRead[0].size, uniqueRead[1].size, uniqueRead[2].size, uniqueRead[3].size); console.log('UNIQUE READ ANTENNA:', uniqueRead[0].size, uniqueRead[1].size, uniqueRead[2].size, uniqueRead[3].size);
// WARNING IF AN ANTENNA READS LESS THAN IT SHOULD debugMissing(uniqueRead);
for (const [index, set] of uniqueRead.entries()) {
if (((set.size * 100) / parsed.codes.size) < 25)
console.logger.warn(`[ANTENNA]: ${index + 1} ONLY ${set.size}`);
}
console.log('----------------------------------------------------------------'); console.log('----------------------------------------------------------------');
}; };

View File

@ -2,10 +2,10 @@ import yml from 'require-yml';
import path from 'path'; import path from 'path';
import fs from 'fs-extra'; import fs from 'fs-extra';
export default function getConfig() { export default function getConfig(env = 'local') {
const {pathname: root} = new URL('../', import.meta.url); const {pathname: root} = new URL('../', import.meta.url);
let conf = yml(path.join(root, 'config.yml')); let conf = yml(path.join(root, 'config.yml'));
const localConfFile = path.join(root, 'config.local.yml'); const localConfFile = path.join(root, `config.${env}.yml`);
if (fs.existsSync(localConfFile)) if (fs.existsSync(localConfFile))
conf = Object.assign({}, conf, yml(localConfFile)); conf = Object.assign({}, conf, yml(localConfFile));