Compare commits

...

42 Commits

Author SHA1 Message Date
Alex Moreno d29fd918cc Merge branch 'master' of https://gitea.verdnatura.es/verdnatura/vn-rfid
gitea/vn-rfid/pipeline/head This commit looks good Details
2024-07-25 10:38:53 +02:00
Alex Moreno 842c141e13 feat: set to 0 counter when createPallet or not 2024-07-25 10:38:52 +02:00
Guillermo Bonet 16f6d60006 feat: refs #5144 Migration to kube
gitea/vn-rfid/pipeline/head This commit looks good Details
2024-06-13 09:04:14 +02:00
Alex Moreno 11cf68b3ad Merge pull request '5144-improveArc' (#5) from 5144-improveArc into master
gitea/vn-rfid/pipeline/head There was a failure building this commit Details
Reviewed-on: #5
2024-06-05 12:19:33 +00:00
Alex Moreno 91dd30729e remove console.log and comments
gitea/vn-rfid/pipeline/pr-master This commit looks good Details
2024-05-31 12:04:22 +02:00
Alex Moreno f4c5c39fab remove console.log and comments
gitea/vn-rfid/pipeline/pr-master This commit looks good Details
2024-05-31 12:02:39 +02:00
Alex Moreno c4654dc445 remove console.log and comments 2024-05-31 12:01:40 +02:00
Alex Moreno 08b37c1261 remove console.log and comments 2024-05-31 11:59:57 +02:00
Alex Moreno 7bce9be25c Merge branch '5144-improveArc' of https://gitea.verdnatura.es/verdnatura/vn-rfid into 5144-improveArc 2024-05-31 11:57:39 +02:00
Alex Moreno 4f2cfd032a remove console.log and comments 2024-05-31 11:57:38 +02:00
Pablo Natek 2b0ee73c56 debug 2024-04-25 15:55:31 +02:00
Pablo Natek 51c4f8eafe feat: select first pallet 2024-04-18 07:17:52 +02:00
Alex Moreno 2a7fa0eaef feat: refs #5144 saveTable 2024-04-10 13:01:49 +02:00
Alex Moreno 97b778245f refs #5144 populate 2024-04-04 13:52:04 +02:00
Alex Moreno 24aba59831 refs #5144 feat: debug missings 2024-03-04 11:42:48 +01:00
Alex Moreno 036b54ae97 refs #5144 refactor: improvement attempt
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-11-30 13:25:26 +01:00
Alex Moreno d30e157420 Deploy
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-05-30 15:11:51 +02:00
Alex Moreno 27837412f6 Merge pull request 'refs #5144 add minimum filter' (#4) from 5144-setMinimum into master
gitea/vn-rfid/pipeline/head This commit looks good Details
Reviewed-on: #4
2023-05-18 11:30:42 +00:00
Alex Moreno 43c793774e refs #5144 add minimum filter
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-05-02 10:23:07 +02:00
Alex Moreno 6ecce21a17 refs #5473 actualizado node a la version 18
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-04-25 11:49:09 +02:00
Alex Moreno 33a99ca20a a
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-04-24 12:42:00 +02:00
Vicent Llopis 959480b391 test
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-04-18 12:19:55 +02:00
Vicent Llopis abf2c3d81e tets
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-04-06 14:27:12 +02:00
Vicent Llopis 0ef0f2bfc0 fix: añadido segunda parametro y cambiado config
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-30 13:43:03 +02:00
Vicent Llopis 83af061bf0 production
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:57:55 +01:00
Vicent Llopis b5fd9fa50e config local
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:43:47 +01:00
Vicent Llopis 7e5693582f config in production
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:31:39 +01:00
Vicent Llopis 418f2894ff inserta todo el error en la vn.arcRead
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:17:56 +01:00
Vicent Llopis 3d3b5e61f1 add console.log
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:03:38 +01:00
Vicent Llopis 9c60b5b8c1 delete database
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-07 12:08:35 +01:00
Alex Moreno 4518ec100a fix: add util folder in dockerfile
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-02-23 14:26:57 +01:00
Alex Moreno d0c1bb073b Merge pull request 'dev to master' (#3) from dev into master
gitea/vn-rfid/pipeline/head This commit looks good Details
Reviewed-on: #3
2023-02-23 13:20:36 +00:00
Alex Moreno 87a3bc3e8f set error to null when read
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-02-09 15:19:42 +01:00
Alex Moreno 345bc47df1 feat: lastCounter
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-02-07 15:23:25 +01:00
Alex Moreno 17b8ee9194 Merge pull request '5130-arc_counter' (#2) from 5130-arc_counter into dev
gitea/vn-rfid/pipeline/head This commit looks good Details
Reviewed-on: #2
2023-02-06 08:59:02 +00:00
Alex Moreno 00262e5dd0 refactor newPallet and use translator
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-02-03 13:20:00 +01:00
Alex Moreno 94296c23b9 fix: update counter
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-02-02 14:51:25 +01:00
Alex Moreno 99d6f81671 fix bugs
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-01-31 14:59:05 +01:00
Alex Moreno ebbc27b56d feat: set null when stop
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-01-27 12:45:25 +01:00
Alex Moreno 8142dfb430 feat: add counter
gitea/vn-rfid/pipeline/head This commit looks good Details
refs #5130
2023-01-27 12:40:09 +01:00
Joan Sanchez 544ef24ac8 Merge pull request 'refactor' (#1) from 5144-arc-start into master
gitea/vn-rfid/pipeline/head This commit looks good Details
Reviewed-on: #1
2023-01-27 08:18:27 +00:00
Alex Moreno 690e7ed8f8 refactor
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-01-26 15:27:58 +01:00
23 changed files with 5222 additions and 129 deletions

2
.gitignore vendored
View File

@ -1,2 +1,2 @@
node_modules
config.local.yml
config.*.yml

View File

@ -3,6 +3,6 @@
// Carácter predeterminado de final de línea.
"files.eol": "\n",
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true
"source.fixAll.eslint": "explicit"
}
}

View File

@ -4,6 +4,10 @@ COPY package.json ./
RUN npm install
COPY db db
COPY src src
COPY config.yml ./
COPY server.js ./
COPY util util
COPY \
server.js \
config.yml \
npm-config.npmrc \
./
CMD ["node", "server.js"]

58
Jenkinsfile vendored
View File

@ -2,64 +2,54 @@
pipeline {
agent any
options {
disableConcurrentBuilds()
}
environment {
PROJECT_NAME = 'vn-rfid'
STACK_NAME = "${env.PROJECT_NAME}-${env.BRANCH_NAME}"
}
stages {
stage('Checkout') {
stage('Setup') {
steps {
script {
switch (env.BRANCH_NAME) {
case 'master':
env.NODE_ENV = 'production'
break
}
}
setEnv()
echo "NODE_NAME: ${env.NODE_NAME}"
echo "WORKSPACE: ${env.WORKSPACE}"
}
}
stage('Build') {
when { anyOf {
when {
branch 'master'
}}
}
environment {
CREDENTIALS = credentials('docker-registry')
}
steps {
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
dockerBuild()
}
}
stage('Deploy') {
when { anyOf {
when {
branch 'master'
}}
environment {
DOCKER_HOST = "${env.SWARM_HOST}"
}
steps {
sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
withKubeConfig([
serverUrl: "$KUBERNETES_API",
credentialsId: 'kubernetes',
namespace: 'vn-rfid'
]) {
sh 'kubectl set image deployment/vn-rfid vn-rfid=$REGISTRY/vn-rfid:$VERSION'
}
}
}
}
post {
always {
script {
if (!env.COMMITTER_EMAIL || currentBuild.currentResult == 'SUCCESS') return;
try {
mail(
to: env.COMMITTER_EMAIL,
subject: "Pipeline: ${env.JOB_NAME} (${env.BUILD_NUMBER}): ${currentBuild.currentResult}",
body: "Check status at ${env.BUILD_URL}"
)
} catch (e) {
echo e.toString()
}
}
setEnv()
sendEmail()
}
}
}
}

View File

@ -11,9 +11,12 @@ $ sudo npm install -g pino-pretty
Launch application in developer environment.
```
$ npm run start
or
$ npm run start | pino-pretty
```
For test.
```
DELETE expedition in vn.expeditionScan
```
DELETE expedition in vn.expeditionScan

View File

@ -1,13 +1,14 @@
arcId: 1
port: 1234
ip: 1.2.3.4
env: dev
interval: 1000
interval: 3000
reconnectInterval: 5000
counterInterval: 1000
db:
host: host
port: 3307
database: srt
user: user
password: password
multipleStatements: true
multipleStatements: false
insecureAuth: true

4
db/connectMaster.js Normal file
View File

@ -0,0 +1,4 @@
import mysql from 'mysql2/promise';
import getConfig from '../util/getConfig.js';

// Connection pool against the production database, built from
// config.production.yml (see util/getConfig.js). Imported by
// util/debugMissing.js to read master expedition data even when the
// local environment points at a test database.
export default mysql.createPool(getConfig('production').db);

6
db/querys/getArcs.js Normal file
View File

@ -0,0 +1,6 @@
import con from '../connect.js';
export default async() => {
const [arcs] = await con.query(`SELECT id as arcId, printerFk, ip, minimum FROM vn.arcRead;`);
return arcs;
};

View File

@ -1,20 +1,7 @@
version: '3.7'
services:
main:
image: registry.verdnatura.es/vn-rfid:${BRANCH_NAME:?}
image: registry.verdnatura.es/vn-rfid:${VERSION:?}
build:
context: .
dockerfile: Dockerfile
ports:
- 8888
configs:
- source: config
target: /app/config.local.yml
deploy:
placement:
constraints:
- node.role == worker
configs:
config:
external: true
name: vn-rfid_config
dockerfile: Dockerfile

1
npm-config.npmrc Normal file
View File

@ -0,0 +1 @@
engine-strict=true

View File

@ -3,10 +3,14 @@
"version": "1.0.0",
"author": "Verdnatura Levante SL",
"description": "rfid backend",
"main": "index.js",
"main": "server.js",
"type": "module",
"scripts": {
"start": "nodemon ./server.js"
"start": "nodemon ./server.js | pino-pretty "
},
"engines": {
"node": ">=18",
"npm": ">=8"
},
"keywords": [],
"license": "GPL-3.0",

4906
pnpm-lock.yaml Normal file

File diff suppressed because it is too large Load Diff

57
populate.sql Normal file
View File

@ -0,0 +1,57 @@
-- Seed and schema script for local RFID testing.

-- Seed one arc reader (id 1); counter 17, no error, minimum-read threshold 5.
INSERT INTO vn.arcRead (id, printerFk, ip, counter, error, minimum) VALUES(1, NULL, '10.1.16.1', 17, NULL, 5);

-- Raw per-tag read log: one row per antenna read.
CREATE TABLE `rfidTest` (
`palletFk` int(11) DEFAULT NULL,
`expeditionFk` int(11) DEFAULT NULL,
`created` timestamp NULL DEFAULT current_timestamp(),
`peakRssi` int(11) DEFAULT NULL,
`antenna` int(11) DEFAULT NULL,
`attempt` int(11) DEFAULT NULL,
`power` int(11) DEFAULT NULL,
`sensitivity` int(11) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;

-- Aggregated result per pallet-build attempt.
CREATE TABLE `algorithm` (
`palletFk` int(11) DEFAULT NULL,
`expeditionArray` text DEFAULT NULL,
`created` timestamp NULL DEFAULT current_timestamp(),
`expeditionCount` int(11) DEFAULT NULL,
`model` text DEFAULT NULL,
`attempt` int(11) DEFAULT NULL,
`power` int(11) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;

-- vn.trys definition: per-pallet/per-power-type debug record of
-- missing/extra expeditions and per-antenna unique read counts
-- (written by util/debugMissing.js).
CREATE TABLE `vn`.`trys` (
`palletFk` int(11) NOT NULL,
`missing` text DEFAULT NULL,
`powerType` varchar(255) NOT NULL DEFAULT 'V17',
`extra` varchar(100) DEFAULT NULL,
`antennaReads` text DEFAULT NULL,
`observationExtra` text DEFAULT NULL,
`observationMissing` text DEFAULT NULL,
`timestamp` timestamp NULL DEFAULT current_timestamp(),
`antenna1` int(11) DEFAULT NULL,
`antenna2` int(11) DEFAULT NULL,
`antenna3` int(11) DEFAULT NULL,
`antenna4` int(11) DEFAULT NULL,
`total` varchar(100) DEFAULT NULL,
PRIMARY KEY (`palletFk`,`powerType`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;

-- Guard: reject updates that would report more missing expeditions than
-- the stored attempt (debugMissing.js also checks this before REPLACE).
-- NOTE(review): the compound BEGIN...END body contains semicolons; running
-- this via the mysql CLI needs a DELIMITER override — confirm how it is loaded.
CREATE DEFINER=`root`@`localhost` TRIGGER `vn`.`trys_beforeUpdate`
BEFORE UPDATE ON `trys`
FOR EACH ROW
BEGIN
IF (NEW.missing > OLD.missing) THEN
CALL util.throw('New missing greater than old');
END IF;
END
-- powerTypes [PW13, PW15, V17, V19]

View File

@ -1,15 +1,21 @@
import stream from './src/stream.js';
import getConfig from './util/getConfig.js';
import logger from 'pino';
import getArcs from './db/querys/getArcs.js';
console.logger = logger();
function main() {
async function main(arcId) {
const conf = getConfig();
stream(conf, e => {
console.logger.error(e);
setTimeout(main, conf.reconnectInterval);
});
for (let arc of await getArcs()) {
const config = Object.assign({}, conf, arc);
if (arcId && arcId != arc.arcId) continue;
console.logger.info(`ARC_ID:${config.arcId} is running...`);
stream(config, e => {
console.logger.error(e);
setTimeout(main, config.reconnectInterval, config.arcId);
});
}
}
main();

15
src/counter.js Normal file
View File

@ -0,0 +1,15 @@
import con from '../db/connect.js';

// Last counter value reported, keyed by arcId. server.js runs one stream
// per arc, so a single module-level scalar would let one arc's updates
// suppress another arc's (the original bug); a Map isolates them.
const lastCounters = new Map();

/**
 * Persists the current read-buffer size for an arc and clears its error.
 * Skips the UPDATE when the size is unchanged since the last call for
 * that same arc, to avoid redundant writes on every counter tick.
 *
 * @param {number} size - Number of unique RFID codes currently buffered.
 * @param {number} arcId - Identifier of the arc (vn.arcRead.id).
 */
export default async(size, arcId) => {
    if (lastCounters.get(arcId) === size) return;
    console.logger.info(`COUNTER: SIZE:${size} ARC_ID:${arcId}`);
    await con.query(`
        UPDATE vn.arcRead
            SET counter = ?,
                error = NULL
            WHERE id = ?;
    `, [size, arcId]);
    lastCounters.set(arcId, size);
};

View File

@ -1,22 +1,12 @@
import con from '../db/connect.js';
import t from '../util/translator.js';
export default async rfids => {
let response;
if (!rfids.length) return console.logger.warning({error: 'NOT_PARSED_RFIDS', rfids});
const codes = new Set();
for (let rfid of rfids)
codes.add(parseInt(rfid.code));
console.logger.info('CALL PALLET_BUILD', codes, ' TOTAL: ', codes.size);
response = await con.query(`CALL vn.expeditionPallet_build(JSON_ARRAY(?), ?, @pallet);SELECT @pallet;`, [Array.from(codes), 19294]);
const pallet = response[0][1][0]['@pallet'];
if (pallet) {
console.logger.info('PRINTING', pallet);
await con.query(`CALL vn.expeditionPallet_printLabel(?);`, [pallet]);
} else
console.logger.info({error: 'ERROR_CREATING_PALLET', expeditions: rfids});
export default async(rfids, arcId) => {
try {
console.log(Array.from(rfids));
await con.query(`CALL vn.expeditionPallet_build(JSON_ARRAY(?), ?, ?, @palletId);`, [Array.from(rfids), arcId, null]);
} catch (error) {
console.log(error);
await con.query(`UPDATE vn.arcRead SET error = ?, counter = NULL WHERE id = ?;`, [t(error.sqlMessage), arcId]);
}
};

View File

@ -2,28 +2,34 @@ export default async data => {
data = data.toString();
const crudeRfids = data.split('\n');
const rfidsParsed = [];
const rfidsParsed = new Set();
const rfidsParsedExtended = [];
const RFID_PREFIX = 'AABB';
for (let crudeRfid of crudeRfids) {
if (crudeRfid && /{.*:{.*:.*}}/.test(crudeRfid)) {
const jsonResult = JSON.parse(crudeRfid);
let epcHex = jsonResult?.tagInventoryEvent?.epcHex;
if (!epcHex) return;
if (epcHex.search('AABB') == -1) continue;
if (!epcHex) continue;
if (epcHex.search(RFID_PREFIX) == -1) continue;
epcHex = epcHex.replace('AABB', '');
epcHex = epcHex.replace(RFID_PREFIX, '');
epcHex = epcHex.substring(0, 1) == 0 ? epcHex.substring(1) : epcHex;
const rfidParsed = {
code: parseInt(epcHex),
created: jsonResult.timestamp,
peakRssi: jsonResult.tagInventoryEvent.peakRssiCdbm,
count: 1,
antenna: jsonResult.tagInventoryEvent.antennaPort
antenna: jsonResult.tagInventoryEvent.antennaPort,
transmitPowerCdbm: jsonResult.tagInventoryEvent.transmitPowerCdbm
};
rfidsParsed.push(rfidParsed);
rfidsParsedExtended.push(rfidParsed);
rfidsParsed.add(rfidParsed.code);
}
}
return rfidsParsed;
return {codes: rfidsParsed, extended: rfidsParsedExtended};
};

View File

@ -1,12 +1,15 @@
import got from 'got';
import rfidParser from './rfidParser.js';
import newPallet from './newPallet.js';
import debug from '../util/debugStream.js';
import counter from './counter.js';
let interval;
let counterInterval;
export default async(conf, cb) => {
let rfidbuffer = [];
let rfidbufferSet = [new Set(), new Set(), new Set(), new Set()];
let rfidbuffer = new Set();
let rfidbufferExtend = [];
const stream = got.stream(`http://${conf.ip}/api/v1/data/stream`);
@ -14,15 +17,19 @@ export default async(conf, cb) => {
.on('data', async value => {
const parsed = await rfidParser(value);
if (parsed)
rfidbuffer = rfidbuffer.concat(parsed);
if (!parsed.codes.size) return;
rfidbuffer = new Set([...rfidbuffer, ...parsed.codes]);
rfidbufferExtend = rfidbufferExtend.concat(parsed.extended);
debug();
// debug({codes: rfidbuffer, extended: rfidbufferExtend}, conf);
if (rfidbuffer && rfidbuffer.length && parsed && parsed.length) {
clearInterval(interval);
if (rfidbuffer.size) {
clearTimeout(interval);
interval = null;
interval = setInterval(createPallet, conf.interval);
interval = setTimeout(createPallet, conf.interval);
if (!counterInterval)
counterInterval = setTimeout(counterIntervalManager, conf.counterInterval);
}
})
.on('error', e => {
@ -30,34 +37,17 @@ export default async(conf, cb) => {
});
function createPallet() {
clearInterval(interval); // try remove
if (!rfidbuffer.length) return;
newPallet(rfidbuffer);
rfidbuffer = [];
rfidbufferSet = [new Set(), new Set(), new Set(), new Set()];
clearTimeout(interval);
if (!conf.minimum || rfidbuffer.size > conf.minimum)
newPallet(rfidbuffer, conf.arcId);
rfidbuffer = new Set();
rfidbufferExtend = [];
counterIntervalManager();
}
function debug() {
if (conf.env != 'dev') return;
let totalBuffer = rfidbuffer.map(rfid => rfid.code);
let totalBufferSet = new Set(totalBuffer);
console.log('TOTAL BUFFER: ', totalBufferSet.size);
const totalRead = [0, 0, 0, 0];
for (let buffer of rfidbuffer)
totalRead[buffer.antenna - 1]++;
console.log('TOTAL READ ANTENNA:', totalRead);
for (let buffer of parsed)
rfidbufferSet[buffer.antenna - 1].add(buffer.code);
console.log('UNIQUE READ ANTENNA:', rfidbufferSet[0].size, rfidbufferSet[1].size, rfidbufferSet[2].size, rfidbufferSet[3].size);
for (const [index, set] of rfidbufferSet.entries()) {
if (((set.size * 100) / totalBufferSet.size) < 25)
console.log('[WARNING_ANTENNA]: ', index, ' ONLY ', set.size);
}
console.log('----------------------------------------------------------------');
function counterIntervalManager() {
counterInterval = null;
counter(rfidbuffer.size, conf.arcId);
}
};
50;

93
util/debugMissing.js Normal file
View File

@ -0,0 +1,93 @@
import con from '../db/connect.js';
import conMaster from '../db/connectMaster.js';
import fs from 'node:fs';
/**
 * Compares the expeditions read by the four antennas against the
 * expeditions actually scanned onto the pallet (master DB), then records
 * missing/extra counts per pallet in the local `trys` table and dumps
 * today's rows to a file.
 *
 * @param {Set<number>[]} uniqueRead - Unique expedition codes per antenna (indexes 0-3).
 */
export default async uniqueRead => {
    // Union of every antenna's unique reads.
    const reads = new Set([
        ...uniqueRead[0],
        ...uniqueRead[1],
        ...uniqueRead[2],
        ...uniqueRead[3],
    ]);
    console.log('reads: ', [...reads][0]);

    // FIX: pass the array itself so mysql2 expands it inside IN (...).
    // The original joined the ids into ONE string ('1,2,3'), which MySQL
    // coerces to a single number, so IN matched at most the first id.
    let [[palletFk]] = await conMaster.query(
        `SELECT palletFk FROM expeditionScan WHERE expeditionFk IN (?)`,
        [[...reads]]
    );
    palletFk = palletFk?.palletFk;
    console.log('palletFk: ', palletFk);
    if (!palletFk) return console.log('LA EXPEDICION NO esta en el pallet');

    // Expeditions that really belong to this pallet, restricted to
    // RFID-enabled packing sites (ps.printerRfidFk not null/zero).
    let [realExpeditions] = await conMaster.query(
        `SELECT ep.id, e.id, ps.printerRfidFk, ps.code, es.palletFk
            FROM expeditionPallet ep
                JOIN expeditionScan es ON es.palletFk = ep.id
                JOIN expedition e ON e.id = es.expeditionFk
                JOIN host h ON h.code = e.hostFk COLLATE utf8mb3_unicode_ci
                JOIN packingSite ps ON ps.hostFk = h.id
            WHERE ep.id = ?
                AND ps.printerRfidFk`,
        [palletFk]
    );
    // NOTE(review): both ep.id and e.id are selected; mysql2 keeps the last
    // duplicate column, so r.id is the expedition id — confirm intended.
    realExpeditions = realExpeditions.map(r => r.id);
    console.log('realExpeditions: ', realExpeditions.length);

    if (realExpeditions.length !== reads.size)
        console.logger.warn('MISSING EXPEDITIONS');

    // missing = on the pallet but never read; extra = read but not on the pallet.
    const readList = [...reads];
    const missing = realExpeditions.filter(x => !readList.includes(x));
    const extra = readList.filter(x => !realExpeditions.includes(x));
    if (missing.length) console.warn('MISSING:', missing.length, missing);
    if (extra.length) console.warn('EXTRA:', extra.length, extra);

    try {
        // Never overwrite a previous attempt that had fewer missing reads
        // (the vn.trys trigger also enforces this server-side).
        const [[currentMissings]] = await con.query('SELECT missing FROM trys WHERE palletFk = ?', palletFk);
        if (currentMissings?.missing < missing.length) return console.log('PREVENT REPLACE', currentMissings.missing, missing.length);
        await con.query(
            `
            REPLACE trys
                SET palletFk = ?,
                    missing = ?,
                    extra = ?,
                    total = ?,
                    powerType = ?,
                    observationExtra = ?,
                    observationMissing = ?,
                    antenna1 = ?,
                    antenna2 = ?,
                    antenna3 = ?,
                    antenna4 = ?
            `,
            [
                palletFk,
                missing.length,
                extra.length,
                realExpeditions.length,
                'VDT3',
                extra.join(','),
                missing.join(','),
                uniqueRead[0].size,
                uniqueRead[1].size,
                uniqueRead[2].size,
                uniqueRead[3].size,
            ]
        );
        await saveTable();
    } catch (e) {
        console.log('error debugging', palletFk, e);
    }
};
// Dumps today's `trys` rows to a `<YYYY-MM-DD>.txt` file in the working
// directory, one JSON object per line. No-op when there are no rows.
async function saveTable() {
    const [rows] = await con.query(`SELECT * FROM trys WHERE DATE(timestamp) = CURDATE()`);
    const date = new Date().toISOString().split('T')[0];
    if (!rows.length) return;

    const out = fs.createWriteStream(`${date}.txt`);
    out.on('error', err => console.error(err));
    for (const row of rows)
        out.write(JSON.stringify(row) + '\n');
    out.end();
}

21
util/debugStream.js Normal file
View File

@ -0,0 +1,21 @@
import debugMissing from './debugMissing.js';
/**
 * Dev-only diagnostics for one parsed read batch: prints per-antenna read
 * totals and unique-code counts, then delegates to debugMissing() to
 * compare the reads against the pallet's real expeditions.
 *
 * @param {{extended: Array<{code: number, antenna: number}>}} parsed - Output of rfidParser.
 * @param {object} conf - Arc configuration; only runs when conf.env is 'dev'.
 */
export default async(parsed, conf) => {
    if (conf.env !== 'dev') return;

    // TOTAL READS BY ANTENNA (antenna ports are 1-based, hence the -1).
    const totalRead = [0, 0, 0, 0];
    for (const read of parsed.extended)
        totalRead[read.antenna - 1]++;
    console.log('TOTAL READ ANTENNA:', totalRead);

    // UNIQUE READS BY ANTENNA
    const uniqueRead = [new Set(), new Set(), new Set(), new Set()];
    for (const read of parsed.extended)
        uniqueRead[read.antenna - 1].add(read.code);
    console.log('UNIQUE READ ANTENNA:', uniqueRead[0].size, uniqueRead[1].size, uniqueRead[2].size, uniqueRead[3].size);

    // NOTE(review): floating promise — rejections from debugMissing's
    // master-DB queries are unhandled here; consider awaiting/catching.
    debugMissing(uniqueRead);
    console.log('----------------------------------------------------------------');
};

View File

@ -2,10 +2,10 @@ import yml from 'require-yml';
import path from 'path';
import fs from 'fs-extra';
export default function getConfig() {
export default function getConfig(env = 'local') {
const {pathname: root} = new URL('../', import.meta.url);
let conf = yml(path.join(root, 'config.yml'));
const localConfFile = path.join(root, 'config.local.yml');
const localConfFile = path.join(root, `config.${env}.yml`);
if (fs.existsSync(localConfFile))
conf = Object.assign({}, conf, yml(localConfFile));

1
util/locale/es.yml Normal file
View File

@ -0,0 +1 @@
TRUCK_NOT_AVAILABLE: No hay un camión disponible

8
util/translator.js Normal file
View File

@ -0,0 +1,8 @@
import yml from 'require-yml';
import path from 'path';
// Lazily-loaded translation table (expression -> Spanish message).
// Cached so the YAML file is read and parsed once per process instead of
// on every translated error (the original re-read it on each call).
let translations;

/**
 * Translates an error expression to its localized message, falling back
 * to the expression itself when no translation exists in locale/es.yml.
 *
 * @param {string} expression - Message key (e.g. a SQL error message).
 * @returns {string} Translated message, or the original expression.
 */
export default function t(expression) {
    if (!translations) {
        const {pathname: root} = new URL('./locale', import.meta.url);
        translations = yml(path.join(root, 'es.yml')) || {};
    }
    return translations[expression] || expression;
}