Compare commits

...

32 Commits
dev ... master

Author SHA1 Message Date
Alex Moreno d29fd918cc Merge branch 'master' of https://gitea.verdnatura.es/verdnatura/vn-rfid
gitea/vn-rfid/pipeline/head This commit looks good Details
2024-07-25 10:38:53 +02:00
Alex Moreno 842c141e13 feat: set to 0 counter when createPallet or not 2024-07-25 10:38:52 +02:00
Guillermo Bonet 16f6d60006 feat: refs #5144 Migration to kube
gitea/vn-rfid/pipeline/head This commit looks good Details
2024-06-13 09:04:14 +02:00
Alex Moreno 11cf68b3ad Merge pull request '5144-improveArc' (#5) from 5144-improveArc into master
gitea/vn-rfid/pipeline/head There was a failure building this commit Details
Reviewed-on: #5
2024-06-05 12:19:33 +00:00
Alex Moreno 91dd30729e remove console.log and comments
gitea/vn-rfid/pipeline/pr-master This commit looks good Details
2024-05-31 12:04:22 +02:00
Alex Moreno f4c5c39fab remove console.log and comments
gitea/vn-rfid/pipeline/pr-master This commit looks good Details
2024-05-31 12:02:39 +02:00
Alex Moreno c4654dc445 remove console.log and comments 2024-05-31 12:01:40 +02:00
Alex Moreno 08b37c1261 remove console.log and comments 2024-05-31 11:59:57 +02:00
Alex Moreno 7bce9be25c Merge branch '5144-improveArc' of https://gitea.verdnatura.es/verdnatura/vn-rfid into 5144-improveArc 2024-05-31 11:57:39 +02:00
Alex Moreno 4f2cfd032a remove console.log and comments 2024-05-31 11:57:38 +02:00
Pablo Natek 2b0ee73c56 debug 2024-04-25 15:55:31 +02:00
Pablo Natek 51c4f8eafe feat: select first pallet 2024-04-18 07:17:52 +02:00
Alex Moreno 2a7fa0eaef feat: refs #5144 saveTable 2024-04-10 13:01:49 +02:00
Alex Moreno 97b778245f refs #5144 populate 2024-04-04 13:52:04 +02:00
Alex Moreno 24aba59831 refs #5144 feat: debug missings 2024-03-04 11:42:48 +01:00
Alex Moreno 036b54ae97 refs #5144 refactor: improvement attempt
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-11-30 13:25:26 +01:00
Alex Moreno d30e157420 Deploy
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-05-30 15:11:51 +02:00
Alex Moreno 27837412f6 Merge pull request 'refs #5144 add minimum filter' (#4) from 5144-setMinimum into master
gitea/vn-rfid/pipeline/head This commit looks good Details
Reviewed-on: #4
2023-05-18 11:30:42 +00:00
Alex Moreno 43c793774e refs #5144 add minimum filter
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-05-02 10:23:07 +02:00
Alex Moreno 6ecce21a17 refs #5473 actualizado node a la version 18
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-04-25 11:49:09 +02:00
Alex Moreno 33a99ca20a a
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-04-24 12:42:00 +02:00
Vicent Llopis 959480b391 test
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-04-18 12:19:55 +02:00
Vicent Llopis abf2c3d81e tets
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-04-06 14:27:12 +02:00
Vicent Llopis 0ef0f2bfc0 fix: añadido segunda parametro y cambiado config
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-30 13:43:03 +02:00
Vicent Llopis 83af061bf0 production
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:57:55 +01:00
Vicent Llopis b5fd9fa50e config local
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:43:47 +01:00
Vicent Llopis 7e5693582f config in production
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:31:39 +01:00
Vicent Llopis 418f2894ff inserta todo el error en la vn.arcRead
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:17:56 +01:00
Vicent Llopis 3d3b5e61f1 add console.log
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-09 13:03:38 +01:00
Vicent Llopis 9c60b5b8c1 delete database
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-03-07 12:08:35 +01:00
Alex Moreno 4518ec100a fix: add util folder in dockerfile
gitea/vn-rfid/pipeline/head This commit looks good Details
2023-02-23 14:26:57 +01:00
Alex Moreno d0c1bb073b Merge pull request 'dev to master' (#3) from dev into master
gitea/vn-rfid/pipeline/head This commit looks good Details
Reviewed-on: #3
2023-02-23 13:20:36 +00:00
20 changed files with 5144 additions and 85 deletions

2
.gitignore vendored
View File

@@ -1,2 +1,2 @@
node_modules
config.local.yml
config.*.yml

View File

@@ -3,6 +3,6 @@
// Carácter predeterminado de final de línea.
"files.eol": "\n",
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true
"source.fixAll.eslint": "explicit"
}
}

View File

@@ -4,6 +4,10 @@ COPY package.json ./
RUN npm install
COPY db db
COPY src src
COPY config.yml ./
COPY server.js ./
COPY util util
COPY \
server.js \
config.yml \
npm-config.npmrc \
./
CMD ["node", "server.js"]

58
Jenkinsfile vendored
View File

@@ -2,64 +2,54 @@
pipeline {
agent any
options {
disableConcurrentBuilds()
}
environment {
PROJECT_NAME = 'vn-rfid'
STACK_NAME = "${env.PROJECT_NAME}-${env.BRANCH_NAME}"
}
stages {
stage('Checkout') {
stage('Setup') {
steps {
script {
switch (env.BRANCH_NAME) {
case 'master':
env.NODE_ENV = 'production'
break
}
}
setEnv()
echo "NODE_NAME: ${env.NODE_NAME}"
echo "WORKSPACE: ${env.WORKSPACE}"
}
}
stage('Build') {
when { anyOf {
when {
branch 'master'
}}
}
environment {
CREDENTIALS = credentials('docker-registry')
}
steps {
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
dockerBuild()
}
}
stage('Deploy') {
when { anyOf {
when {
branch 'master'
}}
environment {
DOCKER_HOST = "${env.SWARM_HOST}"
}
steps {
sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
withKubeConfig([
serverUrl: "$KUBERNETES_API",
credentialsId: 'kubernetes',
namespace: 'vn-rfid'
]) {
sh 'kubectl set image deployment/vn-rfid vn-rfid=$REGISTRY/vn-rfid:$VERSION'
}
}
}
}
post {
always {
script {
if (!env.COMMITTER_EMAIL || currentBuild.currentResult == 'SUCCESS') return;
try {
mail(
to: env.COMMITTER_EMAIL,
subject: "Pipeline: ${env.JOB_NAME} (${env.BUILD_NUMBER}): ${currentBuild.currentResult}",
body: "Check status at ${env.BUILD_URL}"
)
} catch (e) {
echo e.toString()
}
}
setEnv()
sendEmail()
}
}
}
}

View File

@@ -11,9 +11,12 @@ $ sudo npm install -g pino-pretty
Launch application in developer environment.
```
$ npm run start
or
$ npm run start | pino-pretty
```
For test.
```
DELETE expedition in vn.expeditionScan
```
DELETE expedition in vn.expeditionScan

View File

@@ -8,7 +8,6 @@ counterInterval: 1000
db:
host: host
port: 3307
database: srt
user: user
password: password
multipleStatements: false

4
db/connectMaster.js Normal file
View File

@@ -0,0 +1,4 @@
import mysql from 'mysql2/promise';
import getConfig from '../util/getConfig.js';
export default mysql.createPool(getConfig('production').db);

6
db/querys/getArcs.js Normal file
View File

@@ -0,0 +1,6 @@
import con from '../connect.js';
export default async() => {
const [arcs] = await con.query(`SELECT id as arcId, printerFk, ip, minimum FROM vn.arcRead;`);
return arcs;
};

View File

@@ -1,20 +1,7 @@
version: '3.7'
services:
main:
image: registry.verdnatura.es/vn-rfid:${BRANCH_NAME:?}
image: registry.verdnatura.es/vn-rfid:${VERSION:?}
build:
context: .
dockerfile: Dockerfile
ports:
- 8888
configs:
- source: config
target: /app/config.local.yml
deploy:
placement:
constraints:
- node.role == worker
configs:
config:
external: true
name: vn-rfid_config
dockerfile: Dockerfile

1
npm-config.npmrc Normal file
View File

@@ -0,0 +1 @@
engine-strict=true

View File

@@ -6,7 +6,11 @@
"main": "server.js",
"type": "module",
"scripts": {
"start": "nodemon ./server.js"
"start": "nodemon ./server.js | pino-pretty "
},
"engines": {
"node": ">=18",
"npm": ">=8"
},
"keywords": [],
"license": "GPL-3.0",

4906
pnpm-lock.yaml Normal file

File diff suppressed because it is too large Load Diff

57
populate.sql Normal file
View File

@@ -0,0 +1,57 @@
INSERT INTO vn.arcRead (id, printerFk, ip, counter, error, minimum) VALUES(1, NULL, '10.1.16.1', 17, NULL, 5);
CREATE TABLE `rfidTest` (
`palletFk` int(11) DEFAULT NULL,
`expeditionFk` int(11) DEFAULT NULL,
`created` timestamp NULL DEFAULT current_timestamp(),
`peakRssi` int(11) DEFAULT NULL,
`antenna` int(11) DEFAULT NULL,
`attempt` int(11) DEFAULT NULL,
`power` int(11) DEFAULT NULL,
`sensitivity` int(11) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;
CREATE TABLE `algorithm` (
`palletFk` int(11) DEFAULT NULL,
`expeditionArray` text DEFAULT NULL,
`created` timestamp NULL DEFAULT current_timestamp(),
`expeditionCount` int(11) DEFAULT NULL,
`model` text DEFAULT NULL,
`attempt` int(11) DEFAULT NULL,
`power` int(11) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;
-- vn.trys definition
-- vn.trys definition
CREATE TABLE `vn`.`trys` (
`palletFk` int(11) NOT NULL,
`missing` text DEFAULT NULL,
`powerType` varchar(255) NOT NULL DEFAULT 'V17',
`extra` varchar(100) DEFAULT NULL,
`antennaReads` text DEFAULT NULL,
`observationExtra` text DEFAULT NULL,
`observationMissing` text DEFAULT NULL,
`timestamp` timestamp NULL DEFAULT current_timestamp(),
`antenna1` int(11) DEFAULT NULL,
`antenna2` int(11) DEFAULT NULL,
`antenna3` int(11) DEFAULT NULL,
`antenna4` int(11) DEFAULT NULL,
`total` varchar(100) DEFAULT NULL,
PRIMARY KEY (`palletFk`,`powerType`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;
CREATE DEFINER=`root`@`localhost` TRIGGER `vn`.`trys_beforeUpdate`
BEFORE UPDATE ON `trys`
FOR EACH ROW
BEGIN
IF (NEW.missing > OLD.missing) THEN
CALL util.throw('New missing greater than old');
END IF;
END
-- powerTypes [PW13, PW15, V17, V19]

View File

@@ -1,15 +1,21 @@
import stream from './src/stream.js';
import getConfig from './util/getConfig.js';
import logger from 'pino';
import getArcs from './db/querys/getArcs.js';
console.logger = logger();
function main() {
async function main(arcId) {
const conf = getConfig();
stream(conf, e => {
console.logger.error(e);
setTimeout(main, conf.reconnectInterval);
});
for (let arc of await getArcs()) {
const config = Object.assign({}, conf, arc);
if (arcId && arcId != arc.arcId) continue;
console.logger.info(`ARC_ID:${config.arcId} is running...`);
stream(config, e => {
console.logger.error(e);
setTimeout(main, config.reconnectInterval, config.arcId);
});
}
}
main();

View File

@@ -1,12 +1,12 @@
import con from '../db/connect.js';
import counter from './counter.js';
import t from '../util/translator.js';
export default async(rfids, arcId) => {
try {
console.log(Array.from(rfids));
await con.query(`CALL vn.expeditionPallet_build(JSON_ARRAY(?), ?, ?, @palletId);`, [Array.from(rfids), arcId, null]);
await counter(null, arcId);
} catch (error) {
console.log(error);
await con.query(`UPDATE vn.arcRead SET error = ?, counter = NULL WHERE id = ?;`, [t(error.sqlMessage), arcId]);
}
};

View File

@@ -4,26 +4,27 @@ export default async data => {
const rfidsParsed = new Set();
const rfidsParsedExtended = [];
const RFID_PREFIX = 'AABB';
for (let crudeRfid of crudeRfids) {
if (crudeRfid && /{.*:{.*:.*}}/.test(crudeRfid)) {
const jsonResult = JSON.parse(crudeRfid);
let epcHex = jsonResult?.tagInventoryEvent?.epcHex;
if (!epcHex) continue;
if (epcHex.search('AABB') == -1) continue;
if (epcHex.search(RFID_PREFIX) == -1) continue;
epcHex = epcHex.replace('AABB', '');
epcHex = epcHex.replace(RFID_PREFIX, '');
epcHex = epcHex.substring(0, 1) == 0 ? epcHex.substring(1) : epcHex;
const rfidParsed = {
code: parseInt(epcHex),
created: jsonResult.timestamp,
peakRssi: jsonResult.tagInventoryEvent.peakRssiCdbm,
count: 1,
antenna: jsonResult.tagInventoryEvent.antennaPort
antenna: jsonResult.tagInventoryEvent.antennaPort,
transmitPowerCdbm: jsonResult.tagInventoryEvent.transmitPowerCdbm
};
const rfidsParsedExtended = [];
rfidsParsedExtended.push(rfidParsed);
rfidsParsed.add(rfidParsed.code);
}
@@ -31,3 +32,4 @@ export default async data => {
return {codes: rfidsParsed, extended: rfidsParsedExtended};
};

View File

@@ -21,7 +21,7 @@ export default async(conf, cb) => {
rfidbuffer = new Set([...rfidbuffer, ...parsed.codes]);
rfidbufferExtend = rfidbufferExtend.concat(parsed.extended);
debug({codes: rfidbuffer, extended: rfidbufferExtend}, conf);
// debug({codes: rfidbuffer, extended: rfidbufferExtend}, conf);
if (rfidbuffer.size) {
clearTimeout(interval);
@@ -39,10 +39,11 @@ export default async(conf, cb) => {
function createPallet() {
clearTimeout(interval);
newPallet(rfidbuffer, conf.arcId);
if (!conf.minimum || rfidbuffer.size > conf.minimum)
newPallet(rfidbuffer, conf.arcId);
rfidbuffer = new Set();
rfidbufferExtend = [];
counterIntervalManager();
}
function counterIntervalManager() {

93
util/debugMissing.js Normal file
View File

@@ -0,0 +1,93 @@
import con from '../db/connect.js';
import conMaster from '../db/connectMaster.js';
import fs from 'node:fs';
export default async uniqueRead => {
const reads = new Set([
...uniqueRead[0],
...uniqueRead[1],
...uniqueRead[2],
...uniqueRead[3],
]);
console.log('reads: ', [...reads][0]);
let [[palletFk]] = await conMaster.query(
`SELECT palletFk FROM expeditionScan WHERE expeditionFk IN (?)`,
[[...reads].join(',')]
);
palletFk = palletFk?.palletFk;
console.log('palletFk: ', palletFk);
if (!palletFk) return console.log('LA EXPEDICION NO esta en el pallet');
let [realExpeditions] = await conMaster.query(
`SELECT ep.id, e.id, ps.printerRfidFk, ps.code, es.palletFk
FROM expeditionPallet ep
JOIN expeditionScan es ON es.palletFk = ep.id
JOIN expedition e ON e.id = es.expeditionFk
JOIN host h ON h.code = e.hostFk COLLATE utf8mb3_unicode_ci
JOIN packingSite ps ON ps.hostFk = h.id
WHERE ep.id = ?
AND ps.printerRfidFk`,
[palletFk]
);
realExpeditions = realExpeditions.map(r => r.id);
console.log('realExpeditions: ', realExpeditions.length);
if (realExpeditions.length != reads.size)
console.logger.warn('MISSING EXPEDITIONS');
const missing = realExpeditions.filter(x => ![...reads].includes(x));
const extra = [...reads].filter(x => !realExpeditions.includes(x));
if (missing.length) console.warn('MISSING:', missing.length, missing);
if (extra.length) console.warn('EXTRA:', extra.length, extra);
try {
const [[currentMissings]] = await con.query('SELECT missing FROM trys WHERE palletFk = ?', palletFk);
if (currentMissings?.missing < missing.length) return console.log('PREVENT REPLACE', currentMissings.missing, missing.length);
await con.query(
`
REPLACE trys
SET palletFk = ?,
missing = ?,
extra = ?,
total = ?,
powerType = ?,
observationExtra = ?,
observationMissing = ?,
antenna1 = ?,
antenna2 = ?,
antenna3 = ?,
antenna4 = ?
`,
[
palletFk,
missing.length,
extra.length,
realExpeditions.length,
'VDT3',
extra.join(','),
missing.join(','),
uniqueRead[0].size,
uniqueRead[1].size,
uniqueRead[2].size,
uniqueRead[3].size,
]
);
await saveTable();
} catch (e) {
console.log('error debugging', palletFk, e);
}
};
async function saveTable() {
const [table] = await con.query(`SELECT * FROM trys WHERE DATE(timestamp) = CURDATE()`);
const date = new Date().toISOString().split('T')[0];
if (!table.length) return;
const file = fs.createWriteStream(`${date}.txt`);
file.on('error', function(err) {
console.error(err);
});
table.forEach(function(v) {
file.write(JSON.stringify(v) + '\n');
});
file.end();
}

View File

@@ -1,25 +1,21 @@
export default (parsed, conf) => {
import debugMissing from './debugMissing.js';
export default async(parsed, conf) => {
if (conf.env != 'dev') return;
// TOTAL
console.log('TOTAL BUFFER: ', parsed.codes.size);
// TOTAL READS BY ANTENNA
const totalRead = [0, 0, 0, 0];
for (let read of parsed.extended)
totalRead[read.antenna - 1]++;
console.log('TOTAL READ ANTENNA:', totalRead);
// UNIQUE READS BY ANTENNA
const uniqueRead = [new Set(), new Set(), new Set(), new Set()];
for (let read of parsed.extended)
uniqueRead[read.antenna - 1].add(read.code);
console.log('UNIQUE READ ANTENNA:', uniqueRead[0].size, uniqueRead[1].size, uniqueRead[2].size, uniqueRead[3].size);
// WARNING IF AN ANTENNA READS LESS THAN IT SHOULD
for (const [index, set] of uniqueRead.entries()) {
if (((set.size * 100) / parsed.codes.size) < 25)
console.logger.warn(`[ANTENNA]: ${index + 1} ONLY ${set.size}`);
}
debugMissing(uniqueRead);
console.log('----------------------------------------------------------------');
};

View File

@@ -2,10 +2,10 @@ import yml from 'require-yml';
import path from 'path';
import fs from 'fs-extra';
export default function getConfig() {
export default function getConfig(env = 'local') {
const {pathname: root} = new URL('../', import.meta.url);
let conf = yml(path.join(root, 'config.yml'));
const localConfFile = path.join(root, 'config.local.yml');
const localConfFile = path.join(root, `config.${env}.yml`);
if (fs.existsSync(localConfFile))
conf = Object.assign({}, conf, yml(localConfFile));