Merge branch 'master' of https://gitea.verdnatura.es/verdnatura/vn-rfid
gitea/vn-rfid/pipeline/head: This commit looks good
commit d29fd918cc

@@ -1,2 +1,2 @@
node_modules
config.local.yml
config.*.yml

@@ -3,6 +3,6 @@
// Default end-of-line character.
"files.eol": "\n",
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true
"source.fixAll.eslint": "explicit"
}
}

@@ -2,64 +2,54 @@

pipeline {
agent any
options {
disableConcurrentBuilds()
}
environment {
PROJECT_NAME = 'vn-rfid'
STACK_NAME = "${env.PROJECT_NAME}-${env.BRANCH_NAME}"
}
stages {
stage('Checkout') {
stage('Setup') {
steps {
script {
switch (env.BRANCH_NAME) {
case 'master':
env.NODE_ENV = 'production'
break
}
}

setEnv()
echo "NODE_NAME: ${env.NODE_NAME}"
echo "WORKSPACE: ${env.WORKSPACE}"
}
}
stage('Build') {
when { anyOf {
when {
branch 'master'
}}
}
environment {
CREDENTIALS = credentials('docker-registry')
}
steps {
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
dockerBuild()
}
}
stage('Deploy') {
when { anyOf {
when {
branch 'master'
}}
environment {
DOCKER_HOST = "${env.SWARM_HOST}"
}
steps {
sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
withKubeConfig([
serverUrl: "$KUBERNETES_API",
credentialsId: 'kubernetes',
namespace: 'vn-rfid'
]) {
sh 'kubectl set image deployment/vn-rfid vn-rfid=$REGISTRY/vn-rfid:$VERSION'
}
}
}
}
post {
always {
script {
if (!env.COMMITTER_EMAIL || currentBuild.currentResult == 'SUCCESS') return;
try {
mail(
to: env.COMMITTER_EMAIL,
subject: "Pipeline: ${env.JOB_NAME} (${env.BUILD_NUMBER}): ${currentBuild.currentResult}",
body: "Check status at ${env.BUILD_URL}"
)
} catch (e) {
echo e.toString()
}
}
setEnv()
sendEmail()
}
}
}
}

@@ -19,5 +19,4 @@ $ npm run start | pino-pretty

For test.
```
DELETE expedition in vn.expeditionScan
```
DELETE expedition in vn.expeditionScan
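The README note above is about repeating a test read: the expedition's row in vn.expeditionScan has to be deleted so the arc accepts it again. Below is a minimal sketch of doing that through the project's mysql2 pool; the relative import path and the expeditionFk filter are assumptions drawn from the queries elsewhere in this commit, not something the commit itself adds.

```js
// Hypothetical test helper: clear an expedition's scan record so it can be
// read onto a pallet again. Import path and column name are assumed.
import con from './db/connect.js';

export async function resetExpeditionScan(expeditionFk) {
    await con.query('DELETE FROM vn.expeditionScan WHERE expeditionFk = ?', [expeditionFk]);
}
```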

@@ -0,0 +1,4 @@
import mysql from 'mysql2/promise';
import getConfig from '../util/getConfig.js';
export default mysql.createPool(getConfig('production').db);

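This new module always builds its pool from the production config, regardless of the local environment; debugMissing.js later in this commit uses it to compare local reads against production data. A rough usage sketch (the script location, and therefore the import path, is assumed):

```js
// Sketch: querying the production pool exported above, with the same SELECT
// that debugMissing.js runs in this commit.
import conMaster from './db/connectMaster.js';

const [rows] = await conMaster.query(
    'SELECT palletFk FROM expeditionScan WHERE expeditionFk IN (?)',
    [[100, 101, 102].join(',')]
);
console.log(rows);
```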

@@ -1,20 +1,7 @@
version: '3.7'
services:
main:
image: registry.verdnatura.es/vn-rfid:${BRANCH_NAME:?}
image: registry.verdnatura.es/vn-rfid:${VERSION:?}
build:
context: .
dockerfile: Dockerfile
ports:
- 8888
configs:
- source: config
target: /app/config.local.yml
deploy:
placement:
constraints:
- node.role == worker
configs:
config:
external: true
name: vn-rfid_config
dockerfile: Dockerfile

@@ -6,11 +6,11 @@
"main": "server.js",
"type": "module",
"scripts": {
"start": "nodemon ./server.js"
"start": "nodemon ./server.js | pino-pretty "
},
"engines": {
"node": ">=18",
"npm": ">=8"
"npm": ">=8"
},
"keywords": [],
"license": "GPL-3.0",

File diff suppressed because it is too large

@@ -0,0 +1,57 @@
INSERT INTO vn.arcRead (id, printerFk, ip, counter, error, minimum) VALUES(1, NULL, '10.1.16.1', 17, NULL, 5);

CREATE TABLE `rfidTest` (
`palletFk` int(11) DEFAULT NULL,
`expeditionFk` int(11) DEFAULT NULL,
`created` timestamp NULL DEFAULT current_timestamp(),
`peakRssi` int(11) DEFAULT NULL,
`antenna` int(11) DEFAULT NULL,
`attempt` int(11) DEFAULT NULL,
`power` int(11) DEFAULT NULL,
`sensitivity` int(11) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;


CREATE TABLE `algorithm` (
`palletFk` int(11) DEFAULT NULL,
`expeditionArray` text DEFAULT NULL,
`created` timestamp NULL DEFAULT current_timestamp(),
`expeditionCount` int(11) DEFAULT NULL,
`model` text DEFAULT NULL,
`attempt` int(11) DEFAULT NULL,
`power` int(11) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;


-- vn.trys definition

-- vn.trys definition

CREATE TABLE `vn`.`trys` (
`palletFk` int(11) NOT NULL,
`missing` text DEFAULT NULL,
`powerType` varchar(255) NOT NULL DEFAULT 'V17',
`extra` varchar(100) DEFAULT NULL,
`antennaReads` text DEFAULT NULL,
`observationExtra` text DEFAULT NULL,
`observationMissing` text DEFAULT NULL,
`timestamp` timestamp NULL DEFAULT current_timestamp(),
`antenna1` int(11) DEFAULT NULL,
`antenna2` int(11) DEFAULT NULL,
`antenna3` int(11) DEFAULT NULL,
`antenna4` int(11) DEFAULT NULL,
`total` varchar(100) DEFAULT NULL,
PRIMARY KEY (`palletFk`,`powerType`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;


CREATE DEFINER=`root`@`localhost` TRIGGER `vn`.`trys_beforeUpdate`
BEFORE UPDATE ON `trys`
FOR EACH ROW
BEGIN
IF (NEW.missing > OLD.missing) THEN
CALL util.throw('New missing greater than old');
END IF;
END
-- powerTypes [PW13, PW15, V17, V19]

@@ -7,10 +7,10 @@ console.logger = logger();

async function main(arcId) {
const conf = getConfig();

for (let arc of await getArcs()) {
const config = Object.assign({}, conf, arc);
if (arcId && arcId != arc.arcId) continue;
console.logger.info(`ARC_ID:${config.arcId} is running...`);
stream(config, e => {
console.logger.error(e);
setTimeout(main, config.reconnectInterval, config.arcId);


@@ -3,7 +3,7 @@ import t from '../util/translator.js';

export default async(rfids, arcId) => {
try {
console.log(Array.from(rfids), arcId, null);
console.log(Array.from(rfids));
await con.query(`CALL vn.expeditionPallet_build(JSON_ARRAY(?), ?, ?, @palletId);`, [Array.from(rfids), arcId, null]);
} catch (error) {
console.log(error);

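This handler takes the buffered EPC codes plus the arc id and hands them to vn.expeditionPallet_build. A minimal invocation sketch mirroring how stream.js calls it in this commit; the module path is an assumption:

```js
// Sketch: build a pallet from a buffer of parsed EPC codes read by arc 1.
import newPallet from './newPallet.js'; // hypothetical path to the handler above

const rfidbuffer = new Set([123456, 123457, 123458]);
await newPallet(rfidbuffer, 1);
// -> CALL vn.expeditionPallet_build(JSON_ARRAY(...codes), 1, NULL, @palletId)
```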

@@ -4,26 +4,27 @@ export default async data => {

const rfidsParsed = new Set();
const rfidsParsedExtended = [];

const RFID_PREFIX = 'AABB';
for (let crudeRfid of crudeRfids) {
if (crudeRfid && /{.*:{.*:.*}}/.test(crudeRfid)) {
const jsonResult = JSON.parse(crudeRfid);
let epcHex = jsonResult?.tagInventoryEvent?.epcHex;

if (!epcHex) continue;
if (epcHex.search('AABB') == -1) continue;
if (epcHex.search(RFID_PREFIX) == -1) continue;

epcHex = epcHex.replace('AABB', '');
epcHex = epcHex.replace(RFID_PREFIX, '');
epcHex = epcHex.substring(0, 1) == 0 ? epcHex.substring(1) : epcHex;

const rfidParsed = {
code: parseInt(epcHex),
created: jsonResult.timestamp,
peakRssi: jsonResult.tagInventoryEvent.peakRssiCdbm,
count: 1,
antenna: jsonResult.tagInventoryEvent.antennaPort
antenna: jsonResult.tagInventoryEvent.antennaPort,
transmitPowerCdbm: jsonResult.tagInventoryEvent.transmitPowerCdbm
};

const rfidsParsedExtended = [];
rfidsParsedExtended.push(rfidParsed);
rfidsParsed.add(rfidParsed.code);
}

@@ -31,3 +32,4 @@ export default async data => {

return {codes: rfidsParsed, extended: rfidsParsedExtended};
};

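The parser keeps only reader events whose epcHex carries the AABB prefix, strips that prefix plus at most one leading zero, and turns the remainder into a numeric expedition code. A small worked sketch of that transformation (the sample payload value is made up):

```js
// Sketch of the EPC-to-code transformation performed above.
const RFID_PREFIX = 'AABB';
let epcHex = 'AABB0123456';                 // example tagInventoryEvent.epcHex

epcHex = epcHex.replace(RFID_PREFIX, '');   // '0123456'
epcHex = epcHex.substring(0, 1) == 0 ? epcHex.substring(1) : epcHex; // '123456'

console.log(parseInt(epcHex));              // 123456 -> expedition code
```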

@@ -21,7 +21,7 @@ export default async(conf, cb) => {
rfidbuffer = new Set([...rfidbuffer, ...parsed.codes]);
rfidbufferExtend = rfidbufferExtend.concat(parsed.extended);

debug({codes: rfidbuffer, extended: rfidbufferExtend}, conf);
// debug({codes: rfidbuffer, extended: rfidbufferExtend}, conf);

if (rfidbuffer.size) {
clearTimeout(interval);

@@ -41,7 +41,6 @@ export default async(conf, cb) => {

if (!conf.minimum || rfidbuffer.size > conf.minimum)
newPallet(rfidbuffer, conf.arcId);

rfidbuffer = new Set();
rfidbufferExtend = [];
counterIntervalManager();

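The guard above only builds a pallet when the buffer holds more codes than the arc's configured minimum; with the seed row added in rfid.sql (minimum = 5), a read of five or fewer codes is discarded. A tiny sketch of the check in isolation:

```js
// Sketch: the minimum-size guard applied to a finished read buffer.
const conf = {minimum: 5, arcId: 1};            // matches the vn.arcRead seed row
const rfidbuffer = new Set([1, 2, 3, 4, 5, 6]); // six unique codes read

if (!conf.minimum || rfidbuffer.size > conf.minimum)
    console.log(`building pallet for arc ${conf.arcId} with ${rfidbuffer.size} codes`);
```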

@@ -0,0 +1,93 @@
import con from '../db/connect.js';
import conMaster from '../db/connectMaster.js';
import fs from 'node:fs';

export default async uniqueRead => {
const reads = new Set([
...uniqueRead[0],
...uniqueRead[1],
...uniqueRead[2],
...uniqueRead[3],
]);

console.log('reads: ', [...reads][0]);
let [[palletFk]] = await conMaster.query(
`SELECT palletFk FROM expeditionScan WHERE expeditionFk IN (?)`,
[[...reads].join(',')]
);
palletFk = palletFk?.palletFk;
console.log('palletFk: ', palletFk);
if (!palletFk) return console.log('LA EXPEDICION NO esta en el pallet');
let [realExpeditions] = await conMaster.query(
`SELECT ep.id, e.id, ps.printerRfidFk, ps.code, es.palletFk
FROM expeditionPallet ep
JOIN expeditionScan es ON es.palletFk = ep.id
JOIN expedition e ON e.id = es.expeditionFk
JOIN host h ON h.code = e.hostFk COLLATE utf8mb3_unicode_ci
JOIN packingSite ps ON ps.hostFk = h.id
WHERE ep.id = ?
AND ps.printerRfidFk`,
[palletFk]
);
realExpeditions = realExpeditions.map(r => r.id);
console.log('realExpeditions: ', realExpeditions.length);

if (realExpeditions.length != reads.size)
console.logger.warn('MISSING EXPEDITIONS');
const missing = realExpeditions.filter(x => ![...reads].includes(x));
const extra = [...reads].filter(x => !realExpeditions.includes(x));
if (missing.length) console.warn('MISSING:', missing.length, missing);
if (extra.length) console.warn('EXTRA:', extra.length, extra);

try {
const [[currentMissings]] = await con.query('SELECT missing FROM trys WHERE palletFk = ?', palletFk);
if (currentMissings?.missing < missing.length) return console.log('PREVENT REPLACE', currentMissings.missing, missing.length);
await con.query(
`
REPLACE trys
SET palletFk = ?,
missing = ?,
extra = ?,
total = ?,
powerType = ?,
observationExtra = ?,
observationMissing = ?,
antenna1 = ?,
antenna2 = ?,
antenna3 = ?,
antenna4 = ?
`,
[
palletFk,
missing.length,
extra.length,
realExpeditions.length,
'VDT3',
extra.join(','),
missing.join(','),
uniqueRead[0].size,
uniqueRead[1].size,
uniqueRead[2].size,
uniqueRead[3].size,
]
);
await saveTable();
} catch (e) {
console.log('error debugging', palletFk, e);
}
};

async function saveTable() {
const [table] = await con.query(`SELECT * FROM trys WHERE DATE(timestamp) = CURDATE()`);
const date = new Date().toISOString().split('T')[0];
if (!table.length) return;

const file = fs.createWriteStream(`${date}.txt`);
file.on('error', function(err) {
console.error(err);
});
table.forEach(function(v) {
file.write(JSON.stringify(v) + '\n');
});
file.end();
}
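At its core the comparison above is two set differences between what the database says is on the pallet and what the antennas actually read. A stripped-down sketch with made-up ids:

```js
// Sketch of the missing/extra computation used in debugMissing above.
const reads = new Set([101, 102, 103, 999]);   // union of the four antennas' unique reads
const realExpeditions = [101, 102, 103, 104];  // expedition ids scanned onto the pallet

const missing = realExpeditions.filter(x => ![...reads].includes(x)); // [104]: on the pallet, never read
const extra = [...reads].filter(x => !realExpeditions.includes(x));   // [999]: read, but not on the pallet
console.log({missing, extra});
```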

@@ -1,25 +1,21 @@
export default (parsed, conf) => {

import debugMissing from './debugMissing.js';

export default async(parsed, conf) => {
if (conf.env != 'dev') return;
// TOTAL
console.log('TOTAL BUFFER: ', parsed.codes.size);

// TOTAL READS BY ANTENNA
const totalRead = [0, 0, 0, 0];
for (let read of parsed.extended)
totalRead[read.antenna - 1]++;
console.log('TOTAL READ ANTENNA:', totalRead);

// UNIQUE READS BY ANTENNA
const uniqueRead = [new Set(), new Set(), new Set(), new Set()];
for (let read of parsed.extended)
uniqueRead[read.antenna - 1].add(read.code);

console.log('UNIQUE READ ANTENNA:', uniqueRead[0].size, uniqueRead[1].size, uniqueRead[2].size, uniqueRead[3].size);

// WARNING IF AN ANTENNA READS LESS THAN IT SHOULD
for (const [index, set] of uniqueRead.entries()) {
if (((set.size * 100) / parsed.codes.size) < 25)
console.logger.warn(`[ANTENNA]: ${index + 1} ONLY ${set.size}`);
}

debugMissing(uniqueRead);
console.log('----------------------------------------------------------------');
};

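The warning loop above flags any antenna whose unique reads fall below 25% of the distinct codes in the buffer. A quick numeric sketch of when it fires:

```js
// Sketch: which antennas would trigger the under-read warning above.
const totalCodes = 100;                     // parsed.codes.size
const uniquePerAntenna = [40, 35, 20, 5];   // uniqueRead[n].size for antennas 1-4

for (const [index, size] of uniquePerAntenna.entries()) {
    if ((size * 100) / totalCodes < 25)
        console.warn(`[ANTENNA]: ${index + 1} ONLY ${size}`); // fires for antennas 3 and 4
}
```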

@@ -2,10 +2,10 @@ import yml from 'require-yml';
import path from 'path';
import fs from 'fs-extra';

export default function getConfig() {
export default function getConfig(env = 'local') {
const {pathname: root} = new URL('../', import.meta.url);
let conf = yml(path.join(root, 'config.yml'));
const localConfFile = path.join(root, 'config.local.yml');
const localConfFile = path.join(root, `config.${env}.yml`);
if (fs.existsSync(localConfFile))
conf = Object.assign({}, conf, yml(localConfFile));

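With the new env parameter the loader overlays config.yml with config.<env>.yml instead of always using config.local.yml; this is what lets connectMaster.js request the production database settings. A brief usage sketch (the host field on the db block is an assumption):

```js
// Sketch: environment-specific configuration loading after this change.
import getConfig from './util/getConfig.js';

const localConf = getConfig();              // config.yml merged with config.local.yml (default)
const prodConf = getConfig('production');   // config.yml merged with config.production.yml
console.log(prodConf.db.host);              // db block feeds mysql.createPool; host field assumed
```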